diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000000..022b84144a1 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,5 @@ +# +# https://help.github.com/articles/dealing-with-line-endings/ +# +# These are explicitly Windows files and should use crlf +*.bat text eol=crlf diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml new file mode 100644 index 00000000000..0a13df4a964 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug.yml @@ -0,0 +1,91 @@ +name: 🐛 ​ Bug report +description: Report an issue or problem. +labels: ["bug"] +body: + - type: markdown + attributes: + value: | + Hello! Thanks for reporting a bug to the spark team. + + Before going any further, please check to see if your bug has already been: + + * fixed (check the [changelog](https://spark.lucko.me/changelog)) + * reported by someone else (check the [open issues](https://github.com/lucko/spark/issues)) + + Please also consider raising the issue in our [Discord](https://discord.gg/PAGT2fu) before reporting it here. (you can often get help faster there!) + + Thanks! :) + - type: textarea + id: description + attributes: + label: Description + description: Please provide a short description of the issue in the space below. + placeholder: Description goes here! + validations: + required: true + - type: textarea + id: reproduction-steps + attributes: + label: Reproduction Steps + description: Please provide a clear list of steps we should follow in order to reproduce your issue in the space below. + placeholder: List reproduction steps here, please! + validations: + required: true + - type: textarea + id: expected-behaviour + attributes: + label: Expected Behaviour + description: Please provide a description of what you expected to happen. + placeholder: Describe what you expected to happen here. + validations: + required: true + - type: textarea + id: platform-version + attributes: + label: Platform Information + description: | + Please provide details about the platform that spark is running on. + + This *must* include the versions of all relevant components. + + * Minecraft Version - e.g. "Minecraft 1.19.4" + * Platform Type - e.g. "server", "client" or "proxy" + * Platform Brand - e.g. "Forge", "Fabric", "Paper" + * Platform Version - e.g. "Forge 47.1.44", "Paper git-Paper-124" + value: | + * Minecraft Version: + * Platform Type: + * Platform Brand: + * Platform Version: + validations: + required: true + - type: input + id: spark-version + attributes: + label: Spark Version + description: Please provide the exact version number of spark you are using. + placeholder: v1.10.50 + validations: + required: true + - type: textarea + id: logs + attributes: + label: Logs and Configs + description: If you think it would be helpful, please also include a pastebin of any relevant config files or log output. You should use https://gist.github.com/ to upload, then include the link below. + placeholder: https://gist.github.com/HelpfulBugReporter/ThisIsAVeryUsefulLogOutput + validations: + required: false + - type: textarea + id: extra-info + attributes: + label: Extra Details + description: Please include any other relevant details in the space below. + placeholder: I was able to reproduce this only at the end of a rainbow. + validations: + required: false + - type: markdown + attributes: + value: | + Please fill in the fields above with as much detail as possible before submitting. :) + + Thanks! 
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000000..6a6c4aa9ee7 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,5 @@ +blank_issues_enabled: false +contact_links: + - name: 💬 ​ Ask a question + url: https://discord.gg/PAGT2fu + about: We use Discord to provide support for the project. If you have a question, need help, or aren't sure if you've found a bug or not, please come and talk to us there! diff --git a/.github/ISSUE_TEMPLATE/suggestion.yml b/.github/ISSUE_TEMPLATE/suggestion.yml new file mode 100644 index 00000000000..f36b870e0ef --- /dev/null +++ b/.github/ISSUE_TEMPLATE/suggestion.yml @@ -0,0 +1,37 @@ +name: 💡 ​ Suggestion +description: Suggest an idea for an improvement or change. +labels: ["suggestion"] +body: + - type: markdown + attributes: + value: | + Before going any further, please check to see if your suggestion has already been: + + * implemented (check the [docs](https://spark.lucko.me/docs)) + * proposed by someone else (check the [open issues](https://github.com/lucko/spark/issues)) + + Thanks! + - type: textarea + id: description + attributes: + label: Description + description: Please provide a short description of your suggestion in the space below. + placeholder: Hello, perhaps you should... + validations: + required: true + - type: textarea + id: proposed-behaviour + attributes: + label: Proposed Behaviour + description: Please provide a short explanation of how the feature should work / be changed, and how this will affect the project. + placeholder: It would... + validations: + required: true + - type: textarea + id: extra-info + attributes: + label: Extra Details + description: Please include any other relevant details in the space below. + placeholder: I think this should only function at the end of a rainbow. + validations: + required: false \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000000..7e27bc995f3 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,52 @@ +name: Build Gradle + +on: + push: + branches: + - 'master' + tags: + - 'v*' + pull_request: + branches: + - 'master' + +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + +jobs: + build-gradle: + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + + runs-on: ${{ matrix.os }} + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Validate Gradle wrapper + uses: gradle/actions/wrapper-validation@v4 + + - name: Setup Java + uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: '21' + + - name: Setup Gradle + uses: gradle/actions/setup-gradle@v4 + + - name: Run build and tests with Gradle wrapper + run: ./gradlew test build + + - name: Upload all artifacts + uses: actions/upload-artifact@v4 + if: matrix.os == 'ubuntu-latest' + with: + name: jars + path: | + **/build/libs/spark-1.*-*.jar diff --git a/README.md b/README.md index 74db6fb566c..0c2fcb69726 100644 --- a/README.md +++ b/README.md @@ -37,8 +37,8 @@ It is: It works by sampling statistical data about the systems activity, and constructing a call graph based on this data. The call graph is then displayed in an online viewer for further analysis by the user. 
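To make the sampling approach concrete, here is a minimal, self-contained sketch of `ThreadMXBean`-based sampling (an illustration only, not spark's actual sampler; the class name, sample count, and interval are invented for this example): stacks are dumped periodically and each observed frame is counted, which is the kind of raw data a call graph is aggregated from.

```java
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.util.HashMap;
import java.util.Map;

// Illustrative only: dump all thread stacks at a fixed interval and count
// how often each frame is observed. spark's real engines add async-profiler,
// thread filtering, tick awareness, and proper aggregation on top of this idea.
public class SamplingSketch {
    public static void main(String[] args) throws InterruptedException {
        ThreadMXBean threads = ManagementFactory.getThreadMXBean();
        Map<String, Long> frameCounts = new HashMap<>();
        for (int i = 0; i < 100; i++) { // take 100 samples...
            for (ThreadInfo info : threads.dumpAllThreads(false, false)) {
                for (StackTraceElement frame : info.getStackTrace()) {
                    frameCounts.merge(frame.getClassName() + "#" + frame.getMethodName(), 1L, Long::sum);
                }
            }
            Thread.sleep(10); // ...at a 10ms interval
        }
        frameCounts.forEach((frame, count) -> System.out.println(count + "\t" + frame));
    }
}
```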
There are two different profiler engines: -* Native `AsyncGetCallTrace` + `perf_events` - uses [async-profiler](https://github.com/jvm-profiling-tools/async-profiler) (*only available on Linux x86_64 systems*) -* Built-in Java `ThreadMXBean` - an improved version of the popular [WarmRoast profiler](https://github.com/sk89q/WarmRoast) by sk89q. +* Native/Async - uses the [async-profiler](https://github.com/async-profiler/async-profiler) library (*only available on Linux & macOS systems*) +* Java - uses `ThreadMXBean`, an improved version of the popular [WarmRoast profiler](https://github.com/sk89q/WarmRoast) by sk89q. ### :zap: Memory Inspection diff --git a/build.gradle b/build.gradle index f96ec861d4d..76fd404b96d 100644 --- a/build.gradle +++ b/build.gradle @@ -2,13 +2,12 @@ plugins { id 'org.cadixdev.licenser' version '0.6.1' apply false } +import org.gradle.api.tasks.testing.logging.TestExceptionFormat +import org.gradle.api.tasks.testing.logging.TestLogEvent + allprojects { group = 'me.lucko' version = '1.10-SNAPSHOT' - - configurations { - compileClasspath // Fabric-loom needs this for remap jar for some reason - } } subprojects { @@ -22,12 +21,39 @@ subprojects { patchVersion = determinePatchVersion() pluginVersion = baseVersion + '.' + patchVersion pluginDescription = 'spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.' + + applyExcludes = { Jar jarTask -> + jarTask.exclude 'module-info.class' + jarTask.exclude 'META-INF/maven/**' + jarTask.exclude 'META-INF/proguard/**' + jarTask.exclude 'META-INF/LICENSE' + jarTask.exclude 'META-INF/NOTICE' + // protobuf + jarTask.exclude '**/*.proto' + jarTask.exclude '**/*.proto.bin' + // async-profiler + jarTask.exclude 'linux-arm64/**' + jarTask.exclude 'linux-x64/**' + jarTask.exclude 'macos/**' + } } - tasks.withType(JavaCompile) { - options.encoding = 'UTF-8' - if (JavaVersion.current() != JavaVersion.VERSION_1_8) { - options.release = 8 + if(it.name != "spark-forge1710") { + tasks.withType(JavaCompile).configureEach { + options.encoding = 'UTF-8' + if (JavaVersion.current() != JavaVersion.VERSION_1_8) { + options.release = 8 + } + } + } + + tasks.withType(Test).configureEach { + testLogging { + events = [TestLogEvent.PASSED, TestLogEvent.FAILED, TestLogEvent.SKIPPED] + exceptionFormat = TestExceptionFormat.FULL + showExceptions = true + showCauses = true + showStackTraces = true } } @@ -41,8 +67,18 @@ subprojects { } repositories { + // Fix issue with lwjgl-freetype not being found on macOS / ForgeGradle issue + // + // Could not resolve all files for configuration ':_compileJava_1'. + // Could not find lwjgl-freetype-3.3.3-natives-macos-patch.jar (org.lwjgl:lwjgl-freetype:3.3.3). 
+ maven { + url "https://libraries.minecraft.net" + content { + includeModule("org.lwjgl", "lwjgl-freetype") + } + } mavenCentral() - maven { url "https://oss.sonatype.org/content/repositories/snapshots/" } + //maven { url "https://central.sonatype.com/repository/maven-snapshots/" } maven { url "https://repo.lucko.me/" } maven { url "https://jitpack.io" } } diff --git a/gradle.properties b/gradle.properties index bf86fb7158f..546c1503807 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1 +1,3 @@ -org.gradle.jvmargs=-Xmx2G \ No newline at end of file +org.gradle.jvmargs=-Xmx2G + +rfg.disableDependencyDeobfuscation=true \ No newline at end of file diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 7454180f2ae..1b33c55baab 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index e750102e092..d4081da476b 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.3-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.3-bin.zip +networkTimeout=10000 +validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew index 1b6c787337f..23d15a93670 100755 --- a/gradlew +++ b/gradlew @@ -15,6 +15,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +# SPDX-License-Identifier: Apache-2.0 +# ############################################################################## # @@ -55,7 +57,7 @@ # Darwin, MinGW, and NonStop. # # (3) This script is generated from the Groovy template -# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt # within the Gradle project. # # You can find Gradle at https://github.com/gradle/gradle/. @@ -80,13 +82,11 @@ do esac done -APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit - -APP_NAME="Gradle" +# This is normally unused +# shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' +# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) +APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum @@ -114,7 +114,7 @@ case "$( uname )" in #( NONSTOP* ) nonstop=true ;; esac -CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar +CLASSPATH="\\\"\\\"" # Determine the Java command to use to start the JVM. @@ -133,22 +133,29 @@ location of your Java installation." fi else JAVACMD=java - which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + if ! command -v java >/dev/null 2>&1 + then + die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the location of your Java installation." 
+ fi fi # Increase the maximum file descriptors if we can. if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then case $MAX_FD in #( max*) + # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC2039,SC3045 MAX_FD=$( ulimit -H -n ) || warn "Could not query maximum file descriptor limit" esac case $MAX_FD in #( '' | soft) :;; #( *) + # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC2039,SC3045 ulimit -n "$MAX_FD" || warn "Could not set maximum file descriptor limit to $MAX_FD" esac @@ -193,18 +200,28 @@ if "$cygwin" || "$msys" ; then done fi -# Collect all arguments for the java command; -# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of -# shell script including quotes and variable substitutions, so put them in -# double quotes to make sure that they get re-expanded; and -# * put everything else in single quotes, so that it's not re-expanded. + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Collect all arguments for the java command: +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, +# and any embedded shellness will be escaped. +# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be +# treated as '${Hostname}' itself on the command line. set -- \ "-Dorg.gradle.appname=$APP_BASE_NAME" \ -classpath "$CLASSPATH" \ - org.gradle.wrapper.GradleWrapperMain \ + -jar "$APP_HOME/gradle/wrapper/gradle-wrapper.jar" \ "$@" +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + # Use "xargs" to parse quoted args. # # With -n1 it outputs one arg per line, with the quotes and backslashes removed. diff --git a/gradlew.bat b/gradlew.bat index 107acd32c4e..db3a6ac207e 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -13,8 +13,10 @@ @rem See the License for the specific language governing permissions and @rem limitations under the License. @rem +@rem SPDX-License-Identifier: Apache-2.0 +@rem -@if "%DEBUG%" == "" @echo off +@if "%DEBUG%"=="" @echo off @rem ########################################################################## @rem @rem Gradle startup script for Windows @@ -25,7 +27,8 @@ if "%OS%"=="Windows_NT" setlocal set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. +if "%DIRNAME%"=="" set DIRNAME=. +@rem This is normally unused set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% @@ -40,13 +43,13 @@ if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto execute +if %ERRORLEVEL% equ 0 goto execute -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. +echo. 1>&2 +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 goto fail @@ -56,32 +59,34 @@ set JAVA_EXE=%JAVA_HOME%/bin/java.exe if exist "%JAVA_EXE%" goto execute -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. 
-echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. +echo. 1>&2 +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 goto fail :execute @rem Setup the command line -set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar +set CLASSPATH= @rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" -jar "%APP_HOME%\gradle\wrapper\gradle-wrapper.jar" %* :end @rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd +if %ERRORLEVEL% equ 0 goto mainEnd :fail rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of rem the _cmd.exe /c_ return code! -if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% :mainEnd if "%OS%"=="Windows_NT" endlocal diff --git a/settings.gradle b/settings.gradle index fe12f645abd..ae9a9c00737 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1,13 +1,34 @@ pluginManagement { repositories { + gradlePluginPortal() maven { name = 'Fabric' url = 'https://maven.fabricmc.net/' } - gradlePluginPortal() + maven { + name = 'Forge' + url = 'https://maven.minecraftforge.net/' + } + maven { + name = 'NeoForge' + url = 'https://maven.neoforged.net/releases' + } + maven { + // RetroFuturaGradle + name = "GTNH Maven" + url = uri("https://nexus.gtnewhorizons.com/repository/public/") + mavenContent { + includeGroupByRegex("com\\.gtnewhorizons\\..+") + includeGroup("com.gtnewhorizons") + } + } } } +plugins { + id 'org.gradle.toolchains.foojay-resolver-convention' version '0.8.0' +} + rootProject.name = 'spark' include ( 'spark-api', diff --git a/spark-api/build.gradle b/spark-api/build.gradle index 0fbe9e1ac2a..9d5cc424527 100644 --- a/spark-api/build.gradle +++ b/spark-api/build.gradle @@ -5,21 +5,26 @@ plugins { version = '0.1-SNAPSHOT' dependencies { - compileOnly 'org.checkerframework:checker-qual:3.8.0' - compileOnly 'org.jetbrains:annotations:20.1.0' + compileOnly 'org.checkerframework:checker-qual:3.44.0' + compileOnly 'org.jetbrains:annotations:24.1.0' } license { header = project.file('HEADER.txt') } +java { + withSourcesJar() + withJavadocJar() +} + publishing { //repositories { // maven { - // url = 'https://oss.sonatype.org/content/repositories/snapshots' + // url = 'https://nexus.lucko.me/repository/maven-snapshots/' // credentials { - // username = sonatypeUsername - // password = sonatypePassword + // username = luckoNexusUsername + // password = luckoNexusPassword // } // } //} diff --git a/spark-api/src/main/java/me/lucko/spark/api/Spark.java b/spark-api/src/main/java/me/lucko/spark/api/Spark.java index 653eb5361e2..a5f20d6b325 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/Spark.java +++ b/spark-api/src/main/java/me/lucko/spark/api/Spark.java @@ -26,10 +26,10 @@ package me.lucko.spark.api; import me.lucko.spark.api.gc.GarbageCollector; +import me.lucko.spark.api.placeholder.PlaceholderResolver; import me.lucko.spark.api.statistic.misc.DoubleAverageInfo; import 
me.lucko.spark.api.statistic.types.DoubleStatistic; import me.lucko.spark.api.statistic.types.GenericStatistic; - import org.checkerframework.checker.nullness.qual.NonNull; import org.checkerframework.checker.nullness.qual.Nullable; import org.jetbrains.annotations.Unmodifiable; @@ -84,4 +84,12 @@ public interface Spark { */ @NonNull @Unmodifiable Map<String, GarbageCollector> gc(); + /** + * Gets a placeholder resolver. + * + * @return a placeholder resolver + */ + @NonNull + PlaceholderResolver placeholders(); + } diff --git a/spark-api/src/main/java/me/lucko/spark/api/placeholder/PlaceholderResolver.java b/spark-api/src/main/java/me/lucko/spark/api/placeholder/PlaceholderResolver.java new file mode 100644 index 00000000000..20834cb7d95 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/placeholder/PlaceholderResolver.java @@ -0,0 +1,54 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.placeholder; + +import org.checkerframework.checker.nullness.qual.NonNull; +import org.checkerframework.checker.nullness.qual.Nullable; + +/** + * Resolves spark placeholders. + * + *
<p>See spark docs for more info.</p>
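+ * + * <p>For example, assuming a placeholder named "tps" exists:</p> + * <pre>{@code + * PlaceholderResolver resolver = SparkProvider.get().placeholders(); + * String tps = resolver.resolveLegacyFormatting("tps"); + * }</pre>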
+ */ +public interface PlaceholderResolver { + + /** + * Resolves the given placeholder to a legacy formatted string. + * + * @param placeholder the placeholder to resolve + * @return the resolved placeholder + */ + @Nullable String resolveLegacyFormatting(@NonNull String placeholder); + + /** + * Resolves the given placeholder to a text component serialised to json. + * + * @param placeholder the placeholder to resolve + * @return the resolved placeholder + */ + @Nullable String resolveComponentJson(@NonNull String placeholder); + +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/statistic/types/DoubleStatistic.java b/spark-api/src/main/java/me/lucko/spark/api/statistic/types/DoubleStatistic.java index 50c484e3ab1..e5486e17c5f 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/statistic/types/DoubleStatistic.java +++ b/spark-api/src/main/java/me/lucko/spark/api/statistic/types/DoubleStatistic.java @@ -27,7 +27,6 @@ import me.lucko.spark.api.statistic.Statistic; import me.lucko.spark.api.statistic.StatisticWindow; - import org.checkerframework.checker.nullness.qual.NonNull; /** diff --git a/spark-api/src/main/java/me/lucko/spark/api/statistic/types/GenericStatistic.java b/spark-api/src/main/java/me/lucko/spark/api/statistic/types/GenericStatistic.java index c91801b6c0e..a2d298daed1 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/statistic/types/GenericStatistic.java +++ b/spark-api/src/main/java/me/lucko/spark/api/statistic/types/GenericStatistic.java @@ -27,7 +27,6 @@ import me.lucko.spark.api.statistic.Statistic; import me.lucko.spark.api.statistic.StatisticWindow; - import org.checkerframework.checker.nullness.qual.NonNull; /** diff --git a/spark-bukkit/build.gradle b/spark-bukkit/build.gradle index 4c33dfc55e3..9b6dd473900 100644 --- a/spark-bukkit/build.gradle +++ b/spark-bukkit/build.gradle @@ -1,11 +1,15 @@ plugins { - id 'com.github.johnrengelman.shadow' version '7.0.0' + id 'com.gradleup.shadow' version '8.3.8' +} + +java { + disableAutoTargetJvm() } dependencies { implementation project(':spark-common') - implementation 'net.kyori:adventure-platform-bukkit:4.2.0' - compileOnly 'com.destroystokyo.paper:paper-api:1.16.4-R0.1-SNAPSHOT' + implementation 'net.kyori:adventure-platform-bukkit:4.4.0' + compileOnly 'io.papermc.paper:paper-api:1.21.1-R0.1-SNAPSHOT' // placeholders compileOnly 'me.clip:placeholderapi:2.10.3' @@ -29,19 +33,19 @@ processResources { } shadowJar { - archiveName = "spark-${project.pluginVersion}-bukkit.jar" + archiveFileName = "spark-${project.pluginVersion}-bukkit.jar" relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' + relocate 'net.kyori.option', 'me.lucko.spark.lib.adventure.option' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks' + relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws' - exclude 'module-info.class' - exclude 'META-INF/maven/**' - exclude 'META-INF/proguard/**' + project.applyExcludes(delegate) } artifacts { diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java index f9c0c0b998e..e505907cdac 100644 --- 
a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java @@ -21,7 +21,6 @@ package me.lucko.spark.bukkit; import me.lucko.spark.common.sampler.source.ClassSourceLookup; - import org.bukkit.plugin.java.JavaPlugin; import java.lang.reflect.Field; @@ -30,11 +29,22 @@ public class BukkitClassSourceLookup extends ClassSourceLookup.ByClassLoader { private static final Class<?> PLUGIN_CLASS_LOADER; private static final Field PLUGIN_FIELD; + private static final Class<?> PAPER_PLUGIN_CLASS_LOADER; + private static final Field PAPER_PLUGIN_FIELD; + static { try { PLUGIN_CLASS_LOADER = Class.forName("org.bukkit.plugin.java.PluginClassLoader"); PLUGIN_FIELD = PLUGIN_CLASS_LOADER.getDeclaredField("plugin"); PLUGIN_FIELD.setAccessible(true); + + PAPER_PLUGIN_CLASS_LOADER = getPaperPluginClassLoader(); + if (PAPER_PLUGIN_CLASS_LOADER != null) { + PAPER_PLUGIN_FIELD = PAPER_PLUGIN_CLASS_LOADER.getDeclaredField("loadedJavaPlugin"); + PAPER_PLUGIN_FIELD.setAccessible(true); + } else { + PAPER_PLUGIN_FIELD = null; + } } catch (ReflectiveOperationException e) { throw new ExceptionInInitializerError(e); } @@ -45,8 +55,19 @@ public String identify(ClassLoader loader) throws ReflectiveOperationException { if (PLUGIN_CLASS_LOADER.isInstance(loader)) { JavaPlugin plugin = (JavaPlugin) PLUGIN_FIELD.get(loader); return plugin.getName(); + } else if (PAPER_PLUGIN_CLASS_LOADER != null && PAPER_PLUGIN_CLASS_LOADER.isInstance(loader)) { + JavaPlugin plugin = (JavaPlugin) PAPER_PLUGIN_FIELD.get(loader); + return plugin.getName(); } return null; } + + private static Class<?> getPaperPluginClassLoader() { + try { + return Class.forName("io.papermc.paper.plugin.entrypoint.classloader.PaperPluginClassLoader"); + } catch (ClassNotFoundException e) { + return null; + } + } } diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitCommandSender.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitCommandSender.java index df6f6581422..11f7e9ec007 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitCommandSender.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitCommandSender.java @@ -21,11 +21,9 @@ package me.lucko.spark.bukkit; import me.lucko.spark.common.command.sender.AbstractCommandSender; - import net.kyori.adventure.audience.Audience; import net.kyori.adventure.platform.bukkit.BukkitAudiences; import net.kyori.adventure.text.Component; - import org.bukkit.command.CommandSender; import org.bukkit.entity.Player; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlatformInfo.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlatformInfo.java index 2bf17accc45..60522f4bf3c 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlatformInfo.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlatformInfo.java @@ -21,7 +21,6 @@ package me.lucko.spark.bukkit; import me.lucko.spark.common.platform.PlatformInfo; - import org.bukkit.Server; import java.lang.reflect.Field; @@ -44,6 +43,11 @@ public String getName() { return "Bukkit"; } + @Override + public String getBrand() { + return this.server.getName(); + } + @Override public String getVersion() { return this.server.getVersion(); diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlayerPingProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlayerPingProvider.java index 2cf58cf0714..988ff9f01c9 100644 ---
a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlayerPingProvider.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlayerPingProvider.java @@ -21,9 +21,7 @@ package me.lucko.spark.bukkit; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.common.monitor.ping.PlayerPingProvider; - import org.bukkit.Server; import org.bukkit.entity.Player; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java index 5db1b38c6d4..e5181406f52 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java @@ -20,6 +20,7 @@ package me.lucko.spark.bukkit; +import co.aikar.timings.TimingsManager; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.gson.Gson; @@ -27,19 +28,15 @@ import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonSerializer; - import me.lucko.spark.common.platform.serverconfig.ConfigParser; import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter; import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; - import org.bukkit.Bukkit; import org.bukkit.World; import org.bukkit.configuration.MemorySection; import org.bukkit.configuration.file.YamlConfiguration; -import co.aikar.timings.TimingsManager; - import java.io.BufferedReader; import java.io.IOException; import java.nio.file.Files; @@ -134,7 +131,8 @@ private static Map getNestedFiles(Path configDir, String prefix) { .put("spigot.yml", YamlConfigParser.INSTANCE) .put("paper.yml", YamlConfigParser.INSTANCE) .put("paper/", SplitYamlConfigParser.INSTANCE) - .put("purpur.yml", YamlConfigParser.INSTANCE); + .put("purpur.yml", YamlConfigParser.INSTANCE) + .put("pufferfish.yml", YamlConfigParser.INSTANCE); for (String config : getSystemPropertyList("spark.serverconfigs.extra")) { files.put(config, YamlConfigParser.INSTANCE); @@ -149,6 +147,7 @@ private static Map getNestedFiles(Path configDir, String prefix) { .add("motd") .add("resource-pack") .add("rconpassword") + .add("rconip") .add("level-seed") .add("world-settings.*.feature-seeds") .add("world-settings.*.seed-*") diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java index 87490ea2b3e..e40c5845333 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java @@ -34,9 +34,7 @@ import me.lucko.spark.common.sampler.source.SourceMetadata; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; - import net.kyori.adventure.platform.bukkit.BukkitAudiences; - import org.bukkit.ChatColor; import org.bukkit.command.Command; import org.bukkit.command.CommandExecutor; @@ -62,6 +60,15 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin { @Override public void onEnable() { + boolean detectedSparkMod = classExists("me.lucko.spark.forge.ForgeSparkMod") + || classExists("me.lucko.spark.fabric.FabricSparkMod") + || classExists("me.lucko.spark.neoforge.NeoForgeSparkMod"); + if (detectedSparkMod) { + getLogger().warning("The spark Bukkit plugin should not be 
installed when running hybrid Bukkit/modded servers if the spark mod is also installed. Disabling."); + getServer().getPluginManager().disablePlugin(this); + return; + } + this.audienceFactory = BukkitAudiences.create(this); this.gameThreadDumper = new ThreadDumper.Specific(Thread.currentThread()); @@ -100,7 +107,9 @@ public boolean hasPermission(String permission) { @Override public void onDisable() { - this.platform.disable(); + if (this.platform != null) { + this.platform.disable(); + } if (this.tpsCommand != null) { CommandMapUtil.unregisterCommand(this.tpsCommand); } @@ -155,6 +164,11 @@ public void log(Level level, String msg) { getLogger().log(level, msg); } + @Override + public void log(Level level, String msg, Throwable throwable) { + getLogger().log(level, msg, throwable); + } + @Override public ThreadDumper getDefaultThreadDumper() { return this.gameThreadDumper; @@ -173,7 +187,7 @@ public TickHook createTickHook() { @Override public TickReporter createTickReporter() { - if (classExists("com.destroystokyo.paper.event.server.ServerTickStartEvent")) { + if (classExists("com.destroystokyo.paper.event.server.ServerTickEndEvent")) { return new PaperTickReporter(this); } return null; @@ -190,7 +204,8 @@ public Collection getKnownSources() { Arrays.asList(getServer().getPluginManager().getPlugins()), Plugin::getName, plugin -> plugin.getDescription().getVersion(), - plugin -> String.join(", ", plugin.getDescription().getAuthors()) + plugin -> String.join(", ", plugin.getDescription().getAuthors()), + plugin -> plugin.getDescription().getDescription() ); } diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickHook.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickHook.java index 6547971b8cf..eb49e0bf65b 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickHook.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickHook.java @@ -22,7 +22,6 @@ import me.lucko.spark.common.tick.AbstractTickHook; import me.lucko.spark.common.tick.TickHook; - import org.bukkit.plugin.Plugin; import org.bukkit.scheduler.BukkitTask; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java index babb0bc774a..303cc3eea12 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java @@ -23,8 +23,8 @@ import me.lucko.spark.common.platform.world.AbstractChunkInfo; import me.lucko.spark.common.platform.world.CountMap; import me.lucko.spark.common.platform.world.WorldInfoProvider; - import org.bukkit.Chunk; +import org.bukkit.GameRule; import org.bukkit.Server; import org.bukkit.World; import org.bukkit.block.BlockState; @@ -32,10 +32,16 @@ import org.bukkit.entity.EntityType; import java.util.ArrayList; +import java.util.Collection; import java.util.List; +import java.util.Locale; +import java.util.Objects; +import java.util.stream.Collectors; public class BukkitWorldInfoProvider implements WorldInfoProvider { private static final boolean SUPPORTS_PAPER_COUNT_METHODS; + private static final boolean SUPPORTS_GAMERULES; + private static final boolean SUPPORTS_DATAPACKS; static { boolean supportsPaperCountMethods = false; @@ -47,7 +53,27 @@ public class BukkitWorldInfoProvider implements WorldInfoProvider { } catch (Exception e) { // ignored } + + boolean supportsGameRules = false; + try { + Class.forName("org.bukkit.GameRule"); 
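+ // note: GameRule and the typed World#getGameRuleValue(GameRule) overload only exist on Bukkit 1.13+, hence this reflective probe before game-rule polling is enabled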
+ World.class.getMethod("getGameRuleValue", GameRule.class); + supportsGameRules = true; + } catch (Exception e) { + // ignored + } + + boolean supportsDataPacks = false; + try { + Server.class.getMethod("getDataPackManager"); + supportsDataPacks = true; + } catch (Exception e) { + // ignored + } + SUPPORTS_PAPER_COUNT_METHODS = supportsPaperCountMethods; + SUPPORTS_GAMERULES = supportsGameRules; + SUPPORTS_DATAPACKS = supportsDataPacks; } private final Server server; @@ -111,6 +137,53 @@ public ChunksResult<BukkitChunkInfo> pollChunks() { return data; } + @Override + public GameRulesResult pollGameRules() { + if (!SUPPORTS_GAMERULES) { + return null; + } + + GameRulesResult data = new GameRulesResult(); + + boolean addDefaults = true; // add defaults in the first iteration + for (World world : this.server.getWorlds()) { + for (String gameRule : world.getGameRules()) { + GameRule ruleObj = GameRule.getByName(gameRule); + if (ruleObj == null) { + continue; + } + + if (addDefaults) { + Object defaultValue = world.getGameRuleDefault(ruleObj); + data.putDefault(gameRule, Objects.toString(defaultValue)); + } + + Object value = world.getGameRuleValue(ruleObj); + data.put(gameRule, world.getName(), Objects.toString(value)); + } + + addDefaults = false; + } + + return data; + } + + @SuppressWarnings("removal") + @Override + public Collection<DataPackInfo> pollDataPacks() { + if (!SUPPORTS_DATAPACKS) { + return null; + } + + return this.server.getDataPackManager().getDataPacks().stream() + .map(pack -> new DataPackInfo( + pack.getTitle(), + pack.getDescription(), + pack.getSource().name().toLowerCase(Locale.ROOT).replace("_", "") + )) + .collect(Collectors.toList()); + } + static final class BukkitChunkInfo extends AbstractChunkInfo<EntityType> { private final CountMap<EntityType> entityCounts; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java index e604321f8b8..9240d9b2f14 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java @@ -21,7 +21,6 @@ package me.lucko.spark.bukkit; import com.google.common.base.Preconditions; - import org.bukkit.Bukkit; import org.bukkit.command.Command; import org.bukkit.command.CommandExecutor; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/PaperTickHook.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/PaperTickHook.java index 43d5a24786a..d245623d8d6 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/PaperTickHook.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/PaperTickHook.java @@ -21,10 +21,8 @@ package me.lucko.spark.bukkit; import com.destroystokyo.paper.event.server.ServerTickStartEvent; - import me.lucko.spark.common.tick.AbstractTickHook; import me.lucko.spark.common.tick.TickHook; - import org.bukkit.event.EventHandler; import org.bukkit.event.HandlerList; import org.bukkit.event.Listener; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/PaperTickReporter.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/PaperTickReporter.java index f4a1ee92773..587196e6630 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/PaperTickReporter.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/PaperTickReporter.java @@ -21,10 +21,8 @@ package me.lucko.spark.bukkit; import com.destroystokyo.paper.event.server.ServerTickEndEvent; - import me.lucko.spark.common.tick.AbstractTickReporter; import me.lucko.spark.common.tick.TickReporter; - import
org.bukkit.event.EventHandler; import org.bukkit.event.HandlerList; import org.bukkit.event.Listener; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java index 7fa6e02c713..11dadc99f50 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java @@ -20,13 +20,12 @@ package me.lucko.spark.bukkit.placeholder; -import me.lucko.spark.bukkit.BukkitSparkPlugin; -import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.util.SparkPlaceholder; - import be.maximvdw.placeholderapi.PlaceholderAPI; import be.maximvdw.placeholderapi.PlaceholderReplaceEvent; import be.maximvdw.placeholderapi.PlaceholderReplacer; +import me.lucko.spark.bukkit.BukkitSparkPlugin; +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.util.SparkPlaceholder; public class SparkMVdWPlaceholders implements PlaceholderReplacer { private final SparkPlatform platform; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java index b3919ddd5ae..7c599a75259 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java @@ -24,7 +24,6 @@ import me.lucko.spark.bukkit.BukkitSparkPlugin; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.util.SparkPlaceholder; - import org.bukkit.OfflinePlayer; import org.bukkit.entity.Player; diff --git a/spark-bungeecord/build.gradle b/spark-bungeecord/build.gradle index 7e6b93f05c6..a84080b125a 100644 --- a/spark-bungeecord/build.gradle +++ b/spark-bungeecord/build.gradle @@ -1,11 +1,13 @@ plugins { - id 'com.github.johnrengelman.shadow' version '7.0.0' + id 'com.gradleup.shadow' version '8.3.8' } dependencies { implementation project(':spark-common') - implementation 'net.kyori:adventure-platform-bungeecord:4.2.0' - compileOnly 'net.md-5:bungeecord-api:1.16-R0.4' + implementation 'net.kyori:adventure-platform-bungeecord:4.4.0' + compileOnly('net.md-5:bungeecord-api:1.19-R0.1-SNAPSHOT') { + exclude(module: 'bungeecord-protocol') + } } processResources { @@ -19,19 +21,19 @@ processResources { } shadowJar { - archiveName = "spark-${project.pluginVersion}-bungeecord.jar" + archiveFileName = "spark-${project.pluginVersion}-bungeecord.jar" relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' + relocate 'net.kyori.option', 'me.lucko.spark.lib.adventure.option' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks' + relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws' - exclude 'module-info.class' - exclude 'META-INF/maven/**' - exclude 'META-INF/proguard/**' + project.applyExcludes(delegate) } artifacts { diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordClassSourceLookup.java 
b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordClassSourceLookup.java index 2024d54fd7e..b0665fd6e81 100644 --- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordClassSourceLookup.java +++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordClassSourceLookup.java @@ -21,7 +21,6 @@ package me.lucko.spark.bungeecord; import me.lucko.spark.common.sampler.source.ClassSourceLookup; - import net.md_5.bungee.api.plugin.PluginDescription; import java.lang.reflect.Field; diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordCommandSender.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordCommandSender.java index 917111a60a5..51d1a7d512d 100644 --- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordCommandSender.java +++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordCommandSender.java @@ -21,7 +21,6 @@ package me.lucko.spark.bungeecord; import me.lucko.spark.common.command.sender.AbstractCommandSender; - import net.kyori.adventure.audience.Audience; import net.kyori.adventure.platform.bungeecord.BungeeAudiences; import net.kyori.adventure.text.Component; diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlatformInfo.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlatformInfo.java index fc5c5886d6e..df8cc8dc856 100644 --- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlatformInfo.java +++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlatformInfo.java @@ -21,7 +21,6 @@ package me.lucko.spark.bungeecord; import me.lucko.spark.common.platform.PlatformInfo; - import net.md_5.bungee.api.ProxyServer; public class BungeeCordPlatformInfo implements PlatformInfo { @@ -41,6 +40,11 @@ public String getName() { return "BungeeCord"; } + @Override + public String getBrand() { + return this.proxy.getName(); + } + @Override public String getVersion() { return this.proxy.getVersion(); diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlayerPingProvider.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlayerPingProvider.java index 37955a30a9c..fcb3e31ad43 100644 --- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlayerPingProvider.java +++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlayerPingProvider.java @@ -21,9 +21,7 @@ package me.lucko.spark.bungeecord; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.common.monitor.ping.PlayerPingProvider; - import net.md_5.bungee.api.ProxyServer; import net.md_5.bungee.api.connection.ProxiedPlayer; diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java index 71beddb1ceb..c8cafb32efc 100644 --- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java +++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java @@ -26,7 +26,6 @@ import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.sampler.source.SourceMetadata; - import net.kyori.adventure.platform.bungeecord.BungeeAudiences; import net.md_5.bungee.api.CommandSender; import net.md_5.bungee.api.plugin.Command; @@ -88,6 +87,11 @@ public void log(Level level, String msg) { 
getLogger().log(level, msg); } + @Override + public void log(Level level, String msg, Throwable throwable) { + getLogger().log(level, msg, throwable); + } + @Override public ClassSourceLookup createClassSourceLookup() { return new BungeeCordClassSourceLookup(); @@ -99,7 +103,8 @@ public Collection<SourceMetadata> getKnownSources() { getProxy().getPluginManager().getPlugins(), plugin -> plugin.getDescription().getName(), plugin -> plugin.getDescription().getVersion(), - plugin -> plugin.getDescription().getAuthor() + plugin -> plugin.getDescription().getAuthor(), + plugin -> plugin.getDescription().getDescription() ); } @@ -130,5 +135,10 @@ public void execute(CommandSender sender, String[] args) { public Iterable<String> onTabComplete(CommandSender sender, String[] args) { return this.plugin.platform.tabCompleteCommand(new BungeeCordCommandSender(sender, this.plugin.audienceFactory), args); } + + @Override + public boolean hasPermission(CommandSender sender) { + return this.plugin.platform.hasPermissionForAnyCommand(new BungeeCordCommandSender(sender, this.plugin.audienceFactory)); + } } } diff --git a/spark-common/build.gradle b/spark-common/build.gradle index 514c5dd7455..1000d62bc9f 100644 --- a/spark-common/build.gradle +++ b/spark-common/build.gradle @@ -1,39 +1,39 @@ -import org.cadixdev.gradle.licenser.LicenseExtension - plugins { - id 'com.google.protobuf' version '0.9.1' + id 'com.google.protobuf' version '0.9.5' + id 'maven-publish' } license { exclude '**/sampler/async/jfr/**' -} - -extensions.configure(LicenseExtension.class) { - it.exclude { + exclude { it.file.toString().startsWith(buildDir.toString()) } } dependencies { api project(':spark-api') - implementation 'com.github.jvm-profiling-tools:async-profiler:v2.8.3' - implementation 'org.ow2.asm:asm:9.1' - implementation 'net.bytebuddy:byte-buddy-agent:1.11.0' - implementation 'com.google.protobuf:protobuf-javalite:3.21.11' - implementation 'me.lucko:bytesocks-java-client-api:1.0-SNAPSHOT' + + implementation 'tools.profiler:async-profiler:4.0' // spark native version: 87b7b42 (v4.0) + implementation 'org.ow2.asm:asm:9.7' + implementation 'net.bytebuddy:byte-buddy-agent:1.14.17' + implementation 'com.google.protobuf:protobuf-javalite:4.31.1' implementation 'com.neovisionaries:nv-websocket-client:2.14' - api('net.kyori:adventure-api:4.12.0') { + implementation('me.lucko:bytesocks-java-client:1.0-SNAPSHOT') { + exclude(module: 'slf4j-api') + } + + api('net.kyori:adventure-api:4.21.0') { exclude(module: 'adventure-bom') exclude(module: 'checker-qual') exclude(module: 'annotations') } - api('net.kyori:adventure-text-serializer-gson:4.12.0') { + api('net.kyori:adventure-text-serializer-gson:4.21.0') { exclude(module: 'adventure-bom') exclude(module: 'adventure-api') exclude(module: 'gson') } - api('net.kyori:adventure-text-serializer-legacy:4.12.0') { + api('net.kyori:adventure-text-serializer-legacy:4.21.0') { exclude(module: 'adventure-bom') exclude(module: 'adventure-api') } @@ -42,12 +42,28 @@ dependencies { } compileOnly 'com.google.code.gson:gson:2.7' compileOnly 'com.google.guava:guava:19.0' - compileOnly 'org.checkerframework:checker-qual:3.8.0' + compileOnly 'org.checkerframework:checker-qual:3.44.0' + + testImplementation 'org.junit.jupiter:junit-jupiter-api:5.11.0-M2' + testImplementation 'org.junit.jupiter:junit-jupiter-engine:5.11.0-M2' + testImplementation 'org.junit.jupiter:junit-jupiter-params:5.11.0-M2' + // testImplementation "org.testcontainers:junit-jupiter:1.19.8" + // testImplementation 'org.mockito:mockito-core:5.12.0' + //
testImplementation 'org.mockito:mockito-junit-jupiter:5.12.0' + + testImplementation 'com.google.code.gson:gson:2.7' + testImplementation 'com.google.guava:guava:19.0' + testImplementation 'org.checkerframework:checker-qual:3.44.0' + + testImplementation('net.kyori:adventure-text-serializer-ansi:4.17.0') { + exclude(module: 'adventure-bom') + exclude(module: 'adventure-api') + } } protobuf { protoc { - artifact = 'com.google.protobuf:protoc:3.21.11' + artifact = 'com.google.protobuf:protoc:4.31.1' } generateProtoTasks { all().each { task -> @@ -59,3 +75,26 @@ protobuf { } } } + +test { + useJUnitPlatform {} + systemProperty('net.kyori.ansi.colorLevel', 'indexed16') +} + +publishing { + //repositories { + // maven { + // url = 'https://nexus.lucko.me/repository/maven-snapshots/' + // credentials { + // username = luckoNexusUsername + // password = luckoNexusPassword + // } + // } + //} + publications { + maven(MavenPublication) { + from components.java + version = "${project.pluginVersion}-SNAPSHOT" + } + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index 24b879a175a..484a16557ff 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -22,7 +22,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; - import me.lucko.bytesocks.client.BytesocksClient; import me.lucko.spark.common.activitylog.ActivityLog; import me.lucko.spark.common.api.SparkApi; @@ -47,6 +46,7 @@ import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.monitor.tick.SparkTickStatistics; import me.lucko.spark.common.monitor.tick.TickStatistics; +import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.PlatformStatisticsProvider; import me.lucko.spark.common.sampler.BackgroundSamplerManager; import me.lucko.spark.common.sampler.SamplerContainer; @@ -54,10 +54,13 @@ import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.common.util.BytebinClient; -import me.lucko.spark.common.util.Configuration; import me.lucko.spark.common.util.TemporaryFiles; +import me.lucko.spark.common.util.classfinder.ClassFinder; +import me.lucko.spark.common.util.config.Configuration; +import me.lucko.spark.common.util.config.FileConfiguration; +import me.lucko.spark.common.util.config.RuntimeConfiguration; +import me.lucko.spark.common.util.log.SparkStaticLogger; import me.lucko.spark.common.ws.TrustedKeyStore; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.event.ClickEvent; @@ -72,11 +75,14 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantLock; import java.util.logging.Level; import java.util.stream.Collectors; +import java.util.stream.Stream; import static net.kyori.adventure.text.Component.space; import static net.kyori.adventure.text.Component.text; @@ -119,9 +125,17 @@ public class SparkPlatform { public SparkPlatform(SparkPlugin plugin) { this.plugin = plugin; + SparkStaticLogger.setLogger(plugin); - this.temporaryFiles = new TemporaryFiles(this.plugin.getPluginDirectory().resolve("tmp")); - 
this.configuration = new Configuration(this.plugin.getPluginDirectory().resolve("config.json")); + this.temporaryFiles = new TemporaryFiles(this.plugin.getPlatformInfo().getType() == PlatformInfo.Type.CLIENT + ? this.plugin.getPluginDirectory().resolve("tmp-client") + : this.plugin.getPluginDirectory().resolve("tmp") + ); + this.configuration = Configuration.combining( + RuntimeConfiguration.SYSTEM_PROPERTIES, + RuntimeConfiguration.ENVIRONMENT_VARIABLES, + new FileConfiguration(this.plugin.getPluginDirectory().resolve("config.json")) + ); this.viewerUrl = this.configuration.getString("viewerUrl", "https://spark.lucko.me/"); String bytebinUrl = this.configuration.getString("bytebinUrl", "https://spark-usercontent.lucko.me/"); @@ -129,6 +143,7 @@ public SparkPlatform(SparkPlugin plugin) { this.bytebinClient = new BytebinClient(bytebinUrl, "spark-plugin"); this.bytesocksClient = LegacyBytesocksClientFactory.newClient(bytesocksHost, "spark-plugin"); + //this.bytesocksClient = BytesocksClient.create(bytesocksHost, "spark-plugin"); this.trustedKeyStore = new TrustedKeyStore(this.configuration); this.disableResponseBroadcast = this.configuration.getBoolean("disableResponseBroadcast", false); @@ -286,6 +301,10 @@ public ClassSourceLookup createClassSourceLookup() { return this.plugin.createClassSourceLookup(); } + public ClassFinder createClassFinder() { + return this.plugin.createClassFinder(); + } + public TickStatistics getTickStatistics() { return this.tickStatistics; } @@ -302,6 +321,10 @@ public long getServerNormalOperationStartTime() { return this.serverNormalOperationStartTime; } + public boolean hasEnabled() { + return this.enabled.get(); + } + public Path resolveSaveFile(String prefix, String extension) { Path pluginFolder = this.plugin.getPluginDirectory(); try { @@ -322,11 +345,21 @@ private List<Command> getAvailableCommands(CommandSender sender) { .collect(Collectors.toList()); } + public Set<String> getAllSparkPermissions() { + return Stream.concat( + Stream.of("spark"), + this.commands.stream() + .map(Command::primaryAlias) + .map(alias -> "spark."
+ alias) ).collect(Collectors.toSet()); } + public boolean hasPermissionForAnyCommand(CommandSender sender) { return !getAvailableCommands(sender).isEmpty(); } - public void executeCommand(CommandSender sender, String[] args) { + public CompletableFuture<Void> executeCommand(CommandSender sender, String[] args) { + CompletableFuture<Void> future = new CompletableFuture<>(); AtomicReference<Thread> executorThread = new AtomicReference<>(); AtomicReference<Thread> timeoutThread = new AtomicReference<>(); AtomicBoolean completed = new AtomicBoolean(false); @@ -337,9 +370,10 @@ this.commandExecuteLock.lock(); try { executeCommand0(sender, args); - } catch (Exception e) { - this.plugin.log(Level.SEVERE, "Exception occurred whilst executing a spark command"); - e.printStackTrace(); + future.complete(null); + } catch (Throwable e) { + this.plugin.log(Level.SEVERE, "Exception occurred whilst executing a spark command", e); + future.completeExceptionally(e); } finally { this.commandExecuteLock.unlock(); executorThread.set(null); @@ -355,10 +389,16 @@ // schedule a task to detect timeouts this.plugin.executeAsync(() -> { timeoutThread.set(Thread.currentThread()); + int warningIntervalSeconds = 5; + try { + if (completed.get()) { + return; + } + for (int i = 1; i <= 3; i++) { try { - Thread.sleep(5000); + Thread.sleep(warningIntervalSeconds * 1000); } catch (InterruptedException e) { // ignore } @@ -370,7 +410,8 @@ Thread executor = executorThread.get(); if (executor == null) { getPlugin().log(Level.WARNING, "A command execution has not completed after " + - (i * 5) + " seconds but there is no executor present. Perhaps the executor shutdown?"); + (i * warningIntervalSeconds) + " seconds but there is no executor present. Perhaps the executor shutdown?"); + getPlugin().log(Level.WARNING, "If the command subsequently completes without any errors, this warning should be ignored. :)"); } else { String stackTrace = Arrays.stream(executor.getStackTrace()) .collect(Collectors.joining("\n")); getPlugin().log(Level.WARNING, "A command execution has not completed after " + - (i * 5) + " seconds, it might be stuck. Trace: \n" + stackTrace); + (i * warningIntervalSeconds) + " seconds, it *might* be stuck. Trace: \n" + stackTrace); + getPlugin().log(Level.WARNING, "If the command subsequently completes without any errors, this warning should be ignored. 
:)"); } } } finally { timeoutThread.set(null); } }); + + return future; } private void executeCommand0(CommandSender sender, String[] args) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java index a3bdceb2a27..9901bdb9938 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java @@ -33,17 +33,20 @@ import me.lucko.spark.common.sampler.source.SourceMetadata; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; +import me.lucko.spark.common.util.classfinder.ClassFinder; +import me.lucko.spark.common.util.classfinder.FallbackClassFinder; +import me.lucko.spark.common.util.classfinder.InstrumentationClassFinder; +import me.lucko.spark.common.util.log.Logger; import java.nio.file.Path; import java.util.Collection; import java.util.Collections; -import java.util.logging.Level; import java.util.stream.Stream; /** * Spark plugin interface */ -public interface SparkPlugin { +public interface SparkPlugin extends Logger { /** * Gets the version of the plugin. @@ -89,14 +92,6 @@ default void executeSync(Runnable task) { throw new UnsupportedOperationException(); } - /** - * Print to the plugin logger. - * - * @param level the log level - * @param msg the message - */ - void log(Level level, String msg); - /** * Gets the default {@link ThreadDumper} to be used by the plugin. * @@ -149,6 +144,18 @@ default ClassSourceLookup createClassSourceLookup() { return ClassSourceLookup.NO_OP; } + /** + * Creates a class finder for the platform. + * + * @return the class finder + */ + default ClassFinder createClassFinder() { + return ClassFinder.combining( + new InstrumentationClassFinder(this), + FallbackClassFinder.INSTANCE + ); + } + /** * Gets a list of known sources (plugins/mods) on the platform. 
* diff --git a/spark-common/src/main/java/me/lucko/spark/common/activitylog/Activity.java b/spark-common/src/main/java/me/lucko/spark/common/activitylog/Activity.java index 8b1b558d644..c66888aadc7 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/activitylog/Activity.java +++ b/spark-common/src/main/java/me/lucko/spark/common/activitylog/Activity.java @@ -23,7 +23,6 @@ import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonPrimitive; - import me.lucko.spark.common.command.sender.CommandSender; import java.util.concurrent.TimeUnit; @@ -39,12 +38,12 @@ public final class Activity { private final String dataType; private final String dataValue; - public static Activity urlActivity(CommandSender user, long time, String type, String url) { - return new Activity(user.toData(), time, type, DATA_TYPE_URL, url); + public static Activity urlActivity(CommandSender.Data user, long time, String type, String url) { + return new Activity(user, time, type, DATA_TYPE_URL, url); } - public static Activity fileActivity(CommandSender user, long time, String type, String filePath) { - return new Activity(user.toData(), time, type, DATA_TYPE_FILE, filePath); + public static Activity fileActivity(CommandSender.Data user, long time, String type, String filePath) { + return new Activity(user, time, type, DATA_TYPE_FILE, filePath); } private Activity(CommandSender.Data user, long time, String type, String dataType, String dataValue) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/AbstractStatistic.java b/spark-common/src/main/java/me/lucko/spark/common/api/AbstractStatistic.java index 49a6ccb0ce5..e7610132ca6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/api/AbstractStatistic.java +++ b/spark-common/src/main/java/me/lucko/spark/common/api/AbstractStatistic.java @@ -24,7 +24,6 @@ import me.lucko.spark.api.statistic.StatisticWindow; import me.lucko.spark.api.statistic.types.DoubleStatistic; import me.lucko.spark.api.statistic.types.GenericStatistic; - import org.checkerframework.checker.nullness.qual.NonNull; import java.lang.reflect.Array; diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java b/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java index fc14c67f9d8..3dfdab285b9 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java +++ b/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java @@ -22,7 +22,6 @@ import me.lucko.spark.api.gc.GarbageCollector; import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; - import org.checkerframework.checker.nullness.qual.NonNull; public class GarbageCollectorInfo implements GarbageCollector { diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java index 9e4eee4879e..81b4f70ff13 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java +++ b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java @@ -21,10 +21,10 @@ package me.lucko.spark.common.api; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.api.Spark; import me.lucko.spark.api.SparkProvider; import me.lucko.spark.api.gc.GarbageCollector; +import me.lucko.spark.api.placeholder.PlaceholderResolver; import me.lucko.spark.api.statistic.misc.DoubleAverageInfo; import me.lucko.spark.api.statistic.types.DoubleStatistic; import 
me.lucko.spark.api.statistic.types.GenericStatistic; @@ -32,7 +32,7 @@ import me.lucko.spark.common.monitor.cpu.CpuMonitor; import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; import me.lucko.spark.common.monitor.tick.TickStatistics; - +import me.lucko.spark.common.util.SparkPlaceholder; import org.checkerframework.checker.nullness.qual.NonNull; import org.checkerframework.checker.nullness.qual.Nullable; @@ -174,6 +174,21 @@ public DoubleAverageInfo poll(@NonNull MillisPerTick window) { return ImmutableMap.copyOf(map); } + @Override + public @NonNull PlaceholderResolver placeholders() { + return new PlaceholderResolver() { + @Override + public @Nullable String resolveLegacyFormatting(@NonNull String placeholder) { + return SparkPlaceholder.resolveFormattingCode(SparkApi.this.platform, placeholder); + } + + @Override + public @Nullable String resolveComponentJson(@NonNull String placeholder) { + return SparkPlaceholder.resolveComponentJson(SparkApi.this.platform, placeholder); + } + }; + } + public static void register(Spark spark) { try { SINGLETON_SET_METHOD.invoke(null, spark); diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java index c6871a95000..3e87fa6eb08 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java @@ -21,10 +21,8 @@ package me.lucko.spark.common.command; import com.google.common.collect.ImmutableList; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; - import net.kyori.adventure.text.Component; import java.util.Collections; diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java index d1481bd6268..3a894cadb29 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java @@ -22,11 +22,11 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.JoinConfiguration; import net.kyori.adventure.text.TextComponent; +import java.lang.ref.WeakReference; import java.util.Set; import java.util.function.Consumer; import java.util.stream.Collectors; @@ -49,20 +49,22 @@ public class CommandResponseHandler { .build(); private final SparkPlatform platform; - private final CommandSender sender; + private final CommandSender.Data senderData; + private final WeakReference<CommandSender> sender; private String commandPrimaryAlias; public CommandResponseHandler(SparkPlatform platform, CommandSender sender) { this.platform = platform; - this.sender = sender; + this.senderData = sender.toData(); + this.sender = new WeakReference<>(sender); } public void setCommandPrimaryAlias(String commandPrimaryAlias) { this.commandPrimaryAlias = commandPrimaryAlias; } - public CommandSender sender() { - return this.sender; + public CommandSender.Data senderData() { + return this.senderData; } public void allSenders(Consumer<? super CommandSender> action) { @@ -74,17 +76,24 @@ .filter(s -> s.hasPermission("spark") || s.hasPermission("spark." + this.commandPrimaryAlias)) .collect(Collectors.toSet()); - senders.add(this.sender); + CommandSender sender = this.sender.get(); + if (sender != null) { + senders.add(sender); + } + senders.forEach(action); } public void reply(Component message) { - this.sender.sendMessage(message); + CommandSender sender = this.sender.get(); + if (sender != null) { + sender.sendMessage(message); + } } public void reply(Iterable<Component> message) { Component joinedMsg = Component.join(JoinConfiguration.separator(Component.newline()), message); - this.sender.sendMessage(joinedMsg); + reply(joinedMsg); } public void broadcast(Component message) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java index 6252ac79ba6..c72038bb852 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java @@ -24,7 +24,6 @@ import me.lucko.spark.common.command.Command; import me.lucko.spark.common.command.CommandModule; import me.lucko.spark.common.command.tabcomplete.TabCompleter; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.TextComponent; import net.kyori.adventure.text.event.ClickEvent; diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java index a2da0a06f84..1d0c226c463 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java @@ -21,7 +21,6 @@ package me.lucko.spark.common.command.modules; import com.sun.management.GarbageCollectionNotificationInfo; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.Command; import me.lucko.spark.common.command.CommandModule; @@ -29,7 +28,6 @@ import me.lucko.spark.common.monitor.memory.GarbageCollectionMonitor; import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; import me.lucko.spark.common.util.FormatUtil; - import net.kyori.adventure.text.Component; import java.lang.management.MemoryUsage; diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java index 16eadc8aa16..6bfdd0efe31 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java @@ -21,6 +21,7 @@ package me.lucko.spark.common.command.modules; import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.activitylog.Activity; import me.lucko.spark.common.command.Arguments; import me.lucko.spark.common.command.Command; import me.lucko.spark.common.command.CommandModule; @@ -35,11 +36,15 @@ import me.lucko.spark.common.monitor.ping.PingStatistics; import me.lucko.spark.common.monitor.ping.PingSummary; import me.lucko.spark.common.monitor.tick.TickStatistics; +import me.lucko.spark.common.platform.SparkMetadata; +import me.lucko.spark.common.sampler.Sampler; import me.lucko.spark.common.util.FormatUtil; +import me.lucko.spark.common.util.MediaTypes; import me.lucko.spark.common.util.RollingAverage; import me.lucko.spark.common.util.StatisticFormatter; - +import 
me.lucko.spark.proto.SparkProtos; import net.kyori.adventure.text.Component; +import net.kyori.adventure.text.event.ClickEvent; import java.lang.management.ManagementFactory; import java.lang.management.MemoryMXBean; @@ -52,6 +57,7 @@ import java.util.Map; import java.util.Set; import java.util.function.Consumer; +import java.util.logging.Level; import static net.kyori.adventure.text.Component.empty; import static net.kyori.adventure.text.Component.space; @@ -85,10 +91,11 @@ public void registerCommands(Consumer<Command> consumer) { consumer.accept(Command.builder() .aliases("healthreport", "health", "ht") + .argumentUsage("upload", null) .argumentUsage("memory", null) .argumentUsage("network", null) .executor(HealthModule::healthReport) - .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--memory", "--network")) + .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--upload", "--memory", "--network")) .build() ); } @@ -186,6 +193,12 @@ private static void ping(SparkPlatform platform, CommandSender sender, CommandRe private static void healthReport(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { resp.replyPrefixed(text("Generating server health report...")); + + if (arguments.boolFlag("upload")) { + uploadHealthReport(platform, sender, resp, arguments); + return; + } + List<Component> report = new LinkedList<>(); report.add(empty()); @@ -210,6 +223,37 @@ private static void healthReport(SparkPlatform platform, CommandSender sender, C resp.reply(report); } + private static void uploadHealthReport(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { + SparkProtos.HealthMetadata.Builder metadata = SparkProtos.HealthMetadata.newBuilder(); + SparkMetadata.gather(platform, sender.toData(), platform.getStartupGcStatistics()).writeTo(metadata); + + SparkProtos.HealthData.Builder data = SparkProtos.HealthData.newBuilder() + .setMetadata(metadata); + + Sampler activeSampler = platform.getSamplerContainer().getActiveSampler(); + if (activeSampler != null) { + data.putAllTimeWindowStatistics(activeSampler.exportWindowStatistics()); + } + + try { + String key = platform.getBytebinClient().postContent(data.build(), MediaTypes.SPARK_HEALTH_MEDIA_TYPE).key(); + String url = platform.getViewerUrl() + key; + + resp.broadcastPrefixed(text("Health report:", GOLD)); + resp.broadcast(text() + .content(url) + .color(GRAY) + .clickEvent(ClickEvent.openUrl(url)) + .build() + ); + + platform.getActivityLog().addToLog(Activity.urlActivity(resp.senderData(), System.currentTimeMillis(), "Health report", url)); + } catch (Exception e) { + resp.broadcastPrefixed(text("An error occurred whilst uploading the data.", RED)); + platform.getPlugin().log(Level.SEVERE, "An error occurred whilst uploading data", e); + } + } + private static void addTickStats(List<Component> report, TickStatistics tickStatistics) { report.add(text() .append(text(">", DARK_GRAY, BOLD)) diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java index 6ac3b2f1011..9a97ca7edb6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java @@ -34,7 +34,6 @@ import me.lucko.spark.common.util.FormatUtil; import me.lucko.spark.common.util.MediaTypes; 
import me.lucko.spark.proto.SparkHeapProtos; - import net.kyori.adventure.text.event.ClickEvent; import java.io.IOException; @@ -45,6 +44,7 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; import java.util.function.LongConsumer; +import java.util.logging.Level; import static net.kyori.adventure.text.Component.text; import static net.kyori.adventure.text.format.NamedTextColor.GOLD; @@ -86,11 +86,11 @@ private static void heapSummary(SparkPlatform platform, CommandSender sender, Co heapDump = HeapDumpSummary.createNew(); } catch (Exception e) { resp.broadcastPrefixed(text("An error occurred whilst inspecting the heap.", RED)); - e.printStackTrace(); + platform.getPlugin().log(Level.SEVERE, "An error occurred whilst inspecting the heap.", e); return; } - SparkHeapProtos.HeapData output = heapDump.toProto(platform, sender); + SparkHeapProtos.HeapData output = heapDump.toProto(platform, resp.senderData()); boolean saveToFile = false; if (arguments.boolFlag("save-to-file")) { @@ -108,10 +108,10 @@ private static void heapSummary(SparkPlatform platform, CommandSender sender, Co .build() ); - platform.getActivityLog().addToLog(Activity.urlActivity(sender, System.currentTimeMillis(), "Heap dump summary", url)); + platform.getActivityLog().addToLog(Activity.urlActivity(resp.senderData(), System.currentTimeMillis(), "Heap dump summary", url)); } catch (Exception e) { resp.broadcastPrefixed(text("An error occurred whilst uploading the data. Attempting to save to disk instead.", RED)); - e.printStackTrace(); + platform.getPlugin().log(Level.SEVERE, "An error occurred whilst uploading the data.", e); saveToFile = true; } } @@ -129,10 +129,10 @@ private static void heapSummary(SparkPlatform platform, CommandSender sender, Co ); resp.broadcastPrefixed(text("You can read the heap dump summary file using the viewer web-app - " + platform.getViewerUrl(), GRAY)); - platform.getActivityLog().addToLog(Activity.fileActivity(sender, System.currentTimeMillis(), "Heap dump summary", file.toString())); + platform.getActivityLog().addToLog(Activity.fileActivity(resp.senderData(), System.currentTimeMillis(), "Heap dump summary", file.toString())); } catch (IOException e) { resp.broadcastPrefixed(text("An error occurred whilst saving the data.", RED)); - e.printStackTrace(); + platform.getPlugin().log(Level.SEVERE, "An error occurred whilst saving the data.", e); } } @@ -154,7 +154,7 @@ private static void heapDump(SparkPlatform platform, CommandSender sender, Comma HeapDump.dumpHeap(file, liveOnly); } catch (Exception e) { resp.broadcastPrefixed(text("An error occurred whilst creating a heap dump.", RED)); - e.printStackTrace(); + platform.getPlugin().log(Level.SEVERE, "An error occurred whilst creating a heap dump.", e); return; } @@ -164,7 +164,7 @@ private static void heapDump(SparkPlatform platform, CommandSender sender, Comma .append(text(file.toString(), GRAY)) .build() ); - platform.getActivityLog().addToLog(Activity.fileActivity(sender, System.currentTimeMillis(), "Heap dump", file.toString())); + platform.getActivityLog().addToLog(Activity.fileActivity(resp.senderData(), System.currentTimeMillis(), "Heap dump", file.toString())); Compression compressionMethod = null; @@ -181,7 +181,7 @@ private static void heapDump(SparkPlatform platform, CommandSender sender, Comma try { heapDumpCompress(platform, resp, file, compressionMethod); } catch (IOException e) { - e.printStackTrace(); + platform.getPlugin().log(Level.SEVERE, "An error occurred whilst compressing the heap dump.", 
e); } } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 27e790ff1a0..d82ec63c27c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -21,7 +21,6 @@ package me.lucko.spark.common.command.modules; import com.google.common.collect.Iterables; - import me.lucko.bytesocks.client.BytesocksClient; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.activitylog.Activity; @@ -38,15 +37,13 @@ import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.sampler.ThreadGrouper; import me.lucko.spark.common.sampler.async.AsyncSampler; -import me.lucko.spark.common.sampler.node.MergeMode; +import me.lucko.spark.common.sampler.java.MergeStrategy; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.FormatUtil; import me.lucko.spark.common.util.MediaTypes; -import me.lucko.spark.common.util.MethodDisambiguator; import me.lucko.spark.common.ws.ViewerSocket; import me.lucko.spark.proto.SparkSamplerProtos; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.event.ClickEvent; @@ -61,7 +58,10 @@ import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; +import java.util.function.Supplier; +import java.util.logging.Level; +import static net.kyori.adventure.text.Component.empty; import static net.kyori.adventure.text.Component.space; import static net.kyori.adventure.text.Component.text; import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY; @@ -190,7 +190,6 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command } boolean ignoreSleeping = arguments.boolFlag("ignore-sleeping"); - boolean ignoreNative = arguments.boolFlag("ignore-native"); boolean forceJavaSampler = arguments.boolFlag("force-java-sampler"); Set<String> threads = arguments.stringFlag("thread"); @@ -209,7 +208,7 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command } } - ThreadGrouper threadGrouper; + Supplier<ThreadGrouper> threadGrouper; if (arguments.boolFlag("combine-all")) { threadGrouper = ThreadGrouper.AS_ONE; } else if (arguments.boolFlag("not-combined")) { @@ -239,7 +238,6 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command } builder.samplingInterval(interval); builder.ignoreSleeping(ignoreSleeping); - builder.ignoreNative(ignoreNative); builder.forceJavaSampler(forceJavaSampler); builder.allocLiveOnly(allocLiveOnly); if (ticksOver != -1) { @@ -278,8 +276,8 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command // send message if profiling fails future.whenCompleteAsync((s, throwable) -> { if (throwable != null) { - resp.broadcastPrefixed(text("Profiler operation failed unexpectedly. Error: " + throwable.toString(), RED)); - throwable.printStackTrace(); + resp.broadcastPrefixed(text("Profiler operation failed unexpectedly. 
Error: " + throwable, RED)); + platform.getPlugin().log(Level.SEVERE, "Profiler operation failed unexpectedly", throwable); } }); @@ -339,7 +337,7 @@ private void profilerInfo(SparkPlatform platform, CommandResponseHandler resp) { private void profilerOpen(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { BytesocksClient bytesocksClient = platform.getBytesocksClient(); if (bytesocksClient == null) { - resp.replyPrefixed(text("The live viewer is only supported on Java 11 or newer.", RED)); + resp.replyPrefixed(text("The live viewer is not supported.", RED)); return; } @@ -437,10 +435,10 @@ private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, S .build() ); - platform.getActivityLog().addToLog(Activity.urlActivity(resp.sender(), System.currentTimeMillis(), "Profiler", url)); + platform.getActivityLog().addToLog(Activity.urlActivity(resp.senderData(), System.currentTimeMillis(), "Profiler", url)); } catch (Exception e) { resp.broadcastPrefixed(text("An error occurred whilst uploading the results. Attempting to save to disk instead.", RED)); - e.printStackTrace(); + platform.getPlugin().log(Level.WARNING, "Error whilst uploading profiler results", e); saveToFile = true; } } @@ -454,10 +452,10 @@ private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, S resp.broadcastPrefixed(text("Data has been written to: " + file)); resp.broadcastPrefixed(text("You can view the profile file using the web app @ " + platform.getViewerUrl(), GRAY)); - platform.getActivityLog().addToLog(Activity.fileActivity(resp.sender(), System.currentTimeMillis(), "Profiler", file.toString())); + platform.getActivityLog().addToLog(Activity.fileActivity(resp.senderData(), System.currentTimeMillis(), "Profiler", file.toString())); } catch (IOException e) { resp.broadcastPrefixed(text("An error occurred whilst saving the data.", RED)); - e.printStackTrace(); + platform.getPlugin().log(Level.WARNING, "Error whilst saving profiler results", e); } } } @@ -481,23 +479,33 @@ private void handleOpen(SparkPlatform platform, BytesocksClient bytesocksClient, .build() ); - platform.getActivityLog().addToLog(Activity.urlActivity(resp.sender(), System.currentTimeMillis(), "Profiler (live)", url)); + String cmd = "/" + platform.getPlugin().getCommandName() + " profiler stop"; + resp.broadcast(empty()); + resp.broadcast(text() + .append(text("(NOTE: this link is temporary and will expire after a short period of time. " + + "If you need a link to share with other people (e.g. in a bug report), please use ", GRAY)) + .append(text() + .content(cmd) + .color(WHITE) + .clickEvent(ClickEvent.runCommand(cmd)) + .build() + ) + .append(text(" instead.)", GRAY)) + .build() + ); + + platform.getActivityLog().addToLog(Activity.urlActivity(resp.senderData(), System.currentTimeMillis(), "Profiler (live)", url)); } catch (Exception e) { resp.replyPrefixed(text("An error occurred whilst opening the live profiler.", RED)); - e.printStackTrace(); + platform.getPlugin().log(Level.WARNING, "Error whilst opening live profiler", e); } } private Sampler.ExportProps getExportProps(SparkPlatform platform, CommandResponseHandler resp, Arguments arguments) { return new Sampler.ExportProps() - .creator(resp.sender().toData()) + .creator(resp.senderData()) .comment(Iterables.getFirst(arguments.stringFlag("comment"), null)) - .mergeMode(() -> { - MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); - return arguments.boolFlag("separate-parent-calls") - ? 
MergeMode.separateParentCalls(methodDisambiguator) - : MergeMode.sameMethod(methodDisambiguator); - }) + .mergeStrategy(arguments.boolFlag("separate-parent-calls") ? MergeStrategy.SEPARATE_PARENT_CALLS : MergeStrategy.SAME_METHOD) .classSourceLookup(() -> ClassSourceLookup.create(platform)); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java index f5f4fce525f..85dfaf1d3c9 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java @@ -28,7 +28,6 @@ import me.lucko.spark.common.monitor.tick.ReportPredicate; import me.lucko.spark.common.monitor.tick.TickMonitor; import me.lucko.spark.common.tick.TickHook; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.format.NamedTextColor; diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/sender/AbstractCommandSender.java b/spark-common/src/main/java/me/lucko/spark/common/command/sender/AbstractCommandSender.java index ce48889138f..02ef25df28d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/sender/AbstractCommandSender.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/sender/AbstractCommandSender.java @@ -27,16 +27,20 @@ public AbstractCommandSender(S delegate) { this.delegate = delegate; } + protected Object getObjectForComparison() { + return this.delegate; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; AbstractCommandSender that = (AbstractCommandSender) o; - return this.delegate.equals(that.delegate); + return this.getObjectForComparison().equals(that.getObjectForComparison()); } @Override public int hashCode() { - return this.delegate.hashCode(); + return this.getObjectForComparison().hashCode(); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java b/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java index bae5ddfa3c8..4dc53eb6e49 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java @@ -23,9 +23,7 @@ import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonPrimitive; - import me.lucko.spark.proto.SparkProtos.CommandSenderMetadata; - import net.kyori.adventure.text.Component; import java.util.UUID; @@ -36,6 +34,10 @@ public interface CommandSender { UUID getUniqueId(); + default boolean isPlayer() { + return getUniqueId() != null; + } + void sendMessage(Component message); boolean hasPermission(String permission); diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java index 955bafe9c10..707adbb3055 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java +++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java @@ -20,15 +20,14 @@ package me.lucko.spark.common.heapdump; +import javax.management.JMX; +import javax.management.MBeanServer; +import javax.management.ObjectName; import java.io.IOException; import java.lang.management.ManagementFactory; import java.lang.reflect.Method; import 
java.nio.file.Path; -import javax.management.JMX; -import javax.management.MBeanServer; -import javax.management.ObjectName; - /** * Utility for creating .hprof memory heap snapshots. */ diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java index eaedd312d31..a5e7039d091 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java +++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java @@ -22,12 +22,16 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; +import me.lucko.spark.common.platform.SparkMetadata; import me.lucko.spark.proto.SparkHeapProtos.HeapData; import me.lucko.spark.proto.SparkHeapProtos.HeapEntry; import me.lucko.spark.proto.SparkHeapProtos.HeapMetadata; - +import org.jetbrains.annotations.VisibleForTesting; import org.objectweb.asm.Type; +import javax.management.JMX; +import javax.management.MBeanServer; +import javax.management.ObjectName; import java.lang.management.ManagementFactory; import java.util.Arrays; import java.util.List; @@ -36,10 +40,6 @@ import java.util.regex.Pattern; import java.util.stream.Collectors; -import javax.management.JMX; -import javax.management.MBeanServer; -import javax.management.ObjectName; - /** * Represents a "heap dump summary" from the VM. * @@ -125,21 +125,14 @@ private HeapDumpSummary(List entries) { this.entries = entries; } - public HeapData toProto(SparkPlatform platform, CommandSender creator) { - HeapMetadata.Builder metadata = HeapMetadata.newBuilder() - .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto()) - .setCreator(creator.toData().toProto()); - try { - metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(null, true)); - } catch (Exception e) { - e.printStackTrace(); - } + @VisibleForTesting + List getEntries() { + return this.entries; + } - try { - metadata.setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics()); - } catch (Exception e) { - e.printStackTrace(); - } + public HeapData toProto(SparkPlatform platform, CommandSender.Data creator) { + HeapMetadata.Builder metadata = HeapMetadata.newBuilder(); + SparkMetadata.gather(platform, creator, platform.getStartupGcStatistics()).writeTo(metadata); HeapData.Builder proto = HeapData.newBuilder(); proto.setMetadata(metadata); @@ -188,6 +181,16 @@ public HeapEntry toProto() { .setType(this.type) .build(); } + + @Override + public String toString() { + return "Entry{" + + "order=" + this.order + + ", instances=" + this.instances + + ", bytes=" + this.bytes + + ", type='" + this.type + '\'' + + '}'; + } } public interface DiagnosticCommandMXBean { diff --git a/spark-common/src/main/java/me/lucko/spark/common/legacy/LegacyBytesocksClient.java b/spark-common/src/main/java/me/lucko/spark/common/legacy/LegacyBytesocksClient.java index b3e774e9ddd..517a47f1314 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/legacy/LegacyBytesocksClient.java +++ b/spark-common/src/main/java/me/lucko/spark/common/legacy/LegacyBytesocksClient.java @@ -1,20 +1,13 @@ package me.lucko.spark.common.legacy; -import com.google.common.collect.ImmutableList; import com.neovisionaries.ws.client.*; import me.lucko.bytesocks.client.BytesocksClient; -import java.lang.invoke.MethodHandle; -import java.lang.invoke.MethodHandles; -import java.lang.reflect.Method; import java.net.HttpURLConnection; 
import java.net.URI; import java.net.URL; -import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.WeakHashMap; -import java.util.concurrent.CompletableFuture; /** * Implementation of BytesocksClient that works on Java 8. @@ -78,52 +71,14 @@ public BytesocksClient.Socket connect(String channelId, BytesocksClient.Listener private static final class SocketImpl implements BytesocksClient.Socket { private final String id; private final WebSocket ws; - private final WeakHashMap> frameFutures = new WeakHashMap<>(); - - /* ugly hacks to track sending of websocket */ - private static final MethodHandle SPLIT_METHOD; - - static { - try { - Method m = WebSocket.class.getDeclaredMethod("splitIfNecessary", WebSocketFrame.class); - m.setAccessible(true); - SPLIT_METHOD = MethodHandles.lookup().unreflect(m); - } catch(ReflectiveOperationException e) { - throw new RuntimeException(e); - } - } private SocketImpl(String id, WebSocket ws) { this.id = id; this.ws = ws; - this.ws.addListener(new WebSocketAdapter() { - @Override - public void onFrameSent(WebSocket websocket, WebSocketFrame frame) throws Exception { - synchronized (frameFutures) { - CompletableFuture future = frameFutures.remove(frame); - if(future != null) - future.complete(null); - else { - System.err.println("Sent frame without associated CompletableFuture"); - } - } - } - - @Override - public void onFrameUnsent(WebSocket websocket, WebSocketFrame frame) throws Exception { - synchronized (frameFutures) { - CompletableFuture future = frameFutures.remove(frame); - if(future != null) - future.completeExceptionally(new Exception("Failed to send frame")); - else - System.err.println("Received error without associated CompletableFuture"); - } - } - }); } @Override - public String getChannelId() { + public String channelId() { return this.id; } @@ -133,35 +88,18 @@ public boolean isOpen() { } @Override - public CompletableFuture send(CharSequence msg) { - WebSocketFrame targetFrame = WebSocketFrame.createTextFrame(msg.toString()); - // split ourselves so we know what the last frame was - List splitFrames; - try { - splitFrames = (List)SPLIT_METHOD.invokeExact(this.ws, targetFrame); - } catch(Throwable e) { - throw new RuntimeException(e); - } - if(splitFrames == null) - splitFrames = ImmutableList.of(targetFrame); - // FIXME this code is not really that efficient (allocating a whole new CompletableFuture for every frame), but - // it's the simplest solution for now and seems to be good enough. 
We have to track all frames to correctly - // report errors/success - List> futures = new ArrayList<>(); - for(WebSocketFrame frame : splitFrames) { - CompletableFuture future = new CompletableFuture<>(); - synchronized (frameFutures) { - frameFutures.put(frame, future); - } - futures.add(future); - this.ws.sendFrame(frame); - } - return CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])); + public void send(String msg) { + this.ws.sendText(msg); } @Override public void close(int statusCode, String reason) { - this.ws.sendClose(statusCode, reason); + this.ws.disconnect(statusCode, reason, 0); + } + + @Override + public void closeGracefully(int statusCode, String reason) { + this.ws.disconnect(statusCode, reason); } } @@ -179,7 +117,11 @@ public void onConnected(WebSocket websocket, Map> headers) @Override public void onDisconnected(WebSocket websocket, WebSocketFrame serverCloseFrame, WebSocketFrame clientCloseFrame, boolean closedByServer) throws Exception { - this.listener.onClose(serverCloseFrame.getCloseCode(), serverCloseFrame.getCloseReason()); + if (serverCloseFrame != null) { + this.listener.onClose(serverCloseFrame.getCloseCode(), serverCloseFrame.getCloseReason()); + } else { + this.listener.onClose(WebSocketCloseCode.ABNORMAL, "connection reset"); + } } @Override diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/MacosSysctl.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/MacosSysctl.java new file mode 100644 index 00000000000..c279f31132c --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/MacosSysctl.java @@ -0,0 +1,67 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.monitor; + +import org.checkerframework.checker.nullness.qual.NonNull; + +import java.io.BufferedReader; +import java.io.InputStreamReader; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Locale; + +/** + * Utility for reading from sysctl on macOS systems. + */ +public enum MacosSysctl { + + SYSCTL("sysctl", "-a"),; + + private static final boolean SUPPORTED = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "").equals("macosx"); + + private final String[] cmdArgs; + + MacosSysctl(String... 
cmdArgs) { + this.cmdArgs = cmdArgs; + } + + public @NonNull List<String> read() { + if (SUPPORTED) { + ProcessBuilder process = new ProcessBuilder(this.cmdArgs).redirectErrorStream(true); + try (BufferedReader buf = new BufferedReader(new InputStreamReader(process.start().getInputStream()))) { + List<String> lines = new ArrayList<>(); + + String line; + while ((line = buf.readLine()) != null) { + lines.add(line); + } + + return lines; + } catch (Exception e) { + // ignore + } + } + + return Collections.emptyList(); + } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java index 9954bd5151a..07875cc3749 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java @@ -21,6 +21,7 @@ package me.lucko.spark.common.monitor.cpu; import me.lucko.spark.common.monitor.LinuxProc; +import me.lucko.spark.common.monitor.MacosSysctl; import me.lucko.spark.common.monitor.WindowsWmic; import java.util.regex.Pattern; @@ -52,6 +53,12 @@ public static String queryCpuModel() { } } + for (String line : MacosSysctl.SYSCTL.read()) { + if (line.startsWith("machdep.cpu.brand_string:")) { + return line.substring("machdep.cpu.brand_string:".length()).trim(); + } + } + return ""; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java index b4ab8315de1..987af7b5b6e 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java @@ -23,13 +23,12 @@ import me.lucko.spark.common.monitor.MonitoringExecutor; import me.lucko.spark.common.util.RollingAverage; -import java.lang.management.ManagementFactory; -import java.math.BigDecimal; -import java.util.concurrent.TimeUnit; - import javax.management.JMX; import javax.management.MBeanServer; import javax.management.ObjectName; +import java.lang.management.ManagementFactory; +import java.math.BigDecimal; +import java.util.concurrent.TimeUnit; /** * Exposes and monitors the system/process CPU usage. diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java index 9bff1e24b47..d9abf9b635a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java @@ -22,16 +22,15 @@ import com.sun.management.GarbageCollectionNotificationInfo; -import java.lang.management.GarbageCollectorMXBean; -import java.lang.management.ManagementFactory; -import java.util.ArrayList; -import java.util.List; - import javax.management.ListenerNotFoundException; import javax.management.Notification; import javax.management.NotificationEmitter; import javax.management.NotificationListener; import javax.management.openmbean.CompositeData; +import java.lang.management.GarbageCollectorMXBean; +import java.lang.management.ManagementFactory; +import java.util.ArrayList; +import java.util.List; /** * Monitoring process for garbage collections. 
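For illustration, the macOS probe added above boils down to a small, self-contained technique: shell out to `sysctl -a` (exactly what `MacosSysctl.SYSCTL.read()` does) and scan the output for the `machdep.cpu.brand_string` key that `CpuInfo.queryCpuModel()` now matches. The standalone class and `main` method below are hypothetical, not part of the patch:

```java
import java.io.BufferedReader;
import java.io.InputStreamReader;

// Hypothetical standalone demo of the sysctl-based CPU model lookup above.
public class SysctlCpuModelDemo {
    public static String queryCpuModel() {
        try {
            // "sysctl -a" prints every kernel state key, one "key: value" per line
            Process process = new ProcessBuilder("sysctl", "-a").redirectErrorStream(true).start();
            try (BufferedReader buf = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
                String line;
                while ((line = buf.readLine()) != null) {
                    // macOS reports the CPU model under this key
                    if (line.startsWith("machdep.cpu.brand_string:")) {
                        return line.substring("machdep.cpu.brand_string:".length()).trim();
                    }
                }
            }
        } catch (Exception e) {
            // mirror the patch: fall through to an empty result on any failure
        }
        return "";
    }

    public static void main(String[] args) {
        System.out.println(queryCpuModel()); // e.g. "Apple M1 Pro"; empty on non-macOS systems
    }
}
```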
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java index 8f63f71271f..b260d7ee3af 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java @@ -22,13 +22,12 @@ import me.lucko.spark.common.monitor.LinuxProc; -import java.lang.management.ManagementFactory; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - import javax.management.JMX; import javax.management.MBeanServer; import javax.management.ObjectName; +import java.lang.management.ManagementFactory; +import java.util.regex.Matcher; +import java.util.regex.Pattern; /** * Utility to query information about system memory usage. diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java index 332077a4ed9..01bd3a7b1ba 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java @@ -21,10 +21,9 @@ package me.lucko.spark.common.monitor.net; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.common.monitor.LinuxProc; - import org.checkerframework.checker.nullness.qual.NonNull; +import org.jetbrains.annotations.VisibleForTesting; import java.util.Arrays; import java.util.Collections; @@ -202,7 +201,8 @@ public NetworkInterfaceInfo subtract(NetworkInterfaceInfo other) { private static final Pattern PROC_NET_DEV_PATTERN = Pattern.compile("^\\s*(\\w+):([\\d\\s]+)$"); - private static @NonNull Map read(List output) { + @VisibleForTesting + static @NonNull Map read(List output) { // Inter-| Receive | Transmit // face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed // lo: 2776770 11307 0 0 0 0 0 0 2776770 11307 0 0 0 0 0 0 diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java index 49fcbe1bf57..803a183664f 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java @@ -22,7 +22,6 @@ import me.lucko.spark.common.monitor.MonitoringExecutor; import me.lucko.spark.common.util.RollingAverage; - import org.checkerframework.checker.nullness.qual.Nullable; import java.math.BigDecimal; diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/SparkTickStatistics.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/SparkTickStatistics.java index 5877cbe585f..427e1dfc88c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/SparkTickStatistics.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/SparkTickStatistics.java @@ -81,6 +81,11 @@ public void onTick(int currentTick) { } long diff = now - this.last; + if (diff <= 0) { + // avoid division by zero + return; + } + BigDecimal currentTps = TPS_BASE.divide(new BigDecimal(diff), 30, RoundingMode.HALF_UP); BigDecimal total = currentTps.multiply(new BigDecimal(diff)); diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java 
b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java index 944fa83ea28..f9b41f319fe 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java @@ -21,11 +21,9 @@ package me.lucko.spark.common.monitor.tick; import com.sun.management.GarbageCollectionNotificationInfo; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.monitor.memory.GarbageCollectionMonitor; import me.lucko.spark.common.tick.TickHook; - import net.kyori.adventure.text.Component; import java.text.DecimalFormat; diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java index 082389d574f..96549a169a4 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java @@ -30,6 +30,8 @@ public interface PlatformInfo { String getName(); + String getBrand(); + String getVersion(); String getMinecraftVersion(); @@ -40,13 +42,14 @@ default int getSparkVersion() { } default Data toData() { - return new Data(getType(), getName(), getVersion(), getMinecraftVersion(), getSparkVersion()); + return new Data(getType(), getName(), getBrand(), getVersion(), getMinecraftVersion(), getSparkVersion()); } enum Type { SERVER(PlatformMetadata.Type.SERVER), CLIENT(PlatformMetadata.Type.CLIENT), - PROXY(PlatformMetadata.Type.PROXY); + PROXY(PlatformMetadata.Type.PROXY), + APPLICATION(PlatformMetadata.Type.APPLICATION); private final PlatformMetadata.Type type; @@ -62,13 +65,15 @@ public PlatformMetadata.Type toProto() { final class Data { private final Type type; private final String name; + private final String brand; private final String version; private final String minecraftVersion; private final int sparkVersion; - public Data(Type type, String name, String version, String minecraftVersion, int sparkVersion) { + public Data(Type type, String name, String brand, String version, String minecraftVersion, int sparkVersion) { this.type = type; this.name = name; + this.brand = brand; this.version = version; this.minecraftVersion = minecraftVersion; this.sparkVersion = sparkVersion; @@ -82,6 +87,10 @@ public String getName() { return this.name; } + public String getBrand() { + return this.brand; + } + public String getVersion() { return this.version; } @@ -98,6 +107,7 @@ public PlatformMetadata toProto() { PlatformMetadata.Builder proto = PlatformMetadata.newBuilder() .setType(this.type.toProto()) .setName(this.name) + .setBrand(this.brand) .setVersion(this.version) .setSparkVersion(this.sparkVersion); diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java index b0987c9abb0..d9fbff2ab81 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java @@ -22,6 +22,7 @@ import me.lucko.spark.api.statistic.misc.DoubleAverageInfo; import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.monitor.cpu.CpuInfo; import me.lucko.spark.common.monitor.cpu.CpuMonitor; import me.lucko.spark.common.monitor.disk.DiskUsage; @@ -40,9 +41,16 @@ import 
me.lucko.spark.proto.SparkProtos.WorldStatistics; import java.lang.management.ManagementFactory; +import java.lang.management.MemoryPoolMXBean; +import java.lang.management.MemoryType; import java.lang.management.MemoryUsage; import java.lang.management.RuntimeMXBean; +import java.util.List; import java.util.Map; +import java.util.UUID; +import java.util.logging.Level; +import java.util.regex.Pattern; +import java.util.stream.Collectors; public class PlatformStatisticsProvider { private final SparkPlatform platform; @@ -55,6 +63,8 @@ public SystemStatistics getSystemStatistics() { RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean(); OperatingSystemInfo osInfo = OperatingSystemInfo.poll(); + String vmArgs = String.join(" ", runtimeBean.getInputArguments()); + SystemStatistics.Builder builder = SystemStatistics.newBuilder() .setCpu(SystemStatistics.Cpu.newBuilder() .setThreads(Runtime.getRuntime().availableProcessors()) @@ -99,7 +109,13 @@ .setVendor(System.getProperty("java.vendor", "unknown")) .setVersion(System.getProperty("java.version", "unknown")) .setVendorVersion(System.getProperty("java.vendor.version", "unknown")) - .setVmArgs(String.join(" ", runtimeBean.getInputArguments())) + .setVmArgs(VmArgRedactor.replace(vmArgs)) + .build() + ) + .setJvm(SystemStatistics.Jvm.newBuilder() + .setName(System.getProperty("java.vm.name", "unknown")) + .setVendor(System.getProperty("java.vm.vendor", "unknown")) + .setVersion(System.getProperty("java.vm.version", "unknown")) .build() ); @@ -130,18 +146,35 @@ public SystemStatistics getSystemStatistics() { return builder.build(); } - public PlatformStatistics getPlatformStatistics(Map<String, GarbageCollectorStatistics> startingGcStatistics, boolean includeWorld) { + public PlatformStatistics getPlatformStatistics(Map<String, GarbageCollectorStatistics> startingGcStatistics, boolean includeWorldStatistics) { PlatformStatistics.Builder builder = PlatformStatistics.newBuilder(); - MemoryUsage memoryUsage = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage(); - builder.setMemory(PlatformStatistics.Memory.newBuilder() - .setHeap(PlatformStatistics.Memory.MemoryPool.newBuilder() - .setUsed(memoryUsage.getUsed()) - .setTotal(memoryUsage.getCommitted()) - .build() - ) - .build() - ); + PlatformStatistics.Memory.Builder memory = PlatformStatistics.Memory.newBuilder() + .setHeap(memoryUsageProto(ManagementFactory.getMemoryMXBean().getHeapMemoryUsage())) + .setNonHeap(memoryUsageProto(ManagementFactory.getMemoryMXBean().getNonHeapMemoryUsage())); + + List<MemoryPoolMXBean> memoryPoolMXBeans = ManagementFactory.getMemoryPoolMXBeans(); + for (MemoryPoolMXBean memoryPool : memoryPoolMXBeans) { + if (memoryPool.getType() != MemoryType.HEAP) { + continue; + } + + MemoryUsage usage = memoryPool.getUsage(); + MemoryUsage collectionUsage = memoryPool.getCollectionUsage(); + + if (usage.getMax() == -1) { + usage = new MemoryUsage(usage.getInit(), usage.getUsed(), usage.getCommitted(), usage.getCommitted()); + } + + memory.addPools(PlatformStatistics.Memory.MemoryPool.newBuilder() + .setName(memoryPool.getName()) + .setUsage(memoryUsageProto(usage)) + .setCollectionUsage(memoryUsageProto(collectionUsage)) + .build() + ); + } + + builder.setMemory(memory.build()); long uptime = System.currentTimeMillis() - this.platform.getServerNormalOperationStartTime(); builder.setUptime(uptime); @@ -183,13 +216,29 @@ public PlatformStatistics getPlatformStatistics(Map<String, GarbageColle + List<CommandSender> senders = this.platform.getPlugin().getCommandSenders().collect(Collectors.toList()); + PlatformInfo.Type platformType = 
this.platform.getPlugin().getPlatformInfo().getType(); - if (platformType != PlatformInfo.Type.CLIENT) { - long playerCount = this.platform.getPlugin().getCommandSenders().count() - 1; // includes console + if (platformType == PlatformInfo.Type.SERVER || platformType == PlatformInfo.Type.PROXY) { + long playerCount = senders.size() - 1; // includes console builder.setPlayerCount(playerCount); } - if (includeWorld) { + UUID anyOnlinePlayerUniqueId = senders.stream() + .filter(CommandSender::isPlayer) + .map(CommandSender::getUniqueId) + .filter(uniqueId -> uniqueId.version() == 4 || uniqueId.version() == 3) + .findAny() + .orElse(null); + + builder.setOnlineMode(anyOnlinePlayerUniqueId == null + ? PlatformStatistics.OnlineMode.UNKNOWN + : anyOnlinePlayerUniqueId.version() == 4 + ? PlatformStatistics.OnlineMode.ONLINE + : PlatformStatistics.OnlineMode.OFFLINE + ); + + if (includeWorldStatistics) { try { WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider( new AsyncWorldInfoProvider(this.platform, this.platform.getPlugin().createWorldInfoProvider()) @@ -199,7 +248,7 @@ public PlatformStatistics getPlatformStatistics(Map<String, GarbageColle + input = WINDOWS_USERNAME.matcher(input).replaceAll("C:\\\\Users\\\\<redacted>"); + input = MACOS_USERNAME.matcher(input).replaceAll("/Users/<redacted>"); + input = LINUX_USERNAME.matcher(input).replaceAll("/home/<redacted>"); + return input; + } + } + } diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/SparkMetadata.java b/spark-common/src/main/java/me/lucko/spark/common/platform/SparkMetadata.java new file mode 100644 index 00000000000..36f942ce563 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/SparkMetadata.java @@ -0,0 +1,154 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <https://www.gnu.org/licenses/>. 
+ */ + +package me.lucko.spark.common.platform; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.command.sender.CommandSender; +import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; +import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; +import me.lucko.spark.common.sampler.source.SourceMetadata; +import me.lucko.spark.proto.SparkHeapProtos.HeapMetadata; +import me.lucko.spark.proto.SparkProtos.HealthMetadata; +import me.lucko.spark.proto.SparkProtos.PlatformMetadata; +import me.lucko.spark.proto.SparkProtos.PlatformStatistics; +import me.lucko.spark.proto.SparkProtos.SystemStatistics; +import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; + +import java.util.Collection; +import java.util.Locale; +import java.util.Map; +import java.util.logging.Level; + +public class SparkMetadata { + + public static SparkMetadata gather(SparkPlatform platform, CommandSender.Data creator, Map<String, GarbageCollectorStatistics> initialGcStats) { + PlatformMetadata platformMetadata = platform.getPlugin().getPlatformInfo().toData().toProto(); + + PlatformStatistics platformStatistics = null; + try { + platformStatistics = platform.getStatisticsProvider().getPlatformStatistics(initialGcStats, true); + } catch (Exception e) { + platform.getPlugin().log(Level.WARNING, "Failed to gather platform statistics", e); + } + + SystemStatistics systemStatistics = null; + try { + systemStatistics = platform.getStatisticsProvider().getSystemStatistics(); + } catch (Exception e) { + platform.getPlugin().log(Level.WARNING, "Failed to gather system statistics", e); + } + + long generatedTime = System.currentTimeMillis(); + + Map<String, String> serverConfigurations = null; + try { + ServerConfigProvider serverConfigProvider = platform.getPlugin().createServerConfigProvider(); + if (serverConfigProvider != null) { + serverConfigurations = serverConfigProvider.export(); + } + } catch (Exception e) { + platform.getPlugin().log(Level.WARNING, "Failed to gather server configurations", e); + } + + Collection<SourceMetadata> sources = platform.getPlugin().getKnownSources(); + + Map<String, String> extraPlatformMetadata = null; + try { + MetadataProvider extraMetadataProvider = platform.getPlugin().createExtraMetadataProvider(); + if (extraMetadataProvider != null) { + extraPlatformMetadata = extraMetadataProvider.export(); + } + } catch (Exception e) { + platform.getPlugin().log(Level.WARNING, "Failed to gather extra platform metadata", e); + } + + return new SparkMetadata(creator, platformMetadata, platformStatistics, systemStatistics, generatedTime, serverConfigurations, sources, extraPlatformMetadata); + } + + private final CommandSender.Data creator; + private final PlatformMetadata platformMetadata; + private final PlatformStatistics platformStatistics; + private final SystemStatistics systemStatistics; + private final long generatedTime; + private final Map<String, String> serverConfigurations; + private final Collection<SourceMetadata> sources; + private final Map<String, String> extraPlatformMetadata; + + public SparkMetadata(CommandSender.Data creator, PlatformMetadata platformMetadata, PlatformStatistics platformStatistics, SystemStatistics systemStatistics, long generatedTime, Map<String, String> serverConfigurations, Collection<SourceMetadata> sources, Map<String, String> extraPlatformMetadata) { + this.creator = creator; + this.platformMetadata = platformMetadata; + this.platformStatistics = platformStatistics; + this.systemStatistics = systemStatistics; + this.generatedTime = generatedTime; + this.serverConfigurations = serverConfigurations; + this.sources = sources; + this.extraPlatformMetadata = extraPlatformMetadata; 
+ } + + @SuppressWarnings("DuplicatedCode") + public void writeTo(HealthMetadata.Builder builder) { + if (this.creator != null) builder.setCreator(this.creator.toProto()); + if (this.platformMetadata != null) builder.setPlatformMetadata(this.platformMetadata); + if (this.platformStatistics != null) builder.setPlatformStatistics(this.platformStatistics); + if (this.systemStatistics != null) builder.setSystemStatistics(this.systemStatistics); + builder.setGeneratedTime(this.generatedTime); + if (this.serverConfigurations != null) builder.putAllServerConfigurations(this.serverConfigurations); + if (this.sources != null) { + for (SourceMetadata source : this.sources) { + builder.putSources(source.getName().toLowerCase(Locale.ROOT), source.toProto()); + } + } + if (this.extraPlatformMetadata != null) builder.putAllExtraPlatformMetadata(this.extraPlatformMetadata); + } + + @SuppressWarnings("DuplicatedCode") + public void writeTo(SamplerMetadata.Builder builder) { + if (this.creator != null) builder.setCreator(this.creator.toProto()); + if (this.platformMetadata != null) builder.setPlatformMetadata(this.platformMetadata); + if (this.platformStatistics != null) builder.setPlatformStatistics(this.platformStatistics); + if (this.systemStatistics != null) builder.setSystemStatistics(this.systemStatistics); + builder.setEndTime(this.generatedTime); + if (this.serverConfigurations != null) builder.putAllServerConfigurations(this.serverConfigurations); + if (this.sources != null) { + for (SourceMetadata source : this.sources) { + builder.putSources(source.getName().toLowerCase(Locale.ROOT), source.toProto()); + } + } + if (this.extraPlatformMetadata != null) builder.putAllExtraPlatformMetadata(this.extraPlatformMetadata); + } + + @SuppressWarnings("DuplicatedCode") + public void writeTo(HeapMetadata.Builder builder) { + if (this.creator != null) builder.setCreator(this.creator.toProto()); + if (this.platformMetadata != null) builder.setPlatformMetadata(this.platformMetadata); + if (this.platformStatistics != null) builder.setPlatformStatistics(this.platformStatistics); + if (this.systemStatistics != null) builder.setSystemStatistics(this.systemStatistics); + builder.setGeneratedTime(this.generatedTime); + if (this.serverConfigurations != null) builder.putAllServerConfigurations(this.serverConfigurations); + if (this.sources != null) { + for (SourceMetadata source : this.sources) { + builder.putSources(source.getName().toLowerCase(Locale.ROOT), source.toProto()); + } + } + if (this.extraPlatformMetadata != null) builder.putAllExtraPlatformMetadata(this.extraPlatformMetadata); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java index 485f215813c..6503df9f536 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java @@ -22,7 +22,6 @@ import com.google.common.collect.ImmutableMap; import com.google.gson.JsonElement; - import me.lucko.spark.common.platform.MetadataProvider; import java.util.Arrays; diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java index 82cddeff470..707097a858e 100644 --- 
a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java @@ -23,6 +23,7 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; +import java.util.Collection; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; @@ -80,6 +81,14 @@ public CompletableFuture<WorldInfoProvider.ChunksResult<? extends ChunkInfo<?>>> return async(WorldInfoProvider::pollChunks); } + public CompletableFuture<WorldInfoProvider.GameRulesResult> pollGameRules() { + return async(WorldInfoProvider::pollGameRules); + } + + public CompletableFuture<Collection<WorldInfoProvider.DataPackInfo>> pollDataPacks() { + return async(WorldInfoProvider::pollDataPacks); + } + public WorldInfoProvider.CountsResult getCounts() { return get(pollCounts()); } @@ -87,4 +96,12 @@ public WorldInfoProvider.CountsResult getCounts() { public WorldInfoProvider.ChunksResult<? extends ChunkInfo<?>> getChunks() { return get(pollChunks()); } + + public WorldInfoProvider.GameRulesResult getGameRules() { + return get(pollGameRules()); + } + + public Collection<WorldInfoProvider.DataPackInfo> getDataPacks() { + return get(pollDataPacks()); + } }
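A note on the shape these new accessors follow: each poll* method hands a WorldInfoProvider call to an async executor and returns a CompletableFuture, while the matching get* method blocks on that future with a timeout. A simplified, self-contained sketch of the pattern — the helper names, executor choice and timeout here are illustrative stand-ins, not the exact spark internals:

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;

// Simplified stand-in for the async/get helpers used by AsyncWorldInfoProvider.
class AsyncPollSketch<P> {
    private static final int TIMEOUT_SECONDS = 5; // assumption; the real timeout may differ

    private final ExecutorService executor = Executors.newSingleThreadExecutor();
    private final P provider;

    AsyncPollSketch(P provider) {
        this.provider = provider;
    }

    // run the poll function against the provider on an async executor
    <T> CompletableFuture<T> async(Function<P, T> function) {
        return CompletableFuture.supplyAsync(() -> function.apply(this.provider), this.executor);
    }

    // block for the result, giving up (returning null) after the timeout
    <T> T get(CompletableFuture<T> future) {
        try {
            return future.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
        } catch (Exception e) {
            return null; // the real implementation reports the failure instead of swallowing it
        }
    }
}
```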
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java index 7fb581db012..457f8c9b5cc 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java @@ -20,6 +20,7 @@ package me.lucko.spark.common.platform.world; +import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -39,6 +40,16 @@ public CountsResult pollCounts() { public ChunksResult<? extends ChunkInfo<?>> pollChunks() { return null; } + + @Override + public GameRulesResult pollGameRules() { + return null; + } + + @Override + public Collection<DataPackInfo> pollDataPacks() { + return null; + } }; /** @@ -55,6 +66,20 @@ public ChunksResult<? extends ChunkInfo<?>> pollChunks() { */ ChunksResult<? extends ChunkInfo<?>> pollChunks(); + /** + * Polls for game rules. + * + * @return the game rules + */ + GameRulesResult pollGameRules(); + + /** + * Polls for data packs. + * + * @return the data packs + */ + Collection<DataPackInfo> pollDataPacks(); + default boolean mustCallSync() { return true; } @@ -101,4 +126,61 @@ public int chunks() { } } + final class GameRulesResult { + private final Map<String, GameRule> rules = new HashMap<>(); + + private GameRule rule(String name) { + return this.rules.computeIfAbsent(name, k -> new GameRule()); + } + + public void put(String gameRuleName, String worldName, String value) { + rule(gameRuleName).worldValues.put(worldName, value); + } + + public void putDefault(String gameRuleName, String value) { + rule(gameRuleName).defaultValue = value; + } + + public Map<String, GameRule> getRules() { + return this.rules; + } + + public static final class GameRule { + Map<String, String> worldValues = new HashMap<>(); + String defaultValue = null; + + public String getDefaultValue() { + return this.defaultValue; + } + + public Map<String, String> getWorldValues() { + return this.worldValues; + } + } + } + + final class DataPackInfo { + private final String name; + private final String description; + private final String source; + + public DataPackInfo(String name, String description, String source) { + this.name = name; + this.description = description; + this.source = source; + } + + public String name() { + return this.name; + } + + public String description() { + return this.description; + } + + public String source() { + return this.source; + } + } + } }
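For platform implementers, pollGameRules() is expected to record one server-wide default per rule plus the value seen in each world, so the viewer can highlight overrides. A hypothetical implementation might populate the result like this (the rule and world names are illustrative, not part of the spark API):

```java
import me.lucko.spark.common.platform.world.WorldInfoProvider;

// Hypothetical platform implementation of pollGameRules().
class GameRulesExample {
    WorldInfoProvider.GameRulesResult pollGameRules() {
        WorldInfoProvider.GameRulesResult result = new WorldInfoProvider.GameRulesResult();

        // the server-wide default is recorded once per rule...
        result.putDefault("doDaylightCycle", "true");

        // ...and per-world values let the viewer spot overrides
        result.put("doDaylightCycle", "world", "true");
        result.put("doDaylightCycle", "world_nether", "false");

        return result;
    }
}
```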
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java index 7e63222c5c7..0154346f0f7 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java @@ -21,12 +21,16 @@ package me.lucko.spark.common.platform.world; import me.lucko.spark.proto.SparkProtos.WorldStatistics; +import org.jetbrains.annotations.VisibleForTesting; +import java.util.ArrayDeque; import java.util.ArrayList; +import java.util.Collection; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; @@ -38,7 +42,7 @@ public WorldStatisticsProvider(AsyncWorldInfoProvider provider) { } public WorldStatistics getWorldStatistics() { - WorldInfoProvider.ChunksResult<? extends ChunkInfo<?>> result = provider.getChunks(); + WorldInfoProvider.ChunksResult<? extends ChunkInfo<?>> result = this.provider.getChunks(); if (result == null) { return null; } @@ -70,6 +74,26 @@ public WorldStatistics getWorldStatistics() { stats.setTotalEntities(combinedTotal.get()); combined.asMap().forEach((key, value) -> stats.putEntityCounts(key, value.get())); + WorldInfoProvider.GameRulesResult gameRules = this.provider.getGameRules(); + if (gameRules != null) { + gameRules.getRules().forEach((ruleName, rule) -> stats.addGameRules(WorldStatistics.GameRule.newBuilder() + .setName(ruleName) + .setDefaultValue(rule.getDefaultValue()) + .putAllWorldValues(rule.getWorldValues()) + .build() + )); + } + + Collection<WorldInfoProvider.DataPackInfo> dataPacks = this.provider.getDataPacks(); + if (dataPacks != null) { + dataPacks.forEach(dataPack -> stats.addDataPacks(WorldStatistics.DataPack.newBuilder() + .setName(dataPack.name()) + .setDescription(dataPack.description()) + .setSource(dataPack.source()) + .build() + )); + } + return stats.build(); } @@ -101,37 +125,57 @@ private static WorldStatistics.Chunk chunkToProto(ChunkInfo<?> chunk, CountMap<?> entityCounts) { return builder.build(); } - private static List<Region> groupIntoRegions(List<ChunkInfo<?>> chunks) { + @VisibleForTesting + static List<Region> groupIntoRegions(List<ChunkInfo<?>> chunks) { List<Region> regions = new ArrayList<>(); + LinkedHashMap<ChunkCoordinate, ChunkInfo<?>> chunkMap = new LinkedHashMap<>(chunks.size()); + for (ChunkInfo<?> chunk : chunks) { CountMap<?> counts = chunk.getEntityCounts(); if (counts.total().get() == 0) { continue; } + chunkMap.put(new ChunkCoordinate(chunk.getX(), chunk.getZ()), chunk); + } - boolean found = false; + ArrayDeque<ChunkInfo<?>> queue = new ArrayDeque<>(); + ChunkCoordinate index = new ChunkCoordinate(); // avoid allocating per check - for (Region region : regions) { - if (region.isAdjacent(chunk)) { - found = true; - region.add(chunk); - - // if the chunk is adjacent to more than one region, merge the regions together - for (Iterator<Region> iterator = regions.iterator(); iterator.hasNext(); ) { - Region otherRegion = iterator.next(); - if (region != otherRegion && otherRegion.isAdjacent(chunk)) { - iterator.remove(); - region.merge(otherRegion); + while (!chunkMap.isEmpty()) { + Map.Entry<ChunkCoordinate, ChunkInfo<?>> first = chunkMap.entrySet().iterator().next(); + ChunkInfo<?> firstValue = first.getValue(); + + chunkMap.remove(first.getKey()); + + Region region = new Region(firstValue); + regions.add(region); + + queue.add(firstValue); + + ChunkInfo<?> queued; + while ((queued = queue.pollFirst()) != null) { + int queuedX = queued.getX(); + int queuedZ = queued.getZ(); + + // merge adjacent chunks + for (int dz = -1; dz <= 1; ++dz) { + for (int dx = -1; dx <= 1; ++dx) { + if ((dx | dz) == 0) { + continue; } - } - break; - } - } + index.setCoordinate(queuedX + dx, queuedZ + dz); + ChunkInfo<?> adjacent = chunkMap.remove(index); - if (!found) { - regions.add(new Region(chunk)); + if (adjacent == null) { + continue; + } + + region.add(adjacent); + queue.add(adjacent); + } + } } }
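The rewrite replaces the old approach — test each chunk against every existing region, then merge any regions that both touch it — with a single breadth-first flood fill over a LinkedHashMap keyed by packed coordinates, so each populated chunk is claimed exactly once and grouping becomes roughly linear in the number of chunks instead of quadratic. Note the old DISTANCE_THRESHOLD of 2 (squared Euclidean distance) admitted exactly the eight surrounding chunks, which is the same 8-connectivity the new dx/dz loops scan. A minimal, self-contained sketch of the same idea on packed Long keys:

```java
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Set;

// Flood-fill sketch of the region grouping idea, not the spark class itself.
public class FloodFillSketch {
    // pack (x, z) into one long, mirroring ChunkCoordinate
    static long key(int x, int z) {
        return ((long) z << 32) | (x & 0xFFFFFFFFL);
    }

    // group packed cells into connected regions (8-connectivity)
    static List<List<Long>> group(Set<Long> cells) {
        LinkedHashMap<Long, Boolean> remaining = new LinkedHashMap<>();
        for (long cell : cells) {
            remaining.put(cell, Boolean.TRUE);
        }

        List<List<Long>> regions = new ArrayList<>();
        ArrayDeque<Long> queue = new ArrayDeque<>();

        while (!remaining.isEmpty()) {
            long start = remaining.keySet().iterator().next();
            remaining.remove(start);

            List<Long> region = new ArrayList<>();
            region.add(start);
            regions.add(region);
            queue.add(start);

            Long current;
            while ((current = queue.poll()) != null) {
                int x = (int) (long) current;   // low 32 bits
                int z = (int) (current >> 32);  // high 32 bits
                for (int dz = -1; dz <= 1; dz++) {
                    for (int dx = -1; dx <= 1; dx++) {
                        if ((dx | dz) == 0) continue; // skip the cell itself
                        long neighbour = key(x + dx, z + dz);
                        if (remaining.remove(neighbour) != null) {
                            region.add(neighbour); // claimed: each cell is visited once
                            queue.add(neighbour);
                        }
                    }
                }
            }
        }
        return regions;
    }
}
```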
@@ -141,8 +185,7 @@ private static List<Region> groupIntoRegions(List<ChunkInfo<?>> chunks /** * A map of nearby chunks grouped together by Euclidean distance. */ - private static final class Region { - private static final int DISTANCE_THRESHOLD = 2; + static final class Region { private final Set<ChunkInfo<?>> chunks; private final AtomicInteger totalEntities; @@ -160,30 +203,53 @@ public AtomicInteger getTotalEntities() { return this.totalEntities; } - public boolean isAdjacent(ChunkInfo<?> chunk) { - for (ChunkInfo<?> el : this.chunks) { - if (squaredEuclideanDistance(el, chunk) <= DISTANCE_THRESHOLD) { - return true; - } - } - return false; - } - public void add(ChunkInfo<?> chunk) { this.chunks.add(chunk); this.totalEntities.addAndGet(chunk.getEntityCounts().total().get()); } + } + + static final class ChunkCoordinate implements Comparable<ChunkCoordinate> { + long key; + + ChunkCoordinate() {} + + ChunkCoordinate(int chunkX, int chunkZ) { + this.setCoordinate(chunkX, chunkZ); + } - public void merge(Region group) { - this.chunks.addAll(group.getChunks()); - this.totalEntities.addAndGet(group.getTotalEntities().get()); + ChunkCoordinate(long key) { + this.setKey(key); } - private static long squaredEuclideanDistance(ChunkInfo<?> a, ChunkInfo<?> b) { - long dx = a.getX() - b.getX(); - long dz = a.getZ() - b.getZ(); - return (dx * dx) + (dz * dz); + public void setCoordinate(int chunkX, int chunkZ) { + this.setKey(((long) chunkZ << 32) | (chunkX & 0xFFFFFFFFL)); } - } + public void setKey(long key) { + this.key = key; + } + + @Override + public int hashCode() { + // fastutil hash without the last step, as it is done by HashMap + // doing the last step twice (h ^= (h >>> 16)) is both more expensive and destroys the hash + long h = this.key * 0x9E3779B97F4A7C15L; + h ^= h >>> 32; + return (int) h; + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof ChunkCoordinate)) { + return false; + } + return this.key == ((ChunkCoordinate) obj).key; + } + + @Override + public int compareTo(ChunkCoordinate other) { + return Long.compare(this.key, other.key); + } + } }
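ChunkCoordinate packs the two 32-bit chunk coordinates into a single long (z in the high bits, x masked into the low bits), and hashCode() spreads that key with a multiply by 0x9E3779B97F4A7C15 — a Fibonacci-hashing constant, roughly 2^64 divided by the golden ratio — so that nearby coordinates land in different HashMap buckets. A quick round-trip check of the packing:

```java
// Round-trip check for the (x, z) -> long packing used by ChunkCoordinate.
public class PackingCheck {
    public static void main(String[] args) {
        int x = -3, z = 7;
        long key = ((long) z << 32) | (x & 0xFFFFFFFFL);

        int unpackedX = (int) key;         // low 32 bits (sign-extends correctly)
        int unpackedZ = (int) (key >> 32); // high 32 bits

        System.out.println(unpackedX == x && unpackedZ == z); // true
    }
}
```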
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index d81400260cf..792fa7f0649 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -23,27 +23,28 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; -import me.lucko.spark.common.platform.MetadataProvider; -import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; +import me.lucko.spark.common.platform.SparkMetadata; import me.lucko.spark.common.sampler.aggregator.DataAggregator; -import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; +import me.lucko.spark.common.sampler.node.exporter.NodeExporter; import me.lucko.spark.common.sampler.source.ClassSourceLookup; -import me.lucko.spark.common.sampler.source.SourceMetadata; import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; import me.lucko.spark.common.sampler.window.WindowStatisticsCollector; +import me.lucko.spark.common.util.classfinder.ClassFinder; import me.lucko.spark.common.ws.ViewerSocket; import me.lucko.spark.proto.SparkProtos; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; -import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.logging.Level; /** * Base implementation class for {@link Sampler}s. @@ -78,7 +79,7 @@ public abstract class AbstractSampler implements Sampler { protected Map<String, GarbageCollectorStatistics> initialGcStats; /** A set of viewer sockets linked to the sampler */ - protected List<ViewerSocket> viewerSockets = new ArrayList<>(); + protected List<ViewerSocket> viewerSockets = new CopyOnWriteArrayList<>(); protected AbstractSampler(SparkPlatform platform, SamplerSettings settings) { this.platform = platform; @@ -120,6 +121,11 @@ protected Map<String, GarbageCollectorStatistics> getInitialGcStats() { return this.initialGcStats; } + @Override + public Map<Integer, SparkProtos.WindowStatistics> exportWindowStatistics() { + return this.windowStatisticsCollector.export(); + } + @Override public void start() { this.startTime = System.currentTimeMillis(); @@ -156,6 +162,7 @@ protected void processWindowRotate() { protected void sendStatisticsToSocket() { try { + this.viewerSockets.removeIf(socket -> !socket.isOpen()); if (this.viewerSockets.isEmpty()) { return; } @@ -167,73 +174,43 @@ viewerSocket.sendUpdatedStatistics(platform, system); } } catch (Exception e) { - e.printStackTrace(); + this.platform.getPlugin().log(Level.WARNING, "Exception occurred while sending statistics to viewer", e); } } protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender.Data creator, String comment, DataAggregator dataAggregator) { SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder() + .setSamplerEngine(getType().asProto()) .setSamplerMode(getMode().asProto()) - .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto()) - .setCreator(creator.toProto()) .setStartTime(this.startTime) - .setEndTime(System.currentTimeMillis()) .setInterval(this.interval) .setThreadDumper(this.threadDumper.getMetadata()) .setDataAggregator(dataAggregator.getMetadata()); + SparkMetadata.gather(platform, creator, getInitialGcStats()).writeTo(metadata); + if (comment != null) { metadata.setComment(comment); } + String libraryVersion = getLibraryVersion(); + if (libraryVersion != null) { + metadata.setSamplerEngineVersion(libraryVersion); + } + int totalTicks = this.windowStatisticsCollector.getTotalTicks(); if (totalTicks != -1) { metadata.setNumberOfTicks(totalTicks); } - try { - metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats(), true)); - } catch (Exception e) { - e.printStackTrace(); - } - - try { - metadata.setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics()); - } catch (Exception e) { - e.printStackTrace(); - } - - try { - ServerConfigProvider serverConfigProvider = platform.getPlugin().createServerConfigProvider(); - if (serverConfigProvider != null) { - metadata.putAllServerConfigurations(serverConfigProvider.export()); - } - } catch (Exception e) { - e.printStackTrace(); - } - - try { - MetadataProvider extraMetadataProvider = platform.getPlugin().createExtraMetadataProvider(); - if (extraMetadataProvider != null) { - metadata.putAllExtraPlatformMetadata(extraMetadataProvider.export()); - } - } catch (Exception e) { - e.printStackTrace(); - } - - Collection<SourceMetadata> knownSources = platform.getPlugin().getKnownSources(); - for (SourceMetadata source : knownSources) { - metadata.putSources(source.getName().toLowerCase(Locale.ROOT), source.toProto()); - } - proto.setMetadata(metadata); }
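All of the removed try/catch blocks now live in SparkMetadata.gather (see the start of this diff), so the sampler, heap-dump and health exporters can share one implementation. A sketch of what a caller looks like after the change — HeapMetadata is just one of the three builder types SparkMetadata can write to, and the GC baseline is whatever the caller holds (this class passes getInitialGcStats()):

```java
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
import me.lucko.spark.common.platform.SparkMetadata;
import me.lucko.spark.proto.SparkHeapProtos.HeapMetadata;

import java.util.Map;

// Sketch of a metadata-producing exporter after this refactor.
class MetadataUsageSketch {
    HeapMetadata buildMetadata(SparkPlatform platform, CommandSender.Data creator,
                               Map<String, GarbageCollectorStatistics> baselineGcStats) {
        HeapMetadata.Builder builder = HeapMetadata.newBuilder();
        // gather() swallows and logs per-component failures, so a broken
        // statistics provider no longer aborts the whole export
        SparkMetadata.gather(platform, creator, baselineGcStats).writeTo(builder);
        return builder.build();
    }
}
```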
- protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { + protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, Function<ProtoTimeEncoder, NodeExporter> nodeExporterFunction, ClassSourceLookup classSourceLookup, Supplier<ClassFinder> classFinderSupplier) { List<ThreadNode> data = dataAggregator.exportData(); data.sort(Comparator.comparing(ThreadNode::getThreadLabel)); - ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup); + ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup, classFinderSupplier); ProtoTimeEncoder timeEncoder = new ProtoTimeEncoder(getMode().valueTransformer(), data); int[] timeWindows = timeEncoder.getKeys(); @@ -244,8 +221,10 @@ protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAg this.windowStatisticsCollector.ensureHasStatisticsForAllWindows(timeWindows); proto.putAllTimeWindowStatistics(this.windowStatisticsCollector.export()); + NodeExporter exporter = nodeExporterFunction.apply(timeEncoder); + for (ThreadNode entry : data) { - proto.addThreads(entry.toProto(mergeMode, timeEncoder)); + proto.addThreads(exporter.export(entry)); classSourceVisitor.visit(entry); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java index 4e9ca9e073e..25cdc6bbb93 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java @@ -22,8 +22,9 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.platform.PlatformInfo; -import me.lucko.spark.common.util.Configuration; +import me.lucko.spark.common.util.config.Configuration; +import java.util.function.Supplier; import java.util.logging.Level; public class BackgroundSamplerManager { @@ -88,7 +89,7 @@ public void initialise() { } } catch (Throwable e) { - e.printStackTrace(); + this.platform.getPlugin().log(Level.WARNING, "Failed to start background profiler."); } } @@ -103,7 +104,7 @@ public boolean restartBackgroundSampler() { private void startSampler() { boolean forceJavaEngine = this.configuration.getString(OPTION_ENGINE, "async").equals("java"); - ThreadGrouper threadGrouper = ThreadGrouper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_GROUPER, "by-pool")); + Supplier<ThreadGrouper> threadGrouper = ThreadGrouper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_GROUPER, "by-pool")); ThreadDumper threadDumper = ThreadDumper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_DUMPER, "default")); if (threadDumper == null) { threadDumper = this.platform.getPlugin().getDefaultThreadDumper(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java index 844ab0bf757..50e6c58f281 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java @@ -22,13 +22,15 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; -import me.lucko.spark.common.sampler.node.MergeMode; +import me.lucko.spark.common.sampler.java.MergeStrategy;
import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.ws.ViewerSocket; +import me.lucko.spark.proto.SparkProtos; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import me.lucko.spark.proto.SparkSamplerProtos.SocketChannelInfo; import java.util.Collection; +import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.function.Supplier; @@ -82,6 +84,20 @@ public interface Sampler { */ boolean isRunningInBackground(); + /** + * Gets the sampler type. + * + * @return the sampler type + */ + SamplerType getType(); + + /** + * Gets the version of the sampler. + * + * @return the library version if known, else null + */ + String getLibraryVersion(); + /** * Gets the sampler mode. * @@ -96,13 +112,20 @@ */ CompletableFuture<Sampler> getFuture(); + /** + * Exports the current set of window statistics. + * + * @return the window statistics + */ + Map<Integer, SparkProtos.WindowStatistics> exportWindowStatistics(); + // Methods used to export the sampler data to the web viewer. SamplerData toProto(SparkPlatform platform, ExportProps exportProps); final class ExportProps { private CommandSender.Data creator; private String comment; - private Supplier<MergeMode> mergeMode; + private MergeStrategy mergeStrategy; private Supplier<ClassSourceLookup> classSourceLookup; private SocketChannelInfo channelInfo; @@ -117,8 +140,8 @@ public String comment() { return this.comment; } - public Supplier<MergeMode> mergeMode() { - return this.mergeMode; + public MergeStrategy mergeStrategy() { + return this.mergeStrategy; } public Supplier<ClassSourceLookup> classSourceLookup() { @@ -139,8 +162,8 @@ public ExportProps comment(String comment) { return this; } - public ExportProps mergeMode(Supplier<MergeMode> mergeMode) { - this.mergeMode = mergeMode; + public ExportProps mergeStrategy(MergeStrategy mergeStrategy) { + this.mergeStrategy = mergeStrategy; return this; }
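ExportProps keeps its fluent style after the MergeMode-to-MergeStrategy switch; callers now pass a MergeStrategy value directly instead of a Supplier<MergeMode>. A hedged sketch of an export call site — MergeStrategy.SAME_METHOD and ClassSourceLookup.create are assumptions for illustration, and the creator/classSourceLookup setters are assumed to follow the same fluent pattern shown above for comment():

```java
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.sampler.Sampler;
import me.lucko.spark.common.sampler.java.MergeStrategy;
import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;

// Hypothetical export call site after the MergeStrategy change.
class ExportSketch {
    SamplerData export(SparkPlatform platform, Sampler sampler, CommandSender.Data senderData) {
        Sampler.ExportProps props = new Sampler.ExportProps()
                .creator(senderData)
                .comment("end of profiling run")
                .mergeStrategy(MergeStrategy.SAME_METHOD)          // assumed constant name
                .classSourceLookup(() -> ClassSourceLookup.create(platform)); // assumed factory
        return sampler.toProto(platform, props);
    }
}
```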
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java index b6895ce8ac6..efae202f54a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java @@ -28,6 +28,8 @@ import me.lucko.spark.common.tick.TickHook; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import java.util.logging.Level; /** * Builds {@link Sampler} instances. @@ -38,13 +40,12 @@ public class SamplerBuilder { private SamplerMode mode = SamplerMode.EXECUTION; private double samplingInterval = -1; private boolean ignoreSleeping = false; - private boolean ignoreNative = false; - private boolean useAsyncProfiler = true; + private boolean forceJavaSampler = false; private boolean allocLiveOnly = false; private long autoEndTime = -1; private boolean background = false; private ThreadDumper threadDumper = ThreadDumper.ALL; - private ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME; + private Supplier<ThreadGrouper> threadGrouper = ThreadGrouper.BY_NAME; private int ticksOver = -1; private TickHook tickHook = null; @@ -80,7 +81,7 @@ public SamplerBuilder threadDumper(ThreadDumper threadDumper) { return this; } - public SamplerBuilder threadGrouper(ThreadGrouper threadGrouper) { + public SamplerBuilder threadGrouper(Supplier<ThreadGrouper> threadGrouper) { this.threadGrouper = threadGrouper; return this; } @@ -96,13 +97,8 @@ public SamplerBuilder ignoreSleeping(boolean ignoreSleeping) { return this; } - public SamplerBuilder ignoreNative(boolean ignoreNative) { - this.ignoreNative = ignoreNative; - return this; - } - public SamplerBuilder forceJavaSampler(boolean forceJavaSampler) { - this.useAsyncProfiler = !forceJavaSampler; + this.forceJavaSampler = forceJavaSampler; return this; } @@ -116,14 +112,22 @@ public Sampler start(SparkPlatform platform) throws UnsupportedOperationExceptio throw new IllegalArgumentException("samplingInterval = " + this.samplingInterval); } + AsyncProfilerAccess asyncProfiler = AsyncProfilerAccess.getInstance(platform); + boolean onlyTicksOverMode = this.ticksOver != -1 && this.tickHook != null; - boolean canUseAsyncProfiler = this.useAsyncProfiler && - !onlyTicksOverMode && - !(this.ignoreSleeping || this.ignoreNative) && - AsyncProfilerAccess.getInstance(platform).checkSupported(platform); + boolean canUseAsyncProfiler = asyncProfiler.checkSupported(platform) && (!onlyTicksOverMode || platform.getTickReporter() != null); - if (this.mode == SamplerMode.ALLOCATION && (!canUseAsyncProfiler || !AsyncProfilerAccess.getInstance(platform).checkAllocationProfilingSupported(platform))) { - throw new UnsupportedOperationException("Allocation profiling is not supported on your system. Check the console for more info."); + if (this.mode == SamplerMode.ALLOCATION) { + if (!canUseAsyncProfiler || !asyncProfiler.checkAllocationProfilingSupported(platform)) { + throw new UnsupportedOperationException("Allocation profiling is not supported on your system. Check the console for more info."); + } + if (this.ignoreSleeping) { + platform.getPlugin().log(Level.WARNING, "Ignoring sleeping threads is not supported in allocation profiling mode. Sleeping threads will be included in the results."); + } + } + + if (this.forceJavaSampler) { + canUseAsyncProfiler = false; + } int interval = (int) (this.mode == SamplerMode.EXECUTION ?
@@ -131,17 +135,20 @@ public Sampler start(SparkPlatform platform) throws UnsupportedOperationExceptio this.samplingInterval ); - SamplerSettings settings = new SamplerSettings(interval, this.threadDumper, this.threadGrouper, this.autoEndTime, this.background); + SamplerSettings settings = new SamplerSettings(interval, this.threadDumper, this.threadGrouper.get(), this.autoEndTime, this.background, this.ignoreSleeping); Sampler sampler; - if (this.mode == SamplerMode.ALLOCATION) { - sampler = new AsyncSampler(platform, settings, new SampleCollector.Allocation(interval, this.allocLiveOnly)); - } else if (canUseAsyncProfiler) { - sampler = new AsyncSampler(platform, settings, new SampleCollector.Execution(interval)); - } else if (onlyTicksOverMode) { - sampler = new JavaSampler(platform, settings, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver); + if (canUseAsyncProfiler) { + SampleCollector collector = this.mode == SamplerMode.ALLOCATION + ? new SampleCollector.Allocation(interval, this.allocLiveOnly) + : new SampleCollector.Execution(interval); + sampler = onlyTicksOverMode + ? new AsyncSampler(platform, settings, collector, this.ticksOver) + : new AsyncSampler(platform, settings, collector); } else { - sampler = new JavaSampler(platform, settings, this.ignoreSleeping, this.ignoreNative); + sampler = onlyTicksOverMode + ? new JavaSampler(platform, settings, this.tickHook, this.ticksOver) + : new JavaSampler(platform, settings); } sampler.start(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java index 6e55a43be84..dc28d2af1d3 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java @@ -30,13 +30,15 @@ public class SamplerSettings { private final ThreadGrouper threadGrouper; private final long autoEndTime; private final boolean runningInBackground; + private final boolean ignoreSleeping; - public SamplerSettings(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long autoEndTime, boolean runningInBackground) { + public SamplerSettings(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long autoEndTime, boolean runningInBackground, boolean ignoreSleeping) { this.interval = interval; this.threadDumper = threadDumper; this.threadGrouper = threadGrouper; this.autoEndTime = autoEndTime; this.runningInBackground = runningInBackground; + this.ignoreSleeping = ignoreSleeping; } public int interval() { @@ -58,4 +60,8 @@ public long autoEndTime() { public boolean runningInBackground() { return this.runningInBackground; } + + public boolean ignoreSleeping() { + return this.ignoreSleeping; + } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerType.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerType.java new file mode 100644 index 00000000000..aad4b23b798 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerType.java @@ -0,0 +1,47 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.common.sampler; + +import me.lucko.spark.common.sampler.async.AsyncSampler; +import me.lucko.spark.common.sampler.java.JavaSampler; +import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; + +public enum SamplerType { + JAVA(JavaSampler.class, SamplerMetadata.SamplerEngine.JAVA), + ASYNC(AsyncSampler.class, SamplerMetadata.SamplerEngine.ASYNC); + + private final Class<? extends Sampler> expectedClass; + private final SamplerMetadata.SamplerEngine proto; + + SamplerType(Class<? extends Sampler> expectedClass, SamplerMetadata.SamplerEngine proto) { + this.expectedClass = expectedClass; + this.proto = proto; + } + + public Class<? extends Sampler> implClass() { + return this.expectedClass; + } + + public SamplerMetadata.SamplerEngine asProto() { + return this.proto; + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java index b6cfbea691b..7a791bcb786 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java @@ -26,6 +26,7 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Supplier; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -64,7 +65,7 @@ public interface ThreadGrouper { * @param setting the config setting * @return the thread grouper */ - static ThreadGrouper parseConfigSetting(String setting) { + static Supplier<ThreadGrouper> parseConfigSetting(String setting) { switch (setting) { case "as-one": return AS_ONE; @@ -75,10 +76,15 @@ static ThreadGrouper parseConfigSetting(String setting) { } } + /** + * Supplier for {@link ByName} thread groupers. + */ + Supplier<ThreadGrouper> BY_NAME = ByName::new; + /** * Implementation of {@link ThreadGrouper} that just groups by thread name. */ - ThreadGrouper BY_NAME = new ThreadGrouper() { + class ByName implements ThreadGrouper { @Override public String getGroup(long threadId, String threadName) { return threadName; @@ -93,7 +99,12 @@ public String getLabel(String group) { public SamplerMetadata.DataAggregator.ThreadGrouper asProto() { return SamplerMetadata.DataAggregator.ThreadGrouper.BY_NAME; } - }; + } + + /** + * Supplier for {@link ByPool} thread groupers. + */ + Supplier<ThreadGrouper> BY_POOL = ByPool::new; /** * Implementation of {@link ThreadGrouper} that attempts to group by the name of the pool * <p>The regex pattern used to match pools expects a digit at the end of the thread name, * separated from the pool name with any of one or more of ' ', '-', or '#'.</p> */ - ThreadGrouper BY_POOL = new ThreadGrouper() { - private /* static */ final Pattern pattern = Pattern.compile("^(.*?)[-# ]+\\d+$"); + class ByPool implements ThreadGrouper { + private static final Pattern PATTERN = Pattern.compile("^(.*?)[-# ]+\\d+$"); // thread id -> group private final Map<Long, String> cache = new ConcurrentHashMap<>(); @@ -117,7 +128,7 @@ public String getGroup(long threadId, String threadName) { return cached; } - Matcher matcher = this.pattern.matcher(threadName); + Matcher matcher = PATTERN.matcher(threadName); if (!matcher.matches()) { return threadName; } @@ -141,13 +152,18 @@ public String getLabel(String group) { public SamplerMetadata.DataAggregator.ThreadGrouper asProto() { return SamplerMetadata.DataAggregator.ThreadGrouper.BY_POOL; } - }; + } + + /** + * Supplier for {@link AsOne} thread groupers. + */ + Supplier<ThreadGrouper> AS_ONE = AsOne::new; /** * Implementation of {@link ThreadGrouper} which groups all threads as one, under * the name "All". */ - ThreadGrouper AS_ONE = new ThreadGrouper() { + class AsOne implements ThreadGrouper { private final Set<Long> seen = ConcurrentHashMap.newKeySet(); @Override @@ -165,6 +181,6 @@ public String getLabel(String group) { public SamplerMetadata.DataAggregator.ThreadGrouper asProto() { return SamplerMetadata.DataAggregator.ThreadGrouper.AS_ONE; } - }; + } }
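The motivation for turning the grouper singletons into classes with Supplier constants: ByPool and AsOne carry per-profile state (a thread-name cache and a "seen" set), so one shared instance could leak state between profiling runs. With the supplier indirection, each sampler resolves a fresh instance. Usage barely changes:

```java
import java.util.function.Supplier;
import me.lucko.spark.common.sampler.ThreadGrouper;

// Each sampler now gets its own grouper instance, so the ByPool cache and
// AsOne "seen" set are not shared between profiling runs.
class GrouperSketch {
    void demo() {
        Supplier<ThreadGrouper> setting = ThreadGrouper.parseConfigSetting("by-pool");
        ThreadGrouper grouperForThisRun = setting.get(); // fresh instance per sampler

        // the pool regex strips the trailing worker number
        System.out.println(grouperForThisRun.getGroup(1L, "Worker-Main-7")); // "Worker-Main"
    }
}
```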
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java index 2c003e5c6bf..744ad41b3a6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java @@ -40,8 +40,12 @@ public abstract class AbstractDataAggregator implements DataAggregator { /** The instance used to group threads together */ protected final ThreadGrouper threadGrouper; - protected AbstractDataAggregator(ThreadGrouper threadGrouper) { + /** If sleeping threads should be ignored */ + protected final boolean ignoreSleeping; + + protected AbstractDataAggregator(ThreadGrouper threadGrouper, boolean ignoreSleeping) { this.threadGrouper = threadGrouper; + this.ignoreSleeping = ignoreSleeping; } protected ThreadNode getNode(String group) { @@ -65,4 +69,13 @@ public List<ThreadNode> exportData() { } return data; } + + protected static boolean isSleeping(String clazz, String method) { + // java.lang.Thread.yield() + // jdk.internal.misc.Unsafe.park() + // sun.misc.Unsafe.park() + return (clazz.equals("java.lang.Thread") && method.equals("yield")) || + (clazz.equals("jdk.internal.misc.Unsafe") && method.equals("park")) || + (clazz.equals("sun.misc.Unsafe") && method.equals("park")); + } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java index b9a80e04d60..808241b5673 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java @@ -29,14 +29,14 @@ /** * Data aggregator for {@link AsyncSampler}. */ -public class AsyncDataAggregator extends AbstractDataAggregator { +public class AsyncDataAggregator extends AbstractDataAggregator implements AutoCloseable { /** A describer for async-profiler stack trace elements. */ private static final StackTraceNode.Describer<AsyncStackTraceElement> STACK_TRACE_DESCRIBER = (element, parent) -> - new StackTraceNode.Description(element.getClassName(), element.getMethodName(), element.getMethodDescription()); + new StackTraceNode.AsyncDescription(element.getClassName(), element.getMethodName(), element.getMethodDescription()); - protected AsyncDataAggregator(ThreadGrouper threadGrouper) { - super(threadGrouper); + protected AsyncDataAggregator(ThreadGrouper threadGrouper, boolean ignoreSleeping) { + super(threadGrouper, ignoreSleeping); } @Override @@ -48,6 +48,9 @@ public SamplerMetadata.DataAggregator getMetadata() { } public void insertData(ProfileSegment element, int window) { + if (this.ignoreSleeping && isSleeping(element)) { + return; + } try { ThreadNode node = getNode(this.threadGrouper.getGroup(element.getNativeThreadId(), element.getThreadName())); node.log(STACK_TRACE_DESCRIBER, element.getStackTrace(), element.getValue(), window); @@ -56,4 +59,28 @@ public void insertData(ProfileSegment element, int window) { } } + private static boolean isSleeping(ProfileSegment element) { + // thread states written by async-profiler: + // https://github.com/async-profiler/async-profiler/blob/116504c9f75721911b2f561e29eda065c224caf6/src/flightRecorder.cpp#L1017-L1023 + String threadState = element.getThreadState(); + if (threadState.equals("STATE_SLEEPING")) { + return true; + } + + // async-profiler includes native frames - let's check more than just the top frame + AsyncStackTraceElement[] stackTrace = element.getStackTrace(); + for (int i = 0; i < Math.min(3, stackTrace.length); i++) { + String clazz = stackTrace[i].getClassName(); + String method = stackTrace[i].getMethodName(); + if (isSleeping(clazz, method)) { + return true; + } + } + return false; + } + + @Override + public void close() { + + } }
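The sleep check inspects the top three frames rather than just the first, because async-profiler keeps native frames that sit above the Java-level park/yield call. A minimal check mirroring the frame matching shown above, with an illustrative parked-worker stack:

```java
// Mirrors AbstractDataAggregator.isSleeping's frame matching; stack contents illustrative.
public class SleepCheckSketch {
    static boolean isSleeping(String clazz, String method) {
        return (clazz.equals("java.lang.Thread") && method.equals("yield")) ||
               (clazz.equals("jdk.internal.misc.Unsafe") && method.equals("park")) ||
               (clazz.equals("sun.misc.Unsafe") && method.equals("park"));
    }

    public static void main(String[] args) {
        // a parked worker thread: native frame on top, Unsafe.park just below it
        String[][] frames = {
                {"/usr/lib/libpthread.so", "pthread_cond_wait"},     // native frame kept by async-profiler
                {"jdk.internal.misc.Unsafe", "park"},                // matched within the top 3 frames
                {"java.util.concurrent.locks.LockSupport", "park"},
        };
        boolean sleeping = false;
        for (int i = 0; i < Math.min(3, frames.length); i++) {
            sleeping |= isSleeping(frames[i][0], frames[i][1]);
        }
        System.out.println(sleeping); // true -> the sample would be dropped
    }
}
```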
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncNodeExporter.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncNodeExporter.java new file mode 100644 index 00000000000..ef68c46052c --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncNodeExporter.java @@ -0,0 +1,63 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) <luck@lucko.me> + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.common.sampler.async; + +import me.lucko.spark.common.sampler.node.StackTraceNode; +import me.lucko.spark.common.sampler.node.exporter.AbstractNodeExporter; +import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; +import me.lucko.spark.proto.SparkSamplerProtos; + +import java.util.Collection; + +/** + * Node exporter for the {@link AsyncSampler}. + */ +public class AsyncNodeExporter extends AbstractNodeExporter { + public AsyncNodeExporter(ProtoTimeEncoder timeEncoder) { + super(timeEncoder); + } + + @Override + protected SparkSamplerProtos.StackTraceNode export(StackTraceNode stackTraceNode, Iterable<Integer> childrenRefs) { + SparkSamplerProtos.StackTraceNode.Builder proto = SparkSamplerProtos.StackTraceNode.newBuilder() + .setClassName(stackTraceNode.getClassName()) + .setMethodName(stackTraceNode.getMethodName()); + + double[] times = stackTraceNode.encodeTimesForProto(this.timeEncoder); + for (double time : times) { + proto.addTimes(time); + } + + String methodDescription = stackTraceNode.getMethodDescription(); + if (methodDescription != null) { + proto.setMethodDesc(methodDescription); + } + + proto.addAllChildrenRefs(childrenRefs); + + return proto.build(); + } + + @Override + protected Collection<StackTraceNode> exportChildren(Collection<StackTraceNode> children) { + return children; + } +}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java index 5bee56f015c..4920bce0a6b 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java @@ -23,15 +23,11 @@ import com.google.common.collect.ImmutableTable; import com.google.common.collect.Table; import com.google.common.io.ByteStreams; - import me.lucko.spark.common.SparkPlatform; - import one.profiler.AsyncProfiler; import one.profiler.Events; -import java.io.BufferedReader; import java.io.InputStream; -import java.io.InputStreamReader; import java.io.OutputStream; import java.net.URL; import java.nio.file.Files; @@ -39,7 +35,6 @@ import java.util.Locale; import java.util.Objects; import java.util.logging.Level; -import java.util.stream.Collectors; /** * Provides a bridge between spark and async-profiler. @@ -75,14 +70,10 @@ public static synchronized AsyncProfilerAccess getInstance(SparkPlatform platfor try { profiler = load(platform); - if (isEventSupported(profiler, ProfilingEvent.ALLOC, false)) { allocationProfilingEvent = ProfilingEvent.ALLOC; } - - if (isEventSupported(profiler, ProfilingEvent.CPU, false)) { - profilingEvent = ProfilingEvent.CPU; - } else if (isEventSupported(profiler, ProfilingEvent.WALL, true)) { + if (isEventSupported(profiler, ProfilingEvent.WALL, true)) { profilingEvent = ProfilingEvent.WALL; } } catch (Exception e) { @@ -116,13 +107,19 @@ public boolean checkSupported(SparkPlatform platform) { if (this.setupException instanceof UnsupportedSystemException) { platform.getPlugin().log(Level.INFO, "The async-profiler engine is not supported for your os/arch (" + this.setupException.getMessage() + "), so the built-in Java engine will be used instead."); + } else if (this.setupException instanceof UnsupportedJvmException) { + platform.getPlugin().log(Level.INFO, "The async-profiler engine is not supported for your JVM (" + + this.setupException.getMessage() + "), so the built-in Java engine will be used instead."); } else if (this.setupException instanceof NativeLoadingException && this.setupException.getCause().getMessage().contains("libstdc++")) { platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine because libstdc++ is not installed."); platform.getPlugin().log(Level.WARNING, "Please see here for more information: https://spark.lucko.me/docs/misc/Using-async-profiler#install-libstdc"); } else { - platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine: " + this.setupException.getMessage()); + String error = this.setupException.getMessage(); + if (this.setupException.getCause() != null) { + error += " (" + this.setupException.getCause().getMessage() + ")"; + } + platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine: " + error); platform.getPlugin().log(Level.WARNING, "Please see here for more information: https://spark.lucko.me/docs/misc/Using-async-profiler"); - this.setupException.printStackTrace(); } } @@ -138,18 +135,18 @@ public boolean checkAllocationProfilingSupported(SparkPlatform platform) { return supported; } + public String getVersion() { + return this.profiler.getVersion(); + } + private static AsyncProfiler load(SparkPlatform platform) throws Exception { // check compatibility String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", ""); String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT); - - if (os.equals("linux") && arch.equals("amd64") && isLinuxMusl()) { - arch = "amd64-musl"; - } + String jvm = System.getProperty("java.vm.name"); Table<String, String, String> supported = ImmutableTable.<String, String, String>builder() .put("linux", "amd64", "linux/amd64") - .put("linux", "amd64-musl", "linux/amd64-musl") .put("linux", "aarch64", "linux/aarch64") .put("macosx", "amd64", "macos") .put("macosx", "aarch64", "macos") @@ -161,7 +158,7 @@ private static AsyncProfiler load(SparkPlatform platform) throws Exception { } // extract the profiler binary from the spark jar file - String resource = "spark/" + libPath + "/libasyncProfiler.so"; + String resource = "spark-native/" + libPath + "/libasyncProfiler.so"; URL profilerResource = AsyncProfilerAccess.class.getClassLoader().getResource(resource); if (profilerResource == null) { throw new IllegalStateException("Could not find " + resource + " in spark jar file"); @@ -203,8 +200,7 @@ private static boolean
isEventSupported(AsyncProfiler profiler, ProfilingEvent e return false; } - enum ProfilingEvent { - CPU(Events.CPU), + public enum ProfilingEvent { WALL(Events.WALL), ALLOC(Events.ALLOC); @@ -226,25 +222,15 @@ public UnsupportedSystemException(String os, String arch) { } } - private static final class NativeLoadingException extends RuntimeException { - public NativeLoadingException(Throwable cause) { - super("A runtime error occurred whilst loading the native library", cause); + private static final class UnsupportedJvmException extends UnsupportedOperationException { + public UnsupportedJvmException(String jvm) { + super(jvm); } } - // Checks if the system is using musl instead of glibc - private static boolean isLinuxMusl() { - try { - InputStream stream = new ProcessBuilder("sh", "-c", "ldd `which ls`") - .start() - .getInputStream(); - - BufferedReader reader = new BufferedReader(new InputStreamReader(stream)); - String output = reader.lines().collect(Collectors.joining()); - return output.contains("musl"); // shrug - } catch (Throwable e) { - // ignore - return false; + private static final class NativeLoadingException extends RuntimeException { + public NativeLoadingException(Throwable cause) { + super("A runtime error occurred whilst loading the native library", cause); } } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java index 2fd304c1919..f869adc3876 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java @@ -21,11 +21,9 @@ package me.lucko.spark.common.sampler.async; import com.google.common.collect.ImmutableList; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.sampler.async.jfr.JfrReader; - import one.profiler.AsyncProfiler; import java.io.IOException; @@ -86,6 +84,8 @@ static AsyncProfilerJob createNew(AsyncProfilerAccess access, AsyncProfiler prof private int window; /** If the profiler should run in quiet mode */ private boolean quiet; + /** If the profiler needs to use the same clock as {@link System#nanoTime()} */ + private boolean forceNanoTime; /** The file used by async-profiler to output data */ private Path outputFile; @@ -119,12 +119,13 @@ private void checkActive() { } // Initialise the job - public void init(SparkPlatform platform, SampleCollector collector, ThreadDumper threadDumper, int window, boolean quiet) { + public void init(SparkPlatform platform, SampleCollector collector, ThreadDumper threadDumper, int window, boolean quiet, boolean forceNanoTime) { this.platform = platform; this.sampleCollector = collector; this.threadDumper = threadDumper; this.window = window; this.quiet = quiet; + this.forceNanoTime = forceNanoTime; } /** @@ -153,6 +154,9 @@ public void start() { if (this.threadDumper instanceof ThreadDumper.Specific) { command.add("filter"); } + if (this.forceNanoTime) { + command.add("clock=monotonic"); + } // start the profiler String resp = execute(command.build()).trim(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index 961c3e99dfa..31f64badcf2 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -21,11 +21,11 @@ package me.lucko.spark.common.sampler.async; import com.google.common.util.concurrent.ThreadFactoryBuilder; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.sampler.AbstractSampler; import me.lucko.spark.common.sampler.SamplerMode; import me.lucko.spark.common.sampler.SamplerSettings; +import me.lucko.spark.common.sampler.SamplerType; import me.lucko.spark.common.sampler.window.ProfilingWindowUtils; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.SparkThreadFactory; @@ -37,6 +37,7 @@ import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.function.IntPredicate; +import java.util.logging.Level; /** * A sampler implementation using async-profiler. @@ -52,6 +53,9 @@ public class AsyncSampler extends AbstractSampler { /** Responsible for aggregating and then outputting collected sampling data */ private final AsyncDataAggregator dataAggregator; + /** Whether to force the sampler to use monotonic/nano time */ + private final boolean forceNanoTime; + /** Mutex for the current profiler job */ private final Object[] currentJobMutex = new Object[0]; @@ -65,10 +69,19 @@ public class AsyncSampler extends AbstractSampler { private ScheduledFuture socketStatisticsTask; public AsyncSampler(SparkPlatform platform, SamplerSettings settings, SampleCollector collector) { + this(platform, settings, collector, new AsyncDataAggregator(settings.threadGrouper(), settings.ignoreSleeping()), false); + } + + public AsyncSampler(SparkPlatform platform, SamplerSettings settings, SampleCollector collector, int tickLengthThreshold) { + this(platform, settings, collector, new TickedAsyncDataAggregator(settings.threadGrouper(), settings.ignoreSleeping(), platform.getTickReporter(), tickLengthThreshold), true); + } + + private AsyncSampler(SparkPlatform platform, SamplerSettings settings, SampleCollector collector, AsyncDataAggregator dataAggregator, boolean forceNanoTime) { super(platform, settings); this.sampleCollector = collector; + this.dataAggregator = dataAggregator; + this.forceNanoTime = forceNanoTime; this.profilerAccess = AsyncProfilerAccess.getInstance(platform); - this.dataAggregator = new AsyncDataAggregator(settings.threadGrouper()); this.scheduler = Executors.newSingleThreadScheduledExecutor( new ThreadFactoryBuilder() .setNameFormat("spark-async-sampler-worker-thread") @@ -92,7 +105,7 @@ public void start() { int window = ProfilingWindowUtils.windowNow(); AsyncProfilerJob job = this.profilerAccess.startNewProfilerJob(); - job.init(this.platform, this.sampleCollector, this.threadDumper, window, this.background); + job.init(this.platform, this.sampleCollector, this.threadDumper, window, this.background, this.forceNanoTime); job.start(); this.windowStatisticsCollector.recordWindowStartTime(window); this.currentJob = job; @@ -124,13 +137,13 @@ private void rotateProfilerJob() { // stop the previous job previousJob.stop(); } catch (Exception e) { - e.printStackTrace(); + this.platform.getPlugin().log(Level.WARNING, "Failed to stop previous profiler job", e); } // start a new job int window = previousJob.getWindow() + 1; AsyncProfilerJob newJob = this.profilerAccess.startNewProfilerJob(); - newJob.init(this.platform, this.sampleCollector, this.threadDumper, window, this.background); + newJob.init(this.platform, this.sampleCollector, this.threadDumper, window, this.background, this.forceNanoTime); newJob.start(); 
this.windowStatisticsCollector.recordWindowStartTime(window); this.currentJob = newJob; @@ -139,7 +152,7 @@ private void rotateProfilerJob() { try { this.windowStatisticsCollector.measureNow(previousJob.getWindow()); } catch (Exception e) { - e.printStackTrace(); + this.platform.getPlugin().log(Level.WARNING, "Failed to measure window statistics", e); } // aggregate the output of the previous job @@ -153,7 +166,7 @@ private void rotateProfilerJob() { this.scheduler.execute(this::processWindowRotate); } } catch (Throwable e) { - e.printStackTrace(); + this.platform.getPlugin().log(Level.WARNING, "Exception occurred while rotating profiler job", e); } } @@ -168,8 +181,12 @@ private void scheduleTimeout() { } this.scheduler.schedule(() -> { - stop(false); - this.future.complete(this); + try { + stop(false); + this.future.complete(this); + } catch (Exception e) { + this.future.completeExceptionally(e); + } }, delay, TimeUnit.MILLISECONDS); } @@ -199,6 +216,7 @@ public void stop(boolean cancelled) { this.scheduler.shutdown(); this.scheduler = null; } + this.dataAggregator.close(); } @Override @@ -210,6 +228,16 @@ public void attachSocket(ViewerSocket socket) { } } + @Override + public SamplerType getType() { + return SamplerType.ASYNC; + } + + @Override + public String getLibraryVersion() { + return this.profilerAccess.getVersion(); + } + @Override public SamplerMode getMode() { return this.sampleCollector.getMode(); @@ -222,7 +250,7 @@ public SamplerData toProto(SparkPlatform platform, ExportProps exportProps) { proto.setChannelInfo(exportProps.channelInfo()); } writeMetadataToProto(proto, platform, exportProps.creator(), exportProps.comment(), this.dataAggregator); - writeDataToProto(proto, this.dataAggregator, exportProps.mergeMode().get(), exportProps.classSourceLookup().get()); + writeDataToProto(proto, this.dataAggregator, AsyncNodeExporter::new, exportProps.classSourceLookup().get(), platform::createClassFinder); return proto.build(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ExceedingTicksFilter.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ExceedingTicksFilter.java new file mode 100644 index 00000000000..d1a4ab1fab7 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ExceedingTicksFilter.java @@ -0,0 +1,97 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.sampler.async; + +import me.lucko.spark.common.tick.TickReporter; + +import java.util.Queue; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.LongSupplier; + +class ExceedingTicksFilter implements TickReporter.Callback { + + /** The ticks that exceeded the threshold, cleared one-by-one when inserting data */ + private final Queue<ExceededTick> ticksOver = new ConcurrentLinkedQueue<>(); + + /** Counts the number of ticks aggregated */ + private final AtomicInteger tickCounter = new AtomicInteger(); + + /** Tick durations under this threshold will not be inserted, measured in milliseconds */ + private final int tickLengthThreshold; + + /** The source to get the current nano time from */ + private final LongSupplier nanoTimeSource; + + ExceedingTicksFilter(int tickLengthThreshold, LongSupplier nanoTimeSource) { + this.tickLengthThreshold = tickLengthThreshold; + this.nanoTimeSource = nanoTimeSource; + } + + public ExceedingTicksFilter(int tickLengthThreshold) { + this(tickLengthThreshold, System::nanoTime); + } + + @Override + public void onTick(double duration) { + if (duration > this.tickLengthThreshold) { + long end = this.nanoTimeSource.getAsLong(); + long start = (long) (end - (duration * 1_000_000)); // ms to ns + this.ticksOver.add(new ExceededTick(start, end)); + this.tickCounter.getAndIncrement(); + } + } + + public int exceedingTicksCount() { + return this.tickCounter.get(); + } + + public boolean duringExceedingTick(long time) { + while (true) { + ExceededTick earliestExceeding = this.ticksOver.peek(); + if (earliestExceeding == null) { + // no tick over threshold anymore + return false; + } else if (time - earliestExceeding.start < 0) { + // segment happened before current exceeding + return false; + } else if (earliestExceeding.end - time < 0) { + // segment happened after current exceeding, + // but it might fall into the next one + this.ticksOver.remove(); + } else { + // segment falls exactly into exceeding, record it + return true; + } + } + } + + private static final class ExceededTick { + // times are in nanoseconds from System.nanoTime() + private final long start; + private final long end; + + ExceededTick(long start, long end) { + this.start = start; + this.end = end; + } + } +}
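Two details of duringExceedingTick are worth spelling out: it assumes samples arrive in roughly increasing time order (ticks that ended before the current sample are popped permanently), and it compares timestamps by subtraction so the logic survives System.nanoTime() wrap-around. A test-style walk-through — this assumes same-package access to the package-private clock-injecting constructor, which appears to exist for exactly this kind of use:

```java
import java.util.concurrent.atomic.AtomicLong;

// Walk-through of the filter's queue logic with a controllable clock.
class FilterWalkthrough {
    public static void main(String[] args) {
        AtomicLong clock = new AtomicLong();
        ExceedingTicksFilter filter = new ExceedingTicksFilter(50, clock::get);

        clock.set(150_000_000L); // a 100ms tick ends at t=150ms...
        filter.onTick(100.0);    // ...so it spans [50ms, 150ms] and exceeds the 50ms threshold

        System.out.println(filter.duringExceedingTick(120_000_000L)); // true: inside the slow tick
        System.out.println(filter.duringExceedingTick(200_000_000L)); // false: after it, tick popped
    }
}
```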
{@value #UNKNOWN_THREAD_STATE} if state is unknown */ + private final String threadState; + /** The time at which this segment was recorded, as if it was produced by {@link System#nanoTime()} */ + private final long time; - public ProfileSegment(int nativeThreadId, String threadName, AsyncStackTraceElement[] stackTrace, long value) { + private ProfileSegment(int nativeThreadId, String threadName, AsyncStackTraceElement[] stackTrace, long value, String threadState, long time) { this.nativeThreadId = nativeThreadId; this.threadName = threadName; this.stackTrace = stackTrace; this.value = value; + this.threadState = threadState; + this.time = time; } public int getNativeThreadId() { @@ -63,16 +73,31 @@ public long getValue() { return this.value; } + public String getThreadState() { + return this.threadState; + } + + public long getTime() { + return this.time; + } + public static ProfileSegment parseSegment(JfrReader reader, JfrReader.Event sample, String threadName, long value) { JfrReader.StackTrace stackTrace = reader.stackTraces.get(sample.stackTraceId); - int len = stackTrace.methods.length; + int len = stackTrace != null ? stackTrace.methods.length : 0; AsyncStackTraceElement[] stack = new AsyncStackTraceElement[len]; for (int i = 0; i < len; i++) { stack[i] = parseStackFrame(reader, stackTrace.methods[i]); } + String threadState = UNKNOWN_THREAD_STATE; + if (sample instanceof JfrReader.ExecutionSample) { + JfrReader.ExecutionSample executionSample = (JfrReader.ExecutionSample) sample; + + Map threadStateLookup = reader.enums.getOrDefault("jdk.types.ThreadState", ImmutableMap.of()); + threadState = threadStateLookup.getOrDefault(executionSample.threadState, UNKNOWN_THREAD_STATE); + } - return new ProfileSegment(sample.tid, threadName, stack, value); + return new ProfileSegment(sample.tid, threadName, stack, value, threadState, sample.time); } private static AsyncStackTraceElement parseStackFrame(JfrReader reader, long methodId) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/SampleCollector.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/SampleCollector.java index 6054b915737..cd3f7503f07 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/SampleCollector.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/SampleCollector.java @@ -21,7 +21,6 @@ package me.lucko.spark.common.sampler.async; import com.google.common.collect.ImmutableList; - import me.lucko.spark.common.sampler.SamplerMode; import me.lucko.spark.common.sampler.async.AsyncProfilerAccess.ProfilingEvent; import me.lucko.spark.common.sampler.async.jfr.JfrReader.AllocationSample; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/TickedAsyncDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/TickedAsyncDataAggregator.java new file mode 100644 index 00000000000..73babc4a36c --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/TickedAsyncDataAggregator.java @@ -0,0 +1,74 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.common.sampler.async; + +import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.common.tick.TickReporter; +import me.lucko.spark.proto.SparkSamplerProtos; + +import java.util.concurrent.TimeUnit; + +public class TickedAsyncDataAggregator extends AsyncDataAggregator { + + /** The callback called when this aggregator is closed, to clean up resources */ + private final Runnable closeCallback; + + /** Tick durations under this threshold will not be inserted, measured in microseconds */ + private final long tickLengthThreshold; + + private final ExceedingTicksFilter filter; + + protected TickedAsyncDataAggregator(ThreadGrouper threadGrouper, boolean ignoreSleeping, TickReporter tickReporter, int tickLengthThreshold) { + super(threadGrouper, ignoreSleeping); + this.tickLengthThreshold = TimeUnit.MILLISECONDS.toMicros(tickLengthThreshold); + this.filter = new ExceedingTicksFilter(tickLengthThreshold); + tickReporter.addCallback(this.filter); + this.closeCallback = () -> tickReporter.removeCallback(this.filter); + } + + @Override + public void insertData(ProfileSegment element, int window) { + // with async-profiler clock=monotonic, the event time uses the same clock + // as System.nanoTime(), so we can compare it directly + long time = element.getTime(); + if (!this.filter.duringExceedingTick(time)) { + return; + } + super.insertData(element, window); + } + + @Override + public SparkSamplerProtos.SamplerMetadata.DataAggregator getMetadata() { + return SparkSamplerProtos.SamplerMetadata.DataAggregator.newBuilder() + .setType(SparkSamplerProtos.SamplerMetadata.DataAggregator.Type.TICKED) + .setThreadGrouper(this.threadGrouper.asProto()) + .setTickLengthThreshold(this.tickLengthThreshold) + .setNumberOfIncludedTicks(this.filter.exceedingTicksCount()) + .build(); + } + + @Override + public void close() { + this.closeCallback.run(); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java index 60f654377ac..4a327f20e65 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java @@ -1,17 +1,6 @@ /* - * Copyright 2020 Andrei Pangin - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
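One detail of ExceedingTicksFilter and TickedAsyncDataAggregator above worth spelling out: System.nanoTime() values carry no absolute meaning and can wrap, which is why duringExceedingTick orders timestamps by the sign of their difference (time - earliestExceeding.start < 0) instead of comparing them directly; this is the idiom the JDK itself documents for nanoTime. A self-contained illustration (the timestamps are invented for the example):

class NanoTimeOrderingSketch {
    public static void main(String[] args) {
        long start = Long.MAX_VALUE - 1_000_000L; // a nanoTime reading near the wrap point
        long end = start + 10_000_000L;           // 10ms later; overflows to a negative value
        long sample = start + 5_000_000L;         // 5ms later; also wrapped

        System.out.println(sample > start);       // false: direct comparison gets the order wrong
        System.out.println(sample - start >= 0);  // true: signed difference gives the real order
        System.out.println(end - sample >= 0);    // true: the sample falls within [start, end]
    }
}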
+ * Copyright The async-profiler authors + * SPDX-License-Identifier: Apache-2.0 */ package me.lucko.spark.common.sampler.async.jfr; @@ -37,9 +26,11 @@ public void clear() { size = 0; } + // spark start public int size() { return this.size; } + // spark end public void put(long key, T value) { if (key == 0) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java index ea4985eee78..cc110cd29c7 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java @@ -1,17 +1,6 @@ /* - * Copyright 2020 Andrei Pangin - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Copyright The async-profiler authors + * SPDX-License-Identifier: Apache-2.0 */ package me.lucko.spark.common.sampler.async.jfr; @@ -20,7 +9,10 @@ import java.io.Closeable; import java.io.IOException; +import java.lang.reflect.Constructor; +import java.lang.reflect.Field; import java.nio.ByteBuffer; +import java.nio.ByteOrder; import java.nio.channels.FileChannel; import java.nio.charset.StandardCharsets; import java.nio.file.Path; @@ -39,41 +31,55 @@ public class JfrReader implements Closeable { private static final int CHUNK_HEADER_SIZE = 68; private static final int CHUNK_SIGNATURE = 0x464c5200; + private static final byte STATE_NEW_CHUNK = 0; + private static final byte STATE_READING = 1; + private static final byte STATE_EOF = 2; + private static final byte STATE_INCOMPLETE = 3; + private final FileChannel ch; private ByteBuffer buf; + private final long fileSize; private long filePosition; + private byte state; - public boolean incomplete; public long startNanos = Long.MAX_VALUE; public long endNanos = Long.MIN_VALUE; public long startTicks = Long.MAX_VALUE; + public long chunkStartNanos; + public long chunkEndNanos; + public long chunkStartTicks; public long ticksPerSec; + public boolean stopAtNewChunk; public final Dictionary types = new Dictionary<>(); public final Map typesByName = new HashMap<>(); - public final Map threads = new HashMap<>(); // spark + public final Map threads = new HashMap<>(); // spark - convert to map public final Dictionary classes = new Dictionary<>(); + public final Dictionary strings = new Dictionary<>(); public final Dictionary symbols = new Dictionary<>(); public final Dictionary methods = new Dictionary<>(); public final Dictionary stackTraces = new Dictionary<>(); - public final Dictionary stackFrames = new Dictionary<>(); // spark - public final Map frameTypes = new HashMap<>(); - public final Map threadStates = new HashMap<>(); + public final Dictionary stackFrames = new Dictionary<>(); // spark - add field public final Map settings = new HashMap<>(); + public final Map> enums = new HashMap<>(); + + private final Dictionary> customEvents = new Dictionary<>(); private int executionSample; private int nativeMethodSample; + private int 
wallClockSample; private int allocationInNewTLAB; private int allocationOutsideTLAB; private int allocationSample; + private int liveObject; private int monitorEnter; private int threadPark; private int activeSetting; - private boolean activeSettingHasStack; public JfrReader(Path path) throws IOException { // spark - Path instead of String this.ch = FileChannel.open(path, StandardOpenOption.READ); // spark - Path instead of String this.buf = ByteBuffer.allocateDirect(BUFFER_SIZE); + this.fileSize = ch.size(); buf.flip(); ensureBytes(CHUNK_HEADER_SIZE); @@ -82,15 +88,52 @@ public JfrReader(Path path) throws IOException { // spark - Path instead of Stri } } + public JfrReader(ByteBuffer buf) throws IOException { + this.ch = null; + this.buf = buf; + this.fileSize = buf.limit(); + + buf.order(ByteOrder.BIG_ENDIAN); + if (!readChunk(0)) { + throw new IOException("Incomplete JFR file"); + } + } + @Override public void close() throws IOException { - ch.close(); + if (ch != null) { + ch.close(); + } + } + + public boolean eof() { + return state >= STATE_EOF; + } + + public boolean incomplete() { + return state == STATE_INCOMPLETE; } public long durationNanos() { return endNanos - startNanos; } + public void registerEvent(String name, Class eventClass) { + JfrClass type = typesByName.get(name); + if (type != null) { + try { + customEvents.put(type.id, eventClass.getConstructor(JfrReader.class)); + } catch (NoSuchMethodException e) { + throw new IllegalArgumentException("No suitable constructor found"); + } + } + } + + // Similar to eof(), but parses the next chunk header + public boolean hasMoreChunks() throws IOException { + return state == STATE_NEW_CHUNK ? readChunk(buf.position()) : state == STATE_READING; + } + public List readAllEvents() throws IOException { return readAllEvents(null); } @@ -116,41 +159,58 @@ public E readEvent(Class cls) throws IOException { int type = getVarint(); if (type == 'L' && buf.getInt(pos) == CHUNK_SIGNATURE) { - if (readChunk(pos)) { + if (state != STATE_NEW_CHUNK && stopAtNewChunk) { + buf.position(pos); + state = STATE_NEW_CHUNK; + } else if (readChunk(pos)) { continue; } - break; + return null; } if (type == executionSample || type == nativeMethodSample) { - if (cls == null || cls == ExecutionSample.class) return (E) readExecutionSample(); + if (cls == null || cls == ExecutionSample.class) return (E) readExecutionSample(false); + } else if (type == wallClockSample) { + if (cls == null || cls == ExecutionSample.class) return (E) readExecutionSample(true); } else if (type == allocationInNewTLAB) { if (cls == null || cls == AllocationSample.class) return (E) readAllocationSample(true); } else if (type == allocationOutsideTLAB || type == allocationSample) { if (cls == null || cls == AllocationSample.class) return (E) readAllocationSample(false); + } else if (type == liveObject) { + if (cls == null || cls == LiveObject.class) return (E) readLiveObject(); } else if (type == monitorEnter) { if (cls == null || cls == ContendedLock.class) return (E) readContendedLock(false); } else if (type == threadPark) { if (cls == null || cls == ContendedLock.class) return (E) readContendedLock(true); } else if (type == activeSetting) { readActiveSetting(); - } - - if ((pos += size) <= buf.limit()) { - buf.position(pos); } else { - seek(filePosition + pos); + Constructor customEvent = customEvents.get(type); + if (customEvent != null && (cls == null || cls == customEvent.getDeclaringClass())) { + try { + return (E) customEvent.newInstance(this); + } catch 
(ReflectiveOperationException e) { + throw new IllegalStateException(e); + } finally { + seek(filePosition + pos + size); + } + } + } + + seek(filePosition + pos + size); } + + state = STATE_EOF; return null; } - private ExecutionSample readExecutionSample() { + private ExecutionSample readExecutionSample(boolean hasSamples) { long time = getVarlong(); int tid = getVarint(); int stackTraceId = getVarint(); int threadState = getVarint(); - return new ExecutionSample(time, tid, stackTraceId, threadState); + int samples = hasSamples ? getVarint() : 1; + return new ExecutionSample(time, tid, stackTraceId, threadState, samples); } private AllocationSample readAllocationSample(boolean tlab) { @@ -163,6 +223,16 @@ private AllocationSample readAllocationSample(boolean tlab) { return new AllocationSample(time, tid, stackTraceId, classId, allocationSize, tlabSize); } + private LiveObject readLiveObject() { + long time = getVarlong(); + int tid = getVarint(); + int stackTraceId = getVarint(); + int classId = getVarint(); + long allocationSize = getVarlong(); + long allocationTime = getVarlong(); + return new LiveObject(time, tid, stackTraceId, classId, allocationSize, allocationTime); + } + private ContendedLock readContendedLock(boolean hasTimeout) { long time = getVarlong(); long duration = getVarlong(); @@ -176,11 +246,12 @@ private ContendedLock readContendedLock(boolean hasTimeout) { return new ContendedLock(time, tid, stackTraceId, duration, classId); } private void readActiveSetting() { - long time = getVarlong(); - long duration = getVarlong(); - int tid = getVarint(); - if (activeSettingHasStack) getVarint(); - long id = getVarlong(); + for (JfrField field : typesByName.get("jdk.ActiveSetting").fields) { + getVarlong(); + if ("id".equals(field.name)) { + break; + } + } String name = getString(); String value = getString(); settings.put(name, value); @@ -196,27 +267,38 @@ private boolean readChunk(int pos) throws IOException { throw new IOException("Unsupported JFR version: " + (version >>> 16) + "."
+ (version & 0xffff)); } + long chunkStart = filePosition + pos; + long chunkSize = buf.getLong(pos + 8); + if (chunkStart + chunkSize > fileSize) { + state = STATE_INCOMPLETE; + return false; + } + long cpOffset = buf.getLong(pos + 16); long metaOffset = buf.getLong(pos + 24); if (cpOffset == 0 || metaOffset == 0) { - incomplete = true; + state = STATE_INCOMPLETE; return false; } - startNanos = Math.min(startNanos, buf.getLong(pos + 32)); - endNanos = Math.max(endNanos, buf.getLong(pos + 32) + buf.getLong(pos + 40)); - startTicks = Math.min(startTicks, buf.getLong(pos + 48)); + chunkStartNanos = buf.getLong(pos + 32); + chunkEndNanos = buf.getLong(pos + 32) + buf.getLong(pos + 40); + chunkStartTicks = buf.getLong(pos + 48); ticksPerSec = buf.getLong(pos + 56); + startNanos = Math.min(startNanos, chunkStartNanos); + endNanos = Math.max(endNanos, chunkEndNanos); + startTicks = Math.min(startTicks, chunkStartTicks); + types.clear(); typesByName.clear(); - long chunkStart = filePosition + pos; readMeta(chunkStart + metaOffset); readConstantPool(chunkStart + cpOffset); cacheEventTypes(); seek(chunkStart + CHUNK_HEADER_SIZE); + state = STATE_READING; return true; } @@ -224,7 +306,8 @@ private void readMeta(long metaOffset) throws IOException { seek(metaOffset); ensureBytes(5); - ensureBytes(getVarint() - buf.position()); + int posBeforeSize = buf.position(); + ensureBytes(getVarint() - (buf.position() - posBeforeSize)); getVarint(); getVarlong(); getVarlong(); @@ -277,7 +360,8 @@ private void readConstantPool(long cpOffset) throws IOException { seek(cpOffset); ensureBytes(5); - ensureBytes(getVarint() - buf.position()); + int posBeforeSize = buf.position(); + ensureBytes(getVarint() - (buf.position() - posBeforeSize)); getVarint(); getVarlong(); getVarlong(); @@ -298,10 +382,13 @@ private void readConstants(JfrClass type) { buf.position(buf.position() + (CHUNK_HEADER_SIZE + 3)); break; case "java.lang.Thread": - readThreads(type.field("group") != null); + readThreads(type.fields.size()); break; case "java.lang.Class": - readClasses(type.field("hidden") != null); + readClasses(type.fields.size()); + break; + case "java.lang.String": + readStrings(); break; case "jdk.types.Symbol": readSymbols(); @@ -312,31 +399,29 @@ private void readConstants(JfrClass type) { case "jdk.types.StackTrace": readStackTraces(); break; - case "jdk.types.FrameType": - readMap(frameTypes); - break; - case "jdk.types.ThreadState": - readMap(threadStates); - break; default: - readOtherConstants(type.fields); + if (type.simpleType && type.fields.size() == 1) { + readEnumValues(type.name); + } else { + readOtherConstants(type.fields); + } } } - private void readThreads(boolean hasGroup) { - int count = getVarint(); //threads.preallocate(getVarint()); + private void readThreads(int fieldCount) { + int count = getVarint(); // spark - don't preallocate for (int i = 0; i < count; i++) { long id = getVarlong(); String osName = getString(); int osThreadId = getVarint(); String javaName = getString(); long javaThreadId = getVarlong(); - if (hasGroup) getVarlong(); + readFields(fieldCount - 4); threads.put(id, javaName != null ? 
javaName : osName); } } - private void readClasses(boolean hasHidden) { + private void readClasses(int fieldCount) { int count = classes.preallocate(getVarint()); for (int i = 0; i < count; i++) { long id = getVarlong(); @@ -344,7 +429,7 @@ private void readClasses(boolean hasHidden) { long name = getVarlong(); long pkg = getVarlong(); int modifiers = getVarint(); - if (hasHidden) getVarint(); + readFields(fieldCount - 4); classes.put(id, new ClassRef(name)); } } @@ -360,7 +445,7 @@ private void readMethods() { int hidden = getVarint(); methods.put(id, new MethodRef(cls, name, sig)); } - stackFrames.preallocate(count); // spark + stackFrames.preallocate(count); // spark - preallocate frames size } private void readStackTraces() { @@ -388,6 +473,13 @@ private StackTrace readStackTrace() { return new StackTrace(methods, types, locations); } + private void readStrings() { + int count = strings.preallocate(getVarint()); + for (int i = 0; i < count; i++) { + strings.put(getVarlong(), getString()); + } + } + private void readSymbols() { int count = symbols.preallocate(getVarint()); for (int i = 0; i < count; i++) { @@ -399,11 +491,13 @@ private void readSymbols() { } } - private void readMap(Map map) { + private void readEnumValues(String typeName) { + HashMap map = new HashMap<>(); int count = getVarint(); for (int i = 0; i < count; i++) { - map.put(getVarint(), getString()); + map.put((int) getVarlong(), getString()); } + enums.put(typeName, map); } private void readOtherConstants(List fields) { @@ -432,16 +526,28 @@ private void readFields(boolean[] numeric) { } } + private void readFields(int count) { + while (count-- > 0) { + getVarlong(); + } + } + private void cacheEventTypes() { executionSample = getTypeId("jdk.ExecutionSample"); nativeMethodSample = getTypeId("jdk.NativeMethodSample"); + wallClockSample = getTypeId("profiler.WallClockSample"); allocationInNewTLAB = getTypeId("jdk.ObjectAllocationInNewTLAB"); allocationOutsideTLAB = getTypeId("jdk.ObjectAllocationOutsideTLAB"); allocationSample = getTypeId("jdk.ObjectAllocationSample"); + liveObject = getTypeId("profiler.LiveObject"); monitorEnter = getTypeId("jdk.JavaMonitorEnter"); threadPark = getTypeId("jdk.ThreadPark"); activeSetting = getTypeId("jdk.ActiveSetting"); - activeSettingHasStack = activeSetting >= 0 && typesByName.get("jdk.ActiveSetting").field("stackTrace") != null; + + registerEvent("jdk.CPULoad", CPULoad.class); + registerEvent("jdk.GCHeapSummary", GCHeapSummary.class); + registerEvent("jdk.ObjectCount", ObjectCount.class); + registerEvent("jdk.ObjectCountAfterGC", ObjectCount.class); } private int getTypeId(String typeName) { @@ -449,7 +555,23 @@ private int getTypeId(String typeName) { return type != null ? 
type.id : -1; } - private int getVarint() { + public int getEnumKey(String typeName, String value) { + Map enumValues = enums.get(typeName); + if (enumValues != null) { + for (Map.Entry entry : enumValues.entrySet()) { + if (value.equals(entry.getValue())) { + return entry.getKey(); + } + } + } + return -1; + } + + public String getEnumValue(String typeName, int key) { + return enums.get(typeName).get(key); + } + + public int getVarint() { int result = 0; for (int shift = 0; ; shift += 7) { byte b = buf.get(); @@ -460,7 +582,7 @@ private int getVarint() { } } - private long getVarlong() { + public long getVarlong() { long result = 0; for (int shift = 0; shift < 56; shift += 7) { byte b = buf.get(); @@ -472,12 +594,22 @@ private long getVarlong() { return result | (buf.get() & 0xffL) << 56; } - private String getString() { + public float getFloat() { + return buf.getFloat(); + } + + public double getDouble() { + return buf.getDouble(); + } + + public String getString() { switch (buf.get()) { case 0: return null; case 1: return ""; + case 2: + return strings.get(getVarlong()); case 3: return new String(getBytes(), StandardCharsets.UTF_8); case 4: { @@ -494,16 +626,21 @@ private String getString() { } } - private byte[] getBytes() { + public byte[] getBytes() { byte[] bytes = new byte[getVarint()]; buf.get(bytes); return bytes; } private void seek(long pos) throws IOException { - filePosition = pos; - ch.position(pos); - buf.rewind().flip(); + long bufPosition = pos - filePosition; + if (bufPosition >= 0 && bufPosition <= buf.limit()) { + buf.position((int) bufPosition); + } else { + filePosition = pos; + ch.position(pos); + buf.rewind().flip(); + } } private boolean ensureBytes(int needed) throws IOException { @@ -511,6 +648,10 @@ private boolean ensureBytes(int needed) throws IOException { return true; } + if (ch == null) { + return false; + } + filePosition += buf.position(); if (buf.capacity() < needed) { @@ -544,11 +685,13 @@ void addChild(Element e) { static class JfrClass extends Element { final int id; + final boolean simpleType; final String name; final List fields; JfrClass(Map attributes) { this.id = Integer.parseInt(attributes.get("id")); + this.simpleType = "true".equals(attributes.get("simpleType")); this.name = attributes.get("name"); this.fields = new ArrayList<>(2); } @@ -560,7 +703,7 @@ void addChild(Element e) { } } - JfrField field(String name) { + public JfrField field(String name) { for (JfrField field : fields) { if (field.name.equals(name)) { return field; @@ -606,162 +749,196 @@ public StackTrace(long[] methods, byte[] types, int[] locations) { } } - public static abstract class Event implements Comparable { - public final long time; - public final int tid; - public final int stackTraceId; + public static class AllocationSample extends Event { + public final int classId; + public final long allocationSize; + public final long tlabSize; - protected Event(long time, int tid, int stackTraceId) { - this.time = time; - this.tid = tid; - this.stackTraceId = stackTraceId; + public AllocationSample(long time, int tid, int stackTraceId, int classId, long allocationSize, long tlabSize) { + super(time, tid, stackTraceId); + this.classId = classId; + this.allocationSize = allocationSize; + this.tlabSize = tlabSize; } @Override - public int compareTo(Event o) { - return Long.compare(time, o.time); + public int hashCode() { + return classId * 127 + stackTraceId + (tlabSize == 0 ? 
17 : 0); } @Override - public int hashCode() { - return stackTraceId; + public boolean sameGroup(Event o) { + if (o instanceof AllocationSample) { + AllocationSample a = (AllocationSample) o; + return classId == a.classId && (tlabSize == 0) == (a.tlabSize == 0); + } + return false; } - public boolean sameGroup(Event o) { - return getClass() == o.getClass(); + @Override + public long classId() { + return classId; } + @Override public long value() { - return 1; + return tlabSize != 0 ? tlabSize : allocationSize; } } - public static class EventAggregator { - private static final int INITIAL_CAPACITY = 1024; - - private final boolean threads; - private final boolean total; - private Event[] keys; - private long[] values; - private int size; + static class CPULoad extends Event { + public final float jvmUser; + public final float jvmSystem; + public final float machineTotal; - public EventAggregator(boolean threads, boolean total) { - this.threads = threads; - this.total = total; - this.keys = new Event[INITIAL_CAPACITY]; - this.values = new long[INITIAL_CAPACITY]; + public CPULoad(JfrReader jfr) { + super(jfr.getVarlong(), 0, 0); + this.jvmUser = jfr.getFloat(); + this.jvmSystem = jfr.getFloat(); + this.machineTotal = jfr.getFloat(); } + } - public void collect(Event e) { - int mask = keys.length - 1; - int i = hashCode(e) & mask; - while (keys[i] != null) { - if (sameGroup(keys[i], e)) { - values[i] += total ? e.value() : 1; - return; - } - i = (i + 1) & mask; - } + static class ContendedLock extends Event { + public final long duration; + public final int classId; - keys[i] = e; - values[i] = total ? e.value() : 1; + public ContendedLock(long time, int tid, int stackTraceId, long duration, int classId) { + super(time, tid, stackTraceId); + this.duration = duration; + this.classId = classId; + } - if (++size * 2 > keys.length) { - resize(keys.length * 2); - } + @Override + public int hashCode() { + return classId * 127 + stackTraceId; } - public long getValue(Event e) { - int mask = keys.length - 1; - int i = hashCode(e) & mask; - while (keys[i] != null && !sameGroup(keys[i], e)) { - i = (i + 1) & mask; + @Override + public boolean sameGroup(Event o) { + if (o instanceof ContendedLock) { + ContendedLock c = (ContendedLock) o; + return classId == c.classId; } - return values[i]; + return false; } - public void forEach(Visitor visitor) { - for (int i = 0; i < keys.length; i++) { - if (keys[i] != null) { - visitor.visit(keys[i], values[i]); - } - } + @Override + public long classId() { + return classId; + } + + @Override + public long value() { + return duration; } + } + + public static abstract class Event implements Comparable { + public final long time; + public final int tid; + public final int stackTraceId; - private int hashCode(Event e) { - return e.hashCode() + (threads ? 
e.tid * 31 : 0); + protected Event(long time, int tid, int stackTraceId) { + this.time = time; + this.tid = tid; + this.stackTraceId = stackTraceId; } - private boolean sameGroup(Event e1, Event e2) { - return e1.stackTraceId == e2.stackTraceId && (!threads || e1.tid == e2.tid) && e1.sameGroup(e2); + @Override + public int compareTo(Event o) { + return Long.compare(time, o.time); } - private void resize(int newCapacity) { - Event[] newKeys = new Event[newCapacity]; - long[] newValues = new long[newCapacity]; - int mask = newKeys.length - 1; + @Override + public int hashCode() { + return stackTraceId; + } - for (int i = 0; i < keys.length; i++) { - if (keys[i] != null) { - for (int j = hashCode(keys[i]) & mask; ; j = (j + 1) & mask) { - if (newKeys[j] == null) { - newKeys[j] = keys[i]; - newValues[j] = values[i]; - break; - } - } + @Override + public String toString() { + StringBuilder sb = new StringBuilder(getClass().getSimpleName()) + .append("{time=").append(time) + .append(",tid=").append(tid) + .append(",stackTraceId=").append(stackTraceId); + for (Field f : getClass().getDeclaredFields()) { + try { + sb.append(',').append(f.getName()).append('=').append(f.get(this)); + } catch (ReflectiveOperationException e) { + break; } } + return sb.append('}').toString(); + } + + public boolean sameGroup(Event o) { + return getClass() == o.getClass(); + } + + public long classId() { + return 0; + } - keys = newKeys; - values = newValues; + public long samples() { + return 1; } - public interface Visitor { - void visit(Event event, long value); + public long value() { + return 1; } } - public static class AllocationSample extends Event { - public final int classId; - public final long allocationSize; - public final long tlabSize; + public static class ExecutionSample extends Event { + public final int threadState; + public final int samples; - public AllocationSample(long time, int tid, int stackTraceId, int classId, long allocationSize, long tlabSize) { + public ExecutionSample(long time, int tid, int stackTraceId, int threadState, int samples) { super(time, tid, stackTraceId); - this.classId = classId; - this.allocationSize = allocationSize; - this.tlabSize = tlabSize; + this.threadState = threadState; + this.samples = samples; } @Override - public int hashCode() { - return classId * 127 + stackTraceId + (tlabSize == 0 ? 17 : 0); + public long samples() { + return samples; } @Override - public boolean sameGroup(Event o) { - if (o instanceof AllocationSample) { - AllocationSample a = (AllocationSample) o; - return classId == a.classId && (tlabSize == 0) == (a.tlabSize == 0); - } - return false; + public long value() { + return samples; } + } - @Override - public long value() { - return tlabSize != 0 ? 
tlabSize : allocationSize; + static class GCHeapSummary extends Event { + public final int gcId; + public final boolean afterGC; + public final long committed; + public final long reserved; + public final long used; + + public GCHeapSummary(JfrReader jfr) { + super(jfr.getVarlong(), 0, 0); + this.gcId = jfr.getVarint(); + this.afterGC = jfr.getVarint() > 0; + long start = jfr.getVarlong(); + long committedEnd = jfr.getVarlong(); + this.committed = jfr.getVarlong(); + long reservedEnd = jfr.getVarlong(); + this.reserved = jfr.getVarlong(); + this.used = jfr.getVarlong(); } } - public static class ContendedLock extends Event { - public final long duration; + static class LiveObject extends Event { public final int classId; + public final long allocationSize; + public final long allocationTime; - public ContendedLock(long time, int tid, int stackTraceId, long duration, int classId) { + public LiveObject(long time, int tid, int stackTraceId, int classId, long allocationSize, long allocationTime) { super(time, tid, stackTraceId); - this.duration = duration; this.classId = classId; + this.allocationSize = allocationSize; + this.allocationTime = allocationTime; } @Override @@ -771,26 +948,36 @@ public int hashCode() { @Override public boolean sameGroup(Event o) { - if (o instanceof ContendedLock) { - ContendedLock c = (ContendedLock) o; - return classId == c.classId; + if (o instanceof LiveObject) { + LiveObject a = (LiveObject) o; + return classId == a.classId; } return false; } + @Override + public long classId() { + return classId; + } + @Override public long value() { - return duration; + return allocationSize; } } - public static class ExecutionSample extends Event { - public final int threadState; + static class ObjectCount extends Event { + public final int gcId; + public final int classId; + public final long count; + public final long totalSize; - public ExecutionSample(long time, int tid, int stackTraceId, int threadState) { - super(time, tid, stackTraceId); - this.threadState = threadState; + public ObjectCount(JfrReader jfr) { + super(jfr.getVarlong(), 0, 0); + this.gcId = jfr.getVarint(); + this.classId = jfr.getVarint(); + this.count = jfr.getVarlong(); + this.totalSize = jfr.getVarlong(); } } - } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java index c51ec0523ca..b2f250feb5f 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java @@ -39,7 +39,7 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator { /** A describer for java.lang.StackTraceElement */ private static final StackTraceNode.Describer STACK_TRACE_DESCRIBER = (element, parent) -> { int parentLineNumber = parent == null ? 
StackTraceNode.NULL_LINE_NUMBER : parent.getLineNumber(); - return new StackTraceNode.Description(element.getClassName(), element.getMethodName(), element.getLineNumber(), parentLineNumber); + return new StackTraceNode.JavaDescription(element.getClassName(), element.getMethodName(), element.getLineNumber(), parentLineNumber); }; /** The worker pool for inserting stack nodes */ @@ -48,18 +48,10 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator { /** The interval to wait between sampling, in microseconds */ protected final int interval; - /** If sleeping threads should be ignored */ - private final boolean ignoreSleeping; - - /** If threads executing native code should be ignored */ - private final boolean ignoreNative; - - public JavaDataAggregator(ExecutorService workerPool, ThreadGrouper threadGrouper, int interval, boolean ignoreSleeping, boolean ignoreNative) { - super(threadGrouper); + public JavaDataAggregator(ExecutorService workerPool, ThreadGrouper threadGrouper, int interval, boolean ignoreSleeping) { + super(threadGrouper, ignoreSleeping); this.workerPool = workerPool; this.interval = interval; - this.ignoreSleeping = ignoreSleeping; - this.ignoreNative = ignoreNative; } /** @@ -74,9 +66,6 @@ protected void writeData(ThreadInfo threadInfo, int window) { if (this.ignoreSleeping && isSleeping(threadInfo)) { return; } - if (this.ignoreNative && threadInfo.isInNative()) { - return; - } try { ThreadNode node = getNode(this.threadGrouper.getGroup(threadInfo.getThreadId(), threadInfo.getThreadName())); @@ -99,7 +88,7 @@ public List exportData() { return super.exportData(); } - private static boolean isSleeping(ThreadInfo thread) { + static boolean isSleeping(ThreadInfo thread) { if (thread.getThreadState() == Thread.State.WAITING || thread.getThreadState() == Thread.State.TIMED_WAITING) { return true; } @@ -113,12 +102,7 @@ private static boolean isSleeping(ThreadInfo thread) { String clazz = call.getClassName(); String method = call.getMethodName(); - // java.lang.Thread.yield() - // jdk.internal.misc.Unsafe.park() - // sun.misc.Unsafe.park() - return (clazz.equals("java.lang.Thread") && method.equals("yield")) || - (clazz.equals("jdk.internal.misc.Unsafe") && method.equals("park")) || - (clazz.equals("sun.misc.Unsafe") && method.equals("park")); + return isSleeping(clazz, method); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaNodeExporter.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaNodeExporter.java new file mode 100644 index 00000000000..c1100860b8d --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaNodeExporter.java @@ -0,0 +1,97 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
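For context on the isSleeping refactor above: the heuristic itself is unchanged, and the isSleeping(String, String) overload now called is assumed to contain exactly the three frame checks the removed comment listed (Thread.yield(), jdk.internal.misc.Unsafe.park(), sun.misc.Unsafe.park()). A self-contained sketch of the full heuristic applied to a live JMX thread dump:

import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;

class SleepHeuristicSketch {
    // the frames listed by the removed inline checks
    static boolean isSleepingFrame(String clazz, String method) {
        return (clazz.equals("java.lang.Thread") && method.equals("yield"))
                || (clazz.equals("jdk.internal.misc.Unsafe") && method.equals("park"))
                || (clazz.equals("sun.misc.Unsafe") && method.equals("park"));
    }

    static boolean isSleeping(ThreadInfo thread) {
        if (thread.getThreadState() == Thread.State.WAITING
                || thread.getThreadState() == Thread.State.TIMED_WAITING) {
            return true; // waiting threads always count as sleeping
        }
        StackTraceElement[] stack = thread.getStackTrace();
        return stack.length > 0
                && isSleepingFrame(stack[0].getClassName(), stack[0].getMethodName());
    }

    public static void main(String[] args) {
        for (ThreadInfo t : ManagementFactory.getThreadMXBean().dumpAllThreads(false, false)) {
            System.out.println(t.getThreadName() + " sleeping=" + isSleeping(t));
        }
    }
}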
+ */ + +package me.lucko.spark.common.sampler.java; + +import me.lucko.spark.common.sampler.node.StackTraceNode; +import me.lucko.spark.common.sampler.node.exporter.AbstractNodeExporter; +import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; +import me.lucko.spark.common.util.MethodDisambiguator; +import me.lucko.spark.proto.SparkSamplerProtos; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +/** + * Node exporter for the {@link JavaSampler}. + */ +public class JavaNodeExporter extends AbstractNodeExporter { + private final MergeStrategy mergeStrategy; + private final MethodDisambiguator methodDisambiguator; + + public JavaNodeExporter(ProtoTimeEncoder timeEncoder, MergeStrategy mergeStrategy, MethodDisambiguator methodDisambiguator) { + super(timeEncoder); + this.mergeStrategy = mergeStrategy; + this.methodDisambiguator = methodDisambiguator; + } + + protected SparkSamplerProtos.StackTraceNode export(StackTraceNode stackTraceNode, Iterable childrenRefs) { + SparkSamplerProtos.StackTraceNode.Builder proto = SparkSamplerProtos.StackTraceNode.newBuilder() + .setClassName(stackTraceNode.getClassName()) + .setMethodName(stackTraceNode.getMethodName()); + + double[] times = stackTraceNode.encodeTimesForProto(this.timeEncoder); + for (double time : times) { + proto.addTimes(time); + } + + int lineNumber = stackTraceNode.getLineNumber(); + if (lineNumber >= 0) { + proto.setLineNumber(lineNumber); + } + + if (this.mergeStrategy.separateParentCalls()) { + int parentLineNumber = stackTraceNode.getParentLineNumber(); + if (parentLineNumber >= 0) { + proto.setParentLineNumber(parentLineNumber); + } + } + + this.methodDisambiguator.disambiguate(stackTraceNode) + .map(MethodDisambiguator.MethodDescription::getDescription) + .ifPresent(proto::setMethodDesc); + + proto.addAllChildrenRefs(childrenRefs); + + return proto.build(); + } + + @Override + protected Collection exportChildren(Collection children) { + if (children.isEmpty()) { + return children; + } + + List list = new ArrayList<>(children.size()); + + outer: + for (StackTraceNode child : children) { + for (StackTraceNode other : list) { + if (this.mergeStrategy.shouldMerge(this.methodDisambiguator, other, child)) { + other.merge(child); + continue outer; + } + } + list.add(child); + } + return list; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index e29619b5d9f..cf4af7d86ed 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -21,14 +21,15 @@ package me.lucko.spark.common.sampler.java; import com.google.common.util.concurrent.ThreadFactoryBuilder; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.sampler.AbstractSampler; import me.lucko.spark.common.sampler.SamplerMode; import me.lucko.spark.common.sampler.SamplerSettings; +import me.lucko.spark.common.sampler.SamplerType; import me.lucko.spark.common.sampler.window.ProfilingWindowUtils; import me.lucko.spark.common.sampler.window.WindowStatisticsCollector; import me.lucko.spark.common.tick.TickHook; +import me.lucko.spark.common.util.MethodDisambiguator; import me.lucko.spark.common.util.SparkThreadFactory; import me.lucko.spark.common.ws.ViewerSocket; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; @@ -72,14 +73,14 @@ public class JavaSampler extends 
AbstractSampler implements Runnable { /** The last window that was profiled */ private final AtomicInteger lastWindow = new AtomicInteger(); - public JavaSampler(SparkPlatform platform, SamplerSettings settings, boolean ignoreSleeping, boolean ignoreNative) { + public JavaSampler(SparkPlatform platform, SamplerSettings settings) { super(platform, settings); - this.dataAggregator = new SimpleDataAggregator(this.workerPool, settings.threadGrouper(), settings.interval(), ignoreSleeping, ignoreNative); + this.dataAggregator = new SimpleJavaDataAggregator(this.workerPool, settings.threadGrouper(), settings.interval(), settings.ignoreSleeping()); } - public JavaSampler(SparkPlatform platform, SamplerSettings settings, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) { + public JavaSampler(SparkPlatform platform, SamplerSettings settings, TickHook tickHook, int tickLengthThreshold) { super(platform, settings); - this.dataAggregator = new TickedDataAggregator(this.workerPool, settings.threadGrouper(), settings.interval(), ignoreSleeping, ignoreNative, tickHook, tickLengthThreshold); + this.dataAggregator = new TickedJavaDataAggregator(this.workerPool, settings.threadGrouper(), settings.interval(), settings.ignoreSleeping(), tickHook, tickLengthThreshold); } @Override @@ -88,9 +89,9 @@ public void start() { TickHook tickHook = this.platform.getTickHook(); if (tickHook != null) { - if (this.dataAggregator instanceof TickedDataAggregator) { + if (this.dataAggregator instanceof TickedJavaDataAggregator) { WindowStatisticsCollector.ExplicitTickCounter counter = this.windowStatisticsCollector.startCountingTicksExplicit(tickHook); - ((TickedDataAggregator) this.dataAggregator).setTickCounter(counter); + ((TickedJavaDataAggregator) this.dataAggregator).setTickCounter(counter); } else { this.windowStatisticsCollector.startCountingTicks(tickHook); } @@ -193,11 +194,25 @@ public SamplerData toProto(SparkPlatform platform, ExportProps exportProps) { if (exportProps.channelInfo() != null) { proto.setChannelInfo(exportProps.channelInfo()); } + writeMetadataToProto(proto, platform, exportProps.creator(), exportProps.comment(), this.dataAggregator); - writeDataToProto(proto, this.dataAggregator, exportProps.mergeMode().get(), exportProps.classSourceLookup().get()); + + MethodDisambiguator methodDisambiguator = new MethodDisambiguator(platform.createClassFinder()); + writeDataToProto(proto, this.dataAggregator, timeEncoder -> new JavaNodeExporter(timeEncoder, exportProps.mergeStrategy(), methodDisambiguator), exportProps.classSourceLookup().get(), platform::createClassFinder); + return proto.build(); } + @Override + public SamplerType getType() { + return SamplerType.JAVA; + } + + @Override + public String getLibraryVersion() { + return null; + } + @Override public SamplerMode getMode() { return SamplerMode.EXECUTION; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/MergeMode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/MergeStrategy.java similarity index 64% rename from spark-common/src/main/java/me/lucko/spark/common/sampler/node/MergeMode.java rename to spark-common/src/main/java/me/lucko/spark/common/sampler/java/MergeStrategy.java index 18a0ed316f1..eac1c6fcb2e 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/MergeMode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/MergeStrategy.java @@ -18,37 +18,27 @@ * along with this program. If not, see . 
*/ -package me.lucko.spark.common.sampler.node; +package me.lucko.spark.common.sampler.java; +import me.lucko.spark.common.sampler.node.StackTraceNode; import me.lucko.spark.common.util.MethodDisambiguator; import java.util.Objects; /** - * Function to determine if {@link StackTraceNode}s should be merged. + * Strategy used to determine if {@link StackTraceNode}s should be merged. */ -public final class MergeMode { +public enum MergeStrategy { - public static MergeMode sameMethod(MethodDisambiguator methodDisambiguator) { - return new MergeMode(methodDisambiguator, false); - } - - public static MergeMode separateParentCalls(MethodDisambiguator methodDisambiguator) { - return new MergeMode(methodDisambiguator, true); - } + SAME_METHOD(false), + SEPARATE_PARENT_CALLS(true); - private final MethodDisambiguator methodDisambiguator; private final boolean separateParentCalls; - MergeMode(MethodDisambiguator methodDisambiguator, boolean separateParentCalls) { - this.methodDisambiguator = methodDisambiguator; + MergeStrategy(boolean separateParentCalls) { this.separateParentCalls = separateParentCalls; } - public MethodDisambiguator getMethodDisambiguator() { - return this.methodDisambiguator; - } - public boolean separateParentCalls() { return this.separateParentCalls; } @@ -56,11 +46,12 @@ public boolean separateParentCalls() { /** * Test if two stack trace nodes should be considered the same and merged. * + * @param disambiguator the method disambiguator * @param n1 the first node * @param n2 the second node * @return if the nodes should be merged */ - public boolean shouldMerge(StackTraceNode n1, StackTraceNode n2) { + public boolean shouldMerge(MethodDisambiguator disambiguator, StackTraceNode n1, StackTraceNode n2) { // are the class names the same? if (!n1.getClassName().equals(n2.getClassName())) { return false; @@ -77,8 +68,8 @@ public boolean shouldMerge(StackTraceNode n1, StackTraceNode n2) { } // are the method descriptions the same? (is it the same method?) - String desc1 = this.methodDisambiguator.disambiguate(n1).map(MethodDisambiguator.MethodDescription::getDesc).orElse(null); - String desc2 = this.methodDisambiguator.disambiguate(n2).map(MethodDisambiguator.MethodDescription::getDesc).orElse(null); + String desc1 = disambiguator.disambiguate(n1).map(MethodDisambiguator.MethodDescription::getDescription).orElse(null); + String desc2 = disambiguator.disambiguate(n2).map(MethodDisambiguator.MethodDescription::getDescription).orElse(null); if (desc1 == null && desc2 == null) { return true; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleJavaDataAggregator.java similarity index 87% rename from spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java rename to spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleJavaDataAggregator.java index 54173fe6f9f..461e34cfa50 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleJavaDataAggregator.java @@ -30,9 +30,9 @@ /** * Basic implementation of {@link DataAggregator}. 
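To make the MergeMode -> MergeStrategy change above concrete: the merge decision is now a property of the enum constant, and the MethodDisambiguator is passed per call rather than captured in the strategy object (the real shouldMerge also compares disambiguated method descriptors, which this sketch omits). A self-contained illustration of how the two constants differ, with invented frame data:

class MergeStrategySketch {
    record Frame(String className, String methodName, int parentLine) {}

    enum Strategy {
        SAME_METHOD(false),
        SEPARATE_PARENT_CALLS(true);

        private final boolean separateParentCalls;
        Strategy(boolean separateParentCalls) { this.separateParentCalls = separateParentCalls; }

        boolean shouldMerge(Frame a, Frame b) {
            if (!a.className().equals(b.className()) || !a.methodName().equals(b.methodName())) {
                return false; // different methods never merge
            }
            // SEPARATE_PARENT_CALLS additionally distinguishes call sites
            return !this.separateParentCalls || a.parentLine() == b.parentLine();
        }
    }

    public static void main(String[] args) {
        Frame x = new Frame("com.example.Foo", "tick", 10);
        Frame y = new Frame("com.example.Foo", "tick", 42); // same method, different call site
        System.out.println(Strategy.SAME_METHOD.shouldMerge(x, y));           // true
        System.out.println(Strategy.SEPARATE_PARENT_CALLS.shouldMerge(x, y)); // false
    }
}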
*/ -public class SimpleDataAggregator extends JavaDataAggregator { - public SimpleDataAggregator(ExecutorService workerPool, ThreadGrouper threadGrouper, int interval, boolean ignoreSleeping, boolean ignoreNative) { - super(workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative); +public class SimpleJavaDataAggregator extends JavaDataAggregator { + public SimpleJavaDataAggregator(ExecutorService workerPool, ThreadGrouper threadGrouper, int interval, boolean ignoreSleeping) { + super(workerPool, threadGrouper, interval, ignoreSleeping); } @Override diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedJavaDataAggregator.java similarity index 87% rename from spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java rename to spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedJavaDataAggregator.java index 08cb71939ec..c950648c63e 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedJavaDataAggregator.java @@ -38,7 +38,7 @@ * Implementation of {@link DataAggregator} which supports only including sampling data from "ticks" * which exceed a certain threshold in duration. */ -public class TickedDataAggregator extends JavaDataAggregator { +public class TickedJavaDataAggregator extends JavaDataAggregator { /** Used to monitor the current "tick" of the server */ private final TickHook tickHook; @@ -59,8 +59,8 @@ public class TickedDataAggregator extends JavaDataAggregator { // guards currentData private final Object mutex = new Object(); - public TickedDataAggregator(ExecutorService workerPool, ThreadGrouper threadGrouper, int interval, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) { - super(workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative); + public TickedJavaDataAggregator(ExecutorService workerPool, ThreadGrouper threadGrouper, int interval, boolean ignoreSleeping, TickHook tickHook, int tickLengthThreshold) { + super(workerPool, threadGrouper, interval, ignoreSleeping); this.tickHook = tickHook; this.tickLengthThreshold = TimeUnit.MILLISECONDS.toMicros(tickLengthThreshold); // 50 millis in a tick, plus 10 so we have a bit of room to go over @@ -110,7 +110,7 @@ private void pushCurrentTick(Executor executor) { } // approximate how long the tick lasted - int tickLengthMicros = currentData.getList().size() * this.interval; + int tickLengthMicros = currentData.sizeWithoutTrailingSleeping() * this.interval; // don't push data below the threshold if (tickLengthMicros < this.tickLengthThreshold) { @@ -151,6 +151,16 @@ public List getList() { return this.list; } + public int sizeWithoutTrailingSleeping() { + // find the last index at which the thread wasn't sleeping + for (int i = this.list.size() - 1; i >= 0; i--) { + if (!isSleeping(this.list.get(i))) { + return i + 1; // add one to go from index to size + } + } + return 0; + } + public void addData(ThreadInfo data) { this.list.add(data); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java index 163365cb9c4..d3b77b41dff 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java +++ 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java @@ -22,10 +22,7 @@ import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; -import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; -import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; @@ -86,7 +83,7 @@ public boolean removeTimeWindows(IntPredicate predicate) { * * @return the total times */ - protected double[] encodeTimesForProto(ProtoTimeEncoder encoder) { + public double[] encodeTimesForProto(ProtoTimeEncoder encoder) { return encoder.encode(this.times); } @@ -107,35 +104,11 @@ protected StackTraceNode resolveChild(StackTraceNode.Description description) { * * @param other the other node */ - protected void merge(AbstractNode other) { + public void merge(AbstractNode other) { other.times.forEach((key, value) -> getTimeAccumulator(key).add(value.longValue())); for (Map.Entry child : other.children.entrySet()) { resolveChild(child.getKey()).merge(child.getValue()); } } - protected List exportChildren(MergeMode mergeMode) { - if (this.children.isEmpty()) { - return Collections.emptyList(); - } - - List list = new ArrayList<>(this.children.size()); - - outer: - for (StackTraceNode child : this.children.values()) { - // attempt to find an existing node we can merge into - for (StackTraceNode other : list) { - if (mergeMode.shouldMerge(other, child)) { - other.merge(child); - continue outer; - } - } - - // just add - list.add(child); - } - - return list; - } - } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java index c0dcc5bd0d8..27cfa54038c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java @@ -20,10 +20,6 @@ package me.lucko.spark.common.sampler.node; -import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; -import me.lucko.spark.common.util.MethodDisambiguator; -import me.lucko.spark.proto.SparkSamplerProtos; - import org.checkerframework.checker.nullness.qual.Nullable; import java.util.Objects; @@ -46,58 +42,33 @@ public StackTraceNode(Description description) { } public String getClassName() { - return this.description.className; + return this.description.className(); } public String getMethodName() { - return this.description.methodName; + return this.description.methodName(); } public String getMethodDescription() { - return this.description.methodDescription; + return this.description instanceof AsyncDescription + ? ((AsyncDescription) this.description).methodDescription() + : null; } public int getLineNumber() { - return this.description.lineNumber; + return this.description instanceof JavaDescription + ? 
((JavaDescription) this.description).lineNumber() + : NULL_LINE_NUMBER; } public int getParentLineNumber() { - return this.description.parentLineNumber; - } - - public SparkSamplerProtos.StackTraceNode toProto(MergeMode mergeMode, ProtoTimeEncoder timeEncoder, Iterable childrenRefs) { - SparkSamplerProtos.StackTraceNode.Builder proto = SparkSamplerProtos.StackTraceNode.newBuilder() - .setClassName(this.description.className) - .setMethodName(this.description.methodName); - - double[] times = encodeTimesForProto(timeEncoder); - for (double time : times) { - proto.addTimes(time); - } - - if (this.description.lineNumber >= 0) { - proto.setLineNumber(this.description.lineNumber); - } - - if (mergeMode.separateParentCalls() && this.description.parentLineNumber >= 0) { - proto.setParentLineNumber(this.description.parentLineNumber); - } - - if (this.description.methodDescription != null) { - proto.setMethodDesc(this.description.methodDescription); - } else { - mergeMode.getMethodDisambiguator().disambiguate(this) - .map(MethodDisambiguator.MethodDescription::getDesc) - .ifPresent(proto::setMethodDesc); - } - - proto.addAllChildrenRefs(childrenRefs); - - return proto.build(); + return this.description instanceof JavaDescription + ? ((JavaDescription) this.description).parentLineNumber() + : NULL_LINE_NUMBER; } /** - * Function to construct a {@link StackTraceNode.Description} from a stack trace element + * Function to construct a {@link Description} from a stack trace element * of type {@code T}. * * @param the stack trace element type, e.g. {@link java.lang.StackTraceElement} @@ -115,53 +86,101 @@ public interface Describer { Description describe(T element, @Nullable T parent); } - /** - * Encapsulates the attributes of a {@link StackTraceNode}. - */ - public static final class Description { + public interface Description { + String className(); + + String methodName(); + } + + public static final class AsyncDescription implements Description { private final String className; private final String methodName; - - // async-profiler private final String methodDescription; - // Java + private final int hash; + + public AsyncDescription(String className, String methodName, String methodDescription) { + this.className = className; + this.methodName = methodName; + this.methodDescription = methodDescription; + this.hash = Objects.hash(this.className, this.methodName, this.methodDescription); + } + + @Override + public String className() { + return this.className; + } + + @Override + public String methodName() { + return this.methodName; + } + + public String methodDescription() { + return this.methodDescription; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AsyncDescription description = (AsyncDescription) o; + return this.hash == description.hash && + this.className.equals(description.className) && + this.methodName.equals(description.methodName) && + Objects.equals(this.methodDescription, description.methodDescription); + } + + @Override + public int hashCode() { + return this.hash; + } + } + + public static final class JavaDescription implements Description { + private final String className; + private final String methodName; private final int lineNumber; private final int parentLineNumber; private final int hash; - // Constructor used by the Java sampler - public Description(String className, String methodName, int lineNumber, int parentLineNumber) { + public JavaDescription(String className, String 
methodName, int lineNumber, int parentLineNumber) { this.className = className; this.methodName = methodName; - this.methodDescription = null; this.lineNumber = lineNumber; this.parentLineNumber = parentLineNumber; this.hash = Objects.hash(this.className, this.methodName, this.lineNumber, this.parentLineNumber); } - // Constructor used by the async-profiler sampler - public Description(String className, String methodName, String methodDescription) { - this.className = className; - this.methodName = methodName; - this.methodDescription = methodDescription; - this.lineNumber = StackTraceNode.NULL_LINE_NUMBER; - this.parentLineNumber = StackTraceNode.NULL_LINE_NUMBER; - this.hash = Objects.hash(this.className, this.methodName, this.methodDescription); + @Override + public String className() { + return this.className; + } + + @Override + public String methodName() { + return this.methodName; + } + + public int lineNumber() { + return this.lineNumber; + } + + public int parentLineNumber() { + return this.parentLineNumber; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - Description description = (Description) o; + JavaDescription description = (JavaDescription) o; return this.hash == description.hash && this.lineNumber == description.lineNumber && this.parentLineNumber == description.parentLineNumber && this.className.equals(description.className) && - this.methodName.equals(description.methodName) && - Objects.equals(this.methodDescription, description.methodDescription); + this.methodName.equals(description.methodName); } @Override diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java index 37ff359d823..f934e535b67 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java @@ -20,16 +20,9 @@ package me.lucko.spark.common.sampler.node; -import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; -import me.lucko.spark.common.util.IndexedListBuilder; -import me.lucko.spark.proto.SparkSamplerProtos; - import java.util.ArrayDeque; import java.util.Collection; -import java.util.Deque; import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; import java.util.Queue; import java.util.function.IntPredicate; @@ -134,92 +127,4 @@ public boolean removeTimeWindowsRecursively(IntPredicate predicate) { return getTimeWindows().isEmpty(); } - public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode, ProtoTimeEncoder timeEncoder) { - SparkSamplerProtos.ThreadNode.Builder proto = SparkSamplerProtos.ThreadNode.newBuilder() - .setName(getThreadLabel()); - - double[] times = encodeTimesForProto(timeEncoder); - for (double time : times) { - proto.addTimes(time); - } - - // When converting to a proto, we change the data structure from a recursive tree to an array. 
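The comment removed here (and re-added verbatim in the new AbstractNodeExporter below) describes how the recursive call tree is flattened into an indexed array. A minimal standalone sketch of that ref scheme, assuming a toy Node type rather than spark's actual classes (the ordering below is pre-order, whereas spark uses a post-order traversal; the invariant is only that childrenRefs are indexes into the same flat list):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class FlattenSketch {
    static class Node {
        final String data;
        final List<Node> children;
        Node(String data, Node... children) {
            this.data = data;
            this.children = Arrays.asList(children);
        }
    }

    static class FlatNode {
        final String data;
        final List<Integer> childrenRefs = new ArrayList<>(); // indexes into the flat list
        FlatNode(String data) {
            this.data = data;
        }
    }

    static List<FlatNode> flatten(Node root) {
        List<FlatNode> out = new ArrayList<>();
        append(root, out);
        return out;
    }

    private static int append(Node node, List<FlatNode> out) {
        FlatNode flat = new FlatNode(node.data);
        out.add(flat);
        int index = out.size() - 1;
        for (Node child : node.children) {
            flat.childrenRefs.add(append(child, out)); // record the child's index
        }
        return index;
    }

    public static void main(String[] args) {
        Node tree = new Node("one", new Node("two", new Node("four")), new Node("three"));
        // prints: one->[1, 3], two->[2], four->[], three->[]
        for (FlatNode n : flatten(tree)) {
            System.out.println(n.data + "->" + n.childrenRefs);
        }
    }
}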
- // Effectively, instead of: - // - // { - // data: 'one', - // children: [ - // { - // data: 'two', - // children: [{ data: 'four' }] - // }, - // { data: 'three' } - // ] - // } - // - // we transmit: - // - // [ - // { data: 'one', children: [1, 2] }, - // { data: 'two', children: [3] } - // { data: 'three', children: [] } - // { data: 'four', children: [] } - // ] - // - - // the flattened array of nodes - IndexedListBuilder nodesArray = new IndexedListBuilder<>(); - - // Perform a depth-first post order traversal of the tree - Deque stack = new ArrayDeque<>(); - - // push the thread node's children to the stack - List childrenRefs = new LinkedList<>(); - for (StackTraceNode child : exportChildren(mergeMode)) { - stack.push(new Node(child, childrenRefs)); - } - - Node node; - while (!stack.isEmpty()) { - node = stack.peek(); - - // on the first visit, just push this node's children and leave it on the stack - if (node.firstVisit) { - for (StackTraceNode child : node.stackTraceNode.exportChildren(mergeMode)) { - stack.push(new Node(child, node.childrenRefs)); - } - node.firstVisit = false; - continue; - } - - // convert StackTraceNode to a proto - // - at this stage, we have already visited this node's children - // - the refs for each child are stored in node.childrenRefs - SparkSamplerProtos.StackTraceNode childProto = node.stackTraceNode.toProto(mergeMode, timeEncoder, node.childrenRefs); - - // add the child proto to the nodes array, and record the ref in the parent - int childIndex = nodesArray.add(childProto); - node.parentChildrenRefs.add(childIndex); - - // pop from the stack - stack.pop(); - } - - proto.addAllChildrenRefs(childrenRefs); - proto.addAllChildren(nodesArray.build()); - - return proto.build(); - } - - private static final class Node { - private final StackTraceNode stackTraceNode; - private boolean firstVisit = true; - private final List childrenRefs = new LinkedList<>(); - private final List parentChildrenRefs; - - private Node(StackTraceNode node, List parentChildrenRefs) { - this.stackTraceNode = node; - this.parentChildrenRefs = parentChildrenRefs; - } - } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/AbstractNodeExporter.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/AbstractNodeExporter.java new file mode 100644 index 00000000000..bc548572ae6 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/AbstractNodeExporter.java @@ -0,0 +1,136 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.sampler.node.exporter; + +import me.lucko.spark.common.sampler.node.StackTraceNode; +import me.lucko.spark.common.sampler.node.ThreadNode; +import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; +import me.lucko.spark.common.util.IndexedListBuilder; +import me.lucko.spark.proto.SparkSamplerProtos; + +import java.util.ArrayDeque; +import java.util.Collection; +import java.util.Deque; +import java.util.LinkedList; +import java.util.List; + +public abstract class AbstractNodeExporter implements NodeExporter { + protected final ProtoTimeEncoder timeEncoder; + + protected AbstractNodeExporter(ProtoTimeEncoder timeEncoder) { + this.timeEncoder = timeEncoder; + } + + @Override + public SparkSamplerProtos.ThreadNode export(ThreadNode threadNode) { + SparkSamplerProtos.ThreadNode.Builder proto = SparkSamplerProtos.ThreadNode.newBuilder() + .setName(threadNode.getThreadLabel()); + + double[] times = threadNode.encodeTimesForProto(this.timeEncoder); + for (double time : times) { + proto.addTimes(time); + } + + // When converting to a proto, we change the data structure from a recursive tree to an array. + // Effectively, instead of: + // + // { + // data: 'one', + // children: [ + // { + // data: 'two', + // children: [{ data: 'four' }] + // }, + // { data: 'three' } + // ] + // } + // + // we transmit: + // + // [ + // { data: 'one', children: [1, 2] }, + // { data: 'two', children: [3] } + // { data: 'three', children: [] } + // { data: 'four', children: [] } + // ] + // + + // the flattened array of nodes + IndexedListBuilder nodesArray = new IndexedListBuilder<>(); + + // Perform a depth-first post order traversal of the tree + Deque stack = new ArrayDeque<>(); + + // push the thread node's children to the stack + List childrenRefs = new LinkedList<>(); + for (StackTraceNode child : exportChildren(threadNode.getChildren())) { + stack.push(new Node(child, childrenRefs)); + } + + Node node; + while (!stack.isEmpty()) { + node = stack.peek(); + + // on the first visit, just push this node's children and leave it on the stack + if (node.firstVisit) { + for (StackTraceNode child : exportChildren(node.stackTraceNode.getChildren())) { + stack.push(new Node(child, node.childrenRefs)); + } + node.firstVisit = false; + continue; + } + + // convert StackTraceNode to a proto + // - at this stage, we have already visited this node's children + // - the refs for each child are stored in node.childrenRefs + SparkSamplerProtos.StackTraceNode childProto = this.export(node.stackTraceNode, node.childrenRefs); + + // add the child proto to the nodes array, and record the ref in the parent + int childIndex = nodesArray.add(childProto); + node.parentChildrenRefs.add(childIndex); + + // pop from the stack + stack.pop(); + } + + proto.addAllChildrenRefs(childrenRefs); + proto.addAllChildren(nodesArray.build()); + + return proto.build(); + } + + protected abstract SparkSamplerProtos.StackTraceNode export(StackTraceNode stackTraceNode, Iterable childrenRefs); + + protected abstract Collection exportChildren(Collection children); + + private static final class Node { + private final StackTraceNode stackTraceNode; + private boolean firstVisit = true; + private final List childrenRefs = new LinkedList<>(); + private final List parentChildrenRefs; + + private Node(StackTraceNode node, List parentChildrenRefs) { + this.stackTraceNode = node; + this.parentChildrenRefs = parentChildrenRefs; + } + } + +} diff --git 
a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/NodeExporter.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/NodeExporter.java new file mode 100644 index 00000000000..b599fc01ab5 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/NodeExporter.java @@ -0,0 +1,39 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.node.exporter; + +import me.lucko.spark.common.sampler.node.ThreadNode; +import me.lucko.spark.proto.SparkSamplerProtos; + +/** + * Exports a {@link ThreadNode} to a protobuf message. + */ +public interface NodeExporter { + + /** + * Exports a {@link ThreadNode} to a protobuf message. + * + * @param threadNode the thread node + * @return the exported protobuf message + */ + SparkSamplerProtos.ThreadNode export(ThreadNode threadNode); + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java index ab63c003d35..5397a0e332a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java @@ -23,8 +23,7 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.sampler.node.StackTraceNode; import me.lucko.spark.common.sampler.node.ThreadNode; -import me.lucko.spark.common.util.ClassFinder; - +import me.lucko.spark.common.util.classfinder.ClassFinder; import org.checkerframework.checker.nullness.qual.Nullable; import java.io.IOException; @@ -43,6 +42,8 @@ import java.util.Objects; import java.util.Queue; import java.util.function.Function; +import java.util.function.Supplier; +import java.util.logging.Level; import java.util.stream.Collectors; /** @@ -92,7 +93,7 @@ static ClassSourceLookup create(SparkPlatform platform) { try { return platform.createClassSourceLookup(); } catch (Exception e) { - e.printStackTrace(); + platform.getPlugin().log(Level.WARNING, "Failed to create ClassSourceLookup", e); return NO_OP; } } @@ -204,11 +205,11 @@ interface Visitor { Map getLineSourceMapping(); } - static Visitor createVisitor(ClassSourceLookup lookup) { + static Visitor createVisitor(ClassSourceLookup lookup, Supplier classFinderSupplier) { if (lookup == ClassSourceLookup.NO_OP) { return NoOpVisitor.INSTANCE; // don't bother! 
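A sketch of a possible call site for the new Supplier-based createVisitor signature (the exact combination of finders is an assumption; platform and lookup are assumed to be in scope):

import java.util.function.Supplier;

// The Supplier defers ClassFinder construction, so the NO_OP fast path above
// never pays for instrumentation setup.
Supplier<ClassFinder> finderSupplier = () -> ClassFinder.combining(
        new InstrumentationClassFinder(platform.getPlugin()), // may attach a Java agent
        FallbackClassFinder.INSTANCE                          // plain Class.forName lookup
);
ClassSourceLookup.Visitor visitor = ClassSourceLookup.createVisitor(lookup, finderSupplier);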
} - return new VisitorImpl(lookup); + return new VisitorImpl(lookup, classFinderSupplier.get()); } enum NoOpVisitor implements Visitor { @@ -255,14 +256,15 @@ public Map getLineSourceMapping() { */ class VisitorImpl implements Visitor { private final ClassSourceLookup lookup; - private final ClassFinder classFinder = new ClassFinder(); + private final ClassFinder classFinder; private final SourcesMap classSources = new SourcesMap<>(Function.identity()); private final SourcesMap methodSources = new SourcesMap<>(MethodCall::toString); private final SourcesMap lineSources = new SourcesMap<>(MethodCallByLine::toString); - VisitorImpl(ClassSourceLookup lookup) { + VisitorImpl(ClassSourceLookup lookup, ClassFinder classFinder) { this.lookup = lookup; + this.classFinder = classFinder; } @Override @@ -288,7 +290,7 @@ private void visitStackNode(StackTraceNode node) { if (node.getMethodDescription() != null) { MethodCall methodCall = new MethodCall(node.getClassName(), node.getMethodName(), node.getMethodDescription()); this.methodSources.computeIfAbsent(methodCall, this.lookup::identify); - } else { + } else if (node.getLineNumber() != StackTraceNode.NULL_LINE_NUMBER) { MethodCallByLine methodCall = new MethodCallByLine(node.getClassName(), node.getMethodName(), node.getLineNumber()); this.lineSources.computeIfAbsent(methodCall, this.lookup::identify); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java index 0808d660ee8..d023a68eb05 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java @@ -21,8 +21,7 @@ package me.lucko.spark.common.sampler.source; import com.google.common.collect.ImmutableList; - -import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; +import me.lucko.spark.proto.SparkProtos.PluginOrModMetadata; import java.util.Collection; import java.util.List; @@ -34,15 +33,16 @@ */ public class SourceMetadata { - public static List gather(Collection sources, Function nameFunction, Function versionFunction, Function authorFunction) { + public static List gather(Collection sources, Function name, Function version, Function author, Function description) { ImmutableList.Builder builder = ImmutableList.builder(); for (T source : sources) { - String name = nameFunction.apply(source); - String version = versionFunction.apply(source); - String author = authorFunction.apply(source); - - SourceMetadata metadata = new SourceMetadata(name, version, author); + SourceMetadata metadata = new SourceMetadata( + name.apply(source), + version.apply(source), + author.apply(source), + description.apply(source) + ); builder.add(metadata); } @@ -52,11 +52,13 @@ public static List gather(Collection sources, Function index in the keys array */ private final Map keysToIndex; - public ProtoTimeEncoder(LongToDoubleFunction valueTransformer, List sourceData) { + @VisibleForTesting + ProtoTimeEncoder(LongToDoubleFunction valueTransformer, IntStream keys) { this.valueTransformer = valueTransformer; - - // get an array of all keys that show up in the source data - this.keys = sourceData.stream() - .map(n -> n.getTimeWindows().stream().mapToInt(i -> i)) - .reduce(IntStream.empty(), IntStream::concat) - .distinct() - .sorted() - .toArray(); + this.keys = keys.distinct().sorted().toArray(); // construct a reverse index lookup this.keysToIndex = new 
HashMap<>(this.keys.length); @@ -61,6 +55,13 @@ public ProtoTimeEncoder(LongToDoubleFunction valueTransformer, List } } + public ProtoTimeEncoder(LongToDoubleFunction valueTransformer, List sourceData) { + this(valueTransformer, sourceData.stream() + .map(n -> n.getTimeWindows().stream().mapToInt(i -> i)) + .reduce(IntStream.empty(), IntStream::concat) + ); + } + /** * Gets an array of the keys that could be encoded by this encoder. * @@ -71,7 +72,7 @@ public int[] getKeys() { } /** - * Encode a {@link Dictionary} (map) of times/durations into a double array. + * Encode a map of times/durations into a double array. * * @param times a dictionary of times (unix-time millis -> duration in microseconds) * @return the times encoded as a double array diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java index 86c0b20d5b7..d561cdf7ebb 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java @@ -26,6 +26,7 @@ import me.lucko.spark.common.monitor.tick.TickStatistics; import me.lucko.spark.common.platform.world.AsyncWorldInfoProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; +import me.lucko.spark.common.sampler.java.TickedJavaDataAggregator; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.proto.SparkProtos; @@ -190,7 +191,7 @@ private SparkProtos.WindowStatistics measure(int window) { builder.setChunks(counts.chunks()); } } catch (Exception e) { - e.printStackTrace(); + this.platform.getPlugin().log(Level.WARNING, "Exception occurred while getting world info", e); } return builder.build(); @@ -287,7 +288,7 @@ public int getCountedTicksThisWindowAndReset() { * Counts the number of ticks in a window according to the number of times * {@link #increment()} is called. * - * Used by the {@link me.lucko.spark.common.sampler.java.TickedDataAggregator}. + * Used by the {@link TickedJavaDataAggregator}. */ public static final class ExplicitTickCounter extends BaseTickCounter { private final AtomicInteger counted = new AtomicInteger(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/JavaVersion.java b/spark-common/src/main/java/me/lucko/spark/common/util/JavaVersion.java new file mode 100644 index 00000000000..42de280abbe --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/JavaVersion.java @@ -0,0 +1,47 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.util; + +import org.jetbrains.annotations.VisibleForTesting; + +public enum JavaVersion { + ; + + private static final int JAVA_VERSION; + static { + JAVA_VERSION = parseJavaVersion(System.getProperty("java.version")); + } + + @VisibleForTesting + static int parseJavaVersion(String version) { + if (version.startsWith("1.")) { + // Java 8 and below + return Integer.parseInt(version.substring(2, 3)); + } else { + // Java 9 and above + return Integer.parseInt(version.split("\\.|\\-")[0]); + } + } + + public static int getJavaVersion() { + return JAVA_VERSION; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/MediaTypes.java b/spark-common/src/main/java/me/lucko/spark/common/util/MediaTypes.java index 2c495401304..47a33955447 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/MediaTypes.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/MediaTypes.java @@ -25,5 +25,6 @@ public enum MediaTypes { public static final String SPARK_SAMPLER_MEDIA_TYPE = "application/x-spark-sampler"; public static final String SPARK_HEAP_MEDIA_TYPE = "application/x-spark-heap"; + public static final String SPARK_HEALTH_MEDIA_TYPE = "application/x-spark-health"; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/MethodDisambiguator.java b/spark-common/src/main/java/me/lucko/spark/common/util/MethodDisambiguator.java index c03e7cb0358..8553abbf5cf 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/MethodDisambiguator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/MethodDisambiguator.java @@ -23,9 +23,8 @@ import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ListMultimap; - import me.lucko.spark.common.sampler.node.StackTraceNode; - +import me.lucko.spark.common.util.classfinder.ClassFinder; import org.objectweb.asm.ClassReader; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.Label; @@ -45,8 +44,13 @@ * to a method (method name + method description). 
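Stepping back to JavaVersion.parseJavaVersion above, a test-style sketch of its two branches (callable from the same package, since the method is package-private; the inputs are illustrative):

// Legacy "1.x" scheme: the character after "1." is the feature version.
assert JavaVersion.parseJavaVersion("1.8.0_292") == 8;
// Modern scheme: everything before the first '.' or '-'.
assert JavaVersion.parseJavaVersion("17.0.1") == 17;
assert JavaVersion.parseJavaVersion("21-ea") == 21; // early-access builds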
*/ public final class MethodDisambiguator { - private final Map cache = new ConcurrentHashMap<>(); - private final ClassFinder classFinder = new ClassFinder(); + private final ClassFinder classFinder; + private final Map cache; + + public MethodDisambiguator(ClassFinder classFinder) { + this.classFinder = classFinder; + this.cache = new ConcurrentHashMap<>(); + } public Optional disambiguate(StackTraceNode element) { String desc = element.getMethodDescription(); @@ -81,6 +85,29 @@ public Optional disambiguate(String className, String methodN } } + private ComputedClass compute(String className) throws IOException { + final ImmutableListMultimap.Builder descriptionsByName = ImmutableListMultimap.builder(); + final Map descriptionsByLine = new HashMap<>(); + + ClassReader classReader = getClassReader(className); + classReader.accept(new ClassVisitor(Opcodes.ASM7) { + @Override + public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { + MethodDescription description = new MethodDescription(name, descriptor); + descriptionsByName.put(name, description); + + return new MethodVisitor(Opcodes.ASM7) { + @Override + public void visitLineNumber(int line, Label start) { + descriptionsByLine.put(line, description); + } + }; + } + }, Opcodes.ASM7); + + return new ComputedClass(descriptionsByName.build(), ImmutableMap.copyOf(descriptionsByLine)); + } + private ClassReader getClassReader(String className) throws IOException { String resource = className.replace('.', '/') + ".class"; @@ -102,28 +129,6 @@ private ClassReader getClassReader(String className) throws IOException { throw new IOException("Unable to get resource: " + className); } - private ComputedClass compute(String className) throws IOException { - ImmutableListMultimap.Builder descriptionsByName = ImmutableListMultimap.builder(); - Map descriptionsByLine = new HashMap<>(); - - getClassReader(className).accept(new ClassVisitor(Opcodes.ASM7) { - @Override - public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { - MethodDescription description = new MethodDescription(name, descriptor); - descriptionsByName.put(name, description); - - return new MethodVisitor(Opcodes.ASM7) { - @Override - public void visitLineNumber(int line, Label start) { - descriptionsByLine.put(line, description); - } - }; - } - }, Opcodes.ASM7); - - return new ComputedClass(descriptionsByName.build(), ImmutableMap.copyOf(descriptionsByLine)); - } - private static final class ComputedClass { private static final ComputedClass EMPTY = new ComputedClass(ImmutableListMultimap.of(), ImmutableMap.of()); @@ -138,24 +143,24 @@ private ComputedClass(ListMultimap descriptionsByName public static final class MethodDescription { private final String name; - private final String desc; + private final String description; - private MethodDescription(String name, String desc) { + private MethodDescription(String name, String description) { this.name = name; - this.desc = desc; + this.description = description; } public String getName() { return this.name; } - public String getDesc() { - return this.desc; + public String getDescription() { + return this.description; } @Override public String toString() { - return this.name + this.desc; + return this.name + this.description; } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java index 
be5bbc28f12..b4acc7befde 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java @@ -23,9 +23,9 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.monitor.cpu.CpuMonitor; import me.lucko.spark.common.monitor.tick.TickStatistics; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.TextComponent; +import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer; import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer; import java.util.Locale; @@ -187,5 +187,13 @@ public static String resolveFormattingCode(SparkPlatform platform, String placeh } return LegacyComponentSerializer.legacySection().serialize(result); } + + public static String resolveComponentJson(SparkPlatform platform, String placeholder) { + TextComponent result = resolveComponent(platform, placeholder); + if (result == null) { + return null; + } + return GsonComponentSerializer.gson().serialize(result); + } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java b/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java index 42dca12a456..99e50ff98f1 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java @@ -20,14 +20,16 @@ package me.lucko.spark.common.util; +import me.lucko.spark.common.util.log.SparkStaticLogger; + import java.util.concurrent.ThreadFactory; import java.util.concurrent.atomic.AtomicInteger; +import java.util.logging.Level; public class SparkThreadFactory implements ThreadFactory { public static final Thread.UncaughtExceptionHandler EXCEPTION_HANDLER = (t, e) -> { - System.err.println("Uncaught exception thrown by thread " + t.getName()); - e.printStackTrace(); + SparkStaticLogger.log(Level.SEVERE, "Uncaught exception thrown by thread " + t.getName(), e); }; private static final AtomicInteger poolNumber = new AtomicInteger(1); diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java b/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java index b488f505456..e7b8cca2643 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java @@ -21,9 +21,7 @@ package me.lucko.spark.common.util; import com.google.common.base.Strings; - import me.lucko.spark.api.statistic.misc.DoubleAverageInfo; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.TextComponent; import net.kyori.adventure.text.format.TextColor; diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java b/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java index 91a474cd729..01dfccf5463 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java @@ -25,8 +25,11 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.FileSystems; +import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; import java.nio.file.attribute.FileAttribute; import java.nio.file.attribute.PosixFilePermission; 
import java.nio.file.attribute.PosixFilePermissions; @@ -58,16 +61,21 @@ public final class TemporaryFiles { private final Set files = Collections.synchronizedSet(new HashSet<>()); public TemporaryFiles(Path tmpDirectory) { - this.tmpDirectory = tmpDirectory; + boolean useOsTmpDir = Boolean.parseBoolean(System.getProperty("spark.useOsTmpDir", "false")); + if (useOsTmpDir) { + this.tmpDirectory = null; + } else { + this.tmpDirectory = init(tmpDirectory); + } } public Path create(String prefix, String suffix) throws IOException { Path file; - if (ensureDirectoryIsReady()) { + if (this.tmpDirectory == null) { + file = Files.createTempFile(prefix, suffix); + } else { String name = prefix + Long.toHexString(System.nanoTime()) + suffix; file = Files.createFile(this.tmpDirectory.resolve(name), OWNER_ONLY_FILE_PERMISSIONS); - } else { - file = Files.createTempFile(prefix, suffix); } return register(file); } @@ -92,19 +100,33 @@ public void deleteTemporaryFiles() { } } - private boolean ensureDirectoryIsReady() { - if (Boolean.parseBoolean(System.getProperty("spark.useOsTmpDir", "false"))) { - return false; - } - - if (Files.isDirectory(this.tmpDirectory)) { - return true; - } - + private static Path init(Path tmpDirectory) { try { - Files.createDirectories(this.tmpDirectory); - - Files.write(this.tmpDirectory.resolve("about.txt"), ImmutableList.of( + Files.createDirectories(tmpDirectory); + Path readmePath = tmpDirectory.resolve("about.txt"); + + Files.walkFileTree( + tmpDirectory, + new SimpleFileVisitor() { + @Override + public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { + if (!dir.equals(tmpDirectory)) { + Files.delete(dir); + } + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + if (!file.equals(readmePath)) { + Files.delete(file); + } + return FileVisitResult.CONTINUE; + } + } + ); + + Files.write(readmePath, ImmutableList.of( "# What is this directory?", "", "* In order to perform certain functions, spark sometimes needs to write temporary data to the disk. ", @@ -116,11 +138,10 @@ private boolean ensureDirectoryIsReady() { "", "tl;dr: spark uses this folder to store some temporary data." ), StandardCharsets.UTF_8); - - return true; } catch (IOException e) { - return false; + // ignore } + return tmpDirectory; } } diff --git a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomPlayerPingProvider.java b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/ClassFinder.java similarity index 52% rename from spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomPlayerPingProvider.java rename to spark-common/src/main/java/me/lucko/spark/common/util/classfinder/ClassFinder.java index 0b50c0adbcc..1ee75c66dba 100644 --- a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomPlayerPingProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/ClassFinder.java @@ -18,24 +18,29 @@ * along with this program. If not, see . 
*/ -package me.lucko.spark.minestom; +package me.lucko.spark.common.util.classfinder; -import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableList; +import org.checkerframework.checker.nullness.qual.Nullable; -import me.lucko.spark.common.monitor.ping.PlayerPingProvider; +public interface ClassFinder { -import net.minestom.server.MinecraftServer; -import net.minestom.server.entity.Player; + /** + * Creates a ClassFinder that combines the results of multiple other finders. + * + * @param finders the other class finders + * @return the combined class finder + */ + static ClassFinder combining(ClassFinder... finders) { + return new CombinedClassFinder(ImmutableList.copyOf(finders)); + } -import java.util.Map; + /** + * Attempts to find a class by name. + * + * @param className the name of the class + * @return the class, if found + */ + @Nullable Class findClass(String className); -public class MinestomPlayerPingProvider implements PlayerPingProvider { - @Override - public Map poll() { - ImmutableMap.Builder builder = ImmutableMap.builder(); - for (Player player : MinecraftServer.getConnectionManager().getOnlinePlayers()) { - builder.put(player.getUsername(), player.getLatency()); - } - return builder.build(); - } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/CombinedClassFinder.java b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/CombinedClassFinder.java new file mode 100644 index 00000000000..ed63f36c028 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/CombinedClassFinder.java @@ -0,0 +1,44 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.util.classfinder; + +import org.checkerframework.checker.nullness.qual.Nullable; + +import java.util.List; + +class CombinedClassFinder implements ClassFinder { + private final List finders; + + CombinedClassFinder(List finders) { + this.finders = finders; + } + + @Override + public @Nullable Class findClass(String className) { + for (ClassFinder finder : this.finders) { + Class clazz = finder.findClass(className); + if (clazz != null) { + return clazz; + } + } + return null; + } +} diff --git a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomPlatformInfo.java b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/FallbackClassFinder.java similarity index 58% rename from spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomPlatformInfo.java rename to spark-common/src/main/java/me/lucko/spark/common/util/classfinder/FallbackClassFinder.java index add258a5db4..dd3c9f009f5 100644 --- a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomPlatformInfo.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/FallbackClassFinder.java @@ -18,30 +18,23 @@ * along with this program. If not, see . */ -package me.lucko.spark.minestom; +package me.lucko.spark.common.util.classfinder; -import me.lucko.spark.common.platform.PlatformInfo; +import org.checkerframework.checker.nullness.qual.Nullable; -import net.minestom.server.MinecraftServer; - -public class MinestomPlatformInfo implements PlatformInfo { - @Override - public Type getType() { - return Type.SERVER; - } - - @Override - public String getName() { - return "Minestom"; - } +/** + * Uses {@link Class#forName(String)} to find a class reference for given class names. + */ +public enum FallbackClassFinder implements ClassFinder { + INSTANCE; @Override - public String getVersion() { - return MinecraftServer.VERSION_NAME + "-" + MinecraftServer.getBrandName(); + public @Nullable Class findClass(String className) { + try { + return Class.forName(className); + } catch (Throwable e) { + return null; + } } - @Override - public String getMinecraftVersion() { - return MinecraftServer.VERSION_NAME; - } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/ClassFinder.java b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/InstrumentationClassFinder.java similarity index 60% rename from spark-common/src/main/java/me/lucko/spark/common/util/ClassFinder.java rename to spark-common/src/main/java/me/lucko/spark/common/util/classfinder/InstrumentationClassFinder.java index f132613cb02..1381d4d3e84 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/ClassFinder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/InstrumentationClassFinder.java @@ -18,32 +18,49 @@ * along with this program. If not, see . */ -package me.lucko.spark.common.util; +package me.lucko.spark.common.util.classfinder; +import me.lucko.spark.common.SparkPlugin; +import me.lucko.spark.common.util.JavaVersion; import net.bytebuddy.agent.ByteBuddyAgent; - import org.checkerframework.checker.nullness.qual.Nullable; import java.lang.instrument.Instrumentation; import java.util.HashMap; import java.util.Map; +import java.util.logging.Level; /** * Uses {@link Instrumentation} to find a class reference for given class names. * *
<p>This is necessary as we don't always have access to the classloader for a given class.</p>
*/ -public class ClassFinder { +public class InstrumentationClassFinder implements ClassFinder { - private final Map> classes = new HashMap<>(); + private static boolean warned = false; - public ClassFinder() { - Instrumentation instrumentation; + private static Instrumentation loadInstrumentation(SparkPlugin plugin) { + Instrumentation instrumentation = null; try { instrumentation = ByteBuddyAgent.install(); + if (!warned && JavaVersion.getJavaVersion() >= 21) { + warned = true; + plugin.log(Level.INFO, "If you see a warning above that says \"WARNING: A Java agent has been loaded dynamically\", it can be safely ignored."); + plugin.log(Level.INFO, "See here for more information: https://spark.lucko.me/docs/misc/Java-agent-warning"); + } } catch (Exception e) { - return; + // ignored } + return instrumentation; + } + + private final Map> classes = new HashMap<>(); + + public InstrumentationClassFinder(SparkPlugin plugin) { + this(loadInstrumentation(plugin)); + } + + public InstrumentationClassFinder(Instrumentation instrumentation) { if (instrumentation == null) { return; } @@ -54,21 +71,9 @@ public ClassFinder() { } } + @Override public @Nullable Class findClass(String className) { - // try instrumentation - Class clazz = this.classes.get(className); - if (clazz != null) { - return clazz; - } - - // try Class.forName - try { - return Class.forName(className); - } catch (Throwable e) { - // ignore - } - - return null; + return this.classes.get(className); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/config/CombinedConfiguration.java b/spark-common/src/main/java/me/lucko/spark/common/util/config/CombinedConfiguration.java new file mode 100644 index 00000000000..ff7388aeab9 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/config/CombinedConfiguration.java @@ -0,0 +1,132 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.util.config; + +import com.google.common.collect.ImmutableList; + +import java.util.Collections; +import java.util.List; + +class CombinedConfiguration implements Configuration { + + private final List configurations; + + CombinedConfiguration(Configuration... 
configurations) { + this.configurations = ImmutableList.copyOf(configurations).reverse(); + } + + @Override + public void load() { + for (Configuration configuration : this.configurations) { + configuration.load(); + } + } + + @Override + public void save() { + for (Configuration configuration : this.configurations) { + configuration.save(); + } + } + + @Override + public String getString(String path, String def) { + String result = def; + for (Configuration configuration : this.configurations) { + result = configuration.getString(path, result); + } + return result; + } + + @Override + public boolean getBoolean(String path, boolean def) { + boolean result = def; + for (Configuration configuration : this.configurations) { + result = configuration.getBoolean(path, result); + } + return result; + } + + @Override + public int getInteger(String path, int def) { + int result = def; + for (Configuration configuration : this.configurations) { + result = configuration.getInteger(path, result); + } + return result; + } + + @Override + public List getStringList(String path) { + for (Configuration configuration : this.configurations) { + List result = configuration.getStringList(path); + if (!result.isEmpty()) { + return result; + } + } + return Collections.emptyList(); + } + + @Override + public void setString(String path, String value) { + for (Configuration configuration : this.configurations) { + configuration.setString(path, value); + } + } + + @Override + public void setBoolean(String path, boolean value) { + for (Configuration configuration : this.configurations) { + configuration.setBoolean(path, value); + } + } + + @Override + public void setInteger(String path, int value) { + for (Configuration configuration : this.configurations) { + configuration.setInteger(path, value); + } + } + + @Override + public void setStringList(String path, List value) { + for (Configuration configuration : this.configurations) { + configuration.setStringList(path, value); + } + } + + @Override + public boolean contains(String path) { + for (Configuration configuration : this.configurations) { + if (configuration.contains(path)) { + return true; + } + } + return false; + } + + @Override + public void remove(String path) { + for (Configuration configuration : this.configurations) { + configuration.remove(path); + } + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/config/Configuration.java b/spark-common/src/main/java/me/lucko/spark/common/util/config/Configuration.java new file mode 100644 index 00000000000..c2c2d88b489 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/config/Configuration.java @@ -0,0 +1,54 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.util.config; + +import java.util.List; + +public interface Configuration { + + static Configuration combining(Configuration... configurations) { + return new CombinedConfiguration(configurations); + } + + void load(); + + void save(); + + String getString(String path, String def); + + boolean getBoolean(String path, boolean def); + + int getInteger(String path, int def); + + List getStringList(String path); + + void setString(String path, String value); + + void setBoolean(String path, boolean value); + + void setInteger(String path, int value); + + void setStringList(String path, List value); + + boolean contains(String path); + + void remove(String path); +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java b/spark-common/src/main/java/me/lucko/spark/common/util/config/FileConfiguration.java similarity index 92% rename from spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java rename to spark-common/src/main/java/me/lucko/spark/common/util/config/FileConfiguration.java index d19ba64ed62..72a4681ffcf 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/config/FileConfiguration.java @@ -18,7 +18,7 @@ * along with this program. If not, see . */ -package me.lucko.spark.common.util; +package me.lucko.spark.common.util.config; import com.google.gson.Gson; import com.google.gson.GsonBuilder; @@ -37,17 +37,18 @@ import java.util.Collections; import java.util.List; -public final class Configuration { +public class FileConfiguration implements Configuration { private static final Gson GSON = new GsonBuilder().setPrettyPrinting().create(); private final Path file; private JsonObject root; - public Configuration(Path file) { + public FileConfiguration(Path file) { this.file = file; load(); } + @Override public void load() { JsonObject root = null; if (Files.exists(this.file)) { @@ -64,6 +65,7 @@ public void load() { this.root = root; } + @Override public void save() { try { Files.createDirectories(this.file.getParent()); @@ -78,6 +80,7 @@ public void save() { } } + @Override public String getString(String path, String def) { JsonElement el = this.root.get(path); if (el == null || !el.isJsonPrimitive()) { @@ -87,6 +90,7 @@ public String getString(String path, String def) { return el.getAsJsonPrimitive().getAsString(); } + @Override public boolean getBoolean(String path, boolean def) { JsonElement el = this.root.get(path); if (el == null || !el.isJsonPrimitive()) { @@ -97,6 +101,7 @@ public boolean getBoolean(String path, boolean def) { return val.isBoolean() ? val.getAsBoolean() : def; } + @Override public int getInteger(String path, int def) { JsonElement el = this.root.get(path); if (el == null || !el.isJsonPrimitive()) { @@ -104,9 +109,10 @@ public int getInteger(String path, int def) { } JsonPrimitive val = el.getAsJsonPrimitive(); - return val.isBoolean() ? val.getAsInt() : def; + return val.isNumber() ? 
val.getAsInt() : def; } + @Override public List getStringList(String path) { JsonElement el = this.root.get(path); if (el == null || !el.isJsonArray()) { @@ -122,18 +128,22 @@ public List getStringList(String path) { return list; } + @Override public void setString(String path, String value) { this.root.add(path, new JsonPrimitive(value)); } + @Override public void setBoolean(String path, boolean value) { this.root.add(path, new JsonPrimitive(value)); } + @Override public void setInteger(String path, int value) { this.root.add(path, new JsonPrimitive(value)); } + @Override public void setStringList(String path, List value) { JsonArray array = new JsonArray(); for (String str : value) { @@ -142,10 +152,12 @@ public void setStringList(String path, List value) { this.root.add(path, array); } + @Override public boolean contains(String path) { return this.root.has(path); } + @Override public void remove(String path) { this.root.remove(path); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/config/RuntimeConfiguration.java b/spark-common/src/main/java/me/lucko/spark/common/util/config/RuntimeConfiguration.java new file mode 100644 index 00000000000..d0765547713 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/config/RuntimeConfiguration.java @@ -0,0 +1,106 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.util.config; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +public enum RuntimeConfiguration implements Configuration { + SYSTEM_PROPERTIES { + @Override + public String getString(String path, String def) { + return System.getProperty("spark." + path, def); + } + }, + + ENVIRONMENT_VARIABLES { + @Override + public String getString(String path, String def) { + String name = "SPARK_" + path.replace(".", "_").replace("-", "_").toUpperCase(); + String value = System.getenv(name); + return value != null ? 
value : def; + } + }; + + @Override + public boolean getBoolean(String path, boolean def) { + return Boolean.parseBoolean(getString(path, Boolean.toString(def))); + } + + @Override + public int getInteger(String path, int def) { + try { + return Integer.parseInt(getString(path, Integer.toString(def))); + } catch (NumberFormatException e) { + return def; + } + } + + @Override + public List getStringList(String path) { + String value = getString(path, ""); + if (value.isEmpty()) { + return Collections.emptyList(); + } + return Arrays.asList(value.split(",")); + } + + @Override + public boolean contains(String path) { + return getString(path, null) != null; + } + + @Override + public void load() { + // no-op + } + + @Override + public void save() { + // no-op + } + + @Override + public void setString(String path, String value) { + // no-op + } + + @Override + public void setBoolean(String path, boolean value) { + // no-op + } + + @Override + public void setInteger(String path, int value) { + // no-op + } + + @Override + public void setStringList(String path, List value) { + // no-op + } + + @Override + public void remove(String path) { + // no-op + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/log/Logger.java b/spark-common/src/main/java/me/lucko/spark/common/util/log/Logger.java new file mode 100644 index 00000000000..1f1478770a1 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/log/Logger.java @@ -0,0 +1,80 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.util.log; + +import java.util.logging.Level; + +public interface Logger { + + /** + * Print to the plugin logger. + * + * @param level the log level + * @param msg the message + */ + void log(Level level, String msg); + + /** + * Print to the plugin logger. 
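A sketch of how the RuntimeConfiguration sources above resolve keys, and how they compose via Configuration.combining (the key and file path are hypothetical):

// ENVIRONMENT_VARIABLES maps "backgroundProfiler.enabled" -> SPARK_BACKGROUNDPROFILER_ENABLED
String fromEnv = RuntimeConfiguration.ENVIRONMENT_VARIABLES.getString("backgroundProfiler.enabled", "false");

// SYSTEM_PROPERTIES adds a "spark." prefix, i.e. -Dspark.backgroundProfiler.enabled=true
boolean fromProps = RuntimeConfiguration.SYSTEM_PROPERTIES.getBoolean("backgroundProfiler.enabled", false);

// CombinedConfiguration reverses the argument list and lets each entry override the
// previous result, so the first argument wins on reads: a system property would
// override the file-based value here.
Configuration config = Configuration.combining(
        RuntimeConfiguration.SYSTEM_PROPERTIES,
        new FileConfiguration(configPath) // configPath: hypothetical java.nio.file.Path
);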
+ * + * @param level the log level + * @param msg the message + * @param throwable the throwable + */ + void log(Level level, String msg, Throwable throwable); + + /** + * A fallback logger + */ + Logger FALLBACK = new Logger() { + @Override + public void log(Level level, String msg) { + if (level.intValue() >= 1000) { + System.err.println(msg); + } else { + System.out.println(msg); + } + } + + @Override + public void log(Level level, String msg, Throwable throwable) { + if (isSevere(level)) { + System.err.println(msg); + if (throwable != null) { + throwable.printStackTrace(System.err); + } + } else { + System.out.println(msg); + if (throwable != null) { + throwable.printStackTrace(System.out); + } + } + } + }; + + static boolean isSevere(Level level) { + return level.intValue() >= 1000; + } + + static boolean isWarning(Level level) { + return level.intValue() >= 900; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/log/SparkStaticLogger.java b/spark-common/src/main/java/me/lucko/spark/common/util/log/SparkStaticLogger.java new file mode 100644 index 00000000000..3c118fa3db7 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/log/SparkStaticLogger.java @@ -0,0 +1,51 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.util.log; + +import java.util.logging.Level; + +/** + * Special logger for use by classes that don't easily have access to a + * {@link me.lucko.spark.common.SparkPlatform} instance. + * + *
<p>This avoids warnings on platforms like Paper that get upset if plugins use + * {@link System#out} or {@link System#err}.</p>
+ */ +public enum SparkStaticLogger { + ; + + private static Logger logger = Logger.FALLBACK; + + public synchronized static void setLogger(Logger logger) { + if (SparkStaticLogger.logger == null) { + SparkStaticLogger.logger = logger; + } + } + + public static void log(Level level, String msg, Throwable throwable) { + logger.log(level, msg, throwable); + } + + public static void log(Level level, String msg) { + logger.log(level, msg); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/ws/TrustedKeyStore.java b/spark-common/src/main/java/me/lucko/spark/common/ws/TrustedKeyStore.java index 1605a385c14..0d825145a83 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/ws/TrustedKeyStore.java +++ b/spark-common/src/main/java/me/lucko/spark/common/ws/TrustedKeyStore.java @@ -20,7 +20,7 @@ package me.lucko.spark.common.ws; -import me.lucko.spark.common.util.Configuration; +import me.lucko.spark.common.util.config.Configuration; import java.security.KeyPair; import java.security.PrivateKey; diff --git a/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocket.java b/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocket.java index 6a9c2b74095..49b2837d272 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocket.java +++ b/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocket.java @@ -21,7 +21,6 @@ package me.lucko.spark.common.ws; import com.google.protobuf.ByteString; - import me.lucko.bytesocks.client.BytesocksClient; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.sampler.AbstractSampler; @@ -118,8 +117,7 @@ public void processWindowRotate(AbstractSampler sampler) { String key = this.platform.getBytebinClient().postContent(samplerData, MediaTypes.SPARK_SAMPLER_MEDIA_TYPE, "live").key(); sendUpdatedSamplerData(key); } catch (Exception e) { - this.platform.getPlugin().log(Level.WARNING, "Error whilst sending updated sampler data to the socket"); - e.printStackTrace(); + this.platform.getPlugin().log(Level.WARNING, "Error whilst sending updated sampler data to the socket", e); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocketConnection.java b/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocketConnection.java index 9079860e4c2..5ec72aace42 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocketConnection.java +++ b/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocketConnection.java @@ -21,7 +21,6 @@ package me.lucko.spark.common.ws; import com.google.protobuf.ByteString; - import me.lucko.bytesocks.client.BytesocksClient; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.proto.SparkWebSocketProtos.PacketWrapper; @@ -87,7 +86,7 @@ public interface Listener { * @return the channel id */ public String getChannelId() { - return this.socket.getChannelId(); + return this.socket.channelId(); } /** @@ -100,20 +99,18 @@ public boolean isOpen() { } @Override - public void onText(CharSequence data) { + public void onText(String data) { try { RawPacket packet = decodeRawPacket(data); handleRawPacket(packet); } catch (Exception e) { - this.platform.getPlugin().log(Level.WARNING, "Exception occurred while reading data from the socket"); - e.printStackTrace(); + this.platform.getPlugin().log(Level.WARNING, "Exception occurred while reading data from the socket", e); } } @Override public void onError(Throwable error) { - this.platform.getPlugin().log(Level.INFO, "Socket error: " + error.getClass().getName() + " " + 
error.getMessage()); - error.printStackTrace(); + this.platform.getPlugin().log(Level.INFO, "Socket error: " + error.getClass().getName() + " " + error.getMessage(), error); } @Override @@ -134,8 +131,7 @@ public void sendPacket(Consumer<PacketWrapper.Builder> packetBuilder) { try { sendPacket(wrapper); } catch (Exception e) { - this.platform.getPlugin().log(Level.WARNING, "Exception occurred while sending data to the socket"); - e.printStackTrace(); + this.platform.getPlugin().log(Level.WARNING, "Exception occurred while sending data to the socket", e); } } @@ -178,8 +174,8 @@ private void sendRawPacket(RawPacket packet) throws IOException { * @param data the encoded data * @return the decoded packet */ - private RawPacket decodeRawPacket(CharSequence data) throws IOException { - byte[] buf = Base64.getDecoder().decode(data.toString()); + private RawPacket decodeRawPacket(String data) throws IOException { + byte[] buf = Base64.getDecoder().decode(data); return RawPacket.parseFrom(buf); } diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto index 20044155554..4b95a4a34ee 100644 --- a/spark-common/src/main/proto/spark/spark.proto +++ b/spark-common/src/main/proto/spark/spark.proto @@ -11,6 +11,7 @@ message PlatformMetadata { string version = 3; string minecraft_version = 4; // optional int32 spark_version = 7; + string brand = 8; // replaced reserved 5, 6; @@ -20,6 +21,7 @@ message PlatformMetadata { SERVER = 0; CLIENT = 1; PROXY = 2; + APPLICATION = 3; } } @@ -32,6 +34,7 @@ message SystemStatistics { Java java = 6; int64 uptime = 7; map<string, NetInterface> net = 8; + Jvm jvm = 9; message Cpu { int32 threads = 1; @@ -79,6 +82,12 @@ message SystemStatistics { string vm_args = 4; } + message Jvm { + string name = 1; + string vendor = 2; + string version = 3; + } + message NetInterface { RollingAverageValues rx_bytes_per_second = 1; RollingAverageValues tx_bytes_per_second = 2; @@ -96,13 +105,24 @@ message PlatformStatistics { Ping ping = 6; // optional int64 player_count = 7; // optional WorldStatistics world = 8; // optional + OnlineMode online_mode = 9; // optional message Memory { - MemoryPool heap = 1; + MemoryUsage heap = 1; + MemoryUsage non_heap = 2; + repeated MemoryPool pools = 3; message MemoryPool { + string name = 1; + MemoryUsage usage = 2; + MemoryUsage collection_usage = 3; + } + + message MemoryUsage { int64 used = 1; - int64 total = 2; + int64 committed = 2; // previously called 'total' + int64 init = 3; // optional + int64 max = 4; // optional } } @@ -126,12 +146,20 @@ message PlatformStatistics { message Ping { RollingAverageValues last15m = 1; } + + enum OnlineMode { + UNKNOWN = 0; + OFFLINE = 1; + ONLINE = 2; + } } message WorldStatistics { int32 total_entities = 1; map<string, int32> entity_counts = 2; repeated World worlds = 3; + repeated GameRule game_rules = 4; + repeated DataPack data_packs = 5; message World { string name = 1; @@ -150,6 +178,18 @@ message WorldStatistics { int32 total_entities = 3; map<string, int32> entity_counts = 4; } + + message GameRule { + string name = 1; + string default_value = 2; + map<string, string> world_values = 3; + } + + message DataPack { + string name = 1; + string description = 2; + string source = 3; + } } message WindowStatistics { @@ -190,3 +230,26 @@ message CommandSenderMetadata { PLAYER = 1; } } + +message PluginOrModMetadata { + string name = 1; + string version = 2; + string author = 3; + string description = 4; +} + +message HealthData { + HealthMetadata metadata = 1; + map<int32, WindowStatistics> time_window_statistics = 2; +} + +message HealthMetadata { + CommandSenderMetadata 
creator = 1; + PlatformMetadata platform_metadata = 2; + PlatformStatistics platform_statistics = 3; + SystemStatistics system_statistics = 4; + int64 generated_time = 5; + map<string, string> server_configurations = 6; + map<string, PluginOrModMetadata> sources = 7; + map<string, string> extra_platform_metadata = 8; +} diff --git a/spark-common/src/main/proto/spark/spark_heap.proto b/spark-common/src/main/proto/spark/spark_heap.proto index 59f2b854cc2..aef7888bb18 100644 --- a/spark-common/src/main/proto/spark/spark_heap.proto +++ b/spark-common/src/main/proto/spark/spark_heap.proto @@ -17,6 +17,10 @@ message HeapMetadata { PlatformMetadata platform_metadata = 2; PlatformStatistics platform_statistics = 3; SystemStatistics system_statistics = 4; + int64 generated_time = 5; + map<string, string> server_configurations = 6; + map<string, PluginOrModMetadata> sources = 7; + map<string, string> extra_platform_metadata = 8; } message HeapEntry { diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-common/src/main/proto/spark/spark_sampler.proto index dbc336a774c..445e541cad4 100644 --- a/spark-common/src/main/proto/spark/spark_sampler.proto +++ b/spark-common/src/main/proto/spark/spark_sampler.proto @@ -31,9 +31,11 @@ message SamplerMetadata { map<string, string> server_configurations = 10; int64 end_time = 11; int32 number_of_ticks = 12; - map<string, SourceMetadata> sources = 13; + map<string, PluginOrModMetadata> sources = 13; map<string, string> extra_platform_metadata = 14; SamplerMode sampler_mode = 15; + SamplerEngine sampler_engine = 16; + string sampler_engine_version = 17; message ThreadDumper { Type type = 1; @@ -65,15 +67,15 @@ message SamplerMetadata { } } - message SourceMetadata { - string name = 1; - string version = 2; - } - enum SamplerMode { EXECUTION = 0; ALLOCATION = 1; } + + enum SamplerEngine { + JAVA = 0; + ASYNC = 1; + } } message ThreadNode { diff --git a/spark-common/src/main/resources/spark-native/linux/aarch64/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/linux/aarch64/libasyncProfiler.so new file mode 100644 index 00000000000..1dda384bede Binary files /dev/null and b/spark-common/src/main/resources/spark-native/linux/aarch64/libasyncProfiler.so differ diff --git a/spark-common/src/main/resources/spark-native/linux/amd64/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/linux/amd64/libasyncProfiler.so new file mode 100644 index 00000000000..a321733dbd6 Binary files /dev/null and b/spark-common/src/main/resources/spark-native/linux/amd64/libasyncProfiler.so differ diff --git a/spark-common/src/main/resources/spark-native/macos/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/macos/libasyncProfiler.so new file mode 100644 index 00000000000..693b3367a51 Binary files /dev/null and b/spark-common/src/main/resources/spark-native/macos/libasyncProfiler.so differ diff --git a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so deleted file mode 100755 index 800cf919d71..00000000000 Binary files a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so and /dev/null differ diff --git a/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so deleted file mode 100755 index 3c81d1cb620..00000000000 Binary files a/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so and /dev/null differ diff --git a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so deleted file mode 100755 index 
5af5071855d..00000000000 Binary files a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so and /dev/null differ diff --git a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so b/spark-common/src/main/resources/spark/macos/libasyncProfiler.so deleted file mode 100755 index 4930c678006..00000000000 Binary files a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so and /dev/null differ diff --git a/spark-common/src/test/java/me/lucko/spark/common/SparkPlatformTest.java b/spark-common/src/test/java/me/lucko/spark/common/SparkPlatformTest.java new file mode 100644 index 00000000000..fb2ae5e477c --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/SparkPlatformTest.java @@ -0,0 +1,107 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common; + +import com.google.common.collect.ImmutableSet; +import me.lucko.spark.common.command.sender.CommandSender; +import me.lucko.spark.test.plugin.TestSparkPlugin; +import net.kyori.adventure.text.Component; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.nio.file.Path; +import java.util.HashSet; +import java.util.Set; +import java.util.UUID; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class SparkPlatformTest { + + @Test + public void testEnableDisable(@TempDir Path directory) { + try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) { + assertTrue(plugin.platform().hasEnabled()); + } + } + + @Test + public void testPermissions(@TempDir Path directory) { + try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) { + SparkPlatform platform = plugin.platform(); + + Set<String> permissions = platform.getAllSparkPermissions(); + assertEquals( + ImmutableSet.of( + "spark", + "spark.profiler", + "spark.tps", + "spark.ping", + "spark.healthreport", + "spark.tickmonitor", + "spark.gc", + "spark.gcmonitor", + "spark.heapsummary", + "spark.heapdump", + "spark.activity" + ), + permissions + ); + + TestCommandSender testSender = new TestCommandSender(); + assertFalse(platform.hasPermissionForAnyCommand(testSender)); + + testSender.permissions.add("spark.tps"); + assertTrue(platform.hasPermissionForAnyCommand(testSender)); + + testSender.permissions.clear(); + testSender.permissions.add("spark"); + assertTrue(platform.hasPermissionForAnyCommand(testSender)); + } + } + + private static final class TestCommandSender implements CommandSender { + private final Set<String> permissions = new HashSet<>(); + + @Override + public String getName() { + return "Test"; + } + + @Override + public UUID getUniqueId() { + return new UUID(0, 0); + } + + @Override + public void sendMessage(Component message) { + + } + + @Override + public 
boolean hasPermission(String permission) { + return this.permissions.contains(permission); + } + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/activitylog/ActivityLogTest.java b/spark-common/src/test/java/me/lucko/spark/common/activitylog/ActivityLogTest.java new file mode 100644 index 00000000000..4891960b9e6 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/activitylog/ActivityLogTest.java @@ -0,0 +1,56 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.activitylog; + +import me.lucko.spark.common.command.sender.CommandSender; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.nio.file.Path; +import java.util.UUID; +import java.util.stream.Collectors; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class ActivityLogTest { + + private static final CommandSender.Data USER = new CommandSender.Data("Test", UUID.fromString("5937921d-7051-45e1-bac7-3bbfdc12444f")); + + @Test + public void testSaveLoad(@TempDir Path tempDir) { + long time = System.currentTimeMillis(); + + ActivityLog log = new ActivityLog(tempDir.resolve("activity-log.json")); + log.addToLog(Activity.fileActivity(USER, time, "Profiler", "path/to/profile.sparkprofile")); + log.addToLog(Activity.urlActivity(USER, time, "Profiler", "https://spark.lucko.me/abcd")); + log.save(); + + ActivityLog log2 = new ActivityLog(tempDir.resolve("activity-log.json")); + log2.load(); + + // check the log contents + assertEquals( + log.getLog().stream().map(Activity::serialize).collect(Collectors.toList()), + log2.getLog().stream().map(Activity::serialize).collect(Collectors.toList()) + ); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/activitylog/ActivityTest.java b/spark-common/src/test/java/me/lucko/spark/common/activitylog/ActivityTest.java new file mode 100644 index 00000000000..5bf88f81892 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/activitylog/ActivityTest.java @@ -0,0 +1,78 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.activitylog; + +import com.google.gson.Gson; +import com.google.gson.JsonElement; +import me.lucko.spark.common.command.sender.CommandSender; +import org.junit.jupiter.api.Test; + +import java.util.UUID; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class ActivityTest { + private static final Gson GSON = new Gson(); + + private static final CommandSender.Data USER = new CommandSender.Data("Test", UUID.fromString("5937921d-7051-45e1-bac7-3bbfdc12444f")); + + private static final String FILE_ACTIVITY_JSON = "{\"user\":{\"type\":\"player\",\"name\":\"Test\",\"uniqueId\":\"5937921d-7051-45e1-bac7-3bbfdc12444f\"},\"time\":1721937782184,\"type\":\"Profiler\",\"data\":{\"type\":\"file\",\"value\":\"path/to/profile.sparkprofile\"}}"; + private static final String URL_ACTIVITY_JSON = "{\"user\":{\"type\":\"player\",\"name\":\"Test\",\"uniqueId\":\"5937921d-7051-45e1-bac7-3bbfdc12444f\"},\"time\":1721937782184,\"type\":\"Profiler\",\"data\":{\"type\":\"url\",\"value\":\"https://spark.lucko.me/abcd\"}}"; + + @Test + public void testSerialize() { + Activity fileActivity = Activity.fileActivity( + USER, + 1721937782184L, + "Profiler", + "path/to/profile.sparkprofile" + ); + assertEquals(FILE_ACTIVITY_JSON, GSON.toJson(fileActivity.serialize())); + + Activity urlActivity = Activity.urlActivity( + USER, + 1721937782184L, + "Profiler", + "https://spark.lucko.me/abcd" + ); + assertEquals(URL_ACTIVITY_JSON, GSON.toJson(urlActivity.serialize())); + } + + @Test + public void testDeserialize() { + Activity fileActivity = Activity.deserialize(GSON.fromJson(FILE_ACTIVITY_JSON, JsonElement.class)); + assertEquals(USER.getUniqueId(), fileActivity.getUser().getUniqueId()); + assertEquals(USER.getName(), fileActivity.getUser().getName()); + assertEquals(1721937782184L, fileActivity.getTime()); + assertEquals("Profiler", fileActivity.getType()); + assertEquals(Activity.DATA_TYPE_FILE, fileActivity.getDataType()); + assertEquals("path/to/profile.sparkprofile", fileActivity.getDataValue()); + + Activity urlActivity = Activity.deserialize(GSON.fromJson(URL_ACTIVITY_JSON, JsonElement.class)); + assertEquals(USER.getUniqueId(), urlActivity.getUser().getUniqueId()); + assertEquals(USER.getName(), urlActivity.getUser().getName()); + assertEquals(1721937782184L, urlActivity.getTime()); + assertEquals("Profiler", urlActivity.getType()); + assertEquals(Activity.DATA_TYPE_URL, urlActivity.getDataType()); + assertEquals("https://spark.lucko.me/abcd", urlActivity.getDataValue()); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/command/ArgumentsTest.java b/spark-common/src/test/java/me/lucko/spark/common/command/ArgumentsTest.java new file mode 100644 index 00000000000..dda6c5d4400 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/command/ArgumentsTest.java @@ -0,0 +1,137 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.command; + +import com.google.common.collect.ImmutableList; +import org.junit.jupiter.api.Test; + +import java.util.Set; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrowsExactly; +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class ArgumentsTest { + + @Test + public void testInitialParse() { + Arguments arguments = new Arguments(ImmutableList.of("hello"), true); + assertEquals("hello", arguments.subCommand()); + + Arguments.ParseException exception = assertThrowsExactly( + Arguments.ParseException.class, + () -> new Arguments(ImmutableList.of("hello"), false) + ); + assertEquals("Expected flag at position 0 but got 'hello' instead!", exception.getMessage()); + + exception = assertThrowsExactly( + Arguments.ParseException.class, + () -> new Arguments(ImmutableList.of("hello", "world"), true) + ); + assertEquals("Expected flag at position 1 but got 'world' instead!", exception.getMessage()); + } + + @Test + public void testStringFlag() { + Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello"), false); + + Set<String> values = arguments.stringFlag("test-flag"); + assertEquals(1, values.size()); + assertEquals("hello", values.iterator().next()); + } + + @Test + public void testStringFlagWithSpace() { + Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello", "world"), false); + + Set<String> values = arguments.stringFlag("test-flag"); + assertEquals(1, values.size()); + assertEquals("hello world", values.iterator().next()); + } + + @Test + public void testStringFlagWithMultipleValues() { + Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello", "--test-flag", "world"), false); + + Set<String> values = arguments.stringFlag("test-flag"); + assertEquals(2, values.size()); + assertEquals(ImmutableList.of("hello", "world"), ImmutableList.copyOf(values)); + } + + @Test + public void testMissingStringFlag() { + Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello"), false); + + Set<String> values = arguments.stringFlag("missing-flag"); + assertEquals(0, values.size()); + } + + @Test + public void testIntFlag() { + Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "123", "--negative-test", "-100"), false); + + int value = arguments.intFlag("test-flag"); + assertEquals(123, value); + + value = arguments.intFlag("negative-test"); + assertEquals(100, value); + } + + @Test + public void testMissingIntFlag() { + Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello"), false); + + int value = arguments.intFlag("missing-flag"); + assertEquals(-1, value); + } + + @Test + public void testDoubleFlag() { + Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "123.45", "--negative-test", "-100.5"), false); + + double value = arguments.doubleFlag("test-flag"); + assertEquals(123.45, value, 0.0001); + + value = arguments.doubleFlag("negative-test"); + assertEquals(100.5, value, 0.0001); + } + + @Test + public void testMissingDoubleFlag() { + Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello"), false); + + double value = arguments.doubleFlag("missing-flag"); + assertEquals(-1, value); + } + + @Test + public void testBooleanFlag() { + Arguments arguments = new 
Arguments(ImmutableList.of("--test-flag"), false); + + boolean value = arguments.boolFlag("test-flag"); + assertTrue(value); + + value = arguments.boolFlag("negative-test"); + assertFalse(value); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/heapdump/HeapDumpSummaryTest.java b/spark-common/src/test/java/me/lucko/spark/common/heapdump/HeapDumpSummaryTest.java new file mode 100644 index 00000000000..b2bb384ee0f --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/heapdump/HeapDumpSummaryTest.java @@ -0,0 +1,63 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.heapdump; + +import me.lucko.spark.proto.SparkHeapProtos; +import me.lucko.spark.test.TestClass; +import me.lucko.spark.test.plugin.TestCommandSender; +import me.lucko.spark.test.plugin.TestSparkPlugin; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.nio.file.Path; +import java.util.List; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +public class HeapDumpSummaryTest { + + @Test + public void testHeapDumpSummary(@TempDir Path directory) throws Exception { + TestClass testClass1 = new TestClass(); + TestClass testClass2 = new TestClass(); + + HeapDumpSummary dump = HeapDumpSummary.createNew(); + List<HeapDumpSummary.Entry> entries = dump.getEntries(); + + HeapDumpSummary.Entry thisClassEntry = entries.stream().filter(entry -> entry.getType().equals(TestClass.class.getName())).findAny().orElse(null); + assertNotNull(thisClassEntry); + assertEquals(2, thisClassEntry.getInstances()); + assertEquals(32, thisClassEntry.getBytes()); + + SparkHeapProtos.HeapData proto; + try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) { + proto = dump.toProto(plugin.platform(), TestCommandSender.INSTANCE.toData()); + } + assertNotNull(proto); + + SparkHeapProtos.HeapEntry protoEntry = proto.getEntriesList().stream().filter(entry -> entry.getType().equals(TestClass.class.getName())).findAny().orElse(null); + assertNotNull(protoEntry); + assertEquals(2, protoEntry.getInstances()); + assertEquals(32, protoEntry.getSize()); + } + +} diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java b/spark-common/src/test/java/me/lucko/spark/common/heapdump/HeapDumpTest.java similarity index 56% rename from spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java rename to spark-common/src/test/java/me/lucko/spark/common/heapdump/HeapDumpTest.java index 899ce58ccb6..5df5c5d9cf0 100644 --- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java +++ b/spark-common/src/test/java/me/lucko/spark/common/heapdump/HeapDumpTest.java @@ -18,27 +18,24 @@ * along with this program. If not, see . 
*/ -package me.lucko.spark.sponge; +package me.lucko.spark.common.heapdump; -import me.lucko.spark.common.sampler.source.ClassSourceLookup; - -import org.spongepowered.api.Game; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import java.nio.file.Files; import java.nio.file.Path; -public class Sponge7ClassSourceLookup extends ClassSourceLookup.ByCodeSource { - private final Path modsDirectory; - - public Sponge7ClassSourceLookup(Game game) { - this.modsDirectory = game.getGameDirectory().resolve("mods").toAbsolutePath().normalize(); - } +import static org.junit.jupiter.api.Assertions.assertTrue; - @Override - public String identifyFile(Path path) { - if (!path.startsWith(this.modsDirectory)) { - return null; - } +public class HeapDumpTest { - return super.identifyFileName(this.modsDirectory.relativize(path).toString()); + @Test + public void testHeapDump(@TempDir Path tempDir) throws Exception { + Path file = tempDir.resolve("heapdump.hprof"); + HeapDump.dumpHeap(file, false); + assertTrue(Files.exists(file)); + Files.delete(file); } + } diff --git a/spark-common/src/test/java/me/lucko/spark/common/monitor/cpu/CpuInfoTest.java b/spark-common/src/test/java/me/lucko/spark/common/monitor/cpu/CpuInfoTest.java new file mode 100644 index 00000000000..047e80dd696 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/monitor/cpu/CpuInfoTest.java @@ -0,0 +1,38 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.monitor.cpu; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +public class CpuInfoTest { + + @Test + public void testCpuInfo() { + String model = CpuInfo.queryCpuModel(); + assertNotNull(model); + assertFalse(model.isEmpty()); + System.out.println(model); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/monitor/cpu/CpuMonitorTest.java b/spark-common/src/test/java/me/lucko/spark/common/monitor/cpu/CpuMonitorTest.java new file mode 100644 index 00000000000..d554976eea2 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/monitor/cpu/CpuMonitorTest.java @@ -0,0 +1,35 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.monitor.cpu; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class CpuMonitorTest { + + @Test + public void testCpuLoad() { + assertTrue(CpuMonitor.processLoad() >= 0); + assertTrue(CpuMonitor.systemLoad() >= 0); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/monitor/disk/DiskUsageTest.java b/spark-common/src/test/java/me/lucko/spark/common/monitor/disk/DiskUsageTest.java new file mode 100644 index 00000000000..d961b2fc374 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/monitor/disk/DiskUsageTest.java @@ -0,0 +1,35 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.monitor.disk; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class DiskUsageTest { + + @Test + public void testDiskUsage() { + assertTrue(DiskUsage.getUsed() > 0); + assertTrue(DiskUsage.getTotal() > 0); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/monitor/memory/MemoryInfoTest.java b/spark-common/src/test/java/me/lucko/spark/common/monitor/memory/MemoryInfoTest.java new file mode 100644 index 00000000000..5ae8fdc0aea --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/monitor/memory/MemoryInfoTest.java @@ -0,0 +1,36 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.monitor.memory; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class MemoryInfoTest { + + @Test + public void testMemoryInfo() { + assertTrue(MemoryInfo.getUsedPhysicalMemory() > 0); + assertTrue(MemoryInfo.getTotalPhysicalMemory() > 0); + assertTrue(MemoryInfo.getAvailablePhysicalMemory() > 0); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfoTest.java b/spark-common/src/test/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfoTest.java new file mode 100644 index 00000000000..6b50584c6fa --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfoTest.java @@ -0,0 +1,57 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.monitor.net; + +import com.google.common.collect.ImmutableSet; +import org.junit.jupiter.api.Test; + +import java.util.Arrays; +import java.util.Map; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +public class NetworkInterfaceInfoTest { + + @Test + public void testLinuxProcParse() { + String input = + "Inter-| Receive | Transmit\n" + + " face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed\n" + + " lo: 2776770 11307 0 0 0 0 0 0 2776770 11307 0 0 0 0 0 0\n" + + " eth0: 1215645 2751 1 0 0 0 0 0 1782404 4324 2 0 0 427 0 0\n" + + " ppp0: 1622270 5552 1 0 0 0 0 0 354130 5669 0 0 0 0 0 0\n" + + " tap0: 7714 81 0 0 0 0 0 0 7714 81 0 0 0 0 0 0"; + + Map<String, NetworkInterfaceInfo> map = NetworkInterfaceInfo.read(Arrays.asList(input.split("\n"))); + assertNotNull(map); + assertEquals(ImmutableSet.of("lo", "eth0", "ppp0", "tap0"), map.keySet()); + + NetworkInterfaceInfo eth0 = map.get("eth0"); + assertEquals(1215645, eth0.getReceivedBytes()); + assertEquals(2751, eth0.getReceivedPackets()); + assertEquals(1, eth0.getReceiveErrors()); + assertEquals(1782404, eth0.getTransmittedBytes()); + assertEquals(4324, eth0.getTransmittedPackets()); + assertEquals(2, eth0.getTransmitErrors()); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/monitor/os/OperatingSystemInfoTest.java b/spark-common/src/test/java/me/lucko/spark/common/monitor/os/OperatingSystemInfoTest.java new file mode 100644 index 00000000000..3e4fd132a56 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/monitor/os/OperatingSystemInfoTest.java @@ -0,0 +1,42 @@ +/* + * This file is part of spark. 
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.monitor.os; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertNotNull; + +public class OperatingSystemInfoTest { + + @Test + public void testOperatingSystemInfo() { + OperatingSystemInfo info = OperatingSystemInfo.poll(); + assertNotNull(info); + assertNotNull(info.name()); + assertNotNull(info.version()); + assertNotNull(info.arch()); + + System.out.println(info.name()); + System.out.println(info.version()); + System.out.println(info.arch()); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/platform/PlatformStatisticsProviderTest.java b/spark-common/src/test/java/me/lucko/spark/common/platform/PlatformStatisticsProviderTest.java new file mode 100644 index 00000000000..4b26322c953 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/platform/PlatformStatisticsProviderTest.java @@ -0,0 +1,51 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.platform; + +import me.lucko.spark.proto.SparkProtos; +import me.lucko.spark.test.plugin.TestSparkPlugin; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.nio.file.Path; +import java.util.Collections; + +import static org.junit.jupiter.api.Assertions.assertNotNull; + +public class PlatformStatisticsProviderTest { + + @Test + public void testSystemStatistics(@TempDir Path directory) { + try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) { + SparkProtos.SystemStatistics systemStatistics = new PlatformStatisticsProvider(plugin.platform()).getSystemStatistics(); + assertNotNull(systemStatistics); + } + } + + @Test + public void testPlatformStatistics(@TempDir Path directory) { + try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) { + SparkProtos.PlatformStatistics platformStatistics = new PlatformStatisticsProvider(plugin.platform()).getPlatformStatistics(Collections.emptyMap(), true); + assertNotNull(platformStatistics); + } + } + +} diff --git a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomTickHook.java b/spark-common/src/test/java/me/lucko/spark/common/platform/SparkMetadataTest.java similarity index 53% rename from spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomTickHook.java rename to spark-common/src/test/java/me/lucko/spark/common/platform/SparkMetadataTest.java index 44c1c2d68e9..e2b8374616f 100644 --- a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomTickHook.java +++ b/spark-common/src/test/java/me/lucko/spark/common/platform/SparkMetadataTest.java @@ -18,30 +18,26 @@ * along with this program. If not, see . */ -package me.lucko.spark.minestom; +package me.lucko.spark.common.platform; -import me.lucko.spark.common.tick.AbstractTickHook; +import me.lucko.spark.test.plugin.TestCommandSender; +import me.lucko.spark.test.plugin.TestSparkPlugin; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; -import net.minestom.server.MinecraftServer; -import net.minestom.server.timer.Task; -import net.minestom.server.timer.TaskSchedule; +import java.nio.file.Path; +import java.util.Collections; -public class MinestomTickHook extends AbstractTickHook { - private Task task; +import static org.junit.jupiter.api.Assertions.assertNotNull; - @Override - public void start() { - this.task = MinecraftServer.getSchedulerManager() - .buildTask(this::onTick) - .delay(TaskSchedule.tick(1)) - .repeat(TaskSchedule.tick(1)) - .schedule(); - } +public class SparkMetadataTest { - @Override - public void close() { - if (this.task != null) { - this.task.cancel(); + @Test + public void testGather(@TempDir Path directory) { + try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) { + SparkMetadata metadata = SparkMetadata.gather(plugin.platform(), TestCommandSender.INSTANCE.toData(), Collections.emptyMap()); + assertNotNull(metadata); } } + } diff --git a/spark-common/src/test/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilterTest.java b/spark-common/src/test/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilterTest.java new file mode 100644 index 00000000000..ba6f958dd4e --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilterTest.java @@ -0,0 +1,106 @@ +/* + * This file is part of spark. 
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.serverconfig; + +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Maps; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonPrimitive; +import org.junit.jupiter.api.Test; + +import java.util.Map; +import java.util.Set; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class ExcludedConfigFilterTest { + + @Test + public void testFilter() { + Set<String> excluded = ImmutableSet.<String>builder() + .add("database") + .add("settings.bungeecord-addresses") + .add("rconpassword") + .add("world-settings.*.feature-seeds") + .add("world-settings.*.seed-*") + .build(); + + ExcludedConfigFilter filter = new ExcludedConfigFilter(excluded); + + JsonPrimitive value = new JsonPrimitive("hello"); + JsonObject before = obj( + element("hello", value), + element("database", obj( + element("username", value), + element("password", value) + )), + element("settings", obj( + element("hello", value), + element("bungeecord-addresses", value) + )), + element("rcon.password", value), + element("world-settings", obj( + element("world1", obj( + element("hello", value), + element("feature-seeds", value), + element("seed-test", value) + )), + element("world2", obj( + element("hello", value), + element("feature-seeds", value), + element("seed-test", value) + )) + )) + ); + JsonObject after = obj( + element("hello", value), + element("settings", obj( + element("hello", value) + )), + element("world-settings", obj( + element("world1", obj( + element("hello", value) + )), + element("world2", obj( + element("hello", value) + )) + )) + ); + + + assertEquals(after, filter.apply(before)); + } + + @SafeVarargs + private static JsonObject obj(Map.Entry<String, JsonElement>... elements) { + JsonObject object = new JsonObject(); + for (Map.Entry<String, JsonElement> element : elements) { + object.add(element.getKey(), element.getValue()); + } + return object; + } + + private static Map.Entry<String, JsonElement> element(String key, JsonElement value) { + return Maps.immutableEntry(key, value); + } + +} diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlayerPingProvider.java b/spark-common/src/test/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParserTest.java similarity index 54% rename from spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlayerPingProvider.java rename to spark-common/src/test/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParserTest.java index 18f36a7c4ab..2b686cac308 100644 --- a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlayerPingProvider.java +++ b/spark-common/src/test/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParserTest.java @@ -18,29 +18,33 @@ * along with this program. If not, see . 
*/ -package me.lucko.spark.velocity; +package me.lucko.spark.common.platform.serverconfig; import com.google.common.collect.ImmutableMap; -import com.velocitypowered.api.proxy.ProxyServer; -import com.velocitypowered.api.proxy.connection.Player; - -import me.lucko.spark.common.monitor.ping.PlayerPingProvider; +import org.junit.jupiter.api.Test; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.StringReader; import java.util.Map; -public class Velocity4PlayerPingProvider implements PlayerPingProvider { - private final ProxyServer proxy; +import static org.junit.jupiter.api.Assertions.assertEquals; - public Velocity4PlayerPingProvider(ProxyServer proxy) { - this.proxy = proxy; - } +public class PropertiesConfigParserTest { - @Override - public Map<String, Integer> poll() { - ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder(); - for (Player player : this.proxy.connectedPlayers()) { - builder.put(player.username(), (int) player.ping()); - } - return builder.build(); + @Test + public void testParse() throws IOException { + String properties = + "hello=world\n" + + "a.b.c=1\n" + + "foo=true\n"; + + Map<String, Object> parse = PropertiesConfigParser.INSTANCE.parse(new BufferedReader(new StringReader(properties))); + assertEquals(ImmutableMap.of( + "hello", "world", + "a.b.c", 1L, + "foo", true + ), parse); } + } diff --git a/spark-common/src/test/java/me/lucko/spark/common/platform/world/CountMapTest.java b/spark-common/src/test/java/me/lucko/spark/common/platform/world/CountMapTest.java new file mode 100644 index 00000000000..8dcc8a95230 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/platform/world/CountMapTest.java @@ -0,0 +1,49 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.platform.world; + +import org.junit.jupiter.api.Test; + +import java.util.HashMap; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class CountMapTest { + + @Test + public void testSimple() { + CountMap.Simple<String> countMap = new CountMap.Simple<>(new HashMap<>()); + assertTrue(countMap.asMap().isEmpty()); + + countMap.increment("test"); + assertTrue(countMap.asMap().containsKey("test")); + assertEquals(1, countMap.asMap().get("test").get()); + + countMap.add("test", 5); + assertEquals(6, countMap.asMap().get("test").get()); + + countMap.increment("test2"); + + assertEquals(7, countMap.total().get()); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/platform/world/WorldStatisticsProviderTest.java b/spark-common/src/test/java/me/lucko/spark/common/platform/world/WorldStatisticsProviderTest.java new file mode 100644 index 00000000000..c87b70dd47f --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/platform/world/WorldStatisticsProviderTest.java @@ -0,0 +1,126 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.platform.world; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import org.junit.jupiter.api.Test; + +import java.util.HashMap; +import java.util.List; +import java.util.Set; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class WorldStatisticsProviderTest { + + @Test + public void testGroupIntoRegionsEmpty() { + List<WorldStatisticsProvider.Region> regions = WorldStatisticsProvider.groupIntoRegions(ImmutableList.of()); + assertEquals(0, regions.size()); + } + + @Test + public void testGroupIntoRegionsSingle() { + TestChunkInfo chunk1 = new TestChunkInfo(0, 0); + List<WorldStatisticsProvider.Region> regions = WorldStatisticsProvider.groupIntoRegions(ImmutableList.of(chunk1)); + + assertEquals(1, regions.size()); + WorldStatisticsProvider.Region region = regions.get(0); + + Set<ChunkInfo<?>> chunks = region.getChunks(); + assertEquals(1, chunks.size()); + assertEquals(ImmutableSet.of(chunk1), chunks); + } + + @Test + public void testGroupIntoRegionsMultiple() { + TestChunkInfo chunk1 = new TestChunkInfo(0, 0); + TestChunkInfo chunk2 = new TestChunkInfo(0, 1); + TestChunkInfo chunk3 = new TestChunkInfo(1, 0); + TestChunkInfo chunk4 = new TestChunkInfo(0, 2); + + List<WorldStatisticsProvider.Region> regions = WorldStatisticsProvider.groupIntoRegions(ImmutableList.of(chunk1, chunk2, chunk3, chunk4)); + + assertEquals(1, regions.size()); + + WorldStatisticsProvider.Region region = regions.get(0); + Set<ChunkInfo<?>> chunks = region.getChunks(); + assertEquals(4, chunks.size()); + assertEquals(ImmutableSet.of(chunk1, chunk2, chunk3, chunk4), chunks); + } + + @Test + public void testGroupIntoRegionsMultipleRegions() { + TestChunkInfo chunk1 = new TestChunkInfo(0, 0); + TestChunkInfo chunk2 = new TestChunkInfo(0, 1); + TestChunkInfo chunk3 = new TestChunkInfo(1, 0); + TestChunkInfo chunk4 = new TestChunkInfo(2, 2); + + List<WorldStatisticsProvider.Region> regions = WorldStatisticsProvider.groupIntoRegions(ImmutableList.of(chunk1, chunk2, chunk3, chunk4)); + + assertEquals(2, regions.size()); + + WorldStatisticsProvider.Region region1 = regions.get(0); + Set<ChunkInfo<?>> chunks1 = region1.getChunks(); + assertEquals(3, chunks1.size()); + assertEquals(ImmutableSet.of(chunk1, chunk2, chunk3), chunks1); + + WorldStatisticsProvider.Region region2 = regions.get(1); + Set<ChunkInfo<?>> chunks2 = region2.getChunks(); + assertEquals(1, chunks2.size()); + assertEquals(ImmutableSet.of(chunk4), chunks2); + } + + private static final class TestChunkInfo implements ChunkInfo<String> { + private final int x; + private final int z; + private final CountMap<String> entityCounts; + + public TestChunkInfo(int x, int z) { + this.x = x; + this.z = z; + this.entityCounts = new CountMap.Simple<>(new HashMap<>()); + this.entityCounts.increment("test"); + } + + @Override + public int getX() { + return this.x; + } + + @Override + public int getZ() { + return this.z; + } + + @Override + public CountMap<String> getEntityCounts() { + return this.entityCounts; + } + + @Override + public String entityTypeName(String type) { + return type; + } + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/sampler/SamplerTest.java b/spark-common/src/test/java/me/lucko/spark/common/sampler/SamplerTest.java new file mode 100644 index 00000000000..07a6e35f030 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/sampler/SamplerTest.java @@ -0,0 +1,103 @@ +/* + * This file is part of spark. 
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler; + +import me.lucko.spark.common.sampler.java.MergeStrategy; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.proto.SparkSamplerProtos; +import me.lucko.spark.test.TestClass2; +import me.lucko.spark.test.plugin.TestCommandSender; +import me.lucko.spark.test.plugin.TestSparkPlugin; +import org.junit.jupiter.api.io.TempDir; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; + +import java.nio.file.Path; +import java.util.List; +import java.util.Locale; +import java.util.concurrent.TimeUnit; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assumptions.assumeTrue; + +public class SamplerTest { + + @ParameterizedTest + @EnumSource + public void testSampler(SamplerType samplerType, @TempDir Path directory) { + if (samplerType == SamplerType.ASYNC) { + String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", ""); + assumeTrue(os.equals("linux") || os.equals("macosx"), "async profiler is only supported on Linux and macOS"); + } + + Thread thread = new Thread(new TestClass2(), "Test Thread"); + thread.start(); + + try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) { + Sampler sampler = new SamplerBuilder() + .threadDumper(new ThreadDumper.Specific(thread)) + .threadGrouper(ThreadGrouper.BY_POOL) + .samplingInterval(10) + .forceJavaSampler(samplerType == SamplerType.JAVA) + .completeAfter(2, TimeUnit.SECONDS) + .start(plugin.platform()); + + String libraryVersion = sampler.getLibraryVersion(); + if (samplerType == SamplerType.ASYNC) { + assertNotNull(libraryVersion); + } else { + assertNull(libraryVersion); + } + + assertInstanceOf(samplerType.implClass(), sampler); + assertEquals(samplerType, sampler.getType()); + + assertNotEquals(-1, sampler.getAutoEndTime()); + sampler.getFuture().join(); + + Sampler.ExportProps exportProps = new Sampler.ExportProps() + .creator(TestCommandSender.INSTANCE.toData()) + .classSourceLookup(() -> ClassSourceLookup.create(plugin.platform())); + + if (samplerType == SamplerType.JAVA) { + exportProps.mergeStrategy(MergeStrategy.SAME_METHOD); + } + + SparkSamplerProtos.SamplerData proto = sampler.toProto(plugin.platform(), exportProps); + assertNotNull(proto); + + List<SparkSamplerProtos.ThreadNode> threads = proto.getThreadsList(); + assertEquals(1, threads.size()); + + SparkSamplerProtos.ThreadNode protoThread = threads.get(0); + assertEquals("Test Thread", protoThread.getName());
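+ + // assumption: TestClass2.run() calls test(), which in turn calls testA() and testB(); + // if so, each of those methods should appear as a child of the sampled "Test Thread" root: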
+ assertTrue(protoThread.getChildrenList().stream().anyMatch(n -> n.getClassName().equals("me.lucko.spark.test.TestClass2") && n.getMethodName().equals("test"))); + assertTrue(protoThread.getChildrenList().stream().anyMatch(n -> n.getClassName().equals("me.lucko.spark.test.TestClass2") && n.getMethodName().equals("testA"))); + assertTrue(protoThread.getChildrenList().stream().anyMatch(n -> n.getClassName().equals("me.lucko.spark.test.TestClass2") && n.getMethodName().equals("testB"))); + } + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/sampler/ThreadDumperTest.java b/spark-common/src/test/java/me/lucko/spark/common/sampler/ThreadDumperTest.java new file mode 100644 index 00000000000..b96f1493962 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/sampler/ThreadDumperTest.java @@ -0,0 +1,64 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler; + +import org.junit.jupiter.api.Test; + +import java.util.Set; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class ThreadDumperTest { + + @Test + public void testAll() { + assertTrue(ThreadDumper.ALL.isThreadIncluded(1, "test")); + assertTrue(ThreadDumper.ALL.isThreadIncluded(2, "test2")); + } + + @Test + public void testSpecific() { + Thread thread = new Thread(() -> { + try { + Thread.sleep(100_000); + } catch (InterruptedException e) { + // ignore + } + }, "test-thread-1"); + thread.start(); + + ThreadDumper.Specific specific = new ThreadDumper.Specific(thread); + + assertTrue(specific.isThreadIncluded(thread.getId(), "test-thread-1")); + + Set<Thread> threads = specific.getThreads(); + assertEquals(1, threads.size()); + assertTrue(threads.contains(thread)); + + Set<String> threadNames = specific.getThreadNames(); + assertEquals(1, threadNames.size()); + assertTrue(threadNames.contains("test-thread-1")); + + thread.interrupt(); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/sampler/ThreadGrouperTest.java b/spark-common/src/test/java/me/lucko/spark/common/sampler/ThreadGrouperTest.java new file mode 100644 index 00000000000..5f4e5aeb3fb --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/sampler/ThreadGrouperTest.java @@ -0,0 +1,80 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class ThreadGrouperTest { + + @Test + public void testByName() { + ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME.get(); + + String group = threadGrouper.getGroup(1, "main"); + assertEquals("main", group); + + String label = threadGrouper.getLabel("main"); + assertEquals("main", label); + } + + @Test + public void testAsOne() { + ThreadGrouper threadGrouper = ThreadGrouper.AS_ONE.get(); + + String group = threadGrouper.getGroup(1, "main"); + assertEquals("root", group); + + String label = threadGrouper.getLabel("root"); + assertEquals("All (x1)", label); + + group = threadGrouper.getGroup(2, "main2"); + assertEquals("root", group); + + label = threadGrouper.getLabel("root"); + assertEquals("All (x2)", label); + } + + @Test + public void testByPool() { + ThreadGrouper threadGrouper = ThreadGrouper.BY_POOL.get(); + + String group = threadGrouper.getGroup(1, "main"); + assertEquals("main", group); + + String label = threadGrouper.getLabel("main"); + assertEquals("main", label); + + group = threadGrouper.getGroup(2, "Test Pool - #1"); + assertEquals("Test Pool", group); + + label = threadGrouper.getLabel("Test Pool"); + assertEquals("Test Pool (x1)", label); + + group = threadGrouper.getGroup(3, "Test Pool - #2"); + assertEquals("Test Pool", group); + + label = threadGrouper.getLabel("Test Pool"); + assertEquals("Test Pool (x2)", label); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/sampler/async/ExceedingTicksFilterTest.java b/spark-common/src/test/java/me/lucko/spark/common/sampler/async/ExceedingTicksFilterTest.java new file mode 100644 index 00000000000..0a51b286294 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/sampler/async/ExceedingTicksFilterTest.java @@ -0,0 +1,114 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */
+
+package me.lucko.spark.common.sampler.async;
+
+import org.junit.jupiter.api.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicLong;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class ExceedingTicksFilterTest {
+
+    @Test
+    public void testAggregateEmpty() {
+        AtomicLong fakeNanos = new AtomicLong();
+        ExceedingTicksFilter filter = new ExceedingTicksFilter(1, fakeNanos::get);
+        assertEquals(0, filter.exceedingTicksCount());
+        assertFalse(filter.duringExceedingTick(0));
+    }
+
+    @Test
+    public void testAggregateEmptyAfterTicks() {
+        AtomicLong fakeNanos = new AtomicLong();
+        ExceedingTicksFilter filter = new ExceedingTicksFilter(1, fakeNanos::get);
+        tickWithDuration(filter, fakeNanos, 0);
+        tickWithDuration(filter, fakeNanos, 500_000); // 0.5 ms
+        tickWithDuration(filter, fakeNanos, 900_000); // 0.9 ms
+        assertEquals(0, filter.exceedingTicksCount());
+        assertFalse(filter.duringExceedingTick(0));
+    }
+
+    @Test
+    public void testAggregateOneExceeding() {
+        AtomicLong fakeNanos = new AtomicLong();
+        ExceedingTicksFilter filter = new ExceedingTicksFilter(1, fakeNanos::get);
+        tickWithDuration(filter, fakeNanos, 500_000); // 0.5 ms
+        long startOfExceeding = tickWithDuration(filter, fakeNanos, 1_500_000); // 1.5 ms
+        tickWithDuration(filter, fakeNanos, 500_000); // 0.5 ms
+        assertEquals(1, filter.exceedingTicksCount());
+        assertFalse(filter.duringExceedingTick(startOfExceeding - 1));
+        assertTrue(filter.duringExceedingTick(startOfExceeding));
+        assertTrue(filter.duringExceedingTick(startOfExceeding + 1));
+        assertTrue(filter.duringExceedingTick(startOfExceeding + 1_499_999));
+        assertTrue(filter.duringExceedingTick(startOfExceeding + 1_500_000));
+        assertFalse(filter.duringExceedingTick(startOfExceeding + 1_500_001));
+    }
+
+    @Test
+    public void testAggregateMultipleExceeding() {
+        AtomicLong fakeNanos = new AtomicLong();
+        ExceedingTicksFilter filter = new ExceedingTicksFilter(1, fakeNanos::get);
+        List<Long> starts = new ArrayList<>();
+        for (int i = 0; i < 10; i++) {
+            tickWithDuration(filter, fakeNanos, 500_000); // 0.5 ms
+            long startOfExceeding = tickWithDuration(filter, fakeNanos, 1_500_000); // 1.5 ms
+            starts.add(startOfExceeding);
+            tickWithDuration(filter, fakeNanos, 500_000); // 0.5 ms
+        }
+        assertEquals(10, filter.exceedingTicksCount());
+        for (long startOfExceeding : starts) {
+            assertFalse(filter.duringExceedingTick(startOfExceeding - 1));
+            assertTrue(filter.duringExceedingTick(startOfExceeding + 1));
+            assertTrue(filter.duringExceedingTick(startOfExceeding));
+            assertTrue(filter.duringExceedingTick(startOfExceeding + 1_499_999));
+            assertTrue(filter.duringExceedingTick(startOfExceeding + 1_500_000));
+            assertFalse(filter.duringExceedingTick(startOfExceeding + 1_500_001));
+        }
+    }
+
+    @Test
+    public void testAggregateDuringTicking() {
+        AtomicLong fakeNanos = new AtomicLong();
+        ExceedingTicksFilter filter = new ExceedingTicksFilter(1, fakeNanos::get);
+        // no exceeding tick at time 1 yet
+        assertFalse(filter.duringExceedingTick(1));
+        tickWithDuration(filter, fakeNanos, 1_500_000);
+        // tick exceeded at time 1 now
+        assertTrue(filter.duringExceedingTick(1));
+        // exceeded tick is still there
+        assertTrue(filter.duringExceedingTick(1));
+        // time after the exceeded tick
+        assertFalse(filter.duringExceedingTick(1_500_001));
+        // the exceeded tick was consumed now already
+        assertFalse(filter.duringExceedingTick(1));
+    }
+
+    private static long tickWithDuration(ExceedingTicksFilter filter, AtomicLong fakeNanos, long durationNanos) {
+        long before = fakeNanos.getAndAdd(durationNanos);
+        filter.onTick(durationNanos / 1_000_000d);
+        return before;
+    }
+}
\ No newline at end of file
diff --git a/spark-common/src/test/java/me/lucko/spark/common/sampler/node/NodeTest.java b/spark-common/src/test/java/me/lucko/spark/common/sampler/node/NodeTest.java
new file mode 100644
index 00000000000..52477ce408e
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/sampler/node/NodeTest.java
@@ -0,0 +1,196 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.node;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+import me.lucko.spark.common.sampler.SamplerMode;
+import me.lucko.spark.common.sampler.async.AsyncNodeExporter;
+import me.lucko.spark.common.sampler.async.AsyncStackTraceElement;
+import me.lucko.spark.common.sampler.window.ProtoTimeEncoder;
+import me.lucko.spark.proto.SparkSamplerProtos;
+import org.junit.jupiter.api.Test;
+
+import java.util.Collection;
+import java.util.concurrent.TimeUnit;
+
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class NodeTest {
+
+    private static final StackTraceNode.Describer<AsyncStackTraceElement> STACK_TRACE_DESCRIBER = (element, parent) -> new StackTraceNode.AsyncDescription(element.getClassName(), element.getMethodName(), element.getMethodDescription());
+    private static final int WINDOW = 10;
+
+    private static final AsyncStackTraceElement NODE_0 = new AsyncStackTraceElement("java.lang.Thread", "run", "()V");
+    private static final AsyncStackTraceElement NODE_1_1 = new AsyncStackTraceElement("test.Foo", "run", "()V");
+    private static final AsyncStackTraceElement NODE_1_2_1 = new AsyncStackTraceElement("test.Foo", "example", "()V");
+    private static final AsyncStackTraceElement NODE_2_1 = new AsyncStackTraceElement("test.Bar", "run", "()V");
+    private static final AsyncStackTraceElement NODE_2_2_1 = new AsyncStackTraceElement("test.Bar", "example", "()V");
+
+    private static final AsyncStackTraceElement[] STACK_1 = {NODE_1_2_1, NODE_1_1, NODE_0};
+    private static final AsyncStackTraceElement[] STACK_2 = {NODE_2_2_1, NODE_2_1, NODE_0};
+
+    @Test
+    public void testThreadLabels() {
+        ThreadNode node = new ThreadNode("Test Thread");
+        assertEquals("Test Thread", node.getThreadGroup());
+        assertEquals("Test Thread", node.getThreadLabel());
+
+        node.setThreadLabel("Test");
+        assertEquals("Test", node.getThreadLabel());
+    }
+
+    @Test
+    public void testBasicLog() {
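+        // log two different stacks against the same thread, then assert on the shape of the merged call tree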
+        ThreadNode threadNode = new ThreadNode("Test Thread");
+        assertEquals(0, threadNode.getTimeWindows().size());
+
+        threadNode.log(STACK_TRACE_DESCRIBER, STACK_1, TimeUnit.SECONDS.toMicros(1), WINDOW);
+
+        Collection<StackTraceNode> children1 = threadNode.getChildren();
+        assertEquals(1, children1.size());
+        assertEquals(ImmutableSet.of(WINDOW), threadNode.getTimeWindows());
+
+        StackTraceNode node1 = children1.iterator().next();
+        assertEquals(ImmutableSet.of(WINDOW), node1.getTimeWindows());
+        assertEquals("java.lang.Thread", node1.getClassName());
+        assertEquals("run", node1.getMethodName());
+        assertEquals("()V", node1.getMethodDescription());
+        assertEquals(StackTraceNode.NULL_LINE_NUMBER, node1.getLineNumber());
+        assertEquals(StackTraceNode.NULL_LINE_NUMBER, node1.getParentLineNumber());
+        assertEquals(TimeUnit.SECONDS.toMicros(1), node1.getTimeAccumulator(WINDOW).longValue());
+
+        threadNode.log(STACK_TRACE_DESCRIBER, STACK_2, TimeUnit.SECONDS.toMicros(1), WINDOW);
+        assertEquals(TimeUnit.SECONDS.toMicros(2), node1.getTimeAccumulator(WINDOW).longValue());
+
+        Collection<StackTraceNode> children2 = node1.getChildren();
+        assertEquals(2, children2.size());
+
+        for (StackTraceNode node2 : children2) {
+            assertEquals(ImmutableSet.of(WINDOW), node2.getTimeWindows());
+            assertEquals(TimeUnit.SECONDS.toMicros(1), node2.getTimeAccumulator(WINDOW).longValue());
+        }
+    }
+
+    @Test
+    public void testExport() {
+        ThreadNode threadNode = new ThreadNode("Test Thread");
+        threadNode.log(STACK_TRACE_DESCRIBER, STACK_1, TimeUnit.SECONDS.toMicros(1), WINDOW);
+        threadNode.log(STACK_TRACE_DESCRIBER, STACK_1, TimeUnit.SECONDS.toMicros(1), WINDOW + 1);
+        threadNode.log(STACK_TRACE_DESCRIBER, STACK_2, TimeUnit.SECONDS.toMicros(1), WINDOW + 1);
+
+        ProtoTimeEncoder timeEncoder = new ProtoTimeEncoder(SamplerMode.EXECUTION.valueTransformer(), ImmutableList.of(threadNode));
+        int[] keys = timeEncoder.getKeys();
+        assertArrayEquals(new int[]{WINDOW, WINDOW + 1}, keys);
+
+        SparkSamplerProtos.ThreadNode proto = new AsyncNodeExporter(timeEncoder).export(threadNode);
+
+        SparkSamplerProtos.ThreadNode expected = SparkSamplerProtos.ThreadNode.newBuilder()
+                .setName("Test Thread")
+                .addTimes(1000)
+                .addTimes(2000)
+                .addChildren(SparkSamplerProtos.StackTraceNode.newBuilder()
+                        .setClassName("test.Bar")
+                        .setMethodDesc("()V")
+                        .setMethodName("example")
+                        .addTimes(0)
+                        .addTimes(1000)
+                )
+                .addChildren(SparkSamplerProtos.StackTraceNode.newBuilder()
+                        .setClassName("test.Bar")
+                        .setMethodDesc("()V")
+                        .setMethodName("run")
+                        .addTimes(0)
+                        .addTimes(1000)
+                        .addChildrenRefs(0)
+                )
+                .addChildren(SparkSamplerProtos.StackTraceNode.newBuilder()
+                        .setClassName("test.Foo")
+                        .setMethodDesc("()V")
+                        .setMethodName("example")
+                        .addTimes(1000)
+                        .addTimes(1000)
+                )
+                .addChildren(SparkSamplerProtos.StackTraceNode.newBuilder()
+                        .setClassName("test.Foo")
+                        .setMethodDesc("()V")
+                        .setMethodName("run")
+                        .addTimes(1000)
+                        .addTimes(1000)
+                        .addChildrenRefs(2)
+                )
+                .addChildren(SparkSamplerProtos.StackTraceNode.newBuilder()
+                        .setClassName("java.lang.Thread")
+                        .setMethodDesc("()V")
+                        .setMethodName("run")
+                        .addTimes(1000)
+                        .addTimes(2000)
+                        .addChildrenRefs(1)
+                        .addChildrenRefs(3)
+                )
+                .addChildrenRefs(4)
+                .build();
+
+        assertEquals(expected, proto);
+    }
+
+    @Test
+    public void testRemoveTimeWindows() {
+        ThreadNode threadNode = new ThreadNode("Test Thread");
+        threadNode.log(STACK_TRACE_DESCRIBER, STACK_1, TimeUnit.SECONDS.toMicros(1), WINDOW);
+        threadNode.log(STACK_TRACE_DESCRIBER, STACK_2, TimeUnit.SECONDS.toMicros(1), WINDOW + 1);
+
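+        // the thread's only child is the shared java.lang.Thread.run frame; its children are the test.Foo / test.Bar branches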
+        StackTraceNode threadRunNode = threadNode.getChildren().iterator().next();
+        Collection<StackTraceNode> fooBarNodes = threadRunNode.getChildren();
+
+        assertEquals(2, threadNode.getTimeWindows().size());
+        assertEquals(2, threadRunNode.getChildren().size());
+        assertEquals(2, threadRunNode.getTimeWindows().size());
+
+        for (StackTraceNode node : fooBarNodes) {
+            assertEquals(1, node.getTimeWindows().size());
+            assertEquals(1, node.getChildren().size());
+            assertEquals(1, node.getChildren().iterator().next().getTimeWindows().size());
+            assertEquals(0, node.getChildren().iterator().next().getChildren().size());
+        }
+
+        assertFalse(threadNode.removeTimeWindowsRecursively(w -> w == WINDOW));
+        assertEquals(1, threadNode.getTimeWindows().size());
+        assertEquals(1, threadRunNode.getChildren().size());
+        assertEquals(1, threadRunNode.getTimeWindows().size());
+
+        assertTrue(threadNode.removeTimeWindowsRecursively(w -> w == WINDOW + 1));
+        assertEquals(0, threadNode.getTimeWindows().size());
+        assertEquals(0, threadNode.getChildren().size());
+
+        // doesn't bother updating nested children that have been removed
+        for (StackTraceNode node : fooBarNodes) {
+            assertEquals(1, node.getTimeWindows().size());
+            assertEquals(1, node.getChildren().size());
+            assertEquals(1, node.getChildren().iterator().next().getTimeWindows().size());
+            assertEquals(0, node.getChildren().iterator().next().getChildren().size());
+        }
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtilsTest.java b/spark-common/src/test/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtilsTest.java
new file mode 100644
index 00000000000..4161fe99a6a
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtilsTest.java
@@ -0,0 +1,68 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */ + +package me.lucko.spark.common.sampler.window; + +import org.junit.jupiter.api.Test; + +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.Month; +import java.time.ZoneOffset; +import java.util.concurrent.TimeUnit; +import java.util.function.IntPredicate; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class ProfilingWindowUtilsTest { + + @Test + public void testMillisToWindow() { + int baseWindow = 28532770; + Instant baseTime = LocalDateTime.of(2024, Month.APRIL, 1, 10, 10, 0).toInstant(ZoneOffset.UTC); + + assertEquals(TimeUnit.MILLISECONDS.toMinutes(baseTime.toEpochMilli()), baseWindow); // should scale with unix time + + assertEquals(baseWindow, ProfilingWindowUtils.unixMillisToWindow(baseTime.toEpochMilli())); + assertEquals(baseWindow, ProfilingWindowUtils.unixMillisToWindow(baseTime.plusMillis(1).toEpochMilli())); + assertEquals(baseWindow, ProfilingWindowUtils.unixMillisToWindow(baseTime.plusSeconds(1).toEpochMilli())); + assertEquals(baseWindow, ProfilingWindowUtils.unixMillisToWindow(baseTime.plusSeconds(59).toEpochMilli())); + assertEquals(baseWindow + 1, ProfilingWindowUtils.unixMillisToWindow(baseTime.plusSeconds(60).toEpochMilli())); + assertEquals(baseWindow + 1, ProfilingWindowUtils.unixMillisToWindow(baseTime.plusSeconds(61).toEpochMilli())); + assertEquals(baseWindow - 1, ProfilingWindowUtils.unixMillisToWindow(baseTime.minusMillis(1).toEpochMilli())); + assertEquals(baseWindow - 1, ProfilingWindowUtils.unixMillisToWindow(baseTime.minusSeconds(1).toEpochMilli())); + } + + @Test + public void testKeepHistoryBefore() { + IntPredicate predicate = ProfilingWindowUtils.keepHistoryBefore(100); + assertFalse(predicate.test(99)); + assertFalse(predicate.test(100)); + assertFalse(predicate.test(101)); + + assertFalse(predicate.test(40)); + assertTrue(predicate.test(39)); + assertTrue(predicate.test(0)); + assertTrue(predicate.test(-10)); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoderTest.java b/spark-common/src/test/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoderTest.java new file mode 100644 index 00000000000..82dcb12521a --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoderTest.java @@ -0,0 +1,54 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.sampler.window; + +import com.google.common.collect.ImmutableMap; +import org.junit.jupiter.api.Test; + +import java.util.concurrent.atomic.LongAdder; +import java.util.stream.IntStream; + +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class ProtoTimeEncoderTest { + + @Test + public void testSimple() { + ProtoTimeEncoder encoder = new ProtoTimeEncoder(l -> l, IntStream.of(7, 1, 3, 5)); + assertArrayEquals(new int[]{1, 3, 5, 7}, encoder.getKeys()); + + assertArrayEquals(new double[]{0, 0, 0, 0}, encoder.encode(ImmutableMap.of())); + assertArrayEquals(new double[]{0, 100, 0, 0}, encoder.encode(ImmutableMap.of(3, longAdder(100)))); + assertArrayEquals(new double[]{0, 100, 200, 0}, encoder.encode(ImmutableMap.of(3, longAdder(100), 5, longAdder(200)))); + + RuntimeException ex = assertThrows(RuntimeException.class, () -> encoder.encode(ImmutableMap.of(9, longAdder(300)))); + assertTrue(ex.getMessage().startsWith("No index for key 9")); + } + + private static LongAdder longAdder(long l) { + LongAdder longAdder = new LongAdder(); + longAdder.add(l); + return longAdder; + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/tick/TickHookTest.java b/spark-common/src/test/java/me/lucko/spark/common/tick/TickHookTest.java new file mode 100644 index 00000000000..f9202620270 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/tick/TickHookTest.java @@ -0,0 +1,69 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.tick; + +import com.google.common.collect.ImmutableList; +import org.junit.jupiter.api.Test; + +import java.util.ArrayList; +import java.util.List; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class TickHookTest { + + @Test + public void testAbstractHook() { + AbstractTickHook hook = new AbstractTickHook() { + @Override + public void start() { + + } + + @Override + public void close() { + + } + }; + + assertEquals(0, hook.getCurrentTick()); + + List ticks = new ArrayList<>(); + TickHook.Callback callback = ticks::add; + + hook.addCallback(callback); + + hook.onTick(); + assertEquals(1, hook.getCurrentTick()); + assertEquals(ImmutableList.of(0), ticks); + + hook.onTick(); + assertEquals(2, hook.getCurrentTick()); + assertEquals(ImmutableList.of(0, 1), ticks); + + hook.removeCallback(callback); + + hook.onTick(); + assertEquals(3, hook.getCurrentTick()); + assertEquals(ImmutableList.of(0, 1), ticks); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/tick/TickReporterTest.java b/spark-common/src/test/java/me/lucko/spark/common/tick/TickReporterTest.java new file mode 100644 index 00000000000..f7402c5c37d --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/tick/TickReporterTest.java @@ -0,0 +1,94 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */
+
+package me.lucko.spark.common.tick;
+
+import com.google.common.collect.ImmutableList;
+import org.junit.jupiter.api.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class TickReporterTest {
+
+    @Test
+    public void testAbstractReporter() {
+        AbstractTickReporter reporter = new AbstractTickReporter() {
+            @Override
+            public void start() {
+
+            }
+
+            @Override
+            public void close() {
+
+            }
+        };
+
+        List<Double> durations = new ArrayList<>();
+        TickReporter.Callback callback = durations::add;
+
+        reporter.addCallback(callback);
+
+        reporter.onTick(1.0);
+        assertEquals(ImmutableList.of(1.0), durations);
+
+        reporter.onTick(2.0);
+        assertEquals(ImmutableList.of(1.0, 2.0), durations);
+
+        reporter.removeCallback(callback);
+
+        reporter.onTick(3.0);
+        assertEquals(ImmutableList.of(1.0, 2.0), durations);
+    }
+
+    @Test
+    public void testSimpleReporter() {
+        SimpleTickReporter reporter = new SimpleTickReporter() {
+            @Override
+            public void start() {
+
+            }
+        };
+
+        List<Double> durations = new ArrayList<>();
+        TickReporter.Callback callback = durations::add;
+
+        reporter.addCallback(callback);
+
+        reporter.onStart();
+        assertEquals(0, durations.size());
+
+        try {
+            Thread.sleep(10);
+        } catch (InterruptedException e) {
+            // ignore
+        }
+
+        reporter.onEnd();
+
+        assertEquals(1, durations.size());
+        assertTrue(durations.get(0) > 0);
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/util/FormatUtilTest.java b/spark-common/src/test/java/me/lucko/spark/common/util/FormatUtilTest.java
new file mode 100644
index 00000000000..5b77fb52ffd
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/util/FormatUtilTest.java
@@ -0,0 +1,67 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */ + +package me.lucko.spark.common.util; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class FormatUtilTest { + + @Test + public void testPercent() { + assertEquals("50%", FormatUtil.percent(0.5, 1)); + assertEquals("100%", FormatUtil.percent(1, 1)); + assertEquals("0%", FormatUtil.percent(0, 1)); + + assertEquals("50%", FormatUtil.percent(50, 100)); + assertEquals("100%", FormatUtil.percent(100, 100)); + assertEquals("0%", FormatUtil.percent(0, 100)); + } + + @Test + public void testBytes() { + assertEquals("0 bytes", FormatUtil.formatBytes(0)); + assertEquals("1.0 bytes", FormatUtil.formatBytes(1)); + assertEquals("1.0 KB", FormatUtil.formatBytes(1024)); + assertEquals("1.0 MB", FormatUtil.formatBytes(1024 * 1024)); + assertEquals("1.0 GB", FormatUtil.formatBytes(1024 * 1024 * 1024)); + assertEquals("1.0 TB", FormatUtil.formatBytes(1024L * 1024 * 1024 * 1024)); + + assertEquals("2.5 KB", FormatUtil.formatBytes((long) (1024 * 2.5d))); + assertEquals("2.5 MB", FormatUtil.formatBytes((long) (1024 * 1024 * 2.5d))); + } + + @Test + public void testSeconds() { + assertEquals("0s", FormatUtil.formatSeconds(0)); + assertEquals("1s", FormatUtil.formatSeconds(1)); + assertEquals("59s", FormatUtil.formatSeconds(59)); + assertEquals("1m", FormatUtil.formatSeconds(60)); + assertEquals("1m 1s", FormatUtil.formatSeconds(61)); + assertEquals("1m 59s", FormatUtil.formatSeconds(119)); + assertEquals("2m", FormatUtil.formatSeconds(120)); + assertEquals("2m 1s", FormatUtil.formatSeconds(121)); + assertEquals("2m 59s", FormatUtil.formatSeconds(179)); + assertEquals("3m", FormatUtil.formatSeconds(180)); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/util/IndexedListBuilderTest.java b/spark-common/src/test/java/me/lucko/spark/common/util/IndexedListBuilderTest.java new file mode 100644 index 00000000000..bb3820e7fd9 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/util/IndexedListBuilderTest.java @@ -0,0 +1,42 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.util; + +import org.junit.jupiter.api.Test; + +import java.util.Arrays; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class IndexedListBuilderTest { + + @Test + public void testIndexes() { + IndexedListBuilder builder = new IndexedListBuilder<>(); + + assertEquals(0, builder.add("a")); + assertEquals(1, builder.add("b")); + assertEquals(2, builder.add("c")); + + assertEquals(Arrays.asList("a", "b", "c"), builder.build()); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/util/JavaVersionTest.java b/spark-common/src/test/java/me/lucko/spark/common/util/JavaVersionTest.java new file mode 100644 index 00000000000..469c5bf422e --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/util/JavaVersionTest.java @@ -0,0 +1,43 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.util; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class JavaVersionTest { + + @Test + public void testJavaVersion() { + assertEquals(7, JavaVersion.parseJavaVersion("1.7")); + assertEquals(8, JavaVersion.parseJavaVersion("1.8")); + assertEquals(9, JavaVersion.parseJavaVersion("9")); + assertEquals(11, JavaVersion.parseJavaVersion("11")); + assertEquals(17, JavaVersion.parseJavaVersion("17")); + assertEquals(9, JavaVersion.parseJavaVersion("9.0.1")); + assertEquals(11, JavaVersion.parseJavaVersion("11.0.1")); + assertEquals(17, JavaVersion.parseJavaVersion("17.0.1")); + assertEquals(17, JavaVersion.parseJavaVersion("17-ea")); + assertEquals(17, JavaVersion.parseJavaVersion("17.0.1-ea")); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/util/MethodDisambiguatorTest.java b/spark-common/src/test/java/me/lucko/spark/common/util/MethodDisambiguatorTest.java new file mode 100644 index 00000000000..ad2f1e1d75f --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/util/MethodDisambiguatorTest.java @@ -0,0 +1,66 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.util; + +import me.lucko.spark.common.util.MethodDisambiguator.MethodDescription; +import me.lucko.spark.common.util.classfinder.FallbackClassFinder; +import me.lucko.spark.test.TestClass; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; +import org.junit.jupiter.params.provider.ValueSource; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; + +public class MethodDisambiguatorTest { + + private static final MethodDisambiguator DISAMBIGUATOR = new MethodDisambiguator(FallbackClassFinder.INSTANCE); + + @ParameterizedTest + @CsvSource({ + "25, test(Ljava/lang/String;)V", + "26, test(Ljava/lang/String;)V", + "27, test(Ljava/lang/String;)V", + "28, test(Ljava/lang/String;)V", + "31, test(I)V", + "32, test(I)V", + "33, test(I)V", + "34, test(I)V", + "37, test(Z)V", + "38, test(Z)V", + "39, test(Z)V", + "40, test(Z)V", + }) + public void testSuccessfulDisambiguate(int line, String expectedDesc) { + MethodDescription method = DISAMBIGUATOR.disambiguate(TestClass.class.getName(), "test", line).orElse(null); + assertNotNull(method); + assertEquals(expectedDesc, method.toString()); + } + + @ParameterizedTest + @ValueSource(ints = {24, 29, 100}) + public void testUnsuccessfulDisambiguate(int line) { + MethodDescription method = DISAMBIGUATOR.disambiguate(TestClass.class.getName(), "test", line).orElse(null); + assertNull(method); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/util/RollingAverageTest.java b/spark-common/src/test/java/me/lucko/spark/common/util/RollingAverageTest.java new file mode 100644 index 00000000000..a5b4a00d92b --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/util/RollingAverageTest.java @@ -0,0 +1,81 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.util; + +import org.junit.jupiter.api.Test; + +import java.math.BigDecimal; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class RollingAverageTest { + + @Test + public void testMean() { + RollingAverage ra = new RollingAverage(3); + ra.add(BigDecimal.valueOf(1)); + ra.add(BigDecimal.valueOf(2)); + ra.add(BigDecimal.valueOf(3)); + + assertEquals(2, ra.mean()); + ra.add(BigDecimal.valueOf(4)); + assertEquals(3, ra.mean()); + ra.add(BigDecimal.valueOf(5)); + assertEquals(4, ra.mean()); + ra.add(BigDecimal.valueOf(6)); + assertEquals(5, ra.mean()); + } + + @Test + public void testMax() { + RollingAverage ra = new RollingAverage(3); + ra.add(BigDecimal.valueOf(1)); + ra.add(BigDecimal.valueOf(2)); + ra.add(BigDecimal.valueOf(3)); + + assertEquals(3, ra.max()); + } + + @Test + public void testMin() { + RollingAverage ra = new RollingAverage(3); + ra.add(BigDecimal.valueOf(1)); + ra.add(BigDecimal.valueOf(2)); + ra.add(BigDecimal.valueOf(3)); + + assertEquals(1, ra.min()); + } + + @Test + public void testPercentile() { + RollingAverage ra = new RollingAverage(3); + ra.add(BigDecimal.valueOf(1)); + ra.add(BigDecimal.valueOf(2)); + ra.add(BigDecimal.valueOf(3)); + + assertEquals(1, ra.percentile(0)); + assertEquals(2, ra.percentile(0.25)); + assertEquals(2, ra.percentile(0.5)); + assertEquals(3, ra.percentile(0.75)); + assertEquals(3, ra.percentile(1)); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/util/TemporaryFilesTest.java b/spark-common/src/test/java/me/lucko/spark/common/util/TemporaryFilesTest.java new file mode 100644 index 00000000000..9e073818c1c --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/util/TemporaryFilesTest.java @@ -0,0 +1,77 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.util; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class TemporaryFilesTest { + + @Test + public void testDelete(@TempDir Path tempDir) throws IOException { + Path dir = tempDir.resolve("test"); + TemporaryFiles temporaryFiles = new TemporaryFiles(dir); + + assertTrue(Files.exists(dir) && Files.isDirectory(dir)); + assertTrue(Files.exists(dir.resolve("about.txt"))); + assertEquals("# What is this directory?", Files.readAllLines(dir.resolve("about.txt")).get(0)); + + Path temporaryFile = temporaryFiles.create("test", ".txt"); + Files.write(temporaryFile, "Hello, world!".getBytes()); + + assertTrue(Files.exists(temporaryFile)); + temporaryFiles.deleteTemporaryFiles(); + assertFalse(Files.exists(temporaryFile)); + } + + @Test + public void testCleanupOnInit(@TempDir Path tempDir) throws IOException { + Path dir = tempDir.resolve("test"); + + Path nestedDirectory = dir.resolve("hello").resolve("world"); + Files.createDirectories(nestedDirectory); + + Path testFile = nestedDirectory.resolve("file.txt"); + Files.write(testFile, "Hello, world!".getBytes()); + assertTrue(Files.exists(testFile)); + + TemporaryFiles temporaryFiles = new TemporaryFiles(dir); + + assertFalse(Files.exists(testFile)); + } + + @Test + public void testSecondInit(@TempDir Path tempDir) throws IOException { + Path dir = tempDir.resolve("test"); + + TemporaryFiles temporaryFiles = new TemporaryFiles(dir); + TemporaryFiles temporaryFiles2 = new TemporaryFiles(dir); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/util/ThreadFinderTest.java b/spark-common/src/test/java/me/lucko/spark/common/util/ThreadFinderTest.java new file mode 100644 index 00000000000..bffbf278522 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/util/ThreadFinderTest.java @@ -0,0 +1,50 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.util; + +import org.junit.jupiter.api.Test; + +import java.util.List; +import java.util.stream.Collectors; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class ThreadFinderTest { + + @Test + public void testFindThread() { + Thread thread = new Thread(() -> { + try { + Thread.sleep(100_000); + } catch (InterruptedException e) { + // ignore + } + }, "test-thread-1"); + thread.start(); + + ThreadFinder threadFinder = new ThreadFinder(); + List threads = threadFinder.getThreads().collect(Collectors.toList()); + assertTrue(threads.contains(thread)); + + thread.interrupt(); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/test/TestClass.java b/spark-common/src/test/java/me/lucko/spark/test/TestClass.java new file mode 100644 index 00000000000..bd7ff4afe1e --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/test/TestClass.java @@ -0,0 +1,41 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.test; + +public class TestClass { + public void test(String string) { + System.out.println("Hello, world!"); + System.out.println("Hello, world!"); + System.out.println("Hello, world!"); + } + + public void test(int integer) { + System.out.println("Hello, world!"); + System.out.println("Hello, world!"); + System.out.println("Hello, world!"); + } + + public void test(boolean bool) { + System.out.println("Hello, world!"); + System.out.println("Hello, world!"); + System.out.println("Hello, world!"); + } +} diff --git a/spark-common/src/test/java/me/lucko/spark/test/TestClass2.java b/spark-common/src/test/java/me/lucko/spark/test/TestClass2.java new file mode 100644 index 00000000000..17ee88ef2e1 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/test/TestClass2.java @@ -0,0 +1,52 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.test; + +public class TestClass2 implements Runnable { + + @Override + public void run() { + while (true) { + try { + test(); + } catch (InterruptedException e) { + return; + } + } + } + + public void test() throws InterruptedException { + for (int i = 0; i < 10; i++) { + testA(); + Thread.sleep(100); + testB(); + } + } + + public void testA() throws InterruptedException { + Thread.sleep(100); + } + + public void testB() throws InterruptedException { + Thread.sleep(100); + + } +} diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlatformInfo.java b/spark-common/src/test/java/me/lucko/spark/test/plugin/TestCommandSender.java similarity index 58% rename from spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlatformInfo.java rename to spark-common/src/test/java/me/lucko/spark/test/plugin/TestCommandSender.java index bb2f26b664d..294b0732e2f 100644 --- a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlatformInfo.java +++ b/spark-common/src/test/java/me/lucko/spark/test/plugin/TestCommandSender.java @@ -18,36 +18,36 @@ * along with this program. If not, see . */ -package me.lucko.spark.velocity; +package me.lucko.spark.test.plugin; -import com.velocitypowered.api.proxy.ProxyServer; +import me.lucko.spark.common.command.sender.CommandSender; +import net.kyori.adventure.text.Component; +import net.kyori.adventure.text.serializer.ansi.ANSIComponentSerializer; -import me.lucko.spark.common.platform.PlatformInfo; +import java.util.UUID; -public class Velocity4PlatformInfo implements PlatformInfo { - private final ProxyServer proxy; +public enum TestCommandSender implements CommandSender { + INSTANCE; - public Velocity4PlatformInfo(ProxyServer proxy) { - this.proxy = proxy; - } + private final UUID uniqueId = new UUID(0, 0); @Override - public Type getType() { - return Type.PROXY; + public String getName() { + return "Test"; } @Override - public String getName() { - return "Velocity"; + public UUID getUniqueId() { + return this.uniqueId; } @Override - public String getVersion() { - return this.proxy.version().version(); + public void sendMessage(Component message) { + System.out.println(ANSIComponentSerializer.ansi().serialize(message)); } @Override - public String getMinecraftVersion() { - return null; + public boolean hasPermission(String permission) { + return true; } } diff --git a/spark-common/src/test/java/me/lucko/spark/test/plugin/TestSparkPlugin.java b/spark-common/src/test/java/me/lucko/spark/test/plugin/TestSparkPlugin.java new file mode 100644 index 00000000000..77dffd26090 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/test/plugin/TestSparkPlugin.java @@ -0,0 +1,143 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */
+
+package me.lucko.spark.test.plugin;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.command.sender.CommandSender;
+import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.util.classfinder.ClassFinder;
+import me.lucko.spark.common.util.classfinder.FallbackClassFinder;
+
+import java.nio.file.Path;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.stream.Stream;
+
+public class TestSparkPlugin implements SparkPlugin, AutoCloseable {
+
+    private static final Logger LOGGER = Logger.getLogger("spark-test");
+    private static final ScheduledExecutorService EXECUTOR_SERVICE = Executors.newScheduledThreadPool(16);
+
+    private final Path directory;
+    private final Map<String, String> props;
+
+    private final SparkPlatform platform;
+
+    public TestSparkPlugin(Path directory, Map<String, String> config) {
+        this.directory = directory;
+        this.props = new HashMap<>(config);
+        this.props.putIfAbsent("backgroundProfiler", "false");
+
+        this.props.forEach((k, v) -> System.setProperty("spark." + k, v));
+        this.platform = new SparkPlatform(this);
+        this.platform.enable();
+    }
+
+    public TestSparkPlugin(Path directory) {
+        this(directory, Collections.emptyMap());
+    }
+
+    public SparkPlatform platform() {
+        return this.platform;
+    }
+
+    @Override
+    public void close() {
+        this.platform.disable();
+        this.props.keySet().forEach((k) -> System.clearProperty("spark." + k));
+    }
+
+    @Override
+    public String getVersion() {
+        return "1.0-test";
+    }
+
+    @Override
+    public Path getPluginDirectory() {
+        return this.directory;
+    }
+
+    @Override
+    public String getCommandName() {
+        return "spark";
+    }
+
+    @Override
+    public Stream<? extends CommandSender> getCommandSenders() {
+        return Stream.of(TestCommandSender.INSTANCE);
+    }
+
+    @Override
+    public void executeAsync(Runnable task) {
+        EXECUTOR_SERVICE.execute(task);
+    }
+
+    @Override
+    public void log(Level level, String msg) {
+        LOGGER.log(level, msg);
+    }
+
+    @Override
+    public void log(Level level, String msg, Throwable throwable) {
+        LOGGER.log(level, msg, throwable);
+    }
+
+    @Override
+    public PlatformInfo getPlatformInfo() {
+        return new PlatformInfo() {
+            @Override
+            public Type getType() {
+                return Type.SERVER;
+            }
+
+            @Override
+            public String getName() {
+                return "Test";
+            }
+
+            @Override
+            public String getBrand() {
+                return "Test";
+            }
+
+            @Override
+            public String getVersion() {
+                return "v1.0-test";
+            }
+
+            @Override
+            public String getMinecraftVersion() {
+                return null;
+            }
+        };
+    }
+
+    @Override
+    public ClassFinder createClassFinder() {
+        return FallbackClassFinder.INSTANCE;
+    }
+}
diff --git a/spark-fabric/build.gradle b/spark-fabric/build.gradle
index 23ebf319ecd..3da2bf37890 100644
--- a/spark-fabric/build.gradle
+++ b/spark-fabric/build.gradle
@@ -1,13 +1,13 @@
 import net.fabricmc.loom.task.RemapJarTask
 
 plugins {
-    id 'fabric-loom' version '1.1-SNAPSHOT'
-    id 'com.github.johnrengelman.shadow' version '7.0.0'
+    id 'fabric-loom' version '1.11-SNAPSHOT'
+    id 'com.gradleup.shadow' version '8.3.8'
 }
 
 tasks.withType(JavaCompile) {
-    // override, compile targeting J17
-    options.release = 17
+    // override, compile targeting J21
+    options.release = 21
 }
 
 repositories {
@@ -28,9 +28,9 @@ configurations {
 
 dependencies {
     // https://modmuss50.me/fabric.html
-    minecraft 'com.mojang:minecraft:1.19.4'
-
mappings 'net.fabricmc:yarn:1.19.4+build.1:v2' - modImplementation 'net.fabricmc:fabric-loader:0.14.17' + minecraft 'com.mojang:minecraft:1.21.8' + mappings 'net.fabricmc:yarn:1.21.8+build.1:v2' + modImplementation 'net.fabricmc:fabric-loader:0.16.14' Set apiModules = [ "fabric-api-base", @@ -40,12 +40,12 @@ dependencies { // Add each module as a dependency apiModules.forEach { - modImplementation(fabricApi.module(it, '0.76.0+1.19.4')) + modImplementation(fabricApi.module(it, '0.129.0+1.21.8')) } - include(modImplementation('me.lucko:fabric-permissions-api:0.1-SNAPSHOT')) + include(modImplementation('me.lucko:fabric-permissions-api:0.4.1')) - modImplementation('eu.pb4:placeholder-api:2.0.0-beta.4+1.19') + modImplementation('eu.pb4:placeholder-api:2.7.0+1.21.6') shade project(':spark-common') } @@ -76,15 +76,15 @@ shadowJar { relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' + relocate 'net.kyori.option', 'me.lucko.spark.lib.adventure.option' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' // relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks' + relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws' - exclude 'module-info.class' - exclude 'META-INF/maven/**' - exclude 'META-INF/proguard/**' + project.applyExcludes(delegate) dependencies { exclude(dependency('org.ow2.asm::')) diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java index ef22d9c34e6..c79c2b04c1b 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java @@ -21,16 +21,13 @@ package me.lucko.spark.fabric; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.common.sampler.source.ClassSourceLookup; -import me.lucko.spark.common.util.ClassFinder; +import me.lucko.spark.common.util.classfinder.ClassFinder; import me.lucko.spark.fabric.smap.MixinUtils; import me.lucko.spark.fabric.smap.SourceMap; import me.lucko.spark.fabric.smap.SourceMapProvider; - import net.fabricmc.loader.api.FabricLoader; import net.fabricmc.loader.api.ModContainer; - import org.objectweb.asm.Type; import org.spongepowered.asm.mixin.FabricUtil; import org.spongepowered.asm.mixin.extensibility.IMixinConfig; @@ -44,14 +41,15 @@ import java.util.Map; public class FabricClassSourceLookup extends ClassSourceLookup.ByCodeSource { - - private final ClassFinder classFinder = new ClassFinder(); - private final SourceMapProvider smapProvider = new SourceMapProvider(); - + private final ClassFinder classFinder; + private final SourceMapProvider smapProvider; private final Path modsDirectory; private final Map pathToModMap; - public FabricClassSourceLookup() { + public FabricClassSourceLookup(ClassFinder classFinder) { + this.classFinder = classFinder; + this.smapProvider = new SourceMapProvider(); + FabricLoader loader = FabricLoader.getInstance(); this.modsDirectory = loader.getGameDir().resolve("mods").toAbsolutePath().normalize(); this.pathToModMap = constructPathToModIdMap(loader.getAllMods()); diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClientCommandSender.java 
b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClientCommandSender.java
new file mode 100644
index 00000000000..1dcad0254d2
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClientCommandSender.java
@@ -0,0 +1,73 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric;
+
+import com.google.gson.JsonParseException;
+import com.mojang.serialization.JsonOps;
+import me.lucko.spark.common.command.sender.AbstractCommandSender;
+import net.fabricmc.fabric.api.client.command.v2.FabricClientCommandSource;
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer;
+import net.minecraft.client.network.ClientCommandSource;
+import net.minecraft.registry.DynamicRegistryManager;
+import net.minecraft.text.Text;
+import net.minecraft.text.TextCodecs;
+
+import java.util.UUID;
+
+public class FabricClientCommandSender extends AbstractCommandSender<FabricClientCommandSource> {
+    public FabricClientCommandSender(FabricClientCommandSource commandSource) {
+        super(commandSource);
+    }
+
+    public FabricClientCommandSender(ClientCommandSource commandSource) {
+        this((FabricClientCommandSource) commandSource);
+    }
+
+    @Override
+    public String getName() {
+        return this.delegate.getPlayer().getGameProfile().getName();
+    }
+
+    @Override
+    public UUID getUniqueId() {
+        return this.delegate.getPlayer().getUuid();
+    }
+
+    @Override
+    public void sendMessage(Component message) {
+        Text component = TextCodecs.CODEC.decode(
+                DynamicRegistryManager.EMPTY.getOps(JsonOps.INSTANCE),
+                GsonComponentSerializer.gson().serializeToTree(message)
+        ).getOrThrow(JsonParseException::new).getFirst();
+        this.delegate.sendFeedback(component);
+    }
+
+    @Override
+    public boolean hasPermission(String permission) {
+        return true;
+    }
+
+    @Override
+    protected Object getObjectForComparison() {
+        return this.delegate.getPlayer();
+    }
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricCommandSender.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricCommandSender.java
deleted file mode 100644
index 2138dbe95d4..00000000000
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricCommandSender.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck)
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -package me.lucko.spark.fabric; - -import me.lucko.spark.common.command.sender.AbstractCommandSender; -import me.lucko.spark.fabric.plugin.FabricSparkPlugin; - -import net.kyori.adventure.text.Component; -import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer; -import net.minecraft.entity.player.PlayerEntity; -import net.minecraft.server.MinecraftServer; -import net.minecraft.server.command.CommandOutput; -import net.minecraft.server.rcon.RconCommandOutput; -import net.minecraft.text.Text; - -import java.util.UUID; - -public class FabricCommandSender extends AbstractCommandSender { - private final FabricSparkPlugin plugin; - - public FabricCommandSender(CommandOutput commandOutput, FabricSparkPlugin plugin) { - super(commandOutput); - this.plugin = plugin; - } - - @Override - public String getName() { - if (super.delegate instanceof PlayerEntity) { - return ((PlayerEntity) super.delegate).getGameProfile().getName(); - } else if (super.delegate instanceof MinecraftServer) { - return "Console"; - } else if (super.delegate instanceof RconCommandOutput) { - return "RCON Console"; - } else { - return "unknown:" + super.delegate.getClass().getSimpleName(); - } - } - - @Override - public UUID getUniqueId() { - if (super.delegate instanceof PlayerEntity) { - return ((PlayerEntity) super.delegate).getUuid(); - } - return null; - } - - @Override - public void sendMessage(Component message) { - Text component = Text.Serializer.fromJson(GsonComponentSerializer.gson().serialize(message)); - super.delegate.sendMessage(component); - } - - @Override - public boolean hasPermission(String permission) { - return this.plugin.hasPermission(super.delegate, permission); - } -} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricExtraMetadataProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricExtraMetadataProvider.java deleted file mode 100644 index 22794c27644..00000000000 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricExtraMetadataProvider.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
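The FabricCommandSender deleted above identified its delegate with a chain of instanceof checks over CommandOutput; the patch replaces it with two focused wrappers, FabricClientCommandSender (above) and FabricServerCommandSender (below), that read name, UUID and permissions straight off the command source. Purely for comparison, since the build now targets Java 21, the removed dispatch restated as a pattern-matching switch would be (illustrative only; no such method exists in the patch):

```java
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.server.MinecraftServer;
import net.minecraft.server.command.CommandOutput;
import net.minecraft.server.rcon.RconCommandOutput;

final class SenderNames {
    private SenderNames() {}

    // The deleted instanceof-chain as a single switch. The new senders avoid
    // the type sniffing entirely by asking ServerCommandSource for its
    // display name and entity.
    static String nameOf(CommandOutput output) {
        return switch (output) {
            case PlayerEntity player -> player.getGameProfile().getName();
            case MinecraftServer server -> "Console";
            case RconCommandOutput rcon -> "RCON Console";
            default -> "unknown:" + output.getClass().getSimpleName();
        };
    }
}
```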
- */ - -package me.lucko.spark.fabric; - -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; - -import me.lucko.spark.common.platform.MetadataProvider; - -import net.minecraft.resource.ResourcePackManager; -import net.minecraft.resource.ResourcePackProfile; -import net.minecraft.resource.ResourcePackSource; - -import java.util.LinkedHashMap; -import java.util.Map; - -public class FabricExtraMetadataProvider implements MetadataProvider { - - private final ResourcePackManager resourcePackManager; - - public FabricExtraMetadataProvider(ResourcePackManager resourcePackManager) { - this.resourcePackManager = resourcePackManager; - } - - @Override - public Map get() { - Map metadata = new LinkedHashMap<>(); - metadata.put("datapacks", datapackMetadata()); - return metadata; - } - - private JsonElement datapackMetadata() { - JsonObject datapacks = new JsonObject(); - for (ResourcePackProfile profile : this.resourcePackManager.getEnabledProfiles()) { - JsonObject obj = new JsonObject(); - obj.addProperty("name", profile.getDisplayName().getString()); - obj.addProperty("description", profile.getDescription().getString()); - obj.addProperty("source", resourcePackSource(profile.getSource())); - datapacks.add(profile.getName(), obj); - } - return datapacks; - } - - private static String resourcePackSource(ResourcePackSource source) { - if (source == ResourcePackSource.NONE) { - return "none"; - } else if (source == ResourcePackSource.BUILTIN) { - return "builtin"; - } else if (source == ResourcePackSource.WORLD) { - return "world"; - } else if (source == ResourcePackSource.SERVER) { - return "server"; - } else { - return "unknown"; - } - } -} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlatformInfo.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlatformInfo.java index e298121e41f..57251e1feaa 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlatformInfo.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlatformInfo.java @@ -21,7 +21,6 @@ package me.lucko.spark.fabric; import me.lucko.spark.common.platform.PlatformInfo; - import net.fabricmc.loader.api.FabricLoader; import java.util.Optional; @@ -43,6 +42,13 @@ public String getName() { return "Fabric"; } + @Override + public String getBrand() { + return FabricLoader.getInstance().getModContainer("fabric") + .map(container -> container.getMetadata().getName()) + .orElse("Fabric"); + } + @Override public String getVersion() { return getModVersion("fabricloader").orElse("unknown"); diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlayerPingProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlayerPingProvider.java index bae6c4112dd..42b6b3738f2 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlayerPingProvider.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlayerPingProvider.java @@ -21,9 +21,7 @@ package me.lucko.spark.fabric; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.common.monitor.ping.PlayerPingProvider; - import net.minecraft.server.MinecraftServer; import net.minecraft.server.network.ServerPlayerEntity; @@ -40,7 +38,7 @@ public FabricPlayerPingProvider(MinecraftServer server) { public Map poll() { ImmutableMap.Builder builder = ImmutableMap.builder(); for (ServerPlayerEntity player : this.server.getPlayerManager().getPlayerList()) { - builder.put(player.getGameProfile().getName(), player.pingMilliseconds); + 
builder.put(player.getGameProfile().getName(), player.networkHandler.getLatency()); } return builder.build(); } diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerCommandSender.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerCommandSender.java new file mode 100644 index 00000000000..c6e2c84dbe5 --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerCommandSender.java @@ -0,0 +1,95 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.fabric; + +import com.google.gson.JsonParseException; +import com.mojang.serialization.JsonOps; +import me.lucko.fabric.api.permissions.v0.Permissions; +import me.lucko.spark.common.command.sender.AbstractCommandSender; +import net.kyori.adventure.text.Component; +import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer; +import net.minecraft.entity.Entity; +import net.minecraft.registry.DynamicRegistryManager; +import net.minecraft.server.MinecraftServer; +import net.minecraft.server.command.ServerCommandSource; +import net.minecraft.server.network.ServerPlayerEntity; +import net.minecraft.text.Text; +import net.minecraft.text.TextCodecs; + +import java.util.UUID; + +public class FabricServerCommandSender extends AbstractCommandSender { + public FabricServerCommandSender(ServerCommandSource commandSource) { + super(commandSource); + } + + @Override + public String getName() { + String name = this.delegate.getName(); + if (this.delegate.getEntity() != null && name.equals("Server")) { + return "Console"; + } + return name; + } + + @Override + public UUID getUniqueId() { + Entity entity = this.delegate.getEntity(); + return entity != null ? 
entity.getUuid() : null; + } + + @Override + public void sendMessage(Component message) { + Text component = TextCodecs.CODEC.decode( + DynamicRegistryManager.EMPTY.getOps(JsonOps.INSTANCE), + GsonComponentSerializer.gson().serializeToTree(message) + ).getOrThrow(JsonParseException::new).getFirst(); + this.delegate.sendMessage(component); + } + + @Override + public boolean hasPermission(String permission) { + return Permissions.getPermissionValue(this.delegate, permission).orElseGet(() -> { + ServerPlayerEntity player = this.delegate.getPlayer(); + MinecraftServer server = this.delegate.getServer(); + if (player != null) { + if (server != null && server.isHost(player.getGameProfile())) { + return true; + } + return player.hasPermissionLevel(4); + } + return true; + }); + } + + @Override + protected Object getObjectForComparison() { + UUID uniqueId = getUniqueId(); + if (uniqueId != null) { + return uniqueId; + } + Entity entity = this.delegate.getEntity(); + if (entity != null) { + return entity; + } + return getName(); + } +} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java index 325a324d486..bc70228d226 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java @@ -22,7 +22,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; - import me.lucko.spark.common.platform.serverconfig.ConfigParser; import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricSparkMod.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricSparkMod.java index ad419f7fc65..8772786ef69 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricSparkMod.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricSparkMod.java @@ -21,10 +21,8 @@ package me.lucko.spark.fabric; import com.mojang.brigadier.CommandDispatcher; - import me.lucko.spark.fabric.plugin.FabricClientSparkPlugin; import me.lucko.spark.fabric.plugin.FabricServerSparkPlugin; - import net.fabricmc.api.ModInitializer; import net.fabricmc.fabric.api.command.v2.CommandRegistrationCallback; import net.fabricmc.fabric.api.event.lifecycle.v1.ServerLifecycleEvents; @@ -62,7 +60,7 @@ public void onInitialize() { CommandRegistrationCallback.EVENT.register(this::onServerCommandRegister); } - // client (called be entrypoint defined in fabric.mod.json) + // client (called by entrypoint defined in fabric.mod.json) public static void initializeClient() { Objects.requireNonNull(FabricSparkMod.mod, "mod"); FabricClientSparkPlugin.register(FabricSparkMod.mod, MinecraftClient.getInstance()); diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricTickHook.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricTickHook.java index b76ad9bf01e..4f25f2b41ed 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricTickHook.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricTickHook.java @@ -22,7 +22,6 @@ import me.lucko.spark.common.tick.AbstractTickHook; import me.lucko.spark.common.tick.TickHook; - import net.fabricmc.fabric.api.client.event.lifecycle.v1.ClientTickEvents; import net.fabricmc.fabric.api.event.lifecycle.v1.ServerTickEvents; import 
net.minecraft.client.MinecraftClient; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricTickReporter.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricTickReporter.java index b71ac848bb5..6a6addde59f 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricTickReporter.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricTickReporter.java @@ -22,7 +22,6 @@ import me.lucko.spark.common.tick.SimpleTickReporter; import me.lucko.spark.common.tick.TickReporter; - import net.fabricmc.fabric.api.client.event.lifecycle.v1.ClientTickEvents; import net.fabricmc.fabric.api.event.lifecycle.v1.ServerTickEvents; import net.minecraft.client.MinecraftClient; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java index 156db89ca89..5484c7d63a1 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java @@ -20,9 +20,7 @@ package me.lucko.spark.fabric; -import it.unimi.dsi.fastutil.longs.LongIterator; -import it.unimi.dsi.fastutil.longs.LongSet; - +import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap; import me.lucko.spark.common.platform.world.AbstractChunkInfo; import me.lucko.spark.common.platform.world.CountMap; import me.lucko.spark.common.platform.world.WorldInfoProvider; @@ -30,39 +28,56 @@ import me.lucko.spark.fabric.mixin.ClientWorldAccessor; import me.lucko.spark.fabric.mixin.ServerEntityManagerAccessor; import me.lucko.spark.fabric.mixin.ServerWorldAccessor; - +import me.lucko.spark.fabric.mixin.WorldAccessor; +import net.fabricmc.loader.api.FabricLoader; import net.minecraft.client.MinecraftClient; -import net.minecraft.client.world.ClientEntityManager; import net.minecraft.client.world.ClientWorld; import net.minecraft.entity.Entity; import net.minecraft.entity.EntityType; +import net.minecraft.resource.ResourcePackManager; +import net.minecraft.resource.ResourcePackSource; import net.minecraft.server.MinecraftServer; import net.minecraft.server.world.ServerEntityManager; import net.minecraft.server.world.ServerWorld; import net.minecraft.util.math.ChunkPos; +import net.minecraft.world.GameRules; +import net.minecraft.world.entity.ClientEntityManager; import net.minecraft.world.entity.EntityIndex; -import net.minecraft.world.entity.EntityTrackingSection; -import net.minecraft.world.entity.SectionedEntityCache; +import net.minecraft.world.entity.EntityLookup; -import java.util.ArrayList; +import java.lang.reflect.Method; +import java.util.Collection; import java.util.HashMap; import java.util.List; -import java.util.stream.Stream; +import java.util.stream.Collectors; public abstract class FabricWorldInfoProvider implements WorldInfoProvider { - protected List getChunksFromCache(SectionedEntityCache cache) { - LongSet loadedChunks = cache.getChunkPositions(); - List list = new ArrayList<>(loadedChunks.size()); - - for (LongIterator iterator = loadedChunks.iterator(); iterator.hasNext(); ) { - long chunkPos = iterator.nextLong(); - Stream> sections = cache.getTrackingSections(chunkPos); + protected abstract ResourcePackManager getResourcePackManager(); + + @Override + public Collection pollDataPacks() { + return getResourcePackManager().getEnabledProfiles().stream() + .map(pack -> new DataPackInfo( + pack.getId(), + pack.getDescription().getString(), + resourcePackSource(pack.getSource()) + )) + 
.collect(Collectors.toList()); + } - list.add(new FabricChunkInfo(chunkPos, sections)); + private static String resourcePackSource(ResourcePackSource source) { + if (source == ResourcePackSource.NONE) { + return "none"; + } else if (source == ResourcePackSource.BUILTIN) { + return "builtin"; + } else if (source == ResourcePackSource.WORLD) { + return "world"; + } else if (source == ResourcePackSource.SERVER) { + return "server"; + } else { + return "unknown"; } - - return list; } public static final class Server extends FabricWorldInfoProvider { @@ -79,10 +94,15 @@ public CountsResult pollCounts() { int chunks = 0; for (ServerWorld world : this.server.getWorlds()) { - ServerEntityManager entityManager = ((ServerWorldAccessor) world).getEntityManager(); - EntityIndex entityIndex = ((ServerEntityManagerAccessor) entityManager).getIndex(); - entities += entityIndex.size(); + if (FabricLoader.getInstance().isModLoaded("moonrise")) { + entities += MoonriseMethods.getEntityCount(((WorldAccessor) world).spark$getEntityLookup()); + } else { + ServerEntityManager entityManager = ((ServerWorldAccessor) world).getEntityManager(); + EntityIndex entityIndex = ((ServerEntityManagerAccessor) entityManager).getIndex(); + entities += entityIndex.size(); + } + chunks += world.getChunkManager().getLoadedChunkCount(); } @@ -94,15 +114,46 @@ public ChunksResult pollChunks() { ChunksResult data = new ChunksResult<>(); for (ServerWorld world : this.server.getWorlds()) { - ServerEntityManager entityManager = ((ServerWorldAccessor) world).getEntityManager(); - SectionedEntityCache cache = ((ServerEntityManagerAccessor) entityManager).getCache(); + Long2ObjectOpenHashMap worldInfos = new Long2ObjectOpenHashMap<>(); + + for (Entity entity : ((WorldAccessor) world).spark$getEntityLookup().iterate()) { + FabricChunkInfo info = worldInfos.computeIfAbsent( + entity.getChunkPos().toLong(), FabricChunkInfo::new); + info.entityCounts.increment(entity.getType()); + } - List list = getChunksFromCache(cache); - data.put(world.getRegistryKey().getValue().getPath(), list); + data.put(world.getRegistryKey().getValue().getPath(), List.copyOf(worldInfos.values())); } return data; } + + @Override + public GameRulesResult pollGameRules() { + GameRulesResult data = new GameRulesResult(); + Iterable worlds = this.server.getWorlds(); + + for (ServerWorld world : worlds) { + String worldName = world.getRegistryKey().getValue().getPath(); + + world.getGameRules().accept(new GameRules.Visitor() { + @Override + public > void visit(GameRules.Key key, GameRules.Type type) { + String defaultValue = type.createRule().serialize(); + data.putDefault(key.getName(), defaultValue); + + String value = world.getGameRules().get(key).serialize(); + data.put(key.getName(), worldName, value); + } + }); + } + return data; + } + + @Override + protected ResourcePackManager getResourcePackManager() { + return this.server.getDataPackManager(); + } } public static final class Client extends FabricWorldInfoProvider { @@ -119,10 +170,16 @@ public CountsResult pollCounts() { return null; } - ClientEntityManager entityManager = ((ClientWorldAccessor) world).getEntityManager(); - EntityIndex entityIndex = ((ClientEntityManagerAccessor) entityManager).getIndex(); + int entities; + + if (FabricLoader.getInstance().isModLoaded("moonrise")) { + entities = MoonriseMethods.getEntityCount(((WorldAccessor) world).spark$getEntityLookup()); + } else { + ClientEntityManager entityManager = ((ClientWorldAccessor) world).getEntityManager(); + EntityIndex entityIndex = 
((ClientEntityManagerAccessor) entityManager).getIndex(); + entities = entityIndex.size(); + } - int entities = entityIndex.size(); int chunks = world.getChunkManager().getLoadedChunkCount(); return new CountsResult(-1, entities, -1, chunks); @@ -130,37 +187,44 @@ public CountsResult pollCounts() { @Override public ChunksResult pollChunks() { - ChunksResult data = new ChunksResult<>(); - ClientWorld world = this.client.world; if (world == null) { return null; } - ClientEntityManager entityManager = ((ClientWorldAccessor) world).getEntityManager(); - SectionedEntityCache cache = ((ClientEntityManagerAccessor) entityManager).getCache(); + ChunksResult data = new ChunksResult<>(); + + Long2ObjectOpenHashMap worldInfos = new Long2ObjectOpenHashMap<>(); - List list = getChunksFromCache(cache); - data.put(world.getRegistryKey().getValue().getPath(), list); + for (Entity entity : ((WorldAccessor) world).spark$getEntityLookup().iterate()) { + FabricChunkInfo info = worldInfos.computeIfAbsent(entity.getChunkPos().toLong(), FabricChunkInfo::new); + info.entityCounts.increment(entity.getType()); + } + + data.put(world.getRegistryKey().getValue().getPath(), List.copyOf(worldInfos.values())); return data; } + + @Override + public GameRulesResult pollGameRules() { + // Not available on client since 24w39a + return null; + } + + @Override + protected ResourcePackManager getResourcePackManager() { + return this.client.getResourcePackManager(); + } } static final class FabricChunkInfo extends AbstractChunkInfo> { private final CountMap> entityCounts; - FabricChunkInfo(long chunkPos, Stream> entities) { + FabricChunkInfo(long chunkPos) { super(ChunkPos.getPackedX(chunkPos), ChunkPos.getPackedZ(chunkPos)); this.entityCounts = new CountMap.Simple<>(new HashMap<>()); - entities.forEach(section -> { - if (section.getStatus().shouldTrack()) { - section.stream().forEach(entity -> - this.entityCounts.increment(entity.getType()) - ); - } - }); } @Override @@ -174,5 +238,28 @@ public String entityTypeName(EntityType type) { } } + private static final class MoonriseMethods { + private static Method getEntityCount; + + private static Method getEntityCountMethod(EntityLookup getter) { + if (getEntityCount == null) { + try { + getEntityCount = getter.getClass().getMethod("getEntityCount"); + } catch (final ReflectiveOperationException e) { + throw new RuntimeException("Cannot find Moonrise getEntityCount method", e); + } + } + return getEntityCount; + } + + private static int getEntityCount(EntityLookup getter) { + try { + return (int) getEntityCountMethod(getter).invoke(getter); + } catch (final ReflectiveOperationException e) { + throw new RuntimeException("Failed to invoke Moonrise getEntityCount method", e); + } + } + } + } diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java index 994c9a39e67..d2c266c7c7c 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java @@ -20,11 +20,10 @@ package me.lucko.spark.fabric.mixin; -import net.minecraft.client.world.ClientEntityManager; import net.minecraft.entity.Entity; +import net.minecraft.world.entity.ClientEntityManager; import net.minecraft.world.entity.EntityIndex; import net.minecraft.world.entity.SectionedEntityCache; - import org.spongepowered.asm.mixin.Mixin; import 
org.spongepowered.asm.mixin.gen.Accessor; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java index 01562efe331..3940101a877 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java @@ -20,10 +20,9 @@ package me.lucko.spark.fabric.mixin; -import net.minecraft.client.world.ClientEntityManager; import net.minecraft.client.world.ClientWorld; import net.minecraft.entity.Entity; - +import net.minecraft.world.entity.ClientEntityManager; import org.spongepowered.asm.mixin.Mixin; import org.spongepowered.asm.mixin.gen.Accessor; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/MinecraftClientAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/MinecraftClientAccessor.java index 7a4fb785256..dfe9494d43a 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/MinecraftClientAccessor.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/MinecraftClientAccessor.java @@ -21,7 +21,6 @@ package me.lucko.spark.fabric.mixin; import net.minecraft.client.MinecraftClient; - import org.spongepowered.asm.mixin.Mixin; import org.spongepowered.asm.mixin.gen.Accessor; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java index 2c67502c73a..8acc76d773b 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java @@ -24,7 +24,6 @@ import net.minecraft.server.world.ServerEntityManager; import net.minecraft.world.entity.EntityIndex; import net.minecraft.world.entity.SectionedEntityCache; - import org.spongepowered.asm.mixin.Mixin; import org.spongepowered.asm.mixin.gen.Accessor; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java index cf2e7e854bd..0254e750e19 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java @@ -23,7 +23,6 @@ import net.minecraft.entity.Entity; import net.minecraft.server.world.ServerEntityManager; import net.minecraft.server.world.ServerWorld; - import org.spongepowered.asm.mixin.Mixin; import org.spongepowered.asm.mixin.gen.Accessor; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/WorldAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/WorldAccessor.java new file mode 100644 index 00000000000..e50dc20a250 --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/WorldAccessor.java @@ -0,0 +1,35 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
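A detail from the FabricWorldInfoProvider hunks above that is easy to miss: when the Moonrise optimization mod is present it replaces the vanilla entity storage, so spark queries Moonrise's getEntityCount reflectively instead of compiling against it. MoonriseMethods resolves the Method once and reuses it on every poll; the general shape of that cached-reflection pattern is:

```java
import java.lang.reflect.Method;

final class ReflectiveCounter {
    private static Method getEntityCount;

    // Method lookup is the expensive half of reflection; cache it on first
    // use and pay only the (comparatively cheap) invoke cost on later polls.
    static int entityCount(Object entityLookup) {
        try {
            if (getEntityCount == null) {
                getEntityCount = entityLookup.getClass().getMethod("getEntityCount");
            }
            return (int) getEntityCount.invoke(entityLookup);
        } catch (ReflectiveOperationException e) {
            throw new RuntimeException("Moonrise getEntityCount not available", e);
        }
    }
}
```

Keeping the call reflective is what lets Moonrise remain an optional runtime dependency rather than a compile-time one.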
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <https://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.fabric.mixin; + +import net.minecraft.entity.Entity; +import net.minecraft.world.World; +import net.minecraft.world.entity.EntityLookup; +import org.spongepowered.asm.mixin.Mixin; +import org.spongepowered.asm.mixin.gen.Invoker; + +@Mixin(World.class) +public interface WorldAccessor { + + @Invoker(value = "getEntityLookup") + EntityLookup<Entity> spark$getEntityLookup(); + +} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java index 69303e3de05..42be3c1932e 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java @@ -20,19 +20,20 @@ package me.lucko.spark.fabric.placeholder; +import com.google.gson.JsonParseException; +import com.mojang.serialization.JsonOps; import eu.pb4.placeholders.api.PlaceholderContext; import eu.pb4.placeholders.api.PlaceholderHandler; import eu.pb4.placeholders.api.PlaceholderResult; import eu.pb4.placeholders.api.Placeholders; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.util.SparkPlaceholder; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer; +import net.minecraft.registry.DynamicRegistryManager; import net.minecraft.text.Text; +import net.minecraft.text.TextCodecs; import net.minecraft.util.Identifier; - import org.jetbrains.annotations.Nullable; public enum SparkFabricPlaceholderApi { @@ -41,7 +42,7 @@ public enum SparkFabricPlaceholderApi { public static void register(SparkPlatform platform) { for (SparkPlaceholder placeholder : SparkPlaceholder.values()) { Placeholders.register( - new Identifier("spark", placeholder.getName()), + Identifier.of("spark", placeholder.getName()), new Handler(platform, placeholder) ); } @@ -60,7 +61,10 @@ private static PlaceholderResult toResult(Component component) { } private static Text toText(Component component) { - return Text.Serializer.fromJson(GsonComponentSerializer.gson().serialize(component)); + return TextCodecs.CODEC.decode( + DynamicRegistryManager.EMPTY.getOps(JsonOps.INSTANCE), + GsonComponentSerializer.gson().serializeToTree(component) + ).getOrThrow(JsonParseException::new).getFirst(); } } diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java index faf4eefb7c4..0d8a88f2847 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java @@ -27,28 +27,24 @@ import com.mojang.brigadier.suggestion.SuggestionProvider; import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; - -import me.lucko.spark.common.platform.MetadataProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider;
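The new WorldAccessor mixin above uses @Invoker rather than @Accessor because World#getEntityLookup is a method, not a field: Mixin implants a public bridge method (spark$getEntityLookup) onto net.minecraft.world.World at runtime, and callers reach it by casting. That is exactly how the rewritten pollChunks implementations iterate entities. A minimal, self-contained usage sketch (the entitiesPerChunk helper is hypothetical):

```java
import java.util.HashMap;
import java.util.Map;
import me.lucko.spark.fabric.mixin.WorldAccessor;
import net.minecraft.entity.Entity;
import net.minecraft.world.World;

final class ChunkCounts {
    private ChunkCounts() {}

    // Cast any World (server or client) to the accessor interface, then walk
    // the entity lookup and bucket entities by packed chunk position.
    static Map<Long, Integer> entitiesPerChunk(World world) {
        Map<Long, Integer> counts = new HashMap<>();
        for (Entity entity : ((WorldAccessor) world).spark$getEntityLookup().iterate()) {
            counts.merge(entity.getChunkPos().toLong(), 1, Integer::sum);
        }
        return counts;
    }
}
```

Iterating the EntityLookup also removes the old dependency on SectionedEntityCache internals, which is what makes the provider compatible with mods like Moonrise that swap that storage out.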
import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; -import me.lucko.spark.fabric.FabricCommandSender; -import me.lucko.spark.fabric.FabricExtraMetadataProvider; +import me.lucko.spark.fabric.FabricClientCommandSender; import me.lucko.spark.fabric.FabricPlatformInfo; import me.lucko.spark.fabric.FabricSparkMod; import me.lucko.spark.fabric.FabricTickHook; import me.lucko.spark.fabric.FabricTickReporter; import me.lucko.spark.fabric.FabricWorldInfoProvider; import me.lucko.spark.fabric.mixin.MinecraftClientAccessor; - import net.fabricmc.fabric.api.client.command.v2.ClientCommandRegistrationCallback; import net.fabricmc.fabric.api.client.command.v2.FabricClientCommandSource; import net.fabricmc.fabric.api.client.event.lifecycle.v1.ClientLifecycleEvents; import net.minecraft.client.MinecraftClient; +import net.minecraft.client.network.ClientPlayNetworkHandler; import net.minecraft.command.CommandRegistryAccess; -import net.minecraft.server.command.CommandOutput; import java.util.concurrent.CompletableFuture; import java.util.stream.Stream; @@ -95,7 +91,7 @@ public int run(CommandContext context) throws Command return 0; } - this.platform.executeCommand(new FabricCommandSender(context.getSource().getEntity(), this), args); + this.platform.executeCommand(new FabricClientCommandSender(context.getSource()), args); return Command.SINGLE_SUCCESS; } @@ -106,17 +102,16 @@ public CompletableFuture getSuggestions(CommandContext getCommandSenders() { - return Stream.of(new FabricCommandSender(this.minecraft.player, this)); + public Stream getCommandSenders() { + ClientPlayNetworkHandler networkHandler = this.minecraft.getNetworkHandler(); + if (networkHandler == null) { + return Stream.empty(); + } + return Stream.of(new FabricClientCommandSender(networkHandler.getCommandSource())); } @Override @@ -139,11 +134,6 @@ public TickReporter createTickReporter() { return new FabricTickReporter.Client(); } - @Override - public MetadataProvider createExtraMetadataProvider() { - return new FabricExtraMetadataProvider(this.minecraft.getResourcePackManager()); - } - @Override public WorldInfoProvider createWorldInfoProvider() { return new FabricWorldInfoProvider.Client(this.minecraft); diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java index 1606d57e8cd..4f6500cec51 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java @@ -27,32 +27,26 @@ import com.mojang.brigadier.suggestion.SuggestionProvider; import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; - -import me.lucko.fabric.api.permissions.v0.Permissions; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; -import me.lucko.spark.common.platform.MetadataProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; -import me.lucko.spark.fabric.FabricCommandSender; -import me.lucko.spark.fabric.FabricExtraMetadataProvider; import me.lucko.spark.fabric.FabricPlatformInfo; 
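Mid-hunk here, FabricServerSparkPlugin loses its hasPermission(CommandOutput, String) override; the check now lives in FabricServerCommandSender (earlier in this patch) and consults fabric-permissions-api first. The resolution order is easy to misread in diff form, so restated from the sender shown above:

```java
import me.lucko.fabric.api.permissions.v0.Permissions;
import net.minecraft.server.MinecraftServer;
import net.minecraft.server.command.ServerCommandSource;
import net.minecraft.server.network.ServerPlayerEntity;

final class SparkPermissions {
    private SparkPermissions() {}

    // 1. An explicit value registered via fabric-permissions-api wins.
    // 2. Otherwise players must be the singleplayer host or have op level 4.
    // 3. Non-player sources (console, RCON, command blocks) are allowed.
    static boolean check(ServerCommandSource source, String permission) {
        return Permissions.getPermissionValue(source, permission).orElseGet(() -> {
            ServerPlayerEntity player = source.getPlayer();
            MinecraftServer server = source.getServer();
            if (player != null) {
                return (server != null && server.isHost(player.getGameProfile()))
                        || player.hasPermissionLevel(4);
            }
            return true;
        });
    }
}
```

This is also why fabric.mod.json later in the patch gains a hard fabric-permissions-api-v0 dependency, and why the build script now includes fabric-permissions-api 0.4.1 instead of an optional snapshot.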
import me.lucko.spark.fabric.FabricPlayerPingProvider; +import me.lucko.spark.fabric.FabricServerCommandSender; import me.lucko.spark.fabric.FabricServerConfigProvider; import me.lucko.spark.fabric.FabricSparkMod; import me.lucko.spark.fabric.FabricTickHook; import me.lucko.spark.fabric.FabricTickReporter; import me.lucko.spark.fabric.FabricWorldInfoProvider; import me.lucko.spark.fabric.placeholder.SparkFabricPlaceholderApi; - import net.fabricmc.loader.api.FabricLoader; -import net.minecraft.entity.player.PlayerEntity; import net.minecraft.server.MinecraftServer; -import net.minecraft.server.command.CommandOutput; import net.minecraft.server.command.ServerCommandSource; +import net.minecraft.server.network.ServerPlayerEntity; import java.util.concurrent.CompletableFuture; import java.util.stream.Stream; @@ -102,8 +96,7 @@ public int run(CommandContext context) throws CommandSyntax return 0; } - CommandOutput source = context.getSource().getEntity() != null ? context.getSource().getEntity() : context.getSource().getServer(); - this.platform.executeCommand(new FabricCommandSender(source, this), args); + this.platform.executeCommand(new FabricServerCommandSender(context.getSource()), args); return Command.SINGLE_SUCCESS; } @@ -114,31 +107,15 @@ public CompletableFuture getSuggestions(CommandContext { - MinecraftServer server = player.getServer(); - if (server != null && server.isHost(player.getGameProfile())) { - return true; - } - - return player.hasPermissionLevel(4); - }); - } else { - return true; - } - } - - @Override - public Stream getCommandSenders() { + public Stream getCommandSenders() { return Stream.concat( - this.server.getPlayerManager().getPlayerList().stream(), - Stream.of(this.server) - ).map(sender -> new FabricCommandSender(sender, this)); + this.server.getPlayerManager().getPlayerList().stream().map(ServerPlayerEntity::getCommandSource), + Stream.of(this.server.getCommandSource()) + ).map(FabricServerCommandSender::new); } @Override @@ -171,11 +148,6 @@ public ServerConfigProvider createServerConfigProvider() { return new FabricServerConfigProvider(); } - @Override - public MetadataProvider createExtraMetadataProvider() { - return new FabricExtraMetadataProvider(this.server.getDataPackManager()); - } - @Override public WorldInfoProvider createWorldInfoProvider() { return new FabricWorldInfoProvider.Server(this.server); diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkMixinPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkMixinPlugin.java index 8b7e1381ae4..cfc8c950fac 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkMixinPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkMixinPlugin.java @@ -21,7 +21,6 @@ package me.lucko.spark.fabric.plugin; import me.lucko.spark.fabric.smap.SourceDebugCache; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.objectweb.asm.tree.ClassNode; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java index 9a03b4e6008..348284bf26c 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java @@ -30,7 +30,6 @@ import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; import 
com.mojang.brigadier.tree.LiteralCommandNode; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.command.sender.CommandSender; @@ -39,11 +38,8 @@ import me.lucko.spark.common.util.SparkThreadFactory; import me.lucko.spark.fabric.FabricClassSourceLookup; import me.lucko.spark.fabric.FabricSparkMod; - import net.fabricmc.loader.api.FabricLoader; import net.fabricmc.loader.api.metadata.Person; -import net.minecraft.server.command.CommandOutput; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -80,8 +76,6 @@ public void disable() { this.scheduler.shutdown(); } - public abstract boolean hasPermission(CommandOutput sender, String permission); - @Override public String getVersion() { return this.mod.getVersion(); @@ -99,20 +93,29 @@ public void executeAsync(Runnable task) { @Override public void log(Level level, String msg) { - if (level == Level.INFO) { - this.logger.info(msg); - } else if (level == Level.WARNING) { - this.logger.warn(msg); - } else if (level == Level.SEVERE) { + if (level.intValue() >= 1000) { // severe this.logger.error(msg); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg); + } else { + this.logger.info(msg); + } + } + + @Override + public void log(Level level, String msg, Throwable throwable) { + if (level.intValue() >= 1000) { // severe + this.logger.error(msg, throwable); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg, throwable); } else { - throw new IllegalArgumentException(level.getName()); + this.logger.info(msg, throwable); } } @Override public ClassSourceLookup createClassSourceLookup() { - return new FabricClassSourceLookup(); + return new FabricClassSourceLookup(createClassFinder()); } @Override @@ -123,7 +126,8 @@ public Collection getKnownSources() { mod -> mod.getMetadata().getVersion().getFriendlyString(), mod -> mod.getMetadata().getAuthors().stream() .map(Person::getName) - .collect(Collectors.joining(", ")) + .collect(Collectors.joining(", ")), + mod -> mod.getMetadata().getDescription() ); } diff --git a/spark-fabric/src/main/resources/fabric.mod.json b/spark-fabric/src/main/resources/fabric.mod.json index f1f0ad4e6f0..a6e0a74257c 100644 --- a/spark-fabric/src/main/resources/fabric.mod.json +++ b/spark-fabric/src/main/resources/fabric.mod.json @@ -30,6 +30,7 @@ "fabricloader": ">=0.4.0", "fabric-api-base": "*", "fabric-command-api-v2": "*", - "fabric-lifecycle-events-v1" : "*" + "fabric-lifecycle-events-v1" : "*", + "fabric-permissions-api-v0": "*" } } diff --git a/spark-fabric/src/main/resources/spark.mixins.json b/spark-fabric/src/main/resources/spark.mixins.json index beaca2f55a2..a8e280c20a6 100644 --- a/spark-fabric/src/main/resources/spark.mixins.json +++ b/spark-fabric/src/main/resources/spark.mixins.json @@ -1,7 +1,6 @@ { "required": true, "package": "me.lucko.spark.fabric.mixin", - "compatibilityLevel": "JAVA_17", "client": [ "ClientEntityManagerAccessor", "ClientWorldAccessor", @@ -9,7 +8,8 @@ ], "mixins": [ "ServerEntityManagerAccessor", - "ServerWorldAccessor" + "ServerWorldAccessor", + "WorldAccessor" ], "plugin": "me.lucko.spark.fabric.plugin.FabricSparkMixinPlugin" } \ No newline at end of file diff --git a/spark-forge/build.gradle b/spark-forge/build.gradle index 16fc8f5675f..774d9ead8c8 100644 --- a/spark-forge/build.gradle +++ b/spark-forge/build.gradle @@ -1,27 +1,17 @@ -buildscript { - repositories { - maven { url = "https://maven.minecraftforge.net" } - mavenCentral() - } - 
dependencies { - classpath group: 'net.minecraftforge.gradle', name: 'ForgeGradle', version: '5.1.+', changing: true - } -} - plugins { - id 'com.github.johnrengelman.shadow' version '7.0.0' + id 'com.gradleup.shadow' version '8.3.8' + id 'net.minecraftforge.gradle' version '[6.0.24,6.2)' } -apply plugin: 'net.minecraftforge.gradle' - tasks.withType(JavaCompile) { - // override, compile targeting J17 - options.release = 17 + // override, compile targeting J21 + options.release = 21 } minecraft { - mappings channel: 'official', version: '1.19.4' + mappings channel: 'official', version: '1.21.8' accessTransformer = file('src/main/resources/META-INF/accesstransformer.cfg') + reobf = false } configurations { @@ -30,7 +20,8 @@ configurations { } dependencies { - minecraft 'net.minecraftforge:forge:1.19.4-45.0.1' + minecraft 'net.minecraftforge:forge:1.21.8-58.0.0' + annotationProcessor 'net.minecraftforge:eventbus-validator:7.0-beta.7' shade project(':spark-common') } @@ -49,30 +40,23 @@ processResources { } shadowJar { - archiveName = "spark-${project.pluginVersion}-forge.jar" + archiveFileName = "spark-${project.pluginVersion}-forge.jar" configurations = [project.configurations.shade] relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' + relocate 'net.kyori.option', 'me.lucko.spark.lib.adventure.option' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks' + relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws' - exclude 'module-info.class' - exclude 'META-INF/maven/**' - exclude 'META-INF/proguard/**' + project.applyExcludes(delegate) } artifacts { archives shadowJar shadow shadowJar } - -reobf { - shadowJar { - dependsOn createMcpToSrg - mappings = createMcpToSrg.outputs.files.singleFile - } -} diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClassSourceLookup.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClassSourceLookup.java index 82d66ca7338..1f7d90d15b6 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClassSourceLookup.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClassSourceLookup.java @@ -20,9 +20,8 @@ package me.lucko.spark.forge; -import me.lucko.spark.common.sampler.source.ClassSourceLookup; - import cpw.mods.modlauncher.TransformingClassLoader; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; public class ForgeClassSourceLookup implements ClassSourceLookup { diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7CommandSender.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientCommandSender.java similarity index 52% rename from spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7CommandSender.java rename to spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientCommandSender.java index b0cfd3cd176..3594e0e6e59 100644 --- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7CommandSender.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientCommandSender.java @@ -18,44 +18,56 @@ * along with this program. If not, see . 
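Stepping back to the FabricSparkPlugin changes a few hunks up: the log bridge used to compare java.util.logging.Level by exact identity and threw IllegalArgumentException for anything other than INFO/WARNING/SEVERE. The replacement compares intValue() against numeric thresholds (and adds a Throwable overload). The magic numbers 1000 and 900 are the Level.SEVERE and Level.WARNING constants; spelled out with names:

```java
import java.util.logging.Level;
import org.apache.logging.log4j.Logger;

final class LogBridge {
    private LogBridge() {}

    // java.util.logging levels are totally ordered by intValue(), so range
    // checks also handle custom levels: SEVERE = 1000, WARNING = 900, and
    // everything below (INFO, CONFIG, FINE...) degrades to info().
    static void log(Logger logger, Level level, String msg, Throwable t) {
        if (level.intValue() >= Level.SEVERE.intValue()) {
            logger.error(msg, t);
        } else if (level.intValue() >= Level.WARNING.intValue()) {
            logger.warn(msg, t);
        } else {
            logger.info(msg, t);
        }
    }
}
```

Falling through to info instead of throwing is the safer contract for a profiler: a log call should never be the thing that takes the game down.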
*/ -package me.lucko.spark.sponge; +package me.lucko.spark.forge; +import com.google.gson.JsonParseException; +import com.mojang.serialization.JsonOps; import me.lucko.spark.common.command.sender.AbstractCommandSender; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer; - -import org.spongepowered.api.command.CommandSource; -import org.spongepowered.api.entity.living.player.Player; -import org.spongepowered.api.text.serializer.TextSerializers; +import net.minecraft.client.player.LocalPlayer; +import net.minecraft.commands.CommandSourceStack; +import net.minecraft.core.RegistryAccess; +import net.minecraft.network.chat.ComponentSerialization; +import net.minecraft.world.entity.Entity; import java.util.UUID; -public class Sponge7CommandSender extends AbstractCommandSender { - public Sponge7CommandSender(CommandSource source) { +public class ForgeClientCommandSender extends AbstractCommandSender { + public ForgeClientCommandSender(CommandSourceStack source) { super(source); } @Override public String getName() { - return super.delegate.getName(); + return this.delegate.getTextName(); } @Override public UUID getUniqueId() { - if (super.delegate instanceof Player) { - return ((Player) super.delegate).getUniqueId(); + Entity entity = this.delegate.getEntity(); + if (entity instanceof LocalPlayer player) { + return player.getUUID(); } return null; } @Override public void sendMessage(Component message) { - super.delegate.sendMessage(TextSerializers.JSON.deserialize(GsonComponentSerializer.gson().serialize(message))); + net.minecraft.network.chat.Component component = ComponentSerialization.CODEC.decode( + RegistryAccess.EMPTY.createSerializationContext(JsonOps.INSTANCE), + GsonComponentSerializer.gson().serializeToTree(message) + ).getOrThrow(JsonParseException::new).getFirst(); + super.delegate.sendSystemMessage(component); } @Override public boolean hasPermission(String permission) { - return super.delegate.hasPermission(permission); + return true; + } + + @Override + protected Object getObjectForComparison() { + return this.delegate.getEntity(); } } diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeCommandSender.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeCommandSender.java deleted file mode 100644 index f3b746d1935..00000000000 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeCommandSender.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
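ForgeClientCommandSender above ends with the same getObjectForComparison override that every new sender in this patch carries. Its apparent role, an assumption inferred from the overrides themselves rather than from AbstractCommandSender (spark-common is not part of this diff), is identity: two wrapper instances around the same underlying player or console should compare equal, keyed on UUID first, then entity, then name. A hypothetical sketch of how a base class could consume it:

```java
// Hypothetical: spark-common's real AbstractCommandSender is not shown in
// this patch; this only illustrates the comparison-key idea.
public abstract class AbstractCommandSenderSketch<S> {
    protected final S delegate;

    protected AbstractCommandSenderSketch(S delegate) {
        this.delegate = delegate;
    }

    protected abstract Object getObjectForComparison();

    @Override
    public boolean equals(Object o) {
        return o instanceof AbstractCommandSenderSketch<?> other
                && getObjectForComparison().equals(other.getObjectForComparison());
    }

    @Override
    public int hashCode() {
        return getObjectForComparison().hashCode();
    }
}
```

Preferring the UUID over the entity means a player who relogs (new entity instance, same profile) still resolves to the same sender.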
- */ - -package me.lucko.spark.forge; - -import me.lucko.spark.common.command.sender.AbstractCommandSender; -import me.lucko.spark.forge.plugin.ForgeSparkPlugin; - -import net.kyori.adventure.text.Component; -import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer; -import net.minecraft.commands.CommandSource; -import net.minecraft.network.chat.Component.Serializer; -import net.minecraft.network.chat.MutableComponent; -import net.minecraft.server.MinecraftServer; -import net.minecraft.server.rcon.RconConsoleSource; -import net.minecraft.world.entity.player.Player; - -import java.util.Objects; -import java.util.UUID; - -public class ForgeCommandSender extends AbstractCommandSender { - private final ForgeSparkPlugin plugin; - - public ForgeCommandSender(CommandSource source, ForgeSparkPlugin plugin) { - super(source); - this.plugin = plugin; - } - - @Override - public String getName() { - if (super.delegate instanceof Player) { - return ((Player) super.delegate).getGameProfile().getName(); - } else if (super.delegate instanceof MinecraftServer) { - return "Console"; - } else if (super.delegate instanceof RconConsoleSource) { - return "RCON Console"; - } else { - return "unknown:" + super.delegate.getClass().getSimpleName(); - } - } - - @Override - public UUID getUniqueId() { - if (super.delegate instanceof Player) { - return ((Player) super.delegate).getUUID(); - } - return null; - } - - @Override - public void sendMessage(Component message) { - MutableComponent component = Serializer.fromJson(GsonComponentSerializer.gson().serialize(message)); - Objects.requireNonNull(component, "component"); - super.delegate.sendSystemMessage(component); - } - - @Override - public boolean hasPermission(String permission) { - return this.plugin.hasPermission(super.delegate, permission); - } -} diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeExtraMetadataProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeExtraMetadataProvider.java deleted file mode 100644 index cac2771ba4d..00000000000 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeExtraMetadataProvider.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
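The ForgeExtraMetadataProvider whose deletion starts here assembled the datapack listing as ad-hoc JsonObjects, exactly like its Fabric counterpart removed earlier. The information is not lost: it moves into WorldInfoProvider#pollDataPacks as structured values (the Fabric implementation appears earlier in this patch; the Forge one is cut off at the end of this excerpt). A sketch of the Forge side under that assumption, reusing the accessors the deleted class already called, and assuming DataPackInfo nests inside WorldInfoProvider as the Fabric hunk suggests:

```java
import java.util.Collection;
import java.util.stream.Collectors;
import me.lucko.spark.common.platform.world.WorldInfoProvider.DataPackInfo;
import net.minecraft.server.packs.repository.PackRepository;
import net.minecraft.server.packs.repository.PackSource;

final class DataPacks {
    private DataPacks() {}

    // JSON blobs -> typed DataPackInfo values (id, description, source),
    // mirroring FabricWorldInfoProvider#pollDataPacks shown earlier.
    static Collection<DataPackInfo> poll(PackRepository packs) {
        return packs.getSelectedPacks().stream()
                .map(pack -> new DataPackInfo(
                        pack.getId(),
                        pack.getDescription().getString(),
                        source(pack.getPackSource())))
                .collect(Collectors.toList());
    }

    // Same source mapping the deleted provider used.
    static String source(PackSource source) {
        if (source == PackSource.DEFAULT) return "none";
        if (source == PackSource.BUILT_IN) return "builtin";
        if (source == PackSource.WORLD) return "world";
        if (source == PackSource.SERVER) return "server";
        return "unknown";
    }
}
```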
- */ - -package me.lucko.spark.forge; - -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; - -import me.lucko.spark.common.platform.MetadataProvider; - -import net.minecraft.server.packs.repository.Pack; -import net.minecraft.server.packs.repository.PackRepository; -import net.minecraft.server.packs.repository.PackSource; - -import java.util.LinkedHashMap; -import java.util.Map; - -public class ForgeExtraMetadataProvider implements MetadataProvider { - - private final PackRepository resourcePackManager; - - public ForgeExtraMetadataProvider(PackRepository resourcePackManager) { - this.resourcePackManager = resourcePackManager; - } - - @Override - public Map get() { - Map metadata = new LinkedHashMap<>(); - metadata.put("datapacks", datapackMetadata()); - return metadata; - } - - private JsonElement datapackMetadata() { - JsonObject datapacks = new JsonObject(); - for (Pack profile : this.resourcePackManager.getSelectedPacks()) { - JsonObject obj = new JsonObject(); - obj.addProperty("name", profile.getTitle().getString()); - obj.addProperty("description", profile.getDescription().getString()); - obj.addProperty("source", resourcePackSource(profile.getPackSource())); - datapacks.add(profile.getId(), obj); - } - return datapacks; - } - - private static String resourcePackSource(PackSource source) { - if (source == PackSource.DEFAULT) { - return "none"; - } else if (source == PackSource.BUILT_IN) { - return "builtin"; - } else if (source == PackSource.WORLD) { - return "world"; - } else if (source == PackSource.SERVER) { - return "server"; - } else { - return "unknown"; - } - } -} diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlatformInfo.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlatformInfo.java index 97b3a86d653..ede1996a253 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlatformInfo.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlatformInfo.java @@ -21,7 +21,9 @@ package me.lucko.spark.forge; import me.lucko.spark.common.platform.PlatformInfo; - +import net.minecraftforge.fml.ModContainer; +import net.minecraftforge.fml.ModList; +import net.minecraftforge.forgespi.language.IModInfo; import net.minecraftforge.versions.forge.ForgeVersion; import net.minecraftforge.versions.mcp.MCPVersion; @@ -42,6 +44,14 @@ public String getName() { return "Forge"; } + @Override + public String getBrand() { + return ModList.get().getModContainerById(ForgeVersion.MOD_ID) + .map(ModContainer::getModInfo) + .map(IModInfo::getDisplayName) + .orElse("Forge"); + } + @Override public String getVersion() { return ForgeVersion.getVersion(); diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlayerPingProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlayerPingProvider.java index f31cc5b2e45..8b96b652d2e 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlayerPingProvider.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlayerPingProvider.java @@ -21,9 +21,7 @@ package me.lucko.spark.forge; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.common.monitor.ping.PlayerPingProvider; - import net.minecraft.server.MinecraftServer; import net.minecraft.server.level.ServerPlayer; @@ -40,7 +38,7 @@ public ForgePlayerPingProvider(MinecraftServer server) { public Map poll() { ImmutableMap.Builder builder = ImmutableMap.builder(); for (ServerPlayer player : this.server.getPlayerList().getPlayers()) { - builder.put(player.getGameProfile().getName(), 
player.latency); + builder.put(player.getGameProfile().getName(), player.connection.latency()); } return builder.build(); } diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerCommandSender.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerCommandSender.java new file mode 100644 index 00000000000..ea6443274fd --- /dev/null +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerCommandSender.java @@ -0,0 +1,85 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.forge; + +import com.google.gson.JsonParseException; +import com.mojang.serialization.JsonOps; +import me.lucko.spark.common.command.sender.AbstractCommandSender; +import me.lucko.spark.forge.plugin.ForgeServerSparkPlugin; +import net.kyori.adventure.text.Component; +import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer; +import net.minecraft.commands.CommandSourceStack; +import net.minecraft.core.RegistryAccess; +import net.minecraft.network.chat.ComponentSerialization; +import net.minecraft.world.entity.Entity; + +import java.util.UUID; + +public class ForgeServerCommandSender extends AbstractCommandSender { + private final ForgeServerSparkPlugin plugin; + + public ForgeServerCommandSender(CommandSourceStack commandSource, ForgeServerSparkPlugin plugin) { + super(commandSource); + this.plugin = plugin; + } + + @Override + public String getName() { + String name = this.delegate.getTextName(); + if (this.delegate.getEntity() != null && name.equals("Server")) { + return "Console"; + } + return name; + } + + @Override + public UUID getUniqueId() { + Entity entity = this.delegate.getEntity(); + return entity != null ? 
entity.getUUID() : null; + } + + @Override + public void sendMessage(Component message) { + net.minecraft.network.chat.Component component = ComponentSerialization.CODEC.decode( + RegistryAccess.EMPTY.createSerializationContext(JsonOps.INSTANCE), + GsonComponentSerializer.gson().serializeToTree(message) + ).getOrThrow(JsonParseException::new).getFirst(); + super.delegate.sendSystemMessage(component); + } + + @Override + public boolean hasPermission(String permission) { + return this.plugin.hasPermission(this.delegate, permission); + } + + @Override + protected Object getObjectForComparison() { + UUID uniqueId = getUniqueId(); + if (uniqueId != null) { + return uniqueId; + } + Entity entity = this.delegate.getEntity(); + if (entity != null) { + return entity; + } + return getName(); + } +} diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java index 6feba526644..c05e7d556b0 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java @@ -22,7 +22,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; - import me.lucko.spark.common.platform.serverconfig.ConfigParser; import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkMod.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkMod.java index 3321d129b24..2076beede64 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkMod.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkMod.java @@ -22,58 +22,45 @@ import me.lucko.spark.forge.plugin.ForgeClientSparkPlugin; import me.lucko.spark.forge.plugin.ForgeServerSparkPlugin; - -import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.event.server.ServerAboutToStartEvent; -import net.minecraftforge.eventbus.api.SubscribeEvent; import net.minecraftforge.fml.IExtensionPoint; import net.minecraftforge.fml.ModContainer; -import net.minecraftforge.fml.ModLoadingContext; import net.minecraftforge.fml.common.Mod; import net.minecraftforge.fml.event.lifecycle.FMLClientSetupEvent; -import net.minecraftforge.fml.event.lifecycle.FMLCommonSetupEvent; import net.minecraftforge.fml.javafmlmod.FMLJavaModLoadingContext; import net.minecraftforge.fml.loading.FMLPaths; -import net.minecraftforge.network.NetworkConstants; import java.nio.file.Path; @Mod("spark") public class ForgeSparkMod { - private ModContainer container; - private Path configDirectory; + private final ModContainer container; + private final Path configDirectory; + + public ForgeSparkMod(FMLJavaModLoadingContext ctx) { + this.container = ctx.getContainer(); + this.configDirectory = FMLPaths.CONFIGDIR.get().resolve(this.container.getModId()); - public ForgeSparkMod() { - FMLJavaModLoadingContext.get().getModEventBus().addListener(this::setup); - FMLJavaModLoadingContext.get().getModEventBus().addListener(this::clientInit); - MinecraftForge.EVENT_BUS.register(this); + FMLClientSetupEvent.getBus(ctx.getModBusGroup()).addListener(this::clientInit); + ctx.registerDisplayTest(IExtensionPoint.DisplayTest.IGNORE_ALL_VERSION); - ModLoadingContext.get().registerExtensionPoint(IExtensionPoint.DisplayTest.class, () -> new IExtensionPoint.DisplayTest(() -> 
NetworkConstants.IGNORESERVERONLY, (a, b) -> true)); + ServerAboutToStartEvent.BUS.addListener(this::serverInit); } public String getVersion() { return this.container.getModInfo().getVersion().toString(); } - public void setup(FMLCommonSetupEvent e) { - this.container = ModLoadingContext.get().getActiveContainer(); - this.configDirectory = FMLPaths.CONFIGDIR.get().resolve(this.container.getModId()); - } - public void clientInit(FMLClientSetupEvent e) { ForgeClientSparkPlugin.register(this, e); } - @SubscribeEvent public void serverInit(ServerAboutToStartEvent e) { ForgeServerSparkPlugin.register(this, e); } public Path getConfigDirectory() { - if (this.configDirectory == null) { - throw new IllegalStateException("Config directory not set"); - } return this.configDirectory; } } diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickHook.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickHook.java index 36e70b0588c..ee9e5885777 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickHook.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickHook.java @@ -22,39 +22,40 @@ import me.lucko.spark.common.tick.AbstractTickHook; import me.lucko.spark.common.tick.TickHook; - -import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.event.TickEvent; -import net.minecraftforge.eventbus.api.SubscribeEvent; +import net.minecraftforge.eventbus.api.bus.EventBus; +import net.minecraftforge.eventbus.api.listener.EventListener; + +import java.util.Objects; public class ForgeTickHook extends AbstractTickHook implements TickHook { - private final TickEvent.Type type; + private final EventBus bus; + private EventListener listener; public ForgeTickHook(TickEvent.Type type) { - this.type = type; + this.bus = switch (type) { + case CLIENT -> TickEvent.ClientTickEvent.Pre.BUS; + case SERVER -> TickEvent.ServerTickEvent.Pre.BUS; + default -> null; + }; + Objects.requireNonNull(this.bus, "bus"); } - @SubscribeEvent public void onTick(TickEvent e) { - if (e.phase != TickEvent.Phase.START) { - return; - } - - if (e.type != this.type) { - return; - } - onTick(); } @Override public void start() { - MinecraftForge.EVENT_BUS.register(this); + this.listener = this.bus.addListener(this::onTick); } @Override public void close() { - MinecraftForge.EVENT_BUS.unregister(this); + if (this.listener != null) { + this.bus.removeListener(this.listener); + this.listener = null; + } } } diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickReporter.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickReporter.java index c33443b2bd9..8ddfd8fb121 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickReporter.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickReporter.java @@ -22,39 +22,58 @@ import me.lucko.spark.common.tick.SimpleTickReporter; import me.lucko.spark.common.tick.TickReporter; - -import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.event.TickEvent; -import net.minecraftforge.eventbus.api.SubscribeEvent; +import net.minecraftforge.eventbus.api.bus.EventBus; +import net.minecraftforge.eventbus.api.listener.EventListener; + +import java.util.Objects; public class ForgeTickReporter extends SimpleTickReporter implements TickReporter { - private final TickEvent.Type type; + private final EventBus preBus; + private final EventBus postBus; + + private EventListener preListener; + private EventListener postListener; public ForgeTickReporter(TickEvent.Type type) { - this.type = type; + 
this.preBus = switch (type) { + case CLIENT -> TickEvent.ClientTickEvent.Pre.BUS; + case SERVER -> TickEvent.ServerTickEvent.Pre.BUS; + default -> null; + }; + this.postBus = switch (type) { + case CLIENT -> TickEvent.ClientTickEvent.Post.BUS; + case SERVER -> TickEvent.ServerTickEvent.Post.BUS; + default -> null; + }; + Objects.requireNonNull(this.preBus, "preBus"); + Objects.requireNonNull(this.postBus, "postBus"); } - @SubscribeEvent - public void onTick(TickEvent e) { - if (e.type != this.type) { - return; - } + public void onStart(TickEvent e) { + onStart(); + } - switch (e.phase) { - case START -> onStart(); - case END -> onEnd(); - default -> throw new AssertionError(e.phase); - } + public void onEnd(TickEvent e) { + onEnd(); } @Override public void start() { - MinecraftForge.EVENT_BUS.register(this); + this.preListener = this.preBus.addListener(this::onStart); + this.postListener = this.postBus.addListener(this::onEnd); } @Override public void close() { - MinecraftForge.EVENT_BUS.unregister(this); + if (this.preListener != null) { + this.preBus.removeListener(this.preListener); + this.preListener = null; + } + if (this.postListener != null) { + this.postBus.removeListener(this.postListener); + this.postListener = null; + } super.close(); } diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java index 4750c0892f6..a87aa0350e4 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java @@ -20,45 +20,56 @@ package me.lucko.spark.forge; -import it.unimi.dsi.fastutil.longs.LongIterator; -import it.unimi.dsi.fastutil.longs.LongSet; - +import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap; import me.lucko.spark.common.platform.world.AbstractChunkInfo; import me.lucko.spark.common.platform.world.CountMap; import me.lucko.spark.common.platform.world.WorldInfoProvider; - import net.minecraft.client.Minecraft; import net.minecraft.client.multiplayer.ClientLevel; import net.minecraft.server.MinecraftServer; import net.minecraft.server.level.ServerLevel; +import net.minecraft.server.packs.repository.PackRepository; +import net.minecraft.server.packs.repository.PackSource; import net.minecraft.world.entity.Entity; import net.minecraft.world.entity.EntityType; import net.minecraft.world.level.ChunkPos; +import net.minecraft.world.level.GameRules; import net.minecraft.world.level.entity.EntityLookup; -import net.minecraft.world.level.entity.EntitySection; -import net.minecraft.world.level.entity.EntitySectionStorage; import net.minecraft.world.level.entity.PersistentEntitySectionManager; import net.minecraft.world.level.entity.TransientEntitySectionManager; -import java.util.ArrayList; +import java.util.Collection; import java.util.HashMap; import java.util.List; -import java.util.stream.Stream; +import java.util.stream.Collectors; public abstract class ForgeWorldInfoProvider implements WorldInfoProvider { - protected List getChunksFromCache(EntitySectionStorage cache) { - LongSet loadedChunks = cache.getAllChunksWithExistingSections(); - List list = new ArrayList<>(loadedChunks.size()); - - for (LongIterator iterator = loadedChunks.iterator(); iterator.hasNext(); ) { - long chunkPos = iterator.nextLong(); - Stream> sections = cache.getExistingSectionsInChunk(chunkPos); + protected abstract PackRepository getPackRepository(); + + @Override + public Collection pollDataPacks() { + 
return getPackRepository().getSelectedPacks().stream() + .map(pack -> new DataPackInfo( + pack.getId(), + pack.getDescription().getString(), + resourcePackSource(pack.getPackSource()) + )) + .collect(Collectors.toList()); + } - list.add(new ForgeChunkInfo(chunkPos, sections)); + private static String resourcePackSource(PackSource source) { + if (source == PackSource.DEFAULT) { + return "none"; + } else if (source == PackSource.BUILT_IN) { + return "builtin"; + } else if (source == PackSource.WORLD) { + return "world"; + } else if (source == PackSource.SERVER) { + return "server"; + } else { + return "unknown"; } - - return list; } public static final class Server extends ForgeWorldInfoProvider { @@ -90,15 +101,47 @@ public ChunksResult pollChunks() { ChunksResult data = new ChunksResult<>(); for (ServerLevel level : this.server.getAllLevels()) { - PersistentEntitySectionManager entityManager = level.entityManager; - EntitySectionStorage cache = entityManager.sectionStorage; + Long2ObjectOpenHashMap levelInfos = new Long2ObjectOpenHashMap<>(); + + for (Entity entity : level.getEntities().getAll()) { + ForgeChunkInfo info = levelInfos.computeIfAbsent( + entity.chunkPosition().toLong(), ForgeChunkInfo::new); + info.entityCounts.increment(entity.getType()); + } - List list = getChunksFromCache(cache); - data.put(level.dimension().location().getPath(), list); + data.put(level.dimension().location().getPath(), List.copyOf(levelInfos.values())); } return data; } + + @Override + public GameRulesResult pollGameRules() { + GameRulesResult data = new GameRulesResult(); + Iterable levels = this.server.getAllLevels(); + + for (ServerLevel level : levels) { + String levelName = level.dimension().location().getPath(); + + level.getGameRules().visitGameRuleTypes(new GameRules.GameRuleTypeVisitor() { + @Override + public > void visit(GameRules.Key key, GameRules.Type type) { + String defaultValue = type.createRule().serialize(); + data.putDefault(key.getId(), defaultValue); + + String value = level.getGameRules().getRule(key).serialize(); + data.put(key.getId(), levelName, value); + } + }); + } + + return data; + } + + @Override + protected PackRepository getPackRepository() { + return this.server.getPackRepository(); + } } public static final class Client extends ForgeWorldInfoProvider { @@ -126,37 +169,44 @@ public CountsResult pollCounts() { @Override public ChunksResult pollChunks() { - ChunksResult data = new ChunksResult<>(); - ClientLevel level = this.client.level; if (level == null) { return null; } - TransientEntitySectionManager entityManager = level.entityStorage; - EntitySectionStorage cache = entityManager.sectionStorage; + ChunksResult data = new ChunksResult<>(); + + Long2ObjectOpenHashMap levelInfos = new Long2ObjectOpenHashMap<>(); + + for (Entity entity : level.getEntities().getAll()) { + ForgeChunkInfo info = levelInfos.computeIfAbsent(entity.chunkPosition().toLong(), ForgeChunkInfo::new); + info.entityCounts.increment(entity.getType()); + } - List list = getChunksFromCache(cache); - data.put(level.dimension().location().getPath(), list); + data.put(level.dimension().location().getPath(), List.copyOf(levelInfos.values())); return data; } + + @Override + public GameRulesResult pollGameRules() { + // Not available on client since 24w39a + return null; + } + + @Override + protected PackRepository getPackRepository() { + return this.client.getResourcePackRepository(); + } } - static final class ForgeChunkInfo extends AbstractChunkInfo> { + public static final class ForgeChunkInfo extends 
AbstractChunkInfo> { private final CountMap> entityCounts; - ForgeChunkInfo(long chunkPos, Stream> entities) { + ForgeChunkInfo(long chunkPos) { super(ChunkPos.getX(chunkPos), ChunkPos.getZ(chunkPos)); this.entityCounts = new CountMap.Simple<>(new HashMap<>()); - entities.forEach(section -> { - if (section.getStatus().isAccessible()) { - section.getEntities().forEach(entity -> - this.entityCounts.increment(entity.getType()) - ); - } - }); } @Override diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java index a8c7c92e771..5bf312cd50b 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java @@ -26,30 +26,30 @@ import com.mojang.brigadier.suggestion.SuggestionProvider; import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; - -import me.lucko.spark.common.platform.MetadataProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; -import me.lucko.spark.forge.ForgeCommandSender; -import me.lucko.spark.forge.ForgeExtraMetadataProvider; +import me.lucko.spark.forge.ForgeClientCommandSender; import me.lucko.spark.forge.ForgePlatformInfo; import me.lucko.spark.forge.ForgeSparkMod; import me.lucko.spark.forge.ForgeTickHook; import me.lucko.spark.forge.ForgeTickReporter; import me.lucko.spark.forge.ForgeWorldInfoProvider; - import net.minecraft.client.Minecraft; -import net.minecraft.commands.CommandSource; import net.minecraft.commands.CommandSourceStack; +import net.minecraftforge.client.ClientCommandHandler; import net.minecraftforge.client.event.RegisterClientCommandsEvent; -import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.event.TickEvent; -import net.minecraftforge.eventbus.api.SubscribeEvent; +import net.minecraftforge.eventbus.api.bus.BusGroup; +import net.minecraftforge.eventbus.api.listener.EventListener; +import net.minecraftforge.eventbus.api.listener.SubscribeEvent; import net.minecraftforge.fml.event.lifecycle.FMLClientSetupEvent; +import java.lang.invoke.MethodHandles; +import java.util.Collection; +import java.util.Collections; import java.util.concurrent.CompletableFuture; import java.util.stream.Stream; @@ -62,6 +62,7 @@ public static void register(ForgeSparkMod mod, FMLClientSetupEvent event) { private final Minecraft minecraft; private final ThreadDumper gameThreadDumper; + private Collection listeners = Collections.emptyList(); public ForgeClientSparkPlugin(ForgeSparkMod mod, Minecraft minecraft) { super(mod); @@ -74,7 +75,18 @@ public void enable() { super.enable(); // register listeners - MinecraftForge.EVENT_BUS.register(this); + this.listeners = BusGroup.DEFAULT.register(MethodHandles.lookup(), this); + } + + @Override + public void disable() { + super.disable(); + + // unregister listeners + if (!this.listeners.isEmpty()) { + BusGroup.DEFAULT.unregister(this.listeners); + } + this.listeners = Collections.emptyList(); } @SubscribeEvent @@ -89,7 +101,7 @@ public int run(CommandContext context) throws CommandSyntaxE return 0; } - this.platform.executeCommand(new ForgeCommandSender(context.getSource().getEntity(), this), args); + 
this.platform.executeCommand(new ForgeClientCommandSender(context.getSource()), args); return Command.SINGLE_SUCCESS; } @@ -100,17 +112,12 @@ public CompletableFuture getSuggestions(CommandContext getCommandSenders() { - return Stream.of(new ForgeCommandSender(this.minecraft.player, this)); + public Stream getCommandSenders() { + return Stream.of(new ForgeClientCommandSender(ClientCommandHandler.getSource())); } @Override @@ -138,11 +145,6 @@ public WorldInfoProvider createWorldInfoProvider() { return new ForgeWorldInfoProvider.Client(this.minecraft); } - @Override - public MetadataProvider createExtraMetadataProvider() { - return new ForgeExtraMetadataProvider(this.minecraft.getResourcePackRepository()); - } - @Override public PlatformInfo getPlatformInfo() { return new ForgePlatformInfo(PlatformInfo.Type.CLIENT); diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java index 87370579885..38219bdf4df 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java @@ -28,41 +28,39 @@ import com.mojang.brigadier.suggestion.SuggestionProvider; import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; - import me.lucko.spark.common.monitor.ping.PlayerPingProvider; -import me.lucko.spark.common.platform.MetadataProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; -import me.lucko.spark.forge.ForgeCommandSender; -import me.lucko.spark.forge.ForgeExtraMetadataProvider; import me.lucko.spark.forge.ForgePlatformInfo; import me.lucko.spark.forge.ForgePlayerPingProvider; +import me.lucko.spark.forge.ForgeServerCommandSender; import me.lucko.spark.forge.ForgeServerConfigProvider; import me.lucko.spark.forge.ForgeSparkMod; import me.lucko.spark.forge.ForgeTickHook; import me.lucko.spark.forge.ForgeTickReporter; import me.lucko.spark.forge.ForgeWorldInfoProvider; - -import net.minecraft.commands.CommandSource; import net.minecraft.commands.CommandSourceStack; import net.minecraft.server.MinecraftServer; import net.minecraft.server.level.ServerPlayer; -import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.event.RegisterCommandsEvent; import net.minecraftforge.event.TickEvent; import net.minecraftforge.event.server.ServerAboutToStartEvent; import net.minecraftforge.event.server.ServerStoppingEvent; -import net.minecraftforge.eventbus.api.SubscribeEvent; +import net.minecraftforge.eventbus.api.bus.BusGroup; +import net.minecraftforge.eventbus.api.listener.EventListener; +import net.minecraftforge.eventbus.api.listener.SubscribeEvent; import net.minecraftforge.server.permission.PermissionAPI; import net.minecraftforge.server.permission.events.PermissionGatherEvent; import net.minecraftforge.server.permission.nodes.PermissionNode; import net.minecraftforge.server.permission.nodes.PermissionNode.PermissionResolver; import net.minecraftforge.server.permission.nodes.PermissionTypes; +import java.lang.invoke.MethodHandles; +import java.util.Collection; import java.util.Collections; import java.util.List; import 
java.util.Map; @@ -94,6 +92,7 @@ public static void register(ForgeSparkMod mod, ServerAboutToStartEvent event) { private final MinecraftServer server; private final ThreadDumper gameThreadDumper; private Map> registeredPermissions = Collections.emptyMap(); + private Collection listeners = Collections.emptyList(); public ForgeServerSparkPlugin(ForgeSparkMod mod, MinecraftServer server) { super(mod); @@ -109,7 +108,7 @@ public void enable() { registerCommands(this.server.getCommands().getDispatcher()); // register listeners - MinecraftForge.EVENT_BUS.register(this); + this.listeners = BusGroup.DEFAULT.register(MethodHandles.lookup(), this); } @Override @@ -117,7 +116,10 @@ public void disable() { super.disable(); // unregister listeners - MinecraftForge.EVENT_BUS.unregister(this); + if (!this.listeners.isEmpty()) { + BusGroup.DEFAULT.unregister(this.listeners); + } + this.listeners = Collections.emptyList(); } @SubscribeEvent @@ -177,8 +179,7 @@ public int run(CommandContext context) throws CommandSyntaxE return 0; } - CommandSource source = context.getSource().getEntity() != null ? context.getSource().getEntity() : context.getSource().getServer(); - this.platform.executeCommand(new ForgeCommandSender(source, this), args); + this.platform.executeCommand(new ForgeServerCommandSender(context.getSource(), this), args); return Command.SINGLE_SUCCESS; } @@ -189,12 +190,12 @@ public CompletableFuture getSuggestions(CommandContext getCommandSenders() { + public Stream getCommandSenders() { return Stream.concat( - this.server.getPlayerList().getPlayers().stream(), - Stream.of(this.server) - ).map(sender -> new ForgeCommandSender(sender, this)); + this.server.getPlayerList().getPlayers().stream().map(ServerPlayer::createCommandSourceStack), + Stream.of(this.server.createCommandSourceStack()) + ).map(stack -> new ForgeServerCommandSender(stack, this)); } @Override @@ -247,11 +248,6 @@ public ServerConfigProvider createServerConfigProvider() { return new ForgeServerConfigProvider(); } - @Override - public MetadataProvider createExtraMetadataProvider() { - return new ForgeExtraMetadataProvider(this.server.getPackRepository()); - } - @Override public WorldInfoProvider createWorldInfoProvider() { return new ForgeWorldInfoProvider.Server(this.server); diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java index 56061b9c88d..a543bfe1af4 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java @@ -30,7 +30,6 @@ import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; import com.mojang.brigadier.tree.LiteralCommandNode; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.command.sender.CommandSender; @@ -39,11 +38,8 @@ import me.lucko.spark.common.util.SparkThreadFactory; import me.lucko.spark.forge.ForgeClassSourceLookup; import me.lucko.spark.forge.ForgeSparkMod; - -import net.minecraft.commands.CommandSource; import net.minecraftforge.fml.ModList; import net.minecraftforge.forgespi.language.IModInfo; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -79,8 +75,6 @@ public void disable() { this.scheduler.shutdown(); } - public abstract boolean hasPermission(CommandSource sender, String permission); - @Override public String 
getVersion() { return this.mod.getVersion(); @@ -98,14 +92,23 @@ public void executeAsync(Runnable task) { @Override public void log(Level level, String msg) { - if (level == Level.INFO) { - this.logger.info(msg); - } else if (level == Level.WARNING) { - this.logger.warn(msg); - } else if (level == Level.SEVERE) { + if (level.intValue() >= 1000) { // severe this.logger.error(msg); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg); + } else { + this.logger.info(msg); + } + } + + @Override + public void log(Level level, String msg, Throwable throwable) { + if (level.intValue() >= 1000) { // severe + this.logger.error(msg, throwable); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg, throwable); } else { - throw new IllegalArgumentException(level.getName()); + this.logger.info(msg, throwable); } } @@ -120,7 +123,8 @@ public Collection getKnownSources() { ModList.get().getMods(), IModInfo::getModId, mod -> mod.getVersion().toString(), - mod -> null // ? + mod -> null, // ? + IModInfo::getDescription ); } diff --git a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg index 2699a0ebb77..37722747ddc 100644 --- a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg +++ b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg @@ -5,3 +5,4 @@ public net.minecraft.client.multiplayer.ClientLevel f_171631_ # entityStorage public net.minecraft.world.level.entity.TransientEntitySectionManager f_157638_ # sectionStorage public net.minecraft.world.level.entity.TransientEntitySectionManager f_157637_ # entityStorage public net.minecraft.client.Minecraft f_91018_ # gameThread +public net.minecraft.client.multiplayer.ClientLevel m_142646_()Lnet/minecraft/world/level/entity/LevelEntityGetter; # getEntities diff --git a/spark-forge1710/build.gradle b/spark-forge1710/build.gradle index f7c57244663..54c73f11870 100644 --- a/spark-forge1710/build.gradle +++ b/spark-forge1710/build.gradle @@ -1,32 +1,29 @@ -buildscript { - repositories { - mavenCentral() - maven { url = "https://maven.minecraftforge.net" } - } - dependencies { - classpath ('com.anatawa12.forge:ForgeGradle:1.2-1.0.+') { - changing = true - } - } -} +import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar +import com.gtnewhorizons.retrofuturagradle.mcp.ReobfuscatedJar plugins { - id 'com.github.johnrengelman.shadow' version '7.0.0' + id 'com.gradleup.shadow' version '8.3.8' + id 'com.gtnewhorizons.retrofuturagradle' version '1.4.5' } -apply plugin: 'forge' - -// These settings allow you to choose what version of Java you want to be compatible with. Forge 1.7.10 runs on Java 6 to 8. 
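A note on the log(Level, String) rewrite in ForgeSparkPlugin above: comparing Level.intValue() against thresholds means levels other than INFO/WARNING/SEVERE (for example Level.CONFIG, or custom levels) now fall through to a sensible logger call instead of throwing IllegalArgumentException. A minimal standalone sketch of the same mapping (LevelBridge is a hypothetical helper, not part of this patch; the thresholds are the documented java.util.logging constants, SEVERE = 1000, WARNING = 900, INFO = 800):

import java.util.logging.Level;
import org.apache.logging.log4j.Logger;

final class LevelBridge {
    // Mirrors the threshold mapping above: >= 1000 -> error, >= 900 -> warn, else info.
    static void log(Logger logger, Level level, String msg) {
        if (level.intValue() >= Level.SEVERE.intValue()) {
            logger.error(msg);
        } else if (level.intValue() >= Level.WARNING.intValue()) {
            logger.warn(msg);
        } else {
            logger.info(msg);
        }
    }
}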
-sourceCompatibility = 1.8 -targetCompatibility = 1.8 +java { + toolchain { + languageVersion.set(JavaLanguageVersion.of(8)) + } +} minecraft { - version = "1.7.10-10.13.4.1614-1.7.10" - runDir = "run" - mappings = 'stable_12' - - replaceIn 'src/main/java/me/lucko/spark/forge/Forge1710SparkMod.java' - replace "@version@", project.pluginVersion + mcVersion = "1.7.10" + + injectedTags.put("VERSION", project.pluginVersion) +} + +tasks.injectTags.configure { + outputClassName.set("me.lucko.spark.forge.Tags") +} + +mcpTasks { + deobfuscationATs.from(file("src/main/resources/META-INF/spark_at.cfg")) } configurations { @@ -34,13 +31,6 @@ configurations { implementation.extendsFrom shade } -// https://github.com/MinecraftForge/ForgeGradle/issues/627#issuecomment-533927535 -configurations.all { - resolutionStrategy { - force 'org.lwjgl.lwjgl:lwjgl-platform:2.9.4-nightly-20150209' - } -} - dependencies { shade project(':spark-common') } @@ -62,7 +52,7 @@ jar { } shadowJar { - archiveName = 'spark-forge1710.jar' + archiveClassifier = "fat-dev" configurations = [project.configurations.shade] relocate 'okio', 'me.lucko.spark.lib.okio' @@ -82,13 +72,7 @@ shadowJar { exclude 'META-INF/proguard/**' } -reobf.reobf(shadowJar) { spec -> - spec.classpath = sourceSets.main.compileClasspath; -} - -artifacts { - archives shadowJar - shadow shadowJar +tasks.named("reobfJar", ReobfuscatedJar) { + archiveFileName = "spark-${project.pluginVersion}-forge-1.7.10.jar" + inputJar.set(tasks.named("shadowJar", ShadowJar).map { it.archiveFile }.get()) } - -build.dependsOn(shadowJar) diff --git a/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710CommandSender.java b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710CommandSender.java index 71344a3f855..c35611becae 100644 --- a/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710CommandSender.java +++ b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710CommandSender.java @@ -24,15 +24,14 @@ import me.lucko.spark.forge.plugin.Forge1710SparkPlugin; import net.kyori.adventure.text.Component; import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer; -import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer; import net.minecraft.command.ICommandSender; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.network.rcon.RConConsoleSource; import net.minecraft.server.MinecraftServer; -import net.minecraft.util.ChatComponentText; import net.minecraft.util.IChatComponent; -import net.minecraftforge.common.ForgeHooks; +import java.util.ArrayList; +import java.util.List; import java.util.UUID; public class Forge1710CommandSender extends AbstractCommandSender { @@ -64,23 +63,40 @@ public UUID getUniqueId() { return null; } + private static List splitOnNewline(Component root) { + List lines = new ArrayList<>(); + List current = new ArrayList<>(); + + splitRecursive(root, current, lines); + + if (!current.isEmpty()) { + lines.add(Component.empty().children(current)); + } + + return lines; + } + + private static void splitRecursive(Component comp, List current, List lines) { + if (comp.equals(Component.newline())) { + // flush current line + lines.add(Component.empty().children(current)); + current.clear(); + return; + } + + // copy the component but recurse into its children + List newChildren = new ArrayList<>(); + for (Component child : comp.children()) { + splitRecursive(child, newChildren, lines); + } + + current.add(comp.children(newChildren)); + } + @Override public void sendMessage(Component 
message) { - /* - * Due to limitations in 1.7.10, messages with \n render incorrectly on the client. - * To work around this, we convert the message to a string first, split it by newline, - * and send each line individually. - * - * This adds a performance penalty, but avoids any weirdness with this old client. - */ - LegacyComponentSerializer serializer = LegacyComponentSerializer.builder() - .character(LegacyComponentSerializer.SECTION_CHAR) - .extractUrls() - .build(); - String output = serializer.serialize(message); - for(String line : output.split("\n")) { - Component deserialized = serializer.deserialize(line); - IChatComponent mcComponent = IChatComponent.Serializer.jsonToComponent(GsonComponentSerializer.gson().serialize(deserialized)); + for (Component line : splitOnNewline(message)) { + IChatComponent mcComponent = IChatComponent.Serializer.func_150699_a(GsonComponentSerializer.colorDownsamplingGson().serialize(line)); super.delegate.addChatMessage(mcComponent); } } diff --git a/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710PlatformInfo.java b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710PlatformInfo.java index d9c83516e12..9c5b54fa0e2 100644 --- a/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710PlatformInfo.java +++ b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710PlatformInfo.java @@ -40,6 +40,11 @@ public String getName() { return "Forge"; } + @Override + public String getBrand() { + return "Forge"; + } + @Override public String getVersion() { return ForgeVersion.getVersion(); diff --git a/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710SparkMod.java b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710SparkMod.java index 5cdd62e3152..b9161b02ad6 100644 --- a/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710SparkMod.java +++ b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710SparkMod.java @@ -36,7 +36,7 @@ @Mod( modid = "spark", name = "spark", - version = "@version@", + version = Tags.VERSION, acceptableRemoteVersions = "*" ) public class Forge1710SparkMod { diff --git a/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710WorldInfoProvider.java b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710WorldInfoProvider.java index 232f88b82df..7d6b4ba51c2 100644 --- a/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710WorldInfoProvider.java +++ b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710WorldInfoProvider.java @@ -33,15 +33,42 @@ import net.minecraft.entity.Entity; import net.minecraft.entity.EntityList; import net.minecraft.server.MinecraftServer; +import net.minecraft.world.GameRules; import net.minecraft.world.WorldServer; import net.minecraft.world.chunk.Chunk; import net.minecraft.world.chunk.IChunkProvider; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; +import java.util.*; +import java.util.stream.Collectors; public abstract class Forge1710WorldInfoProvider implements WorldInfoProvider { + @Override + public Collection pollDataPacks() { + return Collections.emptyList(); + } + + protected void encodeGameRules(GameRulesResult result, GameRules rules, String worldName) { + for (String rule : rules.getRules()) { + if (rule == null) { + continue; + } + String value = rules.getGameRuleStringValue(rule); + if (value != null) { + result.put(rule, worldName, value); + } + } + + } + + protected void setDefaultGameRules(GameRulesResult result) { + GameRules vanillaRules = new GameRules(); + + 
result.getRules().entrySet().stream().filter(entry -> entry.getValue().getDefaultValue() == null).map(Map.Entry::getKey).collect(Collectors.toList()).forEach(rule -> { + String def = vanillaRules.getGameRuleStringValue(rule); + result.putDefault(rule, def != null ? def : ""); + }); + } + public static final class Server extends Forge1710WorldInfoProvider { private final MinecraftServer server; @@ -77,6 +104,19 @@ public CountsResult pollCounts() { return new CountsResult(players, entities, -1, chunks); } + + @Override + public GameRulesResult pollGameRules() { + GameRulesResult data = new GameRulesResult(); + + for (WorldServer world : server.worldServers) { + encodeGameRules(data, world.getGameRules(), world.provider.getDimensionName()); + } + + setDefaultGameRules(data); + + return data; + } } @SideOnly(Side.CLIENT) @@ -119,6 +159,19 @@ public CountsResult pollCounts() { return new CountsResult(-1, level.loadedEntityList.size(), -1, level.getChunkProvider().getLoadedChunkCount()); } + + @Override + public GameRulesResult pollGameRules() { + WorldClient world = Minecraft.getMinecraft().theWorld; + + GameRulesResult data = new GameRulesResult(); + + encodeGameRules(data, world.getGameRules(), world.provider.getDimensionName()); + + setDefaultGameRules(data); + + return data; + } } static final class ForgeChunkInfo extends AbstractChunkInfo> { diff --git a/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ClientSparkPlugin.java b/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ClientSparkPlugin.java index 446a0c97238..4d02a0fff5d 100644 --- a/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ClientSparkPlugin.java +++ b/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ClientSparkPlugin.java @@ -20,6 +20,7 @@ package me.lucko.spark.forge.plugin; +import com.google.common.util.concurrent.ListenableFuture; import cpw.mods.fml.common.gameevent.TickEvent; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider; @@ -32,6 +33,7 @@ import net.minecraftforge.client.ClientCommandHandler; import net.minecraftforge.common.MinecraftForge; +import java.util.concurrent.ExecutionException; import java.util.stream.Stream; public class Forge1710ClientSparkPlugin extends Forge1710SparkPlugin { @@ -44,7 +46,7 @@ public static void register(Forge1710SparkMod mod) { MinecraftForge.EVENT_BUS.register(plugin); // register commands - ClientCommandHandler.instance.registerCommand(plugin); + ClientCommandHandler.instance.registerCommand(plugin.new VanillaCommand()); } private final Minecraft minecraft; @@ -53,7 +55,7 @@ public static void register(Forge1710SparkMod mod) { public Forge1710ClientSparkPlugin(Forge1710SparkMod mod, Minecraft minecraft) { super(mod); this.minecraft = minecraft; - this.gameThreadDumper = new ThreadDumper.Specific(minecraft.mcThread); + this.gameThreadDumper = new ThreadDumper.Specific(minecraft.field_152352_aC); } @Override @@ -88,7 +90,12 @@ public WorldInfoProvider createWorldInfoProvider() { @Override public void executeSync(Runnable task) { - this.minecraft.addScheduledTask(task); + ListenableFuture future = this.minecraft.func_152344_a(task); + try { + future.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } } @Override diff --git a/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ServerSparkPlugin.java 
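A note on the executeSync change just above: func_152344_a is the 1.7.10 equivalent of addScheduledTask, returning a Guava ListenableFuture, so blocking on get() makes the scheduled task effectively synchronous (the vanilla implementation appears to run the task inline when already called from the main thread, which should avoid self-deadlock). The same wait-for-the-main-thread shape with plain JDK types (MainThreadSync and its names are illustrative, not spark API):

import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.FutureTask;

final class MainThreadSync {
    // Submit the task to the main-thread executor and block until it completes,
    // rethrowing failures, the same shape as blocking on ListenableFuture#get().
    static void runSync(Executor mainThread, Runnable task) {
        FutureTask<Void> future = new FutureTask<>(task, null);
        mainThread.execute(future);
        try {
            future.get();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        } catch (ExecutionException e) {
            throw new RuntimeException(e.getCause());
        }
    }
}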
b/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ServerSparkPlugin.java index 03f55d78537..902b15a774d 100644 --- a/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ServerSparkPlugin.java +++ b/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ServerSparkPlugin.java @@ -52,7 +52,7 @@ public static Forge1710ServerSparkPlugin register(Forge1710SparkMod mod, FMLServ FMLCommonHandler.instance().bus().register(plugin); // register commands & permissions - event.registerServerCommand(plugin); + event.registerServerCommand(plugin.new VanillaCommand()); return plugin; } diff --git a/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710SparkPlugin.java b/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710SparkPlugin.java index 26e17522763..cf605c38783 100644 --- a/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710SparkPlugin.java +++ b/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710SparkPlugin.java @@ -23,6 +23,7 @@ import cpw.mods.fml.common.FMLCommonHandler; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; +import me.lucko.spark.common.util.SparkThreadFactory; import me.lucko.spark.forge.Forge1710CommandSender; import me.lucko.spark.forge.Forge1710SparkMod; import net.minecraft.command.ICommand; @@ -38,26 +39,21 @@ import java.util.concurrent.ScheduledExecutorService; import java.util.logging.Level; -public abstract class Forge1710SparkPlugin implements SparkPlugin, ICommand { +public abstract class Forge1710SparkPlugin implements SparkPlugin { private final Forge1710SparkMod mod; private final Logger logger; protected final ScheduledExecutorService scheduler; - protected final SparkPlatform platform; + protected SparkPlatform platform; protected Forge1710SparkPlugin(Forge1710SparkMod mod) { this.mod = mod; this.logger = LogManager.getLogger("spark"); - this.scheduler = Executors.newSingleThreadScheduledExecutor(r -> { - Thread thread = Executors.defaultThreadFactory().newThread(r); - thread.setName("spark-forge-async-worker"); - thread.setDaemon(true); - return thread; - }); - this.platform = new SparkPlatform(this); + this.scheduler = Executors.newScheduledThreadPool(4, new SparkThreadFactory()); } public void enable() { + this.platform = new SparkPlatform(this); this.platform.enable(); } @@ -85,61 +81,71 @@ public void executeAsync(Runnable task) { @Override public void log(Level level, String msg) { - if (level == Level.INFO) { - this.logger.info(msg); - } else if (level == Level.WARNING) { - this.logger.warn(msg); - } else if (level == Level.SEVERE) { + if (level.intValue() >= 1000) { // severe this.logger.error(msg); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg); } else { - throw new IllegalArgumentException(level.getName()); + this.logger.info(msg); } } - // implement ICommand - @Override - public String getCommandName() { - return getCommandName(); + public void log(Level level, String msg, Throwable throwable) { + if (level.intValue() >= 1000) { // severe + this.logger.error(msg, throwable); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg, throwable); + } else { + this.logger.info(msg, throwable); + } } - @Override - public String getCommandUsage(ICommandSender iCommandSender) { - return "/" + getCommandName(); - } + // implement ICommand - @Override - public List getCommandAliases() { - return Collections.singletonList(getCommandName()); + protected boolean isOp(EntityPlayer 
player) { + return FMLCommonHandler.instance().getMinecraftServerInstance().getConfigurationManager().func_152596_g(player.getGameProfile()); } - @Override - public void processCommand(ICommandSender sender, String[] args) { - this.platform.executeCommand(new Forge1710CommandSender(sender, this), args); - } + public class VanillaCommand implements ICommand { + @Override + public String getCommandName() { + return Forge1710SparkPlugin.this.getCommandName(); + } - @Override - public List addTabCompletionOptions(ICommandSender sender, String[] args) { - return this.platform.tabCompleteCommand(new Forge1710CommandSender(sender, this), args); - } + @Override + public String getCommandUsage(ICommandSender iCommandSender) { + return "/" + getCommandName(); + } - @Override - public boolean canCommandSenderUseCommand(ICommandSender sender) { - return this.platform.hasPermissionForAnyCommand(new Forge1710CommandSender(sender, this)); - } + @Override + public List getCommandAliases() { + return Collections.singletonList(getCommandName()); + } - @Override - public boolean isUsernameIndex(String[] strings, int i) { - return false; - } + @Override + public void processCommand(ICommandSender sender, String[] args) { + Forge1710SparkPlugin.this.platform.executeCommand(new Forge1710CommandSender(sender, Forge1710SparkPlugin.this), args); + } - @Override - public int compareTo(Object o) { - return getCommandName().compareTo(((ICommand)o).getCommandName()); - } - - protected boolean isOp(EntityPlayer player) { - return FMLCommonHandler.instance().getMinecraftServerInstance().getConfigurationManager().canSendCommands(player.getGameProfile()); - } + @Override + public List addTabCompletionOptions(ICommandSender sender, String[] args) { + return Forge1710SparkPlugin.this.platform.tabCompleteCommand(new Forge1710CommandSender(sender, Forge1710SparkPlugin.this), args); + } + + @Override + public boolean canCommandSenderUseCommand(ICommandSender sender) { + return Forge1710SparkPlugin.this.platform.hasPermissionForAnyCommand(new Forge1710CommandSender(sender, Forge1710SparkPlugin.this)); + } + @Override + public boolean isUsernameIndex(String[] strings, int i) { + return false; + } + + @Override + public int compareTo(Object o) { + return getCommandName().compareTo(((ICommand)o).getCommandName()); + } + } } diff --git a/spark-forge1710/src/main/resources/META-INF/spark_at.cfg b/spark-forge1710/src/main/resources/META-INF/spark_at.cfg index ff29bd3bcc4..f908002fe58 100644 --- a/spark-forge1710/src/main/resources/META-INF/spark_at.cfg +++ b/spark-forge1710/src/main/resources/META-INF/spark_at.cfg @@ -1 +1,2 @@ -public net.minecraft.client.Minecraft field_152352_aC # mcThread \ No newline at end of file +public net.minecraft.client.Minecraft field_152352_aC # mcThread +public net.minecraft.client.Minecraft field_152350_aA # mcThread diff --git a/spark-minestom/build.gradle b/spark-minestom/build.gradle deleted file mode 100644 index 788201bb7e4..00000000000 --- a/spark-minestom/build.gradle +++ /dev/null @@ -1,48 +0,0 @@ -plugins { - id 'com.github.johnrengelman.shadow' version '7.0.0' -} - -tasks.withType(JavaCompile) { - // override, compile targeting J17 - options.release = 17 -} - -dependencies { - implementation project(':spark-common') - compileOnly 'com.github.Minestom.Minestom:Minestom:master-SNAPSHOT' - implementation 'com.google.guava:guava:19.0' -} - -processResources { - from(sourceSets.main.resources.srcDirs) { - expand ( - 'pluginVersion': project.pluginVersion, - 'pluginDescription': 
project.pluginDescription - ) - include 'extension.json' - } -} - -shadowJar { - archiveName = "spark-${project.pluginVersion}-minestom.jar" - - dependencies { - exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$')) - } - - relocate 'net.kyori.adventure.text.feature.pagination', 'me.lucko.spark.lib.adventure.pagination' - relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' - relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' - relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' - relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' - relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks' - - exclude 'module-info.class' - exclude 'META-INF/maven/**' - exclude 'META-INF/proguard/**' -} - -artifacts { - archives shadowJar - shadow shadowJar -} diff --git a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomClassSourceLookup.java b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomClassSourceLookup.java deleted file mode 100644 index ca44eeaaa39..00000000000 --- a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomClassSourceLookup.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -package me.lucko.spark.minestom; - -import me.lucko.spark.common.sampler.source.ClassSourceLookup; - -import net.minestom.server.MinecraftServer; -import net.minestom.server.extensions.Extension; -import net.minestom.server.extensions.ExtensionClassLoader; - -import java.util.HashMap; -import java.util.Map; - -public class MinestomClassSourceLookup extends ClassSourceLookup.ByClassLoader { - private final Map classLoaderToExtensions; - - public MinestomClassSourceLookup() { - this.classLoaderToExtensions = new HashMap<>(); - for (Extension extension : MinecraftServer.getExtensionManager().getExtensions()) { - this.classLoaderToExtensions.put(extension.getClass().getClassLoader(), extension.getOrigin().getName()); - } - } - - @Override - public String identify(ClassLoader loader) { - if (loader instanceof ExtensionClassLoader) { - return this.classLoaderToExtensions.get(loader); - } - return null; - } -} diff --git a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomCommandSender.java b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomCommandSender.java deleted file mode 100644 index da46224b726..00000000000 --- a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomCommandSender.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -package me.lucko.spark.minestom; - -import me.lucko.spark.common.command.sender.AbstractCommandSender; - -import net.kyori.adventure.text.Component; -import net.minestom.server.command.CommandSender; -import net.minestom.server.command.ConsoleSender; -import net.minestom.server.entity.Player; - -import java.util.UUID; - -public class MinestomCommandSender extends AbstractCommandSender { - public MinestomCommandSender(CommandSender delegate) { - super(delegate); - } - - @Override - public String getName() { - if (this.delegate instanceof Player player) { - return player.getUsername(); - } else if (this.delegate instanceof ConsoleSender) { - return "Console"; - }else { - return "unknown:" + this.delegate.getClass().getSimpleName(); - } - } - - @Override - public UUID getUniqueId() { - if (super.delegate instanceof Player player) { - return player.getUuid(); - } - return null; - } - - @Override - public void sendMessage(Component message) { - this.delegate.sendMessage(message); - } - - @Override - public boolean hasPermission(String permission) { - return this.delegate.hasPermission(permission); - } -} diff --git a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomSparkPlugin.java b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomSparkPlugin.java deleted file mode 100644 index 90144764151..00000000000 --- a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomSparkPlugin.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -package me.lucko.spark.minestom; - -import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.SparkPlugin; -import me.lucko.spark.common.monitor.ping.PlayerPingProvider; -import me.lucko.spark.common.platform.PlatformInfo; -import me.lucko.spark.common.sampler.source.ClassSourceLookup; -import me.lucko.spark.common.sampler.source.SourceMetadata; -import me.lucko.spark.common.tick.TickHook; -import me.lucko.spark.common.tick.TickReporter; - -import net.minestom.server.MinecraftServer; -import net.minestom.server.command.CommandSender; -import net.minestom.server.command.builder.Command; -import net.minestom.server.command.builder.CommandContext; -import net.minestom.server.command.builder.CommandExecutor; -import net.minestom.server.command.builder.arguments.ArgumentStringArray; -import net.minestom.server.command.builder.arguments.ArgumentType; -import net.minestom.server.command.builder.suggestion.Suggestion; -import net.minestom.server.command.builder.suggestion.SuggestionCallback; -import net.minestom.server.command.builder.suggestion.SuggestionEntry; -import net.minestom.server.extensions.Extension; -import net.minestom.server.timer.ExecutionType; - -import org.jetbrains.annotations.NotNull; - -import java.nio.file.Path; -import java.util.Arrays; -import java.util.Collection; -import java.util.logging.Level; -import java.util.stream.Stream; - -public class MinestomSparkPlugin extends Extension implements SparkPlugin { - private SparkPlatform platform; - private MinestomSparkCommand command; - - @Override - public void initialize() { - this.platform = new SparkPlatform(this); - this.platform.enable(); - this.command = new MinestomSparkCommand(this.platform); - MinecraftServer.getCommandManager().register(this.command); - } - - @Override - public void terminate() { - this.platform.disable(); - MinecraftServer.getCommandManager().unregister(this.command); - } - - @Override - public String getVersion() { - return getOrigin().getVersion(); - } - - @Override - public Path getPluginDirectory() { - return getDataDirectory(); - } - - @Override - public String getCommandName() { - return "spark"; - } - - @Override - public Stream getCommandSenders() { - return Stream.concat( - MinecraftServer.getConnectionManager().getOnlinePlayers().stream(), - Stream.of(MinecraftServer.getCommandManager().getConsoleSender()) - ).map(MinestomCommandSender::new); - } - - @Override - public void executeAsync(Runnable task) { - MinecraftServer.getSchedulerManager().scheduleNextTick(task, ExecutionType.ASYNC); - } - - @Override - public void log(Level level, String msg) { - if (level == Level.INFO) { - this.getLogger().info(msg); - } else if (level == Level.WARNING) { - this.getLogger().warn(msg); - } else if (level == Level.SEVERE) { - this.getLogger().error(msg); - } else { - throw new IllegalArgumentException(level.getName()); - } - } - - @Override - public PlatformInfo getPlatformInfo() { - return new MinestomPlatformInfo(); - } - - @Override - public ClassSourceLookup createClassSourceLookup() { - return new MinestomClassSourceLookup(); - } - - @Override - public Collection getKnownSources() { - return SourceMetadata.gather( - MinecraftServer.getExtensionManager().getExtensions(), - extension -> extension.getOrigin().getName(), - extension -> extension.getOrigin().getVersion(), - extension -> String.join(", ", extension.getOrigin().getAuthors()) - ); - } - - @Override - public PlayerPingProvider createPlayerPingProvider() { - return new MinestomPlayerPingProvider(); - } - - @Override - 
public TickReporter createTickReporter() { - return new MinestomTickReporter(); - } - - @Override - public TickHook createTickHook() { - return new MinestomTickHook(); - } - - private static final class MinestomSparkCommand extends Command implements CommandExecutor, SuggestionCallback { - private final SparkPlatform platform; - - public MinestomSparkCommand(SparkPlatform platform) { - super("spark"); - this.platform = platform; - - ArgumentStringArray arrayArgument = ArgumentType.StringArray("args"); - arrayArgument.setSuggestionCallback(this); - - addSyntax(this, arrayArgument); - setDefaultExecutor((sender, context) -> platform.executeCommand(new MinestomCommandSender(sender), new String[0])); - } - - // execute - @Override - public void apply(@NotNull CommandSender sender, @NotNull CommandContext context) { - String[] args = processArgs(context, false); - if (args == null) { - return; - } - - this.platform.executeCommand(new MinestomCommandSender(sender), args); - } - - // tab complete - @Override - public void apply(@NotNull CommandSender sender, @NotNull CommandContext context, @NotNull Suggestion suggestion) { - String[] args = processArgs(context, true); - if (args == null) { - return; - } - - Iterable suggestionEntries = this.platform.tabCompleteCommand(new MinestomCommandSender(sender), args); - for (String suggestionEntry : suggestionEntries) { - suggestion.addEntry(new SuggestionEntry(suggestionEntry)); - } - } - - private static String [] processArgs(CommandContext context, boolean tabComplete) { - String[] split = context.getInput().split(" ", tabComplete ? -1 : 0); - if (split.length == 0 || !split[0].equals("/spark") && !split[0].equals("spark")) { - return null; - } - - return Arrays.copyOfRange(split, 1, split.length); - } - } -} diff --git a/spark-minestom/src/main/resources/extension.json b/spark-minestom/src/main/resources/extension.json deleted file mode 100644 index bea56bb4b13..00000000000 --- a/spark-minestom/src/main/resources/extension.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "entrypoint": "me.lucko.spark.minestom.MinestomSparkPlugin", - "name": "spark", - "version": "${pluginVersion}" -} \ No newline at end of file diff --git a/spark-neoforge/build.gradle b/spark-neoforge/build.gradle new file mode 100644 index 00000000000..1a042d3475f --- /dev/null +++ b/spark-neoforge/build.gradle @@ -0,0 +1,82 @@ +plugins { + id 'com.gradleup.shadow' version '8.3.8' + id 'net.neoforged.moddev' version '2.0.78' +} + +tasks.withType(JavaCompile).configureEach { + // override, compile targeting J21 + options.release = 21 +} + +java { + toolchain { + languageVersion = JavaLanguageVersion.of(21) + } +} + +neoForge { + version = "21.8.4-beta" + validateAccessTransformers = true + + runs { + client { + client() + } + server { + server() + } + } + + mods { + spark { + sourceSet sourceSets.main + } + } +} + +configurations { + shade + implementation.extendsFrom shade +} + +dependencies { + shade project(':spark-common') +} + +processResources { + from(sourceSets.main.resources.srcDirs) { + include 'META-INF/neoforge.mods.toml' + expand ( + 'pluginVersion': project.pluginVersion, + 'pluginDescription': project.pluginDescription + ) + } + + from(sourceSets.main.resources.srcDirs) { + exclude 'META-INF/neoforge.mods.toml' + } +} + +shadowJar { + archiveFileName = "spark-${project.pluginVersion}-neoforge.jar" + configurations = [project.configurations.shade] + + relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' + relocate 'net.kyori.examination', 
'me.lucko.spark.lib.adventure.examination' + relocate 'net.kyori.option', 'me.lucko.spark.lib.adventure.option' + relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' + relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' + relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' + relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' + relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks' + relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws' + + project.applyExcludes(delegate) + + mergeServiceFiles() +} + +artifacts { + archives shadowJar + shadow shadowJar +} diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitClassSourceLookup.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeClassSourceLookup.java similarity index 68% rename from spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitClassSourceLookup.java rename to spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeClassSourceLookup.java index 180e0af8af4..5e60ee754b9 100644 --- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitClassSourceLookup.java +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeClassSourceLookup.java @@ -18,21 +18,18 @@ * along with this program. If not, see . */ -package me.lucko.spark.nukkit; +package me.lucko.spark.neoforge; +import cpw.mods.modlauncher.TransformingClassLoader; import me.lucko.spark.common.sampler.source.ClassSourceLookup; -import cn.nukkit.plugin.PluginClassLoader; - -import java.io.IOException; -import java.net.URISyntaxException; - -public class NukkitClassSourceLookup extends ClassSourceLookup.ByFirstUrlSource { +public class NeoForgeClassSourceLookup implements ClassSourceLookup { @Override - public String identify(ClassLoader loader) throws IOException, URISyntaxException { - if (loader instanceof PluginClassLoader) { - return super.identify(loader); + public String identify(Class clazz) { + if (clazz.getClassLoader() instanceof TransformingClassLoader) { + String name = clazz.getModule().getName(); + return name.equals("forge") || name.equals("minecraft") ? null : name; } return null; } diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeClientCommandSender.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeClientCommandSender.java new file mode 100644 index 00000000000..25a87e78319 --- /dev/null +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeClientCommandSender.java @@ -0,0 +1,73 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
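An aside on the NeoForgeClassSourceLookup rewrite above: under ModLauncher, transformed classes live in named JPMS modules, so the module name can stand in for a mod id, with the "forge" and "minecraft" platform modules filtered out. The core idea reduced to plain JDK calls (ModuleSourceLookup is a hypothetical name for this sketch):

final class ModuleSourceLookup {
    // Map a class to a source name via its module, as in the lookup above.
    static String identifySource(Class<?> clazz) {
        Module module = clazz.getModule();
        if (!module.isNamed()) {
            return null; // classes in the unnamed module carry no useful source name
        }
        String name = module.getName();
        return name.equals("forge") || name.equals("minecraft") ? null : name;
    }
}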
+ */ + +package me.lucko.spark.neoforge; + +import com.google.gson.JsonParseException; +import com.mojang.serialization.JsonOps; +import me.lucko.spark.common.command.sender.AbstractCommandSender; +import net.kyori.adventure.text.Component; +import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer; +import net.minecraft.client.player.LocalPlayer; +import net.minecraft.commands.CommandSourceStack; +import net.minecraft.core.RegistryAccess; +import net.minecraft.network.chat.ComponentSerialization; +import net.minecraft.world.entity.Entity; + +import java.util.UUID; + +public class NeoForgeClientCommandSender extends AbstractCommandSender { + public NeoForgeClientCommandSender(CommandSourceStack source) { + super(source); + } + + @Override + public String getName() { + return this.delegate.getTextName(); + } + + @Override + public UUID getUniqueId() { + Entity entity = this.delegate.getEntity(); + if (entity instanceof LocalPlayer player) { + return player.getUUID(); + } + return null; + } + + @Override + public void sendMessage(Component message) { + net.minecraft.network.chat.Component component = ComponentSerialization.CODEC.decode( + RegistryAccess.EMPTY.createSerializationContext(JsonOps.INSTANCE), + GsonComponentSerializer.gson().serializeToTree(message) + ).getOrThrow(JsonParseException::new).getFirst(); + super.delegate.sendSystemMessage(component); + } + + @Override + public boolean hasPermission(String permission) { + return true; + } + + @Override + protected Object getObjectForComparison() { + return this.delegate.getEntity(); + } +} diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlatformInfo.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgePlatformInfo.java similarity index 57% rename from spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlatformInfo.java rename to spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgePlatformInfo.java index 91d7ea2b0da..6a2338f59a6 100644 --- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlatformInfo.java +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgePlatformInfo.java @@ -18,37 +18,46 @@ * along with this program. If not, see . 
*/ -package me.lucko.spark.sponge; +package me.lucko.spark.neoforge; import me.lucko.spark.common.platform.PlatformInfo; +import net.neoforged.fml.ModContainer; +import net.neoforged.fml.ModList; +import net.neoforged.fml.loading.FMLLoader; +import net.neoforged.neoforgespi.language.IModInfo; -import org.spongepowered.api.Game; -import org.spongepowered.api.Platform; +public class NeoForgePlatformInfo implements PlatformInfo { + private final Type type; -public class Sponge7PlatformInfo implements PlatformInfo { - private final Game game; - - public Sponge7PlatformInfo(Game game) { - this.game = game; + public NeoForgePlatformInfo(Type type) { + this.type = type; } @Override public Type getType() { - return Type.SERVER; + return this.type; } @Override public String getName() { - return "Sponge"; + return "NeoForge"; + } + + @Override + public String getBrand() { + return ModList.get().getModContainerById("neoforge") + .map(ModContainer::getModInfo) + .map(IModInfo::getDisplayName) + .orElse("NeoForge"); } @Override public String getVersion() { - return this.game.getPlatform().getContainer(Platform.Component.IMPLEMENTATION).getVersion().orElse("unknown"); + return FMLLoader.versionInfo().neoForgeVersion(); } @Override public String getMinecraftVersion() { - return this.game.getPlatform().getMinecraftVersion().getName(); + return FMLLoader.versionInfo().mcVersion(); } } diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlayerPingProvider.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgePlayerPingProvider.java similarity index 70% rename from spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlayerPingProvider.java rename to spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgePlayerPingProvider.java index 8f4c15f981e..191b60ec6a9 100644 --- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlayerPingProvider.java +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgePlayerPingProvider.java @@ -18,29 +18,27 @@ * along with this program. If not, see . 
*/ -package me.lucko.spark.sponge; +package me.lucko.spark.neoforge; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.common.monitor.ping.PlayerPingProvider; - -import org.spongepowered.api.Server; -import org.spongepowered.api.entity.living.player.Player; +import net.minecraft.server.MinecraftServer; +import net.minecraft.server.level.ServerPlayer; import java.util.Map; -public class Sponge7PlayerPingProvider implements PlayerPingProvider { - private final Server server; +public class NeoForgePlayerPingProvider implements PlayerPingProvider { + private final MinecraftServer server; - public Sponge7PlayerPingProvider(Server server) { + public NeoForgePlayerPingProvider(MinecraftServer server) { this.server = server; } @Override public Map poll() { ImmutableMap.Builder builder = ImmutableMap.builder(); - for (Player player : this.server.getOnlinePlayers()) { - builder.put(player.getName(), player.getConnection().getLatency()); + for (ServerPlayer player : this.server.getPlayerList().getPlayers()) { + builder.put(player.getGameProfile().getName(), player.connection.latency()); } return builder.build(); } diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeServerCommandSender.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeServerCommandSender.java new file mode 100644 index 00000000000..183f3fee722 --- /dev/null +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeServerCommandSender.java @@ -0,0 +1,85 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.neoforge; + +import com.google.gson.JsonParseException; +import com.mojang.serialization.JsonOps; +import me.lucko.spark.common.command.sender.AbstractCommandSender; +import me.lucko.spark.neoforge.plugin.NeoForgeServerSparkPlugin; +import net.kyori.adventure.text.Component; +import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer; +import net.minecraft.commands.CommandSourceStack; +import net.minecraft.core.RegistryAccess; +import net.minecraft.network.chat.ComponentSerialization; +import net.minecraft.world.entity.Entity; + +import java.util.UUID; + +public class NeoForgeServerCommandSender extends AbstractCommandSender { + private final NeoForgeServerSparkPlugin plugin; + + public NeoForgeServerCommandSender(CommandSourceStack commandSource, NeoForgeServerSparkPlugin plugin) { + super(commandSource); + this.plugin = plugin; + } + + @Override + public String getName() { + String name = this.delegate.getTextName(); + if (this.delegate.getEntity() != null && name.equals("Server")) { + return "Console"; + } + return name; + } + + @Override + public UUID getUniqueId() { + Entity entity = this.delegate.getEntity(); + return entity != null ? 
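+                // console and command-block sources have no backing entity, so no UUID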
entity.getUUID() : null; + } + + @Override + public void sendMessage(Component message) { + net.minecraft.network.chat.Component component = ComponentSerialization.CODEC.decode( + RegistryAccess.EMPTY.createSerializationContext(JsonOps.INSTANCE), + GsonComponentSerializer.gson().serializeToTree(message) + ).getOrThrow(JsonParseException::new).getFirst(); + super.delegate.sendSystemMessage(component); + } + + @Override + public boolean hasPermission(String permission) { + return this.plugin.hasPermission(this.delegate, permission); + } + + @Override + protected Object getObjectForComparison() { + UUID uniqueId = getUniqueId(); + if (uniqueId != null) { + return uniqueId; + } + Entity entity = this.delegate.getEntity(); + if (entity != null) { + return entity; + } + return getName(); + } +} diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeServerConfigProvider.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeServerConfigProvider.java new file mode 100644 index 00000000000..813683d344f --- /dev/null +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeServerConfigProvider.java @@ -0,0 +1,56 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.neoforge; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import me.lucko.spark.common.platform.serverconfig.ConfigParser; +import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; +import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; + +import java.util.Collection; +import java.util.Map; + +public class NeoForgeServerConfigProvider extends ServerConfigProvider { + + /** A map of provided files and their type */ + private static final Map FILES; + /** A collection of paths to be excluded from the files */ + private static final Collection HIDDEN_PATHS; + + public NeoForgeServerConfigProvider() { + super(FILES, HIDDEN_PATHS); + } + + static { + ImmutableSet.Builder hiddenPaths = ImmutableSet.builder() + .add("server-ip") + .add("motd") + .add("resource-pack") + .add("rconpassword") + .add("level-seed") + .addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths")); + + FILES = ImmutableMap.of("server.properties", PropertiesConfigParser.INSTANCE); + HIDDEN_PATHS = hiddenPaths.build(); + } + +} diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeSparkMod.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeSparkMod.java new file mode 100644 index 00000000000..706deb168a2 --- /dev/null +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeSparkMod.java @@ -0,0 +1,67 @@ +/* + * This file is part of spark. 
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.neoforge; + +import me.lucko.spark.neoforge.plugin.NeoForgeClientSparkPlugin; +import me.lucko.spark.neoforge.plugin.NeoForgeServerSparkPlugin; +import net.neoforged.bus.api.IEventBus; +import net.neoforged.bus.api.SubscribeEvent; +import net.neoforged.fml.ModContainer; +import net.neoforged.fml.common.Mod; +import net.neoforged.fml.event.lifecycle.FMLClientSetupEvent; +import net.neoforged.fml.loading.FMLPaths; +import net.neoforged.neoforge.common.NeoForge; +import net.neoforged.neoforge.event.server.ServerAboutToStartEvent; + +import java.nio.file.Path; + +@Mod("spark") +public class NeoForgeSparkMod { + + private final ModContainer container; + private final Path configDirectory; + + public NeoForgeSparkMod(ModContainer container, IEventBus eventBus) { + this.container = container; + this.configDirectory = FMLPaths.CONFIGDIR.get().resolve(this.container.getModId()); + + eventBus.addListener(this::clientInit); + + NeoForge.EVENT_BUS.register(this); + } + + public String getVersion() { + return this.container.getModInfo().getVersion().toString(); + } + + public void clientInit(FMLClientSetupEvent e) { + NeoForgeClientSparkPlugin.register(this, e); + } + + @SubscribeEvent + public void serverInit(ServerAboutToStartEvent e) { + NeoForgeServerSparkPlugin.register(this, e); + } + + public Path getConfigDirectory() { + return this.configDirectory; + } +} diff --git a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomTickReporter.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeTickHook.java similarity index 50% rename from spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomTickReporter.java rename to spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeTickHook.java index ae25f925c1d..987c42d05c8 100644 --- a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomTickReporter.java +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeTickHook.java @@ -18,31 +18,40 @@ * along with this program. If not, see . 
*/ -package me.lucko.spark.minestom; +package me.lucko.spark.neoforge; -import me.lucko.spark.common.tick.AbstractTickReporter; - -import net.minestom.server.MinecraftServer; -import net.minestom.server.event.Event; -import net.minestom.server.event.EventNode; -import net.minestom.server.event.server.ServerTickMonitorEvent; - -import java.util.UUID; - -public class MinestomTickReporter extends AbstractTickReporter { - private final EventNode node = EventNode.all("sparkTickReporter-" + UUID.randomUUID()); - - public MinestomTickReporter() { - this.node.addListener(ServerTickMonitorEvent.class, event -> onTick(event.getTickMonitor().getTickTime())); - } +import me.lucko.spark.common.tick.AbstractTickHook; +import me.lucko.spark.common.tick.TickHook; +import net.neoforged.bus.api.SubscribeEvent; +import net.neoforged.neoforge.client.event.ClientTickEvent; +import net.neoforged.neoforge.common.NeoForge; +import net.neoforged.neoforge.event.tick.ServerTickEvent; +public abstract class NeoForgeTickHook extends AbstractTickHook implements TickHook { @Override public void start() { - MinecraftServer.getGlobalEventHandler().addChild(this.node); + NeoForge.EVENT_BUS.register(this); } @Override public void close() { - MinecraftServer.getGlobalEventHandler().removeChild(this.node); + NeoForge.EVENT_BUS.unregister(this); + } + + public static final class Server extends NeoForgeTickHook { + + @SubscribeEvent + public void onTickStart(ServerTickEvent.Pre e) { + onTick(); + } + } + + public static final class Client extends NeoForgeTickHook { + + @SubscribeEvent + public void onTickStart(ClientTickEvent.Pre e) { + onTick(); + } } + } diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeTickReporter.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeTickReporter.java new file mode 100644 index 00000000000..7d289fb348d --- /dev/null +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeTickReporter.java @@ -0,0 +1,69 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.neoforge; + +import me.lucko.spark.common.tick.SimpleTickReporter; +import me.lucko.spark.common.tick.TickReporter; +import net.neoforged.bus.api.SubscribeEvent; +import net.neoforged.neoforge.client.event.ClientTickEvent; +import net.neoforged.neoforge.common.NeoForge; +import net.neoforged.neoforge.event.tick.ServerTickEvent; + +public abstract class NeoForgeTickReporter extends SimpleTickReporter implements TickReporter { + + @Override + public void start() { + NeoForge.EVENT_BUS.register(this); + } + + @Override + public void close() { + NeoForge.EVENT_BUS.unregister(this); + super.close(); + } + + public static final class Server extends NeoForgeTickReporter { + + @SubscribeEvent + public void onTickStart(ServerTickEvent.Pre e) { + onStart(); + } + + @SubscribeEvent + public void onTickEnd(ServerTickEvent.Post e) { + onEnd(); + } + } + + public static final class Client extends NeoForgeTickReporter { + + @SubscribeEvent + public void onTickStart(ClientTickEvent.Pre e) { + onStart(); + } + + @SubscribeEvent + public void onTickEnd(ClientTickEvent.Post e) { + onEnd(); + } + } + +} diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeWorldInfoProvider.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeWorldInfoProvider.java new file mode 100644 index 00000000000..d85cfb3642f --- /dev/null +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeWorldInfoProvider.java @@ -0,0 +1,259 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.neoforge; + +import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap; +import me.lucko.spark.common.platform.world.AbstractChunkInfo; +import me.lucko.spark.common.platform.world.CountMap; +import me.lucko.spark.common.platform.world.WorldInfoProvider; +import net.minecraft.client.Minecraft; +import net.minecraft.client.multiplayer.ClientLevel; +import net.minecraft.server.MinecraftServer; +import net.minecraft.server.level.ServerLevel; +import net.minecraft.server.packs.repository.PackRepository; +import net.minecraft.server.packs.repository.PackSource; +import net.minecraft.world.entity.Entity; +import net.minecraft.world.entity.EntityType; +import net.minecraft.world.level.ChunkPos; +import net.minecraft.world.level.GameRules; +import net.minecraft.world.level.entity.EntityLookup; +import net.minecraft.world.level.entity.LevelEntityGetter; +import net.minecraft.world.level.entity.PersistentEntitySectionManager; +import net.minecraft.world.level.entity.TransientEntitySectionManager; +import net.neoforged.fml.ModList; + +import java.lang.reflect.Method; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.stream.Collectors; + +public abstract class NeoForgeWorldInfoProvider implements WorldInfoProvider { + + protected abstract PackRepository getPackRepository(); + + @Override + public Collection pollDataPacks() { + return getPackRepository().getSelectedPacks().stream() + .map(pack -> new DataPackInfo( + pack.getId(), + pack.getDescription().getString(), + resourcePackSource(pack.getPackSource()) + )) + .collect(Collectors.toList()); + } + + private static String resourcePackSource(PackSource source) { + if (source == PackSource.DEFAULT) { + return "none"; + } else if (source == PackSource.BUILT_IN) { + return "builtin"; + } else if (source == PackSource.WORLD) { + return "world"; + } else if (source == PackSource.SERVER) { + return "server"; + } else { + return "unknown"; + } + } + + public static final class Server extends NeoForgeWorldInfoProvider { + private final MinecraftServer server; + + public Server(MinecraftServer server) { + this.server = server; + } + + @Override + public CountsResult pollCounts() { + int players = this.server.getPlayerCount(); + int entities = 0; + int chunks = 0; + + for (ServerLevel level : this.server.getAllLevels()) { + + if (ModList.get().isLoaded("moonrise")) { + entities += MoonriseMethods.getEntityCount(level.getEntities()); + } else { + PersistentEntitySectionManager entityManager = level.entityManager; + EntityLookup entityIndex = entityManager.visibleEntityStorage; + entities += entityIndex.count(); + } + + chunks += level.getChunkSource().getLoadedChunksCount(); + } + + return new CountsResult(players, entities, -1, chunks); + } + + @Override + public ChunksResult pollChunks() { + ChunksResult data = new ChunksResult<>(); + + for (ServerLevel level : this.server.getAllLevels()) { + Long2ObjectOpenHashMap levelInfos = new Long2ObjectOpenHashMap<>(); + + for (Entity entity : level.getEntities().getAll()) { + ForgeChunkInfo info = levelInfos.computeIfAbsent( + entity.chunkPosition().toLong(), ForgeChunkInfo::new); + info.entityCounts.increment(entity.getType()); + } + + data.put(level.dimension().location().getPath(), List.copyOf(levelInfos.values())); + } + + return data; + } + + @Override + public GameRulesResult pollGameRules() { + GameRulesResult data = new GameRulesResult(); + Iterable levels = this.server.getAllLevels(); + + for (ServerLevel level : levels) { + 
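+                // visitGameRuleTypes walks every registered rule key; for each rule we
+                // capture the serialized default (from a freshly created rule) and this
+                // level's current value, keyed by the dimension path resolved just below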
String levelName = level.dimension().location().getPath(); + + level.getGameRules().visitGameRuleTypes(new GameRules.GameRuleTypeVisitor() { + @Override + public > void visit(GameRules.Key key, GameRules.Type type) { + String defaultValue = type.createRule().serialize(); + data.putDefault(key.getId(), defaultValue); + + String value = level.getGameRules().getRule(key).serialize(); + data.put(key.getId(), levelName, value); + } + }); + } + + return data; + } + + @Override + protected PackRepository getPackRepository() { + return this.server.getPackRepository(); + } + } + + public static final class Client extends NeoForgeWorldInfoProvider { + private final Minecraft client; + + public Client(Minecraft client) { + this.client = client; + } + + @Override + public CountsResult pollCounts() { + ClientLevel level = this.client.level; + if (level == null) { + return null; + } + + int entities; + if (ModList.get().isLoaded("moonrise")) { + entities = MoonriseMethods.getEntityCount(level.getEntities()); + } else { + TransientEntitySectionManager entityManager = level.entityStorage; + EntityLookup entityIndex = entityManager.entityStorage; + entities = entityIndex.count(); + } + + int chunks = level.getChunkSource().getLoadedChunksCount(); + + return new CountsResult(-1, entities, -1, chunks); + } + + @Override + public ChunksResult pollChunks() { + ClientLevel level = this.client.level; + if (level == null) { + return null; + } + + ChunksResult data = new ChunksResult<>(); + + Long2ObjectOpenHashMap levelInfos = new Long2ObjectOpenHashMap<>(); + + for (Entity entity : level.getEntities().getAll()) { + ForgeChunkInfo info = levelInfos.computeIfAbsent(entity.chunkPosition().toLong(), ForgeChunkInfo::new); + info.entityCounts.increment(entity.getType()); + } + + data.put(level.dimension().location().getPath(), List.copyOf(levelInfos.values())); + + return data; + } + + @Override + public GameRulesResult pollGameRules() { + // Not available on client since 24w39a + return null; + } + + @Override + protected PackRepository getPackRepository() { + return this.client.getResourcePackRepository(); + } + } + + public static final class ForgeChunkInfo extends AbstractChunkInfo> { + private final CountMap> entityCounts; + + ForgeChunkInfo(long chunkPos) { + super(ChunkPos.getX(chunkPos), ChunkPos.getZ(chunkPos)); + + this.entityCounts = new CountMap.Simple<>(new HashMap<>()); + } + + @Override + public CountMap> getEntityCounts() { + return this.entityCounts; + } + + @Override + public String entityTypeName(EntityType type) { + return EntityType.getKey(type).toString(); + } + } + + private static final class MoonriseMethods { + private static Method getEntityCount; + + private static Method getEntityCountMethod(LevelEntityGetter getter) { + if (getEntityCount == null) { + try { + getEntityCount = getter.getClass().getMethod("getEntityCount"); + } catch (final ReflectiveOperationException e) { + throw new RuntimeException("Cannot find Moonrise getEntityCount method", e); + } + } + return getEntityCount; + } + + private static int getEntityCount(LevelEntityGetter getter) { + try { + return (int) getEntityCountMethod(getter).invoke(getter); + } catch (final ReflectiveOperationException e) { + throw new RuntimeException("Failed to invoke Moonrise getEntityCount method", e); + } + } + } + +} diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeClientSparkPlugin.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeClientSparkPlugin.java new file mode 100644 index 
00000000000..539bc004442 --- /dev/null +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeClientSparkPlugin.java @@ -0,0 +1,140 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.neoforge.plugin; + +import com.mojang.brigadier.Command; +import com.mojang.brigadier.context.CommandContext; +import com.mojang.brigadier.exceptions.CommandSyntaxException; +import com.mojang.brigadier.suggestion.SuggestionProvider; +import com.mojang.brigadier.suggestion.Suggestions; +import com.mojang.brigadier.suggestion.SuggestionsBuilder; +import me.lucko.spark.common.platform.PlatformInfo; +import me.lucko.spark.common.platform.world.WorldInfoProvider; +import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.common.tick.TickHook; +import me.lucko.spark.common.tick.TickReporter; +import me.lucko.spark.neoforge.NeoForgeClientCommandSender; +import me.lucko.spark.neoforge.NeoForgePlatformInfo; +import me.lucko.spark.neoforge.NeoForgeSparkMod; +import me.lucko.spark.neoforge.NeoForgeTickHook; +import me.lucko.spark.neoforge.NeoForgeTickReporter; +import me.lucko.spark.neoforge.NeoForgeWorldInfoProvider; +import net.minecraft.client.Minecraft; +import net.minecraft.commands.CommandSourceStack; +import net.neoforged.bus.api.SubscribeEvent; +import net.neoforged.fml.event.lifecycle.FMLClientSetupEvent; +import net.neoforged.neoforge.client.ClientCommandHandler; +import net.neoforged.neoforge.client.event.RegisterClientCommandsEvent; +import net.neoforged.neoforge.common.NeoForge; + +import java.util.concurrent.CompletableFuture; +import java.util.stream.Stream; + +public class NeoForgeClientSparkPlugin extends NeoForgeSparkPlugin implements Command, SuggestionProvider { + + public static void register(NeoForgeSparkMod mod, FMLClientSetupEvent event) { + NeoForgeClientSparkPlugin plugin = new NeoForgeClientSparkPlugin(mod, Minecraft.getInstance()); + plugin.enable(); + } + + private final Minecraft minecraft; + private final ThreadDumper gameThreadDumper; + + public NeoForgeClientSparkPlugin(NeoForgeSparkMod mod, Minecraft minecraft) { + super(mod); + this.minecraft = minecraft; + this.gameThreadDumper = new ThreadDumper.Specific(minecraft.gameThread); + } + + @Override + public void enable() { + super.enable(); + + // register listeners + NeoForge.EVENT_BUS.register(this); + } + + @SubscribeEvent + public void onCommandRegister(RegisterClientCommandsEvent e) { + registerCommands(e.getDispatcher(), this, this, "sparkc", "sparkclient"); + } + + @Override + public int run(CommandContext context) throws CommandSyntaxException { + String[] args = processArgs(context, false, "sparkc", "sparkclient"); + if (args == null) { + return 0; + } + + this.platform.executeCommand(new NeoForgeClientCommandSender(context.getSource()), args); + return Command.SINGLE_SUCCESS; + } + + 
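+    /*
+     * A sketch of how the shared processArgs helper (defined in NeoForgeSparkPlugin)
+     * resolves typed input; the command strings are illustrative only. It also helps
+     * explain why the alias lists above and below differ: on the client, executed
+     * input evidently arrives without the leading slash, while tab-completion input
+     * includes it.
+     *
+     *   run():            getInput() = "sparkc profiler info"
+     *                     split(" ", 0)  -> ["sparkc", "profiler", "info"]
+     *                     alias matched  -> args = ["profiler", "info"]
+     *
+     *   getSuggestions(): getInput() = "/sparkc profiler "
+     *                     split(" ", -1) -> ["/sparkc", "profiler", ""]
+     *                     alias matched  -> args = ["profiler", ""]
+     *
+     * The -1 limit keeps the trailing empty string, so a trailing space still yields
+     * an empty final argument for the completer to fill in.
+     */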
@Override + public CompletableFuture getSuggestions(CommandContext context, SuggestionsBuilder builder) throws CommandSyntaxException { + String[] args = processArgs(context, true, "/sparkc", "/sparkclient"); + if (args == null) { + return Suggestions.empty(); + } + + return generateSuggestions(new NeoForgeClientCommandSender(context.getSource()), args, builder); + } + + @Override + public Stream getCommandSenders() { + return Stream.of(new NeoForgeClientCommandSender(ClientCommandHandler.getSource())); + } + + @Override + public void executeSync(Runnable task) { + this.minecraft.executeIfPossible(task); + } + + @Override + public ThreadDumper getDefaultThreadDumper() { + return this.gameThreadDumper; + } + + @Override + public TickHook createTickHook() { + return new NeoForgeTickHook.Client(); + } + + @Override + public TickReporter createTickReporter() { + return new NeoForgeTickReporter.Client(); + } + + @Override + public WorldInfoProvider createWorldInfoProvider() { + return new NeoForgeWorldInfoProvider.Client(this.minecraft); + } + + @Override + public PlatformInfo getPlatformInfo() { + return new NeoForgePlatformInfo(PlatformInfo.Type.CLIENT); + } + + @Override + public String getCommandName() { + return "sparkc"; + } +} diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeServerSparkPlugin.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeServerSparkPlugin.java new file mode 100644 index 00000000000..5d4630be370 --- /dev/null +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeServerSparkPlugin.java @@ -0,0 +1,256 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.neoforge.plugin; + +import com.google.common.collect.ImmutableMap; +import com.mojang.brigadier.Command; +import com.mojang.brigadier.CommandDispatcher; +import com.mojang.brigadier.context.CommandContext; +import com.mojang.brigadier.exceptions.CommandSyntaxException; +import com.mojang.brigadier.suggestion.SuggestionProvider; +import com.mojang.brigadier.suggestion.Suggestions; +import com.mojang.brigadier.suggestion.SuggestionsBuilder; +import me.lucko.spark.common.monitor.ping.PlayerPingProvider; +import me.lucko.spark.common.platform.PlatformInfo; +import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; +import me.lucko.spark.common.platform.world.WorldInfoProvider; +import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.common.tick.TickHook; +import me.lucko.spark.common.tick.TickReporter; +import me.lucko.spark.neoforge.NeoForgePlatformInfo; +import me.lucko.spark.neoforge.NeoForgePlayerPingProvider; +import me.lucko.spark.neoforge.NeoForgeServerCommandSender; +import me.lucko.spark.neoforge.NeoForgeServerConfigProvider; +import me.lucko.spark.neoforge.NeoForgeSparkMod; +import me.lucko.spark.neoforge.NeoForgeTickHook; +import me.lucko.spark.neoforge.NeoForgeTickReporter; +import me.lucko.spark.neoforge.NeoForgeWorldInfoProvider; +import net.minecraft.commands.CommandSourceStack; +import net.minecraft.server.MinecraftServer; +import net.minecraft.server.level.ServerPlayer; +import net.neoforged.bus.api.SubscribeEvent; +import net.neoforged.neoforge.common.NeoForge; +import net.neoforged.neoforge.event.RegisterCommandsEvent; +import net.neoforged.neoforge.event.server.ServerAboutToStartEvent; +import net.neoforged.neoforge.event.server.ServerStoppingEvent; +import net.neoforged.neoforge.server.permission.PermissionAPI; +import net.neoforged.neoforge.server.permission.events.PermissionGatherEvent; +import net.neoforged.neoforge.server.permission.nodes.PermissionNode; +import net.neoforged.neoforge.server.permission.nodes.PermissionTypes; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public class NeoForgeServerSparkPlugin extends NeoForgeSparkPlugin implements Command, SuggestionProvider { + + public static void register(NeoForgeSparkMod mod, ServerAboutToStartEvent event) { + NeoForgeServerSparkPlugin plugin = new NeoForgeServerSparkPlugin(mod, event.getServer()); + plugin.enable(); + } + + private static final PermissionNode.PermissionResolver DEFAULT_PERMISSION_VALUE = (player, playerUUID, context) -> { + if (player == null) { + return false; + } + + MinecraftServer server = player.getServer(); + if (server != null && server.isSingleplayerOwner(player.getGameProfile())) { + return true; + } + + return player.hasPermissions(4); + }; + + private final MinecraftServer server; + private final ThreadDumper gameThreadDumper; + private Map> registeredPermissions = Collections.emptyMap(); + + public NeoForgeServerSparkPlugin(NeoForgeSparkMod mod, MinecraftServer server) { + super(mod); + this.server = server; + this.gameThreadDumper = new ThreadDumper.Specific(server.getRunningThread()); + } + + @Override + public void enable() { + super.enable(); + + // register commands + registerCommands(this.server.getCommands().getDispatcher()); + + // register listeners + NeoForge.EVENT_BUS.register(this); + } + + @Override + public void disable() 
{ + super.disable(); + + // unregister listeners + NeoForge.EVENT_BUS.unregister(this); + } + + @SubscribeEvent + public void onDisable(ServerStoppingEvent event) { + if (event.getServer() == this.server) { + disable(); + } + } + + @SubscribeEvent + public void onPermissionGather(PermissionGatherEvent.Nodes e) { + // collect all possible permissions + List permissions = this.platform.getCommands().stream() + .map(me.lucko.spark.common.command.Command::primaryAlias) + .collect(Collectors.toList()); + + // special case for the "spark" permission: map it to "spark.all" + permissions.add("all"); + + // register permissions with forge & keep a copy for lookup + ImmutableMap.Builder> builder = ImmutableMap.builder(); + + Map> alreadyRegistered = e.getNodes().stream().collect(Collectors.toMap(PermissionNode::getNodeName, Function.identity())); + + for (String permission : permissions) { + String permissionString = "spark." + permission; + + // there's a weird bug where it seems that this listener can be called twice, causing an + // IllegalArgumentException to be thrown the second time e.addNodes is called. + PermissionNode existing = alreadyRegistered.get(permissionString); + if (existing != null) { + //noinspection unchecked + builder.put(permissionString, (PermissionNode) existing); + continue; + } + + PermissionNode node = new PermissionNode<>("spark", permission, PermissionTypes.BOOLEAN, DEFAULT_PERMISSION_VALUE); + e.addNodes(node); + builder.put(permissionString, node); + } + this.registeredPermissions = builder.build(); + } + + @SubscribeEvent + public void onCommandRegister(RegisterCommandsEvent e) { + registerCommands(e.getDispatcher()); + } + + private void registerCommands(CommandDispatcher dispatcher) { + registerCommands(dispatcher, this, this, "spark"); + } + + @Override + public int run(CommandContext context) throws CommandSyntaxException { + String[] args = processArgs(context, false, "/spark", "spark"); + if (args == null) { + return 0; + } + + this.platform.executeCommand(new NeoForgeServerCommandSender(context.getSource(), this), args); + return Command.SINGLE_SUCCESS; + } + + @Override + public CompletableFuture getSuggestions(CommandContext context, SuggestionsBuilder builder) throws CommandSyntaxException { + String[] args = processArgs(context, true, "/spark", "spark"); + if (args == null) { + return Suggestions.empty(); + } + + return generateSuggestions(new NeoForgeServerCommandSender(context.getSource(), this), args, builder); + } + + public boolean hasPermission(CommandSourceStack source, String permission) { + ServerPlayer player = source.getPlayer(); + if (player != null) { + if (permission.equals("spark")) { + permission = "spark.all"; + } + + PermissionNode permissionNode = this.registeredPermissions.get(permission); + if (permissionNode == null) { + throw new IllegalStateException("spark permission not registered: " + permission); + } + return PermissionAPI.getPermission(player, permissionNode); + } else { + return true; + } + } + + @Override + public Stream getCommandSenders() { + return Stream.concat( + this.server.getPlayerList().getPlayers().stream().map(ServerPlayer::createCommandSourceStack), + Stream.of(this.server.createCommandSourceStack()) + ).map(stack -> new NeoForgeServerCommandSender(stack, this)); + } + + @Override + public void executeSync(Runnable task) { + this.server.executeIfPossible(task); + } + + @Override + public ThreadDumper getDefaultThreadDumper() { + return this.gameThreadDumper; + } + + @Override + public TickHook createTickHook() { + 
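+        // NeoForgeTickHook.Server registers itself on the NeoForge event bus and
+        // invokes onTick() once per ServerTickEvent.Pre (see NeoForgeTickHook)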
return new NeoForgeTickHook.Server(); + } + + @Override + public TickReporter createTickReporter() { + return new NeoForgeTickReporter.Server(); + } + + @Override + public PlayerPingProvider createPlayerPingProvider() { + return new NeoForgePlayerPingProvider(this.server); + } + + @Override + public ServerConfigProvider createServerConfigProvider() { + return new NeoForgeServerConfigProvider(); + } + + @Override + public WorldInfoProvider createWorldInfoProvider() { + return new NeoForgeWorldInfoProvider.Server(this.server); + } + + @Override + public PlatformInfo getPlatformInfo() { + return new NeoForgePlatformInfo(PlatformInfo.Type.SERVER); + } + + @Override + public String getCommandName() { + return "spark"; + } +} diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeSparkPlugin.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeSparkPlugin.java new file mode 100644 index 00000000000..667a12d6a17 --- /dev/null +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeSparkPlugin.java @@ -0,0 +1,176 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.neoforge.plugin; + +import com.mojang.brigadier.Command; +import com.mojang.brigadier.CommandDispatcher; +import com.mojang.brigadier.arguments.StringArgumentType; +import com.mojang.brigadier.builder.LiteralArgumentBuilder; +import com.mojang.brigadier.builder.RequiredArgumentBuilder; +import com.mojang.brigadier.context.CommandContext; +import com.mojang.brigadier.suggestion.SuggestionProvider; +import com.mojang.brigadier.suggestion.Suggestions; +import com.mojang.brigadier.suggestion.SuggestionsBuilder; +import com.mojang.brigadier.tree.LiteralCommandNode; +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.SparkPlugin; +import me.lucko.spark.common.command.sender.CommandSender; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.SourceMetadata; +import me.lucko.spark.common.util.SparkThreadFactory; +import me.lucko.spark.neoforge.NeoForgeClassSourceLookup; +import me.lucko.spark.neoforge.NeoForgeSparkMod; +import net.neoforged.fml.ModList; +import net.neoforged.neoforgespi.language.IModInfo; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.nio.file.Path; +import java.util.Arrays; +import java.util.Collection; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.logging.Level; + +public abstract class NeoForgeSparkPlugin implements SparkPlugin { + + private final NeoForgeSparkMod mod; + private final Logger logger; + protected final ScheduledExecutorService scheduler; + + protected SparkPlatform platform; + + protected NeoForgeSparkPlugin(NeoForgeSparkMod mod) { + this.mod = mod; + this.logger = LogManager.getLogger("spark"); + this.scheduler = Executors.newScheduledThreadPool(4, new SparkThreadFactory()); + } + + public void enable() { + this.platform = new SparkPlatform(this); + this.platform.enable(); + } + + public void disable() { + this.platform.disable(); + this.scheduler.shutdown(); + } + + @Override + public String getVersion() { + return this.mod.getVersion(); + } + + @Override + public Path getPluginDirectory() { + return this.mod.getConfigDirectory(); + } + + @Override + public void executeAsync(Runnable task) { + this.scheduler.execute(task); + } + + @Override + public void log(Level level, String msg) { + if (level.intValue() >= 1000) { // severe + this.logger.error(msg); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg); + } else { + this.logger.info(msg); + } + } + + @Override + public void log(Level level, String msg, Throwable throwable) { + if (level.intValue() >= 1000) { // severe + this.logger.error(msg, throwable); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg, throwable); + } else { + this.logger.info(msg, throwable); + } + } + + @Override + public ClassSourceLookup createClassSourceLookup() { + return new NeoForgeClassSourceLookup(); + } + + @Override + public Collection getKnownSources() { + return SourceMetadata.gather( + ModList.get().getMods(), + IModInfo::getModId, + mod -> mod.getVersion().toString(), + mod -> null, // ? 
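+                // (author extractor: IModInfo apparently exposes no plain author
+                // string, hence null and the "?" above)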
+ IModInfo::getDescription + ); + } + + protected CompletableFuture generateSuggestions(CommandSender sender, String[] args, SuggestionsBuilder builder) { + SuggestionsBuilder suggestions; + + int lastSpaceIdx = builder.getRemaining().lastIndexOf(' '); + if (lastSpaceIdx != -1) { + suggestions = builder.createOffset(builder.getStart() + lastSpaceIdx + 1); + } else { + suggestions = builder; + } + + return CompletableFuture.supplyAsync(() -> { + for (String suggestion : this.platform.tabCompleteCommand(sender, args)) { + suggestions.suggest(suggestion); + } + return suggestions.build(); + }); + } + + protected static void registerCommands(CommandDispatcher dispatcher, Command executor, SuggestionProvider suggestor, String... aliases) { + if (aliases.length == 0) { + return; + } + + String mainName = aliases[0]; + LiteralArgumentBuilder command = LiteralArgumentBuilder.literal(mainName) + .executes(executor) + .then(RequiredArgumentBuilder.argument("args", StringArgumentType.greedyString()) + .suggests(suggestor) + .executes(executor) + ); + + LiteralCommandNode node = dispatcher.register(command); + for (int i = 1; i < aliases.length; i++) { + dispatcher.register(LiteralArgumentBuilder.literal(aliases[i]).redirect(node)); + } + } + + protected static String[] processArgs(CommandContext context, boolean tabComplete, String... aliases) { + String[] split = context.getInput().split(" ", tabComplete ? -1 : 0); + if (split.length == 0 || !Arrays.asList(aliases).contains(split[0])) { + return null; + } + + return Arrays.copyOfRange(split, 1, split.length); + } +} diff --git a/spark-neoforge/src/main/resources/META-INF/accesstransformer.cfg b/spark-neoforge/src/main/resources/META-INF/accesstransformer.cfg new file mode 100644 index 00000000000..80f3f6e2e4e --- /dev/null +++ b/spark-neoforge/src/main/resources/META-INF/accesstransformer.cfg @@ -0,0 +1,8 @@ +public net.minecraft.server.level.ServerLevel entityManager # entityManager +public net.minecraft.world.level.entity.PersistentEntitySectionManager sectionStorage # sectionStorage +public net.minecraft.world.level.entity.PersistentEntitySectionManager visibleEntityStorage # visibleEntityStorage +public net.minecraft.client.multiplayer.ClientLevel entityStorage # entityStorage +public net.minecraft.client.multiplayer.ClientLevel getEntities()Lnet/minecraft/world/level/entity/LevelEntityGetter; # getEntities +public net.minecraft.world.level.entity.TransientEntitySectionManager sectionStorage # sectionStorage +public net.minecraft.world.level.entity.TransientEntitySectionManager entityStorage # entityStorage +public net.minecraft.client.Minecraft gameThread # gameThread diff --git a/spark-neoforge/src/main/resources/META-INF/neoforge.mods.toml b/spark-neoforge/src/main/resources/META-INF/neoforge.mods.toml new file mode 100644 index 00000000000..f44574d2936 --- /dev/null +++ b/spark-neoforge/src/main/resources/META-INF/neoforge.mods.toml @@ -0,0 +1,20 @@ +modLoader="javafml" +loaderVersion="[2,)" +authors="Luck" +license="GPLv3" + +[[mods]] +modId="spark" +displayName="spark" +version="${pluginVersion}" +description="${pluginDescription}" + +[[accessTransformers]] +file="META-INF/accesstransformer.cfg" + +[[dependencies.spark]] +modId = "neoforge" +mandatory=true +versionRange = "[20,)" +ordering = "NONE" +side = "BOTH" \ No newline at end of file diff --git a/spark-neoforge/src/main/resources/pack.mcmeta b/spark-neoforge/src/main/resources/pack.mcmeta new file mode 100644 index 00000000000..d34a5b78cd7 --- /dev/null +++ 
b/spark-neoforge/src/main/resources/pack.mcmeta @@ -0,0 +1,6 @@ +{ + "pack": { + "description": "spark resources", + "pack_format": 6 + } +} diff --git a/spark-nukkit/build.gradle b/spark-nukkit/build.gradle deleted file mode 100644 index d2e4833d221..00000000000 --- a/spark-nukkit/build.gradle +++ /dev/null @@ -1,44 +0,0 @@ -plugins { - id 'com.github.johnrengelman.shadow' version '7.0.0' -} - -dependencies { - implementation project(':spark-common') - implementation 'net.kyori:adventure-text-serializer-legacy:4.12.0' - compileOnly 'cn.nukkit:nukkit:1.0-SNAPSHOT' -} - -repositories { - maven { url 'https://repo.opencollab.dev/main/' } -} - -processResources { - from(sourceSets.main.resources.srcDirs) { - expand ( - 'pluginVersion': project.pluginVersion, - 'pluginDescription': project.pluginDescription - ) - include 'plugin.yml' - } -} - -shadowJar { - archiveName = "spark-${project.pluginVersion}-nukkit.jar" - - relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' - relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' - relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' - relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' - relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' - relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' - relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks' - - exclude 'module-info.class' - exclude 'META-INF/maven/**' - exclude 'META-INF/proguard/**' -} - -artifacts { - archives shadowJar - shadow shadowJar -} diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java deleted file mode 100644 index ae212413772..00000000000 --- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -package me.lucko.spark.nukkit; - -import me.lucko.spark.api.Spark; -import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.SparkPlugin; -import me.lucko.spark.common.monitor.ping.PlayerPingProvider; -import me.lucko.spark.common.platform.PlatformInfo; -import me.lucko.spark.common.sampler.source.ClassSourceLookup; - -import cn.nukkit.command.Command; -import cn.nukkit.command.CommandSender; -import cn.nukkit.plugin.PluginBase; -import cn.nukkit.plugin.service.ServicePriority; - -import java.nio.file.Path; -import java.util.logging.Level; -import java.util.stream.Stream; - -public class NukkitSparkPlugin extends PluginBase implements SparkPlugin { - private SparkPlatform platform; - - @Override - public void onEnable() { - this.platform = new SparkPlatform(this); - this.platform.enable(); - } - - @Override - public void onDisable() { - this.platform.disable(); - } - - @Override - public boolean onCommand(CommandSender sender, Command command, String label, String[] args) { - this.platform.executeCommand(new NukkitCommandSender(sender), args); - return true; - } - - @Override - public String getVersion() { - return getDescription().getVersion(); - } - - @Override - public Path getPluginDirectory() { - return getDataFolder().toPath(); - } - - @Override - public String getCommandName() { - return "spark"; - } - - @Override - public Stream getCommandSenders() { - return Stream.concat( - getServer().getOnlinePlayers().values().stream(), - Stream.of(getServer().getConsoleSender()) - ).map(NukkitCommandSender::new); - } - - @Override - public void executeAsync(Runnable task) { - getServer().getScheduler().scheduleTask(this, task, true); - } - - @Override - public void log(Level level, String msg) { - if (level == Level.INFO) { - getLogger().info(msg); - } else if (level == Level.WARNING) { - getLogger().warning(msg); - } else if (level == Level.SEVERE) { - getLogger().error(msg); - } else { - throw new IllegalArgumentException(level.getName()); - } - } - - @Override - public ClassSourceLookup createClassSourceLookup() { - return new NukkitClassSourceLookup(); - } - - @Override - public PlayerPingProvider createPlayerPingProvider() { - return new NukkitPlayerPingProvider(getServer()); - } - - @Override - public PlatformInfo getPlatformInfo() { - return new NukkitPlatformInfo(getServer()); - } - - @Override - public void registerApi(Spark api) { - getServer().getServiceManager().register(Spark.class, api, this, ServicePriority.NORMAL); - } -} diff --git a/spark-nukkit/src/main/resources/plugin.yml b/spark-nukkit/src/main/resources/plugin.yml deleted file mode 100644 index cab74b5bdb3..00000000000 --- a/spark-nukkit/src/main/resources/plugin.yml +++ /dev/null @@ -1,10 +0,0 @@ -name: spark -version: ${pluginVersion} -description: ${pluginDescription} -author: Luck -website: https://spark.lucko.me/ -main: me.lucko.spark.nukkit.NukkitSparkPlugin -api: [1.0.5] -commands: - spark: - description: Main plugin command \ No newline at end of file diff --git a/spark-paper/build.gradle b/spark-paper/build.gradle new file mode 100644 index 00000000000..fc20f20aa88 --- /dev/null +++ b/spark-paper/build.gradle @@ -0,0 +1,75 @@ +plugins { + id 'net.kyori.blossom' version '1.3.1' + id 'com.gradleup.shadow' version '8.3.8' + id 'maven-publish' +} + +tasks.withType(JavaCompile) { + // override, compile targeting J21 + options.release = 21 +} + +tasks.jar { + archiveClassifier = 'original' +} + +dependencies { + implementation project(':spark-common') + compileOnly 
'io.papermc.paper:paper-api:1.21.1-R0.1-SNAPSHOT' +} + +repositories { + maven { url "https://repo.papermc.io/repository/maven-public/" } +} + +blossom { + replaceTokenIn('src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java') + replaceToken '@version@', project.pluginVersion +} + +shadowJar { + archiveFileName = "spark-${project.pluginVersion}-paper.jar" + archiveClassifier = '' + + dependencies { + exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$')) + exclude(dependency('net.bytebuddy:byte-buddy-agent')) + } + + relocate 'net.kyori.adventure.text.feature.pagination', 'me.lucko.spark.paper.lib.adventure.pagination' + relocate 'com.google.protobuf', 'me.lucko.spark.paper.lib.protobuf' + relocate 'org.objectweb.asm', 'me.lucko.spark.paper.lib.asm' + relocate 'one.profiler', 'me.lucko.spark.paper.lib.asyncprofiler' + relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.paper.lib.bytesocks' + relocate 'org.java_websocket', 'me.lucko.spark.paper.lib.bytesocks.ws' + + // nest common classes beneath the paper package to avoid conflicts with spark-bukkit + relocate 'me.lucko.spark.common', 'me.lucko.spark.paper.common' + relocate 'me.lucko.spark.proto', 'me.lucko.spark.paper.proto' + relocate 'spark-native', 'spark-paper-native' + + project.applyExcludes(delegate) +} + +artifacts { + archives shadowJar + shadow shadowJar +} + +publishing { + //repositories { + // maven { + // url = 'https://oss.sonatype.org/content/repositories/snapshots' + // credentials { + // username = sonatypeUsername + // password = sonatypePassword + // } + // } + //} + publications { + shadow(MavenPublication) { publication -> + project.shadow.component(publication) + version = "${project.pluginVersion}-SNAPSHOT" + } + } +} diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperClassSourceLookup.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperClassSourceLookup.java new file mode 100644 index 00000000000..2c5f7c0ea4b --- /dev/null +++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperClassSourceLookup.java @@ -0,0 +1,61 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.paper; + +import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import org.bukkit.plugin.java.JavaPlugin; + +import java.lang.reflect.Field; + +public class PaperClassSourceLookup extends ClassSourceLookup.ByClassLoader { + private static final Class PLUGIN_CLASS_LOADER; + private static final Field PLUGIN_FIELD; + + private static final Class PAPER_PLUGIN_CLASS_LOADER; + private static final Field PAPER_PLUGIN_FIELD; + + static { + try { + PLUGIN_CLASS_LOADER = Class.forName("org.bukkit.plugin.java.PluginClassLoader"); + PLUGIN_FIELD = PLUGIN_CLASS_LOADER.getDeclaredField("plugin"); + PLUGIN_FIELD.setAccessible(true); + + PAPER_PLUGIN_CLASS_LOADER = Class.forName("io.papermc.paper.plugin.entrypoint.classloader.PaperPluginClassLoader"); + PAPER_PLUGIN_FIELD = PAPER_PLUGIN_CLASS_LOADER.getDeclaredField("loadedJavaPlugin"); + PAPER_PLUGIN_FIELD.setAccessible(true); + } catch (ReflectiveOperationException e) { + throw new ExceptionInInitializerError(e); + } + } + + @Override + public String identify(ClassLoader loader) throws ReflectiveOperationException { + if (PLUGIN_CLASS_LOADER.isInstance(loader)) { + JavaPlugin plugin = (JavaPlugin) PLUGIN_FIELD.get(loader); + return plugin.getName(); + } else if (PAPER_PLUGIN_CLASS_LOADER.isInstance(loader)) { + JavaPlugin plugin = (JavaPlugin) PAPER_PLUGIN_FIELD.get(loader); + return plugin.getName(); + } + return null; + } +} + diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitCommandSender.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperCommandSender.java similarity index 67% rename from spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitCommandSender.java rename to spark-paper/src/main/java/me/lucko/spark/paper/PaperCommandSender.java index 56d143fed56..c3b569d105e 100644 --- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitCommandSender.java +++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperCommandSender.java @@ -18,22 +18,19 @@ * along with this program. If not, see . 
*/ -package me.lucko.spark.nukkit; +package me.lucko.spark.paper; import me.lucko.spark.common.command.sender.AbstractCommandSender; - import net.kyori.adventure.text.Component; -import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer; - -import cn.nukkit.Player; -import cn.nukkit.command.CommandSender; +import org.bukkit.command.CommandSender; +import org.bukkit.entity.Player; import java.util.UUID; -public class NukkitCommandSender extends AbstractCommandSender { +public class PaperCommandSender extends AbstractCommandSender { - public NukkitCommandSender(CommandSender delegate) { - super(delegate); + public PaperCommandSender(CommandSender sender) { + super(sender); } @Override @@ -43,19 +40,19 @@ public String getName() { @Override public UUID getUniqueId() { - if (this.delegate instanceof Player) { - return ((Player) this.delegate).getUniqueId(); + if (super.delegate instanceof Player player) { + return player.getUniqueId(); } return null; } @Override public void sendMessage(Component message) { - this.delegate.sendMessage(LegacyComponentSerializer.legacySection().serialize(message)); + super.delegate.sendMessage(message); } @Override public boolean hasPermission(String permission) { - return this.delegate.hasPermission(permission); + return super.delegate.hasPermission(permission); } } diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlatformInfo.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlatformInfo.java similarity index 71% rename from spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlatformInfo.java rename to spark-paper/src/main/java/me/lucko/spark/paper/PaperPlatformInfo.java index ab7a40bd04c..114175e9e23 100644 --- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlatformInfo.java +++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlatformInfo.java @@ -18,18 +18,13 @@ * along with this program. If not, see . */ -package me.lucko.spark.nukkit; +package me.lucko.spark.paper; +import io.papermc.paper.ServerBuildInfo; import me.lucko.spark.common.platform.PlatformInfo; -import cn.nukkit.Server; - -public class NukkitPlatformInfo implements PlatformInfo { - private final Server server; - - public NukkitPlatformInfo(Server server) { - this.server = server; - } +public enum PaperPlatformInfo implements PlatformInfo { + INSTANCE; @Override public Type getType() { @@ -38,16 +33,21 @@ public Type getType() { @Override public String getName() { - return "Nukkit"; + return "Paper"; + } + + @Override + public String getBrand() { + return ServerBuildInfo.buildInfo().brandName(); } @Override public String getVersion() { - return this.server.getNukkitVersion(); + return ServerBuildInfo.buildInfo().asString(ServerBuildInfo.StringRepresentation.VERSION_SIMPLE); } @Override public String getMinecraftVersion() { - return this.server.getVersion(); + return ServerBuildInfo.buildInfo().minecraftVersionId(); } } diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlayerPingProvider.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlayerPingProvider.java similarity index 81% rename from spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlayerPingProvider.java rename to spark-paper/src/main/java/me/lucko/spark/paper/PaperPlayerPingProvider.java index fc25d7c0b7a..e896b214caf 100644 --- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlayerPingProvider.java +++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlayerPingProvider.java @@ -18,28 +18,26 @@ * along with this program. 
If not, see <https://www.gnu.org/licenses/>. */ -package me.lucko.spark.nukkit; +package me.lucko.spark.paper; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.common.monitor.ping.PlayerPingProvider; - -import cn.nukkit.Player; -import cn.nukkit.Server; +import org.bukkit.Server; +import org.bukkit.entity.Player; import java.util.Map; -public class NukkitPlayerPingProvider implements PlayerPingProvider { +public class PaperPlayerPingProvider implements PlayerPingProvider { private final Server server; - public NukkitPlayerPingProvider(Server server) { + public PaperPlayerPingProvider(Server server) { this.server = server; } @Override public Map<String, Integer> poll() { ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder(); - for (Player player : this.server.getOnlinePlayers().values()) { + for (Player player : this.server.getOnlinePlayers()) { builder.put(player.getName(), player.getPing()); } return builder.build(); diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperServerConfigProvider.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperServerConfigProvider.java new file mode 100644 index 00000000000..d1301f8faea --- /dev/null +++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperServerConfigProvider.java @@ -0,0 +1,159 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */ + +package me.lucko.spark.paper; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonSerializer; +import me.lucko.spark.common.platform.serverconfig.ConfigParser; +import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter; +import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; +import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; +import org.bukkit.Bukkit; +import org.bukkit.World; +import org.bukkit.configuration.MemorySection; +import org.bukkit.configuration.file.YamlConfiguration; + +import java.io.BufferedReader; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.Map; + +public class PaperServerConfigProvider extends ServerConfigProvider { + + /** A map of provided files and their type */ + private static final Map FILES; + /** A collection of paths to be excluded from the files */ + private static final Collection HIDDEN_PATHS; + + public PaperServerConfigProvider() { + super(FILES, HIDDEN_PATHS); + } + + private static class YamlConfigParser implements ConfigParser { + public static final YamlConfigParser INSTANCE = new YamlConfigParser(); + protected static final Gson GSON = new GsonBuilder() + .registerTypeAdapter(MemorySection.class, (JsonSerializer) (obj, type, ctx) -> ctx.serialize(obj.getValues(false))) + .create(); + + @Override + public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException { + Map values = this.parse(Paths.get(file)); + if (values == null) { + return null; + } + + return filter.apply(GSON.toJsonTree(values)); + } + + @Override + public Map parse(BufferedReader reader) throws IOException { + YamlConfiguration config = YamlConfiguration.loadConfiguration(reader); + return config.getValues(false); + } + } + + // Paper 1.19+ split config layout + private static class SplitYamlConfigParser extends YamlConfigParser { + public static final SplitYamlConfigParser INSTANCE = new SplitYamlConfigParser(); + + @Override + public JsonElement load(String group, ExcludedConfigFilter filter) throws IOException { + String prefix = group.replace("/", ""); + + Path configDir = Paths.get("config"); + if (!Files.exists(configDir)) { + return null; + } + + JsonObject root = new JsonObject(); + + for (Map.Entry entry : getNestedFiles(configDir, prefix).entrySet()) { + String fileName = entry.getKey(); + Path path = entry.getValue(); + + Map values = this.parse(path); + if (values == null) { + continue; + } + + // apply the filter individually to each nested file + root.add(fileName, filter.apply(GSON.toJsonTree(values))); + } + + return root; + } + + private static Map getNestedFiles(Path configDir, String prefix) { + Map files = new LinkedHashMap<>(); + files.put("global.yml", configDir.resolve(prefix + "-global.yml")); + files.put("world-defaults.yml", configDir.resolve(prefix + "-world-defaults.yml")); + for (World world : Bukkit.getWorlds()) { + files.put(world.getName() + ".yml", world.getWorldFolder().toPath().resolve(prefix + "-world.yml")); + } + return files; + } + } + + static { + ImmutableMap.Builder files = ImmutableMap.builder() + .put("server.properties", PropertiesConfigParser.INSTANCE) + .put("bukkit.yml", 
YamlConfigParser.INSTANCE) + .put("spigot.yml", YamlConfigParser.INSTANCE) + .put("paper.yml", YamlConfigParser.INSTANCE) + .put("paper/", SplitYamlConfigParser.INSTANCE) + .put("purpur.yml", YamlConfigParser.INSTANCE) + .put("pufferfish.yml", YamlConfigParser.INSTANCE); + + for (String config : getSystemPropertyList("spark.serverconfigs.extra")) { + files.put(config, YamlConfigParser.INSTANCE); + } + + ImmutableSet.Builder hiddenPaths = ImmutableSet.builder() + .add("database") + .add("settings.bungeecord-addresses") + .add("settings.velocity-support.secret") + .add("proxies.velocity.secret") + .add("server-ip") + .add("motd") + .add("resource-pack") + .add("rconpassword") + .add("rconip") + .add("level-seed") + .add("world-settings.*.feature-seeds") + .add("world-settings.*.seed-*") + .add("feature-seeds") + .add("seed-*") + .addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths")); + + FILES = files.build(); + HIDDEN_PATHS = hiddenPaths.build(); + } + +} diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java new file mode 100644 index 00000000000..3969d51fd6c --- /dev/null +++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java @@ -0,0 +1,222 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.paper; + +import me.lucko.spark.api.Spark; +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.SparkPlugin; +import me.lucko.spark.common.monitor.ping.PlayerPingProvider; +import me.lucko.spark.common.platform.PlatformInfo; +import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; +import me.lucko.spark.common.platform.world.WorldInfoProvider; +import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.SourceMetadata; +import me.lucko.spark.common.tick.TickHook; +import me.lucko.spark.common.tick.TickReporter; +import me.lucko.spark.common.util.classfinder.ClassFinder; +import me.lucko.spark.paper.api.PaperClassLookup; +import me.lucko.spark.paper.api.PaperScheduler; +import me.lucko.spark.paper.api.PaperSparkModule; +import org.bukkit.Server; +import org.bukkit.command.CommandSender; +import org.bukkit.plugin.Plugin; + +import java.nio.file.Path; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.stream.Stream; + +public class PaperSparkPlugin implements PaperSparkModule, SparkPlugin { + private final Server server; + private final Logger logger; + private final PaperScheduler scheduler; + private final PaperClassLookup classLookup; + + private final PaperTickHook tickHook; + private final PaperTickReporter tickReporter; + private final ThreadDumper gameThreadDumper; + private final SparkPlatform platform; + + public PaperSparkPlugin(Server server, Logger logger, PaperScheduler scheduler, PaperClassLookup classLookup) { + this.server = server; + this.logger = logger; + this.scheduler = scheduler; + this.classLookup = classLookup; + this.tickHook = new PaperTickHook(); + this.tickReporter = new PaperTickReporter(); + this.gameThreadDumper = new ThreadDumper.Specific(Thread.currentThread()); + this.platform = new SparkPlatform(this); + } + + @Override + public void enable() { + this.platform.enable(); + } + + @Override + public void disable() { + this.platform.disable(); + } + + @Override + public void executeCommand(CommandSender sender, String[] args) { + this.platform.executeCommand(new PaperCommandSender(sender), args); + } + + @Override + public List tabComplete(CommandSender sender, String[] args) { + return this.platform.tabCompleteCommand(new PaperCommandSender(sender), args); + } + + @Override + public boolean hasPermission(CommandSender sender) { + return this.platform.hasPermissionForAnyCommand(new PaperCommandSender(sender)); + } + + @Override + public Collection getPermissions() { + return this.platform.getAllSparkPermissions(); + } + + @Override + public void onServerTickStart() { + this.tickHook.onTick(); + } + + @Override + public void onServerTickEnd(double duration) { + this.tickReporter.onTick(duration); + } + + @Override + public String getVersion() { + return "@version@"; + } + + @Override + public Path getPluginDirectory() { + return this.server.getPluginsFolder().toPath().resolve("spark"); + } + + @Override + public String getCommandName() { + return "spark"; + } + + @Override + public Stream getCommandSenders() { + return Stream.concat( + this.server.getOnlinePlayers().stream(), + Stream.of(this.server.getConsoleSender()) + ).map(PaperCommandSender::new); + } + + @Override + public void executeAsync(Runnable task) { + this.scheduler.executeAsync(task); + } + + @Override + public void 
executeSync(Runnable task) { + this.scheduler.executeSync(task); + } + + @Override + public void log(Level level, String msg) { + this.logger.log(level, msg); + } + + @Override + public void log(Level level, String msg, Throwable throwable) { + this.logger.log(level, msg, throwable); + } + + @Override + public ThreadDumper getDefaultThreadDumper() { + return this.gameThreadDumper; + } + + @Override + public TickHook createTickHook() { + return this.tickHook; + } + + @Override + public TickReporter createTickReporter() { + return this.tickReporter; + } + + @Override + public ClassSourceLookup createClassSourceLookup() { + return new PaperClassSourceLookup(); + } + + @Override + public ClassFinder createClassFinder() { + return className -> { + try { + return this.classLookup.lookup(className); + } catch (Exception e) { + return null; + } + }; + } + + @Override + public Collection getKnownSources() { + return SourceMetadata.gather( + Arrays.asList(this.server.getPluginManager().getPlugins()), + Plugin::getName, + plugin -> plugin.getPluginMeta().getVersion(), + plugin -> String.join(", ", plugin.getPluginMeta().getAuthors()), + plugin -> plugin.getPluginMeta().getDescription() + ); + } + + @Override + public PlayerPingProvider createPlayerPingProvider() { + return new PaperPlayerPingProvider(this.server); + } + + @Override + public ServerConfigProvider createServerConfigProvider() { + return new PaperServerConfigProvider(); + } + + @Override + public WorldInfoProvider createWorldInfoProvider() { + return new PaperWorldInfoProvider(this.server); + } + + @Override + public PlatformInfo getPlatformInfo() { + return PaperPlatformInfo.INSTANCE; + } + + @Override + public void registerApi(Spark api) { + // this.server.getServicesManager().register(Spark.class, api, null, ServicePriority.Normal); + } +} diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7TickHook.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickHook.java similarity index 66% rename from spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7TickHook.java rename to spark-paper/src/main/java/me/lucko/spark/paper/PaperTickHook.java index 26185222aa9..06126e16520 100644 --- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7TickHook.java +++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickHook.java @@ -18,34 +18,29 @@ * along with this program. If not, see . 
*/ -package me.lucko.spark.sponge; +package me.lucko.spark.paper; import me.lucko.spark.common.tick.AbstractTickHook; import me.lucko.spark.common.tick.TickHook; +import org.bukkit.event.Listener; -import org.spongepowered.api.scheduler.Task; - -public class Sponge7TickHook extends AbstractTickHook implements TickHook, Runnable { - private final Sponge7SparkPlugin plugin; - private Task task; - - public Sponge7TickHook(Sponge7SparkPlugin plugin) { - this.plugin = plugin; - } - - @Override - public void run() { - onTick(); - } +public class PaperTickHook extends AbstractTickHook implements TickHook, Listener { + private boolean open = false; @Override public void start() { - this.task = Task.builder().intervalTicks(1).name("spark-ticker").execute(this).submit(this.plugin); + this.open = true; } @Override public void close() { - this.task.cancel(); + this.open = false; } + @Override + public void onTick() { + if (this.open) { + super.onTick(); + } + } } diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlatformInfo.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickReporter.java similarity index 62% rename from spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlatformInfo.java rename to spark-paper/src/main/java/me/lucko/spark/paper/PaperTickReporter.java index 14b8f607a92..4db1f16edd9 100644 --- a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlatformInfo.java +++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickReporter.java @@ -18,31 +18,29 @@ * along with this program. If not, see . */ -package me.lucko.spark.waterdog; +package me.lucko.spark.paper; -import me.lucko.spark.common.platform.PlatformInfo; +import me.lucko.spark.common.tick.AbstractTickReporter; +import me.lucko.spark.common.tick.TickReporter; +import org.bukkit.event.Listener; -import dev.waterdog.waterdogpe.WaterdogPE; - -public class WaterdogPlatformInfo implements PlatformInfo { - - @Override - public Type getType() { - return Type.PROXY; - } +public class PaperTickReporter extends AbstractTickReporter implements TickReporter, Listener { + private boolean open = false; @Override - public String getName() { - return "Waterdog"; + public void start() { + this.open = true; } @Override - public String getVersion() { - return WaterdogPE.version().baseVersion(); + public void close() { + this.open = false; } @Override - public String getMinecraftVersion() { - return null; + public void onTick(double duration) { + if (this.open) { + super.onTick(duration); + } } } diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperWorldInfoProvider.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperWorldInfoProvider.java new file mode 100644 index 00000000000..4670be10d8f --- /dev/null +++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperWorldInfoProvider.java @@ -0,0 +1,150 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.paper; + +import me.lucko.spark.common.platform.world.AbstractChunkInfo; +import me.lucko.spark.common.platform.world.CountMap; +import me.lucko.spark.common.platform.world.WorldInfoProvider; +import net.kyori.adventure.text.serializer.plain.PlainTextComponentSerializer; +import org.bukkit.Chunk; +import org.bukkit.GameRule; +import org.bukkit.Server; +import org.bukkit.World; +import org.bukkit.entity.Entity; +import org.bukkit.entity.EntityType; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Locale; +import java.util.Objects; +import java.util.stream.Collectors; + +public class PaperWorldInfoProvider implements WorldInfoProvider { + private final Server server; + + public PaperWorldInfoProvider(Server server) { + this.server = server; + } + + @Override + public CountsResult pollCounts() { + int players = this.server.getOnlinePlayers().size(); + int entities = 0; + int tileEntities = 0; + int chunks = 0; + + for (World world : this.server.getWorlds()) { + entities += world.getEntityCount(); + tileEntities += world.getTileEntityCount(); + chunks += world.getChunkCount(); + } + + return new CountsResult(players, entities, tileEntities, chunks); + } + + @Override + public ChunksResult pollChunks() { + ChunksResult data = new ChunksResult<>(); + + for (World world : this.server.getWorlds()) { + Chunk[] chunks = world.getLoadedChunks(); + + List list = new ArrayList<>(chunks.length); + for (Chunk chunk : chunks) { + if (chunk != null) { + list.add(new PaperChunkInfo(chunk)); + } + } + + data.put(world.getName(), list); + } + + return data; + } + + @Override + public GameRulesResult pollGameRules() { + GameRulesResult data = new GameRulesResult(); + + boolean addDefaults = true; // add defaults in the first iteration + for (World world : this.server.getWorlds()) { + for (String gameRule : world.getGameRules()) { + GameRule ruleObj = GameRule.getByName(gameRule); + if (ruleObj == null) { + continue; + } + + if (addDefaults) { + Object defaultValue = world.getGameRuleDefault(ruleObj); + data.putDefault(gameRule, Objects.toString(defaultValue)); + } + + Object value = world.getGameRuleValue(ruleObj); + data.put(gameRule, world.getName(), Objects.toString(value)); + } + + addDefaults = false; + } + + return data; + } + + @Override + public Collection pollDataPacks() { + this.server.getDatapackManager().refreshPacks(); + return this.server.getDatapackManager().getPacks().stream() + .map(pack -> new DataPackInfo( + PlainTextComponentSerializer.plainText().serialize(pack.getTitle()), + PlainTextComponentSerializer.plainText().serialize(pack.getDescription()), + pack.getSource().toString().toLowerCase(Locale.ROOT).replace("_", "") + )) + .collect(Collectors.toList()); + } + + static final class PaperChunkInfo extends AbstractChunkInfo { + private final CountMap entityCounts; + + PaperChunkInfo(Chunk chunk) { + super(chunk.getX(), chunk.getZ()); + + this.entityCounts = new CountMap.EnumKeyed<>(EntityType.class); + for (Entity entity : chunk.getEntities()) { + if (entity != null) { + this.entityCounts.increment(entity.getType()); + } + } + } + + @Override + public CountMap getEntityCounts() { + return this.entityCounts; + } + + @SuppressWarnings("deprecation") + @Override + public String entityTypeName(EntityType type) { + return type.getName(); + } + + } + +} diff --git 
a/spark-paper/src/main/java/me/lucko/spark/paper/api/Compatibility.java b/spark-paper/src/main/java/me/lucko/spark/paper/api/Compatibility.java new file mode 100644 index 00000000000..deca3376e47 --- /dev/null +++ b/spark-paper/src/main/java/me/lucko/spark/paper/api/Compatibility.java @@ -0,0 +1,36 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.paper.api; + +/** + * Used to indicate the version of the spark module API supported by the Paper server. + * + *
<p>This allows spark to add/deprecate callback methods in the {@link PaperSparkModule} interface, + * but know at runtime whether they will actually be called by Paper.</p>
+ */ +public enum Compatibility { + + /** + * Indicates that Paper supports version 1.0 of the spark module API. + */ + VERSION_1_0 + +} diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperClassLookup.java b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperClassLookup.java new file mode 100644 index 00000000000..280e4d9364a --- /dev/null +++ b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperClassLookup.java @@ -0,0 +1,27 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.paper.api; + +public interface PaperClassLookup { + + Class lookup(String className) throws Exception; + +} diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperScheduler.java b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperScheduler.java new file mode 100644 index 00000000000..7f4fdd31e6d --- /dev/null +++ b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperScheduler.java @@ -0,0 +1,29 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.paper.api; + +public interface PaperScheduler { + + void executeAsync(Runnable task); + + void executeSync(Runnable task); + +} diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperSparkModule.java b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperSparkModule.java new file mode 100644 index 00000000000..ce8b74a3704 --- /dev/null +++ b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperSparkModule.java @@ -0,0 +1,125 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
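As a quick illustration of the two callback interfaces above, a server-side bridge might look roughly like the following sketch. This is not part of the diff: the package and class name are hypothetical, and a real implementation would delegate to the server's own scheduler rather than a bare executor.

package io.papermc.paper.example; // hypothetical package, for illustration only

import me.lucko.spark.paper.api.PaperClassLookup;
import me.lucko.spark.paper.api.PaperScheduler;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public final class ExampleBridge implements PaperScheduler, PaperClassLookup {
    private final ExecutorService asyncPool = Executors.newCachedThreadPool();

    @Override
    public void executeAsync(Runnable task) {
        this.asyncPool.submit(task); // stand-in for the server's async scheduler
    }

    @Override
    public void executeSync(Runnable task) {
        task.run(); // stand-in: a real server would post this to the main thread
    }

    @Override
    public Class<?> lookup(String className) throws Exception {
        return Class.forName(className); // resolve via the server's class loaders
    }
}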
+ * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <https://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.paper.api; + +import me.lucko.spark.paper.PaperSparkPlugin; +import org.bukkit.Server; +import org.bukkit.command.CommandSender; + +import java.util.Collection; +import java.util.List; +import java.util.logging.Logger; + +/** + * Spark module for use as a library within the Paper server implementation. + * + * <p>Paper provides:</p>
 + * <ul>
 + *     <li>a {@link Server} instance</li>
 + *     <li>a {@link Logger} instance</li>
 + *     <li>a {@link PaperScheduler} instance</li>
 + *     <li>a {@link PaperClassLookup} instance</li>
 + * </ul>
 + *
 + * <p>Paper is expected to:</p>
 + * <ul>
 + *     <li>call {@link #enable()} to enable spark, either immediately or when the server has finished starting</li>
 + *     <li>call {@link #disable()} to disable spark when the server is stopping</li>
 + *     <li>call {@link #executeCommand(CommandSender, String[])} when the spark command is executed</li>
 + *     <li>call {@link #tabComplete(CommandSender, String[])} when the spark command is tab completed</li>
 + *     <li>call {@link #onServerTickStart()} at the start of each server tick</li>
 + *     <li>call {@link #onServerTickEnd(double)} at the end of each server tick</li>
 + * </ul>
 + *
 + * <p>This interface and the other interfaces in this package define the API between Paper and spark. All other classes
 + * are subject to change, even between minor versions.</p>
+ */ +public interface PaperSparkModule { + + /** + * Creates a new PaperSparkModule. + * + * @param compatibility the Paper/spark compatibility version + * @param server the server + * @param logger a logger that can be used by spark + * @param scheduler the scheduler + * @param classLookup a class lookup utility + * @return a new PaperSparkModule + */ + static PaperSparkModule create(Compatibility compatibility, Server server, Logger logger, PaperScheduler scheduler, PaperClassLookup classLookup) { + return new PaperSparkPlugin(server, logger, scheduler, classLookup); + } + + /** + * Enables the spark module. + */ + void enable(); + + /** + * Disables the spark module. + */ + void disable(); + + /** + * Handles a command execution. + * + * @param sender the sender + * @param args the command arguments + */ + void executeCommand(CommandSender sender, String[] args); + + /** + * Handles a tab completion request. + * + * @param sender the sender + * @param args the command arguments + * @return a list of completions + */ + List tabComplete(CommandSender sender, String[] args); + + /** + * Gets if the sender has permission to execute any spark commands. + * + * @param sender the sender + * @return if the sender has permission + */ + boolean hasPermission(CommandSender sender); + + /** + * Gets a collection of all known spark permissions. + * + * @return a set of permissions + */ + Collection getPermissions(); + + /** + * Called by Paper at the start of each server tick. + */ + void onServerTickStart(); + + /** + * Called by Paper at the end of each server tick. + * + * @param duration the duration of the tick + */ + void onServerTickEnd(double duration); + +} diff --git a/spark-sponge8/build.gradle b/spark-sponge/build.gradle similarity index 71% rename from spark-sponge8/build.gradle rename to spark-sponge/build.gradle index cbd922fe9f8..d3d2eea6239 100644 --- a/spark-sponge8/build.gradle +++ b/spark-sponge/build.gradle @@ -1,10 +1,16 @@ plugins { - id 'com.github.johnrengelman.shadow' version '7.0.0' + id 'com.gradleup.shadow' version '8.3.8' +} + +tasks.withType(JavaCompile) { + // override, compile targeting J21 + options.release = 21 } dependencies { implementation project(':spark-common') - compileOnly "org.spongepowered:spongeapi:8.0.0-SNAPSHOT" + compileOnly "org.spongepowered:spongeapi:12.0.0" + compileOnly "com.google.guava:guava:33.3.1-jre" } repositories { @@ -22,7 +28,7 @@ processResources { } shadowJar { - archiveFileName = "spark-${project.pluginVersion}-sponge8.jar" + archiveFileName = "spark-${project.pluginVersion}-sponge.jar" dependencies { exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$')) @@ -34,10 +40,9 @@ shadowJar { relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks' + relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws' - exclude 'module-info.class' - exclude 'META-INF/maven/**' - exclude 'META-INF/proguard/**' + project.applyExcludes(delegate) } artifacts { diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeClassSourceLookup.java similarity index 75% rename from spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java rename to spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeClassSourceLookup.java index 7f02e755103..1effb194eae 100644 --- 
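To make the lifecycle contract above concrete, here is a hedged sketch of the wiring a server implementation might use. The host class is hypothetical (Paper's real integration code is not part of this PR), and it assumes the ExampleBridge sketched earlier:

package io.papermc.paper.example; // hypothetical package, for illustration only

import me.lucko.spark.paper.api.Compatibility;
import me.lucko.spark.paper.api.PaperSparkModule;
import org.bukkit.Server;
import org.bukkit.command.CommandSender;

import java.util.logging.Logger;

public final class ExampleSparkHost {
    private final PaperSparkModule spark;

    public ExampleSparkHost(Server server, Logger logger, ExampleBridge bridge) {
        // the bridge satisfies both the PaperScheduler and PaperClassLookup roles
        this.spark = PaperSparkModule.create(Compatibility.VERSION_1_0, server, logger, bridge, bridge);
    }

    public void serverStarted() {
        this.spark.enable(); // enable spark once the server has finished starting
    }

    public void tickStart() {
        this.spark.onServerTickStart(); // called at the start of every server tick
    }

    public void tickEnd(double duration) {
        this.spark.onServerTickEnd(duration); // called at the end of every server tick
    }

    public void sparkCommand(CommandSender sender, String[] args) {
        if (this.spark.hasPermission(sender)) {
            this.spark.executeCommand(sender, args);
        }
    }

    public void serverStopping() {
        this.spark.disable(); // disable spark when the server is stopping
    }
}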
a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java +++ b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeClassSourceLookup.java @@ -21,25 +21,23 @@ package me.lucko.spark.sponge; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.common.sampler.source.ClassSourceLookup; - import org.spongepowered.api.Game; import org.spongepowered.plugin.PluginCandidate; import org.spongepowered.plugin.PluginContainer; -import org.spongepowered.plugin.builtin.jvm.JVMPluginContainer; -import org.spongepowered.plugin.builtin.jvm.locator.JVMPluginResource; +import org.spongepowered.plugin.builtin.StandardPluginContainer; import java.lang.reflect.Field; import java.nio.file.Path; import java.util.Collection; +import java.util.HashMap; import java.util.Map; -public class Sponge8ClassSourceLookup extends ClassSourceLookup.ByCodeSource { +public class SpongeClassSourceLookup extends ClassSourceLookup.ByCodeSource { private final Path modsDirectory; private final Map pathToPluginMap; - public Sponge8ClassSourceLookup(Game game) { + public SpongeClassSourceLookup(Game game) { this.modsDirectory = game.gameDirectory().resolve("mods").toAbsolutePath().normalize(); this.pathToPluginMap = constructPathToPluginIdMap(game.pluginManager().plugins()); } @@ -59,26 +57,25 @@ public String identifyFile(Path path) { } // pretty nasty, but if it fails it doesn't really matter - @SuppressWarnings("unchecked") private static Map constructPathToPluginIdMap(Collection plugins) { - ImmutableMap.Builder builder = ImmutableMap.builder(); + Map map = new HashMap<>(); try { - Field candidateField = JVMPluginContainer.class.getDeclaredField("candidate"); + Field candidateField = StandardPluginContainer.class.getDeclaredField("candidate"); candidateField.setAccessible(true); for (PluginContainer plugin : plugins) { - if (plugin instanceof JVMPluginContainer) { - PluginCandidate candidate = (PluginCandidate) candidateField.get(plugin); + if (plugin instanceof StandardPluginContainer) { + PluginCandidate candidate = (PluginCandidate) candidateField.get(plugin); Path path = candidate.resource().path().toAbsolutePath().normalize(); - builder.put(path, plugin.metadata().id()); + map.putIfAbsent(path, plugin.metadata().id()); } } } catch (Exception e) { // ignore } - return builder.build(); + return ImmutableMap.copyOf(map); } } diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8CommandSender.java b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeCommandSender.java similarity index 72% rename from spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8CommandSender.java rename to spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeCommandSender.java index e7878dcfff1..76439d28c5a 100644 --- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8CommandSender.java +++ b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeCommandSender.java @@ -21,30 +21,26 @@ package me.lucko.spark.sponge; import me.lucko.spark.common.command.sender.AbstractCommandSender; - import net.kyori.adventure.audience.Audience; import net.kyori.adventure.identity.Identity; import net.kyori.adventure.text.Component; - import org.spongepowered.api.command.CommandCause; import org.spongepowered.api.service.permission.Subject; import org.spongepowered.api.util.Identifiable; import java.util.UUID; -import static java.nio.charset.StandardCharsets.UTF_8; - -public class Sponge8CommandSender extends AbstractCommandSender { +public class SpongeCommandSender extends 
AbstractCommandSender { private final CommandCause cause; private final Audience audience; - public Sponge8CommandSender(CommandCause cause) { + public SpongeCommandSender(CommandCause cause) { super(cause); this.cause = cause; this.audience = cause.audience(); } - public Sponge8CommandSender(T cause) { + public SpongeCommandSender(T cause) { super(cause); this.cause = null; this.audience = cause; @@ -52,7 +48,8 @@ public Sponge8CommandSender(T cause) { @Override public String getName() { - return super.delegate.friendlyIdentifier().orElse(super.delegate.identifier()); + String name = super.delegate.friendlyIdentifier().orElse(super.delegate.identifier()); + return name.equals("console") ? "Console" : name; } @Override @@ -66,9 +63,11 @@ public UUID getUniqueId() { try { return UUID.fromString(super.delegate.identifier()); - } catch (Exception e) { - return UUID.nameUUIDFromBytes(super.delegate.identifier().getBytes(UTF_8)); + } catch (IllegalArgumentException e) { + // ignore } + + return null; } @Override @@ -82,15 +81,11 @@ public boolean hasPermission(String permission) { } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Sponge8CommandSender that = (Sponge8CommandSender) o; - return this.getUniqueId().equals(that.getUniqueId()); - } - - @Override - public int hashCode() { - return getUniqueId().hashCode(); + protected Object getObjectForComparison() { + UUID uniqueId = getUniqueId(); + if (uniqueId != null) { + return uniqueId; + } + return getName(); } } diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlatformInfo.java b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongePlatformInfo.java similarity index 78% rename from spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlatformInfo.java rename to spark-sponge/src/main/java/me/lucko/spark/sponge/SpongePlatformInfo.java index 9589ddfb911..e5811cd8728 100644 --- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlatformInfo.java +++ b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongePlatformInfo.java @@ -21,14 +21,14 @@ package me.lucko.spark.sponge; import me.lucko.spark.common.platform.PlatformInfo; - import org.spongepowered.api.Game; import org.spongepowered.api.Platform; +import org.spongepowered.plugin.metadata.PluginMetadata; -public class Sponge8PlatformInfo implements PlatformInfo { +public class SpongePlatformInfo implements PlatformInfo { private final Game game; - public Sponge8PlatformInfo(Game game) { + public SpongePlatformInfo(Game game) { this.game = game; } @@ -42,6 +42,12 @@ public String getName() { return "Sponge"; } + @Override + public String getBrand() { + PluginMetadata brandMetadata = this.game.platform().container(Platform.Component.IMPLEMENTATION).metadata(); + return brandMetadata.name().orElseGet(brandMetadata::id); + } + @Override public String getVersion() { return this.game.platform().container(Platform.Component.IMPLEMENTATION).metadata().version().toString(); diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlayerPingProvider.java b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongePlayerPingProvider.java similarity index 66% rename from spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlayerPingProvider.java rename to spark-sponge/src/main/java/me/lucko/spark/sponge/SpongePlayerPingProvider.java index 2bcaf6af984..3aa598ff24b 100644 --- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlayerPingProvider.java +++ 
b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongePlayerPingProvider.java @@ -21,18 +21,19 @@ package me.lucko.spark.sponge; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.common.monitor.ping.PlayerPingProvider; - import org.spongepowered.api.Server; import org.spongepowered.api.entity.living.player.server.ServerPlayer; +import org.spongepowered.api.network.EngineConnectionState; +import org.spongepowered.api.network.ServerConnectionState; +import org.spongepowered.api.network.ServerSideConnection; import java.util.Map; -public class Sponge8PlayerPingProvider implements PlayerPingProvider { +public class SpongePlayerPingProvider implements PlayerPingProvider { private final Server server; - public Sponge8PlayerPingProvider(Server server) { + public SpongePlayerPingProvider(Server server) { this.server = server; } @@ -40,7 +41,12 @@ public Sponge8PlayerPingProvider(Server server) { public Map poll() { ImmutableMap.Builder builder = ImmutableMap.builder(); for (ServerPlayer player : this.server.onlinePlayers()) { - builder.put(player.name(), player.connection().latency()); + ServerSideConnection connection = player.connection(); + EngineConnectionState connectionState = connection.state().orElse(null); + if (connectionState instanceof ServerConnectionState.Game) { + int latency = ((ServerConnectionState.Game) connectionState).latency(); + builder.put(player.name(), latency); + } } return builder.build(); } diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeSparkPlugin.java similarity index 83% rename from spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java rename to spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeSparkPlugin.java index b1d31e948c5..411c6cefe37 100644 --- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java +++ b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeSparkPlugin.java @@ -22,7 +22,6 @@ import com.google.common.base.Suppliers; import com.google.inject.Inject; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.command.sender.CommandSender; @@ -33,9 +32,7 @@ import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.sampler.source.SourceMetadata; import me.lucko.spark.common.tick.TickHook; - import net.kyori.adventure.text.Component; - import org.apache.logging.log4j.Logger; import org.checkerframework.checker.nullness.qual.NonNull; import org.spongepowered.api.Game; @@ -66,7 +63,7 @@ import java.util.stream.Stream; @Plugin("spark") -public class Sponge8SparkPlugin implements SparkPlugin { +public class SpongeSparkPlugin implements SparkPlugin { private final PluginContainer pluginContainer; private final Logger logger; @@ -79,7 +76,7 @@ public class Sponge8SparkPlugin implements SparkPlugin { private SparkPlatform platform; @Inject - public Sponge8SparkPlugin(PluginContainer pluginContainer, Logger logger, Game game, @ConfigDir(sharedRoot = false) Path configDirectory) { + public SpongeSparkPlugin(PluginContainer pluginContainer, Logger logger, Game game, @ConfigDir(sharedRoot = false) Path configDirectory) { this.pluginContainer = pluginContainer; this.logger = logger; this.game = game; @@ -104,7 +101,7 @@ public void onRegisterCommands(final RegisterCommandEvent event) { @Listener public void onEnable(StartedEngineEvent event) { - executeSync(() -> 
this.gameThreadDumper.setThread(Thread.currentThread())); + this.gameThreadDumper.setThread(Thread.currentThread()); this.platform = new SparkPlatform(this); this.platform.enable(); @@ -136,9 +133,9 @@ public Stream getCommandSenders() { return Stream.concat( this.game.server().onlinePlayers().stream(), Stream.of(this.game.systemSubject()) - ).map(Sponge8CommandSender::new); + ).map(SpongeCommandSender::new); } else { - return Stream.of(this.game.systemSubject()).map(Sponge8CommandSender::new); + return Stream.of(this.game.systemSubject()).map(SpongeCommandSender::new); } } @@ -154,14 +151,23 @@ public void executeSync(Runnable task) { @Override public void log(Level level, String msg) { - if (level == Level.INFO) { - this.logger.info(msg); - } else if (level == Level.WARNING) { - this.logger.warn(msg); - } else if (level == Level.SEVERE) { + if (level.intValue() >= 1000) { // severe this.logger.error(msg); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg); + } else { + this.logger.info(msg); + } + } + + @Override + public void log(Level level, String msg, Throwable throwable) { + if (level.intValue() >= 1000) { // severe + this.logger.error(msg, throwable); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg, throwable); } else { - throw new IllegalArgumentException(level.getName()); + this.logger.info(msg, throwable); } } @@ -172,12 +178,12 @@ public ThreadDumper getDefaultThreadDumper() { @Override public TickHook createTickHook() { - return new Sponge8TickHook(this.pluginContainer, this.game); + return new SpongeTickHook(this.pluginContainer, this.game); } @Override public ClassSourceLookup createClassSourceLookup() { - return new Sponge8ClassSourceLookup(this.game); + return new SpongeClassSourceLookup(this.game); } @Override @@ -188,14 +194,15 @@ public Collection getKnownSources() { plugin -> plugin.metadata().version().toString(), plugin -> plugin.metadata().contributors().stream() .map(PluginContributor::name) - .collect(Collectors.joining(", ")) + .collect(Collectors.joining(", ")), + plugin -> plugin.metadata().description().orElse(null) ); } @Override public PlayerPingProvider createPlayerPingProvider() { if (this.game.isServerAvailable()) { - return new Sponge8PlayerPingProvider(this.game.server()); + return new SpongePlayerPingProvider(this.game.server()); } else { return null; } @@ -204,7 +211,7 @@ public PlayerPingProvider createPlayerPingProvider() { @Override public WorldInfoProvider createWorldInfoProvider() { if (this.game.isServerAvailable()) { - return new Sponge8WorldInfoProvider(this.game.server()); + return new SpongeWorldInfoProvider(this.game.server()); } else { return WorldInfoProvider.NO_OP; } @@ -212,25 +219,25 @@ public WorldInfoProvider createWorldInfoProvider() { @Override public PlatformInfo getPlatformInfo() { - return new Sponge8PlatformInfo(this.game); + return new SpongePlatformInfo(this.game); } private static final class SparkCommand implements Command.Raw { - private final Sponge8SparkPlugin plugin; + private final SpongeSparkPlugin plugin; - public SparkCommand(Sponge8SparkPlugin plugin) { + public SparkCommand(SpongeSparkPlugin plugin) { this.plugin = plugin; } @Override public CommandResult process(CommandCause cause, ArgumentReader.Mutable arguments) { - this.plugin.platform.executeCommand(new Sponge8CommandSender(cause), arguments.input().split(" ")); + this.plugin.platform.executeCommand(new SpongeCommandSender(cause), arguments.input().split(" ")); return CommandResult.success(); } @Override 
public List complete(CommandCause cause, ArgumentReader.Mutable arguments) { - return this.plugin.platform.tabCompleteCommand(new Sponge8CommandSender(cause), arguments.input().split(" ")) + return this.plugin.platform.tabCompleteCommand(new SpongeCommandSender(cause), arguments.input().split(" ")) .stream() .map(CommandCompletion::of) .collect(Collectors.toList()); @@ -238,7 +245,7 @@ public List complete(CommandCause cause, ArgumentReader.Mutab @Override public boolean canExecute(CommandCause cause) { - return this.plugin.platform.hasPermissionForAnyCommand(new Sponge8CommandSender(cause)); + return this.plugin.platform.hasPermissionForAnyCommand(new SpongeCommandSender(cause)); } @Override diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8TickHook.java b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeTickHook.java similarity index 91% rename from spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8TickHook.java rename to spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeTickHook.java index 6db51b8b18d..71b454146cb 100644 --- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8TickHook.java +++ b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeTickHook.java @@ -22,19 +22,18 @@ import me.lucko.spark.common.tick.AbstractTickHook; import me.lucko.spark.common.tick.TickHook; - import org.spongepowered.api.Game; import org.spongepowered.api.scheduler.ScheduledTask; import org.spongepowered.api.scheduler.Task; import org.spongepowered.api.util.Ticks; import org.spongepowered.plugin.PluginContainer; -public class Sponge8TickHook extends AbstractTickHook implements TickHook, Runnable { +public class SpongeTickHook extends AbstractTickHook implements TickHook, Runnable { private final PluginContainer plugin; private final Game game; private ScheduledTask task; - public Sponge8TickHook(PluginContainer plugin, Game game) { + public SpongeTickHook(PluginContainer plugin, Game game) { this.plugin = plugin; this.game = game; } diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeWorldInfoProvider.java similarity index 63% rename from spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java rename to spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeWorldInfoProvider.java index 69b4515d03d..2269e6f372d 100644 --- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java +++ b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeWorldInfoProvider.java @@ -22,11 +22,10 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; - import me.lucko.spark.common.platform.world.AbstractChunkInfo; import me.lucko.spark.common.platform.world.CountMap; import me.lucko.spark.common.platform.world.WorldInfoProvider; - +import net.kyori.adventure.text.serializer.plain.PlainTextComponentSerializer; import org.spongepowered.api.Server; import org.spongepowered.api.entity.Entity; import org.spongepowered.api.entity.EntityType; @@ -35,13 +34,15 @@ import org.spongepowered.api.world.server.ServerWorld; import java.util.ArrayList; +import java.util.Collection; import java.util.HashMap; import java.util.List; +import java.util.stream.Collectors; -public class Sponge8WorldInfoProvider implements WorldInfoProvider { +public class SpongeWorldInfoProvider implements WorldInfoProvider { private final Server server; - public Sponge8WorldInfoProvider(Server server) { + public 
SpongeWorldInfoProvider(Server server) { this.server = server; } @@ -62,15 +63,15 @@ public CountsResult pollCounts() { } @Override - public ChunksResult pollChunks() { - ChunksResult data = new ChunksResult<>(); + public ChunksResult pollChunks() { + ChunksResult data = new ChunksResult<>(); for (ServerWorld world : this.server.worldManager().worlds()) { List chunks = Lists.newArrayList(world.loadedChunks()); - List list = new ArrayList<>(chunks.size()); + List list = new ArrayList<>(chunks.size()); for (WorldChunk chunk : chunks) { - list.add(new Sponge7ChunkInfo(chunk)); + list.add(new SpongeChunkInfo(chunk)); } data.put(world.key().value(), list); @@ -79,10 +80,40 @@ public ChunksResult pollChunks() { return data; } - static final class Sponge7ChunkInfo extends AbstractChunkInfo> { + @Override + public GameRulesResult pollGameRules() { + GameRulesResult data = new GameRulesResult(); + + Collection worlds = this.server.worldManager().worlds(); + for (ServerWorld world : worlds) { + String worldName = world.key().value(); + + world.properties().gameRules().forEach((gameRule, value) -> { + String defaultValue = gameRule.defaultValue().toString(); + data.putDefault(gameRule.name(), defaultValue); + + data.put(gameRule.name(), worldName, value.toString()); + }); + } + + return data; + } + + @Override + public Collection pollDataPacks() { + return this.server.packRepository().enabled().stream() + .map(pack -> new DataPackInfo( + pack.id(), + PlainTextComponentSerializer.plainText().serialize(pack.description()), + "unknown" + )) + .collect(Collectors.toList()); + } + + static final class SpongeChunkInfo extends AbstractChunkInfo> { private final CountMap> entityCounts; - Sponge7ChunkInfo(WorldChunk chunk) { + SpongeChunkInfo(WorldChunk chunk) { super(chunk.chunkPosition().x(), chunk.chunkPosition().z()); this.entityCounts = new CountMap.Simple<>(new HashMap<>()); diff --git a/spark-sponge8/src/main/resources/META-INF/sponge_plugins.json b/spark-sponge/src/main/resources/META-INF/sponge_plugins.json similarity index 83% rename from spark-sponge8/src/main/resources/META-INF/sponge_plugins.json rename to spark-sponge/src/main/resources/META-INF/sponge_plugins.json index 55b17726c6e..7c467469706 100644 --- a/spark-sponge8/src/main/resources/META-INF/sponge_plugins.json +++ b/spark-sponge/src/main/resources/META-INF/sponge_plugins.json @@ -7,9 +7,9 @@ "plugins": [ { "id": "spark", - "name": "spark-sponge8", + "name": "spark", "version": "${version}", - "entrypoint": "me.lucko.spark.sponge.Sponge8SparkPlugin", + "entrypoint": "me.lucko.spark.sponge.SpongeSparkPlugin", "description": "${description}", "links": { "homepage": "https://spark.lucko.me/", @@ -25,7 +25,7 @@ "dependencies": [ { "id": "spongeapi", - "version": "8.0.0" + "version": "12.0.0" } ] } diff --git a/spark-sponge7/build.gradle b/spark-sponge7/build.gradle deleted file mode 100644 index 0610a9aa2d5..00000000000 --- a/spark-sponge7/build.gradle +++ /dev/null @@ -1,42 +0,0 @@ -plugins { - id 'net.kyori.blossom' version '1.3.0' - id 'com.github.johnrengelman.shadow' version '7.0.0' -} - -dependencies { - implementation project(':spark-common') - compileOnly 'org.spongepowered:spongeapi:7.3.0' - annotationProcessor 'org.spongepowered:spongeapi:7.3.0' -} - -repositories { - maven { url "https://repo.spongepowered.org/maven" } -} - -blossom { - replaceTokenIn('src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java') - replaceToken '@version@', project.pluginVersion - replaceToken '@desc@', project.pluginDescription -} - -shadowJar { 
- archiveFileName = "spark-${project.pluginVersion}-sponge7.jar" - - relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' - relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' - relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' - relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' - relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' - relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' - relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks' - - exclude 'module-info.class' - exclude 'META-INF/maven/**' - exclude 'META-INF/proguard/**' - exclude 'META-INF/versions/**' -} - -artifacts { - archives shadowJar - shadow shadowJar -} diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java deleted file mode 100644 index 0e3f4ebf582..00000000000 --- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java +++ /dev/null @@ -1,235 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -package me.lucko.spark.sponge; - -import com.google.inject.Inject; - -import me.lucko.spark.api.Spark; -import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.SparkPlugin; -import me.lucko.spark.common.monitor.ping.PlayerPingProvider; -import me.lucko.spark.common.platform.PlatformInfo; -import me.lucko.spark.common.platform.world.WorldInfoProvider; -import me.lucko.spark.common.sampler.ThreadDumper; -import me.lucko.spark.common.sampler.source.ClassSourceLookup; -import me.lucko.spark.common.tick.TickHook; - -import org.slf4j.Logger; -import org.spongepowered.api.Game; -import org.spongepowered.api.command.CommandCallable; -import org.spongepowered.api.command.CommandResult; -import org.spongepowered.api.command.CommandSource; -import org.spongepowered.api.config.ConfigDir; -import org.spongepowered.api.event.Listener; -import org.spongepowered.api.event.game.state.GameStartedServerEvent; -import org.spongepowered.api.event.game.state.GameStoppingServerEvent; -import org.spongepowered.api.plugin.Plugin; -import org.spongepowered.api.plugin.PluginContainer; -import org.spongepowered.api.scheduler.AsynchronousExecutor; -import org.spongepowered.api.scheduler.SpongeExecutorService; -import org.spongepowered.api.scheduler.SynchronousExecutor; -import org.spongepowered.api.text.Text; -import org.spongepowered.api.world.Location; -import org.spongepowered.api.world.World; - -import java.nio.file.Path; -import java.util.List; -import java.util.Optional; -import java.util.logging.Level; -import java.util.stream.Stream; - -import javax.annotation.Nullable; - -@Plugin( - id = "spark", - name = "spark", - version = "@version@", - description = "@desc@", - authors = {"Luck"} -) -public class Sponge7SparkPlugin implements SparkPlugin 
{ - - private final PluginContainer pluginContainer; - private final Logger logger; - private final Game game; - private final Path configDirectory; - private final SpongeExecutorService asyncExecutor; - private final SpongeExecutorService syncExecutor; - private final ThreadDumper.GameThread gameThreadDumper = new ThreadDumper.GameThread(); - - private SparkPlatform platform; - - @Inject - public Sponge7SparkPlugin(PluginContainer pluginContainer, Logger logger, Game game, @ConfigDir(sharedRoot = false) Path configDirectory, @AsynchronousExecutor SpongeExecutorService asyncExecutor, @SynchronousExecutor SpongeExecutorService syncExecutor) { - this.pluginContainer = pluginContainer; - this.logger = logger; - this.game = game; - this.configDirectory = configDirectory; - this.asyncExecutor = asyncExecutor; - this.syncExecutor = syncExecutor; - - this.syncExecutor.execute(() -> this.gameThreadDumper.setThread(Thread.currentThread())); - } - - @Listener - public void onEnable(GameStartedServerEvent event) { - this.platform = new SparkPlatform(this); - this.platform.enable(); - this.game.getCommandManager().register(this, new SparkCommand(this), "spark"); - } - - @Listener - public void onDisable(GameStoppingServerEvent event) { - this.platform.disable(); - } - - @Override - public String getVersion() { - return Sponge7SparkPlugin.class.getAnnotation(Plugin.class).version(); - } - - @Override - public Path getPluginDirectory() { - return this.configDirectory; - } - - @Override - public String getCommandName() { - return "spark"; - } - - @Override - public Stream<Sponge7CommandSender> getCommandSenders() { - if (this.game.isServerAvailable()) { - return Stream.concat( - this.game.getServer().getOnlinePlayers().stream(), - Stream.of(this.game.getServer().getConsole()) - ).map(Sponge7CommandSender::new); - } else { - return Stream.of(this.game.getServer().getConsole()).map(Sponge7CommandSender::new); - } - } - - @Override - public void executeAsync(Runnable task) { - this.asyncExecutor.execute(task); - } - - @Override - public void executeSync(Runnable task) { - this.syncExecutor.execute(task); - } - - @Override - public void log(Level level, String msg) { - if (level == Level.INFO) { - this.logger.info(msg); - } else if (level == Level.WARNING) { - this.logger.warn(msg); - } else if (level == Level.SEVERE) { - this.logger.error(msg); - } else { - throw new IllegalArgumentException(level.getName()); - } - } - - @Override - public ThreadDumper getDefaultThreadDumper() { - return this.gameThreadDumper.get(); - } - - @Override - public TickHook createTickHook() { - return new Sponge7TickHook(this); - } - - @Override - public ClassSourceLookup createClassSourceLookup() { - return new Sponge7ClassSourceLookup(this.game); - } - - @Override - public PlayerPingProvider createPlayerPingProvider() { - if (this.game.isServerAvailable()) { - return new Sponge7PlayerPingProvider(this.game.getServer()); - } else { - return null; - } - } - - @Override - public WorldInfoProvider createWorldInfoProvider() { - if (this.game.isServerAvailable()) { - return new Sponge7WorldInfoProvider(this.game.getServer()); - } else { - return WorldInfoProvider.NO_OP; - } - } - - @Override - public PlatformInfo getPlatformInfo() { - return new Sponge7PlatformInfo(this.game); - } - - @Override - public void registerApi(Spark api) { - this.game.getServiceManager().setProvider(this, Spark.class, api); - } - - private static final class SparkCommand implements CommandCallable { - private final Sponge7SparkPlugin plugin; - - private 
SparkCommand(Sponge7SparkPlugin plugin) { - this.plugin = plugin; - } - - @Override - public CommandResult process(CommandSource source, String arguments) { - this.plugin.platform.executeCommand(new Sponge7CommandSender(source), arguments.split(" ")); - return CommandResult.empty(); - } - - @Override - public List<String> getSuggestions(CommandSource source, String arguments, @Nullable Location<World> targetPosition) { - return this.plugin.platform.tabCompleteCommand(new Sponge7CommandSender(source), arguments.split(" ")); - } - - @Override - public boolean testPermission(CommandSource source) { - return this.plugin.platform.hasPermissionForAnyCommand(new Sponge7CommandSender(source)); - } - - @Override - public Optional<Text> getShortDescription(CommandSource source) { - return Optional.of(Text.of("Main spark plugin command")); - } - - @Override - public Optional<Text> getHelp(CommandSource source) { - return Optional.of(Text.of("Run '/spark' to view usage.")); - } - - @Override - public Text getUsage(CommandSource source) { - return Text.of("Run '/spark' to view usage."); - } - } -} diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java deleted file mode 100644 index df58028cebd..00000000000 --- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. 
- */ - -package me.lucko.spark.sponge; - -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; - -import me.lucko.spark.common.platform.world.AbstractChunkInfo; -import me.lucko.spark.common.platform.world.CountMap; -import me.lucko.spark.common.platform.world.WorldInfoProvider; - -import org.spongepowered.api.Server; -import org.spongepowered.api.entity.Entity; -import org.spongepowered.api.entity.EntityType; -import org.spongepowered.api.world.Chunk; -import org.spongepowered.api.world.World; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; - -public class Sponge7WorldInfoProvider implements WorldInfoProvider { - private final Server server; - - public Sponge7WorldInfoProvider(Server server) { - this.server = server; - } - - @Override - public CountsResult pollCounts() { - int players = this.server.getOnlinePlayers().size(); - int entities = 0; - int tileEntities = 0; - int chunks = 0; - - for (World world : this.server.getWorlds()) { - entities += world.getEntities().size(); - tileEntities += world.getTileEntities().size(); - chunks += Iterables.size(world.getLoadedChunks()); - } - - return new CountsResult(players, entities, tileEntities, chunks); - } - - @Override - public ChunksResult<Sponge7ChunkInfo> pollChunks() { - ChunksResult<Sponge7ChunkInfo> data = new ChunksResult<>(); - - for (World world : this.server.getWorlds()) { - List<Chunk> chunks = Lists.newArrayList(world.getLoadedChunks()); - - List<Sponge7ChunkInfo> list = new ArrayList<>(chunks.size()); - for (Chunk chunk : chunks) { - list.add(new Sponge7ChunkInfo(chunk)); - } - - data.put(world.getName(), list); - } - - return data; - } - - static final class Sponge7ChunkInfo extends AbstractChunkInfo<EntityType> { - private final CountMap<EntityType> entityCounts; - - Sponge7ChunkInfo(Chunk chunk) { - super(chunk.getPosition().getX(), chunk.getPosition().getZ()); - - this.entityCounts = new CountMap.Simple<>(new HashMap<>()); - for (Entity entity : chunk.getEntities()) { - this.entityCounts.increment(entity.getType()); - } - } - - @Override - public CountMap<EntityType> getEntityCounts() { - return this.entityCounts; - } - - @Override - public String entityTypeName(EntityType type) { - return type.getName(); - } - - } -} diff --git a/spark-standalone-agent/build.gradle b/spark-standalone-agent/build.gradle new file mode 100644 index 00000000000..7a32fffbf1b --- /dev/null +++ b/spark-standalone-agent/build.gradle @@ -0,0 +1,77 @@ +plugins { + id 'net.kyori.blossom' version '1.3.1' + id 'com.gradleup.shadow' version '8.3.8' +} + +dependencies { + implementation project(':spark-common') + implementation('net.kyori:adventure-text-serializer-ansi:4.21.0') { + exclude(module: 'adventure-bom') + exclude(module: 'adventure-api') + exclude(module: 'annotations') + } + implementation 'org.slf4j:slf4j-simple:2.0.16' + implementation 'com.google.code.gson:gson:2.9.0' + implementation('com.google.guava:guava:31.1-jre') { + exclude(module: 'jsr305') + exclude(module: 'error_prone_annotations') + exclude(module: 'failureaccess') + exclude(module: 'listenablefuture') + exclude(module: 'j2objc-annotations') + exclude(module: 'checker-qual') + } + + implementation 'org.jline:jline-remote-ssh:3.28.0' + implementation 'org.apache.sshd:sshd-core:2.14.0' +} + +tasks.withType(JavaCompile).configureEach { + options.compilerArgs += ['--add-modules', 'jdk.attach'] + options.release = 11 +} + +blossom { + replaceTokenIn('src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java') + replaceToken '@version@', project.pluginVersion +} + +jar { + manifest { + attributes( 
'Main-Class': 'me.lucko.spark.standalone.StandaloneSparkAgent', + 'Agent-Class': 'me.lucko.spark.standalone.StandaloneSparkAgent', + 'Premain-Class': 'me.lucko.spark.standalone.StandaloneSparkAgent' + ) + } +} + +shadowJar { + archiveFileName = "spark-${project.pluginVersion}-standalone-agent.jar" + + relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' + relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' + relocate 'net.kyori.option', 'me.lucko.spark.lib.adventure.option' + relocate 'net.kyori.ansi', 'me.lucko.spark.lib.adventure.ansi' + relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' + relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' + relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' + relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' + relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks' + relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws' + relocate 'com.google.gson', 'me.lucko.spark.lib.gson' + relocate 'com.google.common', 'me.lucko.spark.lib.guava' + relocate 'com.google.errorprone', 'me.lucko.spark.lib.google.errorprone' + relocate 'com.google.j2objc', 'me.lucko.spark.lib.google.j2objc' + relocate 'com.google.thirdparty', 'me.lucko.spark.lib.google.thirdparty' + relocate 'org.apache.sshd', 'me.lucko.spark.lib.sshd' + relocate 'org.jline.builtins.ssh', 'me.lucko.spark.lib.jline.builtins.ssh' + + project.applyExcludes(delegate) + + exclude 'META-INF/services/java.nio.file.spi.FileSystemProvider' +} + +artifacts { + archives shadowJar + shadow shadowJar +} \ No newline at end of file
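Note (an illustrative aside, not part of the patch): the three manifest attributes above wire the same class into all three JVM entry mechanisms. `Main-Class` is used when the jar is executed directly, `Premain-Class` when it is loaded at startup via `-javaagent`, and `Agent-Class` when it is attached to a live JVM through the Attach API. A minimal sketch of a class satisfying all three contracts (hypothetical names, empty bodies) looks like this; `StandaloneSparkAgent` in the diff further below implements exactly this trio:

```java
import java.lang.instrument.Instrumentation;

// Illustrative only: the entry-point signatures the three manifest attributes expect.
public class MinimalAgent {
    // Main-Class: invoked by `java -jar agent.jar`
    public static void main(String[] args) {
    }

    // Premain-Class: invoked before main() when the JVM starts with -javaagent:agent.jar
    public static void premain(String agentArgs, Instrumentation inst) {
    }

    // Agent-Class: invoked when loaded into an already-running JVM via VirtualMachine#loadAgent
    public static void agentmain(String agentArgs, Instrumentation inst) {
    }
}
```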
diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogCommandSender.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneCommandSender.java similarity index 62% rename from spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogCommandSender.java rename to spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneCommandSender.java index 8d7223a3098..92ace6b3bed 100644 --- a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogCommandSender.java +++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneCommandSender.java @@ -18,44 +18,44 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. */ -package me.lucko.spark.waterdog; +package me.lucko.spark.standalone; import me.lucko.spark.common.command.sender.AbstractCommandSender; - import net.kyori.adventure.text.Component; -import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer; - -import dev.waterdog.waterdogpe.command.CommandSender; -import dev.waterdog.waterdogpe.player.ProxiedPlayer; +import net.kyori.adventure.text.serializer.ansi.ANSIComponentSerializer; import java.util.UUID; -public class WaterdogCommandSender extends AbstractCommandSender<CommandSender> { +public class StandaloneCommandSender extends AbstractCommandSender<StandaloneCommandSender.Output> { + public static final StandaloneCommandSender NO_OP = new StandaloneCommandSender(msg -> {}); + public static final StandaloneCommandSender SYSTEM_OUT = new StandaloneCommandSender(System.out::println); - public WaterdogCommandSender(CommandSender sender) { - super(sender); + public StandaloneCommandSender(Output output) { + super(output); } @Override public String getName() { - return super.delegate.getName(); + return "Standalone"; } @Override public UUID getUniqueId() { - if (super.delegate instanceof ProxiedPlayer) { - return ((ProxiedPlayer) super.delegate).getUniqueId(); - } return null; } @Override public void sendMessage(Component message) { - super.delegate.sendMessage(LegacyComponentSerializer.legacySection().serialize(message)); + this.delegate.sendMessage(ANSIComponentSerializer.ansi().serialize(message)); } @Override public boolean hasPermission(String permission) { - return super.delegate.hasPermission(permission); + return true; } + + public interface Output { + void sendMessage(String message); + } + } diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandalonePlatformInfo.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandalonePlatformInfo.java new file mode 100644 index 00000000000..48b8d217dc4 --- /dev/null +++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandalonePlatformInfo.java @@ -0,0 +1,84 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.standalone; + +import com.google.gson.Gson; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import me.lucko.spark.common.platform.PlatformInfo; + +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.URL; + +public class StandalonePlatformInfo implements PlatformInfo { + private final String version; + private final String minecraftVersion; + + public StandalonePlatformInfo(String version) { + this.version = version; + this.minecraftVersion = detectVanillaMinecraftVersion(); + } + + @Override + public Type getType() { + return Type.APPLICATION; + } + + @Override + public String getName() { + return "Standalone"; + } + + @Override + public String getBrand() { + return this.minecraftVersion != null ? 
"Vanilla Minecraft" : "Unknown"; + } + + @Override + public String getVersion() { + return this.version; + } + + @Override + public String getMinecraftVersion() { + return this.minecraftVersion; + } + + private static String detectVanillaMinecraftVersion() { + try { + Class clazz = Class.forName("net.minecraft.bundler.Main"); + URL resource = clazz.getClassLoader().getResource("version.json"); + if (resource != null) { + try (InputStream stream = resource.openStream(); InputStreamReader reader = new InputStreamReader(stream)) { + JsonObject obj = new Gson().fromJson(reader, JsonObject.class); + JsonElement name = obj.get("name"); + if (name.isJsonPrimitive() && name.getAsJsonPrimitive().isString()) { + return name.getAsString(); + } + } + } + } catch (Exception e) { + // ignore + } + return null; + } +} diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkAgent.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkAgent.java new file mode 100644 index 00000000000..8439c889ea8 --- /dev/null +++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkAgent.java @@ -0,0 +1,100 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.standalone; + +import com.sun.tools.attach.VirtualMachine; +import com.sun.tools.attach.VirtualMachineDescriptor; + +import java.lang.instrument.Instrumentation; +import java.net.URI; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class StandaloneSparkAgent { + + // Entry point when the agent is run as a normal jar + public static void main(String[] args) { + if (args.length == 0) { + System.err.println("Usage: java -jar spark-standalone-agent.jar [args...]"); + + List vms = VirtualMachine.list(); + if (vms.isEmpty()) { + return; + } + + System.out.println("Current JVM processes:"); + for (VirtualMachineDescriptor vm : vms) { + System.out.println(" pid=" + vm.id() + " (" + vm.displayName() + ")"); + } + + return; + } + + try { + VirtualMachine vm = VirtualMachine.attach(args[0]); + URI agentPath = StandaloneSparkAgent.class.getProtectionDomain().getCodeSource().getLocation().toURI(); + String arguments = String.join(",", Arrays.copyOfRange(args, 1, args.length)); + vm.loadAgent(Paths.get(agentPath).toAbsolutePath().toString(), arguments); + System.out.println("[spark] Agent loaded successfully."); + vm.detach(); + } catch (Throwable e) { + System.err.println("Failed to attach agent to process " + args[0]); + e.printStackTrace(System.err); + } + } + + // Entry point when the agent is loaded via -javaagent + public static void premain(String agentArgs, Instrumentation instrumentation) { + System.out.println("[spark] Loading standalone agent... 
(premain)"); + init(agentArgs, instrumentation); + } + + // Entry point when the agent is loaded via VirtualMachine#loadAgent + public static void agentmain(String agentArgs, Instrumentation instrumentation) { + System.out.println("[spark] Loading standalone agent... (agentmain)"); + init(agentArgs, instrumentation); + } + + private static void init(String agentArgs, Instrumentation instrumentation) { + try { + Map arguments = new HashMap<>(); + if (agentArgs == null) { + agentArgs = ""; + } + for (String arg : agentArgs.split(",")) { + if (arg.contains("=")) { + String[] parts = arg.split("=", 2); + arguments.put(parts[0], parts[1]); + } else { + arguments.put(arg, "true"); + } + } + new StandaloneSparkPlugin(instrumentation, arguments); + } catch (Throwable e) { + System.err.println("[spark] Loading failed :("); + e.printStackTrace(System.err); + } + } + +} diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java new file mode 100644 index 00000000000..dd59f997934 --- /dev/null +++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java @@ -0,0 +1,160 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java new file mode 100644 index 00000000000..dd59f997934 --- /dev/null +++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java @@ -0,0 +1,160 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.standalone; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.SparkPlugin; +import me.lucko.spark.common.command.CommandResponseHandler; +import me.lucko.spark.common.platform.PlatformInfo; +import me.lucko.spark.common.util.SparkThreadFactory; +import me.lucko.spark.common.util.classfinder.ClassFinder; +import me.lucko.spark.common.util.classfinder.FallbackClassFinder; +import me.lucko.spark.common.util.classfinder.InstrumentationClassFinder; +import me.lucko.spark.standalone.remote.RemoteInterface; +import me.lucko.spark.standalone.remote.SshRemoteInterface; +import net.kyori.adventure.text.Component; +import net.kyori.adventure.text.format.NamedTextColor; + +import java.io.PrintWriter; +import java.io.StringWriter; +import java.lang.instrument.Instrumentation; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.logging.Level; +import java.util.stream.Stream; + +public class StandaloneSparkPlugin implements SparkPlugin { + private final Instrumentation instrumentation; + private final Set<StandaloneCommandSender> senders; + private final ScheduledExecutorService scheduler; + private final SparkPlatform platform; + + private final RemoteInterface remoteInterface; + + public StandaloneSparkPlugin(Instrumentation instrumentation, Map<String, String> arguments) { + this.instrumentation = instrumentation; + this.senders = ConcurrentHashMap.newKeySet(); + this.senders.add(StandaloneCommandSender.SYSTEM_OUT); + this.scheduler = Executors.newScheduledThreadPool(4, new SparkThreadFactory()); + this.platform = new SparkPlatform(this); + this.platform.enable(); + this.remoteInterface = new SshRemoteInterface(this, Integer.parseInt(arguments.getOrDefault("port", "0"))); + + if (arguments.containsKey("start")) { + execute(new String[]{"profiler", "start"}, StandaloneCommandSender.SYSTEM_OUT).join(); + + if (arguments.containsKey("open")) { + execute(new String[]{"profiler", "open"}, StandaloneCommandSender.SYSTEM_OUT).join(); + } + } + } + + public void disable() { + this.platform.disable(); + this.scheduler.shutdown(); + this.remoteInterface.close(); + } + + public CompletableFuture<Void> execute(String[] args, StandaloneCommandSender sender) { + return this.platform.executeCommand(sender, args); + } + + public List<String> suggest(String[] args, StandaloneCommandSender sender) { + return this.platform.tabCompleteCommand(sender, args); + } + + public void addSender(StandaloneCommandSender sender) { + this.senders.add(sender); + } + + public void removeSender(StandaloneCommandSender sender) { + this.senders.remove(sender); + } + + public CommandResponseHandler createResponseHandler(StandaloneCommandSender sender) { + return new CommandResponseHandler(this.platform, sender); + } + + @Override + public String getVersion() { + return "@version@"; + } + + @Override + public Path getPluginDirectory() { + return Paths.get("spark"); + } + + @Override + public String getCommandName() { + return "spark"; + } + + @Override + public Stream<StandaloneCommandSender> getCommandSenders() { + return this.senders.stream(); + } + + @Override + public void executeAsync(Runnable task) { + this.scheduler.execute(task); + } + + @Override + public void log(Level level, String msg) { + log(level, msg, null); + } + + @Override + public void log(Level level, String 
msg, Throwable throwable) { + CommandResponseHandler resp = createResponseHandler(StandaloneCommandSender.SYSTEM_OUT); + if (level.intValue() >= 900 || throwable != null) { // severe/warning + resp.replyPrefixed(Component.text(msg, NamedTextColor.RED)); + if (throwable != null) { + StringWriter stringWriter = new StringWriter(); + throwable.printStackTrace(new PrintWriter(stringWriter)); + resp.replyPrefixed(Component.text(stringWriter.toString(), NamedTextColor.YELLOW)); + } + } else { + resp.replyPrefixed(Component.text(msg)); + } + } + + @Override + public PlatformInfo getPlatformInfo() { + return new StandalonePlatformInfo(getVersion()); + } + + @Override + public ClassFinder createClassFinder() { + return ClassFinder.combining( + new InstrumentationClassFinder(this.instrumentation), + FallbackClassFinder.INSTANCE + ); + } +}
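Aside (not part of the patch): `createClassFinder()` combines an instrumentation-backed finder with a fallback. The value of holding an `Instrumentation` reference is that `getAllLoadedClasses()` can see classes from every classloader in the JVM, which plain `Class.forName` cannot when classes live in isolated plugin loaders. A rough sketch of that idea, an assumption about the approach rather than spark's actual `InstrumentationClassFinder` implementation:

```java
import java.lang.instrument.Instrumentation;

public final class InstrumentationLookupSketch {
    private final Instrumentation instrumentation;

    public InstrumentationLookupSketch(Instrumentation instrumentation) {
        this.instrumentation = instrumentation;
    }

    // Scan every class already loaded anywhere in the JVM for a matching name.
    public Class<?> findClass(String className) {
        for (Class<?> loaded : this.instrumentation.getAllLoadedClasses()) {
            if (loaded.getName().equals(className)) {
                return loaded;
            }
        }
        return null; // a caller would then try another finder, as combining(...) does above
    }
}
```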
diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/AbstractRemoteInterface.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/AbstractRemoteInterface.java new file mode 100644 index 00000000000..1c03aa8dd38 --- /dev/null +++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/AbstractRemoteInterface.java @@ -0,0 +1,98 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.standalone.remote; + +import me.lucko.spark.common.command.CommandResponseHandler; +import me.lucko.spark.standalone.StandaloneCommandSender; +import me.lucko.spark.standalone.StandaloneSparkPlugin; +import net.kyori.adventure.text.Component; +import org.jline.reader.Candidate; +import org.jline.reader.EndOfFileException; +import org.jline.reader.LineReader; +import org.jline.reader.LineReaderBuilder; +import org.jline.reader.UserInterruptException; +import org.jline.terminal.Terminal; +import org.jline.terminal.impl.AbstractTerminal; + +public abstract class AbstractRemoteInterface implements RemoteInterface { + + protected final StandaloneSparkPlugin spark; + + public AbstractRemoteInterface(StandaloneSparkPlugin spark) { + this.spark = spark; + } + + private static String stripSlashSpark(String command) { + if (command.startsWith("/")) { + command = command.substring(1); + } + if (command.startsWith("spark ")) { + command = command.substring(6); + } + return command; + } + + public void processSession(Terminal terminal, Runnable closer) { + LineReader reader = LineReaderBuilder.builder() + .terminal(terminal) + .completer((lineReader, parsedLine, list) -> { + String command = stripSlashSpark(parsedLine.line()); + String[] args = command.split(" ", -1); + for (String suggestion : this.spark.suggest(args, StandaloneCommandSender.NO_OP)) { + list.add(new Candidate(suggestion)); + } + }) + .build(); + + StandaloneCommandSender sender = new StandaloneCommandSender(reader::printAbove); + + this.spark.addSender(sender); + ((AbstractTerminal) terminal).setOnClose(() -> this.spark.removeSender(sender)); + + CommandResponseHandler resp = this.spark.createResponseHandler(sender); + resp.replyPrefixed(Component.text("spark remote interface - " + this.spark.getVersion())); + resp.replyPrefixed(Component.text("Use '/spark' commands as usual, or run 'exit' to exit.")); + + while (true) { + try { + String line = reader.readLine("> "); + if (line.trim().isEmpty()) { + continue; + } + + String command = stripSlashSpark(line); + if (command.equals("exit")) { + closer.run(); + return; + } + + this.spark.execute(command.split(" ", 0), sender); + + } catch (UserInterruptException e) { + // ignore + } catch (EndOfFileException e) { + this.spark.removeSender(sender); + return; + } + } + } + +} diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/RemoteInterface.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/RemoteInterface.java new file mode 100644 index 00000000000..ce6a8dcd1c3 --- /dev/null +++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/RemoteInterface.java @@ -0,0 +1,28 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ */ + +package me.lucko.spark.standalone.remote; + +public interface RemoteInterface extends AutoCloseable { + + @Override + void close(); + +} diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/SshRemoteInterface.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/SshRemoteInterface.java new file mode 100644 index 00000000000..025dadb1b20 --- /dev/null +++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/SshRemoteInterface.java @@ -0,0 +1,78 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.standalone.remote; + +import me.lucko.spark.standalone.StandaloneSparkPlugin; +import org.apache.sshd.server.SshServer; +import org.apache.sshd.server.keyprovider.SimpleGeneratorHostKeyProvider; +import org.jline.builtins.ssh.ShellFactoryImpl; + +import java.io.IOException; +import java.security.MessageDigest; +import java.security.SecureRandom; +import java.util.logging.Level; + +public class SshRemoteInterface extends AbstractRemoteInterface { + private final String password; + private final SshServer sshd; + + public SshRemoteInterface(StandaloneSparkPlugin spark, int port) { + super(spark); + this.password = new SecureRandom().ints(48, 122) + .filter(i -> (i <= 57 || i >= 65) && (i <= 90 || i >= 97)) + .limit(32) + .collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append) + .toString(); + + this.sshd = SshServer.setUpDefaultServer(); + if (port > 0) { + this.sshd.setPort(port); + } + this.sshd.setKeyPairProvider(new SimpleGeneratorHostKeyProvider()); + this.sshd.setPasswordAuthenticator((username, password, session) -> "spark".equals(username) && MessageDigest.isEqual(this.password.getBytes(), password.getBytes())); + this.sshd.setShellFactory(new ShellFactoryImpl(shellParams -> this.processSession(shellParams.getTerminal(), shellParams.getCloser()))); + + new Thread(() -> { + try { + this.start(); + } catch (IOException e) { + this.spark.log(Level.SEVERE, "Error whilst starting SSH server", e); + } + }, "spark-ssh-server").start(); + } + + private void start() throws IOException { + this.sshd.start(); + this.spark.log(Level.INFO, "SSH Server started on port " + this.sshd.getPort()); + this.spark.log(Level.INFO, "Connect using: ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -p " + this.sshd.getPort() + " spark@localhost"); + this.spark.log(Level.INFO, "When prompted, enter the password: " + this.password); + } + + @Override + public void close() { + try { + this.sshd.stop(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + +} \ No newline at end of file
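Aside (not part of the patch): two details of `SshRemoteInterface` are worth calling out. The password is built from a `SecureRandom` code-point stream over `[48, 122)` filtered down to ASCII alphanumerics, and authentication compares it with `MessageDigest.isEqual`, which examines all bytes rather than stopping at the first mismatch, so a client cannot probe the password one character at a time the way a timing attack against `String.equals` could. The generation idiom in isolation:

```java
import java.security.SecureRandom;

public class PasswordDemo {
    public static void main(String[] args) {
        // Keep only '0'-'9' (48-57), 'A'-'Z' (65-90) and 'a'-'y' (97-121; the exclusive
        // bound 122 means 'z' is never produced), then collect 32 code points.
        String password = new SecureRandom().ints(48, 122)
                .filter(i -> (i <= 57 || i >= 65) && (i <= 90 || i >= 97))
                .limit(32)
                .collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append)
                .toString();
        System.out.println(password); // a fresh 32-character alphanumeric password
    }
}
```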
diff --git a/spark-velocity/build.gradle b/spark-velocity/build.gradle index 2e82cfcb44c..b713272e54e 100644 --- a/spark-velocity/build.gradle +++ b/spark-velocity/build.gradle @@ -1,6 +1,6 @@ plugins { - id 'net.kyori.blossom' version '1.3.0' - id 'com.github.johnrengelman.shadow' version '7.0.0' + id 'net.kyori.blossom' version '1.3.1' + id 'com.gradleup.shadow' version '8.3.8' } dependencies { @@ -20,7 +20,7 @@ blossom { } shadowJar { - archiveName = "spark-${project.pluginVersion}-velocity.jar" + archiveFileName = "spark-${project.pluginVersion}-velocity.jar" dependencies { exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$')) @@ -32,10 +32,9 @@ shadowJar { relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks' + relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws' - exclude 'module-info.class' - exclude 'META-INF/maven/**' - exclude 'META-INF/proguard/**' + project.applyExcludes(delegate) } artifacts { diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityClassSourceLookup.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityClassSourceLookup.java index 9b697c32d07..eba00a058e2 100644 --- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityClassSourceLookup.java +++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityClassSourceLookup.java @@ -22,9 +22,7 @@ import com.velocitypowered.api.plugin.PluginContainer; import com.velocitypowered.api.plugin.PluginManager; - import me.lucko.spark.common.sampler.source.ClassSourceLookup; - import org.checkerframework.checker.nullness.qual.Nullable; import java.util.HashMap; diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityCommandSender.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityCommandSender.java index ce372a644f8..62f7f758b8c 100644 --- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityCommandSender.java +++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityCommandSender.java @@ -23,9 +23,7 @@ import com.velocitypowered.api.command.CommandSource; import com.velocitypowered.api.proxy.ConsoleCommandSource; import com.velocitypowered.api.proxy.Player; - import me.lucko.spark.common.command.sender.AbstractCommandSender; - import net.kyori.adventure.text.Component; import java.util.UUID; diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlatformInfo.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlatformInfo.java index 4ee42cb1d04..eba1567f3e1 100644 --- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlatformInfo.java +++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlatformInfo.java @@ -21,7 +21,6 @@ package me.lucko.spark.velocity; import com.velocitypowered.api.proxy.ProxyServer; - import me.lucko.spark.common.platform.PlatformInfo; public class VelocityPlatformInfo implements PlatformInfo { @@ -41,6 +40,11 @@ public String getName() { return "Velocity"; } + @Override + public String getBrand() { + return this.proxy.getVersion().getName(); + } + @Override public String getVersion() { return this.proxy.getVersion().getVersion(); diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlayerPingProvider.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlayerPingProvider.java index 382ea22071f..6cd4445e108 100644 --- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlayerPingProvider.java +++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlayerPingProvider.java @@ -23,7 +23,6 @@ import 
com.google.common.collect.ImmutableMap; import com.velocitypowered.api.proxy.Player; import com.velocitypowered.api.proxy.ProxyServer; - import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import java.util.Map; diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java index 4a89a4e87b9..88bbf1d8e54 100644 --- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java +++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java @@ -29,14 +29,12 @@ import com.velocitypowered.api.plugin.Plugin; import com.velocitypowered.api.plugin.annotation.DataDirectory; import com.velocitypowered.api.proxy.ProxyServer; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.sampler.source.SourceMetadata; - import org.slf4j.Logger; import java.nio.file.Path; @@ -89,6 +87,11 @@ public List<String> suggest(Invocation inv) { return this.platform.tabCompleteCommand(new VelocityCommandSender(inv.source()), inv.arguments()); } + @Override + public boolean hasPermission(Invocation inv) { + return this.platform.hasPermissionForAnyCommand(new VelocityCommandSender(inv.source())); + } + @Override public String getVersion() { return VelocitySparkPlugin.class.getAnnotation(Plugin.class).version(); @@ -119,14 +122,23 @@ public void executeAsync(Runnable task) { @Override public void log(Level level, String msg) { - if (level == Level.INFO) { - this.logger.info(msg); - } else if (level == Level.WARNING) { - this.logger.warn(msg); - } else if (level == Level.SEVERE) { + if (level.intValue() >= 1000) { // severe this.logger.error(msg); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg); + } else { + this.logger.info(msg); + } + } + + @Override + public void log(Level level, String msg, Throwable throwable) { + if (level.intValue() >= 1000) { // severe + this.logger.error(msg, throwable); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg, throwable); } else { - throw new IllegalArgumentException(level.getName()); + this.logger.info(msg, throwable); } } @@ -141,7 +153,8 @@ public Collection<SourceMetadata> getKnownSources() { this.proxy.getPluginManager().getPlugins(), plugin -> plugin.getDescription().getId(), plugin -> plugin.getDescription().getVersion().orElse("unspecified"), - plugin -> String.join(", ", plugin.getDescription().getAuthors()) + plugin -> String.join(", ", plugin.getDescription().getAuthors()), + plugin -> plugin.getDescription().getDescription().orElse(null) ); }
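Aside (not part of the patch): the refactored `log(...)` methods above replace exact `Level` comparisons with integer thresholds. This works because `java.util.logging` levels are backed by fixed integer values, so `>= 1000` captures SEVERE and `>= 900` captures WARNING and above. For reference:

```java
import java.util.logging.Level;

public class LevelDemo {
    public static void main(String[] args) {
        // The constants the refactored log() methods compare against.
        System.out.println(Level.SEVERE.intValue());  // 1000
        System.out.println(Level.WARNING.intValue()); // 900
        System.out.println(Level.INFO.intValue());    // 800
    }
}
```

A side effect of the change: unrecognised levels now fall through to `info` instead of throwing `IllegalArgumentException`.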
diff --git a/spark-velocity4/build.gradle b/spark-velocity4/build.gradle deleted file mode 100644 index 9b30caa749a..00000000000 --- a/spark-velocity4/build.gradle +++ /dev/null @@ -1,49 +0,0 @@ -plugins { - id 'net.kyori.blossom' version '1.3.0' - id 'com.github.johnrengelman.shadow' version '7.0.0' -} - -tasks.withType(JavaCompile) { - // override, compile targeting J11 - options.release = 11 -} - -dependencies { - implementation project(':spark-common') - compileOnly 'com.velocitypowered:velocity-api:4.0.0-SNAPSHOT' - annotationProcessor 'com.velocitypowered:velocity-annotation-processor:4.0.0-SNAPSHOT' -} - -repositories { - maven { url "https://repo.papermc.io/repository/maven-public/" } -} - -blossom { - replaceTokenIn('src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java') - replaceToken '@version@', project.pluginVersion - replaceToken '@desc@', project.pluginDescription -} - -shadowJar { - archiveName = "spark-${project.pluginVersion}-velocity4.jar" - - dependencies { - exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$')) - } - - relocate 'net.kyori.adventure.text.feature.pagination', 'me.lucko.spark.lib.adventure.pagination' - relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' - relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' - relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' - relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' - relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks' - - exclude 'module-info.class' - exclude 'META-INF/maven/**' - exclude 'META-INF/proguard/**' -} - -artifacts { - archives shadowJar - shadow shadowJar -} \ No newline at end of file diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4ClassSourceLookup.java b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4ClassSourceLookup.java deleted file mode 100644 index 84840d2ec11..00000000000 --- a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4ClassSourceLookup.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. 
- */ - -package me.lucko.spark.velocity; - -import com.velocitypowered.api.plugin.PluginContainer; -import com.velocitypowered.api.plugin.PluginManager; - -import me.lucko.spark.common.sampler.source.ClassSourceLookup; - -import org.checkerframework.checker.nullness.qual.Nullable; - -import java.util.HashMap; -import java.util.Map; - -public class Velocity4ClassSourceLookup extends ClassSourceLookup.ByClassLoader { - private static final Class<?> PLUGIN_CLASS_LOADER; - - static { - try { - PLUGIN_CLASS_LOADER = Class.forName("com.velocitypowered.proxy.plugin.PluginClassLoader"); - } catch (ReflectiveOperationException e) { - throw new ExceptionInInitializerError(e); - } - } - - private final Map<ClassLoader, String> classLoadersToPlugin; - - public Velocity4ClassSourceLookup(PluginManager pluginManager) { - this.classLoadersToPlugin = new HashMap<>(); - for (PluginContainer plugin : pluginManager.plugins()) { - Object instance = plugin.instance(); - if (instance != null) { - this.classLoadersToPlugin.put(instance.getClass().getClassLoader(), plugin.description().id()); - } - } - } - - @Override - public @Nullable String identify(ClassLoader loader) { - if (PLUGIN_CLASS_LOADER.isInstance(loader)) { - return this.classLoadersToPlugin.get(loader); - } - return null; - } -} diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4CommandSender.java b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4CommandSender.java deleted file mode 100644 index d346c2ce0e0..00000000000 --- a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4CommandSender.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. 
- */ - -package me.lucko.spark.velocity; - -import com.velocitypowered.api.command.CommandSource; -import com.velocitypowered.api.command.ConsoleCommandSource; -import com.velocitypowered.api.proxy.connection.Player; - -import me.lucko.spark.common.command.sender.AbstractCommandSender; - -import net.kyori.adventure.text.Component; - -import java.util.UUID; - -public class Velocity4CommandSender extends AbstractCommandSender<CommandSource> { - public Velocity4CommandSender(CommandSource source) { - super(source); - } - - @Override - public String getName() { - if (super.delegate instanceof Player) { - return ((Player) super.delegate).username(); - } else if (super.delegate instanceof ConsoleCommandSource) { - return "Console"; - } else { - return "unknown:" + super.delegate.getClass().getSimpleName(); - } - } - - @Override - public UUID getUniqueId() { - if (super.delegate instanceof Player) { - return ((Player) super.delegate).id(); - } - return null; - } - - @Override - public void sendMessage(Component message) { - super.delegate.sendMessage(message); - } - - @Override - public boolean hasPermission(String permission) { - return super.delegate.hasPermission(permission); - } -} diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java deleted file mode 100644 index b6382468e0d..00000000000 --- a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. 
- */ - -package me.lucko.spark.velocity; - -import com.google.inject.Inject; -import com.velocitypowered.api.command.SimpleCommand; -import com.velocitypowered.api.event.PostOrder; -import com.velocitypowered.api.event.Subscribe; -import com.velocitypowered.api.event.lifecycle.ProxyInitializeEvent; -import com.velocitypowered.api.event.lifecycle.ProxyShutdownEvent; -import com.velocitypowered.api.plugin.Plugin; -import com.velocitypowered.api.plugin.annotation.DataDirectory; -import com.velocitypowered.api.proxy.ProxyServer; - -import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.SparkPlugin; -import me.lucko.spark.common.monitor.ping.PlayerPingProvider; -import me.lucko.spark.common.platform.PlatformInfo; -import me.lucko.spark.common.sampler.source.ClassSourceLookup; -import me.lucko.spark.common.sampler.source.SourceMetadata; - -import org.slf4j.Logger; - -import java.nio.file.Path; -import java.util.Collection; -import java.util.List; -import java.util.logging.Level; -import java.util.stream.Stream; - -@Plugin( - id = "spark", - name = "spark", - version = "@version@", - description = "@desc@", - authors = {"Luck"} -) -public class Velocity4SparkPlugin implements SparkPlugin, SimpleCommand { - - private final ProxyServer proxy; - private final Logger logger; - private final Path configDirectory; - - private SparkPlatform platform; - - @Inject - public Velocity4SparkPlugin(ProxyServer proxy, Logger logger, @DataDirectory Path configDirectory) { - this.proxy = proxy; - this.logger = logger; - this.configDirectory = configDirectory; - } - - @Subscribe(order = PostOrder.FIRST) - public void onEnable(ProxyInitializeEvent e) { - this.platform = new SparkPlatform(this); - this.platform.enable(); - this.proxy.commandManager().register("sparkv", this, "sparkvelocity"); - } - - @Subscribe(order = PostOrder.LAST) - public void onDisable(ProxyShutdownEvent e) { - this.platform.disable(); - } - - @Override - public void execute(Invocation inv) { - this.platform.executeCommand(new Velocity4CommandSender(inv.source()), inv.arguments()); - } - - @Override - public List<String> suggest(Invocation inv) { - return this.platform.tabCompleteCommand(new Velocity4CommandSender(inv.source()), inv.arguments()); - } - - @Override - public String getVersion() { - return Velocity4SparkPlugin.class.getAnnotation(Plugin.class).version(); - } - - @Override - public Path getPluginDirectory() { - return this.configDirectory; - } - - @Override - public String getCommandName() { - return "sparkv"; - } - - @Override - public Stream<Velocity4CommandSender> getCommandSenders() { - return Stream.concat( - this.proxy.connectedPlayers().stream(), - Stream.of(this.proxy.consoleCommandSource()) - ).map(Velocity4CommandSender::new); - } - - @Override - public void executeAsync(Runnable task) { - this.proxy.scheduler().buildTask(this, task).schedule(); - } - - @Override - public void log(Level level, String msg) { - if (level == Level.INFO) { - this.logger.info(msg); - } else if (level == Level.WARNING) { - this.logger.warn(msg); - } else if (level == Level.SEVERE) { - this.logger.error(msg); - } else { - throw new IllegalArgumentException(level.getName()); - } - } - - @Override - public ClassSourceLookup createClassSourceLookup() { - return new Velocity4ClassSourceLookup(this.proxy.pluginManager()); - } - - @Override - public Collection<SourceMetadata> getKnownSources() { - return SourceMetadata.gather( - this.proxy.pluginManager().plugins(), - plugin -> plugin.description().id(), - plugin -> plugin.description().version(), - plugin -> String.join(", ", 
plugin.description().authors()) - ); - } - - @Override - public PlayerPingProvider createPlayerPingProvider() { - return new Velocity4PlayerPingProvider(this.proxy); - } - - @Override - public PlatformInfo getPlatformInfo() { - return new Velocity4PlatformInfo(this.proxy); - } -} diff --git a/spark-waterdog/build.gradle b/spark-waterdog/build.gradle deleted file mode 100644 index c4bd38278f5..00000000000 --- a/spark-waterdog/build.gradle +++ /dev/null @@ -1,50 +0,0 @@ -plugins { - id 'com.github.johnrengelman.shadow' version '7.0.0' -} - -tasks.withType(JavaCompile) { - // override, compile targeting J11 - options.release = 11 -} - -dependencies { - implementation project(':spark-common') - implementation 'net.kyori:adventure-text-serializer-legacy:4.12.0' - compileOnly 'dev.waterdog.waterdogpe:waterdog:1.2.3' -} - -repositories { - maven { url 'https://repo.opencollab.dev/main/' } - maven { url 'https://repo.waterdog.dev/artifactory/main/' } -} - -processResources { - from(sourceSets.main.resources.srcDirs) { - expand ( - 'pluginVersion': project.pluginVersion, - 'pluginDescription': project.pluginDescription - ) - include 'plugin.yml' - } -} - -shadowJar { - archiveName = "spark-${project.pluginVersion}-waterdog.jar" - - relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' - relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' - relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' - relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' - relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' - relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' - relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks' - - exclude 'module-info.class' - exclude 'META-INF/maven/**' - exclude 'META-INF/proguard/**' -} - -artifacts { - archives shadowJar - shadow shadowJar -} diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogClassSourceLookup.java b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogClassSourceLookup.java deleted file mode 100644 index 2207c9ee2d7..00000000000 --- a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogClassSourceLookup.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. 
- */ - -package me.lucko.spark.waterdog; - -import me.lucko.spark.common.sampler.source.ClassSourceLookup; - -import dev.waterdog.waterdogpe.ProxyServer; -import dev.waterdog.waterdogpe.plugin.Plugin; -import dev.waterdog.waterdogpe.plugin.PluginClassLoader; - -import java.util.Map; -import java.util.WeakHashMap; - -public class WaterdogClassSourceLookup extends ClassSourceLookup.ByClassLoader { - private final ProxyServer proxy; - private final Map<ClassLoader, String> cache; - - public WaterdogClassSourceLookup(ProxyServer proxy) { - this.proxy = proxy; - this.cache = new WeakHashMap<>(); - } - - @Override - public String identify(ClassLoader loader) throws ReflectiveOperationException { - if (loader instanceof PluginClassLoader) { - String name = this.cache.get(loader); - if (name != null) { - return name; - } - - for (Plugin plugin : this.proxy.getPluginManager().getPlugins()) { - if (plugin.getClass().getClassLoader() == loader) { - name = plugin.getName(); - break; - } - } - - this.cache.put(loader, name); - return name; - } - return null; - } -} - diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlayerPingProvider.java b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlayerPingProvider.java deleted file mode 100644 index b22325c9be3..00000000000 --- a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlayerPingProvider.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - */ - -package me.lucko.spark.waterdog; - -import com.google.common.collect.ImmutableMap; - -import me.lucko.spark.common.monitor.ping.PlayerPingProvider; - -import dev.waterdog.waterdogpe.ProxyServer; -import dev.waterdog.waterdogpe.player.ProxiedPlayer; - -import java.util.Map; - -public class WaterdogPlayerPingProvider implements PlayerPingProvider { - private final ProxyServer proxy; - - public WaterdogPlayerPingProvider(ProxyServer proxy) { - this.proxy = proxy; - } - - @Override - public Map<String, Integer> poll() { - ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder(); - for (ProxiedPlayer player : this.proxy.getPlayers().values()) { - builder.put(player.getName(), (int) player.getPing()); - } - return builder.build(); - } -} diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java deleted file mode 100644 index 23aa6a686f5..00000000000 --- a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * This file is part of spark. 
- * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - */ - -package me.lucko.spark.waterdog; - -import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.SparkPlugin; -import me.lucko.spark.common.monitor.ping.PlayerPingProvider; -import me.lucko.spark.common.platform.PlatformInfo; -import me.lucko.spark.common.sampler.source.ClassSourceLookup; -import me.lucko.spark.common.sampler.source.SourceMetadata; - -import dev.waterdog.waterdogpe.ProxyServer; -import dev.waterdog.waterdogpe.command.Command; -import dev.waterdog.waterdogpe.command.CommandSender; -import dev.waterdog.waterdogpe.plugin.Plugin; - -import java.nio.file.Path; -import java.util.Collection; -import java.util.logging.Level; -import java.util.stream.Stream; - -public class WaterdogSparkPlugin extends Plugin implements SparkPlugin { - private SparkPlatform platform; - - public ProxyServer getProxy() { - return ProxyServer.getInstance(); - } - - @Override - public void onEnable() { - this.platform = new SparkPlatform(this); - this.platform.enable(); - getProxy().getCommandMap().registerCommand(new SparkCommand(this)); - } - - @Override - public void onDisable() { - this.platform.disable(); - } - - @Override - public String getVersion() { - return getDescription().getVersion(); - } - - @Override - public Path getPluginDirectory() { - return getDataFolder().toPath(); - } - - @Override - public String getCommandName() { - return "sparkw"; - } - - @Override - public Stream<WaterdogCommandSender> getCommandSenders() { - return Stream.concat( - getProxy().getPlayers().values().stream(), - Stream.of(getProxy().getConsoleSender()) - ).map(WaterdogCommandSender::new); - } - - @Override - public void executeAsync(Runnable task) { - getProxy().getScheduler().scheduleAsync(task); - } - - @Override - public void log(Level level, String msg) { - if (level == Level.INFO) { - getLogger().info(msg); - } else if (level == Level.WARNING) { - getLogger().warn(msg); - } else if (level == Level.SEVERE) { - getLogger().error(msg); - } else { - throw new IllegalArgumentException(level.getName()); - } - } - - @Override - public ClassSourceLookup createClassSourceLookup() { - return new WaterdogClassSourceLookup(getProxy()); - } - - @Override - public Collection<SourceMetadata> getKnownSources() { - return SourceMetadata.gather( - getProxy().getPluginManager().getPlugins(), - Plugin::getName, - plugin -> plugin.getDescription().getVersion(), - plugin -> plugin.getDescription().getAuthor() - ); - } - - @Override - public PlayerPingProvider createPlayerPingProvider() { - return new WaterdogPlayerPingProvider(getProxy()); - } - - @Override - public PlatformInfo getPlatformInfo() { - return new WaterdogPlatformInfo(); - } - - private static final class SparkCommand extends Command { - private final WaterdogSparkPlugin plugin; - - SparkCommand(WaterdogSparkPlugin plugin) { - super("sparkw"); - this.plugin = plugin; - } - - @Override - public 
boolean onExecute(CommandSender sender, String alias, String[] args) { - this.plugin.platform.executeCommand(new WaterdogCommandSender(sender), args); - return true; - } - } -} diff --git a/spark-waterdog/src/main/resources/plugin.yml b/spark-waterdog/src/main/resources/plugin.yml deleted file mode 100644 index ca0e8467fad..00000000000 --- a/spark-waterdog/src/main/resources/plugin.yml +++ /dev/null @@ -1,6 +0,0 @@ -name: spark -version: ${pluginVersion} -description: ${pluginDescription} -author: Luck -website: https://spark.lucko.me/ -main: me.lucko.spark.waterdog.WaterdogSparkPlugin