diff --git a/.classpath b/.classpath
deleted file mode 100644
index 9c7e0033..00000000
--- a/.classpath
+++ /dev/null
@@ -1,9 +0,0 @@
-
-
-
-
-
-
-
-
-
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 00000000..c2987381
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,15 @@
+# To get started with Dependabot version updates, you'll need to specify which
+# package ecosystems to update and where the package manifests are located.
+# Please see the documentation for all configuration options:
+# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+
+version: 2
+updates:
+ - package-ecosystem: "maven"
+ directory: "/"
+ schedule:
+ interval: "weekly"
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: "monthly"
diff --git a/.github/workflows/cd.yaml b/.github/workflows/cd.yaml
new file mode 100644
index 00000000..0279984d
--- /dev/null
+++ b/.github/workflows/cd.yaml
@@ -0,0 +1,15 @@
+# Note: additional setup is required, see https://www.jenkins.io/redirect/continuous-delivery-of-plugins
+
+name: cd
+on:
+ workflow_dispatch:
+ check_run:
+ types:
+ - completed
+
+jobs:
+ maven-cd:
+ uses: jenkins-infra/github-reusable-workflows/.github/workflows/maven-cd.yml@v1
+ secrets:
+ MAVEN_USERNAME: ${{ secrets.MAVEN_USERNAME }}
+ MAVEN_TOKEN: ${{ secrets.MAVEN_TOKEN }}
diff --git a/.github/workflows/jenkins-security-scan.yml b/.github/workflows/jenkins-security-scan.yml
new file mode 100644
index 00000000..c7b41fc2
--- /dev/null
+++ b/.github/workflows/jenkins-security-scan.yml
@@ -0,0 +1,21 @@
+name: Jenkins Security Scan
+
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+ types: [ opened, synchronize, reopened ]
+ workflow_dispatch:
+
+permissions:
+ security-events: write
+ contents: read
+ actions: read
+
+jobs:
+ security-scan:
+ uses: jenkins-infra/jenkins-security-scan/.github/workflows/jenkins-security-scan.yaml@v2
+ with:
+ java-cache: 'maven' # Optionally enable use of a build dependency cache. Specify 'maven' or 'gradle' as appropriate.
+ # java-version: 21 # Optionally specify what version of Java to set up for the build, or remove to use a recent default.
diff --git a/.gitignore b/.gitignore
index abbe2829..5a3b1d32 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,5 @@
-target
.settings
+.classpath
+target
+work
+*.i*
diff --git a/.mvn/extensions.xml b/.mvn/extensions.xml
new file mode 100644
index 00000000..9440b180
--- /dev/null
+++ b/.mvn/extensions.xml
@@ -0,0 +1,7 @@
+<extensions>
+  <extension>
+    <groupId>io.jenkins.tools.incrementals</groupId>
+    <artifactId>git-changelist-maven-extension</artifactId>
+    <version>1.13</version>
+  </extension>
+</extensions>
diff --git a/.mvn/maven.config b/.mvn/maven.config
new file mode 100644
index 00000000..f7daf60d
--- /dev/null
+++ b/.mvn/maven.config
@@ -0,0 +1,3 @@
+-Pconsume-incrementals
+-Pmight-produce-incrementals
+-Dchangelist.format=%d.v%s
diff --git a/.project b/.project
index 1388c7f6..310aeb30 100644
--- a/.project
+++ b/.project
@@ -15,8 +15,14 @@
 			<arguments>
 			</arguments>
 		</buildCommand>
+		<buildCommand>
+			<name>org.eclipse.m2e.core.maven2Builder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
 	</buildSpec>
 	<natures>
+		<nature>org.eclipse.m2e.core.maven2Nature</nature>
 		<nature>org.maven.ide.eclipse.maven2Nature</nature>
 		<nature>org.eclipse.jdt.core.javanature</nature>
 	</natures>
diff --git a/.settings/org.eclipse.jdt.core.prefs b/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 7c6bbe61..00000000
--- a/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,5 +0,0 @@
-#Mon Jul 23 09:28:36 CEST 2007
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.source=1.5
-org.eclipse.jdt.core.compiler.compliance=1.5
diff --git a/Jenkinsfile b/Jenkinsfile
new file mode 100644
index 00000000..f5972b03
--- /dev/null
+++ b/Jenkinsfile
@@ -0,0 +1,11 @@
+/*
+ See the documentation for more options:
+ https://github.com/jenkins-infra/pipeline-library/
+*/
+buildPlugin(
+ useContainerAgent: false, // Set to `false` if you need to use Docker for containerized tests
+ configurations: [
+ [platform: 'linux', jdk: 21],
+ [platform: 'windows', jdk: 17],
+ [platform: 'linux', jdk: 25],
+])
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 00000000..d9ce25a1
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Jenkins
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.markdown b/README.markdown
deleted file mode 100644
index ace34aae..00000000
--- a/README.markdown
+++ /dev/null
@@ -1,33 +0,0 @@
-
-Install
-=======
-
-Tested with Hudson 1.367
-
-* Upload target/s3.hpi to your instance of Hudson
-* Configure S3 profile: Manage Hudson -> Configure System -> Amazon S3 profiles
-* Project -> Configure -> [x] Publish artifacts to S3 Bucket
-
-Building
-========
-
-You do try either:
-
-1. Within the Hudson tree
- * The plugin originally expected to live in the `hudson/plugins/s3` directory of a Hudson svn checkout
- * While in the `s3` directory, just run `mvn`
-2. Standalone tree
- * While in the `hudson-s3` directory, with no parent Hudson source, `mvn` might work
- * Note: you may have to move `dotm2_settings.xml` to `~/.m2/settings.xml`
-
-Notes
-=====
-
-* Only the basename of source files is use as the object key name, an option to include the path name relative to the workspace should probably added.
-
-Acknowledgements
-================
-
-* The Hudson scp plugin author for providing a great place to start copy/pasting from
-* http://github.com/stephenh/hudson-git2 - for this README.markdown template and a great git plugin for hudson
-* jets3t - http://jets3t.s3.amazonaws.com/index.html
diff --git a/README.md b/README.md
new file mode 100644
index 00000000..3a1c2a7a
--- /dev/null
+++ b/README.md
@@ -0,0 +1,109 @@
+# S3 plugin for Jenkins
+
+This plugin uploads build artifacts to Amazon S3.
+
+## Making artifacts public
+
+If you'd like to have some of your artifacts be publicly downloadable,
+see [Granting public access to some S3 objects](https://aws.amazon.com/premiumsupport/knowledge-center/read-access-objects-s3-bucket/)
+
+Usage
+=====
+
+When activated, traditional (Freestyle) Jenkins builds will have a
+build action called `S3 Copy Artifact` for downloading artifacts,
+and a post-build action called `Publish Artifacts to S3 Bucket`.
+
+For Pipeline users, the same two actions are available via the
+`s3CopyArtifact` and `s3Upload` steps. You can use the Snippet Generator
+to get started, or adapt the sketch below.
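+
+A minimal Scripted Pipeline sketch (the profile, bucket, and file names are
+placeholders; the entry fields mirror this plugin's `Entry` constructor, so
+use the Snippet Generator to confirm the exact form for your version):
+
+```groovy
+node {
+    // upload all .hpi files under target/ using a pre-configured S3 profile
+    s3Upload(profileName: 'my-s3-profile',
+             entries: [[
+                 bucket: 'my-artifact-bucket/builds',
+                 sourceFile: 'target/*.hpi',
+                 selectedRegion: 'us-east-1'
+             ]])
+}
+```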
+
+When using an Amazon S3 compatible storage system (OpenStack Swift, EMC Atmos...),
+the list of AWS regions can be overridden by specifying a file
+`classpath://com/amazonaws/partitions/override/endpoints.json` matching the format
+defined in AWS SDK's [endpoints.json](https://github.com/aws/aws-sdk-java/blob/master/aws-java-sdk-core/src/main/resources/com/amazonaws/partitions/endpoints.json).
+
+One way to add this `endpoints.json` file to the Jenkins classpath is to use the
+`java` command line parameter `-Xbootclasspath/a:/path/to/boot/classpath/folder/` and
+to place `com/amazonaws/partitions/override/endpoints.json` in `/path/to/boot/classpath/folder/`.
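+For example, with the override file at
+`/opt/jenkins-overrides/com/amazonaws/partitions/override/endpoints.json`
+(a path chosen here just for illustration), Jenkins would be started with
+`java -Xbootclasspath/a:/opt/jenkins-overrides -jar jenkins.war`.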
+
+
+While most features of the Jenkins S3 Plugin require the user to specify the target region,
+some features rely on a default Amazon S3 region, which is by default the "US Standard Amazon S3 Region"
+whose endpoint is `s3.amazonaws.com`. This default region can be overridden with the system property
+`hudson.plugins.s3.DEFAULT_AMAZON_S3_REGION`.
+Note that this default region name MUST match a region defined in the AWS SDK configuration file `endpoints.json`
+(see above).
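+For example, starting Jenkins with
+`-Dhudson.plugins.s3.DEFAULT_AMAZON_S3_REGION=eu-west-1` makes `eu-west-1`
+the fallback region, provided that name exists in `endpoints.json`.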
+
+Usage with IAM
+=====
+
+If you used IAM to create a separate pair of access credentials for this
+plugin, you can lock down its AWS access to simply listing buckets and
+writing to a specific bucket. Add the following custom policy to the
+user in the IAM console, replacing occurrences of "my-artifact-bucket"
+with your bucket name, which you'll have to create first:
+
+``` json
+{
+ "Statement": [
+ {
+ "Action": [
+ "s3:ListAllMyBuckets"
+ ],
+ "Effect": "Allow",
+ "Resource": "arn:aws:s3:::*"
+ },
+ {
+ "Action": "s3:*",
+ "Effect": "Allow",
+ "Resource": ["arn:aws:s3:::my-artifact-bucket", "arn:aws:s3:::my-artifact-bucket/*"]
+ }
+ ]
+}
+```
+
+Notes
+=====
+
+* Only the basename of source files is used as the object key name;
+an option to include the path name relative to the workspace
+should probably be added.
+
+Changelog
+=========
+
+* New change logs are in [GitHub Releases](https://github.com/jenkinsci/s3-plugin/releases)
+* Old change logs are stored in [old-changelog.md](old-changelog.md).
+
+Acknowledgements
+================
+
+* The Hudson scp plugin author for providing a great place to
+start copy/pasting from.
+* http://github.com/stephenh/hudson-git2 - for this README.markdown
+template and a great git plugin for Hudson.
+* jets3t - http://jets3t.s3.amazonaws.com/index.html
diff --git a/dotm2_settings.xml b/dotm2_settings.xml
deleted file mode 100644
index 0471ceff..00000000
--- a/dotm2_settings.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<settings>
-
-  <profiles>
-
-    <profile>
-
-      <id>hudson</id>
-
-      <repositories>
-
-        <repository>
-
-          <id>java.net2</id>
-          <url>http://download.java.net/maven/2</url>
-        </repository>
-      </repositories>
-
-      <pluginRepositories>
-        <pluginRepository>
-          <id>java.net2</id>
-          <url>http://download.java.net/maven/2</url>
-          <releases>
-            <enabled>true</enabled>
-            <updatePolicy>never</updatePolicy>
-          </releases>
-          <snapshots>
-            <enabled>false</enabled>
-          </snapshots>
-        </pluginRepository>
-      </pluginRepositories>
-    </profile>
-  </profiles>
-
-  <activeProfiles>
-    <activeProfile>hudson</activeProfile>
-  </activeProfiles>
-
-  <pluginGroups>
-    <pluginGroup>org.jvnet.hudson.tools</pluginGroup>
-  </pluginGroups>
-</settings>
diff --git a/old-changelog.md b/old-changelog.md
new file mode 100644
index 00000000..adf2c0a8
--- /dev/null
+++ b/old-changelog.md
@@ -0,0 +1,81 @@
+# Version 0.10.11 (Dec 31, 2016)
+**Do not update - backward compatibility for pipeline scripts is broken**
+* Make the plugin work with storage backends compatible with Amazon S3 (OpenStack Swift...) (JENKINS-40654, PR-100)
+* Add Standard - Infrequent Access storage class (PR-98)
+* Constrain build result severity (JENKINS-27284, PR-95)
+* Add job setting to suppress console logging (PR-94)
+
+# Version 0.10.10 (Oct 10, 2016)
+* Add method for changing S3Profile via Groovy
+
+# Version 0.10.9 (June 27, 2016)
+* Added option to open content directly in browser (JENKINS-37346)
+* Fixed IE and Chrome download issue when file path is Windows style ([PR-93](https://github.com/jenkinsci/s3-plugin/pull/93))
+
+# Version 0.10.8 (Aug 31, 2016)
+**Doesn't exist (broken release because of changes in Jenkins plugin repository)**
+
+# Version 0.10.7 (July 21, 2016)
+* Handle InterruptedExceptions when no files are found (PR-92)
+
+# Version 0.10.6 (July 1, 2016)
+* Don't upload on aborted build (JENKINS-25509, PR-90)
+
+# Version 0.10.5.1 (June 27, 2016)
+* Plugin missing transitive dependencies (JENKINS-36096)
+
+# Version 0.10.5 (June 17, 2016)
+* Failed to reset the request input stream (JENKINS-34216 / PR-90)
+
+# Version 0.10.4 (June 10, 2016)
+* Restore support for MatrixPlugin (JENKINS-35123)
+* Add new parameter on Profile level - to keep or not the folder structure. By default, the plugin doesn't keep the folder structure, and the option to keep it will be removed in one of the next releases (JENKINS-34780)
+
+# Version 0.10.3 (May 25, 2016)
+* Add option to keep artifacts forever
+* S3 Plugin switches credential profiles on-the-fly (JENKINS-14470)
+
+# Version 0.10.2 (May 11, 2016)
+* Add usages to README file (PR-87)
+* Add option to set content-type on files (PR-86)
+* S3 artifacts are visible from API
+
+# Version 0.10.1 (Apr 25, 2016)
+* Parallel uploading
+* Support uploading for unfinished builds
+
+# Version 0.9.4 (Apr 23, 2016)
+* Update AWS SDK to latest version
+* Fix credential issue
+
+# Version 0.9.2 (Apr 06, 2016)
+* Update AWS SDK to latest version
+* Fix credential issue
+
+# Version 0.9.1 (Apr 05, 2016)
+* Updated the aws-java-sdk dependency to support java region uploads
+* Uploading and downloading files more than 5GB using TransferManager
+* Flatten directories
+* Excludes for downloading and uploading
+* Several profiles
+* Retries for downloading
+* Workflow plugin support
+* Using default Jenkins proxy
+* Artifacts now use the full job name instead of the project name only
+
+# Version 0.5 (Aug 09, 2013)
+* Added Regions Support (JENKINS-18839)
+* Update AWS SDK to latest version
+
+# Version 0.4 (Jul 12, 2013)
+* Added storage class support
+* Added arbitrary metadata support
+* Fixed the problem where the plugin messes up credential profiles upon concurrent use (JENKINS-14470)
+* Plugin shouldn't store S3 password in clear (JENKINS-14395)
+
+# Version 0.3.1 (Sept. 20th, 2012)
+* Prevent OOME when uploading large files.
+* Update Amazon SDK
+
+# Version 0.3.0 (May 29th, 2012)
+* Use AWS MimeType library to determine the Content-Type of the uploaded file.
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index 1d1b60be..97a12749 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,30 +1,130 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <parent>
-    <groupId>org.jvnet.hudson.plugins</groupId>
-    <artifactId>plugin</artifactId>
-    <version>1.367</version>
-    <relativePath>../pom.xml</relativePath>
-  </parent>
-
-  <artifactId>s3</artifactId>
-  <packaging>hpi</packaging>
-  <version>0.1.0</version>
-  <name>Hudson S3 publisher plugin</name>
-  <url>http://github.com/d6y/hudson-s3</url>
-
-  <developers>
-    <developer><id>dougm</id><name>Doug MacEachern</name></developer>
-    <developer><id>d6y</id><name>Richard Dallaway</name></developer>
-  </developers>
-
-
-  <dependencies>
-
-    <dependency>
-      <groupId>net.java.dev.jets3t</groupId>
-      <artifactId>jets3t</artifactId>
-      <version>0.6.1</version>
-    </dependency>
-  </dependencies>
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.jenkins-ci.plugins</groupId>
+    <artifactId>plugin</artifactId>
+    <version>5.26</version>
+    <relativePath />
+  </parent>
+  <artifactId>s3</artifactId>
+  <packaging>hpi</packaging>
+  <version>${changelist}</version>
+  <name>Jenkins S3 publisher plugin</name>
+  <url>https://github.com/jenkinsci/${project.artifactId}-plugin</url>
+
+  <properties>
+    <changelist>999999-SNAPSHOT</changelist>
+    <gitHubRepo>jenkinsci/${project.artifactId}-plugin</gitHubRepo>
+
+    <jenkins.baseline>2.479</jenkins.baseline>
+    <jenkins.version>${jenkins.baseline}.3</jenkins.version>
+  </properties>
+
+  <developers>
+    <developer>
+      <id>dougm</id>
+      <name>Doug MacEachern</name>
+    </developer>
+    <developer>
+      <id>d6y</id>
+      <name>Richard Dallaway</name>
+    </developer>
+    <developer>
+      <id>longlho</id>
+      <name>Long Ho</name>
+    </developer>
+    <developer>
+      <id>mikewatt</id>
+      <name>Michael Watt</name>
+    </developer>
+    <developer>
+      <id>dmbeer</id>
+      <name>David Beer</name>
+    </developer>
+    <developer>
+      <id>mattias</id>
+      <name>Mattias Appelgren</name>
+    </developer>
+    <developer>
+      <id>Jimilian</id>
+      <name>Alexander Akbashev</name>
+    </developer>
+  </developers>
+
+  <scm>
+    <connection>scm:git:https://github.com/${gitHubRepo}.git</connection>
+    <developerConnection>scm:git:git@github.com:${gitHubRepo}.git</developerConnection>
+    <url>https://github.com/${gitHubRepo}</url>
+    <tag>${scmTag}</tag>
+  </scm>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>io.jenkins.plugins.aws-java-sdk2</groupId>
+      <artifactId>aws-java-sdk2-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>io.jenkins.plugins.aws-java-sdk2</groupId>
+      <artifactId>aws-java-sdk2-s3</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>io.jenkins.plugins.aws-java-sdk2</groupId>
+      <artifactId>aws-java-sdk2-netty-nio-client</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.jenkins-ci.plugins</groupId>
+      <artifactId>copyartifact</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.jenkins-ci.main</groupId>
+      <artifactId>maven-plugin</artifactId>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.jenkins-ci.plugins</groupId>
+      <artifactId>matrix-project</artifactId>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.jenkins-ci.plugins</groupId>
+      <artifactId>structs</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.testcontainers</groupId>
+      <artifactId>testcontainers</artifactId>
+      <version>1.21.3</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <dependencyManagement>
+    <dependencies>
+      <dependency>
+        <groupId>io.jenkins.tools.bom</groupId>
+        <artifactId>bom-${jenkins.baseline}.x</artifactId>
+        <version>5054.v620b_5d2b_d5e6</version>
+        <type>pom</type>
+        <scope>import</scope>
+      </dependency>
+    </dependencies>
+  </dependencyManagement>
+
+  <repositories>
+    <repository>
+      <id>repo.jenkins-ci.org</id>
+      <url>https://repo.jenkins-ci.org/public/</url>
+    </repository>
+  </repositories>
+
+  <pluginRepositories>
+    <pluginRepository>
+      <id>repo.jenkins-ci.org</id>
+      <url>https://repo.jenkins-ci.org/public/</url>
+    </pluginRepository>
+  </pluginRepositories>
 </project>
diff --git a/src/main/java/com/hyperic/hudson/plugin/Entry.java b/src/main/java/com/hyperic/hudson/plugin/Entry.java
deleted file mode 100644
index da9bb6a0..00000000
--- a/src/main/java/com/hyperic/hudson/plugin/Entry.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package com.hyperic.hudson.plugin;
-
-public final class Entry {
- /**
- * Destination bucket for the copy. Can contain macros.
- */
- public String bucket;
- /**
- * File name relative to the workspace root to upload.
- * Can contain macros and wildcards.
- *
- */
- public String sourceFile;
-}
diff --git a/src/main/java/com/hyperic/hudson/plugin/S3BucketPublisher.java b/src/main/java/com/hyperic/hudson/plugin/S3BucketPublisher.java
deleted file mode 100644
index 44436984..00000000
--- a/src/main/java/com/hyperic/hudson/plugin/S3BucketPublisher.java
+++ /dev/null
@@ -1,230 +0,0 @@
-package com.hyperic.hudson.plugin;
-
-import hudson.Extension;
-import hudson.FilePath;
-import hudson.Launcher;
-import hudson.Util;
-import hudson.model.AbstractBuild;
-import hudson.model.AbstractProject;
-import hudson.model.BuildListener;
-import hudson.model.Result;
-import hudson.tasks.BuildStepDescriptor;
-import hudson.tasks.BuildStepMonitor;
-import hudson.tasks.Notifier;
-import hudson.tasks.Publisher;
-import hudson.util.CopyOnWriteList;
-import hudson.util.FormValidation;
-
-import java.io.IOException;
-import java.io.PrintStream;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import net.sf.json.JSONObject;
-
-import org.apache.commons.lang.StringUtils;
-import org.jets3t.service.S3ServiceException;
-import org.kohsuke.stapler.StaplerRequest;
-
-public final class S3BucketPublisher extends Notifier {
-
- private String profileName;
-
- public static final Logger LOGGER = Logger.getLogger(S3BucketPublisher.class.getName());
-
- private final List<Entry> entries = new ArrayList<Entry>();
-
- public S3BucketPublisher() {
- }
-
- public S3BucketPublisher(String profileName) {
- if (profileName == null) {
- // defaults to the first one
- S3Profile[] sites = DESCRIPTOR.getProfiles();
- if (sites.length > 0)
- profileName = sites[0].getName();
- }
- this.profileName = profileName;
- }
-
-
- public List<Entry> getEntries() {
- return entries;
- }
-
- public S3Profile getProfile() {
- S3Profile[] profiles = DESCRIPTOR.getProfiles();
- if (profileName == null && profiles.length > 0)
- // default
- return profiles[0];
-
- for (S3Profile profile : profiles) {
- if (profile.getName().equals(profileName))
- return profile;
- }
- return null;
- }
-
- public BuildStepMonitor getRequiredMonitorService() {
- return BuildStepMonitor.BUILD;
- }
-
-
- @Override
- public boolean perform(AbstractBuild<?, ?> build,
- Launcher launcher,
- BuildListener listener)
- throws InterruptedException, IOException {
-
- if (build.getResult() == Result.FAILURE) {
- // build failed. don't post
- return true;
- }
-
- S3Profile profile = getProfile();
- if (profile == null) {
- log(listener.getLogger(), "No S3 profile is configured.");
- build.setResult(Result.UNSTABLE);
- return true;
- }
- log(listener.getLogger(), "Using S3 profile: " + profile.getName());
- try {
- profile.login();
- } catch (S3ServiceException e) {
- throw new IOException("Can't connect to S3 service: " + e);
- }
-
- try {
- Map<String, String> envVars = build.getEnvironment(listener);
-
- log(listener.getLogger(), "Entries: "+entries);
-
- for (Entry entry : entries) {
- String expanded = Util.replaceMacro(entry.sourceFile, envVars);
- FilePath ws = build.getWorkspace();
- FilePath[] paths = ws.list(expanded);
-
- if (paths.length == 0) {
- // try to do error diagnostics
- log(listener.getLogger(), "No file(s) found: " + expanded);
- String error = ws.validateAntFileMask(expanded);
- if (error != null)
- log(listener.getLogger(), error);
- }
- String bucket = Util.replaceMacro(entry.bucket, envVars);
- for (FilePath src : paths) {
- log(listener.getLogger(), "bucket=" + bucket + ", file=" + src.getName());
- profile.upload(bucket, src, envVars, listener.getLogger());
- }
- }
- } catch (IOException e) {
- e.printStackTrace(listener.error("Failed to upload files"));
- build.setResult(Result.UNSTABLE);
- } finally {
- if (profile != null) {
- profile.logout();
- }
- }
-
- return true;
- }
-
- @Override
- public BuildStepDescriptor<Publisher> getDescriptor() {
- return DESCRIPTOR;
- }
-
- @Extension
- public static final DescriptorImpl DESCRIPTOR = new DescriptorImpl();
-
- public static final class DescriptorImpl extends BuildStepDescriptor<Publisher> {
-
- public DescriptorImpl() {
- super(S3BucketPublisher.class);
- load();
- }
-
- protected DescriptorImpl(Class<? extends Publisher> clazz) {
- super(clazz);
- }
-
- private final CopyOnWriteList<S3Profile> profiles = new CopyOnWriteList<S3Profile>();
-
- public String getDisplayName() {
- return "Publish artifacts to S3 Bucket";
- }
-
- public String getShortName()
- {
- return "[S3] ";
- }
-
- @Override
- public String getHelpFile() {
- return "/plugin/s3/help.html";
- }
-
- @Override
- public boolean isApplicable(Class<? extends AbstractProject> jobType) {
- return true;
- }
-
- @Override
- public Publisher newInstance(StaplerRequest req, JSONObject formData) {
- S3BucketPublisher pub = new S3BucketPublisher();
- req.bindParameters(pub, "s3.");
- pub.getEntries().addAll(req.bindParametersToList(Entry.class, "s3.entry."));
- return pub;
- }
-
- public S3Profile[] getProfiles() {
- return profiles.toArray(new S3Profile[0]);
- }
-
- @Override
- public boolean configure(StaplerRequest req, JSONObject formData) {
- profiles.replaceBy(req.bindParametersToList(S3Profile.class, "s3."));
- save();
- return true;
- }
-
-
- public FormValidation doLoginCheck(final StaplerRequest request) {
- final String name = Util.fixEmpty(request.getParameter("name"));
- if (name == null) { // name is not entered yet
- return FormValidation.ok();
- }
-
- S3Profile profile = new S3Profile(name, request.getParameter("accessKey"), request.getParameter("secretKey"));
-
- try {
- profile.login();
- profile.check();
- profile.logout();
- } catch (S3ServiceException e) {
- LOGGER.log(Level.SEVERE, e.getMessage());
- return FormValidation.error("Can't connect to S3 service: " + e.getS3ErrorMessage());
- }
-
- return FormValidation.ok();
- }
-
-
-
- }
-
- public String getProfileName() {
- return this.profileName;
- }
-
- public void setProfileName(String profileName) {
- this.profileName = profileName;
- }
-
- protected void log(final PrintStream logger, final String message) {
- logger.println(StringUtils.defaultString(DESCRIPTOR.getShortName()) + message);
- }
-}
diff --git a/src/main/java/com/hyperic/hudson/plugin/S3Profile.java b/src/main/java/com/hyperic/hudson/plugin/S3Profile.java
deleted file mode 100644
index 1ecd1811..00000000
--- a/src/main/java/com/hyperic/hudson/plugin/S3Profile.java
+++ /dev/null
@@ -1,126 +0,0 @@
-package com.hyperic.hudson.plugin;
-
-import hudson.FilePath;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.commons.lang.StringUtils;
-import org.jets3t.service.S3Service;
-import org.jets3t.service.S3ServiceException;
-import org.jets3t.service.impl.rest.httpclient.RestS3Service;
-import org.jets3t.service.model.S3Bucket;
-import org.jets3t.service.model.S3Object;
-import org.jets3t.service.security.AWSCredentials;
-
-public class S3Profile {
- String name;
- String accessKey;
- String secretKey;
- private S3Service s3;
-
- public static final Logger LOGGER =
- Logger.getLogger(S3Profile.class.getName());
-
- public S3Profile() {
-
- }
-
- public S3Profile(String name, String accessKey, String secretKey) {
- this.name = name;
- this.accessKey = accessKey;
- this.secretKey = secretKey;
- }
-
- public String getAccessKey() {
- return accessKey;
- }
-
- public void setAccessKey(String accessKey) {
- this.accessKey = accessKey;
- }
-
- public String getSecretKey() {
- return secretKey;
- }
-
- public void setSecretKey(String secretKey) {
- this.secretKey = secretKey;
- }
-
- public String getName() {
- return this.name;
- }
-
- public void setName(String name) {
- this.name = name;
- }
-
- public void login() throws S3ServiceException {
- if (this.s3 != null) {
- return;
- }
- try {
- AWSCredentials creds =
- new AWSCredentials(this.accessKey, this.secretKey);
- this.s3 = new RestS3Service(creds);
- } catch (S3ServiceException e) {
- LOGGER.log(Level.SEVERE, e.getMessage());
- throw e;
- }
- }
-
- public void check() throws S3ServiceException {
- this.s3.listAllBuckets();
- }
-
- public void logout() {
- this.s3 = null;
- }
-
- private S3Bucket getOrCreateBucket(String bucketName) throws S3ServiceException {
- S3Bucket bucket = this.s3.getBucket(bucketName);
- if (bucket == null) {
- bucket = this.s3.createBucket(new S3Bucket(bucketName));
- }
- return bucket;
- }
-
- public void upload(String bucketName,
- FilePath filePath,
- Map<String, String> envVars,
- PrintStream logger)
- throws IOException, InterruptedException {
-
- if (filePath.isDirectory()) {
- throw new IOException(filePath + " is a directory");
- }
- else {
- File file = new File(filePath.getName());
- S3Bucket bucket;
- try {
- bucket = getOrCreateBucket(bucketName);
- } catch (S3ServiceException e) {
- throw new IOException(bucketName + " bucket: " + e);
- }
-
- try {
- S3Object fileObject = new S3Object(bucket, file.getName());
- fileObject.setDataInputStream(filePath.read());
- this.s3.putObject(bucket, fileObject);
- } catch (Exception e) {
- throw new IOException("put " + file + ": " + e, e);
- }
- }
- }
-
- protected void log(final PrintStream logger, final String message) {
- final String name =
- StringUtils.defaultString(S3BucketPublisher.DESCRIPTOR.getShortName());
- logger.println(name + message);
- }
-}
diff --git a/src/main/java/hudson/plugins/s3/ClientHelper.java b/src/main/java/hudson/plugins/s3/ClientHelper.java
new file mode 100644
index 00000000..63b29558
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/ClientHelper.java
@@ -0,0 +1,189 @@
+package hudson.plugins.s3;
+
+import edu.umd.cs.findbugs.annotations.CheckForNull;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import hudson.ProxyConfiguration;
+import io.netty.handler.ssl.SslProvider;
+import jenkins.model.Jenkins;
+import jenkins.util.JenkinsJVM;
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
+import software.amazon.awssdk.http.SdkHttpClient;
+import software.amazon.awssdk.http.apache.ApacheHttpClient;
+import software.amazon.awssdk.http.async.SdkAsyncHttpClient;
+import software.amazon.awssdk.http.nio.netty.NettyNioAsyncHttpClient;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3AsyncClient;
+import software.amazon.awssdk.services.s3.S3AsyncClientBuilder;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.S3ClientBuilder;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.regex.Pattern;
+
+import static org.apache.commons.lang.StringUtils.isNotEmpty;
+
+public class ClientHelper {
+ public final static String DEFAULT_AMAZON_S3_REGION_NAME = System.getProperty(
+ "hudson.plugins.s3.DEFAULT_AMAZON_S3_REGION", Region.US_EAST_1.id());
+ public static final String ENDPOINT = System.getProperty("hudson.plugins.s3.ENDPOINT", System.getenv("PLUGIN_S3_ENDPOINT"));
+ public static final URI ENDPOINT_URI;
+
+ static {
+ try {
+ ENDPOINT_URI = isNotEmpty(ENDPOINT) ? new URI(ENDPOINT) : null;
+ } catch (URISyntaxException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Deprecated
+ public static S3AsyncClient createAsyncClient(String accessKey, String secretKey, boolean useRole, String region, @CheckForNull ProxyConfiguration proxy, @CheckForNull URI customEndpoint, Long thresholdInBytes) {
+ return createAsyncClient(accessKey, secretKey, useRole, region, proxy, customEndpoint, thresholdInBytes, false);
+ }
+
+ public static S3AsyncClient createAsyncClient(String accessKey, String secretKey, boolean useRole, String region, @CheckForNull ProxyConfiguration proxy, @CheckForNull URI customEndpoint, Long thresholdInBytes, boolean usePathStyle) {
+ Region awsRegion = getRegionFromString(region);
+ S3AsyncClientBuilder builder = S3AsyncClient.builder();//.overrideConfiguration(clientConfiguration);
+ builder.region(awsRegion);
+ builder.multipartEnabled(true);
+
+ if (!useRole) {
+ builder = builder.credentialsProvider(() -> AwsBasicCredentials.create(accessKey, secretKey));
+ }
+
+ if (customEndpoint != null) {
+ builder = builder.endpointOverride(customEndpoint).forcePathStyle(usePathStyle);
+ builder.httpClient(getAsyncHttpClient(customEndpoint, proxy));
+ } else if (ENDPOINT_URI != null) {
+ builder = builder.endpointOverride(ENDPOINT_URI).forcePathStyle(usePathStyle);
+ builder.httpClient(getAsyncHttpClient(ENDPOINT_URI, proxy));
+ } else {
+ builder.httpClient(getAsyncHttpClient(null, proxy));
+ }
+ if (thresholdInBytes != null) {
+ builder.multipartConfiguration(mcb -> mcb.thresholdInBytes(thresholdInBytes));
+ }
+ return builder.build();
+ }
+
+ @Deprecated
+ public static S3Client createClient(String accessKey, String secretKey, boolean useRole, String region, ProxyConfiguration proxy) {
+ return createClient(accessKey, secretKey, useRole, region, proxy, ENDPOINT_URI, false);
+ }
+
+ public static S3Client createClient(String accessKey, String secretKey, boolean useRole, String region, ProxyConfiguration proxy, boolean usePathStyle) {
+ return createClient(accessKey, secretKey, useRole, region, proxy, ENDPOINT_URI, usePathStyle);
+ }
+
+ @Deprecated
+ public static S3Client createClient(String accessKey, String secretKey, boolean useRole, String region, ProxyConfiguration proxy, @CheckForNull URI customEndpoint) {
+ return createClient(accessKey, secretKey, useRole, region, proxy, customEndpoint, false);
+ }
+
+ public static S3Client createClient(String accessKey, String secretKey, boolean useRole, String region, ProxyConfiguration proxy, @CheckForNull URI customEndpoint, boolean usePathStyle) {
+ Region awsRegion = getRegionFromString(region);
+ S3ClientBuilder builder = S3Client.builder();
+ builder.region(awsRegion);
+
+ if (!useRole) {
+ builder = builder.credentialsProvider(() -> AwsBasicCredentials.create(accessKey, secretKey));
+ }
+
+ try {
+ if (customEndpoint != null) {
+ builder = builder.endpointOverride(customEndpoint).forcePathStyle(usePathStyle);
+ builder.httpClient(getHttpClient(customEndpoint, proxy));
+ } else if (ENDPOINT_URI != null) {
+ builder = builder.endpointOverride(ENDPOINT_URI).forcePathStyle(usePathStyle);
+ builder.httpClient(getHttpClient(ENDPOINT_URI, proxy));
+ } else {
+ builder.httpClient(getHttpClient(null, proxy));
+ }
+ } catch (URISyntaxException e) {
+ throw new RuntimeException("Can't create proxy URI", e);
+ }
+
+ return builder.build();
+ }
+
+ /**
+ * Gets the {@link Region} from its name with backward compatibility concerns and defaulting
+ *
+ * @param regionName nullable region name
+ * @return AWS region, never {@code null}, defaults to {@link Region#US_EAST_1} see {@link #DEFAULT_AMAZON_S3_REGION_NAME}.
+ */
+ @NonNull
+ private static Region getRegionFromString(@CheckForNull String regionName) {
+ Region region = null;
+
+ if (regionName == null || regionName.isEmpty()) {
+ region = Region.of(DEFAULT_AMAZON_S3_REGION_NAME);
+ } else {
+ region = Region.of(regionName);
+ }
+ if (region == null) {
+ throw new IllegalStateException("No AWS Region found for name '" + regionName + "' and default region '" + DEFAULT_AMAZON_S3_REGION_NAME + "'");
+ }
+ return region;
+ }
+
+ private static SdkHttpClient getHttpClient(URI serviceEndpoint, ProxyConfiguration proxy) throws URISyntaxException {
+ ApacheHttpClient.Builder httpClient1 = ApacheHttpClient.builder();
+ if (proxy == null && JenkinsJVM.isJenkinsJVM()) {
+ proxy = Jenkins.get().getProxy();
+ }
+ if (shouldUseProxy(proxy, serviceEndpoint)) {
+ software.amazon.awssdk.http.apache.ProxyConfiguration.Builder proxyBuilder = software.amazon.awssdk.http.apache.ProxyConfiguration.builder()
+ .endpoint(new URI("http", null, proxy.getName(), proxy.getPort(), null, null, null));
+ if (isNotEmpty(proxy.getUserName())) {
+ proxyBuilder
+ .username(proxy.getUserName())
+ .password(proxy.getPassword());
+ }
+ httpClient1.proxyConfiguration(proxyBuilder.build());
+ }
+ return httpClient1.build();
+ }
+
+ private static SdkAsyncHttpClient getAsyncHttpClient(URI serviceEndpoint, ProxyConfiguration proxy) {
+ NettyNioAsyncHttpClient.Builder builder = NettyNioAsyncHttpClient.builder().sslProvider(SslProvider.JDK); //make sure we use BouncyCastle when available
+ if (proxy == null && JenkinsJVM.isJenkinsJVM()) {
+ proxy = Jenkins.get().getProxy();
+ }
+ if (shouldUseProxy(proxy, serviceEndpoint)) {
+ software.amazon.awssdk.http.nio.netty.ProxyConfiguration.Builder proxyBuilder = software.amazon.awssdk.http.nio.netty.ProxyConfiguration.builder()
+ .host(proxy.getName()).port(proxy.getPort());
+ if (isNotEmpty(proxy.getUserName())) {
+ proxyBuilder
+ .username(proxy.getUserName())
+ .password(proxy.getPassword());
+ }
+ builder.proxyConfiguration(proxyBuilder.build());
+ }
+ return builder.build();
+ }
+
+ /**
+ * Determines whether the proxy should be used for the given endpoint.
+ * When endpoint is null (standard AWS regions), defaults to using the proxy.
+ */
+ private static boolean shouldUseProxy(ProxyConfiguration proxy, @CheckForNull URI endpoint) {
+ if (proxy == null) {
+ return false;
+ }
+ if (endpoint == null) {
+ return true;
+ }
+ String hostname = endpoint.getHost();
+ if (hostname == null) {
+ return true;
+ }
+ for (Pattern p : proxy.getNoProxyHostPatterns()) {
+ if (p.matcher(hostname).matches()) {
+ return false;
+ }
+ }
+ return true;
+ }
+}
diff --git a/src/main/java/hudson/plugins/s3/Destination.java b/src/main/java/hudson/plugins/s3/Destination.java
new file mode 100644
index 00000000..9a2a128e
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/Destination.java
@@ -0,0 +1,69 @@
+package hudson.plugins.s3;
+
+import hudson.model.Run;
+
+import java.io.Serializable;
+
+
+/**
+ * Provides a way to construct a destination bucket name and object name based
+ * on the bucket name provided by the user.
+ *
+ * The convention implemented here is that a / in a bucket name is used to
+ * construct a structure in the object name. That is, a put of file.txt to bucket name
+ * of "mybucket/v1" will cause the object "v1/file.txt" to be created in the mybucket.
+ *
+ */
+public class Destination implements Serializable {
+ private static final long serialVersionUID = 1L;
+ public final String bucketName;
+ public final String objectName;
+
+ public Destination(final String userBucketName, final String fileName) {
+
+ if (userBucketName == null || fileName == null)
+ throw new IllegalArgumentException("Not defined for null parameters: "+userBucketName+","+fileName);
+
+ final String[] bucketNameArray = userBucketName.split("/", 2);
+ final String s3CompatibleFileName = replaceWindowsBackslashes(fileName);
+
+ bucketName = bucketNameArray[0];
+
+ if (bucketNameArray.length > 1) {
+ objectName = bucketNameArray[1] + "/" + s3CompatibleFileName;
+ } else {
+ objectName = s3CompatibleFileName;
+ }
+ }
+
+ private String replaceWindowsBackslashes(String fileName) {
+ return fileName.replace("\\", "/");
+ }
+
+ @Override
+ public String toString() {
+ return "Destination [bucketName="+bucketName+", objectName="+objectName+"]";
+ }
+
+
+ public static Destination newFromRun(Run run, String bucketName, String fileName, boolean enableFullpath)
+ {
+ final String projectName;
+
+ if (enableFullpath) {
+ projectName = run.getParent().getFullName();
+ }
+ else {
+ projectName = run.getParent().getName();
+ }
+
+ int buildID = run.getNumber();
+ return new Destination(bucketName, "jobs/" + projectName + "/" + buildID + "/" + fileName);
+ }
+
+ public static Destination newFromRun(Run run, S3Artifact artifact)
+ {
+ return newFromRun(run, artifact.getBucket(), artifact.getName(), artifact.useFullProjectName());
+ }
+}
diff --git a/src/main/java/hudson/plugins/s3/Entry.java b/src/main/java/hudson/plugins/s3/Entry.java
new file mode 100644
index 00000000..c05a2e68
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/Entry.java
@@ -0,0 +1,145 @@
+package hudson.plugins.s3;
+
+import hudson.Extension;
+import hudson.model.Describable;
+import hudson.model.Descriptor;
+import hudson.util.ListBoxModel;
+import org.kohsuke.stapler.DataBoundConstructor;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3Client;
+
+import java.util.List;
+
+public final class Entry implements Describable<Entry> {
+
+ /**
+ * Destination bucket for the copy. Can contain macros.
+ */
+ public String bucket;
+ /**
+ * File name relative to the workspace root to upload.
+ * Can contain macros and wildcards.
+ */
+ public String sourceFile;
+ /**
+ * File name relative to the workspace root to be excluded from upload.
+ * Can contain macros and wildcards.
+ */
+ public String excludedFile;
+ /**
+ * options for x-amz-storage-class can be STANDARD, STANDARD_IA, or REDUCED_REDUNDANCY
+ */
+ static final String[] storageClasses = {"STANDARD", "STANDARD_IA", "REDUCED_REDUNDANCY"};
+ /**
+ * what x-amz-storage-class is currently set
+ */
+ public String storageClass;
+ /**
+ * Regions Values
+ */
+ public static final List<Region> regions = S3Client.serviceMetadata().regions();
+ /**
+ * Stores the Region Value
+ */
+ public String selectedRegion;
+
+ /**
+ * Do not publish the artifacts when build fails
+ */
+ public boolean noUploadOnFailure;
+
+ /**
+ * Upload either from the slave or the master
+ */
+ public boolean uploadFromSlave;
+
+ /**
+ * Let Jenkins manage the S3 uploaded artifacts
+ */
+ public boolean managedArtifacts;
+
+ /**
+ * Use S3 server side encryption when uploading the artifacts
+ */
+ public boolean useServerSideEncryption;
+
+ /**
+ * Flatten directories
+ */
+ public boolean flatten;
+
+ /**
+ * use GZIP to compress files
+ */
+ public boolean gzipFiles;
+
+ /**
+ * show content of entity directly in browser
+ */
+ public boolean showDirectlyInBrowser;
+
+ /**
+ * Don't delete artifacts in Amazon after job was rotated
+ */
+
+ public boolean keepForever;
+
+ /**
+ * Metadata overrides
+ */
+ public List<MetadataPair> userMetadata;
+
+ @DataBoundConstructor
+ public Entry(String bucket, String sourceFile, String excludedFile, String storageClass, String selectedRegion,
+ boolean noUploadOnFailure, boolean uploadFromSlave, boolean managedArtifacts,
+ boolean useServerSideEncryption, boolean flatten, boolean gzipFiles, boolean keepForever,
+ boolean showDirectlyInBrowser, List<MetadataPair> userMetadata) {
+ this.bucket = bucket;
+ this.sourceFile = sourceFile;
+ this.excludedFile = excludedFile;
+ this.storageClass = storageClass;
+ this.selectedRegion = selectedRegion;
+ this.noUploadOnFailure = noUploadOnFailure;
+ this.uploadFromSlave = uploadFromSlave;
+ this.managedArtifacts = managedArtifacts;
+ this.useServerSideEncryption = useServerSideEncryption;
+ this.flatten = flatten;
+ this.gzipFiles = gzipFiles;
+ this.keepForever = keepForever;
+ this.userMetadata = userMetadata;
+ this.showDirectlyInBrowser = showDirectlyInBrowser;
+ }
+
+ @Override
+ public Descriptor<Entry> getDescriptor() {
+ return DESCRIPOR;
+ }
+
+ @Extension
+ public static final DescriptorImpl DESCRIPOR = new DescriptorImpl();
+
+ public static class DescriptorImpl extends Descriptor<Entry> {
+
+ @Override
+ public String getDisplayName() {
+ return "File to upload";
+ }
+
+ public ListBoxModel doFillStorageClassItems() {
+ final ListBoxModel model = new ListBoxModel();
+ for (String s : storageClasses) {
+ model.add(s, s);
+ }
+ return model;
+ }
+
+ public ListBoxModel doFillSelectedRegionItems() {
+ final ListBoxModel model = new ListBoxModel();
+ for (Region r : regions) {
+ model.add(r.id(), r.id());
+ }
+ return model;
+ }
+ }
+
+}
diff --git a/src/main/java/hudson/plugins/s3/FileHelper.java b/src/main/java/hudson/plugins/s3/FileHelper.java
new file mode 100644
index 00000000..58df7d7f
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/FileHelper.java
@@ -0,0 +1,63 @@
+package hudson.plugins.s3;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import org.apache.tools.ant.types.selectors.FilenameSelector;
+
+import java.io.File;
+
+public class FileHelper {
+
+ @SuppressFBWarnings("DMI_HARDCODED_ABSOLUTE_FILENAME")
+ public static boolean selected(String includeFilter, String excludeFilter, String filename) {
+ if (includeFilter == null) {
+ return false;
+ }
+
+ final FilenameSelector positiveSelector = new FilenameSelector();
+ final FilenameSelector negativeSelector = new FilenameSelector();
+
+ if (excludeFilter != null) {
+ final String[] excludeFilters = excludeFilter.split(",");
+
+ for (String exclude : excludeFilters) {
+ negativeSelector.setName(exclude.trim());
+
+ if (negativeSelector.isSelected(new File("/"), filename, null)) {
+ return false;
+ }
+ }
+ }
+
+ final String[] includeFilters = includeFilter.split(",");
+
+ for (String include : includeFilters) {
+ positiveSelector.setName(include.trim());
+
+ if (positiveSelector.isSelected(new File("/"), filename, null))
+ return true;
+ }
+
+ return false;
+ }
+
+ public static int getSearchPathLength(String workSpace, String filterExpanded, boolean alwaysKeepParentDirectory) {
+ if (alwaysKeepParentDirectory) {
+ return workSpace.length() + 1;
+ }
+
+ final File file1 = new File(workSpace);
+ final File file2 = new File(file1, filterExpanded);
+
+ final String pathWithFilter = file2.getPath();
+
+ final int indexOfWildCard = pathWithFilter.indexOf('*');
+
+
+ if (indexOfWildCard > 0) {
+ int folderNameEnds = pathWithFilter.substring(0, indexOfWildCard).lastIndexOf(File.separatorChar);
+ return pathWithFilter.substring(0, folderNameEnds).length() + 1;
+ } else {
+ return file2.getParent().length() + 1;
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/main/java/hudson/plugins/s3/FingerprintRecord.java b/src/main/java/hudson/plugins/s3/FingerprintRecord.java
new file mode 100644
index 00000000..bacbfc38
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/FingerprintRecord.java
@@ -0,0 +1,72 @@
+package hudson.plugins.s3;
+
+import hudson.model.Fingerprint;
+import hudson.model.FingerprintMap;
+import hudson.model.Run;
+import jenkins.model.Jenkins;
+import org.kohsuke.stapler.export.Exported;
+import org.kohsuke.stapler.export.ExportedBean;
+
+import java.io.IOException;
+import java.io.Serializable;
+
+@ExportedBean
+public class FingerprintRecord implements Serializable {
+ private static final long serialVersionUID = 1L;
+ private final boolean produced;
+ private final String md5sum;
+ private final S3Artifact artifact;
+ private boolean keepForever;
+ private boolean showDirectlyInBrowser;
+
+
+ public FingerprintRecord(boolean produced, String bucket, String name, String region, String md5sum) {
+ this.produced = produced;
+ this.artifact = new S3Artifact(region, bucket, name);
+ this.md5sum = md5sum;
+ this.showDirectlyInBrowser = false;
+ this.keepForever = false;
+ }
+
+ Fingerprint addRecord(Run<?, ?> run) throws IOException {
+ final FingerprintMap map = Jenkins.getInstance().getFingerprintMap();
+ return map.getOrCreate(produced ? run : null, artifact.getName(), md5sum);
+ }
+
+ public boolean isKeepForever() {
+ return keepForever;
+ }
+
+ public void setKeepForever(boolean keepForever) {
+ this.keepForever = keepForever;
+ }
+
+ public boolean isShowDirectlyInBrowser() {
+ return showDirectlyInBrowser;
+ }
+
+ public void setShowDirectlyInBrowser(boolean showDirectlyInBrowser) {
+ this.showDirectlyInBrowser = showDirectlyInBrowser;
+ }
+
+ @Exported
+ public String getName() {
+ return artifact.getName();
+ }
+
+ @Exported
+ public String getLink() {
+ //Chrome and IE convert backslash in the URL into forward slashes, need escape with %5c
+ return artifact.getName().replace("\\","%5C");
+ }
+
+ @Exported
+ public String getFingerprint() {
+ return md5sum;
+ }
+
+ @Exported
+ public S3Artifact getArtifact() {
+ return artifact;
+ }
+}
diff --git a/src/main/java/hudson/plugins/s3/MD5.java b/src/main/java/hudson/plugins/s3/MD5.java
new file mode 100644
index 00000000..570d9349
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/MD5.java
@@ -0,0 +1,27 @@
+package hudson.plugins.s3;
+
+import hudson.FilePath;
+import org.apache.commons.codec.digest.DigestUtils;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+
+public class MD5 {
+ public static String generateFromFile(File file) throws IOException {
+ try(InputStream inputStream = new FileInputStream(file.getAbsolutePath())) {
+ return getMD5FromStream(inputStream);
+ }
+ }
+
+ public static String generateFromFile(FilePath file) throws IOException, InterruptedException {
+ try(InputStream inputStream = file.read()) {
+ return getMD5FromStream(inputStream);
+ }
+ }
+
+ private static String getMD5FromStream(InputStream stream) throws IOException {
+ return DigestUtils.md5Hex(stream);
+ }
+}
diff --git a/src/main/java/hudson/plugins/s3/MetadataPair.java b/src/main/java/hudson/plugins/s3/MetadataPair.java
new file mode 100644
index 00000000..b906e117
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/MetadataPair.java
@@ -0,0 +1,42 @@
+package hudson.plugins.s3;
+
+import hudson.Extension;
+import hudson.model.Describable;
+import hudson.model.Descriptor;
+import org.kohsuke.stapler.DataBoundConstructor;
+
+public final class MetadataPair implements Describable<MetadataPair> {
+
+ /**
+ * The key of the user metadata pair to tag an upload with.
+ * Can contain macros.
+ */
+ public String key;
+
+ /**
+ * The key of the user metadata pair to tag an upload with.
+ * Can contain macros.
+ */
+ public String value;
+
+ @DataBoundConstructor
+ public MetadataPair(String key, String value) {
+ this.key = key;
+ this.value = value;
+ }
+
+ public Descriptor<MetadataPair> getDescriptor() {
+ return DESCRIPOR;
+ }
+
+ @Extension
+ public final static DescriptorImpl DESCRIPOR = new DescriptorImpl();
+
+ public static class DescriptorImpl extends Descriptor<MetadataPair> {
+
+ @Override
+ public String getDisplayName() {
+ return "Metadata";
+ }
+ };
+}
diff --git a/src/main/java/hudson/plugins/s3/S3Artifact.java b/src/main/java/hudson/plugins/s3/S3Artifact.java
new file mode 100644
index 00000000..04cc4fec
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/S3Artifact.java
@@ -0,0 +1,55 @@
+package hudson.plugins.s3;
+
+import org.kohsuke.stapler.export.Exported;
+import org.kohsuke.stapler.export.ExportedBean;
+
+import java.io.ObjectStreamException;
+import java.io.Serializable;
+
+@ExportedBean
+public final class S3Artifact implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private final String bucket;
+ private final String name;
+ private final String region;
+ private /*almost final*/ Boolean useFullProjectName;
+
+ public S3Artifact(String region, String bucket, String name) {
+ this.bucket = bucket.intern();
+ this.name = name.intern();
+ this.region = region.intern();
+ this.useFullProjectName = true;
+ }
+
+ /* Old version of this plugin used short name,
+ * so jobs with same name (but from different folders)
+ * could replace each other.
+ * That's why we started to use full name.
+ * But we need to keep backward compatibility */
+ private void readObjectNoData() throws ObjectStreamException {
+ this.useFullProjectName = false;
+ }
+
+ @Exported
+ public String getBucket() {
+ return bucket;
+ }
+
+ @Exported
+ public String getName() {
+ return name;
+ }
+
+ @Exported
+ public String getRegion() {
+ return region;
+ }
+
+ public Boolean useFullProjectName() {
+ if (useFullProjectName == null)
+ return false;
+
+ return useFullProjectName;
+ }
+}
\ No newline at end of file
diff --git a/src/main/java/hudson/plugins/s3/S3ArtifactsAction.java b/src/main/java/hudson/plugins/s3/S3ArtifactsAction.java
new file mode 100644
index 00000000..e8cf6fd5
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/S3ArtifactsAction.java
@@ -0,0 +1,146 @@
+package hudson.plugins.s3;
+
+import hudson.Functions;
+import hudson.model.Run;
+import hudson.util.Secret;
+import jakarta.servlet.ServletException;
+import jenkins.model.RunAction2;
+import jenkins.security.FIPS140;
+import org.kohsuke.stapler.StaplerRequest2;
+import org.kohsuke.stapler.StaplerResponse2;
+import org.kohsuke.stapler.export.Exported;
+import org.kohsuke.stapler.export.ExportedBean;
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.S3Configuration;
+import software.amazon.awssdk.services.s3.model.GetObjectRequest;
+import software.amazon.awssdk.services.s3.presigner.S3Presigner;
+import software.amazon.awssdk.services.s3.presigner.model.GetObjectPresignRequest;
+
+import java.io.File;
+import java.io.IOException;
+import java.time.Duration;
+import java.util.Collections;
+import java.util.List;
+
+import static jakarta.servlet.http.HttpServletResponse.SC_NOT_FOUND;
+import static org.apache.commons.lang.StringUtils.isNotEmpty;
+
+@ExportedBean
+public class S3ArtifactsAction implements RunAction2 {
+ private final Run<?, ?> build; // Compatibility for old versions
+ private final String profile;
+ private final List<FingerprintRecord> artifacts;
+
+ public S3ArtifactsAction(Run<?, ?> run, S3Profile profile, List<FingerprintRecord> artifacts) {
+ this.build = run;
+ this.profile = profile.getName();
+ this.artifacts = artifacts;
+ onLoad(run); // make compact
+ }
+
+ public Run<?, ?> getBuild() {
+ return build;
+ }
+
+ public String getIconFileName() {
+ return hasAccess() ? "fingerprint.png" : null;
+ }
+
+ public String getDisplayName() {
+ return "S3 Artifacts";
+ }
+
+ public String getUrlName() {
+ return hasAccess() ? "s3" : null;
+ }
+
+ private boolean hasAccess () {
+ return !Functions.isArtifactsPermissionEnabled() || build.getParent().hasPermission(Run.ARTIFACTS);
+ }
+
+ @Override
+ public void onLoad(Run<?, ?> r) {
+ }
+
+ public void onAttached(Run r) {
+ }
+
+ public String getProfile() {
+ return profile;
+ }
+
+ @Exported
+ public List<FingerprintRecord> getArtifacts() {
+ if (!hasAccess()) {
+ return Collections.emptyList();
+ }
+ return artifacts;
+ }
+
+ public void doDownload(final StaplerRequest2 request, final StaplerResponse2 response) throws IOException, ServletException {
+ if (Functions.isArtifactsPermissionEnabled()) {
+ build.getParent().checkPermission(Run.ARTIFACTS);
+ }
+ final String restOfPath = request.getRestOfPath();
+ if (restOfPath == null) {
+ return;
+ }
+
+ // skip the leading /
+ final String artifact = restOfPath.substring(1);
+ for (FingerprintRecord record : artifacts) {
+ if (record.getArtifact().getName().equals(artifact)) {
+ final S3Profile s3 = S3BucketPublisher.getProfile(profile);
+ final var client = s3.getClient(record.getArtifact().getRegion());
+ final String url = getDownloadURL(client, s3, build, record);
+ response.sendRedirect2(url);
+ return;
+ }
+ }
+ response.sendError(SC_NOT_FOUND, "This artifact is not available");
+ }
+
+ /**
+ * Generate a signed download request for a redirect from s3/download.
+ *
+ * When the user asks to download a file, we sign a short-lived S3 URL
+ * for them and redirect them to it, so we don't have to proxy for the
+ * download and there's no need for the user to have credentials to
+ * access S3.
+ */
+ private String getDownloadURL(S3Client client, S3Profile s3, Run run, FingerprintRecord record) {
+ final Destination dest = Destination.newFromRun(run, record.getArtifact());
+ S3Presigner.Builder presignerBuilder = S3Presigner.builder()
+ .fipsEnabled(FIPS140.useCompliantAlgorithms())
+ .s3Client(client)
+ .region(Region.of(record.getArtifact().getRegion()));
+ if (ClientHelper.ENDPOINT_URI != null) {
+ presignerBuilder.endpointOverride(ClientHelper.ENDPOINT_URI);
+ }
+ if (s3.isUsePathStyle()) {
+ presignerBuilder.serviceConfiguration(S3Configuration.builder().pathStyleAccessEnabled(true).build());
+ }
+ if (!s3.isUseRole()) {
+ presignerBuilder.credentialsProvider(() -> AwsBasicCredentials.create(s3.getAccessKey(), Secret.toString(s3.getSecretKey())));
+ }
+ try (S3Presigner presigner = presignerBuilder.build()) {
+ GetObjectRequest.Builder builder = GetObjectRequest.builder().bucket(dest.bucketName).key(dest.objectName);
+ if (!record.isShowDirectlyInBrowser()) {
+ // let the browser use the last part of the name, not the full path
+ // when saving.
+ final String fileName = (new File(dest.objectName)).getName().trim();
+ builder.responseContentDisposition(fileName);
+ }
+
+ GetObjectRequest getObjectRequest = builder.build();
+ GetObjectPresignRequest getObjectPresignRequest = GetObjectPresignRequest.builder()
+ .signatureDuration(Duration.ofSeconds(s3.getSignedUrlExpirySeconds()))
+ .getObjectRequest(getObjectRequest).build();
+
+ return presigner.presignGetObject(getObjectPresignRequest).url().toExternalForm();
+
+ }
+ }
+}
diff --git a/src/main/java/hudson/plugins/s3/S3ArtifactsProjectAction.java b/src/main/java/hudson/plugins/s3/S3ArtifactsProjectAction.java
new file mode 100644
index 00000000..4659ac69
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/S3ArtifactsProjectAction.java
@@ -0,0 +1,58 @@
+package hudson.plugins.s3;
+
+import java.util.List;
+
+import hudson.Functions;
+import hudson.model.Action;
+import hudson.model.AbstractProject;
+import hudson.model.Run;
+
+public class S3ArtifactsProjectAction implements Action {
+
+ private final AbstractProject<?, ?> project;
+
+ public S3ArtifactsProjectAction(AbstractProject<?, ?> project) {
+ this.project = project;
+ }
+
+ private Run getLastSuccessfulBuild() {
+ return project.getLastSuccessfulBuild();
+ }
+
+ @SuppressWarnings("unused")
+ public S3ArtifactsAction getLatestDeployedArtifacts() {
+ if (Functions.isArtifactsPermissionEnabled() && !project.hasPermission(Run.ARTIFACTS)) {
+ return null;
+ }
+ Run latestSuccessfulBuild = getLastSuccessfulBuild();
+ if (latestSuccessfulBuild == null) {
+ return null;
+ }
+ List<S3ArtifactsAction> actions = latestSuccessfulBuild.getActions(S3ArtifactsAction.class);
+ if (actions == null || actions.size() == 0) {
+ return null;
+ }
+ return actions.get(actions.size() - 1);
+ }
+
+ @SuppressWarnings("unused")
+ public int getLastSuccessfulNumber() {
+ Run latestSuccessfulBuild = getLastSuccessfulBuild();
+ if (latestSuccessfulBuild == null) {
+ return 0;
+ }
+ return latestSuccessfulBuild.getNumber();
+ }
+
+ public String getIconFileName() {
+ return null;
+ }
+
+ public String getDisplayName() {
+ return null;
+ }
+
+ public String getUrlName() {
+ return null;
+ }
+}
diff --git a/src/main/java/hudson/plugins/s3/S3BucketPublisher.java b/src/main/java/hudson/plugins/s3/S3BucketPublisher.java
new file mode 100644
index 00000000..ef9df1fc
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/S3BucketPublisher.java
@@ -0,0 +1,650 @@
+package hudson.plugins.s3;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import hudson.AbortException;
+import hudson.Extension;
+import hudson.FilePath;
+import hudson.Launcher;
+import hudson.Util;
+import hudson.model.AbstractProject;
+import hudson.model.Action;
+import hudson.model.Fingerprint;
+import hudson.model.Job;
+import hudson.model.Item;
+import hudson.model.ItemGroup;
+import hudson.model.Result;
+import hudson.model.Run;
+import hudson.model.TaskListener;
+import hudson.model.listeners.ItemListener;
+import hudson.model.listeners.RunListener;
+import hudson.tasks.BuildStepDescriptor;
+import hudson.tasks.BuildStepMonitor;
+import hudson.tasks.Fingerprinter.FingerprintAction;
+import hudson.tasks.Publisher;
+import hudson.tasks.Recorder;
+import hudson.util.CopyOnWriteList;
+import hudson.util.FormFillFailure;
+import hudson.util.FormValidation;
+import hudson.util.ListBoxModel;
+import hudson.util.Secret;
+import jenkins.model.Jenkins;
+import jenkins.tasks.SimpleBuildStep;
+import net.sf.json.JSONArray;
+import net.sf.json.JSONObject;
+import org.apache.commons.lang.StringUtils;
+import org.jenkinsci.Symbol;
+import org.kohsuke.accmod.Restricted;
+import org.kohsuke.accmod.restrictions.DoNotUse;
+import org.kohsuke.stapler.AncestorInPath;
+import org.kohsuke.stapler.DataBoundConstructor;
+import org.kohsuke.stapler.QueryParameter;
+import org.kohsuke.stapler.StaplerRequest2;
+import org.kohsuke.stapler.interceptor.RequirePOST;
+import software.amazon.awssdk.core.exception.SdkException;
+import software.amazon.awssdk.regions.Region;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+public final class S3BucketPublisher extends Recorder implements SimpleBuildStep {
+
+ private String profileName;
+ @Extension
+ public static final DescriptorImpl DESCRIPTOR = new DescriptorImpl();
+
+ private final List<Entry> entries;
+
+ private boolean dontWaitForConcurrentBuildCompletion;
+ private boolean dontSetBuildResultOnFailure;
+
+ /**
+ * In-memory representation of console log level.
+ *
+ * @see #consoleLogLevelString
+ */
+ private transient Level consoleLogLevel;
+
+ /**
+ * Serial form of console log level.
+ *
+ * @see #consoleLogLevel
+ */
+ private String consoleLogLevelString;
+
+ private Result pluginFailureResultConstraint;
+ /**
+ * User metadata key/value pairs to tag the upload with.
+ */
+ private /*almost final*/ List<MetadataPair> userMetadata;
+
+ @DataBoundConstructor
+ public S3BucketPublisher(String profileName, List<Entry> entries, List<MetadataPair> userMetadata,
+ boolean dontWaitForConcurrentBuildCompletion, String consoleLogLevel, String pluginFailureResultConstraint,
+ boolean dontSetBuildResultOnFailure) {
+ if (profileName == null) {
+ // defaults to the first one
+ final S3Profile[] sites = DESCRIPTOR.getProfiles();
+ if (sites.length > 0)
+ profileName = sites[0].getName();
+ }
+
+ if (entries == null || entries.isEmpty()) {
+ throw new IllegalArgumentException("No files to upload specified.");
+ }
+
+ this.profileName = profileName;
+ this.entries = entries;
+
+ this.userMetadata = userMetadata;
+ if (this.userMetadata == null) {
+ this.userMetadata = new ArrayList<>();
+ }
+
+ this.dontWaitForConcurrentBuildCompletion = dontWaitForConcurrentBuildCompletion;
+ this.dontSetBuildResultOnFailure = dontSetBuildResultOnFailure;
+ this.consoleLogLevel = parseLevel(consoleLogLevel);
+ this.consoleLogLevelString = this.consoleLogLevel.getName();
+ if (pluginFailureResultConstraint == null) {
+ this.pluginFailureResultConstraint = Result.FAILURE;
+ } else {
+ this.pluginFailureResultConstraint = Result.fromString(pluginFailureResultConstraint);
+ }
+ }
+
+ private Level parseLevel(String lvl) {
+ if (lvl == null)
+ lvl = "";
+ switch (lvl) {
+ case "WARNING": return Level.WARNING;
+ case "SEVERE": return Level.SEVERE;
+ default: return Level.INFO;
+ }
+ }
+
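+ /**
+ * Called after deserialization: fills in defaults for fields added in
+ * newer plugin versions and keeps the two representations of the
+ * console log level in sync.
+ */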
+ protected Object readResolve() {
+ if (userMetadata == null)
+ userMetadata = new ArrayList<>();
+
+ if (pluginFailureResultConstraint == null)
+ pluginFailureResultConstraint = Result.FAILURE;
+
+ if (consoleLogLevel != null && consoleLogLevelString == null) {
+ consoleLogLevelString = consoleLogLevel.getName();
+ }
+
+ if (consoleLogLevel == null && consoleLogLevelString != null) {
+ consoleLogLevel = parseLevel(consoleLogLevelString);
+ }
+
+ if (consoleLogLevel == null)
+ consoleLogLevel = Level.INFO;
+
+ if (consoleLogLevelString == null) {
+ consoleLogLevelString = consoleLogLevel.getName();
+ }
+
+ return this;
+ }
+
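+ /**
+ * Ensures the plugin never sets a build result worse than the
+ * configured constraint; NOT_BUILT and ABORTED pass through untouched.
+ */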
+ private Result constrainResult(Result r, @NonNull TaskListener listener) {
+ final PrintStream console = listener.getLogger();
+ // pass through NOT_BUILT and ABORTED
+ if (r.isWorseThan(Result.FAILURE)) {
+ return r;
+ } else if (r.isWorseThan(pluginFailureResultConstraint)) {
+ log(console, "Build result constrained by configuration to: " + pluginFailureResultConstraint + " from: " + Result.UNSTABLE);
+ return pluginFailureResultConstraint;
+ }
+ return r;
+ }
+
+ @SuppressWarnings("unused")
+ public List<Entry> getEntries() {
+ return entries;
+ }
+
+ @SuppressWarnings("unused")
+ public List<MetadataPair> getUserMetadata() {
+ return userMetadata;
+ }
+
+ @SuppressWarnings("unused")
+ public String getProfileName() {
+ return this.profileName;
+ }
+
+ /**
+ * for data binding only
+ *
+ * @return pluginFailureResultConstraint string representation
+ */
+ @SuppressWarnings("unused")
+ public String getPluginFailureResultConstraint() {
+ if (pluginFailureResultConstraint == null) {
+ return Result.FAILURE.toString();
+ }
+ return pluginFailureResultConstraint.toString();
+ }
+
+ @SuppressWarnings("unused")
+ public boolean isDontWaitForConcurrentBuildCompletion() {
+ return dontWaitForConcurrentBuildCompletion;
+ }
+
+ @SuppressWarnings("unused")
+ public boolean isDontSetBuildResultOnFailure() {
+ return dontSetBuildResultOnFailure;
+ }
+
+ /**
+ * for data binding only
+ *
+ * @return consoleLogLevel string representation
+ */
+ @SuppressWarnings("unused")
+ public String getConsoleLogLevel() {
+ return consoleLogLevelString;
+ }
+
+ public S3Profile getProfile() {
+ return getProfile(profileName);
+ }
+
+ public static S3Profile getProfile(String profileName) {
+ final S3Profile[] profiles = DESCRIPTOR.getProfiles();
+
+ if (profileName == null && profiles.length > 0)
+ // default
+ return profiles[0];
+
+ for (S3Profile profile : profiles) {
+ if (profile.getName().equals(profileName))
+ return profile;
+ }
+
+ throw new IllegalArgumentException("Can't find profile: " + profileName);
+ }
+
+ @Override @NonNull
+ public Collection<? extends Action> getProjectActions(AbstractProject<?, ?> project) {
+ return ImmutableList.of(new S3ArtifactsProjectAction(project));
+ }
+
+ private void log(final PrintStream logger, final String message) {
+ log(Level.INFO, logger, message);
+ }
+
+ private void log(final Level level, final PrintStream logger, final String message) {
+ if (level.intValue() >= consoleLogLevel.intValue()) {
+ logger.println(StringUtils.defaultString(getDescriptor().getDisplayName()) + ' ' + message);
+ }
+ }
+
+ @Override
+ public void perform(@NonNull Run<?, ?> run, @NonNull FilePath ws, @NonNull Launcher launcher, @NonNull TaskListener listener)
+ throws InterruptedException, IOException {
+ final PrintStream console = listener.getLogger();
+ if (Result.ABORTED.equals(run.getResult())) {
+ log(Level.SEVERE, console, "Skipping publishing on S3 because build aborted");
+ return;
+ }
+
+ if (run.isBuilding()) {
+ log(console, "Build is still running");
+ }
+
+ final S3Profile profile = getProfile();
+
+ if (profile == null) {
+ log(Level.SEVERE, console, "No S3 profile is configured.");
+ if (!isDontSetBuildResultOnFailure()) {
+ run.setResult(constrainResult(Result.UNSTABLE, listener));
+ return;
+ }
+ throw new AbortException("No S3 profile is configured.");
+ }
+
+ log(console, "Using S3 profile: " + profile.getName());
+
+ try {
+ final Map<String, String> envVars = run.getEnvironment(listener);
+ final Map<String, String> record = Maps.newHashMap();
+ final List<FingerprintRecord> artifacts = new CopyOnWriteArrayList<>();
+
+ for (Entry entry : entries) {
+ if (entry.noUploadOnFailure && Result.FAILURE.equals(run.getResult())) {
+ // build failed. don't post
+ log(Level.WARNING, console, "Skipping publishing on S3 because build failed");
+ continue;
+ }
+
+ final String expanded = Util.replaceMacro(entry.sourceFile, envVars);
+ final String exclude = Util.replaceMacro(entry.excludedFile, envVars);
+ if (expanded == null) {
+ throw new IOException("No source file path configured");
+ }
+
+ final String bucket = Util.replaceMacro(entry.bucket, envVars);
+ final String storageClass = Util.replaceMacro(entry.storageClass, envVars);
+ final String selRegion = entry.selectedRegion;
+
+ final List<FilePath> paths = new ArrayList<>();
+ final List<String> filenames = new ArrayList<>();
+
+ for (String startPath : expanded.split(",")) {
+ for (FilePath path : ws.list(startPath, exclude)) {
+ if (path.isDirectory()) {
+ throw new IOException(path + " is a directory");
+ }
+
+ paths.add(path);
+ final int workspacePath = FileHelper.getSearchPathLength(ws.getRemote(),
+ startPath.trim(),
+ getProfile().isKeepStructure());
+ filenames.add(getFilename(path, entry.flatten, workspacePath));
+ log(console, "bucket=" + bucket + ", file=" + path.getName() + " region=" + selRegion + ", will be uploaded from slave=" + entry.uploadFromSlave + " managed=" + entry.managedArtifacts + " , server encryption " + entry.useServerSideEncryption);
+ }
+ }
+
+ if (paths.isEmpty()) {
+ printDiagnostics(ws, console, expanded);
+ continue;
+ }
+
+
+ final Map<String, String> escapedMetadata = buildMetadata(envVars, entry);
+
+ final List<FingerprintRecord> records = Lists.newArrayList();
+ final List<FingerprintRecord> fingerprints = profile.upload(run, bucket, paths, filenames, escapedMetadata, storageClass, selRegion, entry.uploadFromSlave, entry.managedArtifacts, entry.useServerSideEncryption, entry.gzipFiles);
+
+ for (FingerprintRecord fingerprintRecord : fingerprints) {
+ records.add(fingerprintRecord);
+ fingerprintRecord.setKeepForever(entry.keepForever);
+ fingerprintRecord.setShowDirectlyInBrowser(entry.showDirectlyInBrowser);
+ }
+
+ if (entry.managedArtifacts) {
+ artifacts.addAll(fingerprints);
+ fillFingerprints(run, listener, record, fingerprints);
+ }
+ }
+
+ // don't bother adding actions if none of the artifacts are managed
+ if (!artifacts.isEmpty()) {
+ addS3ArtifactsAction(run, profile, artifacts);
+ addFingerprintAction(run, record);
+ }
+ } catch (IOException e) {
+ if (!isDontSetBuildResultOnFailure()) {
+ e.printStackTrace(listener.error("Failed to upload files"));
+ run.setResult(constrainResult(Result.UNSTABLE, listener));
+ } else {
+ throw new IOException("Failed to upload files", e);
+ }
+
+ }
+ }
+
+ private void addS3ArtifactsAction(Run<?, ?> run, S3Profile profile, List<FingerprintRecord> artifacts) {
+ S3ArtifactsAction existingAction = run.getAction(S3ArtifactsAction.class);
+ if (existingAction != null) {
+ existingAction.getArtifacts().addAll(artifacts);
+ } else {
+ run.addAction(new S3ArtifactsAction(run, profile, artifacts));
+ }
+ }
+
+ private void addFingerprintAction(Run<?, ?> run, Map<String, String> record) {
+ FingerprintAction existingAction = run.getAction(FingerprintAction.class);
+ if (existingAction != null) {
+ existingAction.add(record);
+ } else {
+ run.addAction(new FingerprintAction(run, record));
+ }
+ }
+
+ private void printDiagnostics(@NonNull FilePath ws, PrintStream console, String expanded) throws IOException {
+ log(Level.WARNING, console, "No file(s) found: " + expanded);
+ try {
+ final String error = ws.validateAntFileMask(expanded, 100);
+ if (error != null) {
+ log(Level.WARNING, console, error);
+ }
+ } catch (InterruptedException ignored) {
+ // don't fail the whole step just because validateAntFileMask
+ // gave up searching for alternative paths within its allotted
+ // bounds limit
+ }
+ }
+
+ @SuppressFBWarnings("RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")
+ private void fillFingerprints(@NonNull Run<?, ?> run, @NonNull TaskListener listener, Map<String, String> record, List<FingerprintRecord> fingerprints) throws IOException {
+ for (FingerprintRecord r : fingerprints) {
+ final Fingerprint fp = r.addRecord(run);
+ if (fp == null) {
+ listener.error("Fingerprinting failed for " + r.getName());
+ continue;
+ }
+ fp.addFor(run);
+ record.put(r.getName(), fp.getHashString());
+ }
+ }
+
+ private Map<String, String> buildMetadata(Map<String, String> envVars, Entry entry) {
+ final Map<String, String> mergedMetadata = new HashMap<>();
+
+ if (userMetadata != null) {
+ for (MetadataPair pair : userMetadata) {
+ mergedMetadata.put(pair.key, pair.value);
+ }
+ }
+
+ if (entry.userMetadata != null) {
+ for (MetadataPair pair : entry.userMetadata) {
+ mergedMetadata.put(pair.key, pair.value);
+ }
+ }
+
+ final Map<String, String> escapedMetadata = new HashMap<>();
+
+ for (Map.Entry<String, String> mapEntry : mergedMetadata.entrySet()) {
+ escapedMetadata.put(
+ Util.replaceMacro(mapEntry.getKey(), envVars),
+ Util.replaceMacro(mapEntry.getValue(), envVars));
+ }
+
+ return escapedMetadata;
+ }
+
+ private String getFilename(FilePath src, boolean flatten, int searchIndex) {
+ final String fileName;
+ if (flatten) {
+ fileName = src.getName();
+ } else {
+ final String relativeFileName = src.getRemote();
+ fileName = relativeFileName.substring(searchIndex);
+ }
+ return fileName;
+ }
+
+ @Extension
+ public static final class S3DeletedItemListener extends ItemListener {
+ @Override
+ public void onDeleted(Item item) {
+ if (item instanceof Job<?, ?> job) {
+ handleJobDeletion(job);
+ } else if (item instanceof ItemGroup<?> itemGroup) {
+ handleItemGroupDeletion(itemGroup);
+ }
+ }
+
+ private void handleJobDeletion(Job<?, ?> job) {
+ for (Run<?, ?> run : job.getBuilds()) {
+ S3ArtifactsAction artifacts = run.getAction(S3ArtifactsAction.class);
+ if (artifacts != null) {
+ S3Profile profile = S3BucketPublisher.getProfile(artifacts.getProfile());
+ if (profile.isDeleteArtifactsRecursively()) {
+ for (FingerprintRecord record : artifacts.getArtifacts()) {
+ if (!record.isKeepForever()) {
+ try {
+ profile.delete(run, record);
+ } catch (Exception e) {
+ Logger.getLogger(S3DeletedItemListener.class.getName())
+ .log(Level.WARNING, "Failed to delete S3 artifact: " + record.getName(), e);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private void handleItemGroupDeletion(ItemGroup<?> itemGroup) {
+ for (Item item : itemGroup.getItems()) {
+ if (item instanceof Job<?, ?> job) {
+ handleJobDeletion(job);
+ } else if (item instanceof ItemGroup<?> childItemGroup) {
+ handleItemGroupDeletion(childItemGroup);
+ }
+ }
+ }
+ }
+
+ @Extension
+ public static final class S3DeletedJobListener extends RunListener<Run<?, ?>> {
+ @Override
+ public void onDeleted(Run<?, ?> run) {
+ final S3ArtifactsAction artifacts = run.getAction(S3ArtifactsAction.class);
+ if (artifacts != null) {
+ final S3Profile profile = S3BucketPublisher.getProfile(artifacts.getProfile());
+ for (FingerprintRecord record : artifacts.getArtifacts()) {
+ if (!record.isKeepForever()) {
+ profile.delete(run, record);
+ }
+ }
+ }
+ }
+ }
+
+ public BuildStepMonitor getRequiredMonitorService() {
+ return dontWaitForConcurrentBuildCompletion ? BuildStepMonitor.NONE : BuildStepMonitor.STEP;
+ }
+
+ @Symbol("s3Upload")
+ public static final class DescriptorImpl extends BuildStepDescriptor<Publisher> {
+
+ private final CopyOnWriteList<S3Profile> profiles = new CopyOnWriteList<>();
+ static final Level[] consoleLogLevels = { Level.INFO, Level.WARNING, Level.SEVERE };
+ private static final Logger LOGGER = Logger.getLogger(DescriptorImpl.class.getName());
+ private static final Result[] pluginFailureResultConstraints = { Result.FAILURE, Result.UNSTABLE, Result.SUCCESS };
+
+ public DescriptorImpl(Class<? extends Publisher> clazz) {
+ super(clazz);
+ load();
+ }
+
+ @Restricted(DoNotUse.class) @Deprecated // This field is unused but has been stored in global config
+ public transient List<Region> regions = Entry.regions;
+
+ @Restricted(DoNotUse.class) @Deprecated // This field is unused but has been stored in global config
+ public transient String[] storageClasses = Entry.storageClasses;
+
+ public DescriptorImpl() {
+ this(S3BucketPublisher.class);
+ }
+
+ @Override
+ public String getDisplayName() {
+ return "Publish artifacts to S3 Bucket";
+ }
+
+ @Override
+ public String getHelpFile() {
+ return "/plugin/s3/help.html";
+ }
+
+ @Override
+ public boolean configure(StaplerRequest2 req, JSONObject json) {
+ final JSONArray array = json.optJSONArray("profile");
+ if (array != null) {
+ profiles.replaceBy(req.bindJSONToList(S3Profile.class, array));
+ } else {
+ profiles.replaceBy(req.bindJSON(S3Profile.class, json.getJSONObject("profile")));
+ }
+ save();
+ return true;
+ }
+
+ @SuppressWarnings("unused")
+ public ListBoxModel doFillProfileNameItems(@AncestorInPath Item item) {
+ final ListBoxModel model = new ListBoxModel();
+ if (item != null && !item.hasPermission(Item.CONFIGURE)) {
+ return model;
+ }
+ if (item == null && !Jenkins.get().hasPermission(Item.CREATE)) {
+ // accessing from $JENKINS_URL/pipeline-syntax
+ return model;
+ }
+
+ for (S3Profile profile : profiles) {
+ model.add(profile.getName(), profile.getName());
+ }
+ return model;
+ }
+
+ public ListBoxModel doFillConsoleLogLevelItems() {
+ final ListBoxModel model = new ListBoxModel();
+ for (Level l : consoleLogLevels) {
+ model.add(l.getName(), l.getLocalizedName());
+ }
+ return model;
+ }
+
+ @SuppressWarnings("unused")
+ public ListBoxModel doFillPluginFailureResultConstraintItems() {
+ final ListBoxModel model = new ListBoxModel();
+ for (Result r : pluginFailureResultConstraints) {
+ model.add(r.toString(), r.toString());
+ }
+ return model;
+ }
+
+ @SuppressWarnings("unused")
+ public void replaceProfiles(List<S3Profile> profiles) {
+ this.profiles.replaceBy(profiles);
+ save();
+ }
+
+ public Level[] getConsoleLogLevels() {
+ return consoleLogLevels.clone();
+ }
+
+ public S3Profile[] getProfiles() {
+ final S3Profile[] profileArray = new S3Profile[profiles.size()];
+ return profiles.toArray(profileArray);
+ }
+
+ public Result[] getPluginFailureResultConstraints() {
+ return pluginFailureResultConstraints.clone();
+ }
+
+ @SuppressWarnings("unused")
+ @RequirePOST
+ public FormValidation doLoginCheck(@QueryParameter String name, @QueryParameter String accessKey,
+ @QueryParameter Secret secretKey, @QueryParameter boolean useRole,
+ @QueryParameter boolean usePathStyle) {
+ Jenkins.get().checkPermission(Jenkins.ADMINISTER);
+
+ final String checkedName = Util.fixNull(name);
+ final String checkedAccessKey = Util.fixNull(accessKey);
+ final String checkedSecretKey = secretKey != null ? secretKey.getPlainText() : "";
+
+ final boolean couldBeValidated = !checkedName.isEmpty() && !checkedAccessKey.isEmpty() && !checkedSecretKey.isEmpty();
+
+ if (!couldBeValidated) {
+ if (checkedName.isEmpty()) {
+ return FormValidation.ok("Please, enter name");
+ }
+
+ if (useRole) {
+ return FormValidation.ok();
+ }
+
+ if (checkedAccessKey.isEmpty()) {
+ return FormValidation.ok("Please, enter accessKey");
+ }
+
+ if (checkedSecretKey.isEmpty()) {
+ return FormValidation.ok("Please, enter secretKey");
+ }
+ }
+
+ final String defaultRegion = ClientHelper.DEFAULT_AMAZON_S3_REGION_NAME;
+
+ try (var client = ClientHelper.createClient(checkedAccessKey, checkedSecretKey, useRole, defaultRegion, Jenkins.get().getProxy(), usePathStyle)) {
+ client.listBuckets();
+ } catch (SdkException e) {
+ LOGGER.log(Level.SEVERE, e.getMessage(), e);
+ return FormValidation.error("Can't connect to S3 service: " + e.getMessage());
+ }
+ return FormValidation.ok("Check passed!");
+ }
+
+ @Override
+ public boolean isApplicable(Class<? extends AbstractProject> aClass) {
+ return true;
+ }
+ }
+}
diff --git a/src/main/java/hudson/plugins/s3/S3CopyArtifact.java b/src/main/java/hudson/plugins/s3/S3CopyArtifact.java
new file mode 100644
index 00000000..4e1b1e21
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/S3CopyArtifact.java
@@ -0,0 +1,476 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2004-2011, Sun Microsystems, Inc., Alan Harder
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package hudson.plugins.s3;
+
+import com.google.common.collect.Maps;
+import hudson.DescriptorExtensionList;
+import hudson.EnvVars;
+import hudson.Extension;
+import hudson.FilePath;
+import hudson.Launcher;
+import hudson.Util;
+import hudson.console.HyperlinkNote;
+import hudson.matrix.MatrixBuild;
+import hudson.matrix.MatrixProject;
+import hudson.maven.MavenModuleSet;
+import hudson.maven.MavenModuleSetBuild;
+import hudson.model.AbstractBuild;
+import hudson.model.AbstractProject;
+import hudson.model.Build;
+import hudson.model.Descriptor;
+import hudson.model.EnvironmentContributingAction;
+import hudson.model.Fingerprint;
+import hudson.model.FingerprintMap;
+import hudson.model.Item;
+import hudson.model.Job;
+import hudson.model.Project;
+import hudson.model.Result;
+import hudson.model.Run;
+import hudson.model.TaskListener;
+import hudson.model.listeners.ItemListener;
+import hudson.model.listeners.RunListener;
+import hudson.plugins.copyartifact.BuildFilter;
+import hudson.plugins.copyartifact.BuildSelector;
+import hudson.plugins.copyartifact.ParametersBuildFilter;
+import hudson.plugins.copyartifact.StatusBuildSelector;
+import hudson.plugins.copyartifact.WorkspaceSelector;
+import hudson.security.AccessControlled;
+import hudson.security.SecurityRealm;
+import hudson.tasks.BuildStepDescriptor;
+import hudson.tasks.Builder;
+import hudson.tasks.Fingerprinter.FingerprintAction;
+import hudson.util.DescribableList;
+import hudson.util.FormValidation;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.text.MessageFormat;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import jenkins.model.Jenkins;
+
+import jenkins.tasks.SimpleBuildStep;
+import org.acegisecurity.GrantedAuthority;
+import org.acegisecurity.providers.UsernamePasswordAuthenticationToken;
+import org.jenkinsci.Symbol;
+import org.kohsuke.stapler.AncestorInPath;
+import org.kohsuke.stapler.DataBoundConstructor;
+import org.kohsuke.stapler.QueryParameter;
+
+import edu.umd.cs.findbugs.annotations.NonNull;
+
+/**
+ * This is a S3 variant of the CopyArtifact plugin:
+ * Build step to copy artifacts from another project.
+ * @author Alan Harder
+ */
+public class S3CopyArtifact extends Builder implements SimpleBuildStep {
+
+ private String projectName;
+ private final String filter;
+ private final String excludeFilter;
+ private final String target;
+
+ private /*almost final*/ BuildSelector selector;
+ private final Boolean flatten;
+ private final Boolean optional;
+
+ private static final BuildSelector DEFAULT_BUILD_SELECTOR = new StatusBuildSelector(true);
+
+ @DataBoundConstructor
+ public S3CopyArtifact(String projectName, BuildSelector buildSelector, String filter,
+ String excludeFilter, String target, boolean flatten, boolean optional) {
+ // Prevents both invalid values and access to artifacts of projects which this user cannot see.
+ // If value is parameterized, it will be checked when build runs.
+ if (projectName.indexOf('$') < 0 && new JobResolver(projectName).job == null)
+ projectName = ""; // Ignore/clear bad value to avoid ugly 500 page
+ this.projectName = projectName;
+
+ this.selector = buildSelector;
+ if (this.selector == null) {
+ this.selector = DEFAULT_BUILD_SELECTOR;
+ }
+
+ this.filter = Util.fixNull(filter).trim();
+ this.excludeFilter = Util.fixNull(excludeFilter).trim();
+ this.target = Util.fixNull(target).trim();
+ this.flatten = flatten ? Boolean.TRUE : null;
+ this.optional = optional ? Boolean.TRUE : null;
+ }
+
+ public String getProjectName() {
+ return projectName;
+ }
+
+ public BuildSelector getBuildSelector() {
+ return selector;
+ }
+
+ public String getFilter() {
+ return filter;
+ }
+ public String getExcludeFilter() {
+ return excludeFilter;
+ }
+
+ public String getTarget() {
+ return target;
+ }
+
+ public boolean isFlatten() {
+ return flatten != null && flatten;
+ }
+
+ public boolean isOptional() {
+ return optional != null && optional;
+ }
+
+ private void setResult(@NonNull Run<?, ?> run, boolean isOk) {
+ if (isOptional()) {
+ return;
+ }
+
+ if (isOk)
+ run.setResult(Result.SUCCESS);
+ else
+ run.setResult(Result.FAILURE);
+ }
+
+ private static boolean isMavenPluginInstalled() {
+ Jenkins instance = Jenkins.getInstanceOrNull();
+ return instance != null && instance.getPlugin("maven-plugin") != null;
+ }
+
+ @Override
+ public void perform(@NonNull Run<?, ?> dst, @NonNull FilePath targetDir, @NonNull Launcher launcher, @NonNull TaskListener listener) throws InterruptedException, IOException {
+ final PrintStream console = listener.getLogger();
+ String expandedProject = projectName;
+ String includeFilter = getFilter();
+ String excludeFilter = getExcludeFilter();
+
+ try {
+ final EnvVars env = dst.getEnvironment(listener);
+ expandedProject = env.expand(projectName);
+ final JobResolver job = new JobResolver(expandedProject);
+ if (job.job != null && !expandedProject.equals(projectName)
+ // If projectName is parameterized, a permission check on the source project is needed.
+ // We would like to check whether the user who started the build has permission, but we
+ // cannot obtain an Authentication object for an arbitrary user; instead, only allow
+ // parameters to select jobs that are accessible to all authenticated users.
+ && !job.job.getACL().hasPermission(
+ new UsernamePasswordAuthenticationToken("authenticated", "",
+ new GrantedAuthority[]{ SecurityRealm.AUTHENTICATED_AUTHORITY }),
+ Item.READ)) {
+ job.job = null; // Disallow access
+ }
+ if (job.job == null) {
+ console.println(Messages.CopyArtifact_MissingProject(expandedProject));
+ setResult(dst, false);
+ return;
+ }
+ final Run<?, ?> src = getBuildSelector().getBuild(job.job, env, job.filter, dst);
+ if (src == null) {
+ console.println(Messages.CopyArtifact_MissingBuild(expandedProject));
+ setResult(dst, false); // Fail build unless copy is optional
+ return;
+ }
+
+ if (!targetDir.exists()) {
+ console.println(Messages.CopyArtifact_MissingSrcWorkspace()); // (see JENKINS-3330)
+ setResult(dst, false); // Fail build unless copy is optional
+ return;
+ }
+
+ // Add info about the selected build into the environment
+ final EnvAction envData = dst.getAction(EnvAction.class);
+ if (envData != null) {
+ envData.add(expandedProject, src.getNumber());
+ }
+
+ if (!target.isEmpty())
+ targetDir = new FilePath(targetDir, env.expand(target));
+
+ includeFilter = env.expand(includeFilter);
+ if (includeFilter.trim().isEmpty())
+ includeFilter = "**";
+
+ excludeFilter = env.expand(excludeFilter);
+
+ if (isMavenPluginInstalled() && src instanceof MavenModuleSetBuild) {
+ // Copy artifacts from the build (ArchiveArtifacts build step)
+ boolean ok = perform(src, dst, includeFilter, excludeFilter, targetDir, console);
+
+ // Copy artifacts from all modules of this Maven build (automatic archiving)
+ for (Run<?, ?> r : ((MavenModuleSetBuild) src).getModuleLastBuilds().values()) {
+ ok |= perform(r, dst, includeFilter, excludeFilter, targetDir, console);
+ }
+
+ setResult(dst, ok);
+ } else if (Jenkins.get().getPlugin("matrix-project") != null && src instanceof MatrixBuild) {
+ boolean ok = false;
+ // Copy artifacts from all configurations of this matrix build
+ // Use MatrixBuild.getExactRuns if available
+ for (Run<?, ?> r : ((MatrixBuild) src).getExactRuns()) {
+ // Use subdir of targetDir with configuration name (like "jdk=java6u20")
+ FilePath subdir = targetDir.child(r.getParent().getName());
+ ok |= perform(r, dst, includeFilter, excludeFilter, subdir, console);
+ }
+
+ setResult(dst, ok);
+ } else {
+ setResult(dst, perform(src, dst, includeFilter, excludeFilter, targetDir, console));
+ }
+ }
+ catch (IOException ex) {
+ Util.displayIOException(ex, listener);
+ ex.printStackTrace(listener.error(
+ Messages.CopyArtifact_FailedToCopy(expandedProject, includeFilter)));
+ setResult(dst, false);
+ }
+ }
+
+ private boolean perform(Run<?, ?> src, Run<?, ?> dst, String includeFilter, String excludeFilter, FilePath targetDir, PrintStream console)
+ throws IOException, InterruptedException {
+
+ final S3ArtifactsAction action = src.getAction(S3ArtifactsAction.class);
+ if (action == null) {
+ console.println("Build " + src.getDisplayName() + '[' + src.number + "] doesn't have any S3 artifacts uploaded");
+ return false;
+ }
+
+ final S3Profile profile = S3BucketPublisher.getProfile(action.getProfile());
+
+ if (profile == null) {
+ console.println("Can't find S3 profile");
+ return false;
+ }
+
+ targetDir.mkdirs();
+ final List<FingerprintRecord> records = profile.downloadAll(src, action.getArtifacts(), includeFilter, excludeFilter, targetDir, isFlatten());
+
+ final Map<String, String> fingerprints = Maps.newHashMap();
+ for (FingerprintRecord record : records) {
+ final FingerprintMap map = Jenkins.get().getFingerprintMap();
+
+ final Fingerprint f = map.getOrCreate(src, record.getName(), record.getFingerprint());
+ f.addFor(src);
+ f.addFor(dst);
+ fingerprints.put(record.getName(), record.getFingerprint());
+ }
+
+ for (Run<?, ?> r : new Run<?, ?>[]{src, dst}) {
+ if (r == null) {
+ continue;
+ }
+
+ final FingerprintAction fa = r.getAction(FingerprintAction.class);
+ if (fa != null) {
+ fa.add(fingerprints);
+ } else {
+ r.addAction(new FingerprintAction(r, fingerprints));
+ }
+ }
+
+ console.println(MessageFormat.format("Copied {0} {0,choice,0#artifacts|1#artifact|1 job;
+ BuildFilter filter = new BuildFilter();
+
+ JobResolver(String projectName) {
+ final Jenkins jenkins = Jenkins.get();
+ job = jenkins.getItemByFullName(projectName, Job.class);
+ if (job == null) {
+ // Check for parameterized job with filter (see help file)
+ final int i = projectName.indexOf('/');
+ if (i > 0) {
+ final Job<?, ?> candidate = jenkins.getItemByFullName(projectName.substring(0, i), Job.class);
+ if (candidate != null) {
+ final ParametersBuildFilter pFilter = new ParametersBuildFilter(projectName.substring(i + 1));
+ if (pFilter.isValid(candidate)) {
+ job = candidate;
+ filter = pFilter;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ @Extension
+ @Symbol("s3CopyArtifact")
+ public static final class DescriptorImpl extends BuildStepDescriptor<Builder> {
+
+ public FormValidation doCheckProjectName(
+ @AncestorInPath AccessControlled anc, @QueryParameter String value) {
+ // Require CONFIGURE permission on this project
+ if (!anc.hasPermission(Item.CONFIGURE)) return FormValidation.ok();
+ final FormValidation result;
+ final Item item = new JobResolver(value).job;
+ if (item != null) {
+ if (isMavenPluginInstalled() && item instanceof MavenModuleSet) {
+ result = FormValidation.warning(Messages.CopyArtifact_MavenProject());
+ } else if (Jenkins.get().getPlugin("matrix-project") != null && item instanceof MatrixProject) {
+ result = FormValidation.warning(Messages.CopyArtifact_MatrixProject());
+ } else {
+ result = FormValidation.ok();
+ }
+ }
+ else if (value.indexOf('$') >= 0) {
+ result = FormValidation.warning(Messages.CopyArtifact_ParameterizedName());
+ }
+ else {
+ final AbstractProject<?, ?> nearProject = AbstractProject.findNearest(value);
+ if (nearProject != null) {
+ result = FormValidation.error(
+ Messages.BuildTrigger_NoSuchProjectWithSuggestion(
+ value, nearProject.getName()));
+ } else {
+ result = FormValidation.error(
+ Messages.BuildTrigger_NoSuchProject(value));
+ }
+ }
+ return result;
+ }
+
+ @Override
+ public boolean isApplicable(Class<? extends AbstractProject> clazz) {
+ return true;
+ }
+
+ @Override
+ public String getDisplayName() {
+ return "S3 Copy Artifact";
+ }
+
+ public DescriptorExtensionList<BuildSelector, Descriptor<BuildSelector>> getBuildSelectors() {
+ final DescriptorExtensionList<BuildSelector, Descriptor<BuildSelector>> list = DescriptorExtensionList.createDescriptorList(Jenkins.get(), BuildSelector.class);
+ // remove from list some of the CopyArchiver build selector that we can't deal with
+ list.remove(WorkspaceSelector.DESCRIPTOR);
+ return list;
+ }
+ }
+
+ // Listen for project renames and update property here if needed.
+ @Extension
+ public static final class ListenerImpl extends ItemListener {
+ @Override
+ public void onRenamed(Item item, String oldName, String newName) {
+ for (AbstractProject<?, ?> project
+ : Jenkins.get().getAllItems(AbstractProject.class)) {
+ for (S3CopyArtifact ca : getCopiers(project)) try {
+ if (ca.getProjectName().equals(oldName)) {
+ ca.projectName = newName;
+ } else if (ca.getProjectName().startsWith(oldName + '/')) {
+ // Support rename for "MatrixJobName/AxisName=value" type of name
+ ca.projectName = newName + ca.projectName.substring(oldName.length());
+ } else {
+ continue;
+ }
+ project.save();
+ } catch (IOException ex) {
+ Logger.getLogger(ListenerImpl.class.getName()).log(Level.WARNING,
+ "Failed to resave project " + project.getName()
+ + " for project rename in S3 Copy Artifact build step ("
+ + oldName + " =>" + newName + ')', ex);
+ }
+ }
+ }
+
+ private static List<S3CopyArtifact> getCopiers(AbstractProject<?, ?> project) {
+ final DescribableList<Builder, Descriptor<Builder>> list;
+ if (project instanceof Project) {
+ list = ((Project<?, ?>) project).getBuildersList();
+ } else if (Jenkins.get().getPlugin("matrix-project") != null && project instanceof MatrixProject) {
+ list = ((MatrixProject) project).getBuildersList();
+ } else {
+ list = null;
+ }
+
+ if (list == null) {
+ return Collections.emptyList();
+ }
+
+ return list.getAll(S3CopyArtifact.class);
+ }
+ }
+
+ // Listen for new builds and add EnvAction in any that use CopyArtifact build step
+ @Extension
+ public static final class CopyArtifactRunListener extends RunListener<Build> {
+ public CopyArtifactRunListener() {
+ super(Build.class);
+ }
+
+ @Override
+ public void onStarted(Build r, TaskListener listener) {
+ if (((Build<?, ?>) r).getProject().getBuildersList().get(S3CopyArtifact.class) != null) {
+ r.addAction(new EnvAction());
+ }
+ }
+ }
+
+ private static class EnvAction implements EnvironmentContributingAction {
+ // Decided not to record this data in build.xml, so marked transient:
+ private transient Map<String, String> data = new HashMap<>();
+
+ private void add(String projectName, int buildNumber) {
+ if (data == null) return;
+ int i = projectName.indexOf('/'); // Omit any detail after a /
+ if (i > 0) {
+ projectName = projectName.substring(0, i);
+ }
+ data.put("COPYARTIFACT_BUILD_NUMBER_"
+ + projectName.toUpperCase().replaceAll("[^A-Z]+", "_"), // Only use letters and _
+ Integer.toString(buildNumber));
+ }
+
+ @Override
+ public void buildEnvVars(AbstractBuild<?, ?> build, EnvVars env) {
+ if (data != null) {
+ env.putAll(data);
+ }
+ }
+
+ @Override
+ public String getIconFileName() { return null; }
+
+ @Override
+ public String getDisplayName() { return null; }
+
+ @Override
+ public String getUrlName() { return null; }
+ }
+}
diff --git a/src/main/java/hudson/plugins/s3/S3Profile.java b/src/main/java/hudson/plugins/s3/S3Profile.java
new file mode 100644
index 00000000..1a0beb01
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/S3Profile.java
@@ -0,0 +1,324 @@
+package hudson.plugins.s3;
+
+import com.google.common.collect.Lists;
+import hudson.FilePath;
+import hudson.ProxyConfiguration;
+import hudson.model.Run;
+import hudson.plugins.s3.callable.MasterSlaveCallable;
+import hudson.plugins.s3.callable.S3CleanupUploadCallable;
+import hudson.plugins.s3.callable.S3DownloadCallable;
+import hudson.plugins.s3.callable.S3GzipCallable;
+import hudson.plugins.s3.callable.S3UploadCallable;
+import hudson.plugins.s3.callable.S3WaitUploadCallable;
+import hudson.util.Secret;
+import jenkins.model.Jenkins;
+import org.apache.commons.io.FilenameUtils;
+import org.kohsuke.stapler.DataBoundConstructor;
+import org.kohsuke.stapler.DataBoundSetter;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.model.DeleteObjectRequest;
+import software.amazon.awssdk.services.s3.model.ListObjectsV2Request;
+import software.amazon.awssdk.services.s3.model.ListObjectsV2Response;
+import software.amazon.awssdk.services.s3.model.S3Object;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+
+public class S3Profile {
+ private final String name;
+ private final String accessKey;
+ private final Secret secretKey;
+ private final int maxUploadRetries;
+ private final int uploadRetryTime;
+ private final int maxDownloadRetries;
+ private final int downloadRetryTime;
+ private final boolean keepStructure;
+
+ private final boolean useRole;
+ private final int signedUrlExpirySeconds;
+ private boolean usePathStyle = false;
+ private boolean deleteArtifactsRecursively = false;
+
+ @DataBoundConstructor
+ public S3Profile(String name, String accessKey, String secretKey, boolean useRole, int signedUrlExpirySeconds, String maxUploadRetries, String uploadRetryTime, String maxDownloadRetries, String downloadRetryTime, boolean keepStructure) {
+ this.name = name;
+ this.useRole = useRole;
+ this.maxUploadRetries = parseWithDefault(maxUploadRetries, 5);
+ this.uploadRetryTime = parseWithDefault(uploadRetryTime, 5);
+ this.maxDownloadRetries = parseWithDefault(maxDownloadRetries, 5);
+ this.downloadRetryTime = parseWithDefault(downloadRetryTime, 5);
+ this.signedUrlExpirySeconds = signedUrlExpirySeconds;
+ if (useRole) {
+ this.accessKey = "";
+ this.secretKey = null;
+ } else {
+ this.accessKey = accessKey;
+ this.secretKey = Secret.fromString(secretKey);
+ }
+
+ this.keepStructure = keepStructure;
+ }
+
+ @DataBoundSetter
+ public void setUsePathStyle(boolean usePathStyle) {
+ this.usePathStyle = usePathStyle;
+ }
+
+ @DataBoundSetter
+ public void setDeleteArtifactsRecursively(boolean deleteArtifactsRecursively) {
+ this.deleteArtifactsRecursively = deleteArtifactsRecursively;
+ }
+
+ public boolean isKeepStructure() {
+ return keepStructure;
+ }
+
+ private int parseWithDefault(String number, int defaultValue) {
+ try {
+ return Integer.parseInt(number);
+ } catch (NumberFormatException nfe) {
+ return defaultValue;
+ }
+ }
+
+ public int getMaxDownloadRetries() {
+ return maxDownloadRetries;
+ }
+
+ public int getDownloadRetryTime() {
+ return downloadRetryTime;
+ }
+
+ public final String getAccessKey() {
+ return accessKey;
+ }
+
+ public final Secret getSecretKey() {
+ return secretKey;
+ }
+
+ public final int getMaxUploadRetries() {
+ return maxUploadRetries;
+ }
+
+ public final int getUploadRetryTime() {
+ return uploadRetryTime;
+ }
+
+ public /*final*/ String getName() {
+ return this.name;
+ }
+
+ public final boolean getUseRole() {
+ return this.useRole;
+ }
+
+ public boolean isUseRole() {
+ return useRole;
+ }
+
+ public int getSignedUrlExpirySeconds() {
+ return signedUrlExpirySeconds;
+ }
+
+ public boolean isUsePathStyle() { return usePathStyle; }
+
+ public boolean isDeleteArtifactsRecursively() { return deleteArtifactsRecursively; }
+
+ public S3Client getClient(String region) {
+ return ClientHelper.createClient(accessKey, Secret.toString(secretKey), useRole, region, getProxy(), usePathStyle);
+ }
+
+ public List<FingerprintRecord> upload(Run<?, ?> run,
+ final String bucketName,
+ final List<FilePath> filePaths,
+ final List<String> fileNames,
+ final Map<String, String> userMetadata,
+ final String storageClass,
+ final String selregion,
+ final boolean uploadFromSlave,
+ final boolean managedArtifacts,
+ final boolean useServerSideEncryption,
+ final boolean gzipFiles) throws IOException, InterruptedException {
+ final List<FingerprintRecord> fingerprints = new ArrayList<>(fileNames.size());
+
+ try {
+ for (int i = 0; i < fileNames.size(); i++) {
+ final FilePath filePath = filePaths.get(i);
+ final String fileName = fileNames.get(i);
+
+ final Destination dest;
+ final boolean produced;
+ if (managedArtifacts) {
+ dest = Destination.newFromRun(run, bucketName, fileName, true);
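+ // Consider the file "produced" by this build if it was modified
+ // after the build started, allowing 2 seconds of timestamp skew.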
+ produced = run.getTimeInMillis() <= filePath.lastModified() + 2000;
+ } else {
+ dest = new Destination(bucketName, fileName);
+ produced = false;
+ }
+
+ final MasterSlaveCallable<String> upload;
+ if (gzipFiles) {
+ upload = new S3GzipCallable(accessKey, secretKey, useRole, dest, userMetadata,
+ storageClass, selregion, useServerSideEncryption, getProxy(), usePathStyle);
+ } else {
+ upload = new S3UploadCallable(accessKey, secretKey, useRole, dest, userMetadata,
+ storageClass, selregion, useServerSideEncryption, getProxy(), usePathStyle);
+ }
+
+ final FingerprintRecord fingerprintRecord = repeat(maxUploadRetries, uploadRetryTime, dest, new Callable<FingerprintRecord>() {
+ @Override
+ public FingerprintRecord call() throws IOException, InterruptedException {
+ final String md5 = invoke(uploadFromSlave, filePath, upload);
+ return new FingerprintRecord(produced, bucketName, fileName, selregion, md5);
+ }
+ });
+
+ fingerprints.add(fingerprintRecord);
+ }
+
+ waitUploads(filePaths, uploadFromSlave);
+ } catch (InterruptedException | IOException exception) {
+ cleanupUploads(filePaths, uploadFromSlave);
+ throw exception;
+ }
+
+ return fingerprints;
+ }
+
+ private void cleanupUploads(final List<FilePath> filePaths, boolean uploadFromSlave) {
+ for (FilePath filePath : filePaths) {
+ try {
+ invoke(uploadFromSlave, filePath, new S3CleanupUploadCallable());
+ }
+ catch (InterruptedException | IOException ignored) {
+ }
+ }
+ }
+
+ private void waitUploads(final List<FilePath> filePaths, boolean uploadFromSlave) throws InterruptedException, IOException {
+ for (FilePath filePath : filePaths) {
+ invoke(uploadFromSlave, filePath, new S3WaitUploadCallable());
+ }
+ }
+
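+ // Runs the callable either on the agent that owns the file
+ // (uploadFromSlave) or locally after streaming the file to the
+ // controller.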
+ private <T> T invoke(boolean uploadFromSlave, FilePath filePath, MasterSlaveCallable<T> callable) throws InterruptedException, IOException {
+ if (uploadFromSlave) {
+ return filePath.act(callable);
+ } else {
+ return callable.invoke(filePath);
+ }
+ }
+
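+ // Lists the keys of the managed artifacts of a build, following
+ // continuation tokens until the listing is exhausted.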
+ public List<String> list(Run<?, ?> build, String bucket) {
+ final S3Client s3client = getClient(ClientHelper.DEFAULT_AMAZON_S3_REGION_NAME);
+
+ final String buildName = build.getDisplayName();
+ final int buildID = build.getNumber();
+ final Destination dest = new Destination(bucket, "jobs/" + buildName + '/' + buildID + '/' + name);
+ final List<String> files = Lists.newArrayList();
+ String nextContinuationToken = null;
+ do {
+ final ListObjectsV2Request listObjectsRequest = ListObjectsV2Request.builder()
+ .bucket(dest.bucketName)
+ .prefix(dest.objectName)
+ .encodingType("url")
+ .continuationToken(nextContinuationToken)
+ .build();
+
+ ListObjectsV2Response response = s3client.listObjectsV2(listObjectsRequest);
+ nextContinuationToken = response.nextContinuationToken();
+
+ for (S3Object summary : response.contents()) {
+ files.add(summary.key());
+ }
+ } while (nextContinuationToken != null);
+
+ return files;
+ }
+
+ /**
+ * Download all artifacts from a given build
+ */
+ public List<FingerprintRecord> downloadAll(Run<?, ?> build,
+ final List<FingerprintRecord> artifacts,
+ final String includeFilter,
+ final String excludeFilter,
+ final FilePath targetDir,
+ final boolean flatten) throws IOException, InterruptedException {
+ final List<FingerprintRecord> fingerprints = Lists.newArrayList();
+ for (final FingerprintRecord record : artifacts) {
+ final S3Artifact artifact = record.getArtifact();
+ final Destination dest = Destination.newFromRun(build, artifact);
+ final FilePath target = getFilePath(targetDir, flatten, artifact.getName());
+
+ if (FileHelper.selected(includeFilter, excludeFilter, artifact.getName())) {
+ fingerprints.add(repeat(maxDownloadRetries, downloadRetryTime, dest, new Callable<FingerprintRecord>() {
+ @Override
+ public FingerprintRecord call() throws IOException, InterruptedException {
+ final String md5 = target.act(new S3DownloadCallable(accessKey, secretKey, useRole, dest, artifact.getRegion(), getProxy(), usePathStyle));
+ return new FingerprintRecord(true, dest.bucketName, target.getName(), artifact.getRegion(), md5);
+ }
+ }));
+ }
+ }
+ return fingerprints;
+ }
+
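+ // Retries func up to maxRetries times, sleeping waitTime seconds
+ // between attempts; the final failure is wrapped in an IOException.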
+ private <T> T repeat(int maxRetries, int waitTime, Destination dest, Callable<T> func) throws IOException, InterruptedException {
+ int retryCount = 0;
+
+ while (true) {
+ try {
+ return func.call();
+ } catch (Exception e) {
+ retryCount++;
+ if (retryCount >= maxRetries) {
+ throw new IOException("Call failed for " + dest + ": " + e + " :: failed after " + retryCount + " tries.", e);
+ }
+ Thread.sleep(TimeUnit.SECONDS.toMillis(waitTime));
+ }
+ }
+ }
+
+ private FilePath getFilePath(FilePath targetDir, boolean flatten, String fullName) {
+ if (flatten) {
+ return new FilePath(targetDir, FilenameUtils.getName(fullName));
+ }
+ else {
+ return new FilePath(targetDir, fullName);
+ }
+ }
+
+ /**
+ * Delete a single artifact of a given run
+ */
+ public void delete(Run<?, ?> run, FingerprintRecord record) {
+ final Destination dest = Destination.newFromRun(run, record.getArtifact());
+ final DeleteObjectRequest req = DeleteObjectRequest.builder().bucket(dest.bucketName).key(dest.objectName).build();
+ final var client = getClient(record.getArtifact().getRegion());
+ client.deleteObject(req);
+ }
+
+ @Override
+ public String toString() {
+ // The secret key is deliberately omitted so it cannot leak into logs.
+ return "S3Profile{" +
+ "name='" + name + '\'' +
+ ", accessKey='" + accessKey + '\'' +
+ ", useRole=" + useRole +
+ '}';
+ }
+
+ private ProxyConfiguration getProxy() {
+ return Jenkins.get().proxy;
+ }
+}
diff --git a/src/main/java/hudson/plugins/s3/Uploads.java b/src/main/java/hudson/plugins/s3/Uploads.java
new file mode 100644
index 00000000..4ebbe115
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/Uploads.java
@@ -0,0 +1,116 @@
+package hudson.plugins.s3;
+
+import hudson.FilePath;
+import software.amazon.awssdk.core.async.AsyncRequestBody;
+import software.amazon.awssdk.services.s3.model.PutObjectRequest;
+import software.amazon.awssdk.transfer.s3.S3TransferManager;
+import software.amazon.awssdk.transfer.s3.model.Upload;
+import software.amazon.awssdk.transfer.s3.model.UploadRequest;
+import software.amazon.awssdk.transfer.s3.progress.TransferListener;
+import software.amazon.awssdk.utils.NamedThreadFactory;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.function.Consumer;
+import java.util.logging.Logger;
+
+public final class Uploads {
+ private Uploads() {}
+ private static final Logger LOGGER = Logger.getLogger(Uploads.class.getName());
+ public static final int MULTIPART_UPLOAD_THRESHOLD = 16*1024*1024; // 16 MB
+
+ private static volatile Uploads instance;
+ private final transient HashMap<FilePath, Upload> startedUploads = new HashMap<>();
+ private final ExecutorService executors = Executors.newScheduledThreadPool(1, new NamedThreadFactory(Executors.defaultThreadFactory(), Uploads.class.getName()));
+ private final transient HashMap<FilePath, InputStream> openedStreams = new HashMap<>();
+
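+ // Starts an asynchronous upload through the transfer manager and
+ // remembers both the upload and its input stream, keyed by file path,
+ // so they can be completed or cleaned up later.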
+ public Upload startUploading(S3TransferManager manager, FilePath file, InputStream inputStream, String bucketName, String objectName, Metadata metadata, TransferListener listener) {
+ UploadRequest.Builder request = UploadRequest.builder();
+ request.putObjectRequest(metadata.builder.andThen(b -> b.bucket(bucketName).key(objectName).metadata(metadata.metadata)));
+ request.requestBody(AsyncRequestBody.fromInputStream(inputStream, metadata.getContentLength(), executors));
+
+ if (listener != null) {
+ request.addTransferListener(listener);
+ }
+ final Upload upload = manager.upload(request.build());
+ startedUploads.put(file, upload);
+ openedStreams.put(file, inputStream);
+ return upload;
+ }
+
+ public void finishUploading(FilePath filePath) throws InterruptedException {
+ final Upload upload = startedUploads.remove(filePath);
+ if (upload == null) {
+ LOGGER.info("File: " + filePath.getName() + " already was uploaded");
+ return;
+ }
+ try {
+ upload.completionFuture().join();
+ }
+ finally {
+ closeStream(filePath);
+ }
+ }
+
+ public void cleanup(FilePath filePath) {
+ startedUploads.remove(filePath);
+ closeStream(filePath);
+ }
+
+ private void closeStream(FilePath filePath) {
+ try {
+ final InputStream stream = openedStreams.remove(filePath);
+ if (stream != null) {
+ stream.close();
+ }
+ } catch (IOException e) {
+ LOGGER.warning("Failed to close stream for file:" + filePath);
+ }
+ }
+
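+ // Lazily created singleton; double-checked locking is safe here
+ // because the instance field is volatile.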
+ public static Uploads getInstance() {
+ if (instance == null) {
+ synchronized (Uploads.class) {
+ if (instance == null) {
+ instance = new Uploads();
+ }
+ }
+ }
+ return instance;
+ }
+
+ public static class Metadata {
+ private Consumer<PutObjectRequest.Builder> builder;
+ private final Map<String, String> metadata;
+ private long contentLength;
+
+ public Metadata(Consumer<PutObjectRequest.Builder> builder, Map<String, String> metadata) {
+ this.builder = builder;
+ this.metadata = metadata != null ? metadata : new HashMap<>();
+ }
+
+ public Metadata(Consumer<PutObjectRequest.Builder> builder) {
+ this(builder, new HashMap<>());
+ }
+
+ public void putMetadata(String key, String value) {
+ metadata.put(key, value);
+ }
+
+ public long getContentLength() {
+ return contentLength;
+ }
+
+ public void setContentLength(long contentLength) {
+ this.contentLength = contentLength;
+ }
+
+ public void andThen(Consumer<PutObjectRequest.Builder> addition) {
+ builder = builder.andThen(addition);
+ }
+ }
+}
diff --git a/src/main/java/hudson/plugins/s3/callable/MasterSlaveCallable.java b/src/main/java/hudson/plugins/s3/callable/MasterSlaveCallable.java
new file mode 100644
index 00000000..4d0ee4fe
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/callable/MasterSlaveCallable.java
@@ -0,0 +1,20 @@
+package hudson.plugins.s3.callable;
+
+import hudson.FilePath;
+import hudson.remoting.VirtualChannel;
+
+import java.io.File;
+import java.io.IOException;
+
+public interface MasterSlaveCallable<T> extends FilePath.FileCallable<T> {
+ /**
+ * Runs on the agent that owns the file.
+ */
+ @Override
+ T invoke(File file, VirtualChannel channel) throws IOException, InterruptedException;
+
+ /**
+ * Runs against the given file, typically on the controller after the
+ * file content has been streamed to it.
+ */
+ T invoke(FilePath file) throws IOException, InterruptedException;
+}
diff --git a/src/main/java/hudson/plugins/s3/callable/S3BaseUploadCallable.java b/src/main/java/hudson/plugins/s3/callable/S3BaseUploadCallable.java
new file mode 100644
index 00000000..4d72a4f8
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/callable/S3BaseUploadCallable.java
@@ -0,0 +1,96 @@
+package hudson.plugins.s3.callable;
+
+import hudson.plugins.s3.Uploads;
+import software.amazon.awssdk.core.internal.util.Mimetype;
+import hudson.FilePath;
+import hudson.ProxyConfiguration;
+import hudson.plugins.s3.Destination;
+import hudson.remoting.VirtualChannel;
+import hudson.util.Secret;
+import software.amazon.awssdk.services.s3.model.PutObjectRequest;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Map;
+import java.util.function.Consumer;
+
+public abstract class S3BaseUploadCallable extends S3Callable<String> {
+ private static final long serialVersionUID = 1L;
+ private final Destination dest;
+ private final String storageClass;
+ private final Map<String, String> userMetadata;
+ private final boolean useServerSideEncryption;
+
+ public S3BaseUploadCallable(String accessKey, Secret secretKey, boolean useRole,
+ Destination dest, Map<String, String> userMetadata, String storageClass, String selregion,
+ boolean useServerSideEncryption, ProxyConfiguration proxy, boolean usePathStyle) {
+ super(accessKey, secretKey, useRole, selregion, proxy, usePathStyle);
+ this.dest = dest;
+ this.storageClass = storageClass;
+ this.userMetadata = userMetadata;
+ this.useServerSideEncryption = useServerSideEncryption;
+ }
+
+ /**
+ * Upload from slave directly
+ */
+ @Override
+ public String invoke(File file, VirtualChannel channel) throws IOException, InterruptedException {
+ return invoke(new FilePath(file));
+ }
+
+ /**
+ * Stream from slave to master, then upload from master
+ */
+ public abstract String invoke(FilePath file) throws IOException, InterruptedException;
+
+ protected Uploads.Metadata buildMetadata(FilePath filePath) throws IOException, InterruptedException {
+ long contentLength = filePath.length();
+ Consumer<PutObjectRequest.Builder> builder = metadata -> {
+ metadata.contentType(Mimetype.getInstance().getMimetype(new File(filePath.getName())));
+ metadata.contentLength(contentLength);
+ if (storageClass != null && !storageClass.isEmpty()) {
+ metadata.storageClass(storageClass);
+ }
+ if (useServerSideEncryption) {
+ // SSE-S3: have S3 encrypt the object at rest with AES-256
+ // (sseCustomerAlgorithm would require customer-provided keys).
+ metadata.serverSideEncryption("AES256");
+ }
+ };
+ Uploads.Metadata metadata = new Uploads.Metadata(builder);
+ metadata.setContentLength(contentLength);
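+ // Map well-known HTTP headers onto their typed builder setters;
+ // anything else is sent as x-amz-meta-* user metadata.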
+ for (Map.Entry<String, String> entry : userMetadata.entrySet()) {
+ final String key = entry.getKey().toLowerCase();
+ switch (key) {
+ case "cache-control":
+ metadata.andThen(b1 -> b1.cacheControl(entry.getValue()));
+ break;
+ case "expires":
+ try {
+ final Date expires = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z").parse(entry.getValue());
+ metadata.andThen(b1 -> b1.expires(expires.toInstant()));
+ } catch (ParseException e) {
+ metadata.putMetadata(entry.getKey(), entry.getValue());
+ }
+ break;
+ case "content-encoding":
+ metadata.andThen(b1 -> b1.contentEncoding(entry.getValue()));
+ break;
+ case "content-type":
+ metadata.andThen(b1 -> b1.contentType(entry.getValue()));
+ break;
+ default:
+ metadata.putMetadata(entry.getKey(), entry.getValue());
+ break;
+ }
+ }
+ return metadata;
+ }
+
+ public Destination getDest() {
+ return dest;
+ }
+}
diff --git a/src/main/java/hudson/plugins/s3/callable/S3Callable.java b/src/main/java/hudson/plugins/s3/callable/S3Callable.java
new file mode 100644
index 00000000..61f665c9
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/callable/S3Callable.java
@@ -0,0 +1,73 @@
+package hudson.plugins.s3.callable;
+
+import hudson.FilePath.FileCallable;
+import hudson.ProxyConfiguration;
+import hudson.plugins.s3.ClientHelper;
+import hudson.plugins.s3.Uploads;
+import hudson.util.Secret;
+import jenkins.security.Roles;
+import org.jenkinsci.remoting.RoleChecker;
+import software.amazon.awssdk.services.s3.S3AsyncClient;
+import software.amazon.awssdk.transfer.s3.S3TransferManager;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+
+import static org.apache.commons.lang.StringUtils.isNotEmpty;
+
+abstract class S3Callable<T> implements FileCallable<T> {
+ private static final long serialVersionUID = 1L;
+
+ private final String accessKey;
+ private final Secret secretKey;
+ private final boolean useRole;
+ private final String region;
+ private final ProxyConfiguration proxy;
+ private final String customEndpoint;
+ private final boolean usePathStyle;
+
+ private static final HashMap<String, S3TransferManager> transferManagers = new HashMap<>();
+
+ S3Callable(String accessKey, Secret secretKey, boolean useRole, String region, ProxyConfiguration proxy, boolean usePathStyle) {
+ this.accessKey = accessKey;
+ this.secretKey = secretKey;
+ this.useRole = useRole;
+ this.region = region;
+ this.proxy = proxy;
+ this.customEndpoint = ClientHelper.ENDPOINT;
+ this.usePathStyle = usePathStyle;
+ }
+
+ protected S3TransferManager getTransferManager() {
+ final String uniqueKey = getUniqueKey();
+ // The cache is static, so synchronize on it rather than on the
+ // per-upload callable instance.
+ synchronized (transferManagers) {
+ if (transferManagers.get(uniqueKey) == null) {
+ try {
+ final S3AsyncClient client = ClientHelper.createAsyncClient(
+ accessKey,
+ Secret.toString(secretKey),
+ useRole,
+ region,
+ proxy,
+ isNotEmpty(customEndpoint) ? new URI(customEndpoint) : null,
+ (long) Uploads.MULTIPART_UPLOAD_THRESHOLD,
+ usePathStyle);
+ transferManagers.put(uniqueKey, S3TransferManager.builder().s3Client(client).build());
+ } catch (URISyntaxException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ return transferManagers.get(uniqueKey);
+ }
+ }
+
+ @Override
+ public void checkRoles(RoleChecker roleChecker) throws SecurityException {
+ roleChecker.check(this, Roles.SLAVE);
+ }
+
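+ // Transfer managers are cached per credentials/region combination so
+ // repeated transfers reuse the same underlying async client.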
+ private String getUniqueKey() {
+ return region + '_' + secretKey + '_' + accessKey + '_' + useRole;
+ }
+}
\ No newline at end of file
diff --git a/src/main/java/hudson/plugins/s3/callable/S3CleanupUploadCallable.java b/src/main/java/hudson/plugins/s3/callable/S3CleanupUploadCallable.java
new file mode 100644
index 00000000..0ee89d4d
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/callable/S3CleanupUploadCallable.java
@@ -0,0 +1,28 @@
+package hudson.plugins.s3.callable;
+
+import hudson.FilePath;
+import hudson.plugins.s3.Uploads;
+import hudson.remoting.VirtualChannel;
+import jenkins.security.Roles;
+import org.jenkinsci.remoting.RoleChecker;
+
+import java.io.File;
+
+public final class S3CleanupUploadCallable implements MasterSlaveCallable<Void> {
+ @Override
+ public Void invoke(File f, VirtualChannel channel) {
+ invoke(new FilePath(f));
+ return null;
+ }
+
+ @Override
+ public Void invoke(FilePath file) {
+ Uploads.getInstance().cleanup(file);
+ return null;
+ }
+
+ @Override
+ public void checkRoles(RoleChecker checker) throws SecurityException {
+ checker.check(this, Roles.SLAVE);
+ }
+}
diff --git a/src/main/java/hudson/plugins/s3/callable/S3DownloadCallable.java b/src/main/java/hudson/plugins/s3/callable/S3DownloadCallable.java
new file mode 100644
index 00000000..fa9bd137
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/callable/S3DownloadCallable.java
@@ -0,0 +1,38 @@
+package hudson.plugins.s3.callable;
+
+import hudson.ProxyConfiguration;
+import hudson.plugins.s3.Destination;
+import hudson.plugins.s3.MD5;
+import hudson.remoting.VirtualChannel;
+import hudson.util.Secret;
+import software.amazon.awssdk.transfer.s3.model.DownloadFileRequest;
+import software.amazon.awssdk.transfer.s3.model.FileDownload;
+
+import java.io.File;
+import java.io.IOException;
+
+public final class S3DownloadCallable extends S3Callable<String> {
+ private static final long serialVersionUID = 1L;
+ private final Destination dest;
+
+ public S3DownloadCallable(String accessKey, Secret secretKey, boolean useRole, Destination dest, String region, ProxyConfiguration proxy, boolean usePathStyle) {
+ super(accessKey, secretKey, useRole, region, proxy, usePathStyle);
+ this.dest = dest;
+ }
+
+ @Override
+ public String invoke(File file, VirtualChannel channel) throws IOException, InterruptedException {
+ final DownloadFileRequest req = DownloadFileRequest.builder()
+ .getObjectRequest(builder -> builder.bucket(dest.bucketName).key(dest.objectName))
+ .destination(file).build();
+ FileDownload download = getTransferManager().downloadFile(req);
+
+ download.completionFuture().join();
+
+ return MD5.generateFromFile(file);
+ }
+
+}
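
The download above blocks on completionFuture().join(), which on failure throws an unchecked CompletionException wrapping the real SDK error. A sketch of the same transfer-manager download pattern with the cause unwrapped; the bucket and key names are placeholders:

```java
// Sketch: downloading with the SDK v2 transfer manager and surfacing the
// underlying SDK error instead of a bare CompletionException.
import java.nio.file.Path;
import java.util.concurrent.CompletionException;

import software.amazon.awssdk.core.exception.SdkException;
import software.amazon.awssdk.transfer.s3.S3TransferManager;
import software.amazon.awssdk.transfer.s3.model.DownloadFileRequest;
import software.amazon.awssdk.transfer.s3.model.FileDownload;

class DownloadExample {
    static void download(S3TransferManager tm, Path target) {
        DownloadFileRequest req = DownloadFileRequest.builder()
                .getObjectRequest(b -> b.bucket("my-bucket").key("my/key")) // placeholder names
                .destination(target)
                .build();
        FileDownload download = tm.downloadFile(req);
        try {
            download.completionFuture().join();
        } catch (CompletionException e) {
            // Unwrap so callers see the S3 failure (e.g. NoSuchKeyException) directly.
            if (e.getCause() instanceof SdkException) {
                throw (SdkException) e.getCause();
            }
            throw e;
        }
    }
}
```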
diff --git a/src/main/java/hudson/plugins/s3/callable/S3GzipCallable.java b/src/main/java/hudson/plugins/s3/callable/S3GzipCallable.java
new file mode 100644
index 00000000..68fc0eba
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/callable/S3GzipCallable.java
@@ -0,0 +1,109 @@
+package hudson.plugins.s3.callable;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import hudson.FilePath;
+import hudson.ProxyConfiguration;
+import hudson.plugins.s3.Destination;
+import hudson.plugins.s3.MD5;
+import hudson.plugins.s3.Uploads;
+import hudson.util.Secret;
+import org.apache.commons.io.IOUtils;
+import software.amazon.awssdk.transfer.s3.model.Upload;
+import software.amazon.awssdk.transfer.s3.progress.TransferListener;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.Map;
+import java.util.logging.Logger;
+import java.util.zip.GZIPOutputStream;
+
+public final class S3GzipCallable extends S3BaseUploadCallable implements MasterSlaveCallable<String> {
+ public S3GzipCallable(String accessKey, Secret secretKey, boolean useRole, Destination dest, Map<String, String> userMetadata, String storageClass, String selregion, boolean useServerSideEncryption, ProxyConfiguration proxy, boolean usePathStyle) {
+ super(accessKey, secretKey, useRole, dest, userMetadata, storageClass, selregion, useServerSideEncryption, proxy, usePathStyle);
+ }
+
+ // Return a File containing the gzipped contents of the input file.
+ @SuppressFBWarnings("RV_RETURN_VALUE_IGNORED_BAD_PRACTICE")
+ private File gzipFile(FilePath file) throws IOException, InterruptedException {
+ final File localFile = File.createTempFile("s3plugin", ".bin");
+ try (InputStream inputStream = file.read()) {
+ try (OutputStream outputStream = new FileOutputStream(localFile)) {
+ try (OutputStream gzipStream = new GZIPOutputStream(outputStream, true)) {
+ IOUtils.copy(inputStream, gzipStream);
+ gzipStream.flush();
+ }
+ }
+ } catch (IOException | InterruptedException | RuntimeException ex) {
+ // Do not leak the temporary file if compression fails.
+ localFile.delete();
+ throw ex;
+ }
+ return localFile;
+ }
+
+ // Hook to ensure that the file is deleted once the upload finishes.
+ private static class CleanupHook implements TransferListener {
+ private final File localFile;
+
+ CleanupHook(File localFile) {
+ this.localFile = localFile;
+ }
+
+ @Override
+ public void transferComplete(Context.TransferComplete context) {
+ deleteLocalFile();
+ }
+
+ @Override
+ public void transferFailed(Context.TransferFailed context) {
+ // The default implementation is a no-op, which would leak the temporary
+ // file whenever the upload fails; delete it here as well.
+ deleteLocalFile();
+ }
+
+ private void deleteLocalFile() {
+ if (localFile.delete()) {
+ Logger.getLogger(S3GzipCallable.class.getName()).fine(() -> "Removed temporary file " + localFile.getName());
+ } else {
+ Logger.getLogger(S3GzipCallable.class.getName()).fine(() -> "Could not remove temporary file " + localFile.getName() + " (exists? " + localFile.exists() + ')');
+ }
+ }
+ }
+
+ @Override
+ @SuppressFBWarnings({"RV_RETURN_VALUE_IGNORED_BAD_PRACTICE","OBL_UNSATISFIED_OBLIGATION"})
+ public String invoke(FilePath file) throws IOException, InterruptedException {
+ final File localFile = gzipFile(file);
+ Upload upload = null;
+
+ try {
+ // This stream is asynchronously used in startUploading,
+ // so we cannot use its AutoCloseable behaviour with a
+ // try-with-resources statement, as that would likely
+ // close the stream before the upload has succeeded.
+ final InputStream gzippedStream = new FileInputStream(localFile);
+ final Uploads.Metadata metadata = buildMetadata(file);
+ long length = localFile.length();
+ metadata.setContentLength(length);
+ metadata.andThen(meta -> meta.contentEncoding("gzip")
+ .contentLength(length));
+
+ String md5 = MD5.generateFromFile(localFile);
+
+ // Add the cleanup hook only after we have the MD5,
+ // because the hook might delete the file immediately.
+ upload = Uploads.getInstance().startUploading(getTransferManager(), file, gzippedStream, getDest().bucketName, getDest().objectName, metadata, new CleanupHook(localFile));
+
+ return md5;
+ } finally {
+ // The upload might have finished before we installed the progress listener.
+ if (upload == null || upload.completionFuture().isDone()) {
+ // The progress listener might have fired before this,
+ // but .delete() on non-existent path is ok, and the
+ // temporary name won't be reused by anything
+ localFile.delete();
+ }
+ }
+ }
+}
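
CleanupHook above implements only the terminal callbacks of the SDK v2 TransferListener, whose methods are all default methods. A minimal standalone listener showing the commonly used callbacks; the logging is illustrative:

```java
// Sketch: the SDK v2 TransferListener lifecycle used by CleanupHook above.
// Every method has a no-op default, so a listener overrides only what it needs.
import java.util.logging.Logger;

import software.amazon.awssdk.transfer.s3.progress.TransferListener;

class LoggingListener implements TransferListener {
    private static final Logger LOG = Logger.getLogger(LoggingListener.class.getName());

    @Override
    public void bytesTransferred(Context.BytesTransferred context) {
        LOG.fine(() -> "Transferred so far: " + context.progressSnapshot().transferredBytes() + " bytes");
    }

    @Override
    public void transferComplete(Context.TransferComplete context) {
        LOG.fine("Transfer finished; temporary resources can be released");
    }

    @Override
    public void transferFailed(Context.TransferFailed context) {
        LOG.warning(() -> "Transfer failed: " + context.exception());
    }
}
```

A listener is attached per request, e.g. UploadFileRequest.builder().addTransferListener(new LoggingListener()), which is presumably how Uploads.startUploading wires in the CleanupHook passed to it above.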
diff --git a/src/main/java/hudson/plugins/s3/callable/S3UploadCallable.java b/src/main/java/hudson/plugins/s3/callable/S3UploadCallable.java
new file mode 100644
index 00000000..ad78426a
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/callable/S3UploadCallable.java
@@ -0,0 +1,31 @@
+package hudson.plugins.s3.callable;
+
+import hudson.FilePath;
+import hudson.ProxyConfiguration;
+import hudson.plugins.s3.Destination;
+import hudson.plugins.s3.MD5;
+import hudson.plugins.s3.Uploads;
+import hudson.util.Secret;
+
+import java.io.IOException;
+import java.util.Map;
+
+public final class S3UploadCallable extends S3BaseUploadCallable implements MasterSlaveCallable<String> {
+ private static final long serialVersionUID = 1L;
+
+ public S3UploadCallable(String accessKey, Secret secretKey, boolean useRole, Destination dest, Map<String, String> userMetadata, String storageClass, String selregion, boolean useServerSideEncryption, ProxyConfiguration proxy, boolean usePathStyle) {
+ super(accessKey, secretKey, useRole, dest, userMetadata, storageClass, selregion, useServerSideEncryption, proxy, usePathStyle);
+ }
+
+ /**
+ * Stream from slave to master, then upload from master
+ */
+ @Override
+ public String invoke(FilePath file) throws IOException, InterruptedException {
+ Uploads.Metadata metadata = buildMetadata(file);
+
+ Uploads.getInstance().startUploading(getTransferManager(), file, file.read(), getDest().bucketName, getDest().objectName, metadata, null);
+
+ return MD5.generateFromFile(file);
+ }
+}
\ No newline at end of file
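
Both upload callables return MD5.generateFromFile(...), a plugin helper not shown in this diff. Presumably it computes a hex MD5 digest of the file contents, along these lines (a sketch under that assumption, not the plugin's actual helper):

```java
// Sketch only: a streaming MD5 hex digest, which is assumed to be what the
// plugin's MD5.generateFromFile helper does (the helper is not in this diff).
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

class Md5Sketch {
    static String md5Hex(Path file) throws IOException, NoSuchAlgorithmException {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        try (InputStream in = new DigestInputStream(Files.newInputStream(file), md5)) {
            // Drain the stream; the DigestInputStream updates the digest as bytes pass through.
            in.transferTo(OutputStream.nullOutputStream());
        }
        StringBuilder hex = new StringBuilder();
        for (byte b : md5.digest()) {
            hex.append(String.format("%02x", b));
        }
        return hex.toString();
    }
}
```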
diff --git a/src/main/java/hudson/plugins/s3/callable/S3WaitUploadCallable.java b/src/main/java/hudson/plugins/s3/callable/S3WaitUploadCallable.java
new file mode 100644
index 00000000..27cc79b1
--- /dev/null
+++ b/src/main/java/hudson/plugins/s3/callable/S3WaitUploadCallable.java
@@ -0,0 +1,28 @@
+package hudson.plugins.s3.callable;
+
+import hudson.FilePath;
+import hudson.plugins.s3.Uploads;
+import hudson.remoting.VirtualChannel;
+import jenkins.security.Roles;
+import org.jenkinsci.remoting.RoleChecker;
+
+import java.io.File;
+
+public final class S3WaitUploadCallable implements MasterSlaveCallable<Void> {
+ @Override
+ public Void invoke(File f, VirtualChannel channel) throws InterruptedException {
+ invoke(new FilePath(f));
+ return null;
+ }
+
+ @Override
+ public Void invoke(FilePath file) throws InterruptedException {
+ Uploads.getInstance().finishUploading(file);
+ return null;
+ }
+
+ @Override
+ public void checkRoles(RoleChecker checker) throws SecurityException {
+ checker.check(this, Roles.SLAVE);
+ }
+}
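
Taken together, S3UploadCallable, S3WaitUploadCallable and S3CleanupUploadCallable look like a three-phase protocol: start the asynchronous upload on the node that holds the file, block until Uploads has drained it, then drop the per-file bookkeeping. A sketch of how a publisher might drive them, assuming MasterSlaveCallable extends FilePath.FileCallable so FilePath.act(...) can dispatch it; the orchestration itself is an assumption, not code from this diff:

```java
// Sketch only: driving the three upload phases from a publisher.
// FilePath.act(...) runs the callable on whichever node holds the file.
import hudson.FilePath;
import hudson.plugins.s3.callable.S3CleanupUploadCallable;
import hudson.plugins.s3.callable.S3UploadCallable;
import hudson.plugins.s3.callable.S3WaitUploadCallable;

import java.io.IOException;

class UploadFlowSketch {
    static String uploadAndWait(FilePath file, S3UploadCallable upload) throws IOException, InterruptedException {
        String md5 = file.act(upload);               // phase 1: start the async upload, get the MD5
        try {
            file.act(new S3WaitUploadCallable());    // phase 2: block until the transfer completes
        } finally {
            file.act(new S3CleanupUploadCallable()); // phase 3: always release the tracking state
        }
        return md5;
    }
}
```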
diff --git a/src/main/resources/META-INF/hudson.remoting.ClassFilter b/src/main/resources/META-INF/hudson.remoting.ClassFilter
new file mode 100644
index 00000000..f406b039
--- /dev/null
+++ b/src/main/resources/META-INF/hudson.remoting.ClassFilter
@@ -0,0 +1,2 @@
+# Model object classes, no deserialization logic
+com.amazonaws.regions.Region
\ No newline at end of file
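
For context: since the JEP-200 remoting hardening, a plugin can bundle META-INF/hudson.remoting.ClassFilter to whitelist additional classes for deserialization over the agent channel. To my understanding the format is one fully-qualified class name per line, with # introducing comments, exactly as the two lines above show; a hypothetical extension would simply add more names:

```
# assumed syntax: one fully-qualified class name per line, '#' for comments
com.amazonaws.regions.Region
com.amazonaws.regions.Regions
```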
diff --git a/src/main/resources/com/hyperic/hudson/plugin/S3BucketPublisher/config.jelly b/src/main/resources/com/hyperic/hudson/plugin/S3BucketPublisher/config.jelly
deleted file mode 100644
index 390f8855..00000000
--- a/src/main/resources/com/hyperic/hudson/plugin/S3BucketPublisher/config.jelly
+++ /dev/null
@@ -1,32 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-