diff --git a/minifi/minifi-nar-bundles/minifi-provenance-repository-bundle/minifi-provenance-repositories/src/main/java/org/apache/nifi/provenance/NoOpProvenanceRepository.java b/minifi/minifi-nar-bundles/minifi-provenance-repository-bundle/minifi-provenance-repositories/src/main/java/org/apache/nifi/provenance/NoOpProvenanceRepository.java index b369076b06bb..30a0870df493 100644 --- a/minifi/minifi-nar-bundles/minifi-provenance-repository-bundle/minifi-provenance-repositories/src/main/java/org/apache/nifi/provenance/NoOpProvenanceRepository.java +++ b/minifi/minifi-nar-bundles/minifi-provenance-repository-bundle/minifi-provenance-repositories/src/main/java/org/apache/nifi/provenance/NoOpProvenanceRepository.java @@ -25,7 +25,6 @@ import org.apache.nifi.provenance.search.SearchableField; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.Set; @@ -42,8 +41,8 @@ public class NoOpProvenanceRepository implements ProvenanceRepository { @Override public void initialize(EventReporter eventReporter, Authorizer authorizer, - ProvenanceAuthorizableFactory factory, IdentifierLookup identifierLookup) - throws IOException { + ProvenanceAuthorizableFactory factory, IdentifierLookup identifierLookup) + throws IOException { } @@ -69,13 +68,13 @@ public ProvenanceEventRecord getEvent(final long id, final NiFiUser user) throws @Override public List getEvents(long firstRecordId, int maxRecords) - throws IOException { + throws IOException { return emptyList(); } @Override public List getEvents(long firstRecordId, - int maxRecords, NiFiUser niFiUser) throws IOException { + int maxRecords, NiFiUser niFiUser) throws IOException { return emptyList(); } @@ -156,7 +155,7 @@ public long getContainerUsableSpace(String s) throws IOException { @Override public AsyncLineageSubmission retrieveLineageSubmission(final String lineageIdentifier, - final NiFiUser user) { + final NiFiUser user) { return null; } @@ -169,14 +168,4 @@ public AsyncLineageSubmission submitExpandParents(final long eventId, final NiFi public AsyncLineageSubmission submitExpandChildren(final long eventId, final NiFiUser user) { return null; } - - @Override - public List getPreviousEventIds(String flowFileUUID) { - return Collections.emptyList(); - } - - @Override - public void updatePreviousEventIds(ProvenanceEventRecord record, List previousIds) { - // Do nothing - } } diff --git a/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceEventRecord.java b/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceEventRecord.java index dd36be166afc..3fed9141ab5d 100644 --- a/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceEventRecord.java +++ b/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceEventRecord.java @@ -40,8 +40,6 @@ public interface ProvenanceEventRecord { */ List getPreviousEventIds(); - void setPreviousEventIds(List previousEventIds); - /** * @return the time at which this Provenance Event was created, as the * number of milliseconds since epoch diff --git a/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceEventRepository.java b/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceEventRepository.java index c1b314463460..b27945f089ca 100644 --- a/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceEventRepository.java +++ b/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceEventRepository.java @@ -92,20 +92,4 @@ public interface ProvenanceEventRepository { * @throws IOException if failure closing 
repository */ void close() throws IOException; - - /** - * Returns the previous provenance event IDs for the given FlowFile - * @param flowFileUUID the UUID of the FlowFile - * @return the previous event IDs for the given FlowFile - */ - List getPreviousEventIds(String flowFileUUID); - - /** - * Updates the previous provenance event IDs for the given event - * - * @param record The record for which to update the previous event IDs - * @param previousIds the list of previous event IDs to set for the record, or null to remove - */ - void updatePreviousEventIds(ProvenanceEventRecord record, List previousIds); - } diff --git a/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceReporter.java b/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceReporter.java index eeffc7fe93f6..3f66486fb8f8 100644 --- a/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceReporter.java +++ b/nifi-api/src/main/java/org/apache/nifi/provenance/ProvenanceReporter.java @@ -42,7 +42,7 @@ public interface ProvenanceReporter { * events to an external Enterprise-wide system that is then able to * correlate the SEND and RECEIVE events. */ - void receive(FlowFile flowFile, String transitUri); + void receive(FlowFile flowFile, String transitUri, final Relationship relationship); /** * Emits a Provenance Event of type @@ -61,7 +61,7 @@ public interface ProvenanceReporter { * system uses to refer to the data; if this value is non-null and is not a * URI, the prefix "urn:tdo:" will be used to form a URI. */ - void receive(FlowFile flowFile, String transitUri, String sourceSystemFlowFileIdentifier); + void receive(FlowFile flowFile, String transitUri, String sourceSystemFlowFileIdentifier, final Relationship relationship); /** * Emits a Provenance Event of type @@ -77,7 +77,7 @@ public interface ProvenanceReporter { * @param transmissionMillis the number of milliseconds taken to transfer * the data */ - void receive(FlowFile flowFile, String transitUri, long transmissionMillis); + void receive(FlowFile flowFile, String transitUri, long transmissionMillis, final Relationship relationship); /** * Emits a Provenance Event of type @@ -97,7 +97,7 @@ public interface ProvenanceReporter { * @param transmissionMillis the number of milliseconds taken to transfer * the data */ - void receive(FlowFile flowFile, String transitUri, String details, long transmissionMillis); + void receive(FlowFile flowFile, String transitUri, String details, long transmissionMillis, final Relationship relationship); /** * Emits a Provenance Event of type @@ -120,7 +120,7 @@ public interface ProvenanceReporter { * @param transmissionMillis the number of milliseconds taken to transfer * the data */ - void receive(FlowFile flowFile, String transitUri, String sourceSystemFlowFileIdentifier, String details, long transmissionMillis); + void receive(FlowFile flowFile, String transitUri, String sourceSystemFlowFileIdentifier, String details, long transmissionMillis, final Relationship relationship); /** * Emits a Provenance Event of type @@ -131,7 +131,7 @@ public interface ProvenanceReporter { * @param transitUri A URI that provides information about the System and * Protocol information over which the transfer occurred. */ - void fetch(FlowFile flowFile, String transitUri); + void fetch(FlowFile flowFile, String transitUri, final Relationship relationship); /** * Emits a Provenance Event of type @@ -143,7 +143,7 @@ public interface ProvenanceReporter { * Protocol information over which the transfer occurred. 
* @param transmissionMillis the number of milliseconds taken to transfer the data */ - void fetch(FlowFile flowFile, String transitUri, long transmissionMillis); + void fetch(FlowFile flowFile, String transitUri, long transmissionMillis, final Relationship relationship); /** * Emits a Provenance Event of type @@ -157,7 +157,7 @@ public interface ProvenanceReporter { * @param transmissionMillis the number of milliseconds taken to transfer * the data */ - void fetch(FlowFile flowFile, String transitUri, String details, long transmissionMillis); + void fetch(FlowFile flowFile, String transitUri, String details, long transmissionMillis, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#SEND SEND} @@ -172,7 +172,7 @@ public interface ProvenanceReporter { * events to an external Enterprise-wide system that is then able to * correlate the SEND and RECEIVE events. */ - void send(FlowFile flowFile, String transitUri); + void send(FlowFile flowFile, String transitUri, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#SEND SEND} @@ -189,7 +189,7 @@ public interface ProvenanceReporter { * @param details additional details related to the SEND event, such as a * remote system's Distinguished Name */ - void send(FlowFile flowFile, String transitUri, String details); + void send(FlowFile flowFile, String transitUri, String details, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#SEND SEND} @@ -206,7 +206,7 @@ public interface ProvenanceReporter { * @param transmissionMillis the number of milliseconds spent sending the * data to the remote system */ - void send(FlowFile flowFile, String transitUri, long transmissionMillis); + void send(FlowFile flowFile, String transitUri, long transmissionMillis, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#SEND SEND} @@ -225,7 +225,7 @@ public interface ProvenanceReporter { * @param transmissionMillis the number of milliseconds spent sending the * data to the remote system */ - void send(FlowFile flowFile, String transitUri, String details, long transmissionMillis); + void send(FlowFile flowFile, String transitUri, String details, long transmissionMillis, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#SEND SEND} @@ -245,7 +245,7 @@ public interface ProvenanceReporter { * ProvenanceReporter is associated is rolled back. Otherwise, the Event * will be recorded only on a successful session commit. */ - void send(FlowFile flowFile, String transitUri, boolean force); + void send(FlowFile flowFile, String transitUri, boolean force, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#SEND SEND} @@ -267,7 +267,7 @@ public interface ProvenanceReporter { * ProvenanceReporter is associated is rolled back. Otherwise, the Event * will be recorded only on a successful session commit. */ - void send(FlowFile flowFile, String transitUri, String details, boolean force); + void send(FlowFile flowFile, String transitUri, String details, boolean force, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#SEND SEND} @@ -289,7 +289,7 @@ public interface ProvenanceReporter { * ProvenanceReporter is associated is rolled back. Otherwise, the Event * will be recorded only on a successful session commit. 
*/ - void send(FlowFile flowFile, String transitUri, long transmissionMillis, boolean force); + void send(FlowFile flowFile, String transitUri, long transmissionMillis, boolean force, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#SEND SEND} @@ -313,7 +313,7 @@ public interface ProvenanceReporter { * ProvenanceReporter is associated is rolled back. Otherwise, the Event * will be recorded only on a successful session commit. */ - void send(FlowFile flowFile, String transitUri, String details, long transmissionMillis, boolean force); + void send(FlowFile flowFile, String transitUri, String details, long transmissionMillis, boolean force, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#REMOTE_INVOCATION} @@ -325,7 +325,7 @@ public interface ProvenanceReporter { * Protocol information over which the invocation occurred. The intent of this * field is to identify they type and target resource or object of the invocation. */ - void invokeRemoteProcess(FlowFile flowFile, String transitUri); + void invokeRemoteProcess(FlowFile flowFile, String transitUri, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#REMOTE_INVOCATION} @@ -339,7 +339,22 @@ public interface ProvenanceReporter { * @param details additional details related to the REMOTE_INVOCATION event, such as an * explanation of the invoked process. */ - void invokeRemoteProcess(FlowFile flowFile, String transitUri, String details); + void invokeRemoteProcess(FlowFile flowFile, String transitUri, String details, final Relationship relationship); + + /** + * Emits a Provenance Event of type {@link ProvenanceEventType#REMOTE_INVOCATION} + * that indicates a remote invocation is requested to an external endpoint using + * the given FlowFile. The external endpoint may exist in a remote or a local system, + * but is external to NiFi. + * @param flowFile the FlowFile that was used to make the remote invocation + * @param transitUri A URI that provides information about the System and + * Protocol information over which the invocation occurred. The intent of this + * field is to identify the type and target resource or object of the invocation. + * @param details additional details related to the REMOTE_INVOCATION event, such as an + * explanation of the invoked process. + * @param transmissionMillis the number of milliseconds taken to perform the invocation + */ + void invokeRemoteProcess(FlowFile flowFile, String transitUri, String details, long transmissionMillis, final Relationship relationship); /** * Emits a Provenance Event of type @@ -354,7 +368,7 @@ public interface ProvenanceReporter { * @param alternateIdentifier the identifier that the alternate system uses * when referring to the data that is encompassed by this FlowFile */ - void associate(FlowFile flowFile, String alternateIdentifierNamespace, String alternateIdentifier); + void associate(FlowFile flowFile, String alternateIdentifierNamespace, String alternateIdentifier, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#FORK FORK} @@ -366,8 +380,21 @@ public interface ProvenanceReporter { * @param parent the FlowFile from which the children are derived * @param children the FlowFiles that are derived from the parent. */ + // TODO add relationship if possible void fork(FlowFile parent, Collection children); + /** + * Emits a Provenance Event of type {@link ProvenanceEventType#FORK FORK} + * that establishes that the given parent was split into multiple child + * FlowFiles.
In general, this method does not need to be called by + * Processors, as the ProcessSession will handle this automatically for you + * when calling {@link ProcessSession#create(FlowFile)}. + * + * @param parent the FlowFile from which the children are derived + * @param children the FlowFiles that are derived from the parent. + */ + void fork(FlowFile parent, Collection children, final Relationship relationship); + /** * Emits a Provenance Event of type {@link ProvenanceEventType#FORK FORK} * that establishes that the given parent was split into multiple child @@ -379,7 +406,7 @@ public interface ProvenanceReporter { * @param children the FlowFiles that are derived from the parent. * @param details any details pertinent to the fork */ - void fork(FlowFile parent, Collection children, String details); + void fork(FlowFile parent, Collection children, String details, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#FORK FORK} @@ -393,7 +420,7 @@ public interface ProvenanceReporter { * @param forkDuration the number of milliseconds that it took to perform * the task */ - void fork(FlowFile parent, Collection children, long forkDuration); + void fork(FlowFile parent, Collection children, long forkDuration, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#FORK FORK} @@ -408,7 +435,7 @@ public interface ProvenanceReporter { * @param forkDuration the number of milliseconds that it took to perform * the task */ - void fork(FlowFile parent, Collection children, String details, long forkDuration); + void fork(FlowFile parent, Collection children, String details, long forkDuration, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#JOIN JOIN} @@ -421,8 +448,22 @@ public interface ProvenanceReporter { * child * @param child the FlowFile that is being created by joining the parents */ + // TODO include relationship if possible void join(Collection parents, FlowFile child); + /** + * Emits a Provenance Event of type {@link ProvenanceEventType#JOIN JOIN} + * that establishes that the given parents were joined together to create a + * new child FlowFile. In general, this method does not need to be called by + * Processors, as the ProcessSession will handle this automatically for you + * when calling {@link ProcessSession#create(FlowFile)}. 
+ * + * @param parents the FlowFiles that are being joined together to create the + * child + * @param child the FlowFile that is being created by joining the parents + */ + void join(Collection parents, FlowFile child, final Relationship relationship); + /** * Emits a Provenance Event of type {@link ProvenanceEventType#JOIN JOIN} * that establishes that the given parents were joined together to create a @@ -435,7 +476,7 @@ public interface ProvenanceReporter { * @param child the FlowFile that is being created by joining the parents * @param details any details pertinent to the event */ - void join(Collection parents, FlowFile child, String details); + void join(Collection parents, FlowFile child, String details, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#JOIN JOIN} @@ -450,7 +491,7 @@ public interface ProvenanceReporter { * @param joinDuration the number of milliseconds that it took to join the * FlowFiles */ - void join(Collection parents, FlowFile child, long joinDuration); + void join(Collection parents, FlowFile child, long joinDuration, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#JOIN JOIN} @@ -466,7 +507,7 @@ public interface ProvenanceReporter { * @param joinDuration the number of milliseconds that it took to join the * FlowFiles */ - void join(Collection parents, FlowFile child, String details, long joinDuration); + void join(Collection parents, FlowFile child, String details, long joinDuration, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#CLONE CLONE} @@ -488,8 +529,9 @@ public interface ProvenanceReporter { * the contents of a FlowFile are modified. * * @param flowFile the FlowFile whose content is being modified + * @param relationship the Relationship to which the flowFile was transferred */ - void modifyContent(FlowFile flowFile); + void modifyContent(FlowFile flowFile, final Relationship relationship); /** * Emits a Provenance Event of type @@ -503,8 +545,9 @@ public interface ProvenanceReporter { * modified. 
Details should not be specified if they can be inferred by * other information in the event, such as the name of the Processor, as * specifying this information will add undue overhead + * @param relationship the Relationship to which the flowFile was transferred */ - void modifyContent(FlowFile flowFile, String details); + void modifyContent(FlowFile flowFile, String details, final Relationship relationship); /** * Emits a Provenance Event of type @@ -516,8 +559,9 @@ public interface ProvenanceReporter { * @param flowFile the FlowFile whose content is being modified * @param processingMillis the number of milliseconds spent processing the * FlowFile + * @param relationship the Relationship to which the flowFile was transferred */ - void modifyContent(FlowFile flowFile, long processingMillis); + void modifyContent(FlowFile flowFile, long processingMillis, final Relationship relationship); /** * Emits a Provenance Event of type @@ -533,8 +577,9 @@ public interface ProvenanceReporter { * specifying this information will add undue overhead * @param processingMillis the number of milliseconds spent processing the * FlowFile + * @param relationship the Relationship to which the flowFile was transferred */ - void modifyContent(FlowFile flowFile, String details, long processingMillis); + void modifyContent(FlowFile flowFile, String details, long processingMillis, final Relationship relationship); /** * Emits a Provenance Event of type @@ -551,8 +596,9 @@ public interface ProvenanceReporter { * significant amount of overhead for storage and processing. * * @param flowFile the FlowFile whose attributes were modified + * @param relationship the Relationship to which the flowFile was transferred */ - void modifyAttributes(FlowFile flowFile); + void modifyAttributes(FlowFile flowFile, final Relationship relationship); /** * Emits a Provenance Event of type @@ -571,8 +617,9 @@ public interface ProvenanceReporter { * @param flowFile the FlowFile whose attributes were modified * @param details any details should be provided about the attribute * modification + * @param relationship the Relationship to which the flowFile was transferred */ - void modifyAttributes(FlowFile flowFile, String details); + void modifyAttributes(FlowFile flowFile, String details, final Relationship relationship); /** * Emits a Provenance Event of type {@link ProvenanceEventType#ROUTE ROUTE} @@ -662,7 +709,8 @@ public interface ProvenanceReporter { * * @param flowFile the FlowFile that was created + * @param relationship the Relationship to which the created FlowFile was transferred */ - void create(FlowFile flowFile); + void create(FlowFile flowFile, final Relationship relationship); /** * Emits a Provenance Event of type @@ -673,8 +720,9 @@ public interface ProvenanceReporter { * * @param flowFile the FlowFile that was created * @param details any relevant details about the CREATE event + * @param relationship the Relationship to which the created FlowFile was transferred */ - void create(FlowFile flowFile, String details); + void create(FlowFile flowFile, String details, final Relationship relationship); /** * @return the number of FlowFiles for which there was a RECEIVE event diff --git a/nifi-framework-api/src/main/java/org/apache/nifi/provenance/AbstractProvenanceRepository.java b/nifi-api/src/main/java/org/apache/nifi/provenance/UpdateableProvenanceEventRecord.java similarity index 56% rename from nifi-framework-api/src/main/java/org/apache/nifi/provenance/AbstractProvenanceRepository.java rename to nifi-api/src/main/java/org/apache/nifi/provenance/UpdateableProvenanceEventRecord.java index a05590c46f94..978694d1b30a
100644 --- a/nifi-framework-api/src/main/java/org/apache/nifi/provenance/AbstractProvenanceRepository.java +++ b/nifi-api/src/main/java/org/apache/nifi/provenance/UpdateableProvenanceEventRecord.java @@ -16,25 +16,10 @@ */ package org.apache.nifi.provenance; -import java.util.HashMap; import java.util.List; -import java.util.Map; -public abstract class AbstractProvenanceRepository implements ProvenanceRepository { +public interface UpdateableProvenanceEventRecord extends ProvenanceEventRecord { - protected final Map<String, List<Long>> previousEventIdsMap = new HashMap<>(); - - @Override - public List<Long> getPreviousEventIds(String flowFileUUID) { - return previousEventIdsMap.get(flowFileUUID); - } - - @Override - public void updatePreviousEventIds(ProvenanceEventRecord record, List<Long> previousIds) { - if (previousIds == null) { - previousEventIdsMap.remove(record.getFlowFileUuid()); - } else { - previousEventIdsMap.put(record.getFlowFileUuid(), previousIds); - } - } + void setEventId(final long eventId); + void setPreviousEventIds(List<Long> previousEventIds); } diff --git a/nifi-commons/nifi-data-provenance-utils/src/main/java/org/apache/nifi/provenance/PlaceholderProvenanceEvent.java b/nifi-commons/nifi-data-provenance-utils/src/main/java/org/apache/nifi/provenance/PlaceholderProvenanceEvent.java index d0202bbebd73..5caa82ca1d43 100644 --- a/nifi-commons/nifi-data-provenance-utils/src/main/java/org/apache/nifi/provenance/PlaceholderProvenanceEvent.java +++ b/nifi-commons/nifi-data-provenance-utils/src/main/java/org/apache/nifi/provenance/PlaceholderProvenanceEvent.java @@ -25,9 +25,9 @@ * A Provenance Event that is used to replace another Provenance Event when authorizations * are not granted for the original Provenance Event */ -public class PlaceholderProvenanceEvent implements ProvenanceEventRecord { +public class PlaceholderProvenanceEvent implements UpdateableProvenanceEventRecord { private final String componentId; - private final long eventId; + private long eventId; private List<Long> previousEventIds; private final long eventTime; private final String flowFileUuid; @@ -45,6 +45,11 @@ public long getEventId() { return eventId; } + @Override + public void setEventId(long eventId) { + this.eventId = eventId; + } + @Override public List<Long> getPreviousEventIds() { return previousEventIds; diff --git a/nifi-commons/nifi-data-provenance-utils/src/main/java/org/apache/nifi/provenance/StandardProvenanceEventRecord.java b/nifi-commons/nifi-data-provenance-utils/src/main/java/org/apache/nifi/provenance/StandardProvenanceEventRecord.java index 5b0737a4ff13..940ef20354b3 100644 --- a/nifi-commons/nifi-data-provenance-utils/src/main/java/org/apache/nifi/provenance/StandardProvenanceEventRecord.java +++ b/nifi-commons/nifi-data-provenance-utils/src/main/java/org/apache/nifi/provenance/StandardProvenanceEventRecord.java @@ -29,7 +29,7 @@ /** * Holder for provenance relevant information */ -public class StandardProvenanceEventRecord implements ProvenanceEventRecord { +public class StandardProvenanceEventRecord implements UpdateableProvenanceEventRecord { private final long eventTime; private final long entryDate; @@ -124,7 +124,8 @@ public long getStorageByteOffset() { return storageByteOffset; } - void setEventId(final long eventId) { + @Override + public void setEventId(final long eventId) { this.eventId = eventId; } @@ -138,7 +139,6 @@ public List<Long> getPreviousEventIds() { return previousEventIds; } - @Override public void setPreviousEventIds(List<Long> previousEventIds) { this.previousEventIds = previousEventIds; } @@ -315,7 +315,8 @@
public int hashCode() { } return -37423 + 3 * componentId.hashCode() + (transitUri == null ? 0 : 41 * transitUri.hashCode()) - + (relationship == null ? 0 : 47 * relationship.hashCode()) + 44 * eventTypeCode + //+ (relationship == null ? 0 : 47 * relationship.hashCode()) + + 44 * eventTypeCode + 47 * getChildUuids().hashCode() + 47 * getParentUuids().hashCode(); } @@ -362,10 +363,6 @@ public boolean equals(final Object obj) { return false; } - if (different(relationship, other.relationship)) { - return false; - } - return !(eventType == ProvenanceEventType.REPLAY && eventTime != other.getEventTime()); } @@ -430,7 +427,7 @@ public String toString() { + ", uuid=" + uuid + ", fileSize=" + contentSize + ", componentId=" + componentId - + ", componentType" + componentType + + ", componentType=" + componentType + ", transitUri=" + transitUri + ", sourceSystemFlowFileIdentifier=" + sourceSystemFlowFileIdentifier + ", parentUuids=" + parentUuids @@ -534,7 +531,6 @@ public Builder fromEvent(final ProvenanceEventRecord event) { } previousEventIds = event.getPreviousEventIds(); - return this; } diff --git a/nifi-mock/src/main/java/org/apache/nifi/provenance/MockProvenanceRepository.java b/nifi-mock/src/main/java/org/apache/nifi/provenance/MockProvenanceRepository.java index 46b1306d868a..77732cac22a4 100644 --- a/nifi-mock/src/main/java/org/apache/nifi/provenance/MockProvenanceRepository.java +++ b/nifi-mock/src/main/java/org/apache/nifi/provenance/MockProvenanceRepository.java @@ -33,7 +33,7 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicLong; -public class MockProvenanceRepository extends AbstractProvenanceRepository { +public class MockProvenanceRepository implements ProvenanceRepository { private final List records = new ArrayList<>(); private final AtomicLong idGenerator = new AtomicLong(0L); diff --git a/nifi-mock/src/main/java/org/apache/nifi/util/MockProvenanceReporter.java b/nifi-mock/src/main/java/org/apache/nifi/util/MockProvenanceReporter.java index ae808a4fe6e8..4c09654cbbd0 100644 --- a/nifi-mock/src/main/java/org/apache/nifi/util/MockProvenanceReporter.java +++ b/nifi-mock/src/main/java/org/apache/nifi/util/MockProvenanceReporter.java @@ -110,36 +110,38 @@ ProvenanceEventRecord generateDropEvent(final FlowFile flowFile, final String de } @Override - public void receive(final FlowFile flowFile, final String transitUri) { - receive(flowFile, transitUri, -1L); + public void receive(final FlowFile flowFile, final String transitUri, final Relationship relationship) { + receive(flowFile, transitUri, -1L, relationship); } @Override - public void receive(FlowFile flowFile, String transitUri, String sourceSystemFlowFileIdentifier) { - receive(flowFile, transitUri, sourceSystemFlowFileIdentifier, -1L); + public void receive(FlowFile flowFile, String transitUri, String sourceSystemFlowFileIdentifier, final Relationship relationship) { + receive(flowFile, transitUri, sourceSystemFlowFileIdentifier, -1L, relationship); } @Override - public void receive(final FlowFile flowFile, final String transitUri, final long transmissionMillis) { - receive(flowFile, transitUri, null, transmissionMillis); + public void receive(final FlowFile flowFile, final String transitUri, final long transmissionMillis, final Relationship relationship) { + receive(flowFile, transitUri, null, transmissionMillis, relationship); } @Override - public void receive(final FlowFile flowFile, final String transitUri, final String sourceSystemFlowFileIdentifier, final long transmissionMillis) { - receive(flowFile, 
transitUri, sourceSystemFlowFileIdentifier, null, transmissionMillis); + public void receive(final FlowFile flowFile, final String transitUri, final String sourceSystemFlowFileIdentifier, final long transmissionMillis, final Relationship relationship) { + receive(flowFile, transitUri, sourceSystemFlowFileIdentifier, null, transmissionMillis, relationship); } @Override - public void receive(final FlowFile flowFile, final String transitUri, final String sourceSystemFlowFileIdentifier, final String details, final long transmissionMillis) { + public void receive(final FlowFile flowFile, final String transitUri, final String sourceSystemFlowFileIdentifier, + final String details, final long transmissionMillis, final Relationship relationship) { verifyFlowFileKnown(flowFile); try { final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.RECEIVE) - .setTransitUri(transitUri) - .setSourceSystemFlowFileIdentifier(sourceSystemFlowFileIdentifier) - .setEventDuration(transmissionMillis) - .setDetails(details) - .build(); + .setTransitUri(transitUri) + .setSourceSystemFlowFileIdentifier(sourceSystemFlowFileIdentifier) + .setEventDuration(transmissionMillis) + .setDetails(details) + .setRelationship(relationship) + .build(); events.add(record); } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); @@ -150,25 +152,26 @@ public void receive(final FlowFile flowFile, final String transitUri, final Stri } @Override - public void fetch(final FlowFile flowFile, final String transitUri) { - fetch(flowFile, transitUri, -1L); + public void fetch(final FlowFile flowFile, final String transitUri, final Relationship relationship) { + fetch(flowFile, transitUri, -1L, relationship); } @Override - public void fetch(final FlowFile flowFile, final String transitUri, final long transmissionMillis) { - fetch(flowFile, transitUri, null, transmissionMillis); + public void fetch(final FlowFile flowFile, final String transitUri, final long transmissionMillis, final Relationship relationship) { + fetch(flowFile, transitUri, null, transmissionMillis, relationship); } @Override - public void fetch(final FlowFile flowFile, final String transitUri, final String details, final long transmissionMillis) { + public void fetch(final FlowFile flowFile, final String transitUri, final String details, final long transmissionMillis, final Relationship relationship) { verifyFlowFileKnown(flowFile); try { final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.FETCH) - .setTransitUri(transitUri) - .setEventDuration(transmissionMillis) - .setDetails(details) - .build(); + .setTransitUri(transitUri) + .setEventDuration(transmissionMillis) + .setDetails(details) + .setRelationship(relationship) + .build(); events.add(record); } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); @@ -179,39 +182,44 @@ public void fetch(final FlowFile flowFile, final String transitUri, final String } @Override - public void send(final FlowFile flowFile, final String transitUri, final long transmissionMillis) { - send(flowFile, transitUri, transmissionMillis, true); + public void send(final FlowFile flowFile, final String transitUri, final long transmissionMillis, final Relationship relationship) { + send(flowFile, transitUri, transmissionMillis, true, relationship); } @Override - public void send(final FlowFile flowFile, final String transitUri) { - send(flowFile, transitUri, null, -1L, true); + public void send(final FlowFile flowFile, final String 
transitUri, final Relationship relationship) { + send(flowFile, transitUri, null, -1L, true, relationship); } @Override - public void send(final FlowFile flowFile, final String transitUri, final String details) { - send(flowFile, transitUri, details, -1L, true); + public void send(final FlowFile flowFile, final String transitUri, final String details, final Relationship relationship) { + send(flowFile, transitUri, details, -1L, true, relationship); } @Override - public void send(final FlowFile flowFile, final String transitUri, final long transmissionMillis, final boolean force) { - send(flowFile, transitUri, null, transmissionMillis, force); + public void send(final FlowFile flowFile, final String transitUri, final long transmissionMillis, final boolean force, final Relationship relationship) { + send(flowFile, transitUri, null, transmissionMillis, force, relationship); } @Override - public void send(final FlowFile flowFile, final String transitUri, final String details, final boolean force) { - send(flowFile, transitUri, details, -1L, force); + public void send(final FlowFile flowFile, final String transitUri, final String details, final boolean force, final Relationship relationship) { + send(flowFile, transitUri, details, -1L, force, relationship); } @Override - public void send(final FlowFile flowFile, final String transitUri, final String details, final long transmissionMillis) { - send(flowFile, transitUri, details, transmissionMillis, true); + public void send(final FlowFile flowFile, final String transitUri, final String details, final long transmissionMillis, final Relationship relationship) { + send(flowFile, transitUri, details, transmissionMillis, true, relationship); } @Override - public void send(final FlowFile flowFile, final String transitUri, final String details, final long transmissionMillis, final boolean force) { + public void send(final FlowFile flowFile, final String transitUri, final String details, final long transmissionMillis, final boolean force, final Relationship relationship) { try { - final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.SEND).setTransitUri(transitUri).setEventDuration(transmissionMillis).setDetails(details).build(); + final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.SEND) + .setTransitUri(transitUri) + .setEventDuration(transmissionMillis) + .setDetails(details) + .setRelationship(relationship) + .build(); if (force) { sharedSessionState.addProvenanceEvents(Collections.singleton(record)); } else { @@ -226,20 +234,29 @@ public void send(final FlowFile flowFile, final String transitUri, final String } @Override - public void send(final FlowFile flowFile, final String transitUri, final boolean force) { - send(flowFile, transitUri, -1L, true); + public void send(final FlowFile flowFile, final String transitUri, final boolean force, final Relationship relationship) { + send(flowFile, transitUri, -1L, force, relationship); } @Override - public void invokeRemoteProcess(final FlowFile flowFile, final String transitUri) { - invokeRemoteProcess(flowFile, transitUri, null); + public void invokeRemoteProcess(final FlowFile flowFile, final String transitUri, final Relationship relationship) { + invokeRemoteProcess(flowFile, transitUri, null, relationship); } @Override - public void invokeRemoteProcess(FlowFile flowFile, String transitUri, String details) { + public void invokeRemoteProcess(FlowFile flowFile, String transitUri, String details, final Relationship relationship) { + invokeRemoteProcess(flowFile, 
transitUri, details, -1L, relationship); + } + + @Override + public void invokeRemoteProcess(FlowFile flowFile, String transitUri, String details, long transmissionMillis, final Relationship relationship) { try { final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.REMOTE_INVOCATION) - .setTransitUri(transitUri).setDetails(details).build(); + .setTransitUri(transitUri) + .setDetails(details) + .setEventDuration(transmissionMillis) + .setRelationship(relationship) + .build(); events.add(record); } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); @@ -250,7 +267,7 @@ public void invokeRemoteProcess(FlowFile flowFile, String transitUri, String det } @Override - public void associate(final FlowFile flowFile, final String alternateIdentifierNamespace, final String alternateIdentifier) { + public void associate(final FlowFile flowFile, final String alternateIdentifierNamespace, final String alternateIdentifier, final Relationship relationship) { try { String trimmedNamespace = alternateIdentifierNamespace.trim(); if (trimmedNamespace.endsWith(":")) { @@ -266,7 +283,9 @@ public void associate(final FlowFile flowFile, final String alternateIdentifierN } final String alternateIdentifierUri = trimmedNamespace + ":" + trimmedIdentifier; - final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.ADDINFO).setAlternateIdentifierUri(alternateIdentifierUri).build(); + final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.ADDINFO) + .setRelationship(relationship) + .setAlternateIdentifierUri(alternateIdentifierUri).build(); events.add(record); } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); @@ -308,25 +327,29 @@ void expire(final FlowFile flowFile, final String details) { @Override public void fork(final FlowFile parent, final Collection children) { - fork(parent, children, null, -1L); + fork(parent, children, null, -1L, null); + } + @Override + public void fork(final FlowFile parent, final Collection children, final Relationship relationship) { + fork(parent, children, null, -1L, relationship); } @Override - public void fork(final FlowFile parent, final Collection children, final long forkDuration) { - fork(parent, children, null, forkDuration); + public void fork(final FlowFile parent, final Collection children, final long forkDuration, final Relationship relationship) { + fork(parent, children, null, forkDuration, relationship); } @Override - public void fork(final FlowFile parent, final Collection children, final String details) { - fork(parent, children, details, -1L); + public void fork(final FlowFile parent, final Collection children, final String details, final Relationship relationship) { + fork(parent, children, details, -1L, relationship); } @Override - public void fork(final FlowFile parent, final Collection children, final String details, final long forkDuration) { + public void fork(final FlowFile parent, final Collection children, final String details, final long forkDuration, final Relationship relationship) { verifyFlowFileKnown(parent); try { - final ProvenanceEventBuilder eventBuilder = build(parent, ProvenanceEventType.FORK); + final ProvenanceEventBuilder eventBuilder = build(parent, ProvenanceEventType.FORK).setRelationship(relationship); eventBuilder.addParentFlowFile(parent); for (final FlowFile child : children) { eventBuilder.addChildFlowFile(child); @@ -351,27 +374,34 @@ public void fork(final FlowFile parent, final Collection children, 
fin @Override public void join(final Collection parents, final FlowFile child) { - join(parents, child, null, -1L); + join(parents, child, null, -1L, null); + } + + @Override + public void join(final Collection parents, final FlowFile child, final Relationship relationship) { + join(parents, child, null, -1L, relationship); } @Override - public void join(final Collection parents, final FlowFile child, final long joinDuration) { - join(parents, child, null, joinDuration); + public void join(final Collection parents, final FlowFile child, final long joinDuration, final Relationship relationship) { + join(parents, child, null, joinDuration, relationship); } @Override - public void join(final Collection parents, final FlowFile child, final String details) { - join(parents, child, details, -1L); + public void join(final Collection parents, final FlowFile child, final String details, final Relationship relationship) { + join(parents, child, details, -1L, relationship); } @Override - public void join(final Collection parents, final FlowFile child, final String details, final long joinDuration) { + public void join(final Collection parents, final FlowFile child, final String details, final long joinDuration, final Relationship relationship) { verifyFlowFileKnown(child); try { final ProvenanceEventBuilder eventBuilder = build(child, ProvenanceEventType.JOIN); eventBuilder.addChildFlowFile(child); eventBuilder.setDetails(details); + eventBuilder.setEventDuration(joinDuration); + eventBuilder.setRelationship(relationship); for (final FlowFile parent : parents) { eventBuilder.addParentFlowFile(parent); @@ -404,26 +434,30 @@ public void clone(final FlowFile parent, final FlowFile child) { } @Override - public void modifyContent(final FlowFile flowFile) { - modifyContent(flowFile, null, -1L); + public void modifyContent(final FlowFile flowFile, final Relationship relationship) { + modifyContent(flowFile, null, -1L, relationship); } @Override - public void modifyContent(final FlowFile flowFile, final String details) { - modifyContent(flowFile, details, -1L); + public void modifyContent(final FlowFile flowFile, final String details, final Relationship relationship) { + modifyContent(flowFile, details, -1L, relationship); } @Override - public void modifyContent(final FlowFile flowFile, final long processingMillis) { - modifyContent(flowFile, null, processingMillis); + public void modifyContent(final FlowFile flowFile, final long processingMillis, final Relationship relationship) { + modifyContent(flowFile, null, processingMillis, relationship); } @Override - public void modifyContent(final FlowFile flowFile, final String details, final long processingMillis) { + public void modifyContent(final FlowFile flowFile, final String details, final long processingMillis, final Relationship relationship) { verifyFlowFileKnown(flowFile); try { - final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.CONTENT_MODIFIED).setEventDuration(processingMillis).setDetails(details).build(); + final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.CONTENT_MODIFIED) + .setEventDuration(processingMillis) + .setDetails(details) + .setRelationship(relationship) + .build(); events.add(record); } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); @@ -434,16 +468,19 @@ public void modifyContent(final FlowFile flowFile, final String details, final l } @Override - public void modifyAttributes(final FlowFile flowFile) { + public void modifyAttributes(final 
FlowFile flowFile, final Relationship relationship) { - modifyAttributes(flowFile, null); + modifyAttributes(flowFile, null, relationship); } @Override - public void modifyAttributes(final FlowFile flowFile, final String details) { + public void modifyAttributes(final FlowFile flowFile, final String details, final Relationship relationship) { verifyFlowFileKnown(flowFile); try { - final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.ATTRIBUTES_MODIFIED).setDetails(details).build(); + final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.ATTRIBUTES_MODIFIED) + .setDetails(details) + .setRelationship(relationship) + .build(); events.add(record); } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); @@ -484,16 +521,19 @@ public void route(final FlowFile flowFile, final Relationship relationship, fina } @Override - public void create(final FlowFile flowFile) { - create(flowFile, null); + public void create(final FlowFile flowFile, final Relationship relationship) { + create(flowFile, null, relationship); } @Override - public void create(final FlowFile flowFile, final String details) { + public void create(final FlowFile flowFile, final String details, final Relationship relationship) { verifyFlowFileKnown(flowFile); try { - final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.CREATE).setDetails(details).build(); + final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.CREATE) + .setDetails(details) + .setRelationship(relationship) + .build(); events.add(record); } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); diff --git a/nifi-nar-bundles/nifi-airtable-bundle/nifi-airtable-processors/src/main/java/org/apache/nifi/processors/airtable/QueryAirtableTable.java b/nifi-nar-bundles/nifi-airtable-bundle/nifi-airtable-processors/src/main/java/org/apache/nifi/processors/airtable/QueryAirtableTable.java index f692a63a8279..ac79b3477bbf 100644 --- a/nifi-nar-bundles/nifi-airtable-bundle/nifi-airtable-processors/src/main/java/org/apache/nifi/processors/airtable/QueryAirtableTable.java +++ b/nifi-nar-bundles/nifi-airtable-bundle/nifi-airtable-processors/src/main/java/org/apache/nifi/processors/airtable/QueryAirtableTable.java @@ -323,7 +323,7 @@ private void addFragmentAttributesToFlowFiles(final ProcessSession session, fina private void transferFlowFiles(final ProcessSession session, final List flowFiles, final int totalRecordCount) { final String transitUri = airtableRestService.createUriBuilder().build().toString(); for (final FlowFile flowFile : flowFiles) { - session.getProvenanceReporter().receive(flowFile, transitUri); + session.getProvenanceReporter().receive(flowFile, transitUri, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } session.adjustCounter("Records Processed", totalRecordCount, false); diff --git a/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/ConsumeAMQP.java b/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/ConsumeAMQP.java index 87cefc7922da..05340ac8619c 100644 --- a/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/ConsumeAMQP.java +++ b/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/ConsumeAMQP.java @@ -174,7 +174,7 @@ protected void processResource(final Connection connection, final AMQPConsumer c context.getProperty(HEADER_SEPARATOR).toString());
flowFile = session.putAllAttributes(flowFile, attributes); - session.getProvenanceReporter().receive(flowFile, connection.toString() + "/" + context.getProperty(QUEUE).getValue()); + session.getProvenanceReporter().receive(flowFile, connection.toString() + "/" + context.getProperty(QUEUE).getValue(), REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); lastReceived = response; } diff --git a/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/PublishAMQP.java b/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/PublishAMQP.java index 6a8c7ac64531..271d1ef22a5d 100644 --- a/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/PublishAMQP.java +++ b/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/main/java/org/apache/nifi/amqp/processors/PublishAMQP.java @@ -177,7 +177,7 @@ protected void processResource(final Connection connection, final AMQPPublisher } session.transfer(flowFile, REL_SUCCESS); - session.getProvenanceReporter().send(flowFile, connection.toString() + "/E:" + exchange + "/RK:" + routingKey); + session.getProvenanceReporter().send(flowFile, connection.toString() + "/E:" + exchange + "/RK:" + routingKey, REL_SUCCESS); } diff --git a/nifi-nar-bundles/nifi-atlas-bundle/nifi-atlas-reporting-task/src/test/java/org/apache/nifi/atlas/reporting/SimpleProvenanceRecord.java b/nifi-nar-bundles/nifi-atlas-bundle/nifi-atlas-reporting-task/src/test/java/org/apache/nifi/atlas/reporting/SimpleProvenanceRecord.java index 515e4a590db9..fadad155951d 100644 --- a/nifi-nar-bundles/nifi-atlas-bundle/nifi-atlas-reporting-task/src/test/java/org/apache/nifi/atlas/reporting/SimpleProvenanceRecord.java +++ b/nifi-nar-bundles/nifi-atlas-bundle/nifi-atlas-reporting-task/src/test/java/org/apache/nifi/atlas/reporting/SimpleProvenanceRecord.java @@ -18,12 +18,13 @@ import org.apache.nifi.provenance.ProvenanceEventRecord; import org.apache.nifi.provenance.ProvenanceEventType; +import org.apache.nifi.provenance.UpdateableProvenanceEventRecord; import java.util.HashMap; import java.util.List; import java.util.Map; -public class SimpleProvenanceRecord implements ProvenanceEventRecord { +public class SimpleProvenanceRecord implements UpdateableProvenanceEventRecord { private long eventId; private List previousEventIds; private String componentId; @@ -53,6 +54,7 @@ public void setEventId(long eventId) { this.eventId = eventId; } + @Override public void setPreviousEventIds(List previousEventIds) { this.previousEventIds = previousEventIds; } diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/src/main/java/org/apache/nifi/processors/aws/AbstractAWSProcessor.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/src/main/java/org/apache/nifi/processors/aws/AbstractAWSProcessor.java index fb88c0725104..9036df891791 100644 --- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/src/main/java/org/apache/nifi/processors/aws/AbstractAWSProcessor.java +++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/src/main/java/org/apache/nifi/processors/aws/AbstractAWSProcessor.java @@ -215,7 +215,7 @@ protected Collection customValidate(final ValidationContext va } if (proxyUserSet && !proxyHostSet) { - validationResults.add(new ValidationResult.Builder().subject("Proxy").valid(false).explanation("If Proxy Username or Proxy Password").build()); + validationResults.add(new 
ValidationResult.Builder().subject("Proxy").valid(false).explanation("If Proxy Username is set then Proxy Host and Proxy Port must be set").build()); } ProxyConfiguration.validateProxySpec(validationContext, validationResults, PROXY_SPECS); diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/src/main/java/org/apache/nifi/processors/aws/wag/AbstractAWSGatewayApiProcessor.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/src/main/java/org/apache/nifi/processors/aws/wag/AbstractAWSGatewayApiProcessor.java index a7a23951a263..37f94a2d3f33 100644 --- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/src/main/java/org/apache/nifi/processors/aws/wag/AbstractAWSGatewayApiProcessor.java +++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/src/main/java/org/apache/nifi/processors/aws/wag/AbstractAWSGatewayApiProcessor.java @@ -71,7 +71,7 @@ * This class is the base class for invoking aws gateway api endpoints */ public abstract class AbstractAWSGatewayApiProcessor extends - AbstractAWSCredentialsProviderProcessor { + AbstractAWSCredentialsProviderProcessor { private volatile Set dynamicPropertyNames = new HashSet<>(); private volatile Pattern regexAttributesToSend = null; @@ -626,7 +626,6 @@ protected void route(FlowFile request, final FlowFile response, final ProcessSes session.transfer(request, getRelationshipForName(REL_NO_RETRY_NAME, relationships)); } } - } protected boolean isSuccess(final int statusCode) { @@ -636,7 +635,7 @@ protected void logRequest(final ComponentLog logger, final URI endpoint, final GenericApiGatewayRequest request) { try { logger.debug("\nRequest to remote service:\n\t{}\t{}\t\n{}", - new Object[]{endpoint.toURL().toExternalForm(), request.getHttpMethod(), getLogString(request.getHeaders())}); + endpoint.toURL().toExternalForm(), request.getHttpMethod(), getLogString(request.getHeaders())); } catch (final MalformedURLException e) { logger.debug(e.getMessage()); } diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/cloudwatch/PutCloudWatchMetric.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/cloudwatch/PutCloudWatchMetric.java index 678fadfaa0fd..13e2ec2d1eef 100644 --- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/cloudwatch/PutCloudWatchMetric.java +++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/cloudwatch/PutCloudWatchMetric.java @@ -333,10 +333,11 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro .withMetricData(datum); putMetricData(context, metricDataRequest); + session.getProvenanceReporter().send(flowFile, "TODO", REL_SUCCESS); // TODO transitUri session.transfer(flowFile, REL_SUCCESS); - getLogger().info("Successfully published cloudwatch metric for {}", new Object[]{flowFile}); + getLogger().info("Successfully published cloudwatch metric for {}", flowFile); } catch (final Exception e) { - getLogger().error("Failed to publish cloudwatch metric for {} due to {}", new Object[]{flowFile, e}); + getLogger().error("Failed to publish cloudwatch metric for {} due to {}", flowFile, e); flowFile = session.penalize(flowFile); session.transfer(flowFile, REL_FAILURE); } diff --git
a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/kinesis/stream/record/AbstractKinesisRecordProcessor.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/kinesis/stream/record/AbstractKinesisRecordProcessor.java index 815fdb7eabb5..560d6f05cef2 100644 --- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/kinesis/stream/record/AbstractKinesisRecordProcessor.java +++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/kinesis/stream/record/AbstractKinesisRecordProcessor.java @@ -124,7 +124,7 @@ public void processRecords(final ProcessRecordsInput processRecordsInput) { startProcessingRecords(); final int recordsTransformed = processRecordsWithRetries(records, flowFiles, session, stopWatch); - transferTo(ConsumeKinesisStream.REL_SUCCESS, session, records.size(), recordsTransformed, flowFiles); + transferTo(ConsumeKinesisStream.REL_SUCCESS, session, records.size(), recordsTransformed, flowFiles, stopWatch); session.commitAsync(() -> { processingRecords = false; @@ -201,12 +201,12 @@ abstract void processRecord(final List flowFiles, final Record kinesis final ProcessSession session, final StopWatch stopWatch); void reportProvenance(final ProcessSession session, final FlowFile flowFile, final String partitionKey, - final String sequenceNumber, final StopWatch stopWatch) { + final String sequenceNumber, final StopWatch stopWatch, final Relationship relationship) { final String transitUri = StringUtils.isNotBlank(partitionKey) && StringUtils.isNotBlank(sequenceNumber) ? String.format("%s/%s/%s#%s", transitUriPrefix, kinesisShardId, partitionKey, sequenceNumber) : String.format("%s/%s", transitUriPrefix, kinesisShardId); - session.getProvenanceReporter().receive(flowFile, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().receive(flowFile, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS), relationship); } Map getDefaultAttributes(final String sequenceNumber, final String partitionKey, final Date approximateArrivalTimestamp) { @@ -222,10 +222,11 @@ Map getDefaultAttributes(final String sequenceNumber, final Stri } void transferTo(final Relationship relationship, final ProcessSession session, final int recordsProcessed, - final int recordsTransformed, final List flowFiles) { + final int recordsTransformed, final List flowFiles, StopWatch stopWatch) { session.adjustCounter("Records Processed", recordsProcessed, false); if (!flowFiles.isEmpty()) { session.adjustCounter("Records Transformed", recordsTransformed, false); + reportProvenance(session, flowFiles.get(0), null, null, stopWatch, relationship); session.transfer(flowFiles, relationship); } } diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/kinesis/stream/record/KinesisRecordProcessorRaw.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/kinesis/stream/record/KinesisRecordProcessorRaw.java index 4c28de5fdf37..b93ca934caa5 100644 --- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/kinesis/stream/record/KinesisRecordProcessorRaw.java +++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/kinesis/stream/record/KinesisRecordProcessorRaw.java @@ -21,6 +21,7 @@ import 
org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.ProcessSessionFactory; +import org.apache.nifi.processors.aws.kinesis.stream.ConsumeKinesisStream; import org.apache.nifi.util.StopWatch; import java.time.format.DateTimeFormatter; @@ -52,7 +53,7 @@ void processRecord(final List flowFiles, final Record kinesisRecord, f getLogger().debug("Sequence No: {}, Partition Key: {}, Data: {}", sequenceNumber, partitionKey, BASE_64_ENCODER.encodeToString(data)); } - reportProvenance(session, flowFile, partitionKey, sequenceNumber, stopWatch); + reportProvenance(session, flowFile, partitionKey, sequenceNumber, stopWatch, ConsumeKinesisStream.REL_SUCCESS); final Map attributes = getDefaultAttributes(sequenceNumber, partitionKey, approximateArrivalTimestamp); flowFile = session.putAllAttributes(flowFile, attributes); diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/kinesis/stream/record/KinesisRecordProcessorRecord.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/kinesis/stream/record/KinesisRecordProcessorRecord.java index adba6d01e41f..167f85fdfd47 100644 --- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/kinesis/stream/record/KinesisRecordProcessorRecord.java +++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/kinesis/stream/record/KinesisRecordProcessorRecord.java @@ -107,7 +107,7 @@ void processRecord(final List flowFiles, final Record kinesisRecord, f // write raw Kinesis Record to the parse failure relationship getLogger().error("Failed to parse message from Kinesis Stream using configured Record Reader and Writer due to {}", e.getLocalizedMessage(), e); - outputRawRecordOnException(firstOutputRecord, flowFile, flowFiles, session, data, kinesisRecord, e); + outputRawRecordOnException(firstOutputRecord, flowFile, flowFiles, session, data, kinesisRecord, e, stopWatch); } if (getLogger().isDebugEnabled()) { @@ -147,8 +147,6 @@ private void completeFlowFile(final List flowFiles, final ProcessSessi } } - reportProvenance(session, flowFiles.get(0), null, null, stopWatch); - final Map attributes = getDefaultAttributes(lastRecord); attributes.put("record.count", String.valueOf(recordCount)); attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType()); @@ -161,7 +159,7 @@ private void completeFlowFile(final List flowFiles, final ProcessSessi private void outputRawRecordOnException(final boolean firstOutputRecord, final FlowFile flowFile, final List flowFiles, final ProcessSession session, - final byte[] data, final Record kinesisRecord, final Exception e) { + final byte[] data, final Record kinesisRecord, final Exception e, final StopWatch stopWatch) { if (firstOutputRecord && flowFile != null) { session.remove(flowFile); flowFiles.remove(0); @@ -180,7 +178,7 @@ private void outputRawRecordOnException(final boolean firstOutputRecord, final F final Throwable c = e.getCause() != null ? e.getCause() : e; attributes.put("record.error.message", (c.getLocalizedMessage() != null) ? 
c.getLocalizedMessage() : c.getClass().getCanonicalName() + " Thrown"); failed = session.putAllAttributes(failed, attributes); - transferTo(ConsumeKinesisStream.REL_PARSE_FAILURE, session, 0, 0, Collections.singletonList(failed)); + transferTo(ConsumeKinesisStream.REL_PARSE_FAILURE, session, 0, 0, Collections.singletonList(failed), stopWatch); } private Map getDefaultAttributes(final Record kinesisRecord) { diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/lambda/PutLambda.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/lambda/PutLambda.java index 4ef0bcbcb0ba..5b362820f0da 100644 --- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/lambda/PutLambda.java +++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/lambda/PutLambda.java @@ -186,7 +186,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } else { session.transfer(flowFile, REL_SUCCESS); final long totalTimeMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime); - session.getProvenanceReporter().send(flowFile, functionName, totalTimeMillis); + session.getProvenanceReporter().send(flowFile, functionName, totalTimeMillis, REL_SUCCESS); } } catch (final InvalidRequestContentException | InvalidParameterValueException @@ -194,25 +194,25 @@ public void onTrigger(final ProcessContext context, final ProcessSession session | ResourceNotFoundException | UnsupportedMediaTypeException unrecoverableException) { getLogger().error("Failed to invoke lambda {} with unrecoverable exception {} for flow file {}", - new Object[]{functionName, unrecoverableException, flowFile}); + functionName, unrecoverableException, flowFile); flowFile = populateExceptionAttributes(session, flowFile, unrecoverableException); session.transfer(flowFile, REL_FAILURE); } catch (final TooManyRequestsException retryableServiceException) { getLogger().error("Failed to invoke lambda {} with exception {} for flow file {}, therefore penalizing flowfile", - new Object[]{functionName, retryableServiceException, flowFile}); + functionName, retryableServiceException, flowFile); flowFile = populateExceptionAttributes(session, flowFile, retryableServiceException); flowFile = session.penalize(flowFile); session.transfer(flowFile, REL_FAILURE); context.yield(); } catch (final AmazonServiceException unrecoverableServiceException) { getLogger().error("Failed to invoke lambda {} with exception {} for flow file {} sending to fail", - new Object[]{functionName, unrecoverableServiceException, flowFile}); + functionName, unrecoverableServiceException, flowFile); flowFile = populateExceptionAttributes(session, flowFile, unrecoverableServiceException); session.transfer(flowFile, REL_FAILURE); context.yield(); } catch (final Exception exception) { getLogger().error("Failed to invoke lambda {} with exception {} for flow file {}", - new Object[]{functionName, exception, flowFile}); + functionName, exception, flowFile); session.transfer(flowFile, REL_FAILURE); context.yield(); } diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java index b93d7ebc4a6e..4e8cefc826c0 100644 --- 
a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java +++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java @@ -140,6 +140,6 @@ public void onTrigger(final ProcessContext context, final ProcessSession session final String url = s3.getResourceUrl(bucket, key); final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); getLogger().info("Successfully delete S3 Object for {} in {} millis; routing to success", flowFile, transferMillis); - session.getProvenanceReporter().invokeRemoteProcess(flowFile, url, "Object deleted"); + session.getProvenanceReporter().invokeRemoteProcess(flowFile, url, "Object deleted", REL_SUCCESS); } } diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java index cac2a6a3f741..a541b5ab01ba 100644 --- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java +++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java @@ -311,7 +311,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session final String url = client.getResourceUrl(bucket, key); final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); getLogger().info("Successfully retrieved S3 Object for {} in {} millis; routing to success", new Object[]{flowFile, transferMillis}); - session.getProvenanceReporter().fetch(flowFile, url, transferMillis); + session.getProvenanceReporter().fetch(flowFile, url, transferMillis, REL_SUCCESS); } private GetObjectMetadataRequest createGetObjectMetadataRequest(final ProcessContext context, final Map attributes) { diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java index 10632082cb68..77c76511823b 100644 --- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java +++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java @@ -880,14 +880,14 @@ public void process(final InputStream rawIn) throws IOException { final String url = s3.getResourceUrl(bucket, key); final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().send(flowFile, url, millis); + session.getProvenanceReporter().send(flowFile, url, millis, REL_SUCCESS); - getLogger().info("Successfully put {} to Amazon S3 in {} milliseconds", new Object[] {ff, millis}); + getLogger().info("Successfully put {} to Amazon S3 in {} milliseconds", ff, millis); try { removeLocalState(cacheKey); } catch (IOException e) { getLogger().info("Error trying to delete key {} from cache: {}", - new Object[]{cacheKey, e.getMessage()}); + cacheKey, e.getMessage()); } } catch (final ProcessException | AmazonClientException pe) { extractExceptionDetails(pe, session, flowFile); @@ -895,7 +895,7 @@ public void process(final InputStream rawIn) throws IOException { getLogger().info(pe.getMessage()); session.rollback(); } else 
{ - getLogger().error("Failed to put {} to Amazon S3 due to {}", new Object[]{flowFile, pe}); + getLogger().error("Failed to put {} to Amazon S3 due to {}", flowFile, pe); flowFile = session.penalize(flowFile); session.transfer(flowFile, REL_FAILURE); } diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/TagS3Object.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/TagS3Object.java index 9a79839b6834..acc9f971a450 100644 --- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/TagS3Object.java +++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/TagS3Object.java @@ -216,12 +216,12 @@ public void onTrigger(final ProcessContext context, final ProcessSession session session.transfer(flowFile, REL_SUCCESS); final String url = s3.getResourceUrl(bucket, key); final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - getLogger().info("Successfully tagged S3 Object for {} in {} millis; routing to success", new Object[]{flowFile, transferMillis}); - session.getProvenanceReporter().invokeRemoteProcess(flowFile, url, "Object tagged"); + getLogger().info("Successfully tagged S3 Object for {} in {} millis; routing to success", flowFile, transferMillis); + session.getProvenanceReporter().invokeRemoteProcess(flowFile, url, "Object tagged", REL_SUCCESS); } private void failFlowWithBlankEvaluatedProperty(ProcessSession session, FlowFile flowFile, PropertyDescriptor pd) { - getLogger().error("{} value is blank after attribute expression language evaluation", new Object[]{pd.getName()}); + getLogger().error("{} value is blank after attribute expression language evaluation", pd.getName()); flowFile = session.penalize(flowFile); session.transfer(flowFile, REL_FAILURE); } diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java index 43a1dec034ef..7d50e5a51af9 100644 --- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java +++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java @@ -187,7 +187,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session try { client.publish(requestBuilder.build()); session.transfer(flowFile, REL_SUCCESS); - session.getProvenanceReporter().send(flowFile, arn); + session.getProvenanceReporter().send(flowFile, arn, REL_SUCCESS); getLogger().info("Publishing completed for {}", flowFile); } catch (final Exception e) { getLogger().error("Publishing failed for {}", flowFile, e); @@ -195,5 +195,4 @@ public void onTrigger(final ProcessContext context, final ProcessSession session session.transfer(flowFile, REL_FAILURE); } } - } diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java index e6f197829b6c..4b42f9a200ba 100644 --- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java +++ 
b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java @@ -187,9 +187,9 @@ public void onTrigger(final ProcessContext context, final ProcessSession session flowFile = session.write(flowFile, out -> out.write(message.body().getBytes(charset))); session.transfer(flowFile, REL_SUCCESS); - session.getProvenanceReporter().receive(flowFile, queueUrl); + session.getProvenanceReporter().receive(flowFile, queueUrl, REL_SUCCESS); - getLogger().info("Successfully received {} from Amazon SQS", new Object[]{flowFile}); + getLogger().info("Successfully received {} from Amazon SQS", flowFile); } if (autoDelete) { diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java index 6667673cbd23..8d1ff9f48efe 100644 --- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java +++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java @@ -171,10 +171,10 @@ public void onTrigger(final ProcessContext context, final ProcessSession session return; } - getLogger().info("Successfully published message to Amazon SQS for {}", new Object[]{flowFile}); + getLogger().info("Successfully published message to Amazon SQS for {}", flowFile); session.transfer(flowFile, REL_SUCCESS); final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().send(flowFile, queueUrl, transmissionMillis); + session.getProvenanceReporter().send(flowFile, queueUrl, transmissionMillis, REL_SUCCESS); } } diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/wag/InvokeAWSGatewayApi.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/wag/InvokeAWSGatewayApi.java index 73ee986df9f3..f0a9c120860b 100644 --- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/wag/InvokeAWSGatewayApi.java +++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/wag/InvokeAWSGatewayApi.java @@ -266,9 +266,9 @@ public void onTrigger(final ProcessContext context, final ProcessSession session // emit provenance event final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); if (requestFlowFile != null) { - session.getProvenanceReporter().fetch(responseFlowFile, endpoint, millis); + session.getProvenanceReporter().fetch(responseFlowFile, endpoint, millis, getRelationshipForName(REL_RESPONSE_NAME, relationships)); } else { - session.getProvenanceReporter().receive(responseFlowFile, endpoint, millis); + session.getProvenanceReporter().receive(responseFlowFile, endpoint, millis, getRelationshipForName(REL_RESPONSE_NAME, relationships)); } } else if (exception != null) { final String contentType = "application/json"; @@ -284,9 +284,9 @@ public void onTrigger(final ProcessContext context, final ProcessSession session final long millis = TimeUnit.NANOSECONDS .toMillis(System.nanoTime() - startNanos); if (requestFlowFile != null) { - session.getProvenanceReporter().fetch(responseFlowFile, endpoint, millis); + session.getProvenanceReporter().fetch(responseFlowFile, endpoint, millis, getRelationshipForName(REL_RESPONSE_NAME, 
relationships)); } else { - session.getProvenanceReporter().receive(responseFlowFile, endpoint, millis); + session.getProvenanceReporter().receive(responseFlowFile, endpoint, millis, getRelationshipForName(REL_RESPONSE_NAME, relationships)); } } } @@ -321,8 +321,9 @@ public void onTrigger(final ProcessContext context, final ProcessSession session requestFlowFile = session.putAllAttributes(requestFlowFile, statusAttributes); final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().modifyAttributes(requestFlowFile, String - .format("The %s has been added. The value of which is the body of a http call to %s%s. It took %s millis,", attributeKey, endpoint, resourceName, millis)); + session.getProvenanceReporter().modifyAttributes(requestFlowFile, + String.format("The %s has been added. The value of which is the body of a http call to %s%s. It took %s millis,", attributeKey, endpoint, resourceName, millis), + (Relationship) null); // TODO determine relationship or report provenance when transferring } route(requestFlowFile, responseFlowFile, session, context, statusCode, diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/cosmos/document/PutAzureCosmosDBRecord.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/cosmos/document/PutAzureCosmosDBRecord.java index 343797e763a5..b536911a83a1 100644 --- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/cosmos/document/PutAzureCosmosDBRecord.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/cosmos/document/PutAzureCosmosDBRecord.java @@ -201,7 +201,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session error = true; } finally { if (!error) { - session.getProvenanceReporter().send(flowFile, getURI(context)); + session.getProvenanceReporter().send(flowFile, getURI(context), REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } else { session.transfer(flowFile, REL_FAILURE); diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/ConsumeAzureEventHub.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/ConsumeAzureEventHub.java index 29170f938a56..ab0d866dece1 100644 --- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/ConsumeAzureEventHub.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/ConsumeAzureEventHub.java @@ -655,7 +655,7 @@ private void transferTo( ) { session.transfer(flowFile, relationship); final String transitUri = getTransitUri(partitionContext); - session.getProvenanceReporter().receive(flowFile, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().receive(flowFile, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS), relationship); } private String createStorageConnectionString(final ProcessContext context) { diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/GetAzureEventHub.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/GetAzureEventHub.java 
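
The hunks above show the shape of this change across the client processors: the Relationship that a FlowFile is transferred to is now passed into every ProvenanceReporter call, so the repository can associate each provenance event with the route the FlowFile actually took. The sketch below condenses that calling pattern into one place; the class name, the endpoint URL, and the empty try body are illustrative only, and the send() overload shape is assumed from the hunks in this diff, not taken from a released API.

    import java.util.concurrent.TimeUnit;

    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.Relationship;
    import org.apache.nifi.processor.exception.ProcessException;

    public class ExampleSendProcessor extends AbstractProcessor {

        static final Relationship REL_SUCCESS = new Relationship.Builder().name("success").build();
        static final Relationship REL_FAILURE = new Relationship.Builder().name("failure").build();

        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
            FlowFile flowFile = session.get();
            if (flowFile == null) {
                return;
            }
            final long startNanos = System.nanoTime();
            try {
                // ... perform the actual transfer to the remote system here ...
                final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
                // The relationship used for session.transfer() is now also recorded on the event:
                session.getProvenanceReporter().send(flowFile, "https://example.invalid/endpoint", millis, REL_SUCCESS);
                session.transfer(flowFile, REL_SUCCESS);
            } catch (final Exception e) {
                getLogger().error("Transfer failed for {}", flowFile, e);
                session.transfer(session.penalize(flowFile), REL_FAILURE);
            }
        }
    }

Note that the relationship handed to the reporter is the same instance later passed to session.transfer(), which is the invariant these hunks consistently maintain.
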
index 59beb71dc62f..542a8930f66b 100644 --- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/GetAzureEventHub.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/GetAzureEventHub.java @@ -304,7 +304,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session session.transfer(flowFile, REL_SUCCESS); final String transitUri = getTransitUri(partitionId); - session.getProvenanceReporter().receive(flowFile, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().receive(flowFile, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS); lastSequenceNumber = eventData.getSequenceNumber(); } diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/PutAzureEventHub.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/PutAzureEventHub.java index 09c01a370135..9720c54903b5 100644 --- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/PutAzureEventHub.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/eventhub/PutAzureEventHub.java @@ -236,7 +236,7 @@ private void processFlowFileResults( final String eventHubName = context.getProperty(EVENT_HUB_NAME).getValue(); final String serviceBusEndpoint = context.getProperty(SERVICE_BUS_ENDPOINT).getValue(); final String transitUri = String.format(TRANSIT_URI_FORMAT_STRING, namespace, serviceBusEndpoint, eventHubName); - session.getProvenanceReporter().send(flowFile, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().send(flowFile, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } else { final Throwable processException = flowFileResult.getException(); diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/DeleteAzureBlobStorage.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/DeleteAzureBlobStorage.java index d369a22e4cf4..08c446afd6ba 100644 --- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/DeleteAzureBlobStorage.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/DeleteAzureBlobStorage.java @@ -97,7 +97,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro session.transfer(flowFile, REL_SUCCESS); final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().invokeRemoteProcess(flowFile, blob.getSnapshotQualifiedUri().toString(), "Blob deleted"); + session.getProvenanceReporter().invokeRemoteProcess(flowFile, blob.getSnapshotQualifiedUri().toString(), String.format("Blob deleted (%d ms)", transferMillis), REL_SUCCESS); } catch ( StorageException | URISyntaxException e) { getLogger().error("Failed to delete the specified blob {} from Azure Storage.
Routing to failure", blobPath, e); flowFile = session.penalize(flowFile); diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/DeleteAzureBlobStorage_v12.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/DeleteAzureBlobStorage_v12.java index 6f820588bd23..9c3feff71ced 100644 --- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/DeleteAzureBlobStorage_v12.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/DeleteAzureBlobStorage_v12.java @@ -117,7 +117,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro session.transfer(flowFile, REL_SUCCESS); long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().invokeRemoteProcess(flowFile, blobClient.getBlobUrl(), String.format("%s (%d ms)", provenanceMesage, transferMillis)); + session.getProvenanceReporter().invokeRemoteProcess(flowFile, blobClient.getBlobUrl(), String.format("%s (%d ms)", provenanceMesage, transferMillis), REL_SUCCESS); } catch (Exception e) { getLogger().error("Failed to delete the specified blob ({}) from Azure Blob Storage. Routing to failure", blobName, e); flowFile = session.penalize(flowFile); diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/DeleteAzureDataLakeStorage.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/DeleteAzureDataLakeStorage.java index cf1ef18f77fa..fce2581f2796 100644 --- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/DeleteAzureDataLakeStorage.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/DeleteAzureDataLakeStorage.java @@ -103,11 +103,11 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro final DataLakeFileClient fileClient = directoryClient.getFileClient(fileName); fileClient.delete(); session.transfer(flowFile, REL_SUCCESS); - session.getProvenanceReporter().invokeRemoteProcess(flowFile, fileClient.getFileUrl(), "File deleted"); + session.getProvenanceReporter().invokeRemoteProcess(flowFile, fileClient.getFileUrl(), "File deleted", REL_SUCCESS); } else { directoryClient.deleteWithResponse(true, new DataLakeRequestConditions(), Duration.ofSeconds(10), Context.NONE); session.transfer(flowFile, REL_SUCCESS); - session.getProvenanceReporter().invokeRemoteProcess(flowFile, directoryClient.getDirectoryUrl(), "Directory deleted"); + session.getProvenanceReporter().invokeRemoteProcess(flowFile, directoryClient.getDirectoryUrl(), "Directory deleted", REL_SUCCESS); } } catch (Exception e) { getLogger().error("Failed to delete the specified file from Azure Data Lake Storage", e); diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/FetchAzureBlobStorage.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/FetchAzureBlobStorage.java index f76b0792a5fd..34fb4c424b88 100644 --- 
a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/FetchAzureBlobStorage.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/FetchAzureBlobStorage.java @@ -152,7 +152,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro session.transfer(flowFile, REL_SUCCESS); final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().fetch(flowFile, blob.getSnapshotQualifiedUri().toString(), transferMillis); + session.getProvenanceReporter().fetch(flowFile, blob.getSnapshotQualifiedUri().toString(), transferMillis, REL_SUCCESS); } catch (IllegalArgumentException | URISyntaxException | StorageException | ProcessException | DecoderException e) { if (e instanceof ProcessException && storedException.get() == null) { throw (ProcessException) e; diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/FetchAzureBlobStorage_v12.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/FetchAzureBlobStorage_v12.java index 4d9db2d5b13a..c560b7924384 100644 --- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/FetchAzureBlobStorage_v12.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/FetchAzureBlobStorage_v12.java @@ -170,7 +170,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); String transitUri = attributes.get(ATTR_NAME_PRIMARY_URI); - session.getProvenanceReporter().fetch(flowFile, transitUri, transferMillis); + session.getProvenanceReporter().fetch(flowFile, transitUri, transferMillis, REL_SUCCESS); } catch (Exception e) { getLogger().error("Failure to fetch Azure blob {}", blobName, e); flowFile = session.penalize(flowFile); diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/FetchAzureDataLakeStorage.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/FetchAzureDataLakeStorage.java index 5e8ebeb61893..b735e0c6f3f5 100644 --- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/FetchAzureDataLakeStorage.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/FetchAzureDataLakeStorage.java @@ -134,11 +134,11 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro flowFile = session.write(flowFile, os -> fileClient.readWithResponse(os, fileRange, retryOptions, null, false, null, Context.NONE)); - session.getProvenanceReporter().modifyContent(flowFile); + session.getProvenanceReporter().modifyContent(flowFile, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().fetch(flowFile, fileClient.getFileUrl(), transferMillis); + session.getProvenanceReporter().fetch(flowFile, fileClient.getFileUrl(), transferMillis, REL_SUCCESS); } catch (final DataLakeStorageException e) { 
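// Failure handling for the fetch (below): the DataLakeStorageException status code is
// recorded as a FlowFile attribute; in this hunk only the success path above emits
// provenance events (CONTENT_MODIFIED and FETCH, both now tagged with REL_SUCCESS).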
getLogger().error("Failure to fetch file from Azure Data Lake Storage", e); flowFile = session.putAttribute(flowFile, "azure.datalake.storage.statusCode", String.valueOf(e.getStatusCode())); diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/MoveAzureDataLakeStorage.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/MoveAzureDataLakeStorage.java index c5b38becf614..446f5f5d7a54 100644 --- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/MoveAzureDataLakeStorage.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/MoveAzureDataLakeStorage.java @@ -202,7 +202,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro session.transfer(flowFile, REL_SUCCESS); final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().send(flowFile, sourceFileClient.getFileUrl(), transferMillis); + session.getProvenanceReporter().send(flowFile, sourceFileClient.getFileUrl(), transferMillis, REL_SUCCESS); } catch (DataLakeStorageException dlsException) { if (dlsException.getStatusCode() == 409 && conflictResolution.equals(IGNORE_RESOLUTION)) { session.transfer(flowFile, REL_SUCCESS); diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/PutAzureBlobStorage.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/PutAzureBlobStorage.java index 5d701a331639..2b4e8d913da0 100644 --- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/PutAzureBlobStorage.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/PutAzureBlobStorage.java @@ -180,7 +180,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session session.transfer(flowFile, REL_SUCCESS); final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().send(flowFile, blob.getSnapshotQualifiedUri().toString(), transferMillis); + session.getProvenanceReporter().send(flowFile, blob.getSnapshotQualifiedUri().toString(), transferMillis, REL_SUCCESS); } catch (IllegalArgumentException | URISyntaxException | StorageException | ProcessException | DecoderException e) { if (e instanceof ProcessException && storedException.get() == null) { diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/PutAzureBlobStorage_v12.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/PutAzureBlobStorage_v12.java index f005800873ad..8307e7d2062d 100644 --- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/PutAzureBlobStorage_v12.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/PutAzureBlobStorage_v12.java @@ -226,7 +226,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); String transitUri = 
attributes.get(ATTR_NAME_PRIMARY_URI); - session.getProvenanceReporter().send(flowFile, transitUri, transferMillis); + session.getProvenanceReporter().send(flowFile, transitUri, transferMillis, REL_SUCCESS); } catch (Exception e) { getLogger().error("Failed to create blob on Azure Blob Storage", e); flowFile = session.penalize(flowFile); diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/PutAzureDataLakeStorage.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/PutAzureDataLakeStorage.java index cfd660c28911..4069ce8bffaa 100644 --- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/PutAzureDataLakeStorage.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/PutAzureDataLakeStorage.java @@ -169,7 +169,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session flowFile = session.putAllAttributes(flowFile, attributes); final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().send(flowFile, fileUrl, transferMillis); + session.getProvenanceReporter().send(flowFile, fileUrl, transferMillis, REL_SUCCESS); } session.transfer(flowFile, REL_SUCCESS); diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/queue/GetAzureQueueStorage.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/queue/GetAzureQueueStorage.java index 6e29905b5d50..66467db911ad 100644 --- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/queue/GetAzureQueueStorage.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/queue/GetAzureQueueStorage.java @@ -160,7 +160,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro }); session.transfer(flowFile, REL_SUCCESS); - session.getProvenanceReporter().receive(flowFile, cloudQueue.getStorageUri().toString()); + session.getProvenanceReporter().receive(flowFile, cloudQueue.getStorageUri().toString(), REL_SUCCESS); } if(autoDelete) { diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/queue/GetAzureQueueStorage_v12.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/queue/GetAzureQueueStorage_v12.java index 28ba1e1b4e46..2b9939b62672 100644 --- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/queue/GetAzureQueueStorage_v12.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/queue/GetAzureQueueStorage_v12.java @@ -194,7 +194,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session flowFile = session.write(flowFile, out -> out.write(message.getBody().toString().getBytes())); session.transfer(flowFile, REL_SUCCESS); - session.getProvenanceReporter().receive(flowFile, queueClient.getQueueUrl().toString()); + session.getProvenanceReporter().receive(flowFile, queueClient.getQueueUrl().toString(), REL_SUCCESS); } if (autoDelete) { diff --git 
a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/queue/PutAzureQueueStorage.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/queue/PutAzureQueueStorage.java index 10da98016f99..c0e1da41f0a4 100644 --- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/queue/PutAzureQueueStorage.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/queue/PutAzureQueueStorage.java @@ -119,7 +119,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro session.transfer(flowFile, REL_SUCCESS); final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().send(flowFile, cloudQueue.getUri().toString(), transmissionMillis); + session.getProvenanceReporter().send(flowFile, cloudQueue.getUri().toString(), transmissionMillis, REL_SUCCESS); } @Override diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/queue/PutAzureQueueStorage_v12.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/queue/PutAzureQueueStorage_v12.java index e6389504c51a..b6fbfb4c5e94 100644 --- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/queue/PutAzureQueueStorage_v12.java +++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/queue/PutAzureQueueStorage_v12.java @@ -157,6 +157,6 @@ public void onTrigger(final ProcessContext context, final ProcessSession session session.transfer(flowFile, REL_SUCCESS); final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().send(flowFile, queueClient.getQueueUrl().toString(), transmissionMillis); + session.getProvenanceReporter().send(flowFile, queueClient.getQueueUrl().toString(), transmissionMillis, REL_SUCCESS); } } diff --git a/nifi-nar-bundles/nifi-beats-bundle/nifi-beats-processors/src/main/java/org/apache/nifi/processors/beats/ListenBeats.java b/nifi-nar-bundles/nifi-beats-bundle/nifi-beats-processors/src/main/java/org/apache/nifi/processors/beats/ListenBeats.java index a3b8dc4e64e3..3f93cd27e070 100644 --- a/nifi-nar-bundles/nifi-beats-bundle/nifi-beats-processors/src/main/java/org/apache/nifi/processors/beats/ListenBeats.java +++ b/nifi-nar-bundles/nifi-beats-bundle/nifi-beats-processors/src/main/java/org/apache/nifi/processors/beats/ListenBeats.java @@ -203,7 +203,7 @@ private void processEvents(final ProcessSession session, final Map 0) { diff --git a/nifi-nar-bundles/nifi-cdc/nifi-cdc-mysql-bundle/nifi-cdc-mysql-processors/src/main/java/org/apache/nifi/cdc/mysql/event/io/AbstractBinlogEventWriter.java b/nifi-nar-bundles/nifi-cdc/nifi-cdc-mysql-bundle/nifi-cdc-mysql-processors/src/main/java/org/apache/nifi/cdc/mysql/event/io/AbstractBinlogEventWriter.java index 420da6c2d4a4..89d74085e069 100644 --- a/nifi-nar-bundles/nifi-cdc/nifi-cdc-mysql-bundle/nifi-cdc-mysql-processors/src/main/java/org/apache/nifi/cdc/mysql/event/io/AbstractBinlogEventWriter.java +++ b/nifi-nar-bundles/nifi-cdc/nifi-cdc-mysql-bundle/nifi-cdc-mysql-processors/src/main/java/org/apache/nifi/cdc/mysql/event/io/AbstractBinlogEventWriter.java 
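
The queue processors above pass REL_SUCCESS directly, while the MySQL binlog writer below threads a caller-supplied relationship variable through to receive(). A compact sketch of that receive-side pattern follows; all names are illustrative and the receive() overload is assumed from this diff rather than from a released API.

    import java.util.concurrent.TimeUnit;

    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.Relationship;
    import org.apache.nifi.util.StopWatch;

    final class ReceiveReporting {

        private ReceiveReporting() {
        }

        // Reports the RECEIVE event against the same relationship that the FlowFile is
        // then transferred to, keeping the provenance trail and the routing in agreement.
        static void transferAndReport(final ProcessSession session, final FlowFile flowFile,
                                      final String transitUri, final Relationship relationship,
                                      final StopWatch stopWatch) {
            session.getProvenanceReporter().receive(flowFile, transitUri,
                    stopWatch.getElapsed(TimeUnit.MILLISECONDS), relationship);
            session.transfer(flowFile, relationship);
        }
    }
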
@@ -111,7 +111,7 @@ public void finishAndTransferFlowFile(final ProcessSession session, final EventW } flowFile = session.putAllAttributes(flowFile, getCommonAttributes(seqId, eventInfo)); session.transfer(flowFile, relationship); - session.getProvenanceReporter().receive(flowFile, transitUri); + session.getProvenanceReporter().receive(flowFile, transitUri, relationship); eventWriterConfiguration.cleanUp(); } catch (IOException ioe) { diff --git a/nifi-nar-bundles/nifi-compress-bundle/nifi-compress-processors/src/main/java/org/apache/nifi/processors/compress/ModifyCompression.java b/nifi-nar-bundles/nifi-compress-bundle/nifi-compress-processors/src/main/java/org/apache/nifi/processors/compress/ModifyCompression.java index f4a3653e0345..b74d87f62cf3 100644 --- a/nifi-nar-bundles/nifi-compress-bundle/nifi-compress-processors/src/main/java/org/apache/nifi/processors/compress/ModifyCompression.java +++ b/nifi-nar-bundles/nifi-compress-bundle/nifi-compress-processors/src/main/java/org/apache/nifi/processors/compress/ModifyCompression.java @@ -274,7 +274,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session getLogger().info("Input Compression [{}] Size [{}] Output Compression [{}] Size [{}] Completed {}", inputCompressionStrategy, inputFileSize, outputCompressionStrategy, flowFile.getSize(), flowFile); - session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getDuration(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getDuration(TimeUnit.MILLISECONDS), REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } catch (final RuntimeException e) { getLogger().error("Input Compression [{}] Size [{}] Output Compression [{}] Failed {}", diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java index 2519e7d227db..b98d1fa347c5 100644 --- a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java +++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java @@ -222,7 +222,7 @@ public void process(final InputStream in) throws IOException { outFile = session.putAllAttributes(outFile, updatedAttrs); final long fetchMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().fetch(outFile, getTransitUrl(bucket, docId), fetchMillis); + session.getProvenanceReporter().fetch(outFile, getTransitUrl(bucket, docId), fetchMillis, REL_SUCCESS); session.transfer(outFile, REL_SUCCESS); } catch (final CouchbaseException e) { diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java index e8fdbe3eec19..bef8b96a9466 100644 --- a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java +++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java @@ -159,7 +159,7 @@ public void process(final InputStream in) throws 
IOException { updatedAttrs.put(CouchbaseAttributes.Expiry.key(), String.valueOf(doc.expiry())); flowFile = session.putAllAttributes(flowFile, updatedAttrs); - session.getProvenanceReporter().send(flowFile, getTransitUrl(bucket, docId)); + session.getProvenanceReporter().send(flowFile, getTransitUrl(bucket, docId), REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } catch (final CouchbaseException e) { String errMsg = String.format("Writing document %s to Couchbase Server using %s failed due to %s", docId, flowFile, e); diff --git a/nifi-nar-bundles/nifi-cybersecurity-bundle/nifi-cybersecurity-processors/src/main/java/org/apache/nifi/processors/cybersecurity/FuzzyHashContent.java b/nifi-nar-bundles/nifi-cybersecurity-bundle/nifi-cybersecurity-processors/src/main/java/org/apache/nifi/processors/cybersecurity/FuzzyHashContent.java index 34cb62c92f4e..324fd8d09a8a 100644 --- a/nifi-nar-bundles/nifi-cybersecurity-bundle/nifi-cybersecurity-processors/src/main/java/org/apache/nifi/processors/cybersecurity/FuzzyHashContent.java +++ b/nifi-nar-bundles/nifi-cybersecurity-bundle/nifi-cybersecurity-processors/src/main/java/org/apache/nifi/processors/cybersecurity/FuzzyHashContent.java @@ -74,8 +74,6 @@ @DeprecationNotice(reason = "Unmaintained and planned for removal in version 2.0") public class FuzzyHashContent extends AbstractFuzzyHashProcessor { - - public static final PropertyDescriptor HASH_ALGORITHM = new PropertyDescriptor.Builder() .name("HASH_ALGORITHM") .displayName("Hashing Algorithm") @@ -166,11 +164,11 @@ public void process(final InputStream in) throws IOException { final String attributeName = context.getProperty(ATTRIBUTE_NAME).getValue(); flowFile = session.putAttribute(flowFile, attributeName, hashValueHolder.get()); - logger.info("Successfully added attribute '{}' to {} with a value of {}; routing to success", new Object[]{attributeName, flowFile, hashValueHolder.get()}); - session.getProvenanceReporter().modifyAttributes(flowFile); + logger.info("Successfully added attribute '{}' to {} with a value of {}; routing to success", attributeName, flowFile, hashValueHolder.get()); + session.getProvenanceReporter().modifyAttributes(flowFile, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } catch (final InsufficientComplexityException | ProcessException e) { - logger.error("Failed to process {} due to {}; routing to failure", new Object[]{flowFile, e}); + logger.error("Failed to process {} due to {}; routing to failure", flowFile, e); session.transfer(flowFile, REL_FAILURE); } } diff --git a/nifi-nar-bundles/nifi-dropbox-bundle/nifi-dropbox-processors/src/main/java/org/apache/nifi/processors/dropbox/FetchDropbox.java b/nifi-nar-bundles/nifi-dropbox-bundle/nifi-dropbox-processors/src/main/java/org/apache/nifi/processors/dropbox/FetchDropbox.java index 2a9fb9c1789e..c41c449072e1 100644 --- a/nifi-nar-bundles/nifi-dropbox-bundle/nifi-dropbox-processors/src/main/java/org/apache/nifi/processors/dropbox/FetchDropbox.java +++ b/nifi-nar-bundles/nifi-dropbox-bundle/nifi-dropbox-processors/src/main/java/org/apache/nifi/processors/dropbox/FetchDropbox.java @@ -150,7 +150,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro outFlowFile = session.putAllAttributes(outFlowFile, attributes); String url = DROPBOX_HOME_URL + fileMetadata.getPathDisplay(); final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().fetch(flowFile, url, transferMillis); + 
session.getProvenanceReporter().fetch(flowFile, url, transferMillis, REL_SUCCESS); session.transfer(outFlowFile, REL_SUCCESS); } catch (Exception e) { diff --git a/nifi-nar-bundles/nifi-dropbox-bundle/nifi-dropbox-processors/src/main/java/org/apache/nifi/processors/dropbox/PutDropbox.java b/nifi-nar-bundles/nifi-dropbox-bundle/nifi-dropbox-processors/src/main/java/org/apache/nifi/processors/dropbox/PutDropbox.java index 1bd0398bcc86..e6962a78846e 100644 --- a/nifi-nar-bundles/nifi-dropbox-bundle/nifi-dropbox-processors/src/main/java/org/apache/nifi/processors/dropbox/PutDropbox.java +++ b/nifi-nar-bundles/nifi-dropbox-bundle/nifi-dropbox-processors/src/main/java/org/apache/nifi/processors/dropbox/PutDropbox.java @@ -248,7 +248,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro String url = DROPBOX_HOME_URL + fileMetadata.getPathDisplay(); flowFile = session.putAllAttributes(flowFile, attributes); final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().send(flowFile, url, transferMillis); + session.getProvenanceReporter().send(flowFile, url, transferMillis, REL_SUCCESS); } session.transfer(flowFile, REL_SUCCESS); diff --git a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java index e7b3b0ecd02b..d45f6116aa41 100644 --- a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java +++ b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/FetchElasticsearchHttp.java @@ -248,14 +248,14 @@ public void onTrigger(final ProcessContext context, final ProcessSession session // emit provenance event final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); if (context.hasNonLoopConnection()) { - session.getProvenanceReporter().fetch(flowFile, url.toExternalForm(), millis); + session.getProvenanceReporter().fetch(flowFile, url.toExternalForm(), millis, REL_SUCCESS); } else { - session.getProvenanceReporter().receive(flowFile, url.toExternalForm(), millis); + session.getProvenanceReporter().receive(flowFile, url.toExternalForm(), millis, REL_SUCCESS); } session.transfer(flowFile, REL_SUCCESS); } else { logger.debug("Failed to read {}/{}/{} from Elasticsearch: Document not found", - new Object[]{index, docType, docId}); + index, docType, docId); // We couldn't find the document, so send it to "not found" session.transfer(flowFile, REL_NOT_FOUND); @@ -263,7 +263,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } else { if (statusCode == 404) { logger.warn("Failed to read {}/{}/{} from Elasticsearch: Document not found", - new Object[]{index, docType, docId}); + index, docType, docId); // We couldn't find the document, so penalize it and send it to "not found" session.transfer(flowFile, REL_NOT_FOUND); diff --git a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java 
b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java index 2081d2e30720..ee00bf05f61d 100644 --- a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java +++ b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttp.java @@ -365,7 +365,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } else { session.transfer(flowFile, REL_SUCCESS); // Record provenance event - session.getProvenanceReporter().send(flowFile, url.toString()); + session.getProvenanceReporter().send(flowFile, url.toString(), REL_SUCCESS); } } } @@ -375,7 +375,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session flowFilesToTransfer.forEach(file -> { session.transfer(file, REL_SUCCESS); // Record provenance event - session.getProvenanceReporter().send(file, url.toString()); + session.getProvenanceReporter().send(file, url.toString(), REL_SUCCESS); }); } catch (IOException ioe) { // Something went wrong when parsing the response, log the error and route to failure diff --git a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttpRecord.java b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttpRecord.java index a2d06d00f085..a6dcb6afe0ac 100644 --- a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttpRecord.java +++ b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchHttpRecord.java @@ -549,7 +549,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session // Everything succeeded, route FF and end flowFile = session.putAttribute(flowFile, "record.count", Integer.toString(recordCount)); session.transfer(flowFile, REL_SUCCESS); - session.getProvenanceReporter().send(flowFile, url.toString()); + session.getProvenanceReporter().send(flowFile, url.toString(), REL_SUCCESS); return; } } diff --git a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/QueryElasticsearchHttp.java b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/QueryElasticsearchHttp.java index 24443defbd0e..b38d2195cbed 100644 --- a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/QueryElasticsearchHttp.java +++ b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/QueryElasticsearchHttp.java @@ -485,7 +485,7 @@ private int getPage(final Response getResponse, final URL url, final ProcessCont throw new UnretryableException(String.format("Elasticsearch returned code %s with message %s, transferring flow file to failure", statusCode, getResponse.message())); } else { - logger.warn("Elasticsearch returned code {} with message {}", new Object[]{statusCode, getResponse.message()}); + logger.warn("Elasticsearch 
returned code {} with message {}", statusCode, getResponse.message()); } } finally { if (!page.isEmpty()) { @@ -499,9 +499,9 @@ private int getPage(final Response getResponse, final URL url, final ProcessCont final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); if (!page.isEmpty()) { if (context.hasNonLoopConnection()) { - page.forEach(f -> session.getProvenanceReporter().fetch(f, url.toExternalForm(), millis)); + page.forEach(f -> session.getProvenanceReporter().fetch(f, url.toExternalForm(), millis, REL_SUCCESS)); } else { - page.forEach(f -> session.getProvenanceReporter().receive(f, url.toExternalForm(), millis)); + page.forEach(f -> session.getProvenanceReporter().receive(f, url.toExternalForm(), millis, REL_SUCCESS)); } } return page.size(); diff --git a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/ScrollElasticsearchHttp.java b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/ScrollElasticsearchHttp.java index 1946edcb137d..5a60cd5b785f 100644 --- a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/ScrollElasticsearchHttp.java +++ b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/src/main/java/org/apache/nifi/processors/elasticsearch/ScrollElasticsearchHttp.java @@ -355,17 +355,17 @@ private void getPage(final Response getResponse, final URL url, final ProcessCon // emit provenance event final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().receive(flowFile, url.toExternalForm(), millis); + session.getProvenanceReporter().receive(flowFile, url.toExternalForm(), millis, REL_SUCCESS); } else { // 5xx -> RETRY, but a server error might last a while, so yield if (statusCode / 100 == 5) { logger.warn("Elasticsearch returned code {} with message {}, removing the flow file. 
This is likely a server problem, yielding...", - new Object[]{statusCode, getResponse.message()}); + statusCode, getResponse.message()); session.remove(flowFile); context.yield(); } else { - logger.warn("Elasticsearch returned code {} with message {}", new Object[]{statusCode, getResponse.message()}); + logger.warn("Elasticsearch returned code {} with message {}", statusCode, getResponse.message()); session.remove(flowFile); } } diff --git a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractJsonQueryElasticsearch.java b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractJsonQueryElasticsearch.java index 383c4dfd0a37..c90e3d80afec 100644 --- a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractJsonQueryElasticsearch.java +++ b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractJsonQueryElasticsearch.java @@ -301,7 +301,7 @@ private void handleAggregations(final Map aggregations, final Pr if (!aggsFlowFiles.isEmpty()) { session.transfer(aggsFlowFiles, REL_AGGREGATIONS); - aggsFlowFiles.forEach(ff -> session.getProvenanceReporter().receive(ff, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS))); + aggsFlowFiles.forEach(ff -> session.getProvenanceReporter().receive(ff, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_AGGREGATIONS)); } } } @@ -393,7 +393,7 @@ private void transferResultFlowFiles(final ProcessSession session, final List session.getProvenanceReporter().receive(ff, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS))); + hitsFlowFiles.forEach(ff -> session.getProvenanceReporter().receive(ff, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_HITS)); hitsFlowFiles.clear(); } } diff --git a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractPaginatedJsonQueryElasticsearch.java b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractPaginatedJsonQueryElasticsearch.java index ac1099795c41..0d9c04a5e12f 100644 --- a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractPaginatedJsonQueryElasticsearch.java +++ b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractPaginatedJsonQueryElasticsearch.java @@ -139,7 +139,8 @@ SearchResponse doQuery(final PaginatedJsonQueryParameters paginatedJsonQueryPara session.getProvenanceReporter().send( input, clientService.get().getTransitUrl(paginatedJsonQueryParameters.getIndex(), paginatedJsonQueryParameters.getType()), - stopWatch.getElapsed(TimeUnit.MILLISECONDS) + stopWatch.getElapsed(TimeUnit.MILLISECONDS), + REL_HITS ); } @@ -274,7 +275,7 @@ List handleHits(final List> hits, final boolean ne // output results if it seems we've combined all available results (i.e. 
no hits in this page and therefore no more expected) if (!hitsFlowFiles.isEmpty() && (hits == null || hits.isEmpty())) { session.transfer(hitsFlowFiles, REL_HITS); - hitsFlowFiles.forEach(ff -> session.getProvenanceReporter().receive(ff, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS))); + hitsFlowFiles.forEach(ff -> session.getProvenanceReporter().receive(ff, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_HITS)); hitsFlowFiles.clear(); } } else { diff --git a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractPutElasticsearch.java b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractPutElasticsearch.java index 14af7cfe7103..2e27ce92858a 100644 --- a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractPutElasticsearch.java +++ b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractPutElasticsearch.java @@ -220,10 +220,12 @@ Map getBulkHeaderParameters(final Map dynamicPro void transferFlowFilesOnException(final Exception ex, final Relationship rel, final ProcessSession session, final boolean penalize, final FlowFile... flowFiles) { for (FlowFile flowFile : flowFiles) { - flowFile = session.putAttribute(flowFile, "elasticsearch.put.error", ex.getMessage() == null ? "null" : ex.getMessage()); + final String errorMessage = ex.getMessage() == null ? "null" : ex.getMessage(); + flowFile = session.putAttribute(flowFile, "elasticsearch.put.error", errorMessage); if (penalize) { session.penalize(flowFile); } + session.getProvenanceReporter().route(flowFile, rel, errorMessage); session.transfer(flowFile, rel); } } diff --git a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/GetElasticsearch.java b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/GetElasticsearch.java index d61ccc6f6d81..f020c5e3b890 100644 --- a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/GetElasticsearch.java +++ b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/GetElasticsearch.java @@ -246,7 +246,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } documentFlowFile = session.putAllAttributes(documentFlowFile, attributes); - session.getProvenanceReporter().receive(documentFlowFile, clientService.get().getTransitUrl(index, type), stopWatch.getElapsed(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().receive(documentFlowFile, clientService.get().getTransitUrl(index, type), stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_DOC); session.transfer(documentFlowFile, REL_DOC); } catch (final ElasticsearchException ese) { handleElasticsearchException(ese, input, session, index, type, id); diff --git a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/JsonQueryElasticsearch.java 
b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/JsonQueryElasticsearch.java index c1ddbb207bb7..795dcc24ac7d 100644 --- a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/JsonQueryElasticsearch.java +++ b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/JsonQueryElasticsearch.java @@ -80,7 +80,8 @@ SearchResponse doQuery(final JsonQueryParameters queryJsonParameters, final List session.getProvenanceReporter().send( input, clientService.get().getTransitUrl(queryJsonParameters.getIndex(), queryJsonParameters.getType()), - stopWatch.getElapsed(TimeUnit.MILLISECONDS) + stopWatch.getElapsed(TimeUnit.MILLISECONDS), + REL_HITS ); } diff --git a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchJson.java b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchJson.java index 1038b83e9571..4b16c9fe7763 100644 --- a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchJson.java +++ b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchJson.java @@ -327,7 +327,8 @@ private void handleResponse(final ProcessContext context, final ProcessSession s context.getProperty(INDEX).evaluateAttributeExpressions(e).getValue(), context.getProperty(TYPE).evaluateAttributeExpressions(e).getValue() ), - "Elasticsearch _bulk operation error" + "Elasticsearch _bulk operation error", + REL_FAILED_DOCUMENTS ) ); @@ -339,7 +340,8 @@ private void handleResponse(final ProcessContext context, final ProcessSession s clientService.get().getTransitUrl( context.getProperty(INDEX).evaluateAttributeExpressions(s).getValue(), context.getProperty(TYPE).evaluateAttributeExpressions(s).getValue() - ) + ), + REL_SUCCESS ) ); } diff --git a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchRecord.java b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchRecord.java index 442d9d70d072..59671cae8299 100644 --- a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchRecord.java +++ b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/PutElasticsearchRecord.java @@ -459,7 +459,8 @@ public void onTrigger(final ProcessContext context, final ProcessSession session input, clientService.get().getTransitUrl(String.join(",", indices), types.isEmpty() ? 
null : String.join(",", types)), String.format(Locale.getDefault(), "%d Elasticsearch _bulk operation batch(es) [%d error(s), %d success(es)]", batches, erroredRecords.get(), successfulRecords.get()), - stopWatch.getDuration(TimeUnit.MILLISECONDS) + stopWatch.getDuration(TimeUnit.MILLISECONDS), + REL_SUCCESS ); input = session.putAllAttributes(input, new HashMap() {{ diff --git a/nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/src/main/java/org/apache/nifi/processors/email/AbstractEmailProcessor.java b/nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/src/main/java/org/apache/nifi/processors/email/AbstractEmailProcessor.java index a8b2cf0dbcaa..16e680fabdf0 100644 --- a/nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/src/main/java/org/apache/nifi/processors/email/AbstractEmailProcessor.java +++ b/nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/src/main/java/org/apache/nifi/processors/email/AbstractEmailProcessor.java @@ -423,8 +423,8 @@ private void transfer(Message emailMessage, ProcessContext context, ProcessSessi this.logger.warn("Failed to retrieve 'From' attribute from Message."); } - processSession.getProvenanceReporter().receive(flowFile, this.displayUrl, "Received message from " + fromAddressesString, executionDuration); - this.getLogger().info("Successfully received {} from {} in {} millis", new Object[]{flowFile, fromAddressesString, executionDuration}); + processSession.getProvenanceReporter().receive(flowFile, this.displayUrl, "Received message from " + fromAddressesString, executionDuration, REL_SUCCESS); + this.getLogger().info("Successfully received {} from {} in {} millis", flowFile, fromAddressesString, executionDuration); processSession.transfer(flowFile, REL_SUCCESS); } diff --git a/nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/src/main/java/org/apache/nifi/processors/email/ConsumeEWS.java b/nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/src/main/java/org/apache/nifi/processors/email/ConsumeEWS.java index ef676411bf5d..9711f0194896 100644 --- a/nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/src/main/java/org/apache/nifi/processors/email/ConsumeEWS.java +++ b/nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/src/main/java/org/apache/nifi/processors/email/ConsumeEWS.java @@ -520,8 +520,8 @@ public void process(final OutputStream out) throws IOException { this.logger.warn("Faild to retrieve 'From' attribute from Message."); } - processSession.getProvenanceReporter().receive(flowFile, this.displayUrl, "Received message from " + fromAddressesString, executionDuration); - this.getLogger().info("Successfully received {} from {} in {} millis", new Object[]{flowFile, fromAddressesString, executionDuration}); + processSession.getProvenanceReporter().receive(flowFile, this.displayUrl, "Received message from " + fromAddressesString, executionDuration, REL_SUCCESS); + this.getLogger().info("Successfully received {} from {} in {} millis", flowFile, fromAddressesString, executionDuration); processSession.transfer(flowFile, REL_SUCCESS); try { diff --git a/nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/src/main/java/org/apache/nifi/processors/email/smtp/SmtpConsumer.java b/nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/src/main/java/org/apache/nifi/processors/email/smtp/SmtpConsumer.java index a109c0579f75..31baea8db591 100644 --- a/nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/src/main/java/org/apache/nifi/processors/email/smtp/SmtpConsumer.java +++ 
b/nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/src/main/java/org/apache/nifi/processors/email/smtp/SmtpConsumer.java @@ -110,7 +110,7 @@ public void data(final InputStream data) throws RejectException, TooMuchDataExce } flowFile = processSession.putAllAttributes(flowFile, extractMessageAttributes()); watch.stop(); - processSession.getProvenanceReporter().receive(flowFile, "smtp://" + host + ":" + port + "/", watch.getDuration(TimeUnit.MILLISECONDS)); + processSession.getProvenanceReporter().receive(flowFile, "smtp://" + host + ":" + port + "/", watch.getDuration(TimeUnit.MILLISECONDS), ListenSMTP.REL_SUCCESS); processSession.transfer(flowFile, ListenSMTP.REL_SUCCESS); processSession.commitAsync(); } catch (FlowFileAccessException | IllegalStateException | RejectException | IOException ex) { diff --git a/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIPRecord.java b/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIPRecord.java index d8c6aec07e74..bf9a878abe89 100644 --- a/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIPRecord.java +++ b/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIPRecord.java @@ -289,9 +289,9 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro } session.transfer(output, REL_FOUND); session.transfer(input, REL_ORIGINAL); - session.getProvenanceReporter().modifyContent(notFound); + session.getProvenanceReporter().modifyContent(notFound, REL_NOT_FOUND); } - session.getProvenanceReporter().modifyContent(output); + session.getProvenanceReporter().modifyContent(output, REL_FOUND); } catch (InvalidDatabaseException | InternalError idbe) { // The database was likely changed out while being read, rollback and try again setNeedsReload(true); diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-event-listen/src/main/java/org/apache/nifi/processor/util/listen/AbstractListenEventBatchingProcessor.java b/nifi-nar-bundles/nifi-extension-utils/nifi-event-listen/src/main/java/org/apache/nifi/processor/util/listen/AbstractListenEventBatchingProcessor.java index fa23111e29a6..b78044c0f39f 100644 --- a/nifi-nar-bundles/nifi-extension-utils/nifi-event-listen/src/main/java/org/apache/nifi/processor/util/listen/AbstractListenEventBatchingProcessor.java +++ b/nifi-nar-bundles/nifi-extension-utils/nifi-event-listen/src/main/java/org/apache/nifi/processor/util/listen/AbstractListenEventBatchingProcessor.java @@ -110,7 +110,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro // the sender and command will be the same for all events based on the batch key final String transitUri = getTransitUri(entry.getValue()); - session.getProvenanceReporter().receive(flowFile, transitUri); + session.getProvenanceReporter().receive(flowFile, transitUri, REL_SUCCESS); allEvents.addAll(events); } diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-event-put/src/main/java/org/apache/nifi/processor/util/put/AbstractPutEventProcessor.java b/nifi-nar-bundles/nifi-extension-utils/nifi-event-put/src/main/java/org/apache/nifi/processor/util/put/AbstractPutEventProcessor.java index f2aa825b2ea0..56ac8ddcd4e6 100644 --- a/nifi-nar-bundles/nifi-extension-utils/nifi-event-put/src/main/java/org/apache/nifi/processor/util/put/AbstractPutEventProcessor.java +++ 
b/nifi-nar-bundles/nifi-extension-utils/nifi-event-put/src/main/java/org/apache/nifi/processor/util/put/AbstractPutEventProcessor.java @@ -408,7 +408,7 @@ public int compare(final Range o1, final Range o2) { // Create a FlowFile for this range. FlowFile child = session.clone(flowFile, range.getStart(), range.getEnd() - range.getStart()); if (relationship == REL_SUCCESS) { - session.getProvenanceReporter().send(child, transitUri, "Sent " + count + " messages"); + session.getProvenanceReporter().send(child, transitUri, "Sent " + count + " messages", relationship); session.transfer(child, relationship); } else { child = session.penalize(child); @@ -439,7 +439,7 @@ public synchronized void completeSession() { if (failedRanges.isEmpty()) { final long transferMillis = TimeUnit.NANOSECONDS.toMillis(completeTime - startTime); - session.getProvenanceReporter().send(flowFile, transitUri, "Sent " + successfulRanges.size() + " messages;", transferMillis); + session.getProvenanceReporter().send(flowFile, transitUri, "Sent " + successfulRanges.size() + " messages;", transferMillis, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); getLogger().info("Successfully sent {} messages for {} in {} millis", successfulRanges.size(), flowFile, transferMillis); session.commitAsync(); diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-hadoop-record-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractFetchHDFSRecord.java b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-hadoop-record-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractFetchHDFSRecord.java index 03770ac56f82..7c4d188717c3 100644 --- a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-hadoop-record-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractFetchHDFSRecord.java +++ b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-hadoop-record-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractFetchHDFSRecord.java @@ -235,22 +235,22 @@ record == null ? null : record.getSchema()); final Path qualifiedPath = path.makeQualified(fileSystem.getUri(), fileSystem.getWorkingDirectory()); successFlowFile = session.putAttribute(successFlowFile, HADOOP_FILE_URL_ATTRIBUTE, qualifiedPath.toString()); - getLogger().info("Successfully received content from {} for {} in {} milliseconds", new Object[] {qualifiedPath, successFlowFile, stopWatch.getDuration()}); - session.getProvenanceReporter().fetch(successFlowFile, qualifiedPath.toString(), stopWatch.getDuration(TimeUnit.MILLISECONDS)); + getLogger().info("Successfully received content from {} for {} in {} milliseconds", qualifiedPath, successFlowFile, stopWatch.getDuration()); + session.getProvenanceReporter().fetch(successFlowFile, qualifiedPath.toString(), stopWatch.getDuration(TimeUnit.MILLISECONDS), REL_SUCCESS); session.transfer(successFlowFile, REL_SUCCESS); session.remove(originalFlowFile); return null; } catch (final FileNotFoundException | AccessControlException e) { - getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to failure", new Object[] {filenameValue, originalFlowFile, e}); + getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to failure", filenameValue, originalFlowFile, e); final FlowFile failureFlowFile = session.putAttribute(originalFlowFile, FETCH_FAILURE_REASON_ATTR, e.getMessage() == null ? 
e.toString() : e.getMessage()); session.transfer(failureFlowFile, REL_FAILURE); } catch (final IOException | FlowFileAccessException e) { - getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to retry", new Object[] {filenameValue, originalFlowFile, e}); + getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to retry", filenameValue, originalFlowFile, e); session.transfer(session.penalize(originalFlowFile), REL_RETRY); context.yield(); } catch (final Throwable t) { - getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to failure", new Object[] {filenameValue, originalFlowFile, t}); + getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to failure", filenameValue, originalFlowFile, t); final FlowFile failureFlowFile = session.putAttribute(originalFlowFile, FETCH_FAILURE_REASON_ATTR, t.getMessage() == null ? t.toString() : t.getMessage()); session.transfer(failureFlowFile, REL_FAILURE); } diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-hadoop-record-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractPutHDFSRecord.java b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-hadoop-record-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractPutHDFSRecord.java index 5aef6b462132..27d614c7d7d0 100644 --- a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-hadoop-record-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractPutHDFSRecord.java +++ b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-hadoop-record-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractPutHDFSRecord.java @@ -379,7 +379,7 @@ public FileSystem getFileSystem(Configuration conf) throws IOException { // Send a provenance event and transfer to success final Path qualifiedPath = destFile.makeQualified(fileSystem.getUri(), fileSystem.getWorkingDirectory()); putFlowFile = session.putAttribute(putFlowFile, HADOOP_FILE_URL_ATTRIBUTE, qualifiedPath.toString()); - session.getProvenanceReporter().send(putFlowFile, qualifiedPath.toString()); + session.getProvenanceReporter().send(putFlowFile, qualifiedPath.toString(), REL_SUCCESS); session.transfer(putFlowFile, REL_SUCCESS); } catch (IOException | FlowFileAccessException e) { diff --git a/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/AbstractFlumeProcessor.java b/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/AbstractFlumeProcessor.java index 13d2ff7412a0..3c818801035f 100644 --- a/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/AbstractFlumeProcessor.java +++ b/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/AbstractFlumeProcessor.java @@ -66,8 +66,7 @@ public void process(final OutputStream out) throws IOException { } }); - session.getProvenanceReporter() - .create(flowFile); + session.getProvenanceReporter().create(flowFile, relationship); session.transfer(flowFile, relationship); } diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-components/src/main/java/org/apache/nifi/controller/repository/StandardProcessSession.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-components/src/main/java/org/apache/nifi/controller/repository/StandardProcessSession.java index f1444ace0f55..a05c859e9c54 100644 --- 
a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-components/src/main/java/org/apache/nifi/controller/repository/StandardProcessSession.java +++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-components/src/main/java/org/apache/nifi/controller/repository/StandardProcessSession.java @@ -257,7 +257,7 @@ private void validateCommitState() { if (relationship == null) { final String createdThisSession = record.getOriginalQueue() == null ? "was created" : "was not created"; throw new FlowFileHandlingException(record.getCurrent() + " transfer relationship not specified. This FlowFile " + createdThisSession + " in this session and was not transferred " + - "to any Relationship via ProcessSession.transfer()"); + "to any Relationship via ProcessSession.transfer()"); } final Collection destinations = context.getConnections(relationship); @@ -448,9 +448,9 @@ private void retry(final StandardRepositoryRecord record, final long maxBackoffM // Adjust for any state that has been updated for the Record that is no longer relevant. final String uuid = record.getCurrent().getAttribute(CoreAttributes.UUID.key()); final FlowFileRecord updatedFlowFile = new StandardFlowFileRecord.Builder() - .fromFlowFile(record.getOriginal()) - .addAttribute(retryAttribute, String.valueOf(currentRetries + 1)) - .build(); + .fromFlowFile(record.getOriginal()) + .addAttribute(retryAttribute, String.valueOf(currentRetries + 1)) + .build(); if (original == null) { record.markForDelete(); @@ -693,7 +693,7 @@ protected void commit(final Checkpoint checkpoint, final boolean asynchronous) { stateManager.setState(checkpoint.localState.toMap(), Scope.LOCAL); } else { LOG.debug("Will not update State Manager's Local State because the State Manager reports the latest version as {}, which is newer than the session's known version of {}.", - stateMap.getVersion(), checkpoint.localState.getVersion()); + stateMap.getVersion(), checkpoint.localState.getVersion()); } } catch (final Exception e) { LOG.warn("Failed to update Local State for {}. If NiFi is restarted before the state is able to be updated, it could result in data duplication.", connectableDescription, e); @@ -709,7 +709,7 @@ protected void commit(final Checkpoint checkpoint, final boolean asynchronous) { stateManager.setState(checkpoint.clusterState.toMap(), Scope.CLUSTER); } else { LOG.debug("Will not update State Manager's Cluster State because the State Manager reports the latest version as {}, which is newer than the session's known version of {}.", - stateMap.getVersion(), checkpoint.clusterState.getVersion()); + stateMap.getVersion(), checkpoint.clusterState.getVersion()); } } catch (final Exception e) { LOG.warn("Failed to update Cluster State for {}. 
If NiFi is restarted before the state is able to be updated, it could result in data duplication.", connectableDescription, e); @@ -919,8 +919,10 @@ protected void updateProvenanceRepo(final Checkpoint checkpoint) { final boolean newFlowFile = repoRecord.getOriginal() == null; if (contentChanged && !newFlowFile) { - ProvenanceEventRecord record = provenanceReporter.generateModifyContentEvent(curFlowFile, "Session detected change in content"); - recordsToSubmit.add(record); + ProvenanceEventRecord event = provenanceReporter.build(curFlowFile, ProvenanceEventType.CONTENT_MODIFIED).build(); + if (!processorGenerated.contains(event)) { + recordsToSubmit.add(event); + } addEventType(eventTypesPerFlowFileId, flowFileId, ProvenanceEventType.CONTENT_MODIFIED); eventAdded = true; } @@ -930,18 +932,18 @@ protected void updateProvenanceRepo(final Checkpoint checkpoint) { boolean creationEventRegistered = false; if (registeredTypes != null) { if (registeredTypes.get(ProvenanceEventType.CREATE.ordinal()) - || registeredTypes.get(ProvenanceEventType.FORK.ordinal()) - || registeredTypes.get(ProvenanceEventType.CLONE.ordinal()) - || registeredTypes.get(ProvenanceEventType.JOIN.ordinal()) - || registeredTypes.get(ProvenanceEventType.RECEIVE.ordinal()) - || registeredTypes.get(ProvenanceEventType.FETCH.ordinal())) { + || registeredTypes.get(ProvenanceEventType.FORK.ordinal()) + || registeredTypes.get(ProvenanceEventType.CLONE.ordinal()) + || registeredTypes.get(ProvenanceEventType.JOIN.ordinal()) + || registeredTypes.get(ProvenanceEventType.RECEIVE.ordinal()) + || registeredTypes.get(ProvenanceEventType.FETCH.ordinal())) { creationEventRegistered = true; } } if (!creationEventRegistered) { - recordsToSubmit.add(provenanceReporter.generateCreateEvent(curFlowFile, "Session detected no CREATE event present, auto-generating a CREATE event")); + recordsToSubmit.add(provenanceReporter.build(curFlowFile, ProvenanceEventType.CREATE).build()); eventAdded = true; } } @@ -953,9 +955,7 @@ protected void updateProvenanceRepo(final Checkpoint checkpoint) { // event is redundant if another already exists. // We don't generate ATTRIBUTES_MODIFIED event for retry. 
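// Background for the hunk above: the removed generateModifyContentEvent/generateCreateEvent
// helpers carried the previous-event-ID bookkeeping; with that gone, session-generated events
// come from the same ProvenanceEventBuilder path the processors use. A minimal sketch of the
// pattern, with reporter and flowFile as stand-in local names rather than the exact fields here:
//     final ProvenanceEventRecord autoEvent =
//             reporter.build(flowFile, ProvenanceEventType.CONTENT_MODIFIED).build();
//     if (!processorGenerated.contains(autoEvent)) { // skip the auto-event if the processor already emitted an equivalent one
//         recordsToSubmit.add(autoEvent);
//     }
// The ATTRIBUTES_MODIFIED fallback below follows the same builder-based pattern: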
if (!eventTypesPerFlowFileId.containsKey(flowFileId)) { - recordsToSubmit.add( - provenanceReporter.generateModifyAttributesEvent(curFlowFile, "Session detected no provenance event created, auto-generating an ATTRIBUTES_MODIFIED event") - ); + recordsToSubmit.add(provenanceReporter.build(curFlowFile, ProvenanceEventType.ATTRIBUTES_MODIFIED).build()); addEventType(eventTypesPerFlowFileId, flowFileId, ProvenanceEventType.ATTRIBUTES_MODIFIED); } } @@ -1030,11 +1030,11 @@ private void updateEventContentClaims(final ProvenanceEventBuilder builder, fina } else { final ResourceClaim resourceClaim = originalClaim.getResourceClaim(); builder.setCurrentContentClaim( - resourceClaim.getContainer(), resourceClaim.getSection(), resourceClaim.getId(), - repoRecord.getOriginal().getContentClaimOffset() + originalClaim.getOffset(), repoRecord.getOriginal().getSize()); + resourceClaim.getContainer(), resourceClaim.getSection(), resourceClaim.getId(), + repoRecord.getOriginal().getContentClaimOffset() + originalClaim.getOffset(), repoRecord.getOriginal().getSize()); builder.setPreviousContentClaim( - resourceClaim.getContainer(), resourceClaim.getSection(), resourceClaim.getId(), - repoRecord.getOriginal().getContentClaimOffset() + originalClaim.getOffset(), repoRecord.getOriginal().getSize()); + resourceClaim.getContainer(), resourceClaim.getSection(), resourceClaim.getId(), + repoRecord.getOriginal().getContentClaimOffset() + originalClaim.getOffset(), repoRecord.getOriginal().getSize()); } } @@ -1079,8 +1079,8 @@ public ProvenanceEventRecord enrich(final ProvenanceEventRecord rawEvent, final } private ProvenanceEventRecord enrich( - final ProvenanceEventRecord rawEvent, final Map flowFileRecordMap, final Map records, - final boolean updateAttributes, final long commitNanos) { + final ProvenanceEventRecord rawEvent, final Map flowFileRecordMap, final Map records, + final boolean updateAttributes, final long commitNanos) { final ProvenanceEventBuilder recordBuilder = context.createProvenanceEventBuilder().fromEvent(rawEvent); final FlowFileRecord eventFlowFile = flowFileRecordMap.get(rawEvent.getFlowFileUuid()); if (eventFlowFile != null) { @@ -1272,8 +1272,8 @@ protected synchronized void rollback(final boolean penalize, final boolean rollb // If we have transient claims that need to be cleaned up, do so. 
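// Context for the cleanup below: a transient claim is a content claim written during this
// session that no surviving FlowFile references (for example, content overwritten before
// commit). Wrapping the collected claims in a TransientClaimRepositoryRecord lets the normal
// repository-update path release their content. A hedged sketch, where flowFileRepo is a
// hypothetical handle to the session's FlowFileRepository:
//     final RepositoryRecord cleanup = new TransientClaimRepositoryRecord(transientClaims);
//     flowFileRepo.updateRepository(Collections.singletonList(cleanup));
// The stream below gathers those claims from the records being rolled back: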
final List transientClaims = recordsToHandle.stream() - .flatMap(record -> record.getTransientClaims().stream()) - .collect(Collectors.toList()); + .flatMap(record -> record.getTransientClaims().stream()) + .collect(Collectors.toList()); if (!transientClaims.isEmpty()) { final RepositoryRecord repoRecord = new TransientClaimRepositoryRecord(transientClaims); @@ -1489,12 +1489,12 @@ private synchronized void migrate(final StandardProcessSession newOwner, Collect for (final FlowFile flowFile : flowFiles) { if (openInputStreams.containsKey(flowFile)) { throw new IllegalStateException(flowFile + " cannot be migrated to a new Process Session because this session currently " - + "has an open InputStream for the FlowFile, created by calling ProcessSession.read(FlowFile)"); + + "has an open InputStream for the FlowFile, created by calling ProcessSession.read(FlowFile)"); } if (openOutputStreams.containsKey(flowFile)) { throw new IllegalStateException(flowFile + " cannot be migrated to a new Process Session because this session currently " - + "has an open OutputStream for the FlowFile, created by calling ProcessSession.write(FlowFile)"); + + "has an open OutputStream for the FlowFile, created by calling ProcessSession.write(FlowFile)"); } if (readRecursionSet.containsKey(flowFile)) { @@ -1515,8 +1515,8 @@ private synchronized void migrate(final StandardProcessSession newOwner, Collect // ProcessSession is committed, claiming to have created FlowFiles from the parent, which is no longer even in // the flow. This would be very confusing when looking at the provenance for the FlowFile, so it is best to avoid this. final Set flowFileIds = flowFiles.stream() - .map(ff -> ff.getAttribute(CoreAttributes.UUID.key())) - .collect(Collectors.toSet()); + .map(ff -> ff.getAttribute(CoreAttributes.UUID.key())) + .collect(Collectors.toSet()); for (final Map.Entry entry : forkEventBuilders.entrySet()) { final FlowFile eventFlowFile = entry.getKey(); @@ -1525,7 +1525,7 @@ private synchronized void migrate(final StandardProcessSession newOwner, Collect for (final String childId : eventBuilder.getChildFlowFileIds()) { if (!flowFileIds.contains(childId)) { throw new FlowFileHandlingException("Cannot migrate " + eventFlowFile + " to a new session because it was forked to create " + eventBuilder.getChildFlowFileIds().size() - + " children and not all children are being migrated. If any FlowFile is forked, all of its children must also be migrated at the same time as the forked FlowFile"); + + " children and not all children are being migrated. If any FlowFile is forked, all of its children must also be migrated at the same time as the forked FlowFile"); } } } else { @@ -1533,7 +1533,7 @@ private synchronized void migrate(final StandardProcessSession newOwner, Collect for (final String childId : eventBuilder.getChildFlowFileIds()) { if (flowFileIds.contains(childId)) { throw new FlowFileHandlingException("Cannot migrate " + eventFlowFile + " to a new session because it was forked from a Parent FlowFile, " + - "but the parent is not being migrated. If any FlowFile is forked, the parent and all children must be migrated at the same time."); + "but the parent is not being migrated. 
If any FlowFile is forked, the parent and all children must be migrated at the same time."); } } } @@ -1700,7 +1700,7 @@ private String summarizeEvents(final Checkpoint checkpoint) { final StringBuilder sb = new StringBuilder(512); if (!LOG.isDebugEnabled() && (largestTransferSetSize > VERBOSE_LOG_THRESHOLD - || numModified > VERBOSE_LOG_THRESHOLD || numCreated > VERBOSE_LOG_THRESHOLD || numRemoved > VERBOSE_LOG_THRESHOLD)) { + || numModified > VERBOSE_LOG_THRESHOLD || numCreated > VERBOSE_LOG_THRESHOLD || numRemoved > VERBOSE_LOG_THRESHOLD)) { if (numCreated > 0) { sb.append("created ").append(numCreated).append(" FlowFiles, "); } @@ -1830,12 +1830,12 @@ private void handleConflictingId(final FlowFileRecord flowFile, final Connection } LOG.error("Attempted to pull {} from {} but the Session already has a FlowFile with the same ID ({}): {}, which was pulled from {}. This means that the system has two FlowFiles with the" + - " same ID, which should not happen.", flowFile, connection, flowFile.getId(), conflict.getCurrent(), conflict.getOriginalQueue()); + " same ID, which should not happen.", flowFile, connection, flowFile.getId(), conflict.getCurrent(), conflict.getOriginalQueue()); connection.getFlowFileQueue().put(flowFile); rollback(true, false); throw new FlowFileAccessException("Attempted to pull a FlowFile with ID " + flowFile.getId() + " from Connection " - + connection + " but a FlowFile with that ID already exists in the session"); + + connection + " but a FlowFile with that ID already exists in the session"); } @Override @@ -2017,8 +2017,8 @@ public FlowFile create() { attrs.put(CoreAttributes.UUID.key(), uuid); final FlowFileRecord fFile = new StandardFlowFileRecord.Builder().id(context.getNextFlowFileSequence()) - .addAttributes(attrs) - .build(); + .addAttributes(attrs) + .build(); final StandardRepositoryRecord record = new StandardRepositoryRecord(null); record.setWorking(fFile, attrs, false); records.put(fFile.getId(), record); @@ -2050,8 +2050,8 @@ public FlowFile create(FlowFile parent) { final String key = entry.getKey(); final String value = entry.getValue(); if (CoreAttributes.ALTERNATE_IDENTIFIER.key().equals(key) - || CoreAttributes.DISCARD_REASON.key().equals(key) - || CoreAttributes.UUID.key().equals(key)) { + || CoreAttributes.DISCARD_REASON.key().equals(key) + || CoreAttributes.UUID.key().equals(key)) { continue; } newAttributes.put(key, value); @@ -2107,9 +2107,9 @@ public FlowFile create(Collection parents) { newAttributes.put(CoreAttributes.UUID.key(), uuid); final FlowFileRecord fFile = new StandardFlowFileRecord.Builder().id(context.getNextFlowFileSequence()) - .addAttributes(newAttributes) - .lineageStart(lineageStartDate, lineageStartIndex) - .build(); + .addAttributes(newAttributes) + .lineageStart(lineageStartDate, lineageStartIndex) + .build(); final StandardRepositoryRecord record = new StandardRepositoryRecord(null); record.setWorking(fFile, newAttributes, false); @@ -2559,9 +2559,9 @@ public ProvenanceEventRecord next() { if (claim != null) { final ResourceClaim resourceClaim = claim.getResourceClaim(); enriched.setCurrentContentClaim(resourceClaim.getContainer(), resourceClaim.getSection(), resourceClaim.getId(), - record.getContentClaimOffset() + claim.getOffset(), record.getSize()); + record.getContentClaimOffset() + claim.getOffset(), record.getSize()); enriched.setPreviousContentClaim(resourceClaim.getContainer(), resourceClaim.getSection(), resourceClaim.getId(), - record.getContentClaimOffset() + claim.getOffset(), record.getSize()); + 
record.getContentClaimOffset() + claim.getOffset(), record.getSize()); } enriched.setAttributes(record.getAttributes(), Collections. emptyMap()); @@ -2608,7 +2608,7 @@ private InputStream getInputStream(final FlowFile flowFile, final ContentClaim c final InputStream limitingInputStream = new LimitingInputStream(new DisableOnCloseInputStream(currentReadClaimStream), flowFile.getSize()); final ContentClaimInputStream contentClaimInputStream = new ContentClaimInputStream(context.getContentRepository(), claim, - contentClaimOffset, limitingInputStream, performanceTracker); + contentClaimOffset, limitingInputStream, performanceTracker); return contentClaimInputStream; } } @@ -2674,9 +2674,9 @@ public void read(FlowFile source, boolean allowSessionStreamManagement, InputStr } try (final InputStream rawIn = getInputStream(source, record.getCurrentClaim(), record.getCurrentClaimOffset(), true); - final InputStream limitedIn = new LimitedInputStream(rawIn, source.getSize()); - final InputStream disableOnCloseIn = new DisableOnCloseInputStream(limitedIn); - final ByteCountingInputStream countingStream = new ByteCountingInputStream(disableOnCloseIn, this.bytesRead)) { + final InputStream limitedIn = new LimitedInputStream(rawIn, source.getSize()); + final InputStream disableOnCloseIn = new DisableOnCloseInputStream(limitedIn); + final ByteCountingInputStream countingStream = new ByteCountingInputStream(disableOnCloseIn, this.bytesRead)) { // We want to differentiate between IOExceptions thrown by the repository and IOExceptions thrown from // Processor code. As a result, as have the FlowFileAccessInputStream that catches IOException from the repository @@ -2904,7 +2904,7 @@ public FlowFile merge(Collection sources, FlowFile destination, final try { try (final OutputStream rawOut = contentRepo.write(newClaim); - final OutputStream out = new BufferedOutputStream(rawOut)) { + final OutputStream out = new BufferedOutputStream(rawOut)) { if (header != null && header.length > 0) { out.write(header); @@ -2950,11 +2950,11 @@ public FlowFile merge(Collection sources, FlowFile destination, final removeTemporaryClaim(destinationRecord); final FlowFileRecord newFile = new StandardFlowFileRecord.Builder() - .fromFlowFile(destinationRecord.getCurrent()) - .contentClaim(newClaim) - .contentClaimOffset(0L) - .size(writtenCount) - .build(); + .fromFlowFile(destinationRecord.getCurrent()) + .contentClaim(newClaim) + .contentClaimOffset(0L) + .size(writtenCount) + .build(); destinationRecord.setWorking(newFile, true); return newFile; } @@ -3069,21 +3069,21 @@ public void close() throws IOException { final FlowFileRecord newFile; if (bytesWritten == 0) { newFile = new StandardFlowFileRecord.Builder() - .fromFlowFile(record.getCurrent()) - .contentClaim(null) - .contentClaimOffset(0) - .size(bytesWritten) - .build(); + .fromFlowFile(record.getCurrent()) + .contentClaim(null) + .contentClaimOffset(0) + .size(bytesWritten) + .build(); context.getContentRepository().decrementClaimantCount(updatedClaim); record.addTransientClaim(updatedClaim); } else { newFile = new StandardFlowFileRecord.Builder() - .fromFlowFile(record.getCurrent()) - .contentClaim(updatedClaim) - .contentClaimOffset(Math.max(0, updatedClaim.getLength() - bytesWritten)) - .size(bytesWritten) - .build(); + .fromFlowFile(record.getCurrent()) + .contentClaim(updatedClaim) + .contentClaimOffset(Math.max(0, updatedClaim.getLength() - bytesWritten)) + .size(bytesWritten) + .build(); } record.setWorking(newFile, true); @@ -3123,9 +3123,9 @@ public FlowFile 
write(FlowFile source, final OutputStreamCallback writer) { ensureNotAppending(newClaim); try (final OutputStream stream = claimCache.write(newClaim); - final NonFlushableOutputStream nonFlushableOutputStream = new NonFlushableOutputStream(stream); - final OutputStream disableOnClose = new DisableOnCloseOutputStream(nonFlushableOutputStream); - final ByteCountingOutputStream countingOut = new ByteCountingOutputStream(disableOnClose)) { + final NonFlushableOutputStream nonFlushableOutputStream = new NonFlushableOutputStream(stream); + final OutputStream disableOnClose = new DisableOnCloseOutputStream(nonFlushableOutputStream); + final ByteCountingOutputStream countingOut = new ByteCountingOutputStream(disableOnClose)) { try { writeRecursionSet.add(source); final OutputStream ffaos = new FlowFileAccessOutputStream(countingOut, source); @@ -3155,21 +3155,21 @@ public FlowFile write(FlowFile source, final OutputStreamCallback writer) { final FlowFileRecord newFile; if (writtenToFlowFile == 0) { newFile = new StandardFlowFileRecord.Builder() - .fromFlowFile(record.getCurrent()) - .contentClaim(null) - .contentClaimOffset(0) - .size(0) - .build(); + .fromFlowFile(record.getCurrent()) + .contentClaim(null) + .contentClaimOffset(0) + .size(0) + .build(); context.getContentRepository().decrementClaimantCount(newClaim); record.addTransientClaim(newClaim); } else { newFile = new StandardFlowFileRecord.Builder() - .fromFlowFile(record.getCurrent()) - .contentClaim(newClaim) - .contentClaimOffset(Math.max(0, newClaim.getLength() - writtenToFlowFile)) - .size(writtenToFlowFile) - .build(); + .fromFlowFile(record.getCurrent()) + .contentClaim(newClaim) + .contentClaimOffset(Math.max(0, newClaim.getLength() - writtenToFlowFile)) + .size(writtenToFlowFile) + .build(); } record.setWorking(newFile, true); @@ -3234,7 +3234,7 @@ public FlowFile append(FlowFile source, final OutputStreamCallback writer) { // Wrap our OutputStreams so that the processor cannot close it try (final OutputStream disableOnClose = new DisableOnCloseOutputStream(nonFlushable); - final OutputStream flowFileAccessOutStream = new FlowFileAccessOutputStream(disableOnClose, source)) { + final OutputStream flowFileAccessOutStream = new FlowFileAccessOutputStream(disableOnClose, source)) { writeRecursionSet.add(source); writer.process(flowFileAccessOutStream); } finally { @@ -3296,21 +3296,21 @@ public FlowFile append(FlowFile source, final OutputStreamCallback writer) { final FlowFileRecord newFile; if (newSize == 0) { newFile = new StandardFlowFileRecord.Builder() - .fromFlowFile(record.getCurrent()) - .contentClaim(null) - .contentClaimOffset(0) - .size(0) - .build(); + .fromFlowFile(record.getCurrent()) + .contentClaim(null) + .contentClaimOffset(0) + .size(0) + .build(); context.getContentRepository().decrementClaimantCount(newClaim); record.addTransientClaim(newClaim); } else { newFile = new StandardFlowFileRecord.Builder() - .fromFlowFile(record.getCurrent()) - .contentClaim(newClaim) - .contentClaimOffset(0) - .size(newSize) - .build(); + .fromFlowFile(record.getCurrent()) + .contentClaim(newClaim) + .contentClaimOffset(0) + .size(newSize) + .build(); } record.setWorking(newFile, true); @@ -3412,13 +3412,13 @@ public FlowFile write(FlowFile source, final StreamCallback writer) { } try (final InputStream is = getInputStream(source, currClaim, record.getCurrentClaimOffset(), true); - final InputStream limitedIn = new LimitedInputStream(is, source.getSize()); - final InputStream disableOnCloseIn = new 
DisableOnCloseInputStream(limitedIn); - final ByteCountingInputStream countingIn = new ByteCountingInputStream(disableOnCloseIn, bytesRead); - final OutputStream os = claimCache.write(newClaim); - final OutputStream nonFlushableOut = new NonFlushableOutputStream(os); - final OutputStream disableOnCloseOut = new DisableOnCloseOutputStream(nonFlushableOut); - final ByteCountingOutputStream countingOut = new ByteCountingOutputStream(disableOnCloseOut)) { + final InputStream limitedIn = new LimitedInputStream(is, source.getSize()); + final InputStream disableOnCloseIn = new DisableOnCloseInputStream(limitedIn); + final ByteCountingInputStream countingIn = new ByteCountingInputStream(disableOnCloseIn, bytesRead); + final OutputStream os = claimCache.write(newClaim); + final OutputStream nonFlushableOut = new NonFlushableOutputStream(os); + final OutputStream disableOnCloseOut = new DisableOnCloseOutputStream(nonFlushableOut); + final ByteCountingOutputStream countingOut = new ByteCountingOutputStream(disableOnCloseOut)) { writeRecursionSet.add(source); @@ -3464,21 +3464,21 @@ public FlowFile write(FlowFile source, final StreamCallback writer) { if (writtenToFlowFile == 0) { newFile = new StandardFlowFileRecord.Builder() - .fromFlowFile(record.getCurrent()) - .contentClaim(null) - .contentClaimOffset(0) - .size(0) - .build(); + .fromFlowFile(record.getCurrent()) + .contentClaim(null) + .contentClaimOffset(0) + .size(0) + .build(); context.getContentRepository().decrementClaimantCount(newClaim); record.addTransientClaim(newClaim); } else { newFile = new StandardFlowFileRecord.Builder() - .fromFlowFile(record.getCurrent()) - .contentClaim(newClaim) - .contentClaimOffset(Math.max(0L, newClaim.getLength() - writtenToFlowFile)) - .size(writtenToFlowFile) - .build(); + .fromFlowFile(record.getCurrent()) + .contentClaim(newClaim) + .contentClaimOffset(Math.max(0L, newClaim.getLength() - writtenToFlowFile)) + .size(writtenToFlowFile) + .build(); } record.setWorking(newFile, true); @@ -3525,23 +3525,23 @@ public FlowFile importFrom(final Path source, final boolean keepSourceFile, Flow final FlowFileRecord newFile; if (newSize == 0) { newFile = new StandardFlowFileRecord.Builder() - .fromFlowFile(record.getCurrent()) - .contentClaim(null) - .contentClaimOffset(0) - .size(0) - .addAttribute(CoreAttributes.FILENAME.key(), source.toFile().getName()) - .build(); + .fromFlowFile(record.getCurrent()) + .contentClaim(null) + .contentClaimOffset(0) + .size(0) + .addAttribute(CoreAttributes.FILENAME.key(), source.toFile().getName()) + .build(); context.getContentRepository().decrementClaimantCount(newClaim); record.addTransientClaim(newClaim); } else { newFile = new StandardFlowFileRecord.Builder() - .fromFlowFile(record.getCurrent()) - .contentClaim(newClaim) - .contentClaimOffset(claimOffset) - .size(newSize) - .addAttribute(CoreAttributes.FILENAME.key(), source.toFile().getName()) - .build(); + .fromFlowFile(record.getCurrent()) + .contentClaim(newClaim) + .contentClaimOffset(claimOffset) + .size(newSize) + .addAttribute(CoreAttributes.FILENAME.key(), source.toFile().getName()) + .build(); } record.setWorking(newFile, CoreAttributes.FILENAME.key(), source.toFile().getName(), true); @@ -3585,21 +3585,21 @@ public FlowFile importFrom(final InputStream source, FlowFile destination) { final FlowFileRecord newFile; if (newSize == 0) { newFile = new StandardFlowFileRecord.Builder() - .fromFlowFile(record.getCurrent()) - .contentClaim(null) - .contentClaimOffset(0) - .size(0) - .build(); + 
.fromFlowFile(record.getCurrent()) + .contentClaim(null) + .contentClaimOffset(0) + .size(0) + .build(); context.getContentRepository().decrementClaimantCount(newClaim); record.addTransientClaim(newClaim); } else { newFile = new StandardFlowFileRecord.Builder() - .fromFlowFile(record.getCurrent()) - .contentClaim(newClaim) - .contentClaimOffset(claimOffset) - .size(newSize) - .build(); + .fromFlowFile(record.getCurrent()) + .contentClaim(newClaim) + .contentClaimOffset(claimOffset) + .size(newSize) + .build(); } record.setWorking(newFile, true); @@ -3642,9 +3642,9 @@ public void exportTo(FlowFile source, final OutputStream destination) { } try (final InputStream rawIn = getInputStream(source, record.getCurrentClaim(), record.getCurrentClaimOffset(), true); - final InputStream limitedIn = new LimitedInputStream(rawIn, source.getSize()); - final InputStream disableOnCloseIn = new DisableOnCloseInputStream(limitedIn); - final ByteCountingInputStream countingStream = new ByteCountingInputStream(disableOnCloseIn, this.bytesRead)) { + final InputStream limitedIn = new LimitedInputStream(rawIn, source.getSize()); + final InputStream disableOnCloseIn = new DisableOnCloseInputStream(limitedIn); + final ByteCountingInputStream countingStream = new ByteCountingInputStream(disableOnCloseIn, this.bytesRead)) { // We want to differentiate between IOExceptions thrown by the repository and IOExceptions thrown from // Processor code. As a result, as have the FlowFileAccessInputStream that catches IOException from the repository diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-components/src/main/java/org/apache/nifi/controller/repository/StandardProvenanceReporter.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-components/src/main/java/org/apache/nifi/controller/repository/StandardProvenanceReporter.java index cb98852a7785..203d234c316e 100644 --- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-components/src/main/java/org/apache/nifi/controller/repository/StandardProvenanceReporter.java +++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-components/src/main/java/org/apache/nifi/controller/repository/StandardProvenanceReporter.java @@ -17,7 +17,6 @@ package org.apache.nifi.controller.repository; import org.apache.nifi.flowfile.FlowFile; -import org.apache.nifi.flowfile.attributes.CoreAttributes; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.FlowFileHandlingException; import org.apache.nifi.provenance.InternalProvenanceReporter; @@ -28,11 +27,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashSet; -import java.util.List; import java.util.Set; import java.util.function.Predicate; @@ -53,7 +50,7 @@ public class StandardProvenanceReporter implements InternalProvenanceReporter { private long bytesFetched = 0L; public StandardProvenanceReporter(final Predicate flowfileKnownCheck, final String processorId, final String processorType, - final ProvenanceEventRepository repository, final ProvenanceEventEnricher enricher) { + final ProvenanceEventRepository repository, final ProvenanceEventEnricher enricher) { this.flowfileKnownCheck = flowfileKnownCheck; this.processorId = processorId; this.processorType = processorType; @@ -118,7 +115,7 @@ public void receiveMigration(final Set events) { } /** - * Generates a Join event for the given child and 
parents but does not register the event. This is useful so that a ProcessSession has the ability to de-dupe events, since one or more events may + * Generates a Fork event for the given child and parents but does not register the event. This is useful so that a ProcessSession has the ability to de-dupe events, since one or more events may * be created by the session itself, as well as by the Processor * * @param parents parents @@ -130,59 +127,16 @@ public ProvenanceEventRecord generateJoinEvent(final Collection parent final ProvenanceEventBuilder eventBuilder = build(child, ProvenanceEventType.JOIN); eventBuilder.addChildFlowFile(child); - List parentEventIds = new ArrayList<>(parents.size()); for (final FlowFile parent : parents) { eventBuilder.addParentFlowFile(parent); - parentEventIds.addAll(repository.getPreviousEventIds(parent.getAttribute(CoreAttributes.UUID.key()))); } - eventBuilder.setPreviousEventIds(parentEventIds); - ProvenanceEventRecord record = eventBuilder.build(); - repository.updatePreviousEventIds(record, parentEventIds); - return record; + return eventBuilder.build(); } @Override public ProvenanceEventRecord generateDropEvent(final FlowFile flowFile, final String details) { - final String flowFileUUID = flowFile.getAttribute(CoreAttributes.UUID.key()); - final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.DROP) - .setDetails(details) - .setPreviousEventIds(repository.getPreviousEventIds(flowFileUUID)) - .build(); - repository.updatePreviousEventIds(record, null); - return record; - } - - @Override - public ProvenanceEventRecord generateModifyContentEvent(final FlowFile flowFile, final String details) { - final String flowFileUUID = flowFile.getAttribute(CoreAttributes.UUID.key()); - final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.CONTENT_MODIFIED) - .setDetails(details) - .setPreviousEventIds(repository.getPreviousEventIds(flowFileUUID)) - .build(); - repository.updatePreviousEventIds(record, Collections.singletonList(record.getEventId())); - return record; - } - - @Override - public ProvenanceEventRecord generateModifyAttributesEvent(final FlowFile flowFile, final String details) { - final String flowFileUUID = flowFile.getAttribute(CoreAttributes.UUID.key()); - final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.ATTRIBUTES_MODIFIED) - .setDetails(details) - .setPreviousEventIds(repository.getPreviousEventIds(flowFileUUID)) - .build(); - repository.updatePreviousEventIds(record, Collections.singletonList(record.getEventId())); - return record; - } - - @Override - public ProvenanceEventRecord generateCreateEvent(final FlowFile flowFile, final String details) { - final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.CREATE) - .setDetails(details) - .setPreviousEventIds(Collections.emptyList()) - .build(); - repository.updatePreviousEventIds(record, Collections.singletonList(record.getEventId())); - return record; + return build(flowFile, ProvenanceEventType.DROP).setDetails(details).build(); } private void verifyFlowFileKnown(final FlowFile flowFile) { @@ -192,27 +146,27 @@ private void verifyFlowFileKnown(final FlowFile flowFile) { } @Override - public void receive(final FlowFile flowFile, final String transitUri) { - receive(flowFile, transitUri, -1L); + public void receive(final FlowFile flowFile, final String transitUri, final Relationship relationship) { + receive(flowFile, transitUri, -1L, relationship); } @Override - public void receive(FlowFile flowFile, String transitUri, 
String sourceSystemFlowFileIdentifier) { - receive(flowFile, transitUri, sourceSystemFlowFileIdentifier, -1L); + public void receive(FlowFile flowFile, String transitUri, String sourceSystemFlowFileIdentifier, final Relationship relationship) { + receive(flowFile, transitUri, sourceSystemFlowFileIdentifier, -1L, relationship); } @Override - public void receive(final FlowFile flowFile, final String transitUri, final long transmissionMillis) { - receive(flowFile, transitUri, null, transmissionMillis); + public void receive(final FlowFile flowFile, final String transitUri, final long transmissionMillis, final Relationship relationship) { + receive(flowFile, transitUri, null, transmissionMillis, relationship); } @Override - public void receive(final FlowFile flowFile, final String transitUri, final String sourceSystemFlowFileIdentifier, final long transmissionMillis) { - receive(flowFile, transitUri, sourceSystemFlowFileIdentifier, null, transmissionMillis); + public void receive(final FlowFile flowFile, final String transitUri, final String sourceSystemFlowFileIdentifier, final long transmissionMillis, final Relationship relationship) { + receive(flowFile, transitUri, sourceSystemFlowFileIdentifier, null, transmissionMillis, relationship); } @Override - public void receive(final FlowFile flowFile, final String transitUri, final String sourceSystemFlowFileIdentifier, final String details, final long transmissionMillis) { + public void receive(final FlowFile flowFile, final String transitUri, final String sourceSystemFlowFileIdentifier, final String details, final long transmissionMillis, final Relationship relationship) { verifyFlowFileKnown(flowFile); try { @@ -220,11 +174,9 @@ public void receive(final FlowFile flowFile, final String transitUri, final Stri .setTransitUri(transitUri) .setSourceSystemFlowFileIdentifier(sourceSystemFlowFileIdentifier) .setEventDuration(transmissionMillis) - .setDetails(details) - .setPreviousEventIds(Collections.singletonList(-1L)) - .build(); + .setRelationship(relationship) + .setDetails(details).build(); events.add(record); - repository.updatePreviousEventIds(record, Collections.singletonList(record.getEventId())); bytesReceived += flowFile.getSize(); flowFilesReceived++; @@ -237,30 +189,28 @@ public void receive(final FlowFile flowFile, final String transitUri, final Stri } @Override - public void fetch(final FlowFile flowFile, final String transitUri) { - fetch(flowFile, transitUri, -1L); + public void fetch(final FlowFile flowFile, final String transitUri, final Relationship relationship) { + fetch(flowFile, transitUri, -1L, relationship); } @Override - public void fetch(final FlowFile flowFile, final String transitUri, final long transmissionMillis) { - fetch(flowFile, transitUri, null, transmissionMillis); + public void fetch(final FlowFile flowFile, final String transitUri, final long transmissionMillis, final Relationship relationship) { + fetch(flowFile, transitUri, null, transmissionMillis, relationship); } @Override - public void fetch(final FlowFile flowFile, final String transitUri, final String details, final long transmissionMillis) { + public void fetch(final FlowFile flowFile, final String transitUri, final String details, final long transmissionMillis, final Relationship relationship) { verifyFlowFileKnown(flowFile); try { - final String flowFileUUID = flowFile.getAttribute(CoreAttributes.UUID.key()); final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.FETCH) .setTransitUri(transitUri) 
.setEventDuration(transmissionMillis) .setDetails(details) - .setPreviousEventIds(repository.getPreviousEventIds(flowFileUUID)) + .setRelationship(relationship) .build(); events.add(record); - repository.updatePreviousEventIds(record, Collections.singletonList(record.getEventId())); bytesFetched += flowFile.getSize(); flowFilesFetched++; @@ -273,43 +223,43 @@ public void fetch(final FlowFile flowFile, final String transitUri, final String } @Override - public void send(final FlowFile flowFile, final String transitUri, final long transmissionMillis) { - send(flowFile, transitUri, transmissionMillis, true); + public void send(final FlowFile flowFile, final String transitUri, final long transmissionMillis, final Relationship relationship) { + send(flowFile, transitUri, transmissionMillis, true, relationship); } @Override - public void send(final FlowFile flowFile, final String transitUri) { - send(flowFile, transitUri, null, -1L, true); + public void send(final FlowFile flowFile, final String transitUri, final Relationship relationship) { + send(flowFile, transitUri, null, -1L, true, relationship); } @Override - public void send(final FlowFile flowFile, final String transitUri, final String details) { - send(flowFile, transitUri, details, -1L, true); + public void send(final FlowFile flowFile, final String transitUri, final String details, final Relationship relationship) { + send(flowFile, transitUri, details, -1L, true, relationship); } @Override - public void send(final FlowFile flowFile, final String transitUri, final long transmissionMillis, final boolean force) { - send(flowFile, transitUri, null, transmissionMillis, force); + public void send(final FlowFile flowFile, final String transitUri, final long transmissionMillis, final boolean force, final Relationship relationship) { + send(flowFile, transitUri, null, transmissionMillis, force, relationship); } @Override - public void send(final FlowFile flowFile, final String transitUri, final String details, final boolean force) { - send(flowFile, transitUri, details, -1L, force); + public void send(final FlowFile flowFile, final String transitUri, final String details, final boolean force, final Relationship relationship) { + send(flowFile, transitUri, details, -1L, force, relationship); } @Override - public void send(final FlowFile flowFile, final String transitUri, final String details, final long transmissionMillis) { - send(flowFile, transitUri, details, transmissionMillis, true); + public void send(final FlowFile flowFile, final String transitUri, final String details, final long transmissionMillis, final Relationship relationship) { + send(flowFile, transitUri, details, transmissionMillis, true, relationship); } @Override - public void send(final FlowFile flowFile, final String transitUri, final String details, final long transmissionMillis, final boolean force) { + public void send(final FlowFile flowFile, final String transitUri, final String details, final long transmissionMillis, final boolean force, final Relationship relationship) { try { final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.SEND) .setTransitUri(transitUri) .setEventDuration(transmissionMillis) .setDetails(details) - .setPreviousEventIds(repository.getPreviousEventIds(flowFile.getAttribute(CoreAttributes.UUID.key()))) + .setRelationship(relationship) .build(); // If the transmissionMillis field has been populated, use zero as the value of commitNanos (the call to System.nanoTime() is expensive but the value will be ignored). 
final long commitNanos = transmissionMillis < 0 ? System.nanoTime() : 0L; @@ -320,7 +270,6 @@ public void send(final FlowFile flowFile, final String transitUri, final String } else { events.add(enriched); } - repository.updatePreviousEventIds(enriched, enriched.getPreviousEventIds()); bytesSent += flowFile.getSize(); flowFilesSent++; @@ -333,25 +282,30 @@ public void send(final FlowFile flowFile, final String transitUri, final String } @Override - public void send(final FlowFile flowFile, final String transitUri, final boolean force) { - send(flowFile, transitUri, -1L, force); + public void send(final FlowFile flowFile, final String transitUri, final boolean force, final Relationship relationship) { + send(flowFile, transitUri, -1L, force, relationship); + } + + @Override + public void invokeRemoteProcess(final FlowFile flowFile, final String transitUri, final Relationship relationship) { + invokeRemoteProcess(flowFile, transitUri, null, relationship); } @Override - public void invokeRemoteProcess(final FlowFile flowFile, final String transitUri) { - invokeRemoteProcess(flowFile, transitUri, null); + public void invokeRemoteProcess(FlowFile flowFile, String transitUri, String details, final Relationship relationship) { + invokeRemoteProcess(flowFile, transitUri, details, -1L, relationship); } @Override - public void invokeRemoteProcess(FlowFile flowFile, String transitUri, String details) { + public void invokeRemoteProcess(FlowFile flowFile, String transitUri, String details, long transmissionMillis, final Relationship relationship) { try { final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.REMOTE_INVOCATION) .setTransitUri(transitUri) .setDetails(details) - .setPreviousEventIds(repository.getPreviousEventIds(flowFile.getAttribute(CoreAttributes.UUID.key()))) + .setEventDuration(transmissionMillis) + .setRelationship(relationship) .build(); events.add(record); - repository.updatePreviousEventIds(record, Collections.singletonList(record.getEventId())); } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); if (logger.isDebugEnabled()) { @@ -361,7 +315,7 @@ public void invokeRemoteProcess(FlowFile flowFile, String transitUri, String det } @Override - public void associate(final FlowFile flowFile, final String alternateIdentifierNamespace, final String alternateIdentifier) { + public void associate(final FlowFile flowFile, final String alternateIdentifierNamespace, final String alternateIdentifier, final Relationship relationship) { try { String trimmedNamespace = alternateIdentifierNamespace.trim(); if (trimmedNamespace.endsWith(":")) { @@ -379,10 +333,9 @@ public void associate(final FlowFile flowFile, final String alternateIdentifierN final String alternateIdentifierUri = trimmedNamespace + ":" + trimmedIdentifier; final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.ADDINFO) .setAlternateIdentifierUri(alternateIdentifierUri) - .setPreviousEventIds(repository.getPreviousEventIds(flowFile.getAttribute(CoreAttributes.UUID.key()))) + .setRelationship(relationship) .build(); events.add(record); - repository.updatePreviousEventIds(record, Collections.singletonList(record.getEventId())); } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); if (logger.isDebugEnabled()) { @@ -398,10 +351,8 @@ public ProvenanceEventRecord drop(final FlowFile flowFile, final String reason) if (reason != null) { builder.setDetails("Discard reason: " + reason); } - 
builder.setPreviousEventIds(repository.getPreviousEventIds(flowFile.getAttribute(CoreAttributes.UUID.key()))); final ProvenanceEventRecord record = builder.build(); events.add(record); - repository.updatePreviousEventIds(record, null); return record; } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); @@ -415,12 +366,8 @@ public ProvenanceEventRecord drop(final FlowFile flowFile, final String reason) @Override public void expire(final FlowFile flowFile, final String details) { try { - final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.EXPIRE) - .setDetails(details) - .setPreviousEventIds(repository.getPreviousEventIds(flowFile.getAttribute(CoreAttributes.UUID.key()))) - .build(); + final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.EXPIRE).setDetails(details).build(); events.add(record); - repository.updatePreviousEventIds(record, null); } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); if (logger.isDebugEnabled()) { @@ -431,25 +378,29 @@ public void expire(final FlowFile flowFile, final String details) { @Override public void fork(final FlowFile parent, final Collection children) { - fork(parent, children, null, -1L); + fork(parent, children, null, -1L, null); + } + + public void fork(final FlowFile parent, final Collection children, final Relationship relationship) { + fork(parent, children, null, -1L, relationship); + } @Override - public void fork(final FlowFile parent, final Collection children, final long forkDuration) { - fork(parent, children, null, forkDuration); + public void fork(final FlowFile parent, final Collection children, final long forkDuration, final Relationship relationship) { + fork(parent, children, null, forkDuration, relationship); } @Override - public void fork(final FlowFile parent, final Collection children, final String details) { - fork(parent, children, details, -1L); + public void fork(final FlowFile parent, final Collection children, final String details, final Relationship relationship) { + fork(parent, children, details, -1L, relationship); } @Override - public void fork(final FlowFile parent, final Collection children, final String details, final long forkDuration) { + public void fork(final FlowFile parent, final Collection children, final String details, final long forkDuration, final Relationship relationship) { verifyFlowFileKnown(parent); try { - final ProvenanceEventBuilder eventBuilder = build(parent, ProvenanceEventType.FORK); + final ProvenanceEventBuilder eventBuilder = build(parent, ProvenanceEventType.FORK).setRelationship(relationship); eventBuilder.addParentFlowFile(parent); for (final FlowFile child : children) { eventBuilder.addChildFlowFile(child); @@ -463,14 +414,7 @@ public void fork(final FlowFile parent, final Collection children, fin eventBuilder.setDetails(details); } - final ProvenanceEventRecord record = eventBuilder.build(); - events.add(record); - for (final FlowFile child : children) { - // Add the child FlowFiles to the previous event ID map with the parent's entry in the map - repository.updatePreviousEventIds( - record, - repository.getPreviousEventIds(parent.getAttribute(CoreAttributes.UUID.key()))); - } + events.add(eventBuilder.build()); } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); if (logger.isDebugEnabled()) { @@ -481,21 +425,26 @@ public void fork(final FlowFile parent, final Collection children, fin @Override public void join(final Collection
parents, final FlowFile child) { - join(parents, child, null, -1L); + join(parents, child, null, -1L, null); + } + + @Override + public void join(final Collection parents, final FlowFile child, final Relationship relationship) { + join(parents, child, null, -1L, relationship); } @Override - public void join(final Collection parents, final FlowFile child, final long joinDuration) { - join(parents, child, null, joinDuration); + public void join(final Collection parents, final FlowFile child, final long joinDuration, final Relationship relationship) { + join(parents, child, null, joinDuration, relationship); } @Override - public void join(final Collection parents, final FlowFile child, final String details) { - join(parents, child, details, -1L); + public void join(final Collection parents, final FlowFile child, final String details, final Relationship relationship) { + join(parents, child, details, -1L, relationship); } @Override - public void join(final Collection parents, final FlowFile child, final String details, final long joinDuration) { + public void join(final Collection parents, final FlowFile child, final String details, final long joinDuration, final Relationship relationship) { verifyFlowFileKnown(child); try { @@ -503,16 +452,13 @@ public void join(final Collection parents, final FlowFile child, final eventBuilder.addChildFlowFile(child); eventBuilder.setDetails(details); - List parentEventIds = new ArrayList<>(parents.size()); for (final FlowFile parent : parents) { eventBuilder.addParentFlowFile(parent); - parentEventIds.addAll(repository.getPreviousEventIds(parent.getAttribute(CoreAttributes.UUID.key()))); } - eventBuilder.setPreviousEventIds(parentEventIds); - final ProvenanceEventRecord record = eventBuilder.build(); - events.add(record); - repository.updatePreviousEventIds(record, parentEventIds); + + eventBuilder.setEventDuration(joinDuration).setRelationship(relationship); + events.add(eventBuilder.build()); } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); if (logger.isDebugEnabled()) { @@ -534,13 +480,9 @@ public void clone(final FlowFile parent, final FlowFile child, final boolean ver try { final ProvenanceEventBuilder eventBuilder = build(parent, ProvenanceEventType.CLONE); - final ProvenanceEventRecord event = eventBuilder - .addChildFlowFile(child) - .addParentFlowFile(parent) - .setPreviousEventIds(repository.getPreviousEventIds(parent.getAttribute(CoreAttributes.UUID.key()))) - .build(); - events.add(event); - repository.updatePreviousEventIds(event, Collections.singletonList(event.getEventId())); + eventBuilder.addChildFlowFile(child); + eventBuilder.addParentFlowFile(parent); + events.add(eventBuilder.build()); } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); if (logger.isDebugEnabled()) { @@ -550,32 +492,31 @@ public void clone(final FlowFile parent, final FlowFile child, final boolean ver } @Override - public void modifyContent(final FlowFile flowFile) { - modifyContent(flowFile, null, -1L); + public void modifyContent(final FlowFile flowFile, final Relationship relationship) { + modifyContent(flowFile, null, -1L, relationship); } @Override - public void modifyContent(final FlowFile flowFile, final String details) { - modifyContent(flowFile, details, -1L); + public void modifyContent(final FlowFile flowFile, final String details, final Relationship relationship) { + modifyContent(flowFile, details, -1L, relationship); } @Override - public void modifyContent(final FlowFile
flowFile, final long processingMillis) { - modifyContent(flowFile, null, processingMillis); + public void modifyContent(final FlowFile flowFile, final long processingMillis, final Relationship relationship) { + modifyContent(flowFile, null, processingMillis, relationship); } @Override - public void modifyContent(final FlowFile flowFile, final String details, final long processingMillis) { + public void modifyContent(final FlowFile flowFile, final String details, final long processingMillis, final Relationship relationship) { verifyFlowFileKnown(flowFile); try { final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.CONTENT_MODIFIED) .setEventDuration(processingMillis) .setDetails(details) - .setPreviousEventIds(repository.getPreviousEventIds(flowFile.getAttribute(CoreAttributes.UUID.key()))) + .setRelationship(relationship) .build(); events.add(record); - repository.updatePreviousEventIds(record, Collections.singletonList(record.getEventId())); } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); if (logger.isDebugEnabled()) { @@ -585,21 +526,20 @@ public void modifyContent(final FlowFile flowFile, final String details, final l } @Override - public void modifyAttributes(final FlowFile flowFile) { - modifyAttributes(flowFile, null); + public void modifyAttributes(final FlowFile flowFile, final Relationship relationship) { + modifyAttributes(flowFile, null, relationship); } @Override - public void modifyAttributes(final FlowFile flowFile, final String details) { + public void modifyAttributes(final FlowFile flowFile, final String details, final Relationship relationship) { verifyFlowFileKnown(flowFile); try { final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.ATTRIBUTES_MODIFIED) .setDetails(details) - .setPreviousEventIds(repository.getPreviousEventIds(flowFile.getAttribute(CoreAttributes.UUID.key()))) + .setRelationship(relationship) .build(); events.add(record); - repository.updatePreviousEventIds(record, Collections.singletonList(record.getEventId())); } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); if (logger.isDebugEnabled()) { @@ -628,13 +568,12 @@ public void route(final FlowFile flowFile, final Relationship relationship, fina verifyFlowFileKnown(flowFile); try { - final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.ROUTE).setRelationship(relationship) + final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.ROUTE) + .setRelationship(relationship) .setDetails(details) .setEventDuration(processingDuration) - .setPreviousEventIds(repository.getPreviousEventIds(flowFile.getAttribute(CoreAttributes.UUID.key()))) .build(); events.add(record); - repository.updatePreviousEventIds(record, Collections.singletonList(record.getEventId())); } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); if (logger.isDebugEnabled()) { @@ -644,18 +583,20 @@ public void route(final FlowFile flowFile, final Relationship relationship, fina } @Override - public void create(final FlowFile flowFile) { - create(flowFile, null); + public void create(final FlowFile flowFile, final Relationship relationship) { + create(flowFile, null, relationship); } @Override - public void create(final FlowFile flowFile, final String details) { + public void create(final FlowFile flowFile, final String details, final Relationship relationship) { verifyFlowFileKnown(flowFile); try { - final ProvenanceEventRecord record = 
build(flowFile, ProvenanceEventType.CREATE).setDetails(details).setPreviousEventIds(Collections.emptyList()).build(); + final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.CREATE) + .setDetails(details) + .setRelationship(relationship) + .build(); events.add(record); - repository.updatePreviousEventIds(record, Collections.singletonList(record.getEventId())); } catch (final Exception e) { logger.error("Failed to generate Provenance Event due to " + e); if (logger.isDebugEnabled()) { diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/provenance/InternalProvenanceReporter.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/provenance/InternalProvenanceReporter.java index 24b1d73e923b..6570865ae2a4 100644 --- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/provenance/InternalProvenanceReporter.java +++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/provenance/InternalProvenanceReporter.java @@ -33,12 +33,6 @@ public interface InternalProvenanceReporter extends ProvenanceReporter { ProvenanceEventRecord generateJoinEvent(Collection parents, FlowFile child); - ProvenanceEventRecord generateModifyContentEvent(FlowFile flowFile, String explanation); - - ProvenanceEventRecord generateCreateEvent(FlowFile flowFile, String explanation); - - ProvenanceEventRecord generateModifyAttributesEvent(FlowFile flowFile, String explanation); - void remove(ProvenanceEventRecord event); void removeEventsForFlowFile(String uuid); diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/test/java/org/apache/nifi/controller/repository/StandardProcessSessionIT.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/test/java/org/apache/nifi/controller/repository/StandardProcessSessionIT.java index ea38d07b6d16..06a659e2dd9a 100644 --- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/test/java/org/apache/nifi/controller/repository/StandardProcessSessionIT.java +++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/test/java/org/apache/nifi/controller/repository/StandardProcessSessionIT.java @@ -1449,10 +1449,11 @@ public void testProvenanceEventsHaveDurationFromSession() throws IOException { final FlowFile orig = session.get(); final FlowFile newFlowFile = session.create(orig); - session.getProvenanceReporter().fork(orig, Collections.singletonList(newFlowFile), 0L); - session.getProvenanceReporter().fetch(newFlowFile, "nowhere://"); - session.getProvenanceReporter().send(newFlowFile, "nowhere://"); - session.transfer(newFlowFile, new Relationship.Builder().name("A").build()); + final Relationship a = new Relationship.Builder().name("A").build(); + session.getProvenanceReporter().fork(orig, Collections.singletonList(newFlowFile), 0L, a); + session.getProvenanceReporter().fetch(newFlowFile, "nowhere://", a); + session.getProvenanceReporter().send(newFlowFile, "nowhere://", a); + session.transfer(newFlowFile, a); session.commit(); List events = provenanceRepo.getEvents(0L, 100000); diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-site-to-site/src/main/java/org/apache/nifi/remote/StandardRemoteGroupPort.java 
b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-site-to-site/src/main/java/org/apache/nifi/remote/StandardRemoteGroupPort.java index b6e6b0460d21..ac89ac10d691 100644 --- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-site-to-site/src/main/java/org/apache/nifi/remote/StandardRemoteGroupPort.java +++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-site-to-site/src/main/java/org/apache/nifi/remote/StandardRemoteGroupPort.java @@ -338,7 +338,7 @@ public void process(final InputStream in) throws IOException { final String transitUri = transaction.getCommunicant().createTransitUri(flowFile.getAttribute(CoreAttributes.UUID.key())); flowFile = session.putAttribute(flowFile, SiteToSiteAttributes.S2S_PORT_ID.key(), getTargetIdentifier()); - session.getProvenanceReporter().send(flowFile, transitUri, "Remote DN=" + userDn, transferMillis, false); + session.getProvenanceReporter().send(flowFile, transitUri, "Remote DN=" + userDn, transferMillis, false, null); session.remove(flowFile); final long sendingNanos = System.nanoTime() - startSendingNanos; @@ -419,7 +419,7 @@ private int receiveFlowFiles(final Transaction transaction, final ProcessContext final String transitUri = transaction.getCommunicant().createTransitUri(sourceFlowFileIdentifier); session.getProvenanceReporter().receive(flowFile, transitUri, "urn:nifi:" + sourceFlowFileIdentifier, - "Remote DN=" + userDn, TimeUnit.NANOSECONDS.toMillis(receiveNanos)); + "Remote DN=" + userDn, TimeUnit.NANOSECONDS.toMillis(receiveNanos), Relationship.ANONYMOUS); session.transfer(flowFile, Relationship.ANONYMOUS); bytesReceived += dataPacket.getSize(); diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-site-to-site/src/main/java/org/apache/nifi/remote/protocol/AbstractFlowFileServerProtocol.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-site-to-site/src/main/java/org/apache/nifi/remote/protocol/AbstractFlowFileServerProtocol.java index e97379785507..1ce3d916da3f 100644 --- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-site-to-site/src/main/java/org/apache/nifi/remote/protocol/AbstractFlowFileServerProtocol.java +++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-site-to-site/src/main/java/org/apache/nifi/remote/protocol/AbstractFlowFileServerProtocol.java @@ -254,12 +254,9 @@ public int transferFlowFiles(final Peer peer, final ProcessContext context, fina final StopWatch transferWatch = new StopWatch(true); final FlowFile toSend = flowFile; - session.read(flowFile, new InputStreamCallback() { - @Override - public void process(final InputStream in) throws IOException { - final DataPacket dataPacket = new StandardDataPacket(toSend.getAttributes(), in, toSend.getSize()); - codec.encode(dataPacket, checkedOutputStream); - } + session.read(flowFile, in -> { + final DataPacket dataPacket = new StandardDataPacket(toSend.getAttributes(), in, toSend.getSize()); + codec.encode(dataPacket, checkedOutputStream); }); final long transmissionMillis = transferWatch.getElapsed(TimeUnit.MILLISECONDS); @@ -275,7 +272,7 @@ public void process(final InputStream in) throws IOException { bytesSent += flowFile.getSize(); final String transitUri = createTransitUri(peer, flowFile.getAttribute(CoreAttributes.UUID.key())); - session.getProvenanceReporter().send(flowFile, transitUri, "Remote Host=" + peer.getHost() + ", Remote DN=" + remoteDn, transmissionMillis, false); + session.getProvenanceReporter().send(flowFile, transitUri, "Remote Host=" + peer.getHost() + ", Remote DN=" 
+ remoteDn, transmissionMillis, false, null); session.remove(flowFile); // determine if we should check for more data on queue. @@ -368,7 +365,7 @@ protected int commitTransferTransaction(Peer peer, FlowFileTransaction transacti throw e; } - logger.debug("{} received {} from {}", new Object[]{this, transactionResponse, peer}); + logger.debug("{} received {} from {}", this, transactionResponse, peer); if (transactionResponse.getCode() == ResponseCode.TRANSACTION_FINISHED_BUT_DESTINATION_FULL) { peer.penalize(port.getIdentifier(), port.getYieldPeriod(TimeUnit.MILLISECONDS)); } else if (transactionResponse.getCode() != ResponseCode.TRANSACTION_FINISHED) { @@ -474,7 +471,7 @@ public int receiveFlowFiles(final Peer peer, final ProcessContext context, final final String transitUri = createTransitUri(peer, sourceSystemFlowFileUuid); session.getProvenanceReporter().receive(flowFile, transitUri, sourceSystemFlowFileUuid == null - ? null : "urn:nifi:" + sourceSystemFlowFileUuid, "Remote Host=" + peer.getHost() + ", Remote DN=" + remoteDn, transferMillis); + ? null : "urn:nifi:" + sourceSystemFlowFileUuid, "Remote Host=" + peer.getHost() + ", Remote DN=" + remoteDn, transferMillis, Relationship.ANONYMOUS); session.transfer(flowFile, Relationship.ANONYMOUS); flowFilesReceived.add(flowFile); bytesReceived += flowFile.getSize(); diff --git a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/PutBigQueryBatch.java b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/PutBigQueryBatch.java index 6b229fe91429..c3f6eb480150 100644 --- a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/PutBigQueryBatch.java +++ b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/PutBigQueryBatch.java @@ -348,7 +348,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro flowFile = session.penalize(flowFile); session.transfer(flowFile, REL_FAILURE); } else { - session.getProvenanceReporter().send(flowFile, job.getSelfLink(), job.getStatistics().getEndTime() - job.getStatistics().getStartTime()); + session.getProvenanceReporter().send(flowFile, job.getSelfLink(), job.getStatistics().getEndTime() - job.getStatistics().getStartTime(), REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } } diff --git a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/FetchGoogleDrive.java b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/FetchGoogleDrive.java index bbdb5550423f..ef1337558f32 100644 --- a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/FetchGoogleDrive.java +++ b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/FetchGoogleDrive.java @@ -265,7 +265,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro final String url = DRIVE_URL + fileMetadata.getId(); final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().fetch(flowFile, url, transferMillis); + session.getProvenanceReporter().fetch(flowFile, url, transferMillis, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } catch (GoogleJsonResponseException e) { handleErrorResponse(session, fileId, flowFile, e); 
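Taken together, the processor changes follow one pattern: the relationship the FlowFile is about to be transferred to is now passed to the ProvenanceReporter alongside the transit URI. A minimal, illustrative sketch of a migrated call site under the new send(FlowFile, String, long, Relationship) signature (the transit URI and REL_SUCCESS here are placeholders, not part of this change set):

    final long startNanos = System.nanoTime();
    // ... write the FlowFile content to the external system ...
    final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    // the target relationship now travels with the SEND provenance event
    session.getProvenanceReporter().send(flowFile, "https://example.com/upload", millis, REL_SUCCESS);
    session.transfer(flowFile, REL_SUCCESS);

Call sites with no outgoing transfer (the site-to-site sends above, which remove the FlowFile) pass null instead, while framework-level receives use Relationship.ANONYMOUS.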
diff --git a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/PutGoogleDrive.java b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/PutGoogleDrive.java index 632ff541e5de..6dad97a5e5a8 100644 --- a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/PutGoogleDrive.java +++ b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/PutGoogleDrive.java @@ -286,7 +286,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session final String url = DRIVE_URL + uploadedFile.getId(); flowFile = session.putAllAttributes(flowFile, attributes); final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().send(flowFile, url, transferMillis); + session.getProvenanceReporter().send(flowFile, url, transferMillis, REL_SUCCESS); } session.transfer(flowFile, REL_SUCCESS); } catch (GoogleJsonResponseException e) { diff --git a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/ConsumeGCPubSub.java b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/ConsumeGCPubSub.java index 77d9fcac87de..0bda3ddbb268 100644 --- a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/ConsumeGCPubSub.java +++ b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/ConsumeGCPubSub.java @@ -237,7 +237,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session flowFile = session.write(flowFile, out -> out.write(message.getMessage().getData().toByteArray())); session.transfer(flowFile, REL_SUCCESS); - session.getProvenanceReporter().receive(flowFile, subscriptionName); + session.getProvenanceReporter().receive(flowFile, subscriptionName, REL_SUCCESS); } } diff --git a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/PublishGCPubSub.java b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/PublishGCPubSub.java index d48652f0b6b6..6e07fa774c6a 100644 --- a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/PublishGCPubSub.java +++ b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/PublishGCPubSub.java @@ -421,7 +421,7 @@ private void finishBatch(final ProcessSession session, attributes.put(TOPIC_NAME_ATTRIBUTE, topicName); final FlowFile flowFile = session.putAllAttributes(flowFileResult.getFlowFile(), attributes); final String transitUri = String.format(TRANSIT_URI_FORMAT_STRING, topicName); - session.getProvenanceReporter().send(flowFile, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().send(flowFile, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS), relationship); session.transfer(flowFile, relationship); } } diff --git a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/ConsumeGCPubSubLite.java b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/ConsumeGCPubSubLite.java index d0797fee42ee..0a87f628e679 100644 --- 
a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/ConsumeGCPubSubLite.java +++ b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/ConsumeGCPubSubLite.java @@ -213,7 +213,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session flowFile = session.write(flowFile, out -> out.write(message.getMessage().getData().toStringUtf8().getBytes())); session.transfer(flowFile, REL_SUCCESS); - session.getProvenanceReporter().receive(flowFile, context.getProperty(SUBSCRIPTION).evaluateAttributeExpressions().getValue()); + session.getProvenanceReporter().receive(flowFile, context.getProperty(SUBSCRIPTION).evaluateAttributeExpressions().getValue(), REL_SUCCESS); message.getConsumer().ack(); } diff --git a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/PublishGCPubSubLite.java b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/PublishGCPubSubLite.java index 24c3430d0b9a..557728052a8a 100644 --- a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/PublishGCPubSubLite.java +++ b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/PublishGCPubSubLite.java @@ -242,7 +242,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session session.transfer(successfulFlowFiles, REL_SUCCESS); for (FlowFile flowFile : successfulFlowFiles) { final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().send(flowFile, topicName, transmissionMillis); + session.getProvenanceReporter().send(flowFile, topicName, transmissionMillis, REL_SUCCESS); } } } diff --git a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/FetchGCSObject.java b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/FetchGCSObject.java index 50ca5bb6d7c3..9a13427529b0 100644 --- a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/FetchGCSObject.java +++ b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/FetchGCSObject.java @@ -275,7 +275,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session getLogger().info("Successfully retrieved GCS Object for {} in {} millis; routing to success", new Object[]{flowFile, millis}); final String transitUri = getTransitUri(storage.getOptions().getHost(), bucketName, key); - session.getProvenanceReporter().fetch(flowFile, transitUri, millis); + session.getProvenanceReporter().fetch(flowFile, transitUri, millis, REL_SUCCESS); } private FetchedBlob fetchBlob(final ProcessContext context, final Storage storage, final Map attributes) throws IOException { diff --git a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/PutGCSObject.java b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/PutGCSObject.java index 706be6848569..b798b9835098 100644 --- a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/PutGCSObject.java +++ 
b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/PutGCSObject.java @@ -542,9 +542,9 @@ public void process(InputStream rawIn) throws IOException { final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); final String transitUri = getTransitUri(storage.getOptions().getHost(), bucket, key); - session.getProvenanceReporter().send(flowFile, transitUri, millis); + session.getProvenanceReporter().send(flowFile, transitUri, millis, REL_SUCCESS); getLogger().info("Successfully put {} to Google Cloud Storage in {} milliseconds", - new Object[]{ff, millis}); + ff, millis); } catch (final ProcessException | StorageException e) { getLogger().error("Failed to put {} to Google Cloud Storage due to {}", flowFile, e.getMessage(), e); diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/CypherQueryFromNodesBuilder.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/CypherQueryFromNodesBuilder.java index 2560efb926ae..c9674e4c265c 100644 --- a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/CypherQueryFromNodesBuilder.java +++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/CypherQueryFromNodesBuilder.java @@ -16,43 +16,240 @@ */ package org.apache.nifi.graph; +import org.apache.nifi.graph.exception.GraphClientMethodNotSupported; + import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; -public class CypherQueryFromNodesBuilder { - public List getQueries(final List> nodeList) { - final List queryList = new ArrayList<>(nodeList.size()); +public class CypherQueryFromNodesBuilder implements QueryFromNodesBuilder { + + private String databaseName; + + @Override + public List getProvenanceQueries(final List> nodeList, final boolean includeFlowGraph) { + // Assume at least four queries per node + final List queryList = new ArrayList<>(nodeList.size() * 4); for (Map eventNode : nodeList) { - StringBuilder queryBuilder = new StringBuilder("MERGE (p:" + eventNode.get("eventType") + " {"); - final List propertyDefinitions = new ArrayList<>(eventNode.entrySet().size()); + StringBuilder queryBuilder = new StringBuilder(generateUseClause(databaseName) + "MERGE (p:`" + eventNode.get("eventType") + "`: NiFiProvenanceEvent {id: '"); + queryBuilder.append(eventNode.get("eventId")); + queryBuilder.append("'})\nON CREATE SET \n\t"); + queryBuilder.append(joinPropertyEntries(eventNode, "p", " =", ",\n\t", "")); + queryBuilder.append("\nON MATCH SET \n\t"); + queryBuilder.append(joinPropertyEntries(eventNode, "p", " =", ",\n\t", "")); + queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.CYPHER)); - for (Map.Entry properties : eventNode.entrySet()) { - propertyDefinitions.add(properties.getKey() + ": \"" + properties.getValue() + "\""); - } - propertyDefinitions.add("nodeType: \"NiFiProvenanceEvent\""); + // Add its entity (FlowFile, e.g.) 
to the graph + final String entityType = eventNode.get("entityType").toString(); + queryBuilder = new StringBuilder(generateUseClause(databaseName) + "MERGE (ff:`"); + queryBuilder.append(entityType); + queryBuilder.append("` {id: '"); + queryBuilder.append(eventNode.get("entityId")); + queryBuilder.append("', type:'"); + // Add a "type" property with just the class name (not the fully-qualified name) + queryBuilder.append(entityType.substring(entityType.lastIndexOf(".") + 1)); + queryBuilder.append("'})"); + queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.CYPHER)); + + // Add its component to the graph + queryBuilder = new StringBuilder(generateUseClause(databaseName) + "MERGE (p:`" + eventNode.get("componentType") + "` {"); + queryBuilder.append("componentId: '"); + queryBuilder.append(eventNode.get("componentId")); + queryBuilder.append("'})\nON CREATE SET \n\t"); + queryBuilder.append("p.componentName = '"); + queryBuilder.append(eventNode.get("componentName")); + queryBuilder.append("'\nON MATCH SET \n\tp.componentName = '"); + queryBuilder.append(eventNode.get("componentName")); + queryBuilder.append("'"); + queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.CYPHER)); - + // Add an "entity" edge between them + final String eventOrdinal = eventNode.get("eventOrdinal").toString(); + queryBuilder = new StringBuilder(generateUseClause(databaseName) + "MATCH (x:`" + eventNode.get("entityType") + "` {id: '"); + queryBuilder.append(eventNode.get("entityId")); + queryBuilder.append("'}),\n(y:NiFiProvenanceEvent {eventOrdinal: '"); + queryBuilder.append(eventOrdinal); + queryBuilder.append("'})\nMERGE(x) <-[:entity]- (y)"); queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.CYPHER)); List previousEventIds = (List) eventNode.get("previousEventIds"); // If there are previous event IDs, add edges if (previousEventIds != null) { - queryBuilder = new StringBuilder(); // Match the source (previous event) and target (this event) and create the edge if it doesn't exist for (Long previousEventId : previousEventIds) { - queryBuilder.append("MATCH\n(x {nodeType: 'NiFiProvenanceEvent', eventOrdinal: '"); + queryBuilder = new StringBuilder(generateUseClause(databaseName) + "MATCH (x:NiFiProvenanceEvent {eventOrdinal: '"); queryBuilder.append(previousEventId); - queryBuilder.append("'}),\n(y {nodeType: 'NiFiProvenanceEvent', eventOrdinal: '"); + queryBuilder.append("'}), (y:NiFiProvenanceEvent {eventOrdinal: '"); queryBuilder.append(eventOrdinal); - queryBuilder.append("'})\nMERGE(x) -[:next]-> (y)"); + queryBuilder.append("'})\nMERGE (x)-[z:next]->(y)\n"); + queryBuilder.append("ON CREATE SET\n\tz.relationship = x.relationship,\n\tz.sourceEventOrdinal = x.eventOrdinal,\n\tz.destinationEventOrdinal = y.eventOrdinal\n"); + queryBuilder.append("ON MATCH SET \n\tz.relationship = x.relationship,\n\tz.sourceEventOrdinal = x.eventOrdinal,\n\tz.destinationEventOrdinal = y.eventOrdinal"); + // Add edge to graph queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.CYPHER)); } } + + // Connect to flow graph nodes if specified + if (includeFlowGraph) { + // Add a link between the reporting component and the provenance event + queryBuilder = new StringBuilder(generateUseClause(databaseName) + "MATCH (x {id: '"); + queryBuilder.append(eventNode.get("componentId")); + queryBuilder.append("'}),\n(y {eventOrdinal: '"); + queryBuilder.append(eventOrdinal); + queryBuilder.append("'})\nMERGE(x) -[:reported]-> (y)"); + queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.CYPHER)); + } } return queryList; }
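+
+    // For illustration (all values assumed): given an event node map {eventType=SEND, eventId=42} and database name
+    // 'provenance', the event MERGE built in getProvenanceQueries() renders roughly as
+    //   USE `provenance` MERGE (p:`SEND`: NiFiProvenanceEvent {id: '42'})
+    //   ON CREATE SET
+    //       p.eventType = 'SEND',
+    //       p.eventId = '42'
+    // with the same assignments repeated under ON MATCH SET, followed by the entity, component, "next"-edge
+    // and (when includeFlowGraph is set) "reported"-edge queries.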
+ + @Override + public List getFlowGraphQueries(final List> nodeList) { + final List queryList = new ArrayList<>(); + for (Map componentStatusNode : nodeList) { + StringBuilder queryBuilder; + // Skip Connection nodes, put all the information into the edge + if (!"Connection".equals(componentStatusNode.get("type"))) { + queryBuilder = new StringBuilder(generateUseClause(databaseName) + "MERGE (p:`" + componentStatusNode.get("type") + "` {id: '"); + queryBuilder.append(componentStatusNode.get("id")); + queryBuilder.append("'})\nON CREATE SET \n\t"); + final String properties = joinPropertyEntries(componentStatusNode, "p", " =", ",\n\t", ""); + queryBuilder.append(properties); + queryBuilder.append("\nON MATCH SET \n\t"); + queryBuilder.append(properties); + queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.CYPHER)); + + // Add an edge to the parent process group if appropriate + if (componentStatusNode.containsKey("groupId")) { + queryBuilder = new StringBuilder(generateUseClause(databaseName) + "MATCH (x:`" + componentStatusNode.get("type") + "` {id: '"); + queryBuilder.append(componentStatusNode.get("id")); + queryBuilder.append("'}),\n(y:ProcessGroup {id: '"); + queryBuilder.append(componentStatusNode.get("groupId")); + queryBuilder.append("'})\nMERGE(x)-[:parent]->(y)"); + queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.CYPHER)); + } + } else { + // Add edges between source and destination processors going through the connection node + final String sourceId = componentStatusNode.get("sourceId").toString(); + final String sourceName = componentStatusNode.get("sourceName").toString(); + // Check for funnels, they do not offer status so they have to be added manually as encountered + if ("Funnel".equals(sourceName)) { + String addFunnelBuilder = generateUseClause(databaseName) + "MERGE (p:Funnel {id: '" + sourceId + "'})"; + queryList.add(new GraphQuery(addFunnelBuilder, GraphClientService.CYPHER)); + } + + queryBuilder = new StringBuilder(generateUseClause(databaseName) + "MATCH (x {id: '"); + queryBuilder.append(sourceId); + queryBuilder.append("'}),\n"); + final String destinationId = componentStatusNode.get("destinationId").toString(); + final String destinationName = componentStatusNode.get("destinationName").toString(); + if ("Funnel".equals(destinationName)) { + String addFunnelBuilder = generateUseClause(databaseName) + "MERGE (p:Funnel {id: '" + destinationId + "', name:'Funnel'})"; + queryList.add(new GraphQuery(addFunnelBuilder, GraphClientService.CYPHER)); + } + queryBuilder.append("(z {id: '"); + queryBuilder.append(destinationId); + queryBuilder.append("'})\nMERGE (x)-[y:`"); + final String componentName = componentStatusNode.get("name").toString().isEmpty() + ? 
"funnel" + : componentStatusNode.get("name").toString(); + queryBuilder.append(componentName); + queryBuilder.append("`:NiFiFlowConnection]->(z)\nON CREATE SET\n\t"); + final Map connectionProperties = new HashMap<>(); + connectionProperties.put("groupId", componentStatusNode.get("groupId")); + connectionProperties.put("backPressureBytesThreshold", componentStatusNode.get("backPressureBytesThreshold")); + connectionProperties.put("backPressureObjectThreshold", componentStatusNode.get("backPressureObjectThreshold")); + connectionProperties.put("backPressureDataSizeThreshold", componentStatusNode.get("backPressureDataSizeThreshold")); + connectionProperties.put("type", "Connection"); + final String properties = joinPropertyEntries(connectionProperties, "y", " = ", ",\n\t", ""); + queryBuilder.append(properties); + queryBuilder.append("\nON MATCH SET \n\t"); + queryBuilder.append(properties); + queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.CYPHER)); + } + } + return queryList; + } + + @Override + public List convertActionsToQueries(final List> nodeList) { + if (nodeList == null) { + return Collections.emptyList(); + } + final List queryList = new ArrayList<>(nodeList.size()); + for (Map actionNode : nodeList) { + final String operation = actionNode.get("operation").toString(); + StringBuilder queryBuilder; + if ("Remove".equals(operation)) { + // TODO move to a "history" database? If so match its nearest neighbors + // Remove the node from the graph, if it has been replaced + queryBuilder = new StringBuilder(generateUseClause(databaseName) + "MATCH (p:Processor {id: '"); + queryBuilder.append(actionNode.get("componentId")); + queryBuilder.append("'} DETACH DELETE p"); + queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.CYPHER)); + } + } + return queryList; + } + + @Override + public List generateCreateDatabaseQueries(String databaseName, boolean isCompositeDatabase) { + final List queryList = new ArrayList<>(); + StringBuilder queryBuilder; + // If the provenance database name is not specified, use the default database in the graph DB, otherwise create and use it + if (null != databaseName && !databaseName.isEmpty()) { + this.databaseName = databaseName; + queryBuilder = new StringBuilder("CREATE "); + if (isCompositeDatabase) { + queryBuilder.append("COMPOSITE "); + } + queryBuilder.append("DATABASE `"); + queryBuilder.append(databaseName); + queryBuilder.append("` IF NOT EXISTS"); + queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.CYPHER)); + } + return queryList; + } + + @Override + public List generateCreateIndexQueries(String databaseName, boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + if (null != databaseName && !databaseName.isEmpty()) { + this.databaseName = databaseName; + return Arrays.asList( + new GraphQuery(generateUseClause(databaseName) + + "CREATE INDEX prov_event_id_index IF NOT EXISTS FOR (n:NiFiProvenanceEvent) ON (n.id)", GraphClientService.CYPHER), + new GraphQuery(generateUseClause(databaseName) + + "CREATE INDEX prov_event_entityId_index IF NOT EXISTS FOR (n:NiFiProvenanceEvent) ON (n.entityId)", GraphClientService.CYPHER), + new GraphQuery(generateUseClause(databaseName) + + "CREATE INDEX prov_event_componentId_index IF NOT EXISTS FOR (n:NiFiProvenanceEvent) ON (n.componentId)", GraphClientService.CYPHER), + new GraphQuery(generateUseClause(databaseName) + + "CREATE INDEX prov_event_next_index IF NOT EXISTS\nFOR ()-[r:next]-() ON 
(r.sourceEventOrdinal,r.destinationEventOrdinal)", GraphClientService.CYPHER), + new GraphQuery(generateUseClause(databaseName) + + "CREATE INDEX component_connection_index IF NOT EXISTS\nFOR ()-[r:NiFiFlowConnection]-() ON (r.sourceId,r.destinationId)", GraphClientService.CYPHER) + ); + } else { + throw new GraphClientMethodNotSupported("Could not generate CREATE INDEX queries for Cypher as no database was specified"); + } + } + + @Override + public List generateInitialVertexTypeQueries(final String databaseName, final boolean isCompositeDatabase) { + // Vertex types are added as nodes are created + return Collections.emptyList(); + } + + @Override + public List generateInitialEdgeTypeQueries(final String databaseName, final boolean isCompositeDatabase) { + // Edge types are added as edges are created + return Collections.emptyList(); + } + + public String generateUseClause(final String databaseName) { + if (databaseName == null) { + return ""; + } + return "USE `" + databaseName + "` "; + } } diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/GraphClientService.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/GraphClientService.java index 4c436053ae03..c67053ba5e0b 100644 --- a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/GraphClientService.java +++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/GraphClientService.java @@ -18,6 +18,8 @@ package org.apache.nifi.graph; import org.apache.nifi.controller.ControllerService; +import org.apache.nifi.graph.exception.GraphClientMethodNotSupported; +import org.apache.nifi.graph.exception.GraphQueryException; import java.util.List; import java.util.Map; @@ -31,7 +33,7 @@ public interface GraphClientService extends ControllerService { String PROPERTIES_SET = "graph.properties.set"; String ROWS_RETURNED = "graph.rows.returned"; - // Supported query languages (service-dependent) + // Potentially supported query languages (service-dependent) String SQL = "sql"; String SQL_SCRIPT = "sqlscript"; String GRAPHQL = "graphql"; @@ -47,7 +49,14 @@ public interface GraphClientService extends ControllerService { * @param handler The callback handler invoked with any returned results * @return Any results returned after handling the query response */ - Map executeQuery(String query, Map parameters, GraphQueryResultCallback handler); + Map executeQuery(GraphQuery query, Map parameters, GraphQueryResultCallback handler) throws GraphQueryException; + + /** + * Returns the name of the database to which the query is submitted + * + * @return the name of the database in use + */ + String getDatabaseName(); /** * Returns the URL used to submit the query @@ -56,12 +65,33 @@ */ String getTransitUrl(); + List convertActionsToQueries(final List> nodeList); + /** - * Builds a list of client-specific queries based on a list of property map nodes. Usually followed by a call to executeQuery + * Builds a list of client-specific queries to generate a graph of the current flow based on a list of property map nodes.
Usually followed by a call to executeQuery * * @param nodeList A List of Maps corresponding to property map nodes * @param parameters A Map of parameter values to use in the query and/or execution * @return A List of queries each corresponding to an operation on the node list */ - List buildQueryFromNodes(List> nodeList, Map parameters); + List buildFlowGraphQueriesFromNodes(List> nodeList, Map parameters); + + /** + * Builds a list of client-specific provenance-related queries based on a list of property map nodes. Usually followed by a call to executeQuery + * + * @param nodeList A List of Maps corresponding to property map nodes + * @param parameters A Map of parameter values to use in the query and/or execution + * @param includeFlowGraph Whether to include the flow graph nodes in the provenance queries. Setting this to true assumes buildFlowGraphQueriesFromNodes() has been called. + * @return A List of queries each corresponding to an operation on the node list + */ + List buildProvenanceQueriesFromNodes(List> nodeList, Map parameters, boolean includeFlowGraph); + + List generateCreateDatabaseQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported; + + List generateCreateIndexQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported; + + List generateInitialVertexTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported; + + List generateInitialEdgeTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported; + } \ No newline at end of file diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/GraphQuery.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/GraphQuery.java index decb6e30fa95..87ca9bbde766 100644 --- a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/GraphQuery.java +++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/GraphQuery.java @@ -50,4 +50,11 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(query, language); } + + @Override + public String toString() { + return "GraphQuery [" + + "language=" + language + + ", query=" + query + "]"; + } } diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/GremlinQueryFromNodesBuilder.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/GremlinQueryFromNodesBuilder.java index c61394e87acc..59e961fd0e38 100644 --- a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/GremlinQueryFromNodesBuilder.java +++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/GremlinQueryFromNodesBuilder.java @@ -16,12 +16,19 @@ */ package org.apache.nifi.graph; +import org.apache.nifi.graph.exception.GraphClientMethodNotSupported; + import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; -public class GremlinQueryFromNodesBuilder { - public List getQueries(final List> nodeList) { +public class GremlinQueryFromNodesBuilder implements QueryFromNodesBuilder { + + private String databaseName; + + @Override + public List getProvenanceQueries(final List>
nodeList, final boolean includeFlowGraph) { final List queryList = new ArrayList<>(nodeList.size()); for (final Map eventNode : nodeList) { StringBuilder queryBuilder = new StringBuilder(); @@ -65,4 +72,64 @@ public List getQueries(final List> nodeList) { } return queryList; } + + @Override + public List getFlowGraphQueries(List> nodeList) { + return Collections.emptyList(); + } + + @Override + public List convertActionsToQueries(List> nodeList) { + // TODO: Convert FlowActions to Gremlin queries + if (nodeList == null) { + return Collections.emptyList(); + } + final List queryList = new ArrayList<>(nodeList.size()); + for (Map actionNode : nodeList) { + final String operation = actionNode.get("operation").toString(); + StringBuilder queryBuilder; + if ("Remove".equals(operation)) { + // TODO move to a "history" database? If so match its nearest neighbors + // Remove the node from the graph, if it has been replaced + queryBuilder = new StringBuilder(generateUseClause(databaseName) + "DELETE FROM Processor WHERE id = '"); + queryBuilder.append(actionNode.get("componentId")); + queryBuilder.append("'"); + queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.SQL)); + } + } + + // TODO return queryList when the correct Gremlin query is generated above + return Collections.emptyList(); + } + + @Override + public List generateCreateDatabaseQueries(String databaseName, boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + throw new GraphClientMethodNotSupported("Gremlin does not support creating databases"); + } + + @Override + public List generateCreateIndexQueries(String databaseName, boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + throw new GraphClientMethodNotSupported("Gremlin does not support creating indexes"); + } + + @Override + public List generateInitialVertexTypeQueries(String databaseName, boolean isCompositeDatabase) { + if (null != databaseName && !databaseName.isEmpty()) { + this.databaseName = databaseName; + } + return Collections.emptyList(); + } + + @Override + public List generateInitialEdgeTypeQueries(String databaseName, boolean isCompositeDatabase) { + if (null != databaseName && !databaseName.isEmpty()) { + this.databaseName = databaseName; + } + return Collections.emptyList(); + } + + public String generateUseClause(final String databaseName) { + // TODO change this if Gremlin can select a different database + return ""; + } } diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/QueryFromNodesBuilder.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/QueryFromNodesBuilder.java new file mode 100644 index 000000000000..3a5b46b13a91 --- /dev/null +++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/QueryFromNodesBuilder.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.nifi.graph; + +import org.apache.nifi.graph.exception.GraphClientMethodNotSupported; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +public interface QueryFromNodesBuilder { + + List getProvenanceQueries(final List> nodeList, final boolean includeFlowGraph); + + List getFlowGraphQueries(final List> nodeList); + + List convertActionsToQueries(final List> nodeList); + + List generateCreateDatabaseQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported; + + List generateCreateIndexQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported; + + List generateInitialVertexTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported; + + List generateInitialEdgeTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported; + + default String joinPropertyEntries(final Map componentPropertiesMap, final String nodeName, final String valueSeparator, final String joinSeparator, final String prefix) { + final List propertyDefinitions = new ArrayList<>(componentPropertiesMap.entrySet().size()); + + for (Map.Entry property : componentPropertiesMap.entrySet()) { + if (property.getValue() instanceof String) { + propertyDefinitions.add((prefix == null ? "" : prefix) + (nodeName == null ? "" : nodeName + ".") + property.getKey() + valueSeparator + "'" + property.getValue() + "'"); + } else { + propertyDefinitions.add((prefix == null ? "" : prefix) + (nodeName == null ? 
"" : nodeName + ".") + property.getKey() + valueSeparator + property.getValue()); + } + } + + return String.join(joinSeparator, propertyDefinitions); + } + + String generateUseClause(final String databaseName); +} diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/SqlQueryFromNodesBuilder.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/SqlQueryFromNodesBuilder.java index 9e2779527420..403e3e83e219 100644 --- a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/SqlQueryFromNodesBuilder.java +++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/SqlQueryFromNodesBuilder.java @@ -16,31 +16,23 @@ */ package org.apache.nifi.graph; +import org.apache.nifi.flow.ComponentType; +import org.apache.nifi.graph.exception.GraphClientMethodNotSupported; import org.apache.nifi.provenance.ProvenanceEventType; import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; -public class SqlQueryFromNodesBuilder { +public class SqlQueryFromNodesBuilder implements QueryFromNodesBuilder { - public List getQueries(final List> nodeList) { + private String databaseName; + + public List getProvenanceQueries(final List> nodeList, final boolean includeFlowGraph) { final List queryList = new ArrayList<>(nodeList.size()); StringBuilder queryBuilder; - queryList.add(new GraphQuery("CREATE VERTEX TYPE NiFiProvenanceEvent IF NOT EXISTS", GraphClientService.SQL)); - queryList.add(new GraphQuery("CREATE VERTEX TYPE FlowFile IF NOT EXISTS", GraphClientService.SQL)); - - // Create the event types as vertex types in ArcadeDB if they haven't been created already - for (ProvenanceEventType provenanceEventType : ProvenanceEventType.values()) { - queryBuilder = new StringBuilder(); - queryBuilder.append("CREATE VERTEX TYPE `"); - queryBuilder.append(provenanceEventType.name()); - queryBuilder.append("` IF NOT EXISTS EXTENDS `NiFiProvenanceEvent`"); - queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.SQL)); - } - // Add the edge types - queryList.add(new GraphQuery("CREATE EDGE TYPE `next` IF NOT EXISTS", GraphClientService.SQL)); - queryList.add(new GraphQuery("CREATE EDGE TYPE `parentOf` IF NOT EXISTS", GraphClientService.SQL)); // Add the vertices for (final Map eventNode : nodeList) { @@ -76,7 +68,7 @@ public List getQueries(final List> nodeList) { } } - // Parent-child "next" edges will be created using the the previousEventIds, except when a processor has two of the same relationships + // Parent-child "next" edges will be created using the previousEventIds, except when a processor has two of the same relationships // to different downstream processors /*List parentFlowFileIds = (List) eventNode.get("parentIds"); if (parentFlowFileIds != null) { @@ -85,7 +77,7 @@ public List getQueries(final List> nodeList) { for (String parentFlowFileId : parentFlowFileIds) { final Long eventOrdinal = (Long) eventNode.get("eventOrdinal"); - queryBuilder.append("CREATE EDGE next\nFROM (SELECT * FROM `NiFiProvenanceEvent` WHERE entityId = '"); + queryBuilder.append("CREATE EDGE next UPSERT\nFROM (SELECT * FROM `NiFiProvenanceEvent` WHERE entityId = '"); queryBuilder.append(parentFlowFileId); queryBuilder.append("' AND eventOrdinal != "); queryBuilder.append(eventOrdinal); @@ -101,4 +93,150 @@ public List getQueries(final List> 
nodeList) { } return queryList; } + + @Override + public List getFlowGraphQueries(List> nodeList) { + final List queryList = new ArrayList<>(); + for (Map componentStatusNode : nodeList) { + StringBuilder queryBuilder; + // Skip Connection nodes, put all the information into the edge + if (!"Connection".equals(componentStatusNode.get("type"))) { + final String id = componentStatusNode.get("id").toString(); + queryBuilder = new StringBuilder("UPDATE `" + componentStatusNode.get("type") + "`\n\tSET id = '"); + queryBuilder.append(id); + queryBuilder.append("',\n\t"); + queryBuilder.append(joinPropertyEntries(componentStatusNode, null, " =", ",\n\t", "")); + queryBuilder.append("\nUPSERT WHERE id = '"); + queryBuilder.append(id); + queryBuilder.append("'"); + queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.SQL)); + + // Add an edge to the parent process group if appropriate + if (componentStatusNode.containsKey("groupId")) { + queryBuilder = new StringBuilder("CREATE EDGE `parent` FROM (SELECT * FROM `" + componentStatusNode.get("type") + "` WHERE `id` = '"); + queryBuilder.append(componentStatusNode.get("id")); + queryBuilder.append("') TO (SELECT * FROM ProcessGroup WHERE `id` = '"); + queryBuilder.append(componentStatusNode.get("groupId")); + queryBuilder.append("') IF NOT EXISTS"); + queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.SQL)); + } + } else { + // Add edges between source and destination processors going through the connection node + final String sourceId = componentStatusNode.get("sourceId").toString(); + final String sourceName = componentStatusNode.get("sourceName").toString(); + // Check for funnels; they do not report status, so they have to be added manually as encountered + if ("Funnel".equals(sourceName)) { + String addFunnelBuilder = "UPDATE Funnel SET id = '" + sourceId + "', name = 'Funnel' UPSERT"; + queryList.add(new GraphQuery(addFunnelBuilder, GraphClientService.SQL)); + } + + // Ensure the connection-specific edge type exists before creating edges of that type + queryBuilder = new StringBuilder("CREATE EDGE TYPE `"); + queryBuilder.append(componentStatusNode.get("name")); + queryBuilder.append("` IF NOT EXISTS EXTENDS NiFiFlowConnection"); + queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.SQL)); + + queryBuilder = new StringBuilder("CREATE EDGE `"); + queryBuilder.append(componentStatusNode.get("name")); + queryBuilder.append("` FROM\n(SELECT * FROM NiFiComponent "); + queryBuilder.append(" WHERE id = '"); + queryBuilder.append(sourceId); + queryBuilder.append("')\nTO (SELECT * FROM NiFiComponent WHERE id = '"); + final String destinationId = componentStatusNode.get("destinationId").toString(); + final String destinationName = componentStatusNode.get("destinationName").toString(); + + // Inject a destination for funnels + if ("Funnel".equals(destinationName)) { + String addFunnelBuilder = "UPDATE Funnel SET id = '" + destinationId + "', name = 'Funnel' UPSERT"; + queryList.add(new GraphQuery(addFunnelBuilder, GraphClientService.SQL)); + } + + // Continue with the CREATE EDGE query + queryBuilder.append(destinationId); + queryBuilder.append("') IF NOT EXISTS SET\n\t"); + + final Map connectionProperties = new HashMap<>(); + connectionProperties.put("groupId", componentStatusNode.get("groupId")); + connectionProperties.put("backPressureBytesThreshold", componentStatusNode.get("backPressureBytesThreshold")); + connectionProperties.put("backPressureObjectThreshold", componentStatusNode.get("backPressureObjectThreshold")); + connectionProperties.put("backPressureDataSizeThreshold", componentStatusNode.get("backPressureDataSizeThreshold")); + connectionProperties.put("type", "Connection"); + final String properties = joinPropertyEntries(connectionProperties, null, " = ", ",\n\t", ""); + queryBuilder.append(properties); + queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.SQL)); + } + } + return queryList; + }
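For a hypothetical connection named "success" from source 'p1' to destination 'p2' in group 'pg1' (all IDs illustrative), the else-branch above now emits statements of roughly this shape; note that joinPropertyEntries single-quotes String values while numeric thresholds stay unquoted:

    CREATE EDGE TYPE `success` IF NOT EXISTS EXTENDS NiFiFlowConnection
    CREATE EDGE `success` FROM
    (SELECT * FROM NiFiComponent WHERE id = 'p1')
    TO (SELECT * FROM NiFiComponent WHERE id = 'p2') IF NOT EXISTS SET
        groupId = 'pg1',
        backPressureObjectThreshold = 10000,
        type = 'Connection'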
connectionProperties.put("type", "Connection"); + final String properties = joinPropertyEntries(connectionProperties, null, " = ", ",\n\t", ""); + queryBuilder.append(properties); + queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.SQL)); + } + } + return queryList; + } + + @Override + public List convertActionsToQueries(List> nodeList) { + if (nodeList == null) { + return Collections.emptyList(); + } + final List queryList = new ArrayList<>(nodeList.size()); + for (Map actionNode : nodeList) { + final String operation = actionNode.get("operation").toString(); + StringBuilder queryBuilder; + if ("Remove".equals(operation)) { + // TODO move to a "history" database? If so match its nearest neighbors + // Remove the node from the graph, if it has been replaced + queryBuilder = new StringBuilder(generateUseClause(databaseName) + "DELETE FROM Processor WHERE id = '"); + queryBuilder.append(actionNode.get("componentId")); + queryBuilder.append("'"); + queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.SQL)); + } + } + return queryList; + } + + public List generateCreateDatabaseQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + throw new GraphClientMethodNotSupported("SQL does not support creating databases"); + } + + public List generateInitialVertexTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + if (null != databaseName && !databaseName.isEmpty()) { + this.databaseName = databaseName; + } + final List queryList = new ArrayList<>(); + StringBuilder queryBuilder; + // Add the vertex types + queryList.add(new GraphQuery("CREATE VERTEX TYPE NiFiProvenanceEvent IF NOT EXISTS", GraphClientService.SQL)); + queryList.add(new GraphQuery("CREATE VERTEX TYPE `org.apache.nifi.flowfile.FlowFile` IF NOT EXISTS", GraphClientService.SQL)); + queryList.add(new GraphQuery("CREATE VERTEX TYPE NiFiComponent IF NOT EXISTS", GraphClientService.SQL)); + + for (ComponentType componentType : ComponentType.values()) { + queryList.add(new GraphQuery("CREATE VERTEX TYPE `" + componentType.getTypeName().replace(" ", "") + "` IF NOT EXISTS EXTENDS NiFiComponent", GraphClientService.SQL)); + } + + // Create the event types as vertex types if they haven't been created already + for (ProvenanceEventType provenanceEventType : ProvenanceEventType.values()) { + queryBuilder = new StringBuilder(); + queryBuilder.append("CREATE VERTEX TYPE `"); + queryBuilder.append(provenanceEventType.name()); + queryBuilder.append("` IF NOT EXISTS EXTENDS `NiFiProvenanceEvent`"); + queryList.add(new GraphQuery(queryBuilder.toString(), GraphClientService.SQL)); + } + return queryList; + } + + public List generateInitialEdgeTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + if (null != databaseName && !databaseName.isEmpty()) { + this.databaseName = databaseName; + } + final List queryList = new ArrayList<>(); + // Add the edge types + queryList.add(new GraphQuery("CREATE EDGE TYPE NiFiFlowConnection IF NOT EXISTS", GraphClientService.SQL)); + queryList.add(new GraphQuery("CREATE EDGE TYPE `next` IF NOT EXISTS EXTENDS NiFiFlowConnection", GraphClientService.SQL)); + queryList.add(new GraphQuery("CREATE EDGE TYPE `parent` IF NOT EXISTS EXTENDS NiFiFlowConnection", GraphClientService.SQL)); + return queryList; + } + + @Override + public List generateCreateIndexQueries(String databaseName, boolean isCompositeDatabase) 
throws GraphClientMethodNotSupported { + throw new GraphClientMethodNotSupported("SQL does not support creating indexes"); + } + + public String generateUseClause(final String databaseName) { + // TODO change this if SQL can select a different database + return ""; + } } diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/exception/GraphClientMethodNotSupported.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/exception/GraphClientMethodNotSupported.java new file mode 100644 index 000000000000..375f8ac9c273 --- /dev/null +++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/exception/GraphClientMethodNotSupported.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.nifi.graph.exception; + +/** + * This class represents an exception when a QueryFromNodesBuilder does not support generating a certain type of query, + * such as creating the database or creating indexes. + */ +public class GraphClientMethodNotSupported extends Exception { + + public GraphClientMethodNotSupported(String message) { + super(message); + } + + public GraphClientMethodNotSupported(String message, Throwable cause) { + super(message, cause); + } + + public GraphClientMethodNotSupported(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } +} diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/exception/GraphQueryException.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/exception/GraphQueryException.java new file mode 100644 index 000000000000..20b957a6f524 --- /dev/null +++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-client-service-api/src/main/java/org/apache/nifi/graph/exception/GraphQueryException.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.nifi.graph.exception; + +import java.io.IOException; + +/** + * This class represents an exception that happens while issuing a query to the graph database. + */ +public class GraphQueryException extends IOException { + public GraphQueryException(String message) { + super(message); + } + + public GraphQueryException(String message, Throwable cause) { + super(message, cause); + } + + public GraphQueryException(Throwable cause) { + super(cause); + } +} diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/AbstractGraphExecutor.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/AbstractGraphExecutor.java index b8bc7dedf9e1..c0d1c2a025ca 100644 --- a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/AbstractGraphExecutor.java +++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/AbstractGraphExecutor.java @@ -52,6 +52,16 @@ abstract class AbstractGraphExecutor extends AbstractProcessor { .addValidator(StandardValidators.NON_BLANK_VALIDATOR) .build(); + public static final PropertyDescriptor QUERY_LANGUAGE = new PropertyDescriptor.Builder() + .name("query-language") + .displayName("Query Language") + .description("The language that the query is written in. This property must be set to a value that the graph database accepts.") + .required(true) + .defaultValue(GraphClientService.CYPHER) + .allowableValues(GraphClientService.SQL, GraphClientService.CYPHER, GraphClientService.GREMLIN) + .build(); + + static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") .description("Successful FlowFiles are routed to this relationship").build(); @@ -82,7 +92,7 @@ protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(String name) public void onScheduled(ProcessContext context) { queryParameters = context.getProperties() .keySet().stream() - .filter(prop -> prop.isDynamic()) + .filter(PropertyDescriptor::isDynamic) .collect(Collectors.toList()); } diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQuery.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQuery.java index 99e97daa5c88..b62a304a63fe 100644 --- a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQuery.java +++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQuery.java @@ -29,6 +29,7 @@ import org.apache.nifi.flowfile.attributes.CoreAttributes; import org.apache.nifi.graph.GraphClientService; import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.graph.GraphQuery; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.Relationship; @@ -81,6 +82,7 @@ public class ExecuteGraphQuery extends AbstractGraphExecutor { final List tempDescriptors = new ArrayList<>(); tempDescriptors.add(CLIENT_SERVICE); tempDescriptors.add(QUERY); + tempDescriptors.add(QUERY_LANGUAGE); propertyDescriptors = Collections.unmodifiableList(tempDescriptors); } @@ -99,10 +101,13 @@ public final List 
getSupportedPropertyDescriptors() { private volatile GraphClientService clientService; + private volatile String language; + @OnScheduled public void onScheduled(final ProcessContext context) { super.onScheduled(context); clientService = context.getProperty(CLIENT_SERVICE).asControllerService(GraphClientService.class); + language = context.getProperty(QUERY_LANGUAGE).getValue(); } @Override @@ -111,11 +116,11 @@ public void onTrigger(final ProcessContext context, final ProcessSession session FlowFile output = flowFile != null ? session.create(flowFile) : session.create(); try (OutputStream os = session.write(output)) { - String query = getQuery(context, session, flowFile); + GraphQuery graphQuery = getQuery(context, session, flowFile); long startTimeMillis = System.currentTimeMillis(); os.write("[".getBytes()); - Map resultAttrs = clientService.executeQuery(query, getParameters(context, output), (record, hasMore) -> { + Map resultAttrs = clientService.executeQuery(graphQuery, getParameters(context, output), (record, hasMore) -> { try { String obj = mapper.writeValueAsString(record); os.write(obj.getBytes()); @@ -137,7 +142,9 @@ public void onTrigger(final ProcessContext context, final ProcessSession session output = session.putAllAttributes(output, resultAttrs); session.transfer(output, REL_SUCCESS); session.getProvenanceReporter().invokeRemoteProcess(output, clientService.getTransitUrl(), - String.format("The following query was executed in %s milliseconds: \"%s\"", executionTime, query) + String.format("The following query in the %s language was executed in %s milliseconds: \"%s\"", + graphQuery.getLanguage(), executionTime, graphQuery.getQuery()), + REL_SUCCESS ); if (flowFile != null) { session.transfer(flowFile, REL_ORIGINAL); }
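For reference, the processor now submits the query text paired with the configured language rather than a bare string; the equivalent direct call looks like this (query text and parameter map are illustrative):

    // language comes from the new Query Language property
    final GraphQuery graphQuery = new GraphQuery("MATCH (n) RETURN count(n)", GraphClientService.CYPHER);
    final Map<String, Object> parameters = new HashMap<>();
    clientService.executeQuery(graphQuery, parameters, (record, hasMore) -> {
        // stream each result record, e.g. serialize it to the output FlowFile
    });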
@@ -154,9 +161,9 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } } - protected String getQuery(ProcessContext context, ProcessSession session, FlowFile input) { - String query = context.getProperty(QUERY).evaluateAttributeExpressions(input).getValue(); - if (StringUtils.isEmpty(query) && input != null) { + protected GraphQuery getQuery(ProcessContext context, ProcessSession session, FlowFile input) { + final String queryString = context.getProperty(QUERY).evaluateAttributeExpressions(input).getValue(); + if (StringUtils.isEmpty(queryString) && input != null) { try { if (input.getSize() > (64 * 1024)) { throw new Exception("Input bigger than 64kb. Cannot assume this is a valid query for Gremlin Server " + @@ -173,12 +180,11 @@ protected String getQuery(ProcessContext context, ProcessSession session, FlowFi session.exportTo(input, out); out.close(); - - query = new String(out.toByteArray()); + return new GraphQuery(out.toString(), language); } catch (Exception ex) { throw new ProcessException(ex); } } - return query; + if (!StringUtils.isEmpty(queryString)) { + return new GraphQuery(queryString, language); + } + throw new IllegalArgumentException(QUERY.getDisplayName() + " must be set"); } } \ No newline at end of file
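With the fall-through fixed above (the original refactor threw IllegalArgumentException even when the Query property was set), getQuery() resolves its source in this order:

    // QUERY property set                 -> new GraphQuery(queryString, language)
    // QUERY empty and input FlowFile set -> FlowFile content (rejected above 64 KB)
    // neither                            -> IllegalArgumentException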
diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQueryRecord.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQueryRecord.java index 07bc9ec63fa0..e4fc06c821e2 100644 --- a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQueryRecord.java +++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQueryRecord.java @@ -30,10 +30,11 @@ import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.graph.GraphClientService; +import org.apache.nifi.graph.GraphQuery; +import org.apache.nifi.graph.exception.GraphQueryException; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.Relationship; -import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.record.path.FieldValue; import org.apache.nifi.record.path.RecordPath; @@ -121,7 +122,7 @@ protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String } public static final List DESCRIPTORS = Collections.unmodifiableList(Arrays.asList( - CLIENT_SERVICE, READER_SERVICE, WRITER_SERVICE, SUBMISSION_SCRIPT + CLIENT_SERVICE, QUERY_LANGUAGE, READER_SERVICE, WRITER_SERVICE, SUBMISSION_SCRIPT )); public static final Relationship SUCCESS = new Relationship.Builder().name("original") @@ -144,6 +145,8 @@ protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String public static final String GRAPH_OPERATION_TIME = "graph.operations.took"; private volatile RecordPathCache recordPathCache; + private volatile String language; + @Override public Set getRelationships() { return RELATIONSHIPS; @@ -162,6 +165,7 @@ public List getSupportedPropertyDescriptors() { @OnScheduled public void onScheduled(ProcessContext context) { clientService = context.getProperty(CLIENT_SERVICE).asControllerService(GraphClientService.class); + language = context.getProperty(QUERY_LANGUAGE).getValue(); recordReaderFactory = context.getProperty(READER_SERVICE).asControllerService(RecordReaderFactory.class); recordSetWriterFactory = context.getProperty(WRITER_SERVICE).asControllerService(RecordSetWriterFactory.class); recordPathCache = new RecordPathCache(100); @@ -170,7 +174,7 @@ public void onScheduled(ProcessContext context) { private Object getRecordValue(Record record, RecordPath recordPath){ final RecordPathResult result = recordPath.evaluate(record); final List values = result.getSelectedFields().collect(Collectors.toList()); - if (values != null && !values.isEmpty()) { + if (!values.isEmpty()) { if (values.size() == 1) { Object raw = values.get(0).getValue(); @@ -181,7 +185,7 @@ private Object getRecordValue(Record record, RecordPath recordPath){ return raw; } else { - return values.stream().map(fv -> fv.getValue()).collect(Collectors.toList()); + return values.stream().map(FieldValue::getValue).collect(Collectors.toList()); } } else { return null; @@ -189,7 +193,7 @@ private Object getRecordValue(Record record, RecordPath recordPath){ } @Override - public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException { + public void onTrigger(final ProcessContext context, final ProcessSession session) { FlowFile input = session.get(); if ( input == null ) { return; } @@ -215,7 +219,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session long delta; FlowFile failedRecords = session.create(input); - WriteResult failedWriteResult = null; + WriteResult failedWriteResult; try (InputStream is = session.read(input); RecordReader reader = recordReaderFactory.createRecordReader(input, is, getLogger()); OutputStream os = session.write(failedRecords); @@ -238,9 +242,10 @@ public void onTrigger(final ProcessContext context, final ProcessSession session dynamicPropertyMap.putAll(input.getAttributes()); if (getLogger().isDebugEnabled()) { - getLogger().debug("Dynamic Properties: {}", new Object[]{dynamicPropertyMap}); + getLogger().debug("Dynamic Properties: {}", dynamicPropertyMap); } - List> graphResponses = new ArrayList<>(executeQuery(recordScript, dynamicPropertyMap)); + final GraphQuery graphQuery = new GraphQuery(recordScript, language); + List> graphResponses = new ArrayList<>(executeQuery(graphQuery, dynamicPropertyMap)); OutputStream graphOutputStream = session.write(graph); String graphOutput = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(graphResponses); @@ -271,14 +276,13 @@ public void onTrigger(final ProcessContext context, final ProcessSession session return; } - // Generate provenance and send input flowfile to success - session.getProvenanceReporter().send(input, clientService.getTransitUrl(), delta*1000); - if (failedWriteResult.getRecordCount() < 1) { // No failed records, remove the failure flowfile and send the input flowfile to success session.remove(failedRecords); input = session.putAttribute(input, GRAPH_OPERATION_TIME, String.valueOf(delta)); session.transfer(input, SUCCESS); + // Generate a SEND provenance event for the input flowfile + session.getProvenanceReporter().send(input, clientService.getTransitUrl(), delta * 1000, REL_SUCCESS); } else { failedRecords = session.putAttribute(failedRecords, RECORD_COUNT, String.valueOf(failedWriteResult.getRecordCount())); session.transfer(failedRecords, FAILURE); @@ -287,10 +291,10 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } } - private List> executeQuery(String recordScript, Map parameters) { + private List> executeQuery(GraphQuery graphQuery, Map parameters) throws GraphQueryException { ObjectMapper mapper = new ObjectMapper(); List> graphResponses = new ArrayList<>(); - clientService.executeQuery(recordScript, parameters, (map, b) -> { + clientService.executeQuery(graphQuery, parameters, (map, b) -> { if (getLogger().isDebugEnabled()){ try { getLogger().debug(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(map));
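Client services implementing GraphClientService must migrate to the GraphQuery-based signature and may now surface failures as the checked GraphQueryException; a minimal sketch of an updated override (body illustrative, generic parameters inferred from the call sites in this diff):

    @Override
    public Map<String, String> executeQuery(GraphQuery graphQuery, Map<String, Object> parameters,
                                            GraphQueryResultCallback handler) throws GraphQueryException {
        try {
            // run graphQuery.getQuery() against the backing store, feeding each row to handler
            return new HashMap<>();
        } catch (Exception e) {
            throw new GraphQueryException("Query execution failed", e);
        }
    }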
diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/MockCypherClientService.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/MockCypherClientService.java index 9ad402100a95..2d3da5ebfcd2 100644 --- a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/MockCypherClientService.java +++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/MockCypherClientService.java @@ -22,15 +22,20 @@ import org.apache.nifi.graph.GraphClientService; import org.apache.nifi.graph.GraphQuery; import org.apache.nifi.graph.GraphQueryResultCallback; +import org.apache.nifi.graph.QueryFromNodesBuilder; +import org.apache.nifi.graph.exception.GraphClientMethodNotSupported; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; public class MockCypherClientService extends AbstractControllerService implements GraphClientService { + protected volatile QueryFromNodesBuilder cypherQueryFromNodesBuilder = new CypherQueryFromNodesBuilder(); + @Override - public Map executeQuery(String query, Map parameters, GraphQueryResultCallback handler) { + public Map executeQuery(GraphQuery graphQuery, Map parameters, GraphQueryResultCallback handler) { handler.process(new HashMap(){{ put("name", "John Smith"); put("age", 40); @@ -58,8 +63,44 @@ public String getTransitUrl() { } @Override - public List buildQueryFromNodes(List> eventList, Map parameters) { + public List convertActionsToQueries(final List> nodeList) { + return Collections.emptyList(); + } + + @Override + public List buildFlowGraphQueriesFromNodes(List> eventList, Map parameters) { + // Build queries from event list + return cypherQueryFromNodesBuilder.getFlowGraphQueries(eventList); + } + + @Override + public List buildProvenanceQueriesFromNodes(List> eventList, Map parameters, final boolean includeFlowGraph) { // Build queries from event list - return new CypherQueryFromNodesBuilder().getQueries(eventList); + return cypherQueryFromNodesBuilder.getProvenanceQueries(eventList, includeFlowGraph); + } + + @Override + public List generateCreateDatabaseQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return cypherQueryFromNodesBuilder.generateCreateDatabaseQueries(databaseName, isCompositeDatabase); + } + + @Override + public List generateCreateIndexQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return cypherQueryFromNodesBuilder.generateCreateIndexQueries(databaseName, isCompositeDatabase); + } + + @Override + public List generateInitialVertexTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return cypherQueryFromNodesBuilder.generateInitialVertexTypeQueries(databaseName, isCompositeDatabase); + } + + @Override + public List generateInitialEdgeTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return cypherQueryFromNodesBuilder.generateInitialEdgeTypeQueries(databaseName, isCompositeDatabase); + } + + @Override + public String getDatabaseName() { + return "mockDB"; } } diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/util/InMemoryGraphClient.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/util/InMemoryGraphClient.java index 031ae9bb97a5..f8fb4a2b6638 100644 --- a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/util/InMemoryGraphClient.java +++ 
b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/util/InMemoryGraphClient.java @@ -23,7 +23,9 @@ import org.apache.nifi.graph.GraphQuery; import org.apache.nifi.graph.GraphQueryResultCallback; import org.apache.nifi.graph.GremlinQueryFromNodesBuilder; -import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.graph.QueryFromNodesBuilder; +import org.apache.nifi.graph.exception.GraphClientMethodNotSupported; +import org.apache.nifi.graph.exception.GraphQueryException; import org.apache.tinkerpop.gremlin.structure.Graph; import org.janusgraph.core.JanusGraph; import org.janusgraph.core.JanusGraphFactory; @@ -32,6 +34,7 @@ import javax.script.ScriptEngineManager; import javax.script.ScriptException; import java.util.AbstractMap.SimpleEntry; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -39,8 +42,12 @@ public class InMemoryGraphClient extends AbstractControllerService implements GraphClientService { private Graph graph; + + private final QueryFromNodesBuilder gremlinQueryFromNodesBuilder = new GremlinQueryFromNodesBuilder(); private boolean generateExceptionOnQuery = false; + private String databaseName; + public InMemoryGraphClient() { this(false); } @@ -59,14 +66,12 @@ private static JanusGraph buildGraph() { } @Override - public Map executeQuery(String query, Map parameters, GraphQueryResultCallback graphQueryResultCallback) { + public Map executeQuery(GraphQuery graphQuery, Map parameters, GraphQueryResultCallback graphQueryResultCallback) throws GraphQueryException { if(generateExceptionOnQuery) { - throw new ProcessException("Generated test exception"); + throw new GraphQueryException("Generated test exception"); } ScriptEngine engine = new ScriptEngineManager().getEngineByName("groovy"); - parameters.entrySet().stream().forEach( it -> { - engine.put(it.getKey(), it.getValue()); - }); + parameters.forEach(engine::put); if (graph == null) { graph = buildGraph(); } @@ -75,29 +80,28 @@ public Map executeQuery(String query, Map parame Object response; try { - response = engine.eval(query); + response = engine.eval(graphQuery.getQuery()); } catch (ScriptException ex) { - throw new ProcessException(ex); + throw new GraphQueryException(ex); } if (response instanceof Map) { // The below logic helps with the handling of complex Map relationships - Map resultMap = (Map) response; + Map resultMap = (Map) response; if (!resultMap.isEmpty()) { // Converts the resultMap to an entrySet iterator - Iterator outerResultSet = resultMap.entrySet().iterator(); // this loops over the outermost map - while(outerResultSet.hasNext()) { - Map.Entry innerResultSet = (Map.Entry) outerResultSet.next(); + for (Object o : resultMap.entrySet()) { + Map.Entry innerResultSet = (Map.Entry) o; // this is for edge case handling where innerResultSet is also a Map if (innerResultSet.getValue() instanceof Map) { - Iterator resultSet = ((Map) innerResultSet.getValue()).entrySet().iterator(); + Iterator> resultSet = ((Map) innerResultSet.getValue()).entrySet().iterator(); // looping over each result in the inner map while (resultSet.hasNext()) { - Map.Entry tempResult = (Map.Entry) resultSet.next(); + Map.Entry tempResult = resultSet.next(); Map tempRetObject = new HashMap<>(); tempRetObject.put(tempResult.getKey(), tempResult.getValue()); - SimpleEntry returnObject = new SimpleEntry(tempResult.getKey(), tempRetObject); + SimpleEntry returnObject = new 
SimpleEntry<>(tempResult.getKey(), tempRetObject); Map resultReturnMap = new HashMap<>(); resultReturnMap.put(innerResultSet.getKey(), returnObject); if (getLogger().isDebugEnabled()) { @@ -114,7 +118,6 @@ public Map executeQuery(String query, Map parame graphQueryResultCallback.process(resultReturnMap, false); } } - } } @@ -127,8 +130,45 @@ public String getTransitUrl() { } @Override - public List buildQueryFromNodes(List> eventList, Map parameters) { - // Build query from event list - return new GremlinQueryFromNodesBuilder().getQueries(eventList); + public List convertActionsToQueries(final List> nodeList) { + return Collections.emptyList(); } + + @Override + public List buildFlowGraphQueriesFromNodes(List> eventList, Map parameters) { + // Build queries from event list + return gremlinQueryFromNodesBuilder.getFlowGraphQueries(eventList); + } + + @Override + public List buildProvenanceQueriesFromNodes(List> eventList, Map parameters, final boolean includeFlowGraph) { + // Build queries from event list + return gremlinQueryFromNodesBuilder.getProvenanceQueries(eventList, includeFlowGraph); + } + + @Override + public List generateCreateDatabaseQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return gremlinQueryFromNodesBuilder.generateCreateDatabaseQueries(databaseName, isCompositeDatabase); + } + + @Override + public List generateCreateIndexQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return gremlinQueryFromNodesBuilder.generateCreateIndexQueries(databaseName, isCompositeDatabase); + } + + @Override + public List generateInitialVertexTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return gremlinQueryFromNodesBuilder.generateInitialVertexTypeQueries(databaseName, isCompositeDatabase); + } + + @Override + public List generateInitialEdgeTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return gremlinQueryFromNodesBuilder.generateInitialEdgeTypeQueries(databaseName, isCompositeDatabase); + } + + @Override + public String getDatabaseName() { + return databaseName; + } + } diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-test-clients/src/main/java/org/apache/nifi/graph/InMemoryJanusGraphClientService.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-test-clients/src/main/java/org/apache/nifi/graph/InMemoryJanusGraphClientService.java index 8ed9f2efa999..ff9e1f289b1e 100644 --- a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-test-clients/src/main/java/org/apache/nifi/graph/InMemoryJanusGraphClientService.java +++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-test-clients/src/main/java/org/apache/nifi/graph/InMemoryJanusGraphClientService.java @@ -20,12 +20,14 @@ import org.apache.nifi.annotation.lifecycle.OnEnabled; import org.apache.nifi.controller.AbstractControllerService; import org.apache.nifi.controller.ConfigurationContext; -import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.graph.exception.GraphClientMethodNotSupported; +import org.apache.nifi.graph.exception.GraphQueryException; import org.apache.tinkerpop.gremlin.structure.Graph; import org.janusgraph.core.JanusGraphFactory; import javax.script.ScriptEngine; import javax.script.ScriptEngineManager; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -38,6 +40,10 @@ public class 
InMemoryJanusGraphClientService extends AbstractControllerService implements GraphClientService { private Graph graph; + private final QueryFromNodesBuilder gremlinQueryFromNodesBuilder = new GremlinQueryFromNodesBuilder(); + + private String databaseName; + @OnEnabled public void onEnabled(ConfigurationContext context) { graph = JanusGraphFactory.build().set("storage.backend", "inmemory").open(); @@ -48,7 +54,7 @@ public void onEnabled(ConfigurationContext context) { * * This instantiates a new script engine every time to ensure a pristine environment for testing. * - * @param query A gremlin query (Groovy syntax) + * @param graphQuery A gremlin query (Groovy syntax) * @param parameters A map of parameters to be injected into the script. This can be structured the way you would * expect a REST API call to Gremlin Server. * @param handler The callback for parsing the response. @@ -56,15 +62,15 @@ public void onEnabled(ConfigurationContext context) { * in particular. */ @Override - public Map executeQuery(String query, Map parameters, GraphQueryResultCallback handler) { + public Map executeQuery(GraphQuery graphQuery, Map parameters, GraphQueryResultCallback handler) throws GraphQueryException { ScriptEngine engine = new ScriptEngineManager().getEngineByName("groovy"); - parameters.entrySet().forEach(entry -> engine.put(entry.getKey(), entry.getValue())); + parameters.forEach(engine::put); engine.put("graph", graph); engine.put("g", graph.traversal()); try { - Object response = engine.eval(query); + Object response = engine.eval(graphQuery.getQuery()); if (response instanceof Map) { Map resp = (Map) response; @@ -79,7 +85,7 @@ public Map executeQuery(String query, Map parame return new HashMap<>(); } catch (Exception ex) { - throw new ProcessException(ex); + throw new GraphQueryException(ex); } } @@ -100,8 +106,45 @@ public Graph getGraph() { } @Override - public List buildQueryFromNodes(List> eventList, Map parameters) { - // Build query from event list - return new GremlinQueryFromNodesBuilder().getQueries(eventList); + public List convertActionsToQueries(final List> nodeList) { + return Collections.emptyList(); + } + + @Override + public List buildFlowGraphQueriesFromNodes(List> eventList, Map parameters) { + // Build queries from event list + return gremlinQueryFromNodesBuilder.getFlowGraphQueries(eventList); + } + + @Override + public List buildProvenanceQueriesFromNodes(List> eventList, Map parameters, final boolean includeFlowGraph) { + // Build queries from event list + return gremlinQueryFromNodesBuilder.getProvenanceQueries(eventList, includeFlowGraph); + } + + @Override + public List generateCreateDatabaseQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return gremlinQueryFromNodesBuilder.generateCreateDatabaseQueries(databaseName, isCompositeDatabase); + } + + @Override + public List generateCreateIndexQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return gremlinQueryFromNodesBuilder.generateCreateIndexQueries(databaseName, isCompositeDatabase); + } + + @Override + public List generateInitialVertexTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return gremlinQueryFromNodesBuilder.generateInitialVertexTypeQueries(databaseName, isCompositeDatabase); + } + + @Override + public List generateInitialEdgeTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return gremlinQueryFromNodesBuilder.generateInitialEdgeTypeQueries(databaseName, isCompositeDatabase); + } + + @Override + public String getDatabaseName() { + // Never assigned by this in-memory test service, so this may return null + return databaseName; } }
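Because GraphQueryException extends IOException, callers that already guard graph calls with IO handling keep working; the calling pattern is simply (variables illustrative):

    try {
        clientService.executeQuery(graphQuery, parameters, handler);
    } catch (GraphQueryException e) {
        // script, driver, or connection failures surfaced by the client service
        getLogger().error("Graph query failed", e);
    }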
diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-3-cypher-service/src/main/java/org/apache/nifi/graph/Neo4JCypher3ClientService.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-3-cypher-service/src/main/java/org/apache/nifi/graph/Neo4JCypher3ClientService.java index 93d55c8dc982..488ad25b35bc 100644 --- a/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-3-cypher-service/src/main/java/org/apache/nifi/graph/Neo4JCypher3ClientService.java +++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-3-cypher-service/src/main/java/org/apache/nifi/graph/Neo4JCypher3ClientService.java @@ -27,7 +27,8 @@ import org.apache.nifi.controller.AbstractControllerService; import org.apache.nifi.controller.ConfigurationContext; import org.apache.nifi.expression.ExpressionLanguageScope; -import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.graph.exception.GraphClientMethodNotSupported; +import org.apache.nifi.graph.exception.GraphQueryException; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.ssl.SSLContextService; import org.neo4j.driver.internal.InternalNode; @@ -174,6 +175,10 @@ public class Neo4JCypher3ClientService extends AbstractControllerService impleme protected String password; protected String connectionUrl; + protected String databaseName; + + protected final QueryFromNodesBuilder cypherQueryFromNodesBuilder = new CypherQueryFromNodesBuilder(); + private static final List DESCRIPTORS; static { List _temp = new ArrayList<>(); @@ -248,15 +253,14 @@ protected Driver getNeo4JDriver() { } @OnEnabled - public void onEnabled(final ConfigurationContext context) { + public void onEnabled(final ConfigurationContext context) throws GraphQueryException { try { neo4JDriver = getDriver(context); } catch(Exception e) { getLogger().error("Error while getting connection " + e.getLocalizedMessage(),e); - throw new ProcessException("Error while getting connection" + e.getLocalizedMessage(),e); + throw new GraphQueryException("Error while getting connection: " + e.getLocalizedMessage(), e); } - getLogger().info("Neo4JCypherExecutor connection created for url {}", - new Object[] {connectionUrl}); + getLogger().info("Neo4JCypherExecutor connection created for url {}", connectionUrl); } @OnDisabled @@ -281,9 +285,9 @@ private Map handleInternalNode(Map recordMap) { } @Override - public Map executeQuery(String query, Map parameters, GraphQueryResultCallback handler) { + public Map executeQuery(GraphQuery graphQuery, Map parameters, GraphQueryResultCallback handler) throws GraphQueryException { try (Session session = neo4JDriver.session()) { - StatementResult result = session.run(query, parameters); + StatementResult result = session.run(graphQuery.getQuery(), parameters); long count = 0; while (result.hasNext()) { Record record = result.next(); @@ -306,7 +310,7 @@ public Map executeQuery(String query, Map parame return resultAttributes; } catch (Exception ex) { - throw new ProcessException("Query execution failed", ex); + throw new GraphQueryException("Query execution failed", ex); } } @@ -316,8 +320,44 @@ public String getTransitUrl() { } @Override - public List buildQueryFromNodes(List> eventList, Map parameters) { + public List convertActionsToQueries(final List> nodeList) { + return Collections.emptyList(); + } + + @Override + public List 
buildFlowGraphQueriesFromNodes(List> eventList, Map parameters) { + // Build queries from event list + return cypherQueryFromNodesBuilder.getFlowGraphQueries(eventList); + } + + @Override + public List buildProvenanceQueriesFromNodes(List> eventList, Map parameters, final boolean includeFlowGraph) { // Build queries from event list - return new CypherQueryFromNodesBuilder().getQueries(eventList); + return cypherQueryFromNodesBuilder.getProvenanceQueries(eventList, includeFlowGraph); + } + + @Override + public List generateCreateDatabaseQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return cypherQueryFromNodesBuilder.generateCreateDatabaseQueries(databaseName, isCompositeDatabase); + } + + @Override + public List generateCreateIndexQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return cypherQueryFromNodesBuilder.generateCreateIndexQueries(databaseName, isCompositeDatabase); + } + + @Override + public List generateInitialVertexTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return cypherQueryFromNodesBuilder.generateInitialVertexTypeQueries(databaseName, isCompositeDatabase); + } + + @Override + public List generateInitialEdgeTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return cypherQueryFromNodesBuilder.generateInitialEdgeTypeQueries(databaseName, isCompositeDatabase); + } + + @Override + public String getDatabaseName() { + return databaseName; } } diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-3-cypher-service/src/test/java/org/apache/nifi/graph/ITNeo4JCypher3Executor.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-3-cypher-service/src/test/java/org/apache/nifi/graph/ITNeo4JCypher3Executor.java index f3164238a0bd..4427cb08e693 100644 --- a/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-3-cypher-service/src/test/java/org/apache/nifi/graph/ITNeo4JCypher3Executor.java +++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-3-cypher-service/src/test/java/org/apache/nifi/graph/ITNeo4JCypher3Executor.java @@ -16,6 +16,7 @@ */ package org.apache.nifi.graph; +import org.apache.nifi.graph.exception.GraphQueryException; import org.apache.nifi.util.NoOpProcessor; import org.apache.nifi.util.TestRunner; import org.apache.nifi.util.TestRunners; @@ -56,7 +57,7 @@ public class ITNeo4JCypher3Executor { protected String password = "testing1234"; private GraphClientService clientService; - private GraphQueryResultCallback EMPTY_CALLBACK = (record, hasMore) -> {}; + private final GraphQueryResultCallback EMPTY_CALLBACK = (record, hasMore) -> {}; @BeforeEach public void setUp() throws Exception { @@ -91,10 +92,11 @@ public void tearDown() { } @Test - public void testCreateNodeNoReturn() { + public void testCreateNodeNoReturn() throws GraphQueryException { String query = "create (n:nodereturn { name: \"Testing\"})"; + final GraphQuery graphQuery = new GraphQuery(query, GraphClientService.CYPHER); - Map attributes = clientService.executeQuery(query, new HashMap<>(), EMPTY_CALLBACK); + Map attributes = clientService.executeQuery(graphQuery, new HashMap<>(), EMPTY_CALLBACK); assertEquals("1",attributes.get(GraphClientService.LABELS_ADDED)); assertEquals("1",attributes.get(GraphClientService.NODES_CREATED)); assertEquals("0",attributes.get(GraphClientService.NODES_DELETED)); @@ -105,11 +107,12 @@ public void testCreateNodeNoReturn() { } @Test 
- public void testCreateNodeOnePropertyWithReturn() { + public void testCreateNodeOnePropertyWithReturn() throws GraphQueryException { String query = "create (n { name:'abc' }) return n.name"; + final GraphQuery graphQuery = new GraphQuery(query, GraphClientService.CYPHER); final List> result = new ArrayList<>(); - Map attributes = clientService.executeQuery(query, new HashMap<>(), (record, hasMore) -> result.add(record)); + Map attributes = clientService.executeQuery(graphQuery, new HashMap<>(), (record, hasMore) -> result.add(record)); assertEquals("0",attributes.get(GraphClientService.LABELS_ADDED)); assertEquals("1",attributes.get(GraphClientService.NODES_CREATED)); assertEquals("0",attributes.get(GraphClientService.NODES_DELETED)); @@ -122,11 +125,12 @@ public void testCreateNodeOnePropertyWithReturn() { } @Test - public void testCreateNodeTwoPropertyOneLabelWithReturn() { + public void testCreateNodeTwoPropertyOneLabelWithReturn() throws GraphQueryException { String query = "create (n:Person { name:'abc', age : 1 }) return n.name, n.age"; + final GraphQuery graphQuery = new GraphQuery(query, GraphClientService.CYPHER); final List> result = new ArrayList<>(); - Map attributes = clientService.executeQuery(query, new HashMap<>(), (record, hasMore) -> { + Map attributes = clientService.executeQuery(graphQuery, new HashMap<>(), (record, hasMore) -> { result.add(record); }); @@ -139,15 +143,16 @@ public void testCreateNodeTwoPropertyOneLabelWithReturn() { assertEquals("1",attributes.get(GraphClientService.ROWS_RETURNED)); assertEquals(1, result.size()); assertEquals("abc", result.get(0).get("n.name")); - assertEquals(1l, result.get(0).get("n.age")); + assertEquals(1L, result.get(0).get("n.age")); } @Test - public void testCreateTwoNodeTwoPropertyOneRelationshipWithReturn() { + public void testCreateTwoNodeTwoPropertyOneRelationshipWithReturn() throws GraphQueryException { String query = "create (m:Person { name:'abc', age : 1 }) create (n:Person { name : 'pqr'}) create (m)-[r:hello]->(n) return m.name, n.name, type(r)"; + final GraphQuery graphQuery = new GraphQuery(query, GraphClientService.CYPHER); List> result = new ArrayList<>(); - Map attributes = clientService.executeQuery(query, new HashMap<>(), ((record, isMore) -> result.add(record))); + Map attributes = clientService.executeQuery(graphQuery, new HashMap<>(), ((record, isMore) -> result.add(record))); assertEquals("2",attributes.get(GraphClientService.LABELS_ADDED)); assertEquals("2",attributes.get(GraphClientService.NODES_CREATED)); assertEquals("0",attributes.get(GraphClientService.NODES_DELETED)); diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/main/java/org/apache/nifi/graph/Neo4JCypherClientService.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/main/java/org/apache/nifi/graph/Neo4JCypherClientService.java index 334042e8b69f..fd2ab487bb87 100644 --- a/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/main/java/org/apache/nifi/graph/Neo4JCypherClientService.java +++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/main/java/org/apache/nifi/graph/Neo4JCypherClientService.java @@ -27,7 +27,8 @@ import org.apache.nifi.controller.AbstractControllerService; import org.apache.nifi.controller.ConfigurationContext; import org.apache.nifi.expression.ExpressionLanguageScope; -import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.graph.exception.GraphClientMethodNotSupported; +import 
org.apache.nifi.graph.exception.GraphQueryException; import org.apache.nifi.processor.util.StandardValidators; import org.neo4j.driver.AuthTokens; import org.neo4j.driver.Config; @@ -69,6 +70,15 @@ public class Neo4JCypherClientService extends AbstractControllerService implemen .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .build(); + public static final PropertyDescriptor DATABASE_NAME = new PropertyDescriptor.Builder() + .name("database-name") + .displayName("Provenance Database Name") + .description("The name of the database upon which queries (such as provenance queries) should be invoked. If no database name is supplied, the default database will be used.") + .required(false) + .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) + .addValidator(StandardValidators.NON_BLANK_VALIDATOR) + .build(); + public static final PropertyDescriptor USERNAME = new PropertyDescriptor.Builder() .name("neo4j-username") .displayName("Username") @@ -151,15 +161,20 @@ public class Neo4JCypherClientService extends AbstractControllerService implemen .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) .build(); - protected Driver neo4JDriver; - protected String username; - protected String password; - protected String connectionUrl; + protected volatile Driver neo4JDriver; + protected volatile String username; + protected volatile String password; + protected volatile String connectionUrl; + + protected volatile String databaseName; + + protected final QueryFromNodesBuilder cypherQueryFromNodesBuilder = new CypherQueryFromNodesBuilder(); private static final List DESCRIPTORS; static { List _temp = new ArrayList<>(); _temp.add(CONNECTION_URL); + _temp.add(DATABASE_NAME); _temp.add(USERNAME); _temp.add(PASSWORD); _temp.add(CONNECTION_TIMEOUT); @@ -231,15 +246,15 @@ protected Driver getNeo4JDriver() { } @OnEnabled - public void onEnabled(final ConfigurationContext context) { + public void onEnabled(final ConfigurationContext context) throws GraphQueryException { try { neo4JDriver = getDriver(context); } catch(Exception e) { getLogger().error("Error while getting connection " + e.getLocalizedMessage(),e); - throw new ProcessException("Error while getting connection" + e.getLocalizedMessage(),e); + throw new GraphQueryException("Error while getting connection: " + e.getLocalizedMessage(), e); } - getLogger().info("Neo4JCypherExecutor connection created for url {}", - new Object[] {connectionUrl}); + getLogger().info("Neo4JCypherExecutor connection created for url {}", connectionUrl); + databaseName = context.getProperty(DATABASE_NAME).evaluateAttributeExpressions().getValue(); } @OnDisabled @@ -264,7 +279,9 @@ private Map handleInternalNode(Map recordMap) { } @Override - public Map executeQuery(String query, Map parameters, GraphQueryResultCallback handler) { + public Map executeQuery(GraphQuery graphQuery, Map parameters, GraphQueryResultCallback handler) throws GraphQueryException { + final String query = graphQuery.getQuery(); + getLogger().info("Executing Query:\n{}", query); try (Session session = neo4JDriver.session()) { Result result = session.run(query, parameters); long count = 0; @@ -289,7 +306,8 @@ public Map executeQuery(String query, Map parame return resultAttributes; } catch (Exception ex) { - throw new ProcessException("Query execution failed", ex); + getLogger().error("Query execution failed", ex); + throw new GraphQueryException("Query execution failed", ex); } } @@ -299,8 +317,44 @@ public String getTransitUrl() { } @Override - public List 
buildQueryFromNodes(List> eventList, Map parameters) { + public List convertActionsToQueries(final List> nodeList) { + return Collections.emptyList(); + } + + @Override + public List buildFlowGraphQueriesFromNodes(List> eventList, Map parameters) { // Build queries from event list - return new CypherQueryFromNodesBuilder().getQueries(eventList); + return cypherQueryFromNodesBuilder.getFlowGraphQueries(eventList); + } + + @Override + public List buildProvenanceQueriesFromNodes(List> eventList, Map parameters, final boolean includeFlowGraph) { + // Build queries from event list + return cypherQueryFromNodesBuilder.getProvenanceQueries(eventList, includeFlowGraph); + } + + @Override + public List generateCreateDatabaseQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return cypherQueryFromNodesBuilder.generateCreateDatabaseQueries(databaseName, isCompositeDatabase); + } + + @Override + public List generateCreateIndexQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return cypherQueryFromNodesBuilder.generateCreateIndexQueries(databaseName, isCompositeDatabase); + } + + @Override + public List generateInitialVertexTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return cypherQueryFromNodesBuilder.generateInitialVertexTypeQueries(databaseName, isCompositeDatabase); + } + + @Override + public List generateInitialEdgeTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported { + return cypherQueryFromNodesBuilder.generateInitialEdgeTypeQueries(databaseName, isCompositeDatabase); + } + + @Override + public String getDatabaseName() { + return databaseName; } } diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/test/java/org/apache/nifi/graph/ITNeo4JCypherClientServiceSSL.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/test/java/org/apache/nifi/graph/ITNeo4JCypherClientServiceSSL.java index 6b87b3306cd9..10b0374e8ae0 100644 --- a/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/test/java/org/apache/nifi/graph/ITNeo4JCypherClientServiceSSL.java +++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/test/java/org/apache/nifi/graph/ITNeo4JCypherClientServiceSSL.java @@ -17,6 +17,7 @@ package org.apache.nifi.graph; +import org.apache.nifi.graph.exception.GraphQueryException; import org.apache.nifi.security.util.KeyStoreUtils; import org.apache.nifi.security.util.TemporaryKeyStoreBuilder; import org.apache.nifi.security.util.TlsConfiguration; @@ -128,11 +129,12 @@ public void setUp() throws Exception { } @Test - public void testQuery() { + public void testQuery() throws GraphQueryException { String query = "create (n { name:'abc' }) return n.name"; + final GraphQuery graphQuery = new GraphQuery(query, GraphClientService.CYPHER); final List> result = new ArrayList<>(); - Map attributes = clientService.executeQuery(query, new HashMap<>(), (record, hasMore) -> result.add(record)); + Map attributes = clientService.executeQuery(graphQuery, new HashMap<>(), (record, hasMore) -> result.add(record)); assertEquals("0",attributes.get(GraphClientService.LABELS_ADDED)); assertEquals("1",attributes.get(GraphClientService.NODES_CREATED)); assertEquals("0",attributes.get(GraphClientService.NODES_DELETED)); @@ -184,7 +186,7 @@ private static Path writeCertificateEncoded(final String certificateEncoded) 
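Editor's note: taken together, the change above replaces the bare query string with a GraphQuery value object that carries its own language tag, and surfaces failures as the checked GraphQueryException instead of the runtime ProcessException. A minimal caller-side sketch follows; the query text and the clientService variable are illustrative assumptions, while GraphQuery, GraphClientService.CYPHER, the callback shape and the exception come from the diff itself:

    // Sketch: executing one Cypher statement through the reworked API
    final GraphQuery graphQuery = new GraphQuery("MATCH (n) RETURN n.name", GraphClientService.CYPHER);
    final List<Map<String, Object>> rows = new ArrayList<>();
    try {
        // the returned attributes carry counters such as NODES_CREATED and ROWS_RETURNED
        final Map<String, String> attributes = clientService.executeQuery(
                graphQuery, new HashMap<>(), (record, hasMore) -> rows.add(record));
    } catch (final GraphQueryException e) {
        // callers now handle a checked exception rather than a runtime ProcessException
    }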
diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/test/java/org/apache/nifi/graph/ITNeo4JCypherClientServiceSSL.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/test/java/org/apache/nifi/graph/ITNeo4JCypherClientServiceSSL.java
index 6b87b3306cd9..10b0374e8ae0 100644
--- a/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/test/java/org/apache/nifi/graph/ITNeo4JCypherClientServiceSSL.java
+++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/test/java/org/apache/nifi/graph/ITNeo4JCypherClientServiceSSL.java
@@ -17,6 +17,7 @@
 package org.apache.nifi.graph;

+import org.apache.nifi.graph.exception.GraphQueryException;
 import org.apache.nifi.security.util.KeyStoreUtils;
 import org.apache.nifi.security.util.TemporaryKeyStoreBuilder;
 import org.apache.nifi.security.util.TlsConfiguration;
@@ -128,11 +129,12 @@ public void setUp() throws Exception {
     }

     @Test
-    public void testQuery() {
+    public void testQuery() throws GraphQueryException {
         String query = "create (n { name:'abc' }) return n.name";
+        final GraphQuery graphQuery = new GraphQuery(query, GraphClientService.CYPHER);
         final List<Map<String, Object>> result = new ArrayList<>();
-        Map<String, String> attributes = clientService.executeQuery(query, new HashMap<>(), (record, hasMore) -> result.add(record));
+        Map<String, String> attributes = clientService.executeQuery(graphQuery, new HashMap<>(), (record, hasMore) -> result.add(record));
         assertEquals("0",attributes.get(GraphClientService.LABELS_ADDED));
         assertEquals("1",attributes.get(GraphClientService.NODES_CREATED));
         assertEquals("0",attributes.get(GraphClientService.NODES_DELETED));
@@ -184,7 +186,7 @@ private static Path writeCertificateEncoded(final String certificateEncoded) thr
     }

     @Test
-    public void testBuildQueryFromNodes() {
+    public void testBuildQueryFromNodes() throws GraphQueryException {
         final List<Map<String, Object>> nodeList = new ArrayList<>();
         nodeList.add(Collections.singletonMap("name", "Matt"));
         final Map<String, Object> node2 = new LinkedHashMap<>();
@@ -203,11 +205,11 @@ public void testBuildQueryFromNodes() {
                 new GraphQuery("MERGE (p:NiFiProvenanceEvent {color: \"blue\",name: \"Joe\",age: \"40\"})", GraphClientService.CYPHER),
                 new GraphQuery("MERGE (p:NiFiProvenanceEvent {name: \"Mary\",state: \"FL\",age: \"40\"})", GraphClientService.CYPHER)
         );
-        final List<GraphQuery> queryList = clientService.buildQueryFromNodes(nodeList, new HashMap<>());
+        final List<GraphQuery> queryList = clientService.buildProvenanceQueriesFromNodes(nodeList, new HashMap<>(), false);
         assertEquals(expectedQuery, queryList);
         final List<Map<String, Object>> result = new ArrayList<>();
         for (GraphQuery query : queryList) {
-            Map<String, String> attributes = clientService.executeQuery(query.getQuery(), new HashMap<>(), (record, hasMore) -> result.add(record));
+            Map<String, String> attributes = clientService.executeQuery(query, new HashMap<>(), (record, hasMore) -> result.add(record));
             assertEquals("0", attributes.get(GraphClientService.LABELS_ADDED));
             assertEquals("1", attributes.get(GraphClientService.NODES_CREATED));
             assertEquals("0", attributes.get(GraphClientService.NODES_DELETED));
diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/test/java/org/apache/nifi/graph/ITNeo4JCypherExecutorNoSSL.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/test/java/org/apache/nifi/graph/ITNeo4JCypherExecutorNoSSL.java
index 58196fed1ec0..a80abe3f131c 100644
--- a/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/test/java/org/apache/nifi/graph/ITNeo4JCypherExecutorNoSSL.java
+++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/test/java/org/apache/nifi/graph/ITNeo4JCypherExecutorNoSSL.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.graph;

+import org.apache.nifi.graph.exception.GraphQueryException;
 import org.apache.nifi.util.NoOpProcessor;
 import org.apache.nifi.util.TestRunner;
 import org.apache.nifi.util.TestRunners;
@@ -95,10 +96,11 @@ public void tearDown() {
     }

     @Test
-    public void testCreateNodeNoReturn() {
-        String query = "create (n:nodereturn { name: \"Testing\"})";
+    public void testCreateNodeNoReturn() throws GraphQueryException {
+        final String query = "create (n:nodereturn { name: \"Testing\"})";
+        final GraphQuery graphQuery = new GraphQuery(query, GraphClientService.CYPHER);

-        Map<String, String> attributes = clientService.executeQuery(query, new HashMap<>(), EMPTY_CALLBACK);
+        Map<String, String> attributes = clientService.executeQuery(graphQuery, new HashMap<>(), EMPTY_CALLBACK);
         assertEquals("1",attributes.get(GraphClientService.LABELS_ADDED));
         assertEquals("1",attributes.get(GraphClientService.NODES_CREATED));
         assertEquals("0",attributes.get(GraphClientService.NODES_DELETED));
@@ -109,11 +111,12 @@ public void testCreateNodeNoReturn() {
     }

     @Test
-    public void testCreateNodeOnePropertyWithReturn() {
+    public void testCreateNodeOnePropertyWithReturn() throws GraphQueryException {
         String query = "create (n { name:'abc' }) return n.name";
+        final GraphQuery graphQuery = new GraphQuery(query, GraphClientService.CYPHER);
         final List<Map<String, Object>> result = new ArrayList<>();
-        Map<String, String> attributes = clientService.executeQuery(query, new HashMap<>(), (record, hasMore) -> result.add(record));
+        Map<String, String> attributes = clientService.executeQuery(graphQuery, new HashMap<>(), (record, hasMore) -> result.add(record));
         assertEquals("0",attributes.get(GraphClientService.LABELS_ADDED));
         assertEquals("1",attributes.get(GraphClientService.NODES_CREATED));
         assertEquals("0",attributes.get(GraphClientService.NODES_DELETED));
@@ -126,11 +129,12 @@ public void testCreateNodeOnePropertyWithReturn() {
     }

     @Test
-    public void testCreateNodeTwoPropertyOneLabelWithReturn() {
+    public void testCreateNodeTwoPropertyOneLabelWithReturn() throws GraphQueryException {
         String query = "create (n:Person { name:'abc', age : 1 }) return n.name, n.age";
+        final GraphQuery graphQuery = new GraphQuery(query, GraphClientService.CYPHER);
         final List<Map<String, Object>> result = new ArrayList<>();
-        Map<String, String> attributes = clientService.executeQuery(query, new HashMap<>(), (record, hasMore) -> {
+        Map<String, String> attributes = clientService.executeQuery(graphQuery, new HashMap<>(), (record, hasMore) -> {
             result.add(record);
         });
@@ -147,11 +151,12 @@ public void testCreateNodeTwoPropertyOneLabelWithReturn() {
     }

     @Test
-    public void testCreateTwoNodeTwoPropertyOneRelationshipWithReturn() {
+    public void testCreateTwoNodeTwoPropertyOneRelationshipWithReturn() throws GraphQueryException {
         String query = "create (m:Person { name:'abc', age : 1 }) create (n:Person { name : 'pqr'}) create (m)-[r:hello]->(n) return m.name, n.name, type(r)";
+        final GraphQuery graphQuery = new GraphQuery(query, GraphClientService.CYPHER);
         List<Map<String, Object>> result = new ArrayList<>();
-        Map<String, String> attributes = clientService.executeQuery(query, new HashMap<>(), ((record, isMore) -> result.add(record)));
+        Map<String, String> attributes = clientService.executeQuery(graphQuery, new HashMap<>(), ((record, isMore) -> result.add(record)));
         assertEquals("2",attributes.get(GraphClientService.LABELS_ADDED));
         assertEquals("2",attributes.get(GraphClientService.NODES_CREATED));
         assertEquals("0",attributes.get(GraphClientService.NODES_DELETED));
@@ -166,7 +171,7 @@ public void testCreateTwoNodeTwoPropertyOneRelationshipWithReturn() {
     }

     @Test
-    public void testBuildQueryFromNodes() {
+    public void testBuildQueryFromNodes() throws GraphQueryException {
         final List<Map<String, Object>> nodeList = new ArrayList<>();
         nodeList.add(Collections.singletonMap("name", "Matt"));
         final Map<String, Object> node2 = new LinkedHashMap<>();
@@ -185,11 +190,11 @@ public void testBuildQueryFromNodes() {
                 new GraphQuery("MERGE (p:NiFiProvenanceEvent {color: \"blue\",name: \"Joe\",age: \"40\"})", GraphClientService.CYPHER),
                 new GraphQuery("MERGE (p:NiFiProvenanceEvent {name: \"Mary\",state: \"FL\",age: \"40\"})", GraphClientService.CYPHER)
         );
-        final List<GraphQuery> queryList = clientService.buildQueryFromNodes(nodeList, new HashMap<>());
+        final List<GraphQuery> queryList = clientService.buildProvenanceQueriesFromNodes(nodeList, new HashMap<>(), false);
         assertEquals(expectedQuery, queryList);
         final List<Map<String, Object>> result = new ArrayList<>();
         for (GraphQuery query : queryList) {
-            Map<String, String> attributes = clientService.executeQuery(query.getQuery(), new HashMap<>(), (record, hasMore) -> result.add(record));
+            Map<String, String> attributes = clientService.executeQuery(query, new HashMap<>(), (record, hasMore) -> result.add(record));
             assertEquals("0", attributes.get(GraphClientService.LABELS_ADDED));
             assertEquals("1", attributes.get(GraphClientService.NODES_CREATED));
             assertEquals("0", attributes.get(GraphClientService.NODES_DELETED));
diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/test/java/org/apache/nifi/graph/TestNeo4JCypherClientService.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/test/java/org/apache/nifi/graph/TestNeo4JCypherClientService.java
index b09cf10587b8..996a12aaa146 100644
--- a/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/test/java/org/apache/nifi/graph/TestNeo4JCypherClientService.java
+++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/test/java/org/apache/nifi/graph/TestNeo4JCypherClientService.java
@@ -57,7 +57,7 @@ public void testBuildQueryFromNodes() {
                 new GraphQuery("MERGE (p:NiFiProvenanceEvent {color: \"blue\",name: \"Joe\",age: \"40\"})", GraphClientService.CYPHER),
                 new GraphQuery("MERGE (p:NiFiProvenanceEvent {name: \"Mary\",state: \"FL\",age: \"40\"})", GraphClientService.CYPHER)
         );
-        final List<GraphQuery> queryList = clientService.buildQueryFromNodes(nodeList, new HashMap<>());
+        final List<GraphQuery> queryList = clientService.buildProvenanceQueriesFromNodes(nodeList, new HashMap<>(), false);
         assertEquals(expectedQueryList, queryList);
     }
 }
\ No newline at end of file
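Editor's note: the three test files above pin down the builder split — the old buildQueryFromNodes is now buildProvenanceQueriesFromNodes, with a boolean controlling whether flow-graph queries are included. A hedged sketch of the contract the tests assert; the node map and the expected MERGE text are taken from the tests, and clientService stands in for any enabled GraphClientService:

    // Sketch: provenance events supplied as node maps become MERGE statements
    final List<Map<String, Object>> nodeList = new ArrayList<>();
    nodeList.add(Collections.singletonMap("name", "Matt"));
    final List<GraphQuery> queryList =
            clientService.buildProvenanceQueriesFromNodes(nodeList, new HashMap<>(), false);
    // yields e.g. new GraphQuery("MERGE (p:NiFiProvenanceEvent {name: \"Matt\"})", GraphClientService.CYPHER)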
diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-other-graph-services/src/main/java/org/apache/nifi/graph/ArcadeDBClientService.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-other-graph-services/src/main/java/org/apache/nifi/graph/ArcadeDBClientService.java
index ea8e9fba10aa..c45b59560f91 100644
--- a/nifi-nar-bundles/nifi-graph-bundle/nifi-other-graph-services/src/main/java/org/apache/nifi/graph/ArcadeDBClientService.java
+++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-other-graph-services/src/main/java/org/apache/nifi/graph/ArcadeDBClientService.java
@@ -27,7 +27,8 @@
 import org.apache.nifi.controller.AbstractControllerService;
 import org.apache.nifi.controller.ConfigurationContext;
 import org.apache.nifi.expression.ExpressionLanguageScope;
-import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.graph.exception.GraphClientMethodNotSupported;
+import org.apache.nifi.graph.exception.GraphQueryException;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.web.client.api.HttpResponseEntity;
 import org.apache.nifi.web.client.provider.api.WebClientServiceProvider;
@@ -38,6 +39,7 @@
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Base64;
 import java.util.Collections;
@@ -86,22 +88,13 @@ public class ArcadeDBClientService extends AbstractControllerService implements
             .build();
     public static final PropertyDescriptor DATABASE_NAME = new PropertyDescriptor.Builder()
             .name("database-name")
-            .displayName("Database name")
-            .description("The name of the database the query should be invoked on.")
-            .required(true)
+            .displayName("Provenance Database Name")
+            .description("The name of the database upon which queries (such as provenance queries) should be invoked. If no database name is supplied, the default database will be used.")
+            .required(false)
             .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
             .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
             .build();

-    public static final PropertyDescriptor QUERY_LANGUAGE = new PropertyDescriptor.Builder()
-            .name("query-language")
-            .displayName("Query language")
-            .description("Query language to use with ArcadeDB.")
-            .required(true)
-            .defaultValue("gremlin")
-            .allowableValues("sql", "cypher", "gremlin")
-            .build();
-
     private static final String NOT_SUPPORTED = "NOT_SUPPORTED";
     private static final String RESULT_TOKEN = "result";
     private static final ObjectMapper MAPPER = new ObjectMapper();
@@ -111,36 +104,40 @@ public class ArcadeDBClientService extends AbstractControllerService implements
     private String databaseName;
     private String userName;
     private String password;
-    private String language;

     static final List<PropertyDescriptor> PROPERTY_DESCRIPTORS = Arrays.asList(
             API_URL,
             WEB_CLIENT_SERVICE_PROVIDER,
             REQUEST_USERNAME,
             REQUEST_PASSWORD,
-            DATABASE_NAME,
-            QUERY_LANGUAGE
+            DATABASE_NAME
     );

+    private final QueryFromNodesBuilder cypherQueryFromNodesBuilder = new CypherQueryFromNodesBuilder();
+    private final QueryFromNodesBuilder sqlQueryFromNodesBuilder = new SqlQueryFromNodesBuilder();
+    private final QueryFromNodesBuilder gremlinQueryFromNodesBuilder = new GremlinQueryFromNodesBuilder();
+
     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
         return PROPERTY_DESCRIPTORS;
     }

     @OnEnabled
-    public void onEnabled(final ConfigurationContext context) {
+    public void onEnabled(final ConfigurationContext context) throws GraphQueryException {
         webClientServiceProvider = context.getProperty(WEB_CLIENT_SERVICE_PROVIDER).asControllerService(WebClientServiceProvider.class);
         apiUrl = context.getProperty(API_URL).evaluateAttributeExpressions().getValue();
         databaseName = context.getProperty(DATABASE_NAME).evaluateAttributeExpressions().getValue();
         userName = context.getProperty(REQUEST_USERNAME).getValue();
         password = context.getProperty(REQUEST_PASSWORD).getValue();
-        language = context.getProperty(QUERY_LANGUAGE).getValue();
         uri = getUri();
+
+        // TODO add a USE statement for the database name
     }

     @Override
-    public Map<String, String> executeQuery(final String query, final Map<String, Object> parameters, final GraphQueryResultCallback handler) {
+    public Map<String, String> executeQuery(final GraphQuery graphQuery, final Map<String, Object> parameters, final GraphQueryResultCallback handler) throws GraphQueryException {
+        final String query = graphQuery.getQuery();
         getLogger().info("Executing Query:\n" + query);
-        final ArcadeDbRequestBody body = new ArcadeDbRequestBody(language, query, parameters);
+        final ArcadeDbRequestBody body = new ArcadeDbRequestBody(graphQuery.getLanguage(), query, parameters);
         final HttpResponseEntity httpResponseEntity = getHttpResponseEntity(body);
         final int responseStatusCode = httpResponseEntity.statusCode();
@@ -153,7 +150,7 @@ public Map<String, String> executeQuery(final String query, final Map
executeQuery(final String query, final Map
     @Override
-    public List<GraphQuery> buildQueryFromNodes(final List<Map<String, Object>> nodeList, final Map<String, Object> parameters) {
-        // Build queries from event list
-        if (GraphClientService.GREMLIN.equals(language)) {
-            return new GremlinQueryFromNodesBuilder().getQueries(nodeList);
-        } else if (GraphClientService.SQL.equals(language)) {
-            return new SqlQueryFromNodesBuilder().getQueries(nodeList);
-        } else if (GraphClientService.CYPHER.equals(language)) {
-            return new CypherQueryFromNodesBuilder().getQueries(nodeList);
-        }
-        return Collections.emptyList();
+    public List<GraphQuery> convertActionsToQueries(final List<Map<String, Object>> nodeList) {
+        return new ArrayList<>(0);
+    }

+    @Override
+    public List<GraphQuery> buildFlowGraphQueriesFromNodes(List<Map<String, Object>> nodeList, Map<String, Object> parameters) {
+        // Use Cypher
+        return cypherQueryFromNodesBuilder.getFlowGraphQueries(nodeList);
+    }
+
+    @Override
+    public List<GraphQuery> buildProvenanceQueriesFromNodes(final List<Map<String, Object>> nodeList, final Map<String, Object> parameters, final boolean includeFlowGraph) {
+        return cypherQueryFromNodesBuilder.getProvenanceQueries(nodeList, includeFlowGraph);
+    }
+
+    @Override
+    public List<GraphQuery> generateCreateDatabaseQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported {
+        // Cypher for Gremlin (used by ArcadeDB) doesn't support creating DBs
+        // Return an empty list for now, TODO use server REST API rather than a query language
+        getLogger().warn(this.getClass().getSimpleName() + " does not support creating databases from queries, will use default database");
+        return new ArrayList<>(0);
+    }
+
+    @Override
+    public List<GraphQuery> generateCreateIndexQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported {
+        return cypherQueryFromNodesBuilder.generateCreateIndexQueries(databaseName, isCompositeDatabase);
+    }
+
+    @Override
+    public List<GraphQuery> generateInitialVertexTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported {
+        return cypherQueryFromNodesBuilder.generateInitialVertexTypeQueries(databaseName, isCompositeDatabase);
+    }
+
+    @Override
+    public List<GraphQuery> generateInitialEdgeTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported {
+        return cypherQueryFromNodesBuilder.generateInitialEdgeTypeQueries(databaseName, isCompositeDatabase);
+    }
+
+    public String getDatabaseName() {
+        return databaseName;
     }

     private String normalizeURL(final String url) {
         return url.replaceAll("(?
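Editor's note: the ArcadeDB diff is cut off above mid-method. With the service-level Query Language property removed, the language now travels with each query, so a single ArcadeDBClientService can accept SQL, Cypher and Gremlin side by side. A hedged sketch; the query strings are illustrative, while the language constants and the request-body plumbing come from the diff:

    // Sketch: the language tag rides on the GraphQuery, not on the service
    final GraphQuery sql = new GraphQuery("SELECT FROM V", GraphClientService.SQL);
    final GraphQuery gremlin = new GraphQuery("g.V().count()", GraphClientService.GREMLIN);
    // executeQuery(...) forwards graphQuery.getLanguage() into the ArcadeDbRequestBody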
doQuery(String query, Map<String, Object> parameters, GraphQueryResultCallback handler) {
+    public Map<String, String> doQuery(String query, Map<String, Object> parameters, GraphQueryResultCallback handler) throws GraphQueryException {
         try {
             Iterator iterator = client.submit(query, parameters).iterator();
             long count = 0;
@@ -85,12 +91,13 @@ public Map<String, String> doQuery(String query, Map<String, Object> parameters,

             return resultAttributes;
         } catch (Exception ex) {
-            throw new ProcessException(ex);
+            throw new GraphQueryException(ex);
         }
     }

     @Override
-    public Map<String, String> executeQuery(String query, Map<String, Object> parameters, GraphQueryResultCallback handler) {
+    public Map<String, String> executeQuery(GraphQuery graphQuery, Map<String, Object> parameters, GraphQueryResultCallback handler) throws GraphQueryException {
+        final String query = graphQuery.getQuery();
         try {
             return doQuery(query, parameters, handler);
         } catch (Exception ex) {
@@ -108,8 +115,45 @@ public String getTransitUrl() {
     }

     @Override
-    public List<GraphQuery> buildQueryFromNodes(List<Map<String, Object>> eventList, Map<String, Object> parameters) {
-        // Build query from event list
-        return new GremlinQueryFromNodesBuilder().getQueries(eventList);
+    public List<GraphQuery> convertActionsToQueries(final List<Map<String, Object>> nodeList) {
+        return Collections.emptyList();
+    }
+
+    @Override
+    public List<GraphQuery> buildFlowGraphQueriesFromNodes(List<Map<String, Object>> eventList, Map<String, Object> parameters) {
+        // Build queries from event list
+        return gremlinQueryFromNodesBuilder.getFlowGraphQueries(eventList);
+    }
+
+    @Override
+    public List<GraphQuery> buildProvenanceQueriesFromNodes(List<Map<String, Object>> eventList, Map<String, Object> parameters, final boolean includeFlowGraph) {
+        // Build queries from event list
+        return gremlinQueryFromNodesBuilder.getProvenanceQueries(eventList, includeFlowGraph);
+    }
+
+    @Override
+    public List<GraphQuery> generateCreateDatabaseQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported {
+        return gremlinQueryFromNodesBuilder.generateCreateDatabaseQueries(databaseName, isCompositeDatabase);
+    }
+
+    @Override
+    public List<GraphQuery> generateCreateIndexQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported {
+        return gremlinQueryFromNodesBuilder.generateCreateIndexQueries(databaseName, isCompositeDatabase);
+    }
+
+    @Override
+    public List<GraphQuery> generateInitialVertexTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported {
+        return gremlinQueryFromNodesBuilder.generateInitialVertexTypeQueries(databaseName, isCompositeDatabase);
+    }
+
+    @Override
+    public List<GraphQuery> generateInitialEdgeTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported {
+        return gremlinQueryFromNodesBuilder.generateInitialEdgeTypeQueries(databaseName, isCompositeDatabase);
+    }
+
+    @Override
+    public String getDatabaseName() {
+        return databaseName;
     }
+
 }
diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-other-graph-services/src/main/java/org/apache/nifi/graph/OpenCypherClientService.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-other-graph-services/src/main/java/org/apache/nifi/graph/OpenCypherClientService.java
index fb5c202363c8..54caf7894d30 100644
--- a/nifi-nar-bundles/nifi-graph-bundle/nifi-other-graph-services/src/main/java/org/apache/nifi/graph/OpenCypherClientService.java
+++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-other-graph-services/src/main/java/org/apache/nifi/graph/OpenCypherClientService.java
@@ -22,7 +22,8 @@
 import org.apache.nifi.annotation.lifecycle.OnDisabled;
 import org.apache.nifi.annotation.lifecycle.OnEnabled;
 import org.apache.nifi.controller.ConfigurationContext;
-import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.graph.exception.GraphClientMethodNotSupported;
+import org.apache.nifi.graph.exception.GraphQueryException;
 import org.apache.tinkerpop.gremlin.driver.Cluster;
 import org.neo4j.driver.internal.InternalNode;
 import org.neo4j.driver.v1.Driver;
@@ -31,6 +32,7 @@
 import org.neo4j.driver.v1.StatementResult;
 import org.opencypher.gremlin.neo4j.driver.GremlinDatabase;

+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -42,6 +44,10 @@
 public class OpenCypherClientService extends AbstractTinkerpopClientService implements GraphClientService {
     private volatile Driver gremlinDriver;

+    protected volatile QueryFromNodesBuilder cypherQueryFromNodesBuilder = new CypherQueryFromNodesBuilder();
+
+    private String databaseName;
+
     @OnEnabled
     public void onEnabled(ConfigurationContext context) {
         Cluster cluster = buildCluster(context);
@@ -69,9 +75,9 @@ private Map<String, Object> handleInternalNode(Map<String, Object> recordMap) {
     }

     @Override
-    public Map<String, String> executeQuery(String query, Map<String, Object> parameters, GraphQueryResultCallback handler) {
+    public Map<String, String> executeQuery(GraphQuery graphQuery, Map<String, Object> parameters, GraphQueryResultCallback handler) throws GraphQueryException {
         try (Session session = gremlinDriver.session()) {
-            StatementResult result = session.run(query, parameters);
+            StatementResult result = session.run(graphQuery.getQuery(), parameters);
             long count = 0;
             while (result.hasNext()) {
                 Record record = result.next();
@@ -91,7 +97,7 @@ public Map<String, String> executeQuery(String query, Map<String, Object> parame

             return resultAttributes;
         } catch (Exception ex) {
-            throw new ProcessException(ex);
+            throw new GraphQueryException(ex);
         }
     }

@@ -101,8 +107,44 @@ public String getTransitUrl() {
     }

     @Override
-    public List<GraphQuery> buildQueryFromNodes(List<Map<String, Object>> eventList, Map<String, Object> parameters) {
+    public List<GraphQuery> convertActionsToQueries(final List<Map<String, Object>> nodeList) {
+        return Collections.emptyList();
+    }
+
+    @Override
+    public List<GraphQuery> buildFlowGraphQueriesFromNodes(List<Map<String, Object>> eventList, Map<String, Object> parameters) {
+        // Build queries from event list
+        return cypherQueryFromNodesBuilder.getFlowGraphQueries(eventList);
+    }
+
+    @Override
+    public List<GraphQuery> buildProvenanceQueriesFromNodes(List<Map<String, Object>> eventList, Map<String, Object> parameters, final boolean includeFlowGraph) {
         // Build queries from event list
-        return new CypherQueryFromNodesBuilder().getQueries(eventList);
+        return cypherQueryFromNodesBuilder.getProvenanceQueries(eventList, includeFlowGraph);
+    }
+
+    @Override
+    public List<GraphQuery> generateCreateDatabaseQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported {
+        return cypherQueryFromNodesBuilder.generateCreateDatabaseQueries(databaseName, isCompositeDatabase);
+    }
+
+    @Override
+    public List<GraphQuery> generateCreateIndexQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported {
+        return cypherQueryFromNodesBuilder.generateCreateIndexQueries(databaseName, isCompositeDatabase);
+    }
+
+    @Override
+    public List<GraphQuery> generateInitialVertexTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported {
+        return cypherQueryFromNodesBuilder.generateInitialVertexTypeQueries(databaseName, isCompositeDatabase);
+    }
+
+    @Override
+    public List<GraphQuery> generateInitialEdgeTypeQueries(final String databaseName, final boolean isCompositeDatabase) throws GraphClientMethodNotSupported {
+        return cypherQueryFromNodesBuilder.generateInitialEdgeTypeQueries(databaseName, isCompositeDatabase);
+    }
+
+    @Override
+    public String getDatabaseName() {
+        return databaseName;
     }
 }
diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-other-graph-services/src/test/java/org/apache/nifi/graph/GremlinClientServiceIT.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-other-graph-services/src/test/java/org/apache/nifi/graph/GremlinClientServiceIT.java
index 15e8a87e83da..d853c65a5f58 100644
--- a/nifi-nar-bundles/nifi-graph-bundle/nifi-other-graph-services/src/test/java/org/apache/nifi/graph/GremlinClientServiceIT.java
+++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-other-graph-services/src/test/java/org/apache/nifi/graph/GremlinClientServiceIT.java
@@ -18,6 +18,7 @@
 package org.apache.nifi.graph;

 import org.apache.commons.io.IOUtils;
+import org.apache.nifi.graph.exception.GraphQueryException;
 import org.apache.nifi.util.NoOpProcessor;
 import org.apache.nifi.util.TestRunner;
 import org.apache.nifi.util.TestRunners;
@@ -34,13 +35,12 @@
  * As of JanusGraph 0.3.X these tests can be a little inconsistent for a few runs at first.
  */
 public class GremlinClientServiceIT {
-    private TestRunner runner;
     private TestableGremlinClientService clientService;

     @BeforeEach
     public void setup() throws Exception {
         clientService = new TestableGremlinClientService();
-        runner = TestRunners.newTestRunner(NoOpProcessor.class);
+        TestRunner runner = TestRunners.newTestRunner(NoOpProcessor.class);
         runner.addControllerService("gremlinService", clientService);
         runner.setProperty(clientService, AbstractTinkerpopClientService.CONTACT_POINTS, "localhost");
         runner.enableControllerService(clientService);
@@ -59,19 +59,21 @@ public void tearDown() throws Exception {
     }

     @Test
-    public void testValueMap() {
+    public void testValueMap() throws GraphQueryException {
         String gremlin = "g.V().hasLabel('dog').valueMap()";
+        GraphQuery graphQuery = new GraphQuery(gremlin, GraphClientService.GREMLIN);
         AtomicInteger integer = new AtomicInteger();
-        Map<String, String> result = clientService.executeQuery(gremlin, new HashMap<>(), (record, isMore) -> integer.incrementAndGet());
+        Map<String, String> result = clientService.executeQuery(graphQuery, new HashMap<>(), (record, isMore) -> integer.incrementAndGet());

         assertEquals(2, integer.get());
     }

     @Test
-    public void testCount() {
+    public void testCount() throws GraphQueryException {
         String gremlin = "g.V().hasLabel('dog').count()";
+        GraphQuery graphQuery = new GraphQuery(gremlin, GraphClientService.GREMLIN);
         AtomicInteger integer = new AtomicInteger();
-        Map<String, String> result = clientService.executeQuery(gremlin, new HashMap<>(), (record, isMore) -> integer.incrementAndGet());
+        Map<String, String> result = clientService.executeQuery(graphQuery, new HashMap<>(), (record, isMore) -> integer.incrementAndGet());

         assertEquals(1, integer.get());
     }
 }
diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-other-graph-services/src/test/java/org/apache/nifi/graph/OpenCypherClientServiceIT.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-other-graph-services/src/test/java/org/apache/nifi/graph/OpenCypherClientServiceIT.java
index f3174b4a9c4a..1e3c50277471 100644
--- a/nifi-nar-bundles/nifi-graph-bundle/nifi-other-graph-services/src/test/java/org/apache/nifi/graph/OpenCypherClientServiceIT.java
+++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-other-graph-services/src/test/java/org/apache/nifi/graph/OpenCypherClientServiceIT.java
@@ -17,6 +17,7 @@
 package org.apache.nifi.graph;

+import org.apache.nifi.graph.exception.GraphQueryException;
 import org.apache.nifi.util.NoOpProcessor;
 import org.apache.nifi.util.TestRunner;
 import org.apache.nifi.util.TestRunners;
@@ -25,7 +26,6 @@
 import org.junit.jupiter.api.Test;
 import org.neo4j.driver.v1.Driver;
 import org.neo4j.driver.v1.Session;
-import org.neo4j.driver.v1.StatementResult;
 import org.opencypher.gremlin.neo4j.driver.GremlinDatabase;

 import java.util.ArrayList;
@@ -78,18 +78,19 @@ public void after() {
         executeSession("MATCH (n) DETACH DELETE n");
     }

-    protected StatementResult executeSession(String statement) {
+    protected void executeSession(String statement) {
         try (Session session = driver.session()) {
-            return session.run(statement);
+            session.run(statement);
         }
     }

     @Test
-    public void testBasicQuery() {
+    public void testBasicQuery() throws GraphQueryException {
         String query = "MATCH (n) RETURN n";
+        GraphQuery graphQuery = new GraphQuery(query, GraphClientService.CYPHER);
         List<Map<String, Object>> results = new ArrayList<>();
-        Map<String, String> attributes = service.executeQuery(query, new HashMap<>(), (record, hasMore) -> results.add(record));
+        Map<String, String> attributes = service.executeQuery(graphQuery, new HashMap<>(), (record, hasMore) -> results.add(record));

         assertNotNull(attributes);
         assertEquals(7, attributes.size());
         assertEquals(2, results.size());
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
index 55b69f71b3a5..96824002f6f2 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
@@ -75,11 +75,11 @@ public class CreateHadoopSequenceFile extends AbstractHadoopProcessor {
     private static final String NOT_PACKAGED = "not packaged";
     // Relationships.
-    public static final Relationship RELATIONSHIP_SUCCESS = new Relationship.Builder()
+    public static final Relationship REL_SUCCESS = new Relationship.Builder()
             .name("success")
             .description("Generated Sequence Files are sent to this relationship")
             .build();
-    public static final Relationship RELATIONSHIP_FAILURE = new Relationship.Builder()
+    public static final Relationship REL_FAILURE = new Relationship.Builder()
             .name("failure")
             .description("Incoming files that failed to generate a Sequence File are sent to this relationship")
             .build();
@@ -87,8 +87,8 @@ public class CreateHadoopSequenceFile extends AbstractHadoopProcessor {

     static {
         Set<Relationship> rels = new HashSet<>();
-        rels.add(RELATIONSHIP_SUCCESS);
-        rels.add(RELATIONSHIP_FAILURE);
+        rels.add(REL_SUCCESS);
+        rels.add(REL_FAILURE);
         relationships = Collections.unmodifiableSet(rels);
     }
     // Optional Properties.
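Editor's note: the remaining hunks in this and the following processors apply the ProvenanceReporter change from nifi-api across the codebase — every provenance emission now names the relationship the FlowFile is routed to, so the event can be correlated with the routing decision. A hedged sketch of the resulting call pattern inside a processor's onTrigger; REL_SUCCESS, session, flowFile and transitUri stand in for the usual processor members:

    // Sketch: provenance emission now carries the routing relationship
    session.getProvenanceReporter().receive(flowFile, transitUri, REL_SUCCESS);
    session.transfer(flowFile, REL_SUCCESS);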
@@ -161,7 +161,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro
         final Configuration configuration = getConfiguration();
         if (configuration == null) {
             getLogger().error("HDFS not configured properly");
-            session.transfer(flowFile, RELATIONSHIP_FAILURE);
+            session.transfer(flowFile, REL_FAILURE);
             context.yield();
             return;
         }
@@ -178,12 +178,12 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro
         try {
             StopWatch stopWatch = new StopWatch(true);
             flowFile = sequenceFileWriter.writeSequenceFile(flowFile, session, configuration, compressionType, codec);
-            session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
-            session.transfer(flowFile, RELATIONSHIP_SUCCESS);
-            getLogger().info("Transferred flowfile {} to {}", new Object[]{flowFile, RELATIONSHIP_SUCCESS});
+            session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
+            session.transfer(flowFile, REL_SUCCESS);
+            getLogger().info("Transferred flowfile {} to {}", flowFile, REL_SUCCESS);
         } catch (ProcessException e) {
             getLogger().error("Failed to create Sequence File. Transferring {} to 'failure'", flowFile, e);
-            session.transfer(flowFile, RELATIONSHIP_FAILURE);
+            session.transfer(flowFile, REL_FAILURE);
         }
     }
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/DeleteHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/DeleteHDFS.java
index 7f9459f03fb9..c86d5b2abefb 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/DeleteHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/DeleteHDFS.java
@@ -166,6 +166,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro
         }

         int failedPath = 0;
+        Path qualifiedPath = null;
         for (Path path : pathList) {
             if (fileSystem.exists(path)) {
                 try {
@@ -175,10 +176,9 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro
                     flowFile = session.putAllAttributes(flowFile, attributes);

                     fileSystem.delete(path, isRecursive(context, session));
-                    getLogger().debug("For flowfile {} Deleted file at path {} with name {}", new Object[]{originalFlowFile, path.getParent().toString(), path.getName()});
-                    final Path qualifiedPath = path.makeQualified(fileSystem.getUri(), fileSystem.getWorkingDirectory());
+                    getLogger().debug("For flowfile {} Deleted file at path {} with name {}", originalFlowFile, path.getParent().toString(), path.getName());
+                    qualifiedPath = path.makeQualified(fileSystem.getUri(), fileSystem.getWorkingDirectory());
                     flowFile = session.putAttribute(flowFile, HADOOP_FILE_URL_ATTRIBUTE, qualifiedPath.toString());
-                    session.getProvenanceReporter().invokeRemoteProcess(flowFile, qualifiedPath.toString());
                 } catch (IOException ioe) {
                     // One possible scenario is that the IOException is permissions based, however it would be impractical to check every possible
                     // external HDFS authorization tool (Ranger, Sentry, etc). Local ACLs could be checked but the operation would be expensive.
@@ -188,6 +188,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro
                     // The error message is helpful in understanding at a flowfile level what caused the IOException (which ACL is denying the operation, e.g.)
                     attributes.put(getAttributePrefix() + ".error.message", ioe.getMessage());

+                    session.getProvenanceReporter().invokeRemoteProcess(flowFile, qualifiedPath == null ? "" : qualifiedPath.toString(), getFailureRelationship());
                     session.transfer(session.putAllAttributes(session.clone(flowFile), attributes), getFailureRelationship());
                     failedPath++;
                 }
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java
index 2a4986cadd82..4899f5f6b9e3 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java
@@ -176,17 +176,17 @@ public Object run() {
                     flowFile = session.putAttribute(flowFile, CoreAttributes.FILENAME.key(), outputFilename);

                     stopWatch.stop();
-                    getLogger().info("Successfully received content from {} for {} in {}", new Object[] {qualifiedPath, flowFile, stopWatch.getDuration()});
+                    getLogger().info("Successfully received content from {} for {} in {}", qualifiedPath, flowFile, stopWatch.getDuration());
                     flowFile = session.putAttribute(flowFile, HADOOP_FILE_URL_ATTRIBUTE, qualifiedPath.toString());
-                    session.getProvenanceReporter().fetch(flowFile, qualifiedPath.toString(), stopWatch.getDuration(TimeUnit.MILLISECONDS));
+                    session.getProvenanceReporter().fetch(flowFile, qualifiedPath.toString(), stopWatch.getDuration(TimeUnit.MILLISECONDS), getSuccessRelationship());
                     session.transfer(flowFile, getSuccessRelationship());
                 } catch (final FileNotFoundException | AccessControlException e) {
-                    getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to failure", new Object[] {qualifiedPath, flowFile, e});
+                    getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to failure", qualifiedPath, flowFile, e);
                     flowFile = session.putAttribute(flowFile, getAttributePrefix() + ".failure.reason", e.getMessage());
                     flowFile = session.penalize(flowFile);
                     session.transfer(flowFile, getFailureRelationship());
                 } catch (final IOException e) {
-                    getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to comms.failure", new Object[] {qualifiedPath, flowFile, e});
+                    getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to comms.failure", qualifiedPath, flowFile, e);
                     flowFile = session.penalize(flowFile);
                     session.transfer(flowFile, getCommsFailureRelationship());
                 } finally {
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
index f1e86613665f..a4d773795672 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
@@ -389,17 +389,17 @@ protected void processBatchOfFiles(final List<Path> files, final ProcessContext
                 if (!keepSourceFiles && !getUserGroupInformation().doAs((PrivilegedExceptionAction<Boolean>) () -> hdfs.delete(file, false))) {
                     getLogger().warn("Could not remove {} from HDFS. Not ingesting this file ...",
-                            new Object[]{file});
+                            file);
                     session.remove(flowFile);
                     continue;
                 }

-                session.getProvenanceReporter().receive(flowFile, file.toString());
+                session.getProvenanceReporter().receive(flowFile, file.toString(), REL_SUCCESS);
                 session.transfer(flowFile, REL_SUCCESS);
                 getLogger().info("retrieved {} from HDFS {} in {} milliseconds at a rate of {}",
-                        new Object[]{flowFile, file, millis, dataRate});
+                        flowFile, file, millis, dataRate);
             } catch (final Throwable t) {
-                getLogger().error("Error retrieving file {} from HDFS due to {}", new Object[]{file, t});
+                getLogger().error("Error retrieving file {} from HDFS due to {}", file, t);
                 session.rollback();
                 context.yield();
             } finally {
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSSequenceFile.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSSequenceFile.java
index 86fe7917335f..fd5cc58fa65c 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSSequenceFile.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSSequenceFile.java
@@ -109,7 +109,7 @@ protected void processBatchOfFiles(final List<Path> files, final ProcessContext
                     logger.warn("Unable to delete path " + file.toString() + " from HDFS. Will likely be picked up over and over...");
                 }
             } catch (Throwable t) {
-                logger.error("Error retrieving file {} from HDFS due to {}", new Object[]{file, t});
+                logger.error("Error retrieving file {} from HDFS due to {}", file, t);
                 session.rollback();
                 context.yield();
             } finally {
@@ -117,14 +117,13 @@ protected void processBatchOfFiles(final List<Path> files, final ProcessContext
                 long totalSize = 0;
                 for (FlowFile flowFile : flowFiles) {
                     totalSize += flowFile.getSize();
-                    session.getProvenanceReporter().receive(flowFile, file.toString());
+                    session.getProvenanceReporter().receive(flowFile, file.toString(), REL_SUCCESS);
                 }
                 if (totalSize > 0) {
                     final String dataRate = stopWatch.calculateDataRate(totalSize);
                     final long millis = stopWatch.getDuration(TimeUnit.MILLISECONDS);
-                    logger.info("Created {} flowFiles from SequenceFile {}. Ingested in {} milliseconds at a rate of {}", new Object[]{
-                            flowFiles.size(), file.toUri().toASCIIString(), millis, dataRate});
-                    logger.info("Transferred flowFiles {} to success", new Object[]{flowFiles});
+                    logger.info("Created {} flowFiles from SequenceFile {}. Ingested in {} milliseconds at a rate of {}", flowFiles.size(), file.toUri().toASCIIString(), millis, dataRate);
+                    logger.info("Transferred flowFiles {} to success", flowFiles);
                     session.transfer(flowFiles, REL_SUCCESS);
                 }
             }
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/MoveHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/MoveHDFS.java
index 33e1fac44c75..fef8b93d21e0 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/MoveHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/MoveHDFS.java
@@ -431,7 +431,7 @@ public Object run() {
                 final Path qualifiedPath = newFile.makeQualified(hdfs.getUri(), hdfs.getWorkingDirectory());
                 flowFile = session.putAttribute(flowFile, HADOOP_FILE_URL_ATTRIBUTE, qualifiedPath.toString());
                 final String transitUri = hdfs.getUri() + StringUtils.prependIfMissing(outputPath, "/");
-                session.getProvenanceReporter().send(flowFile, transitUri);
+                session.getProvenanceReporter().send(flowFile, transitUri, REL_SUCCESS);
                 session.transfer(flowFile, REL_SUCCESS);

             } catch (final Throwable t) {
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
index 91e91ff7b1b0..c42d025e303d 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
@@ -458,7 +458,7 @@ public void process(InputStream in) throws IOException {
                 putFlowFile = session.putAttribute(putFlowFile, TARGET_HDFS_DIR_CREATED_ATTRIBUTE, String.valueOf(targetDirCreated));
                 final Path qualifiedPath = copyFile.makeQualified(hdfs.getUri(), hdfs.getWorkingDirectory());
                 putFlowFile = session.putAttribute(putFlowFile, HADOOP_FILE_URL_ATTRIBUTE, qualifiedPath.toString());
-                session.getProvenanceReporter().send(putFlowFile, qualifiedPath.toString());
+                session.getProvenanceReporter().send(putFlowFile, qualifiedPath.toString(), getSuccessRelationship());

                 session.transfer(putFlowFile, getSuccessRelationship());
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/inotify/GetHDFSEvents.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/inotify/GetHDFSEvents.java
index 5ad19583be65..6b8b44699f82 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/inotify/GetHDFSEvents.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/inotify/GetHDFSEvents.java
@@ -216,7 +216,7 @@ public void process(OutputStream out) throws IOException {
                     final String transitUri = path.startsWith("/") ? "hdfs:/" + path : "hdfs://" + path;
"hdfs:/" + path : "hdfs://" + path; getLogger().debug("Transferring flow file {} and creating provenance event with URI {}.", new Object[]{flowFile, transitUri}); session.transfer(flowFile, REL_SUCCESS); - session.getProvenanceReporter().receive(flowFile, transitUri); + session.getProvenanceReporter().receive(flowFile, transitUri, REL_SUCCESS); } } diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/TestCreateHadoopSequenceFile.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/TestCreateHadoopSequenceFile.java index 1390f8d93f5a..f1b86f81773d 100644 --- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/TestCreateHadoopSequenceFile.java +++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/TestCreateHadoopSequenceFile.java @@ -99,8 +99,8 @@ public void testSimpleCase() throws IOException { } controller.run(3); - List successSeqFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.RELATIONSHIP_SUCCESS); - List failedFlowFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.RELATIONSHIP_FAILURE); + List successSeqFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.REL_SUCCESS); + List failedFlowFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.REL_FAILURE); assertEquals(0, failedFlowFiles.size()); assertEquals(3, successSeqFiles.size()); @@ -116,8 +116,8 @@ public void testSequenceFileSaysValueIsBytesWritable() throws UnsupportedEncodin } controller.run(3); - List successSeqFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.RELATIONSHIP_SUCCESS); - List failedFlowFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.RELATIONSHIP_FAILURE); + List successSeqFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.REL_SUCCESS); + List failedFlowFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.REL_FAILURE); assertEquals(0, failedFlowFiles.size()); assertEquals(3, successSeqFiles.size()); @@ -148,7 +148,7 @@ public void testMergedTarData() throws IOException { try (final FileInputStream fin = new FileInputStream("src/test/resources/testdata/13545312236534130.tar")) { controller.enqueue(fin, attributes); controller.run(); - List successSeqFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.RELATIONSHIP_SUCCESS); + List successSeqFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.REL_SUCCESS); assertEquals(1, successSeqFiles.size()); final byte[] data = successSeqFiles.iterator().next().toByteArray(); // Data should be greater than 1000000 because that's the size of 2 of our input files, @@ -166,7 +166,7 @@ public void testMergedZipData() throws IOException { try (FileInputStream fin = new FileInputStream("src/test/resources/testdata/13545423550275052.zip")){ controller.enqueue(fin, attributes); controller.run(); - List successSeqFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.RELATIONSHIP_SUCCESS); + List successSeqFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.REL_SUCCESS); assertEquals(1, successSeqFiles.size()); final byte[] data = successSeqFiles.iterator().next().toByteArray(); // Data should be greater than 1000000 because that's the size of 2 of our input files, @@ -189,7 +189,7 @@ public void 
testMergedFlowfilePackagedData() throws IOException { controller.enqueue(fin, attributes); controller.run(); - List successSeqFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.RELATIONSHIP_SUCCESS); + List successSeqFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.REL_SUCCESS); assertEquals(1, successSeqFiles.size()); final byte[] data = successSeqFiles.iterator().next().toByteArray(); // Data should be greater than 1000000 because that's the size of 2 of our input files, @@ -216,8 +216,8 @@ public void testSequenceFileBzipCompressionCodec() throws UnsupportedEncodingExc } controller.run(); - List successSeqFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.RELATIONSHIP_SUCCESS); - List failedFlowFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.RELATIONSHIP_FAILURE); + List successSeqFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.REL_SUCCESS); + List failedFlowFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.REL_FAILURE); assertEquals(0, failedFlowFiles.size()); assertEquals(1, successSeqFiles.size()); @@ -263,8 +263,8 @@ public void testSequenceFileDefaultCompressionCodec() throws UnsupportedEncoding } controller.run(); - List successSeqFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.RELATIONSHIP_SUCCESS); - List failedFlowFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.RELATIONSHIP_FAILURE); + List successSeqFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.REL_SUCCESS); + List failedFlowFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.REL_FAILURE); assertEquals(0, failedFlowFiles.size()); assertEquals(1, successSeqFiles.size()); @@ -310,8 +310,8 @@ public void testSequenceFileNoneCompressionCodec() throws UnsupportedEncodingExc } controller.run(); - List successSeqFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.RELATIONSHIP_SUCCESS); - List failedFlowFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.RELATIONSHIP_FAILURE); + List successSeqFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.REL_SUCCESS); + List failedFlowFiles = controller.getFlowFilesForRelationship(CreateHadoopSequenceFile.REL_FAILURE); assertEquals(0, failedFlowFiles.size()); assertEquals(1, successSeqFiles.size()); diff --git a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/AbstractPutHBase.java b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/AbstractPutHBase.java index 614b20ddfd02..df38f1320b43 100644 --- a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/AbstractPutHBase.java +++ b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/AbstractPutHBase.java @@ -193,14 +193,14 @@ public void onTrigger(final ProcessContext context, final ProcessSession session session.transfer(flowFile, REL_FAILURE); } else if (!putFlowFile.isValid()) { if (StringUtils.isBlank(putFlowFile.getTableName())) { - getLogger().error("Missing table name for FlowFile {}; routing to failure", new Object[]{flowFile}); + getLogger().error("Missing table name for FlowFile {}; routing to failure", flowFile); } else if (null == putFlowFile.getRow()) { - getLogger().error("Missing row id for FlowFile {}; routing to failure", new Object[]{flowFile}); + getLogger().error("Missing row id 
for FlowFile {}; routing to failure", flowFile); } else if (putFlowFile.getColumns() == null || putFlowFile.getColumns().isEmpty()) { - getLogger().error("No columns provided for FlowFile {}; routing to failure", new Object[]{flowFile}); + getLogger().error("No columns provided for FlowFile {}; routing to failure", flowFile); } else { // really shouldn't get here, but just in case - getLogger().error("Failed to produce a put for FlowFile {}; routing to failure", new Object[]{flowFile}); + getLogger().error("Failed to produce a put for FlowFile {}; routing to failure", flowFile); } session.transfer(flowFile, REL_FAILURE); } else { @@ -213,7 +213,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } } - getLogger().debug("Sending {} FlowFiles to HBase in {} put operations", new Object[]{flowFiles.size(), tablePuts.size()}); + getLogger().debug("Sending {} FlowFiles to HBase in {} put operations", flowFiles.size(), tablePuts.size()); final long start = System.nanoTime(); final List successes = new ArrayList<>(); @@ -228,7 +228,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } catch (final Exception e) { getLogger().error(e.getMessage(), e); for (PutFlowFile putFlowFile : entry.getValue()) { - getLogger().error("Failed to send {} to HBase due to {}; routing to failure", new Object[]{putFlowFile.getFlowFile(), e}); + getLogger().error("Failed to send {} to HBase due to {}; routing to failure", putFlowFile.getFlowFile(), e); final FlowFile failure = session.penalize(putFlowFile.getFlowFile()); session.transfer(failure, REL_FAILURE); } @@ -236,12 +236,12 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } final long sendMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start); - getLogger().debug("Sent {} FlowFiles to HBase successfully in {} milliseconds", new Object[]{successes.size(), sendMillis}); + getLogger().debug("Sent {} FlowFiles to HBase successfully in {} milliseconds", successes.size(), sendMillis); for (PutFlowFile putFlowFile : successes) { session.transfer(putFlowFile.getFlowFile(), REL_SUCCESS); final String details = "Put " + putFlowFile.getColumns().size() + " cells to HBase"; - session.getProvenanceReporter().send(putFlowFile.getFlowFile(), getTransitUri(putFlowFile), details, sendMillis); + session.getProvenanceReporter().send(putFlowFile.getFlowFile(), getTransitUri(putFlowFile), details, sendMillis, REL_SUCCESS); } } diff --git a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/DeleteHBaseCells.java b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/DeleteHBaseCells.java index 5d0bacd21cc8..90844df35b45 100644 --- a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/DeleteHBaseCells.java +++ b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/DeleteHBaseCells.java @@ -135,7 +135,7 @@ protected void doDelete(ProcessContext context, ProcessSession session) throws E is.close(); clientService.deleteCells(tableName, deletes); for (int index = 0; index < rowKeys.size(); index++) { //Could be many row keys in one flowfile. 
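Editor's note: for SEND events the relationship argument is appended to the details/duration overloads as well, and the HBase processors below thread it through their helper methods so the reported relationship tracks the success or failure path. A hedged sketch mirroring the AbstractPutHBase success path above (all names are from that diff; the snippet is illustrative, not the full method):

    // Sketch: batched put reported with details, elapsed millis and the routing relationship
    final String details = "Put " + putFlowFile.getColumns().size() + " cells to HBase";
    session.getProvenanceReporter().send(putFlowFile.getFlowFile(), getTransitUri(putFlowFile), details, sendMillis, REL_SUCCESS);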
diff --git a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/DeleteHBaseCells.java b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/DeleteHBaseCells.java
index 5d0bacd21cc8..90844df35b45 100644
--- a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/DeleteHBaseCells.java
+++ b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/DeleteHBaseCells.java
@@ -135,7 +135,7 @@ protected void doDelete(ProcessContext context, ProcessSession session) throws E
         is.close();
         clientService.deleteCells(tableName, deletes);
         for (int index = 0; index < rowKeys.size(); index++) { //Could be many row keys in one flowfile.
-            session.getProvenanceReporter().invokeRemoteProcess(input, clientService.toTransitUri(tableName, rowKeys.get(index)));
+            session.getProvenanceReporter().invokeRemoteProcess(input, clientService.toTransitUri(tableName, rowKeys.get(index)), REL_SUCCESS);
         }
         session.transfer(input, REL_SUCCESS);
diff --git a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/DeleteHBaseRow.java b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/DeleteHBaseRow.java
index fb978af6cd07..df1d1a57a93e 100644
--- a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/DeleteHBaseRow.java
+++ b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/DeleteHBaseRow.java
@@ -151,12 +151,12 @@ protected void doDelete(ProcessContext context, ProcessSession session) throws E
             } else {
                 final String transitUrl = clientService.toTransitUri(tableName, flowFile.getAttribute(ROWKEY_END));
                 session.transfer(flowFile, REL_SUCCESS);
-                session.getProvenanceReporter().invokeRemoteProcess(flowFile, transitUrl);
+                session.getProvenanceReporter().invokeRemoteProcess(flowFile, transitUrl, REL_SUCCESS);
             }
         } else {
             String transitUrl = doDeleteFromAttribute(flowFile, context, tableName, charset, visibility);
             session.transfer(flowFile, REL_SUCCESS);
-            session.getProvenanceReporter().invokeRemoteProcess(flowFile, transitUrl);
+            session.getProvenanceReporter().invokeRemoteProcess(flowFile, transitUrl, REL_SUCCESS);
         }
     } catch (Exception ex) {
         getLogger().error(ex.getMessage(), ex);
diff --git a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/FetchHBaseRow.java b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/FetchHBaseRow.java
index 1a29d50bac13..482928b1f4d2 100644
--- a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/FetchHBaseRow.java
+++ b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/FetchHBaseRow.java
@@ -291,9 +291,9 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro
         final String transitUri = hBaseClientService.toTransitUri(tableName, rowId);
         // Regardless of where the result is written to, emit a fetch event.
-        session.getProvenanceReporter().fetch(handlerFlowFile, transitUri);
+        session.getProvenanceReporter().fetch(handlerFlowFile, transitUri, REL_SUCCESS);
         if (!destination.equals(DESTINATION_CONTENT.getValue())) {
-            session.getProvenanceReporter().modifyAttributes(handlerFlowFile, "Added attributes to FlowFile from " + transitUri);
+            session.getProvenanceReporter().modifyAttributes(handlerFlowFile, "Added attributes to FlowFile from " + transitUri, REL_SUCCESS);
         }
         session.transfer(handlerFlowFile, REL_SUCCESS);
diff --git a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/GetHBase.java b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/GetHBase.java
index d11fcd5042ce..74e2475d2280 100644
--- a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/GetHBase.java
+++ b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/GetHBase.java
@@ -364,7 +364,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
                 attributes.put("mime.type", "application/json");
                 flowFile = session.putAllAttributes(flowFile, attributes);
-                session.getProvenanceReporter().receive(flowFile, hBaseClientService.toTransitUri(tableName, rowKeyString));
+                session.getProvenanceReporter().receive(flowFile, hBaseClientService.toTransitUri(tableName, rowKeyString), REL_SUCCESS);
                 session.transfer(flowFile, REL_SUCCESS);
                 getLogger().debug("Received {} from HBase with row key {}", new Object[]{flowFile, rowKeyString});
diff --git a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/PutHBaseRecord.java b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/PutHBaseRecord.java
index d9b67c700e8b..60fb4b17858c 100644
--- a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/PutHBaseRecord.java
+++ b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/PutHBaseRecord.java
@@ -284,7 +284,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
         if (!failed) {
             if (columns > 0) {
-                sendProvenance(session, flowFile, columns, System.nanoTime() - start, last);
+                sendProvenance(session, flowFile, columns, System.nanoTime() - start, last, REL_SUCCESS);
             }
             flowFile = session.removeAttribute(flowFile, "restart.index");
             session.transfer(flowFile, REL_SUCCESS);
@@ -292,16 +292,16 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
             String restartIndex = Integer.toString(index - flowFiles.size());
             flowFile = session.putAttribute(flowFile, "restart.index", restartIndex);
             if (columns > 0) {
-                sendProvenance(session, flowFile, columns, System.nanoTime() - start, last);
+                sendProvenance(session, flowFile, columns, System.nanoTime() - start, last, REL_FAILURE);
             }
             flowFile = session.penalize(flowFile);
             session.transfer(flowFile, REL_FAILURE);
         }
     }

-    private void sendProvenance(ProcessSession session, FlowFile flowFile, int columns, long time, PutFlowFile pff) {
+    private void sendProvenance(ProcessSession session, FlowFile flowFile, int columns, long time, PutFlowFile pff, Relationship relationship) {
         final String details = String.format("Put %d cells to HBase.", columns);
-        session.getProvenanceReporter().send(flowFile, getTransitUri(pff), details, time);
+        session.getProvenanceReporter().send(flowFile, getTransitUri(pff), details, time, relationship);
     }

     @Override
diff --git a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/ScanHBase.java b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/ScanHBase.java
index 849e5266e1db..7f2961ccc0ad 100644
--- a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/ScanHBase.java
+++ b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/ScanHBase.java
@@ -470,7 +470,7 @@ private void finalizeFlowFile(final ProcessSession session, final HBaseClientSer
             flowFile = session.putAttribute(flowFile, "scanhbase.error", (e == null ? ioe.get() : e).toString());
             rel = REL_FAILURE;
         } else {
-            session.getProvenanceReporter().receive(flowFile, hBaseClientService.toTransitUri(tableName, "{ids}"));
+            session.getProvenanceReporter().receive(flowFile, hBaseClientService.toTransitUri(tableName, "{ids}"), rel);
         }
         session.transfer(flowFile, rel);
     }
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/ConvertAvroToORC.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/ConvertAvroToORC.java
index 1bf126e582cb..08822753b8bc 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/ConvertAvroToORC.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/ConvertAvroToORC.java
@@ -283,7 +283,10 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
             flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), ORC_MIME_TYPE);
             flowFile = session.putAttribute(flowFile, CoreAttributes.FILENAME.key(), newFilename.toString());
             session.transfer(flowFile, REL_SUCCESS);
-            session.getProvenanceReporter().modifyContent(flowFile, "Converted " + totalRecordCount.get() + " records", System.currentTimeMillis() - startTime);
+            session.getProvenanceReporter().modifyContent(flowFile,
+                    "Converted " + totalRecordCount.get() + " records",
+                    System.currentTimeMillis() - startTime,
+                    REL_SUCCESS);
         } catch (ProcessException | IllegalArgumentException e) {
             getLogger().error("Failed to convert {} from Avro to ORC due to {}; transferring to failure", new Object[]{flowFile, e});
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/PutHiveQL.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/PutHiveQL.java
index e142b4b4b8a0..409e36f9048c 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/PutHiveQL.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/PutHiveQL.java
@@ -263,7 +263,7 @@ private FunctionContext(boolean rollbackOnFailure, Charset charset, String state
             final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - fc.startNanos);
             final FlowFile updatedFlowFile = session.putAllAttributes(flowFile, toQueryTableAttributes(tableNames));
-            session.getProvenanceReporter().send(updatedFlowFile, fc.connectionUrl, transmissionMillis, true);
+            session.getProvenanceReporter().send(updatedFlowFile, fc.connectionUrl, transmissionMillis, true, REL_SUCCESS);
             result.routeTo(flowFile, REL_SUCCESS);
         }, onFlowFileError(context, session, result));
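[Editor's note] The change that repeats across every bundle in this patch is mechanical: each ProvenanceReporter call gains a trailing Relationship argument, and the value passed should match the relationship the FlowFile is then transferred to. A minimal sketch of the pattern, using the send(FlowFile, String, Relationship) overload introduced by this patch (the class name, transit URI, and relationship are illustrative placeholders):

    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.Relationship;

    class ReporterPatternSketch {
        static final Relationship REL_SUCCESS = new Relationship.Builder().name("success").build();

        void report(final ProcessSession session, final FlowFile flowFile) {
            // Before: session.getProvenanceReporter().send(flowFile, "hbase://default/mytable");
            // After: the event also records which relationship the FlowFile took
            session.getProvenanceReporter().send(flowFile, "hbase://default/mytable", REL_SUCCESS);
            session.transfer(flowFile, REL_SUCCESS);
        }
    }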
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/PutHiveStreaming.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/PutHiveStreaming.java
index 3923a1648096..20b5afdc923d 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/PutHiveStreaming.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/PutHiveStreaming.java
@@ -603,7 +603,7 @@ private void transferFlowFiles(ProcessSession session, RoutingResult result, Hiv
                 updateAttributes.put(HIVE_STREAMING_RECORD_COUNT_ATTR, Integer.toString(successfulRecordCount.get()));
                 updateAttributes.put(AbstractHiveQLProcessor.ATTR_OUTPUT_TABLES, options.getQualifiedTableName());
                 successFlowFile.set(session.putAllAttributes(successFlowFile.get(), updateAttributes));
-                session.getProvenanceReporter().send(successFlowFile.get(), options.getMetaStoreURI());
+                session.getProvenanceReporter().send(successFlowFile.get(), options.getMetaStoreURI(), REL_SUCCESS);
                 result.routeTo(successFlowFile.get(), REL_SUCCESS);
             } else {
                 session.remove(successFlowFile.get());
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/SelectHiveQL.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/SelectHiveQL.java
index 3a1c911effe0..443dd00d83f2 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/SelectHiveQL.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/SelectHiveQL.java
@@ -463,15 +463,15 @@ private void onTrigger(final ProcessContext context, final ProcessSession sessio
                     flowfile = session.putAllAttributes(flowfile, attributes);

                     logger.info("{} contains {} " + outputFormat + " records; transferring to 'success'",
-                            new Object[]{flowfile, nrOfRows.get()});
+                            flowfile, nrOfRows.get());

                     if (context.hasIncomingConnection()) {
                         // If the flow file came from an incoming connection, issue a Fetch provenance event
                         session.getProvenanceReporter().fetch(flowfile, dbcpService.getConnectionURL(),
-                                "Retrieved " + nrOfRows.get() + " rows", stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+                                "Retrieved " + nrOfRows.get() + " rows", stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
                     } else {
                         // If we created a flow file from rows received from Hive, issue a Receive provenance event
-                        session.getProvenanceReporter().receive(flowfile, dbcpService.getConnectionURL(), stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+                        session.getProvenanceReporter().receive(flowfile, dbcpService.getConnectionURL(), stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
                     }
                     resultSetFlowFiles.add(flowfile);
                 } else {
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/UpdateHiveTable.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/UpdateHiveTable.java
index 01c0ce14f531..1f754b3d892f 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/UpdateHiveTable.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/processors/hive/UpdateHiveTable.java
@@ -462,7 +462,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro
             }
             attributes.put(ATTR_OUTPUT_TABLE, tableName);
             flowFile = session.putAllAttributes(flowFile, attributes);
-            session.getProvenanceReporter().invokeRemoteProcess(flowFile, dbcpService.getConnectionURL());
+            session.getProvenanceReporter().invokeRemoteProcess(flowFile, dbcpService.getConnectionURL(), REL_SUCCESS);
             session.transfer(flowFile, REL_SUCCESS);
         }
     } catch (IOException | SQLException e) {
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/PutHive3QL.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/PutHive3QL.java
index 6e4b822a61da..43809fb4c144 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/PutHive3QL.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/PutHive3QL.java
@@ -264,7 +264,7 @@ private FunctionContext(boolean rollbackOnFailure, Charset charset, String state
             final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - fc.startNanos);
             final FlowFile updatedFlowFile = session.putAllAttributes(flowFile, toQueryTableAttributes(tableNames));
-            session.getProvenanceReporter().send(updatedFlowFile, fc.connectionUrl, transmissionMillis, true);
+            session.getProvenanceReporter().send(updatedFlowFile, fc.connectionUrl, transmissionMillis, true, REL_SUCCESS);
             result.routeTo(flowFile, REL_SUCCESS);
         }, onFlowFileError(context, session, result));
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/PutHive3Streaming.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/PutHive3Streaming.java
index 520e343d22c7..1d5dad402a43 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/PutHive3Streaming.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/PutHive3Streaming.java
@@ -504,7 +504,6 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro
                 updateAttributes.put(HIVE_STREAMING_RECORD_COUNT_ATTR, Long.toString(hiveStreamingConnection.getConnectionStats().getRecordsWritten()));
                 updateAttributes.put(ATTR_OUTPUT_TABLES, options.getQualifiedTableName());
                 flowFile = session.putAllAttributes(flowFile, updateAttributes);
-                session.getProvenanceReporter().send(flowFile, hiveStreamingConnection.getMetastoreUri());
             } catch (TransactionError te) {
                 if (rollbackOnFailure) {
                     throw new ProcessException(te.getLocalizedMessage(), te);
@@ -520,10 +519,12 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro
                             RecordReader.class.getSimpleName(), flowFile, rrfe
                     );
+                    session.getProvenanceReporter().send(flowFile, hiveStreamingConnection.getMetastoreUri(), REL_FAILURE);
                     session.transfer(flowFile, REL_FAILURE);
                     return null;
                 }
             }
+            session.getProvenanceReporter().send(flowFile, hiveStreamingConnection.getMetastoreUri(), REL_SUCCESS);
             session.transfer(flowFile, REL_SUCCESS);
         } catch (InvalidTable | SerializationError | StreamingIOFailure | IOException e) {
             if (rollbackOnFailure) {
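[Editor's note] PutHive3Streaming above is the one spot where a call is moved rather than annotated in place: the single send() is split so that each route emits the event with its own relationship before the transfer. A sketch of that ordering (writeRecords and metastoreUri are illustrative stand-ins, not the Hive implementation):

    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.Relationship;

    class FailureRouteSketch {
        static final Relationship REL_SUCCESS = new Relationship.Builder().name("success").build();
        static final Relationship REL_FAILURE = new Relationship.Builder().name("failure").build();

        void onTrigger(final ProcessSession session, final FlowFile flowFile, final String metastoreUri) {
            try {
                writeRecords(flowFile); // stand-in for the streaming write
                session.getProvenanceReporter().send(flowFile, metastoreUri, REL_SUCCESS);
                session.transfer(flowFile, REL_SUCCESS);
            } catch (final Exception e) {
                // Emit the event with the failure relationship before routing to failure
                session.getProvenanceReporter().send(flowFile, metastoreUri, REL_FAILURE);
                session.transfer(flowFile, REL_FAILURE);
            }
        }

        private void writeRecords(final FlowFile flowFile) throws Exception {
        }
    }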
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/SelectHive3QL.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/SelectHive3QL.java
index a845be83c77b..804890bc2ed3 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/SelectHive3QL.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/SelectHive3QL.java
@@ -481,15 +481,15 @@ private void onTrigger(final ProcessContext context, final ProcessSession sessio
                     flowfile = session.putAllAttributes(flowfile, attributes);

                     logger.info("{} contains {} " + outputFormat + " records; transferring to 'success'",
-                            new Object[]{flowfile, nrOfRows.get()});
+                            flowfile, nrOfRows.get());

                     if (context.hasIncomingConnection()) {
                         // If the flow file came from an incoming connection, issue a Fetch provenance event
                         session.getProvenanceReporter().fetch(flowfile, dbcpService.getConnectionURL(),
-                                "Retrieved " + nrOfRows.get() + " rows", stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+                                "Retrieved " + nrOfRows.get() + " rows", stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
                     } else {
                         // If we created a flow file from rows received from Hive, issue a Receive provenance event
-                        session.getProvenanceReporter().receive(flowfile, dbcpService.getConnectionURL(), stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+                        session.getProvenanceReporter().receive(flowfile, dbcpService.getConnectionURL(), stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
                     }
                     resultSetFlowFiles.add(flowfile);
                 } else {
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/TriggerHiveMetaStoreEvent.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/TriggerHiveMetaStoreEvent.java
index 89961a023e5d..3f16472fbc73 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/TriggerHiveMetaStoreEvent.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/TriggerHiveMetaStoreEvent.java
@@ -283,6 +283,7 @@ public void doOnTrigger(ProcessContext context, ProcessSession session, FlowFile
         final String databaseName = context.getProperty(DATABASE_NAME).evaluateAttributeExpressions(flowFile).getValue();
         final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue();
         final String path = context.getProperty(PATH).evaluateAttributeExpressions(flowFile).getValue();
+        final String hiveMetastoreUrl = context.getProperty(METASTORE_URI).evaluateAttributeExpressions(flowFile).getValue();

         try (final HiveMetaStoreClient metaStoreClient = new HiveMetaStoreClient(hiveConfig)) {
             final Table table = metaStoreClient.getTable(catalogName, databaseName, tableName);
@@ -316,7 +317,7 @@ public void doOnTrigger(ProcessContext context, ProcessSession session, FlowFile
             session.transfer(flowFile, REL_FAILURE);
             return;
         }
-
+        session.getProvenanceReporter().invokeRemoteProcess(flowFile, hiveMetastoreUrl, REL_SUCCESS);
         session.transfer(flowFile, REL_SUCCESS);
     }
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/UpdateHive3Table.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/UpdateHive3Table.java
index 632220049ce9..e0de87547bf7 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/UpdateHive3Table.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/UpdateHive3Table.java
@@ -462,7 +462,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro
             }
             attributes.put(ATTR_OUTPUT_TABLE, tableName);
             flowFile = session.putAllAttributes(flowFile, attributes);
-            session.getProvenanceReporter().invokeRemoteProcess(flowFile, dbcpService.getConnectionURL());
+            session.getProvenanceReporter().invokeRemoteProcess(flowFile, dbcpService.getConnectionURL(), REL_SUCCESS);
             session.transfer(flowFile, REL_SUCCESS);
         }
     } catch (IOException | SQLException e) {
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive_1_1-processors/src/main/java/org/apache/nifi/processors/hive/PutHive_1_1QL.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive_1_1-processors/src/main/java/org/apache/nifi/processors/hive/PutHive_1_1QL.java
index 50efa4f4ed7e..29a6d2925790 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive_1_1-processors/src/main/java/org/apache/nifi/processors/hive/PutHive_1_1QL.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive_1_1-processors/src/main/java/org/apache/nifi/processors/hive/PutHive_1_1QL.java
@@ -263,7 +263,7 @@ private FunctionContext(boolean rollbackOnFailure, Charset charset, String state
             final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - fc.startNanos);
             final FlowFile updatedFlowFile = session.putAllAttributes(flowFile, toQueryTableAttributes(tableNames));
-            session.getProvenanceReporter().send(updatedFlowFile, fc.connectionUrl, transmissionMillis, true);
+            session.getProvenanceReporter().send(updatedFlowFile, fc.connectionUrl, transmissionMillis, true, REL_SUCCESS);
             result.routeTo(flowFile, REL_SUCCESS);
         }, onFlowFileError(context, session, result));
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive_1_1-processors/src/main/java/org/apache/nifi/processors/hive/SelectHive_1_1QL.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive_1_1-processors/src/main/java/org/apache/nifi/processors/hive/SelectHive_1_1QL.java
index a239bba0890c..fdde3ad7252e 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive_1_1-processors/src/main/java/org/apache/nifi/processors/hive/SelectHive_1_1QL.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive_1_1-processors/src/main/java/org/apache/nifi/processors/hive/SelectHive_1_1QL.java
@@ -450,10 +450,10 @@ private void onTrigger(final ProcessContext context, final ProcessSession sessio
                     if (context.hasIncomingConnection()) {
                         // If the flow file came from an incoming connection, issue a Fetch provenance event
                         session.getProvenanceReporter().fetch(flowfile, dbcpService.getConnectionURL(),
-                                "Retrieved " + nrOfRows.get() + " rows", stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+                                "Retrieved " + nrOfRows.get() + " rows", stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
                     } else {
                         // If we created a flow file from rows received from Hive, issue a Receive provenance event
-                        session.getProvenanceReporter().receive(flowfile, dbcpService.getConnectionURL(), stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+                        session.getProvenanceReporter().receive(flowfile, dbcpService.getConnectionURL(), stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
                     }
                     resultSetFlowFiles.add(flowfile);
                 } else {
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive_1_1-processors/src/main/java/org/apache/nifi/processors/hive/UpdateHive_1_1Table.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive_1_1-processors/src/main/java/org/apache/nifi/processors/hive/UpdateHive_1_1Table.java
index 1f7129b64f73..0a99e9aa99af 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive_1_1-processors/src/main/java/org/apache/nifi/processors/hive/UpdateHive_1_1Table.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive_1_1-processors/src/main/java/org/apache/nifi/processors/hive/UpdateHive_1_1Table.java
@@ -467,7 +467,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro
             }
             attributes.put(ATTR_OUTPUT_TABLE, tableName);
             flowFile = session.putAllAttributes(flowFile, attributes);
-            session.getProvenanceReporter().invokeRemoteProcess(flowFile, dbcpService.getConnectionURL());
+            session.getProvenanceReporter().invokeRemoteProcess(flowFile, dbcpService.getConnectionURL(), REL_SUCCESS);
             session.transfer(flowFile, REL_SUCCESS);
         }
     } catch (IOException | SQLException e) {
diff --git a/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/main/java/org/apache/nifi/processors/hubspot/GetHubSpot.java b/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/main/java/org/apache/nifi/processors/hubspot/GetHubSpot.java
index d624b58afe87..c235290942b2 100644
--- a/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/main/java/org/apache/nifi/processors/hubspot/GetHubSpot.java
+++ b/nifi-nar-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/main/java/org/apache/nifi/processors/hubspot/GetHubSpot.java
@@ -239,7 +239,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
         if (total.get() > 0) {
             flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), "application/json");
             session.transfer(flowFile, REL_SUCCESS);
-            session.getProvenanceReporter().receive(flowFile, uri.toString());
+            session.getProvenanceReporter().receive(flowFile, uri.toString(), REL_SUCCESS);
         } else {
             getLogger().debug("Empty response when requested HubSpot endpoint: [{}]", endpoint);
             context.yield();
diff --git a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
index 22b1ec550755..d276228f0b89 100644
--- a/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
+++ b/nifi-nar-bundles/nifi-iceberg-bundle/nifi-iceberg-processors/src/main/java/org/apache/nifi/processors/iceberg/PutIceberg.java
@@ -294,7 +294,7 @@ public void doOnTrigger(ProcessContext context, ProcessSession session, FlowFile
         flowFile = session.putAttribute(flowFile, ICEBERG_RECORD_COUNT, String.valueOf(recordCount));

         final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
-        session.getProvenanceReporter().send(flowFile, table.location(), transferMillis);
+        session.getProvenanceReporter().send(flowFile, table.location(), transferMillis, REL_SUCCESS);
         session.transfer(flowFile, REL_SUCCESS);
     }
diff --git a/nifi-nar-bundles/nifi-ignite-bundle/nifi-ignite-processors/src/main/java/org/apache/nifi/processors/ignite/cache/PutIgniteCache.java b/nifi-nar-bundles/nifi-ignite-bundle/nifi-ignite-processors/src/main/java/org/apache/nifi/processors/ignite/cache/PutIgniteCache.java
index 9c6748ec2e54..e3b70bfb5455 100644
--- a/nifi-nar-bundles/nifi-ignite-bundle/nifi-ignite-processors/src/main/java/org/apache/nifi/processors/ignite/cache/PutIgniteCache.java
+++ b/nifi-nar-bundles/nifi-ignite-bundle/nifi-ignite-processors/src/main/java/org/apache/nifi/processors/ignite/cache/PutIgniteCache.java
@@ -291,7 +291,7 @@ public void process(final InputStream in) throws IOException {
             session.transfer(successfulFlowFiles, REL_SUCCESS);
             for (FlowFile flowFile : successfulFlowFiles) {
                 String key = context.getProperty(IGNITE_CACHE_ENTRY_KEY).evaluateAttributeExpressions(flowFile).getValue();
-                session.getProvenanceReporter().send(flowFile, "ignite://cache/" + getIgniteCache().getName() + "/" + key);
+                session.getProvenanceReporter().send(flowFile, "ignite://cache/" + getIgniteCache().getName() + "/" + key, REL_SUCCESS);
             }
         }
diff --git a/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/ExecuteInfluxDBQuery.java b/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/ExecuteInfluxDBQuery.java
index 748b5899eefc..ac2ca58b680c 100644
--- a/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/ExecuteInfluxDBQuery.java
+++ b/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/ExecuteInfluxDBQuery.java
@@ -229,7 +229,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
             if ( ! hasErrors(result) ) {
                 outgoingFlowFile = session.putAttribute(outgoingFlowFile, INFLUX_DB_EXECUTED_QUERY, String.valueOf(query));
                 session.getProvenanceReporter().send(outgoingFlowFile, makeProvenanceUrl(context, database),
-                    (endTimeMillis - startTimeMillis));
+                    (endTimeMillis - startTimeMillis), REL_SUCCESS);
                 session.transfer(outgoingFlowFile, REL_SUCCESS);
             } else {
                 outgoingFlowFile = populateErrorAttributes(session, outgoingFlowFile, query, queryErrors(result));
diff --git a/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/PutInfluxDB.java b/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/PutInfluxDB.java
index 6a77c5237ea9..9493dd45477f 100644
--- a/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/PutInfluxDB.java
+++ b/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/PutInfluxDB.java
@@ -175,7 +175,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
             session.transfer(flowFile, REL_SUCCESS);
             session.getProvenanceReporter().send(flowFile, new StringBuilder("influxdb://").append(context.getProperty(INFLUX_DB_URL).evaluateAttributeExpressions().getValue()).append("/").append(database).toString(),
-                (endTimeMillis - startTimeMillis));
+                (endTimeMillis - startTimeMillis), REL_SUCCESS);
         } catch (InfluxDBIOException exception) {
             flowFile = session.putAttribute(flowFile, INFLUX_DB_ERROR_MESSAGE, String.valueOf(exception.getMessage()));
             if ( exception.getCause() instanceof SocketTimeoutException ) {
diff --git a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/ConsumeJMS.java b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/ConsumeJMS.java
index c3027bff7640..4346ac8b66ab 100644
--- a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/ConsumeJMS.java
+++ b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/ConsumeJMS.java
@@ -347,7 +347,7 @@ private void processSingleMessage(ProcessSession processSession, JMSConsumer con
         try {
             final FlowFile flowFile = createFlowFileFromMessage(processSession, destinationName, response);

-            processSession.getProvenanceReporter().receive(flowFile, destinationName);
+            processSession.getProvenanceReporter().receive(flowFile, destinationName, REL_SUCCESS);
             processSession.transfer(flowFile, REL_SUCCESS);
             processSession.commitAsync(
                     () -> withLog(() -> acknowledge(response)),
@@ -393,7 +393,7 @@ private void processMessageSet(ProcessContext context, ProcessSession session, J
         flowFileWriter.write(session, jmsResponses, new FlowFileWriterCallback<JMSResponse>() {
             @Override
             public void onSuccess(FlowFile flowFile, List<JMSResponse> processedMessages, List<JMSResponse> failedMessages) {
-                session.getProvenanceReporter().receive(flowFile, destinationName);
+                session.getProvenanceReporter().receive(flowFile, destinationName, REL_SUCCESS);
                 session.adjustCounter(COUNTER_RECORDS_RECEIVED, processedMessages.size() + failedMessages.size(), false);
                 session.adjustCounter(COUNTER_RECORDS_PROCESSED, processedMessages.size(), false);
diff --git a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/PublishJMS.java b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/PublishJMS.java
index 333fcad1e2bf..1006c93eb27e 100644
--- a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/PublishJMS.java
+++ b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/PublishJMS.java
@@ -254,7 +254,8 @@ public void onSuccess(FlowFile flowFile, int processedRecords, boolean isRecover
                             flowFile,
                             destinationName,
                             String.format(eventTemplate, processedRecords),
-                            transmissionMillis);
+                            transmissionMillis,
+                            REL_SUCCESS);
                     processSession.transfer(flowFile, REL_SUCCESS);
                 }

@@ -265,7 +266,8 @@ public void onFailure(FlowFile flowFile, int processedRecords, long transmission
                             flowFile,
                             destinationName,
                             String.format(PROVENANCE_EVENT_DETAILS_ON_RECORDSET_FAILURE, processedRecords),
-                            transmissionMillis);
+                            transmissionMillis,
+                            REL_FAILURE);
                     handleException(context, processSession, publisher, flowFile, e);
                 }

@@ -274,7 +276,7 @@ public void onFailure(FlowFile flowFile, int processedRecords, long transmission
             } else {
                 processStandardFlowFile(context, processSession, publisher, flowFile, destinationName, charset, attributesToSend);
                 processSession.transfer(flowFile, REL_SUCCESS);
-                processSession.getProvenanceReporter().send(flowFile, destinationName);
+                processSession.getProvenanceReporter().send(flowFile, destinationName, REL_SUCCESS);
             }
         } catch (Exception e) {
             handleException(context, processSession, publisher, flowFile, e);
diff --git a/nifi-nar-bundles/nifi-jolt-record-bundle/nifi-jolt-record-processors/src/main/java/org/apache/nifi/processors/jolt/record/JoltTransformRecord.java b/nifi-nar-bundles/nifi-jolt-record-bundle/nifi-jolt-record-processors/src/main/java/org/apache/nifi/processors/jolt/record/JoltTransformRecord.java
index 5f0bf54edbdd..c30a07fde79c 100644
--- a/nifi-nar-bundles/nifi-jolt-record-bundle/nifi-jolt-record-processors/src/main/java/org/apache/nifi/processors/jolt/record/JoltTransformRecord.java
+++ b/nifi-nar-bundles/nifi-jolt-record-bundle/nifi-jolt-record-processors/src/main/java/org/apache/nifi/processors/jolt/record/JoltTransformRecord.java
@@ -406,7 +406,7 @@ record = transformedFirstRecords.get(i);
             final String transformType = context.getProperty(JOLT_TRANSFORM).getValue();
             transformed = session.putAllAttributes(transformed, attributes);
-            session.getProvenanceReporter().modifyContent(transformed, "Modified With " + transformType, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+            session.getProvenanceReporter().modifyContent(transformed, "Modified With " + transformType, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
             logger.debug("Transform completed {}", original);
         }
     } catch (final Exception e) {
diff --git a/nifi-nar-bundles/nifi-jslt-bundle/nifi-jslt-processors/src/main/java/org/apache/nifi/processors/jslt/JSLTTransformJSON.java b/nifi-nar-bundles/nifi-jslt-bundle/nifi-jslt-processors/src/main/java/org/apache/nifi/processors/jslt/JSLTTransformJSON.java
index 312923926e2a..86d36ad851b0 100644
--- a/nifi-nar-bundles/nifi-jslt-bundle/nifi-jslt-processors/src/main/java/org/apache/nifi/processors/jslt/JSLTTransformJSON.java
+++ b/nifi-nar-bundles/nifi-jslt-bundle/nifi-jslt-processors/src/main/java/org/apache/nifi/processors/jslt/JSLTTransformJSON.java
@@ -284,7 +284,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
             transformed = session.putAttribute(transformed, CoreAttributes.MIME_TYPE.key(), "application/json");
             session.transfer(transformed, REL_SUCCESS);
-            session.getProvenanceReporter().modifyContent(transformed, "Modified With " + transform, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+            session.getProvenanceReporter().modifyContent(transformed, "Modified With " + transform, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
             stopWatch.stop();
             getLogger().debug("JSLT Transform completed {}", original);
         } catch (final Exception e) {
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-1-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumerLease.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-1-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumerLease.java
index 6279b6ea499b..03dd4bbefa06 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-1-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumerLease.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-1-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumerLease.java
@@ -473,7 +473,7 @@ private void handleParseFailure(final ConsumerRecord<byte[], byte[]> consumerRec
         failureFlowFile = session.putAllAttributes(failureFlowFile, attributes);

         final String transitUri = StandardTransitUriProvider.getTransitUri(securityProtocol, bootstrapServers, consumerRecord.topic());
-        session.getProvenanceReporter().receive(failureFlowFile, transitUri);
+        session.getProvenanceReporter().receive(failureFlowFile, transitUri, REL_PARSE_FAILURE);
         session.transfer(failureFlowFile, REL_PARSE_FAILURE);
@@ -639,7 +639,7 @@ private void populateAttributes(final BundleTracker tracker) {
             final FlowFile newFlowFile = getProcessSession().putAllAttributes(tracker.flowFile, kafkaAttrs);
             final long executionDurationMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - leaseStartNanos);
             final String transitUri = StandardTransitUriProvider.getTransitUri(securityProtocol, bootstrapServers, tracker.topic);
-            getProcessSession().getProvenanceReporter().receive(newFlowFile, transitUri, executionDurationMillis);
+            getProcessSession().getProvenanceReporter().receive(newFlowFile, transitUri, executionDurationMillis, REL_SUCCESS);
             tracker.updateFlowFile(newFlowFile);
         }
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-1-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafkaRecord_1_0.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-1-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafkaRecord_1_0.java
index 56165625fa48..5be56deefe80 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-1-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafkaRecord_1_0.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-1-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafkaRecord_1_0.java
@@ -506,7 +506,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
                 session.adjustCounter("Messages Sent", msgCount, true);

                 final String transitUri = StandardTransitUriProvider.getTransitUri(securityProtocol, bootstrapServers, topic);
-                session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis);
+                session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis, REL_SUCCESS);
                 session.transfer(success, REL_SUCCESS);
             }
         }
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-1-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafka_1_0.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-1-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafka_1_0.java
index 5ba93eb80dac..b90998dcfacc 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-1-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafka_1_0.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-1-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafka_1_0.java
@@ -475,7 +475,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
                 session.adjustCounter("Messages Sent", msgCount, true);

                 final String transitUri = StandardTransitUriProvider.getTransitUri(securityProtocol, bootstrapServers, topic);
-                session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis);
+                session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis, REL_SUCCESS);
                 session.transfer(success, REL_SUCCESS);
             }
         }
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumerLease.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumerLease.java
index e7f64599902b..887fea2d8f9c 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumerLease.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumerLease.java
@@ -488,7 +488,7 @@ private void handleParseFailure(final ConsumerRecord<byte[], byte[]> consumerRec
         failureFlowFile = session.putAllAttributes(failureFlowFile, attributes);

         final String transitUri = StandardTransitUriProvider.getTransitUri(securityProtocol, bootstrapServers, consumerRecord.topic());
-        session.getProvenanceReporter().receive(failureFlowFile, transitUri);
+        session.getProvenanceReporter().receive(failureFlowFile, transitUri, REL_PARSE_FAILURE);
         session.transfer(failureFlowFile, REL_PARSE_FAILURE);
@@ -661,7 +661,7 @@ private void populateAttributes(final BundleTracker tracker) {
             final FlowFile newFlowFile = getProcessSession().putAllAttributes(tracker.flowFile, kafkaAttrs);
             final long executionDurationMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - leaseStartNanos);
             final String transitUri = StandardTransitUriProvider.getTransitUri(securityProtocol, bootstrapServers, tracker.topic);
-            getProcessSession().getProvenanceReporter().receive(newFlowFile, transitUri, executionDurationMillis);
+            getProcessSession().getProvenanceReporter().receive(newFlowFile, transitUri, executionDurationMillis, REL_SUCCESS);
             tracker.updateFlowFile(newFlowFile);
         }
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafkaRecord_2_0.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafkaRecord_2_0.java
index 4879afa7fc8e..5fafd50c14ca 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafkaRecord_2_0.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafkaRecord_2_0.java
@@ -521,7 +521,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
                 session.adjustCounter("Messages Sent", msgCount, true);

                 final String transitUri = StandardTransitUriProvider.getTransitUri(securityProtocol, bootstrapServers, topic);
-                session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis);
+                session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis, REL_SUCCESS);
                 session.transfer(success, REL_SUCCESS);
             }
         } catch (final ProducerFencedException | OutOfOrderSequenceException | AuthorizationException e) {
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafka_2_0.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafka_2_0.java
index 1722767d9e55..aa794db66c67 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafka_2_0.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-0-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafka_2_0.java
@@ -490,7 +490,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
                 session.adjustCounter("Messages Sent", msgCount, true);

                 final String transitUri = StandardTransitUriProvider.getTransitUri(securityProtocol, bootstrapServers, topic);
-                session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis);
+                session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis, REL_SUCCESS);
                 session.transfer(success, REL_SUCCESS);
             }
         } catch (final ProducerFencedException | OutOfOrderSequenceException | AuthorizationException e) {
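[Editor's note] The ConsumerLease changes above show that one receive() call site can now emit different relationships: parse failures are reported against REL_PARSE_FAILURE while normal bundles use REL_SUCCESS. A compact sketch of that branching (the relationship names mirror the processors above; the rest is illustrative):

    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.Relationship;

    class ConsumerReceiveSketch {
        static final Relationship REL_SUCCESS = new Relationship.Builder().name("success").build();
        static final Relationship REL_PARSE_FAILURE = new Relationship.Builder().name("parse.failure").build();

        void route(final ProcessSession session, final FlowFile flowFile, final String transitUri, final boolean parsed) {
            // Pick the relationship once, then use it for both the event and the transfer
            final Relationship relationship = parsed ? REL_SUCCESS : REL_PARSE_FAILURE;
            session.getProvenanceReporter().receive(flowFile, transitUri, relationship);
            session.transfer(flowFile, relationship);
        }
    }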
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumerLease.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumerLease.java
index 388384269475..850a6483c2c1 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumerLease.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumerLease.java
@@ -555,7 +555,7 @@ private void handleParseFailure(final ConsumerRecord<byte[], byte[]> consumerRec
         failureFlowFile = session.putAllAttributes(failureFlowFile, attributes);

         final String transitUri = StandardTransitUriProvider.getTransitUri(securityProtocol, bootstrapServers, consumerRecord.topic());
-        session.getProvenanceReporter().receive(failureFlowFile, transitUri);
+        session.getProvenanceReporter().receive(failureFlowFile, transitUri, REL_PARSE_FAILURE);
         session.transfer(failureFlowFile, REL_PARSE_FAILURE);
@@ -840,7 +840,7 @@ private void populateAttributes(final BundleTracker tracker) {
             final FlowFile newFlowFile = getProcessSession().putAllAttributes(tracker.flowFile, kafkaAttrs);
             final long executionDurationMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - leaseStartNanos);
             final String transitUri = StandardTransitUriProvider.getTransitUri(securityProtocol, bootstrapServers, tracker.topic);
-            getProcessSession().getProvenanceReporter().receive(newFlowFile, transitUri, executionDurationMillis);
+            getProcessSession().getProvenanceReporter().receive(newFlowFile, transitUri, executionDurationMillis, REL_SUCCESS);
             tracker.updateFlowFile(newFlowFile);
         }
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafkaRecord_2_6.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafkaRecord_2_6.java
index f15a97e98aba..4755f2468111 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafkaRecord_2_6.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafkaRecord_2_6.java
@@ -582,7 +582,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
                 session.adjustCounter("Messages Sent", msgCount, true);

                 final String transitUri = StandardTransitUriProvider.getTransitUri(securityProtocol, bootstrapServers, topic);
-                session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis);
+                session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis, REL_SUCCESS);
                 session.transfer(success, REL_SUCCESS);
             }
         } catch (final ProducerFencedException | OutOfOrderSequenceException | AuthorizationException e) {
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafka_2_6.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafka_2_6.java
index ca1e381acfd2..e5014ee28f7e 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafka_2_6.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/PublishKafka_2_6.java
@@ -505,7 +505,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
                 session.adjustCounter("Messages Sent", msgCount, true);

                 final String transitUri = StandardTransitUriProvider.getTransitUri(securityProtocol, bootstrapServers, topic);
-                session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis);
+                session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis, REL_SUCCESS);
                 session.transfer(success, REL_SUCCESS);
             }
         } catch (final ProducerFencedException | OutOfOrderSequenceException | AuthorizationException e) {
diff --git a/nifi-nar-bundles/nifi-kudu-bundle/nifi-kudu-processors/src/main/java/org/apache/nifi/processors/kudu/PutKudu.java b/nifi-nar-bundles/nifi-kudu-bundle/nifi-kudu-processors/src/main/java/org/apache/nifi/processors/kudu/PutKudu.java
index e4caef0e452a..e322d3f3877c 100644
--- a/nifi-nar-bundles/nifi-kudu-bundle/nifi-kudu-processors/src/main/java/org/apache/nifi/processors/kudu/PutKudu.java
+++ b/nifi-nar-bundles/nifi-kudu-bundle/nifi-kudu-processors/src/main/java/org/apache/nifi/processors/kudu/PutKudu.java
@@ -611,7 +611,7 @@ private void transferFlowFiles(final List<FlowFile> flowFiles,
                 session.transfer(flowFile, REL_FAILURE);
             } else {
                 session.transfer(flowFile, REL_SUCCESS);
-                session.getProvenanceReporter().send(flowFile, "Successfully added FlowFile to Kudu");
+                session.getProvenanceReporter().send(flowFile, "Successfully added FlowFile to Kudu", REL_SUCCESS);
             }
         }
     }
diff --git a/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java b/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java
index ca860b4758f4..970e118b1bef 100644
--- a/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java
+++ b/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java
@@ -224,7 +224,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
             return;
         }

-        session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+        session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
         session.transfer(flowFile, REL_SUCCESS);
     }
diff --git a/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/media/ExtractMediaMetadata.java b/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/media/ExtractMediaMetadata.java
index c89853b99746..c7bedab848f0 100644
--- a/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/media/ExtractMediaMetadata.java
+++ b/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/media/ExtractMediaMetadata.java
@@ -199,9 +199,9 @@ public void process(InputStream in) throws IOException {
             }

             session.transfer(flowFile, SUCCESS);
-            session.getProvenanceReporter().modifyAttributes(flowFile, "media attributes extracted");
+            session.getProvenanceReporter().modifyAttributes(flowFile, "media attributes extracted", SUCCESS);
         } catch (ProcessException e) {
-            logger.error("Failed to extract media metadata from {} due to {}", new Object[]{flowFile, e});
+            logger.error("Failed to extract media metadata from {} due to {}", flowFile, e);
             flowFile = session.penalize(flowFile);
             session.transfer(flowFile, FAILURE);
         }
diff --git a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/AbstractMongoProcessor.java b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/AbstractMongoProcessor.java
index a802135a7bf6..91ab12342bef 100644
--- a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/AbstractMongoProcessor.java
+++ b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/AbstractMongoProcessor.java
@@ -352,7 +352,7 @@ protected void writeBatch(String payload, FlowFile parent, ProcessContext contex
         flowFile = session.importFrom(new ByteArrayInputStream(payload.getBytes(charset)), flowFile);
         flowFile = session.putAllAttributes(flowFile, extraAttributes);
         if (parent == null) {
-            session.getProvenanceReporter().receive(flowFile, getURI(context));
+            session.getProvenanceReporter().receive(flowFile, getURI(context), rel);
         }
         session.transfer(flowFile, rel);
     }
diff --git a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/GetMongo.java b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/GetMongo.java
index 1f149dd12f9b..0bdcd2d86091 100644
--- a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/GetMongo.java
+++ b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/GetMongo.java
@@ -253,7 +253,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
                     });
                     outgoingFlowFile = session.putAllAttributes(outgoingFlowFile, attributes);

-                    session.getProvenanceReporter().receive(outgoingFlowFile, getURI(context));
+                    session.getProvenanceReporter().receive(outgoingFlowFile, getURI(context), REL_SUCCESS);
                     session.transfer(outgoingFlowFile, REL_SUCCESS);
                     sent++;
                 }
diff --git a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/GetMongoRecord.java b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/GetMongoRecord.java
index 1c67ad1ea33f..5cd1095ba564 100644
--- a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/GetMongoRecord.java
+++ b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/GetMongoRecord.java
@@ -188,7 +188,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro
         output = session.putAllAttributes(output, attributes);

-        session.getProvenanceReporter().fetch(output, getURI(context));
+        session.getProvenanceReporter().fetch(output, getURI(context), REL_SUCCESS);
         session.transfer(output, REL_SUCCESS);
         if (input != null) {
             session.transfer(input, REL_ORIGINAL);
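[Editor's note] AbstractMongoProcessor above covers the case where the relationship is not a compile-time constant: writeBatch() already receives the target relationship as a parameter and now forwards it to receive(). A sketch of a helper in that style (this simplified writeBatch is illustrative, not the Mongo implementation):

    import java.io.ByteArrayInputStream;
    import java.nio.charset.StandardCharsets;

    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.Relationship;

    class DynamicRelationshipSketch {
        void writeBatch(final ProcessSession session, final String payload, final String uri, final Relationship rel) {
            FlowFile flowFile = session.create();
            flowFile = session.importFrom(new ByteArrayInputStream(payload.getBytes(StandardCharsets.UTF_8)), flowFile);
            // The caller decides the route; the provenance event records the same relationship
            session.getProvenanceReporter().receive(flowFile, uri, rel);
            session.transfer(flowFile, rel);
        }
    }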
diff --git a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/PutMongo.java b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/PutMongo.java
index 7df15c8e1f8b..2cd7ce6de7c5 100644
--- a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/PutMongo.java
+++ b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/PutMongo.java
@@ -211,7 +211,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
             if (MODE_INSERT.equalsIgnoreCase(mode)) {
                 collection.insertOne((Document) doc);
-                logger.info("inserted {} into MongoDB", new Object[] { flowFile });
+                logger.info("inserted {} into MongoDB", flowFile);
             } else {
                 // update
                 final boolean upsert = context.getProperty(UPSERT).asBoolean();
@@ -233,10 +233,10 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
                     update.remove(updateKey);
                     collection.updateOne(query, update, new UpdateOptions().upsert(upsert));
                 }
-                logger.info("updated {} into MongoDB", new Object[] { flowFile });
+                logger.info("updated {} into MongoDB", flowFile);
             }

-            session.getProvenanceReporter().send(flowFile, getURI(context));
+            session.getProvenanceReporter().send(flowFile, getURI(context), REL_SUCCESS);
             session.transfer(flowFile, REL_SUCCESS);
         } catch (Exception e) {
             logger.error("Failed to insert {} into MongoDB due to {}", flowFile, e, e);
diff --git a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/PutMongoRecord.java b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/PutMongoRecord.java
index 9c3df044f6dc..22d7f9fa23ef 100644
--- a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/PutMongoRecord.java
+++ b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/PutMongoRecord.java
@@ -269,7 +269,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
             String url = clientService != null
                     ? clientService.getURI()
                     : context.getProperty(URI).evaluateAttributeExpressions().getValue();
-            session.getProvenanceReporter().send(flowFile, url, String.format("Written %d documents to MongoDB.", written));
+            session.getProvenanceReporter().send(flowFile, url, String.format("Written %d documents to MongoDB.", written), REL_SUCCESS);
             session.transfer(flowFile, REL_SUCCESS);
             getLogger().info("Written {} records into MongoDB", new Object[]{ written });
         }
diff --git a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/gridfs/FetchGridFS.java b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/gridfs/FetchGridFS.java
index 0d691eca1219..bfc2b5530373 100644
--- a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/gridfs/FetchGridFS.java
+++ b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/gridfs/FetchGridFS.java
@@ -200,6 +200,6 @@ private void handleFile(GridFSBucket bucket, ProcessSession session, ProcessCont
         output = session.write(output, out -> bucket.downloadToStream(input.getObjectId(), out));
         output = session.putAllAttributes(output, attrs);
         session.transfer(output, REL_SUCCESS);
-        session.getProvenanceReporter().receive(output, getTransitUri(input.getObjectId(), output, context));
+        session.getProvenanceReporter().receive(output, getTransitUri(input.getObjectId(), output, context), REL_SUCCESS);
     }
 }
diff --git a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/gridfs/PutGridFS.java b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/gridfs/PutGridFS.java
index 9e2a6d4ef578..f82853db6e97 100644
--- a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/gridfs/PutGridFS.java
+++ b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/gridfs/PutGridFS.java
@@ -185,7 +185,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro
         if (id != null) {
             input = session.putAttribute(input, ID_ATTRIBUTE, id.toString());
             session.transfer(input, REL_SUCCESS);
-            session.getProvenanceReporter().send(input, getTransitUri(id, input, context));
+            session.getProvenanceReporter().send(input, getTransitUri(id, input, context), REL_SUCCESS);
         } else {
             getLogger().error("ID was null, assuming failure.");
             session.transfer(input, REL_FAILURE);
diff --git a/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java b/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java
index 02982600f7ce..15693172d52d 100644
--- a/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java
+++ b/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java
@@ -398,7 +398,7 @@ private void transferQueue(ProcessSession session) {
             final FlowFile messageFlowfile = session.write(createFlowFileAndPopulateAttributes(session, mqttMessage),
                     out -> out.write(mqttMessage.getPayload() == null ? new byte[0] : mqttMessage.getPayload()));

-            session.getProvenanceReporter().receive(messageFlowfile, getTransitUri(mqttMessage.getTopic()));
+            session.getProvenanceReporter().receive(messageFlowfile, getTransitUri(mqttMessage.getTopic()), REL_MESSAGE);
             session.transfer(messageFlowfile, REL_MESSAGE);
             session.commitAsync();
             mqttQueue.remove(mqttMessage);
@@ -424,7 +424,7 @@ private void transferQueueDemarcator(final ProcessContext context, final Process
                 }
             });

-            session.getProvenanceReporter().receive(messageFlowfile, getTransitUri(topicPrefix, topicFilter));
+            session.getProvenanceReporter().receive(messageFlowfile, getTransitUri(topicPrefix, topicFilter), REL_MESSAGE);
             session.transfer(messageFlowfile, REL_MESSAGE);
             session.commitAsync();
         }
@@ -433,7 +433,7 @@ private void transferFailure(final ProcessSession session, final ReceivedMqttMes
         final FlowFile messageFlowfile = session.write(createFlowFileAndPopulateAttributes(session, mqttMessage),
                 out -> out.write(mqttMessage.getPayload()));

-        session.getProvenanceReporter().receive(messageFlowfile, getTransitUri(mqttMessage.getTopic()));
+        session.getProvenanceReporter().receive(messageFlowfile, getTransitUri(mqttMessage.getTopic()), REL_PARSE_FAILURE);
         session.transfer(messageFlowfile, REL_PARSE_FAILURE);
         session.adjustCounter(COUNTER_PARSE_FAILURES, 1, false);
     }
@@ -586,7 +586,7 @@ private void transferQueueRecord(final ProcessContext context, final ProcessSess
         }
         session.putAllAttributes(flowFile, attributes);

-        session.getProvenanceReporter().receive(flowFile, getTransitUri(topicPrefix, topicFilter));
+        session.getProvenanceReporter().receive(flowFile, getTransitUri(topicPrefix, topicFilter), REL_MESSAGE);
         session.transfer(flowFile, REL_MESSAGE);

         final int count = recordCount.get();
diff --git a/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/PublishMQTT.java b/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/PublishMQTT.java
index 13b18abb862a..a13146a20929 100644
--- a/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/PublishMQTT.java
+++ b/nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/PublishMQTT.java
@@ -240,7 +240,7 @@ private void processMultiMessageFlowFile(ProcessStrategy processStrategy, Proces
                 provenanceEventDetails = String.format(processStrategy.getSuccessTemplateMessage(), processedRecords.get());
             }

-            session.getProvenanceReporter().send(flowfile, clientProperties.getRawBrokerUris(), provenanceEventDetails, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+            session.getProvenanceReporter().send(flowfile, clientProperties.getRawBrokerUris(), provenanceEventDetails, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
             session.transfer(successFlowFile, REL_SUCCESS);
         } catch (Exception e) {
             logger.error("An error happened during publishing records. Routing to failure.", e);
@@ -252,7 +252,8 @@ private void processMultiMessageFlowFile(ProcessStrategy processStrategy, Proces
                         failedFlowFile,
                         clientProperties.getRawBrokerUris(),
                         String.format(processStrategy.getFailureTemplateMessage(), processedRecords.get()),
-                        stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+                        stopWatch.getElapsed(TimeUnit.MILLISECONDS),
+                        REL_FAILURE);
             }

             session.transfer(failedFlowFile, REL_FAILURE);
@@ -266,7 +267,7 @@ private void processStandardFlowFile(ProcessContext context, ProcessSession sess
             final StopWatch stopWatch = new StopWatch(true);
             publishMessage(context, flowfile, topic, messageContent);
-            session.getProvenanceReporter().send(flowfile, clientProperties.getRawBrokerUris(), stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+            session.getProvenanceReporter().send(flowfile, clientProperties.getRawBrokerUris(), stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
             session.transfer(flowfile, REL_SUCCESS);
         } catch (Exception e) {
             logger.error("An error happened during publishing a message. Routing to failure.", e);
diff --git a/nifi-nar-bundles/nifi-opentelemetry-bundle/nifi-opentelemetry-processors/src/main/java/org/apache/nifi/processors/opentelemetry/ListenOTLP.java b/nifi-nar-bundles/nifi-opentelemetry-bundle/nifi-opentelemetry-processors/src/main/java/org/apache/nifi/processors/opentelemetry/ListenOTLP.java
index 710709653f84..a6ed8df5b3d2 100644
--- a/nifi-nar-bundles/nifi-opentelemetry-bundle/nifi-opentelemetry-processors/src/main/java/org/apache/nifi/processors/opentelemetry/ListenOTLP.java
+++ b/nifi-nar-bundles/nifi-opentelemetry-bundle/nifi-opentelemetry-processors/src/main/java/org/apache/nifi/processors/opentelemetry/ListenOTLP.java
@@ -206,7 +206,7 @@ private void processRequestCallback(final ProcessSession session, final RequestC
         try {
             flowFile = session.write(flowFile, requestCallback);
             flowFile = session.putAllAttributes(flowFile, requestCallback.getAttributes());
-            session.getProvenanceReporter().receive(flowFile, transitUri);
+            session.getProvenanceReporter().receive(flowFile, transitUri, SUCCESS);
             session.transfer(flowFile, SUCCESS);
         } catch (final Exception e) {
             getLogger().warn("Request Transit URI [{}] processing failed {}", transitUri, flowFile, e);
diff --git a/nifi-nar-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/main/java/org/apache/nifi/processors/parquet/ConvertAvroToParquet.java b/nifi-nar-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/main/java/org/apache/nifi/processors/parquet/ConvertAvroToParquet.java
index eec48afd0e1d..a296c5b67b39 100644
--- a/nifi-nar-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/main/java/org/apache/nifi/processors/parquet/ConvertAvroToParquet.java
+++ b/nifi-nar-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/main/java/org/apache/nifi/processors/parquet/ConvertAvroToParquet.java
@@ -177,10 +177,10 @@ record = dataFileReader.next();
             putFlowFile = session.putAllAttributes(putFlowFile, outAttributes);
             session.transfer(putFlowFile, SUCCESS);
-            session.getProvenanceReporter().modifyContent(putFlowFile, "Converted "+totalRecordCount.get()+" records", System.currentTimeMillis() - startTime);
+            session.getProvenanceReporter().modifyContent(putFlowFile, "Converted "+totalRecordCount.get()+" records", System.currentTimeMillis() - startTime, SUCCESS);

         } catch (final ProcessException pe) {
-            getLogger().error("Failed to convert {} from Avro to Parquet
due to {}; transferring to failure", flowFile, pe); session.transfer(flowFile, FAILURE); } diff --git a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/EventIdFirstSchemaRecordWriter.java b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/EventIdFirstSchemaRecordWriter.java index a2da8cb8f427..c9c17b329c3d 100644 --- a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/EventIdFirstSchemaRecordWriter.java +++ b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/EventIdFirstSchemaRecordWriter.java @@ -102,7 +102,10 @@ public Map writeRecords(final Iterable s try { recordIdentifier = event.getEventId() == -1 ? getIdGenerator().getAndIncrement() : event.getEventId(); - updateEvent(event, recordIdentifier); startBytes = getBytesWritten(); ensureStreamState(recordIdentifier, startBytes); @@ -230,7 +232,8 @@ protected String getSerializationName() { return SERIALIZATION_NAME; } - private void updateEvent(ProvenanceEventRecord event, long recordIdentifier) { + private void updateEvent(UpdateableProvenanceEventRecord event, long recordIdentifier) { + event.setEventId(recordIdentifier); final String flowFileUUID = event.getFlowFileUuid(); List previousEventIds = previousEventIdsMap.get(flowFileUUID); switch (event.getEventType()) { diff --git a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java index 060e2431b84d..dd1b414329ae 100644 --- a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java +++ b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java @@ -131,7 +131,7 @@ * @deprecated This class is now deprecated in favor of {@link WriteAheadProvenanceRepository}. 
*/ @Deprecated -public class PersistentProvenanceRepository extends AbstractProvenanceRepository { +public class PersistentProvenanceRepository implements ProvenanceRepository { public static final String EVENT_CATEGORY = "Provenance Repository"; private static final String FILE_EXTENSION = ".prov"; diff --git a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordReader.java b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordReader.java index ce875d619644..c1b28bdd7789 100644 --- a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordReader.java +++ b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordReader.java @@ -193,6 +193,8 @@ public StandardProvenanceEventRecord nextRecord(final DataInputStream dis, final builder.setAttributes(Collections.emptyMap(), attrs); } + builder.setRelationship(readNullableString(dis)); + // Read Event-Type specific fields. if (eventType == ProvenanceEventType.FORK || eventType == ProvenanceEventType.JOIN || eventType == ProvenanceEventType.CLONE || eventType == ProvenanceEventType.REPLAY) { final int numParents = dis.readInt(); @@ -213,8 +215,6 @@ public StandardProvenanceEventRecord nextRecord(final DataInputStream dis, final builder.setTransitUri(readNullableString(dis)); } else if (eventType == ProvenanceEventType.ADDINFO) { builder.setAlternateIdentifierUri(readNullableString(dis)); - } else if (eventType == ProvenanceEventType.ROUTE) { - builder.setRelationship(readNullableString(dis)); } builder.setFlowFileEntryDate(flowFileEntryDate); diff --git a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordWriter.java b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordWriter.java index 0a749adb2edd..6333cdf13a61 100644 --- a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordWriter.java +++ b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordWriter.java @@ -144,6 +144,9 @@ protected void writeRecord(final ProvenanceEventRecord record, final long record // write out the identifier of the destination queue. 
writeNullableString(out, record.getSourceQueueIdentifier(), "SourceQueueIdentifier"); + // write out the relationship to which the associated FlowFile was transferred + writeNullableString(out, record.getRelationship(), "Relationship"); + // Write type-specific info if (recordType == ProvenanceEventType.FORK || recordType == ProvenanceEventType.JOIN || recordType == ProvenanceEventType.CLONE || recordType == ProvenanceEventType.REPLAY) { writeUUIDs(out, record.getParentUuids()); @@ -157,8 +160,6 @@ protected void writeRecord(final ProvenanceEventRecord record, final long record writeNullableString(out, record.getTransitUri(), "TransitUri"); } else if (recordType == ProvenanceEventType.ADDINFO) { writeNullableString(out, record.getAlternateIdentifierUri(), "AlternateIdentifierUri"); - } else if (recordType == ProvenanceEventType.ROUTE) { - writeNullableString(out, record.getRelationship(), "Relationship"); } } diff --git a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/WriteAheadProvenanceRepository.java b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/WriteAheadProvenanceRepository.java index a61bc2bef222..08cfc3f017da 100644 --- a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/WriteAheadProvenanceRepository.java +++ b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/WriteAheadProvenanceRepository.java @@ -88,7 +88,7 @@ * across disks for far greater performance. *
</p>
*/ -public class WriteAheadProvenanceRepository extends AbstractProvenanceRepository { +public class WriteAheadProvenanceRepository implements ProvenanceRepository { private static final Logger logger = LoggerFactory.getLogger(WriteAheadProvenanceRepository.class); static final int BLOCK_SIZE = 1024 * 32; public static final String EVENT_CATEGORY = "Provenance Repository"; diff --git a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/index/lucene/ConvertEventToLuceneDocument.java b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/index/lucene/ConvertEventToLuceneDocument.java index 5c31ba2ecfc9..b03220272e83 100644 --- a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/index/lucene/ConvertEventToLuceneDocument.java +++ b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/index/lucene/ConvertEventToLuceneDocument.java @@ -97,10 +97,10 @@ public Document convert(final ProvenanceEventRecord record, final long eventId) final List previousEventIDs = record.getPreviousEventIds(); if (previousEventIDs != null) { for (Long previousEventID : previousEventIDs) { - doc.add(new UnIndexedLongField(SearchableFields.PreviousEventIdentifiers.getSearchableFieldName(), previousEventID)); + doc.add(new StringField(SearchableFields.PreviousEventIdentifiers.getSearchableFieldName(), String.valueOf(previousEventID), Store.YES)); } } else { - doc.add(new UnIndexedLongField(SearchableFields.PreviousEventIdentifiers.getSearchableFieldName(), -1L)); + doc.add(new StringField(SearchableFields.PreviousEventIdentifiers.getSearchableFieldName(), "-1", Store.YES)); } // If it's event is a FORK, or JOIN, add the FlowFileUUID for all child/parent UUIDs. 
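Note: the ConvertEventToLuceneDocument hunk above stops writing previous event IDs through the old unindexed long field and instead adds one StringField per ID with Store.YES, so each ID is indexed as a single exact-match term and can also be read back from search hits. Below is a minimal sketch of that index layout against the stock Lucene API; the field name is an illustrative placeholder for whatever SearchableFields.PreviousEventIdentifiers resolves to, and nothing in the sketch is part of the patch itself:

import java.util.List;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

class PreviousEventIdIndexSketch {

    static final String FIELD = "previousEventIdentifiers"; // placeholder field name

    // Mirrors the hunk above: one StringField per previous event ID, or a
    // single "-1" sentinel when the record carries none.
    static Document toDocument(final List<Long> previousEventIds) {
        final Document doc = new Document();
        if (previousEventIds != null) {
            for (final Long previousEventId : previousEventIds) {
                doc.add(new StringField(FIELD, String.valueOf(previousEventId), Store.YES));
            }
        } else {
            doc.add(new StringField(FIELD, "-1", Store.YES));
        }
        return doc;
    }

    // StringField values are indexed as single untokenized terms, so lookups
    // are exact matches; numeric range queries would need a numeric field
    // type such as LongPoint instead.
    static Query byPreviousEventId(final long previousEventId) {
        return new TermQuery(new Term(FIELD, String.valueOf(previousEventId)));
    }
}

The practical consequence is that the PreviousEventIdentifiers field becomes queryable by term rather than merely stored, at the cost of string-only matching.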
diff --git a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/util/StorageSummaryEvent.java b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/util/StorageSummaryEvent.java index 9257fdf33197..72a918189964 100644 --- a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/util/StorageSummaryEvent.java +++ b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/util/StorageSummaryEvent.java @@ -21,9 +21,10 @@ import java.util.Map; import org.apache.nifi.provenance.ProvenanceEventRecord; import org.apache.nifi.provenance.ProvenanceEventType; +import org.apache.nifi.provenance.UpdateableProvenanceEventRecord; import org.apache.nifi.provenance.serialization.StorageSummary; -public class StorageSummaryEvent implements ProvenanceEventRecord { +public class StorageSummaryEvent implements UpdateableProvenanceEventRecord { private final ProvenanceEventRecord event; private final StorageSummary storageSummary; @@ -37,6 +38,13 @@ public long getEventId() { return storageSummary.getEventId(); } + @Override + public void setEventId(long eventId) { + if (event instanceof UpdateableProvenanceEventRecord) { + ((UpdateableProvenanceEventRecord) event).setEventId(eventId); + } + } + @Override public List getPreviousEventIds() { return event.getPreviousEventIds(); @@ -44,9 +52,10 @@ public List getPreviousEventIds() { @Override public void setPreviousEventIds(List previousEventIds) { - event.setPreviousEventIds(previousEventIds); + if (event instanceof UpdateableProvenanceEventRecord) { + ((UpdateableProvenanceEventRecord) event).setPreviousEventIds(previousEventIds); + } } - @Override public long getEventTime() { return event.getEventTime(); diff --git a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-volatile-provenance-repository/src/main/java/org/apache/nifi/provenance/VolatileProvenanceRepository.java b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-volatile-provenance-repository/src/main/java/org/apache/nifi/provenance/VolatileProvenanceRepository.java index 9c02781c17a9..98c97865fceb 100644 --- a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-volatile-provenance-repository/src/main/java/org/apache/nifi/provenance/VolatileProvenanceRepository.java +++ b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-volatile-provenance-repository/src/main/java/org/apache/nifi/provenance/VolatileProvenanceRepository.java @@ -64,7 +64,7 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.regex.Pattern; -public class VolatileProvenanceRepository extends AbstractProvenanceRepository { +public class VolatileProvenanceRepository implements ProvenanceRepository { // properties public static final String BUFFER_SIZE = "nifi.provenance.repository.buffer.size"; @@ -786,7 +786,7 @@ public void run() { } } - private static class IdEnrichedProvEvent implements ProvenanceEventRecord { + private static class IdEnrichedProvEvent implements UpdateableProvenanceEventRecord { private final ProvenanceEventRecord record; private final long id; @@ -801,6 +801,13 @@ public long getEventId() { return id; } + @Override + public void setEventId(long eventId) { + if (record instanceof UpdateableProvenanceEventRecord) { + ((UpdateableProvenanceEventRecord) record).setEventId(eventId); + } 
+ } + @Override public List getPreviousEventIds() { return record.getPreviousEventIds(); @@ -808,7 +815,9 @@ public List getPreviousEventIds() { @Override public void setPreviousEventIds(List previousEventIds) { - record.setPreviousEventIds(previousEventIds); + if (record instanceof UpdateableProvenanceEventRecord) { + ((UpdateableProvenanceEventRecord) record).setPreviousEventIds(previousEventIds); + } } @Override diff --git a/nifi-nar-bundles/nifi-rethinkdb-bundle/nifi-rethinkdb-processors/src/main/java/org/apache/nifi/processors/rethinkdb/DeleteRethinkDB.java b/nifi-nar-bundles/nifi-rethinkdb-bundle/nifi-rethinkdb-processors/src/main/java/org/apache/nifi/processors/rethinkdb/DeleteRethinkDB.java index c9827c8399e9..2fe66c9b2fe5 100644 --- a/nifi-nar-bundles/nifi-rethinkdb-bundle/nifi-rethinkdb-processors/src/main/java/org/apache/nifi/processors/rethinkdb/DeleteRethinkDB.java +++ b/nifi-nar-bundles/nifi-rethinkdb-bundle/nifi-rethinkdb-processors/src/main/java/org/apache/nifi/processors/rethinkdb/DeleteRethinkDB.java @@ -179,7 +179,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session session.getProvenanceReporter().modifyContent(flowFile, new StringBuilder("rethinkdb://").append(databaseName).append("/").append(tableName).append("/").append(id).toString(), - (endTimeMillis - startTimeMillis)); + (endTimeMillis - startTimeMillis), REL_SUCCESS); } session.transfer(flowFile, REL_SUCCESS); diff --git a/nifi-nar-bundles/nifi-rethinkdb-bundle/nifi-rethinkdb-processors/src/main/java/org/apache/nifi/processors/rethinkdb/GetRethinkDB.java b/nifi-nar-bundles/nifi-rethinkdb-bundle/nifi-rethinkdb-processors/src/main/java/org/apache/nifi/processors/rethinkdb/GetRethinkDB.java index 42f68ab9d119..ccc6d31add4a 100644 --- a/nifi-nar-bundles/nifi-rethinkdb-bundle/nifi-rethinkdb-processors/src/main/java/org/apache/nifi/processors/rethinkdb/GetRethinkDB.java +++ b/nifi-nar-bundles/nifi-rethinkdb-bundle/nifi-rethinkdb-processors/src/main/java/org/apache/nifi/processors/rethinkdb/GetRethinkDB.java @@ -166,7 +166,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session session.transfer(flowFile, REL_SUCCESS); session.getProvenanceReporter().fetch(flowFile, new StringBuilder("rethinkdb://").append(databaseName).append("/").append(tableName).append("/").append(id).toString(), - (endTimeMillis - startTimeMillis)); + (endTimeMillis - startTimeMillis), REL_SUCCESS); } catch (Exception exception) { getLogger().error("Failed to get document from RethinkDB due to error {}", diff --git a/nifi-nar-bundles/nifi-rethinkdb-bundle/nifi-rethinkdb-processors/src/main/java/org/apache/nifi/processors/rethinkdb/PutRethinkDB.java b/nifi-nar-bundles/nifi-rethinkdb-bundle/nifi-rethinkdb-processors/src/main/java/org/apache/nifi/processors/rethinkdb/PutRethinkDB.java index 3a9b80244a22..2da2cfe6e725 100644 --- a/nifi-nar-bundles/nifi-rethinkdb-bundle/nifi-rethinkdb-processors/src/main/java/org/apache/nifi/processors/rethinkdb/PutRethinkDB.java +++ b/nifi-nar-bundles/nifi-rethinkdb-bundle/nifi-rethinkdb-processors/src/main/java/org/apache/nifi/processors/rethinkdb/PutRethinkDB.java @@ -174,18 +174,19 @@ public void onTrigger(final ProcessContext context, final ProcessSession session HashMap result = runInsert(insert); final long endTimeMillis = System.currentTimeMillis(); - getLogger().debug("Json documents {} inserted Result: {}", new Object[] {documents, result}); + getLogger().debug("Json documents {} inserted Result: {}", documents, result); flowFile = 
populateAttributes(session, flowFile, result); if ( (Long)result.get(RESULT_ERROR_KEY) != 0 ) { getLogger().error("There were errors while inserting data documents {} result {}", - new Object [] {documents, result}); + documents, result); session.transfer(flowFile, REL_FAILURE); } else { session.transfer(flowFile, REL_SUCCESS); session.getProvenanceReporter().send(flowFile, - new StringBuilder("rethinkdb://").append(databaseName).append("/").append(tableName).toString(), - (endTimeMillis - startTimeMillis)); + new StringBuilder("rethinkdb://").append(databaseName).append("/").append(tableName).toString(), + (endTimeMillis - startTimeMillis), + REL_SUCCESS); } } catch (Exception exception) { getLogger().error("Failed to insert into RethinkDB due to {}", diff --git a/nifi-nar-bundles/nifi-salesforce-bundle/nifi-salesforce-processors/src/main/java/org/apache/nifi/processors/salesforce/PutSalesforceObject.java b/nifi-nar-bundles/nifi-salesforce-bundle/nifi-salesforce-processors/src/main/java/org/apache/nifi/processors/salesforce/PutSalesforceObject.java index 210ceafedd83..ca8bc910d89e 100644 --- a/nifi-nar-bundles/nifi-salesforce-bundle/nifi-salesforce-processors/src/main/java/org/apache/nifi/processors/salesforce/PutSalesforceObject.java +++ b/nifi-nar-bundles/nifi-salesforce-bundle/nifi-salesforce-processors/src/main/java/org/apache/nifi/processors/salesforce/PutSalesforceObject.java @@ -145,7 +145,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro processRecords(flowFile, objectType, context, session); session.transfer(flowFile, REL_SUCCESS); long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - session.getProvenanceReporter().send(flowFile, salesforceRestClient.getVersionedBaseUrl() + "/put/" + objectType, transferMillis); + session.getProvenanceReporter().send(flowFile, salesforceRestClient.getVersionedBaseUrl() + "/put/" + objectType, transferMillis, REL_SUCCESS); } catch (MalformedRecordException e) { getLogger().error("Couldn't read records from input", e); transferToFailure(session, flowFile, e.getMessage()); diff --git a/nifi-nar-bundles/nifi-salesforce-bundle/nifi-salesforce-processors/src/main/java/org/apache/nifi/processors/salesforce/QuerySalesforceObject.java b/nifi-nar-bundles/nifi-salesforce-bundle/nifi-salesforce-processors/src/main/java/org/apache/nifi/processors/salesforce/QuerySalesforceObject.java index 538734051178..44ed18a4af1f 100644 --- a/nifi-nar-bundles/nifi-salesforce-bundle/nifi-salesforce-processors/src/main/java/org/apache/nifi/processors/salesforce/QuerySalesforceObject.java +++ b/nifi-nar-bundles/nifi-salesforce-bundle/nifi-salesforce-processors/src/main/java/org/apache/nifi/processors/salesforce/QuerySalesforceObject.java @@ -535,7 +535,7 @@ private void transferFlowFiles(ProcessSession session, List outgoingFl long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); outgoingFlowFiles.forEach(ff -> - session.getProvenanceReporter().receive(ff, salesforceRestService.getVersionedBaseUrl() + "/" + urlDetail, transferMillis) + session.getProvenanceReporter().receive(ff, salesforceRestService.getVersionedBaseUrl() + "/" + urlDetail, transferMillis, REL_SUCCESS) ); } if (originalFlowFile != null && !isOriginalTransferred.get()) { diff --git a/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/processors/script/ScriptedPartitionRecord.java 
b/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/processors/script/ScriptedPartitionRecord.java index 563a33d0e344..8d8a533b7888 100644 --- a/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/processors/script/ScriptedPartitionRecord.java +++ b/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/processors/script/ScriptedPartitionRecord.java @@ -227,7 +227,7 @@ public void process(final InputStream in) throws IOException { final long millis = System.currentTimeMillis() - startMillis; session.adjustCounter("Records Processed", counts.getRecordCount(), true); - session.getProvenanceReporter().fork(incomingFlowFile, outgoingFlowFiles.values(), "Processed " + counts.getRecordCount() + " Records", millis); + session.getProvenanceReporter().fork(incomingFlowFile, outgoingFlowFiles.values(), "Processed " + counts.getRecordCount() + " Records", millis, RELATIONSHIP_SUCCESS); } catch (final ScriptException | SchemaNotFoundException | MalformedRecordException e) { throw new ProcessException("After processing " + counts.getRecordCount() + " Records, encountered failure when attempting to process " + incomingFlowFile, e); diff --git a/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/processors/script/ScriptedRouterProcessor.java b/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/processors/script/ScriptedRouterProcessor.java index 7c381d89af83..dd856daf659e 100644 --- a/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/processors/script/ScriptedRouterProcessor.java +++ b/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/processors/script/ScriptedRouterProcessor.java @@ -31,7 +31,6 @@ import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; -import org.apache.nifi.processor.io.InputStreamCallback; import org.apache.nifi.schema.access.SchemaNotFoundException; import org.apache.nifi.serialization.MalformedRecordException; import org.apache.nifi.serialization.RecordReader; @@ -127,72 +126,70 @@ private boolean route( final RecordCounts counts = new RecordCounts(); try { - session.read(incomingFlowFile, new InputStreamCallback() { - @Override - public void process(final InputStream in) throws IOException { - try ( - final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, incomingFlowFile.getSize(), getLogger()) - ) { - final RecordSchema schema = writerFactory.getSchema(originalAttributes, reader.getSchema()); - final RecordSet recordSet = reader.createRecordSet(); - final PushBackRecordSet pushBackSet = new PushBackRecordSet(recordSet); - final Map outgoingFlowFiles = new HashMap<>(); - final Map recordSetWriters = new HashMap<>(); - - // Reading in records and evaluate script - while (pushBackSet.isAnotherRecord()) { - final Record record = pushBackSet.next(); - final Object evaluatedValue = evaluator.evaluate(record, counts.getRecordCount()); - getLogger().debug("Evaluated scripted against {} (index {}), producing result of {}", record, counts.getRecordCount(), evaluatedValue); - counts.incrementRecordCount(); - - if (evaluatedValue != null && scriptResultType.isInstance(evaluatedValue)) { - final Optional outgoingRelationship = 
resolveRelationship(scriptResultType.cast(evaluatedValue)); - - if (outgoingRelationship.isPresent()) { - if (!outgoingFlowFiles.containsKey(outgoingRelationship.get())) { - final FlowFile outgoingFlowFile = session.create(incomingFlowFile); - final OutputStream out = session.write(outgoingFlowFile); - final RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out, outgoingFlowFile); - writer.beginRecordSet(); - - outgoingFlowFiles.put(outgoingRelationship.get(), outgoingFlowFile); - recordSetWriters.put(outgoingRelationship.get(), writer); - } - - recordSetWriters.get(outgoingRelationship.get()).write(record); - } else { - getLogger().debug("Record with evaluated value {} has no outgoing relationship determined", String.valueOf(evaluatedValue)); + session.read(incomingFlowFile, in -> { + try ( + final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, incomingFlowFile.getSize(), getLogger()) + ) { + final RecordSchema schema = writerFactory.getSchema(originalAttributes, reader.getSchema()); + final RecordSet recordSet = reader.createRecordSet(); + final PushBackRecordSet pushBackSet = new PushBackRecordSet(recordSet); + final Map outgoingFlowFiles = new HashMap<>(); + final Map recordSetWriters = new HashMap<>(); + + // Reading in records and evaluate script + while (pushBackSet.isAnotherRecord()) { + final Record record = pushBackSet.next(); + final Object evaluatedValue = evaluator.evaluate(record, counts.getRecordCount()); + getLogger().debug("Evaluated scripted against {} (index {}), producing result of {}", record, counts.getRecordCount(), evaluatedValue); + counts.incrementRecordCount(); + + if (evaluatedValue != null && scriptResultType.isInstance(evaluatedValue)) { + final Optional outgoingRelationship = resolveRelationship(scriptResultType.cast(evaluatedValue)); + + if (outgoingRelationship.isPresent()) { + if (!outgoingFlowFiles.containsKey(outgoingRelationship.get())) { + final FlowFile outgoingFlowFile = session.create(incomingFlowFile); + final OutputStream out = session.write(outgoingFlowFile); + final RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out, outgoingFlowFile); + writer.beginRecordSet(); + + outgoingFlowFiles.put(outgoingRelationship.get(), outgoingFlowFile); + recordSetWriters.put(outgoingRelationship.get(), writer); } + + recordSetWriters.get(outgoingRelationship.get()).write(record); } else { - throw new ProcessException("Script returned a value of " + evaluatedValue - + " but this Processor requires that the object returned be an instance of " + scriptResultType.getSimpleName()); + getLogger().debug("Record with evaluated value {} has no outgoing relationship determined", String.valueOf(evaluatedValue)); } + } else { + throw new ProcessException("Script returned a value of " + evaluatedValue + + " but this Processor requires that the object returned be an instance of " + scriptResultType.getSimpleName()); } + } - // Sending outgoing flow files - for (final Relationship relationship : outgoingFlowFiles.keySet()) { - final RecordSetWriter writer = recordSetWriters.get(relationship); - final FlowFile outgoingFlowFile = outgoingFlowFiles.get(relationship); - - final Map attributes = new HashMap<>(incomingFlowFile.getAttributes()); - attributes.put("mime.type", writer.getMimeType()); - - try { - final WriteResult finalResult = writer.finishRecordSet(); - final int outgoingFlowFileRecords = finalResult.getRecordCount(); - attributes.put("record.count", String.valueOf(outgoingFlowFileRecords)); 
- writer.close(); - } catch (final IOException e) { - throw new ProcessException("Resources used for record writing might not be closed", e); - } - - session.putAllAttributes(outgoingFlowFile, attributes); - session.transfer(outgoingFlowFile, relationship); + // Sending outgoing flow files + for (final Relationship relationship : outgoingFlowFiles.keySet()) { + final RecordSetWriter writer = recordSetWriters.get(relationship); + final FlowFile outgoingFlowFile = outgoingFlowFiles.get(relationship); + + final Map attributes = new HashMap<>(incomingFlowFile.getAttributes()); + attributes.put("mime.type", writer.getMimeType()); + + try { + final WriteResult finalResult = writer.finishRecordSet(); + final int outgoingFlowFileRecords = finalResult.getRecordCount(); + attributes.put("record.count", String.valueOf(outgoingFlowFileRecords)); + writer.close(); + } catch (final IOException e) { + throw new ProcessException("Resources used for record writing might not be closed", e); } - } catch (final ScriptException | SchemaNotFoundException | MalformedRecordException e) { - throw new ProcessException("After processing " + counts.getRecordCount() + " Records, encountered failure when attempting to process " + incomingFlowFile, e); + + session.putAllAttributes(outgoingFlowFile, attributes); + session.getProvenanceReporter().route(outgoingFlowFile, relationship); + session.transfer(outgoingFlowFile, relationship); } + } catch (final ScriptException | SchemaNotFoundException | MalformedRecordException e) { + throw new ProcessException("After processing " + counts.getRecordCount() + " Records, encountered failure when attempting to process " + incomingFlowFile, e); } }); diff --git a/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/processors/script/ScriptedTransformRecord.java b/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/processors/script/ScriptedTransformRecord.java index fe3548e98a37..bace519411b1 100644 --- a/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/processors/script/ScriptedTransformRecord.java +++ b/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/processors/script/ScriptedTransformRecord.java @@ -222,12 +222,12 @@ public void write(final Record record) throws IOException { session.transfer(flowFile, REL_SUCCESS); final long transformCount = counts.getRecordCount() - counts.getDroppedCount(); - getLogger().info("Successfully transformed {} Records and dropped {} Records for {}", new Object[] {transformCount, counts.getDroppedCount(), flowFile}); + getLogger().info("Successfully transformed {} Records and dropped {} Records for {}", transformCount, counts.getDroppedCount(), flowFile); session.adjustCounter("Records Transformed", transformCount, true); session.adjustCounter("Records Dropped", counts.getDroppedCount(), true); final long millis = System.currentTimeMillis() - startMillis; - session.getProvenanceReporter().modifyContent(flowFile, "Transformed " + transformCount + " Records, Dropped " + counts.getDroppedCount() + " Records", millis); + session.getProvenanceReporter().modifyContent(flowFile, "Transformed " + transformCount + " Records, Dropped " + counts.getDroppedCount() + " Records", millis, REL_SUCCESS); } catch (final ProcessException e) { getLogger().error("After processing {} Records, encountered failure when attempting to transform {}", counts.getRecordCount(), flowFile, e.getCause()); 
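// Note (illustrative sketch, not part of the patch): the ScriptedRouterProcessor
// hunk above pairs every transfer with a ROUTE provenance event on the same
// relationship. A generic form of that pattern, assuming java.util.Map plus the
// standard org.apache.nifi.processor.{ProcessSession, Relationship} and
// org.apache.nifi.flowfile.FlowFile imports:
private void routeAndTransfer(final ProcessSession session, final Map<Relationship, FlowFile> outgoingFlowFiles) {
    for (final Map.Entry<Relationship, FlowFile> entry : outgoingFlowFiles.entrySet()) {
        // Emit the ROUTE event naming the target relationship, then transfer
        // the FlowFile to that same relationship so the provenance trail and
        // the actual routing always agree.
        session.getProvenanceReporter().route(entry.getValue(), entry.getKey());
        session.transfer(entry.getValue(), entry.getKey());
    }
}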
session.transfer(flowFile, REL_FAILURE); diff --git a/nifi-nar-bundles/nifi-shopify-bundle/nifi-shopify-processors/src/main/java/org/apache/nifi/processors/shopify/GetShopify.java b/nifi-nar-bundles/nifi-shopify-bundle/nifi-shopify-processors/src/main/java/org/apache/nifi/processors/shopify/GetShopify.java index f0aee1514d81..afc4f33e9d41 100644 --- a/nifi-nar-bundles/nifi-shopify-bundle/nifi-shopify-processors/src/main/java/org/apache/nifi/processors/shopify/GetShopify.java +++ b/nifi-nar-bundles/nifi-shopify-bundle/nifi-shopify-processors/src/main/java/org/apache/nifi/processors/shopify/GetShopify.java @@ -327,7 +327,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session if (objectCountHolder.get() > 0) { flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), "application/json"); session.transfer(flowFile, REL_SUCCESS); - session.getProvenanceReporter().receive(flowFile, shopifyRestService.getBaseUriString()); + session.getProvenanceReporter().receive(flowFile, shopifyRestService.getBaseUriString(), REL_SUCCESS); } else { getLogger().debug("Empty response when requested Shopify resource: [{}]", resourceName); session.remove(flowFile); diff --git a/nifi-nar-bundles/nifi-slack-bundle/nifi-slack-processors/src/main/java/org/apache/nifi/processors/slack/PostSlack.java b/nifi-nar-bundles/nifi-slack-bundle/nifi-slack-processors/src/main/java/org/apache/nifi/processors/slack/PostSlack.java index f5817317640b..f6fb562cbe9d 100644 --- a/nifi-nar-bundles/nifi-slack-bundle/nifi-slack-processors/src/main/java/org/apache/nifi/processors/slack/PostSlack.java +++ b/nifi-nar-bundles/nifi-slack-bundle/nifi-slack-processors/src/main/java/org/apache/nifi/processors/slack/PostSlack.java @@ -359,7 +359,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro } session.transfer(flowFile, REL_SUCCESS); - session.getProvenanceReporter().send(flowFile, url); + session.getProvenanceReporter().send(flowFile, url, REL_SUCCESS); } catch (IOException | PostSlackException e) { getLogger().error("Failed to send message to Slack.", e); diff --git a/nifi-nar-bundles/nifi-slack-bundle/nifi-slack-processors/src/main/java/org/apache/nifi/processors/slack/PutSlack.java b/nifi-nar-bundles/nifi-slack-bundle/nifi-slack-processors/src/main/java/org/apache/nifi/processors/slack/PutSlack.java index d32a983e1f48..0d717633b03d 100644 --- a/nifi-nar-bundles/nifi-slack-bundle/nifi-slack-processors/src/main/java/org/apache/nifi/processors/slack/PutSlack.java +++ b/nifi-nar-bundles/nifi-slack-bundle/nifi-slack-processors/src/main/java/org/apache/nifi/processors/slack/PutSlack.java @@ -269,9 +269,9 @@ public void onTrigger(final ProcessContext context, final ProcessSession session if (responseCode >= 200 && responseCode < 300) { getLogger().info("Successfully posted message to Slack"); session.transfer(flowFile, REL_SUCCESS); - session.getProvenanceReporter().send(flowFile, url.toString()); + session.getProvenanceReporter().send(flowFile, url.toString(), REL_SUCCESS); } else { - getLogger().error("Failed to post message to Slack with response code {}", new Object[]{responseCode}); + getLogger().error("Failed to post message to Slack with response code {}", responseCode); flowFile = session.penalize(flowFile); session.transfer(flowFile, REL_FAILURE); context.yield(); diff --git a/nifi-nar-bundles/nifi-smb-bundle/nifi-smb-processors/src/main/java/org/apache/nifi/processors/smb/GetSmbFile.java 
b/nifi-nar-bundles/nifi-smb-bundle/nifi-smb-processors/src/main/java/org/apache/nifi/processors/smb/GetSmbFile.java index 244ae4639fea..28fd849499e1 100644 --- a/nifi-nar-bundles/nifi-smb-bundle/nifi-smb-processors/src/main/java/org/apache/nifi/processors/smb/GetSmbFile.java +++ b/nifi-nar-bundles/nifi-smb-bundle/nifi-smb-processors/src/main/java/org/apache/nifi/processors/smb/GetSmbFile.java @@ -494,15 +494,15 @@ public void onTrigger(final ProcessContext context, final ProcessSession session attributes.put(SHARE.getName(), shareName); flowFile = session.putAllAttributes(flowFile, attributes); - session.getProvenanceReporter().receive(flowFile, uri.toString(), importMillis); + session.getProvenanceReporter().receive(flowFile, uri.toString(), importMillis, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); - logger.info("added {} to flow", new Object[]{flowFile}); + logger.info("added {} to flow", flowFile); } catch (SMBApiException e) { // do not fail whole batch if a single file cannot be accessed if (e.getStatus() == NtStatus.STATUS_SHARING_VIOLATION) { - logger.info("Could not acquire sharing access for file {}", new Object[]{file}); + logger.info("Could not acquire sharing access for file {}", file); if (flowFile != null) { session.remove(flowFile); } diff --git a/nifi-nar-bundles/nifi-smb-bundle/nifi-smb-processors/src/main/java/org/apache/nifi/processors/smb/PutSmbFile.java b/nifi-nar-bundles/nifi-smb-bundle/nifi-smb-processors/src/main/java/org/apache/nifi/processors/smb/PutSmbFile.java index b7ee444ab986..42320c021498 100644 --- a/nifi-nar-bundles/nifi-smb-bundle/nifi-smb-processors/src/main/java/org/apache/nifi/processors/smb/PutSmbFile.java +++ b/nifi-nar-bundles/nifi-smb-bundle/nifi-smb-processors/src/main/java/org/apache/nifi/processors/smb/PutSmbFile.java @@ -403,7 +403,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session final URI provenanceUri = new URI("smb", hostname, "/" + destinationFullPath.replace('\\', '/'), null); final long processingTimeInNano = System.nanoTime() - processingStartTime; final long processingTimeInMilli = TimeUnit.MILLISECONDS.convert(processingTimeInNano, TimeUnit.NANOSECONDS); - session.getProvenanceReporter().send(flowFile, provenanceUri.toString(), processingTimeInMilli); + session.getProvenanceReporter().send(flowFile, provenanceUri.toString(), processingTimeInMilli, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } } catch (Exception e) { diff --git a/nifi-nar-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/operations/SNMPTrapReceiver.java b/nifi-nar-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/operations/SNMPTrapReceiver.java index d2eaf19528fa..e7bc416c6371 100644 --- a/nifi-nar-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/operations/SNMPTrapReceiver.java +++ b/nifi-nar-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/operations/SNMPTrapReceiver.java @@ -47,7 +47,7 @@ public void processPdu(final CommandResponderEvent event) { if (isValidTrapPdu(pdu)) { final ProcessSession processSession = processSessionFactory.createSession(); final FlowFile flowFile = createFlowFile(processSession, pdu); - processSession.getProvenanceReporter().create(flowFile, event.getPeerAddress() + "/" + pdu.getRequestID()); + processSession.getProvenanceReporter().create(flowFile, event.getPeerAddress() + "/" + pdu.getRequestID(), pdu.getErrorStatus() == PDU.noError ? 
REL_SUCCESS : REL_FAILURE); if (pdu.getErrorStatus() == PDU.noError) { processSession.transfer(flowFile, REL_SUCCESS); } else { diff --git a/nifi-nar-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/processors/AbstractSNMPProcessor.java b/nifi-nar-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/processors/AbstractSNMPProcessor.java index 43c3a1095dc6..27f21422941a 100644 --- a/nifi-nar-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/processors/AbstractSNMPProcessor.java +++ b/nifi-nar-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/processors/AbstractSNMPProcessor.java @@ -111,19 +111,23 @@ public void close() { } } - protected void handleResponse(final ProcessContext context, final ProcessSession processSession, final FlowFile flowFile, final SNMPSingleResponse response, + protected Relationship handleResponse(final ProcessContext context, final ProcessSession processSession, final FlowFile flowFile, final SNMPSingleResponse response, final Relationship success, final Relationship failure, final String provenanceAddress) { + final Relationship outgoingRelationship; final SNMPResponseStatus snmpResponseStatus = processResponse(response); processSession.putAllAttributes(flowFile, response.getAttributes()); if (snmpResponseStatus.getErrorStatus() == ErrorStatus.FAILURE) { getLogger().error("SNMP request failed, response error: " + snmpResponseStatus.getErrorMessage()); - processSession.getProvenanceReporter().modifyAttributes(flowFile, response.getTargetAddress() + provenanceAddress); + processSession.getProvenanceReporter().modifyAttributes(flowFile, response.getTargetAddress() + provenanceAddress, failure); + outgoingRelationship = failure; processSession.transfer(flowFile, failure); context.yield(); } else { - processSession.getProvenanceReporter().modifyAttributes(flowFile, response.getTargetAddress() + provenanceAddress); + processSession.getProvenanceReporter().modifyAttributes(flowFile, response.getTargetAddress() + provenanceAddress, success); + outgoingRelationship = success; processSession.transfer(flowFile, success); } + return outgoingRelationship; } protected SNMPResponseStatus processResponse(final SNMPSingleResponse response) { diff --git a/nifi-nar-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/processors/GetSNMP.java b/nifi-nar-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/processors/GetSNMP.java index 2aa29ab8ef2a..7afa58f5dfb4 100644 --- a/nifi-nar-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/processors/GetSNMP.java +++ b/nifi-nar-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/processors/GetSNMP.java @@ -184,11 +184,11 @@ private void performSnmpWalkWithFlowFile(ProcessSession processSession, FlowFile final SNMPTreeResponse response = optionalResponse.get(); response.logErrors(getLogger()); processSession.putAllAttributes(flowFile, response.getAttributes()); - processSession.getProvenanceReporter().modifyAttributes(flowFile, response.getTargetAddress() + "/walk"); + processSession.getProvenanceReporter().modifyAttributes(flowFile, response.getTargetAddress() + "/walk", response.isError() ? REL_FAILURE : REL_SUCCESS); processSession.transfer(flowFile, response.isError() ? 
REL_FAILURE : REL_SUCCESS); } else { getLogger().warn("No SNMP specific attributes found in flowfile."); - processSession.getProvenanceReporter().receive(flowFile, "/walk"); + processSession.getProvenanceReporter().receive(flowFile, "/walk", REL_FAILURE); processSession.transfer(flowFile, REL_FAILURE); } } @@ -198,7 +198,7 @@ private void performSnmpWalkWithoutFlowFile(ProcessSession processSession, Strin response.logErrors(getLogger()); final FlowFile outgoingFlowFile = processSession.create(); processSession.putAllAttributes(outgoingFlowFile, response.getAttributes()); - processSession.getProvenanceReporter().create(outgoingFlowFile, response.getTargetAddress() + "/walk"); + processSession.getProvenanceReporter().create(outgoingFlowFile, response.getTargetAddress() + "/walk", REL_SUCCESS); processSession.transfer(outgoingFlowFile, REL_SUCCESS); } @@ -222,8 +222,9 @@ private void performSnmpGetWithoutFlowFile(ProcessContext context, ProcessSessio final SNMPSingleResponse response = snmpHandler.get(oid); final FlowFile outgoingFlowFile = processSession.create(); processSession.putAllAttributes(outgoingFlowFile, textualOidMap); - processSession.getProvenanceReporter().receive(outgoingFlowFile, response.getTargetAddress() + "/get"); - handleResponse(context, processSession, outgoingFlowFile, response, REL_SUCCESS, REL_FAILURE, "/get"); + final Relationship outgoingRelationship = handleResponse(context, processSession, outgoingFlowFile, response, REL_SUCCESS, REL_FAILURE, "/get"); + processSession.getProvenanceReporter().receive(outgoingFlowFile, response.getTargetAddress() + "/get", outgoingRelationship); + } private void performSnmpGetWithFlowFile(ProcessContext context, ProcessSession processSession, FlowFile flowFile, Map textualOidMap) throws IOException { diff --git a/nifi-nar-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/processors/SetSNMP.java b/nifi-nar-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/processors/SetSNMP.java index 74d18a071a41..24361e5a02e0 100644 --- a/nifi-nar-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/processors/SetSNMP.java +++ b/nifi-nar-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/processors/SetSNMP.java @@ -114,8 +114,8 @@ public void onTrigger(final ProcessContext context, final ProcessSession process processSession.remove(flowFile); final FlowFile outgoingFlowFile = processSession.create(); final SNMPSingleResponse response = optionalResponse.get(); - processSession.getProvenanceReporter().receive(outgoingFlowFile, "/set"); - handleResponse(context, processSession, outgoingFlowFile, response, REL_SUCCESS, REL_FAILURE, "/set"); + final Relationship outgoingRelationship = handleResponse(context, processSession, outgoingFlowFile, response, REL_SUCCESS, REL_FAILURE, "/set"); + processSession.getProvenanceReporter().receive(outgoingFlowFile, "/set", outgoingRelationship); } else { getLogger().warn("No SNMP specific attributes found in flowfile."); processSession.transfer(flowFile, REL_FAILURE); diff --git a/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/ConsumeTwitter.java b/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/ConsumeTwitter.java index 2517456d74c3..d204e6ac1a33 100644 --- 
a/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/ConsumeTwitter.java +++ b/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/ConsumeTwitter.java @@ -340,7 +340,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro final String endpointName = context.getProperty(ENDPOINT).getValue(); final String transitUri = tweetStreamService.getTransitUri(endpointName); - session.getProvenanceReporter().receive(flowFile, transitUri); + session.getProvenanceReporter().receive(flowFile, transitUri, REL_SUCCESS); } @OnPrimaryNodeStateChange diff --git a/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java b/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java index 0e17106a14c6..6ef930c83421 100644 --- a/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java +++ b/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java @@ -416,7 +416,7 @@ public void process(final OutputStream out) throws IOException { flowFile = session.putAllAttributes(flowFile, attributes); session.transfer(flowFile, REL_SUCCESS); - session.getProvenanceReporter().receive(flowFile, Constants.STREAM_HOST + client.getEndpoint().getURI()); + session.getProvenanceReporter().receive(flowFile, Constants.STREAM_HOST + client.getEndpoint().getURI(), REL_SUCCESS); } private static class FollowingValidator implements Validator { diff --git a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java index ce6e3fd99323..02e4526dd16d 100644 --- a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java +++ b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java @@ -409,7 +409,7 @@ public void process(final OutputStream out) throws IOException { transitUri.append(":").append(context.getProperty(COLLECTION).evaluateAttributeExpressions().getValue()); } final long duration = timer.getDuration(TimeUnit.MILLISECONDS); - session.getProvenanceReporter().receive(flowFile, transitUri.toString(), duration); + session.getProvenanceReporter().receive(flowFile, transitUri.toString(), duration, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } diff --git a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java index 5bfc7b7a0b38..afc06202531d 100644 --- a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java +++ b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java @@ -260,11 +260,11 @@ public String getContentType() { if (error.get() != null) { getLogger().error("Failed to send {} to Solr due to {}; routing to failure", - new Object[]{flowFile, error.get()}); + 
flowFile, error.get()); session.transfer(flowFile, REL_FAILURE); } else if (connectionError.get() != null) { getLogger().error("Failed to send {} to Solr due to {}; routing to connection_failure", - new Object[]{flowFile, connectionError.get()}); + flowFile, connectionError.get()); session.penalize(flowFile); session.transfer(flowFile, REL_CONNECTION_FAILURE); } else { @@ -275,7 +275,7 @@ public String getContentType() { } final long duration = timer.getDuration(TimeUnit.MILLISECONDS); - session.getProvenanceReporter().send(flowFile, transitUri.toString(), duration, true); + session.getProvenanceReporter().send(flowFile, transitUri.toString(), duration, true, REL_SUCCESS); getLogger().info("Successfully sent {} to Solr in {} millis", new Object[]{flowFile, duration}); session.transfer(flowFile, REL_SUCCESS); } diff --git a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrRecord.java b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrRecord.java index 90dccfd91a15..1bcdbbe80263 100644 --- a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrRecord.java +++ b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrRecord.java @@ -273,11 +273,11 @@ public void doOnTrigger(final ProcessContext context, final ProcessSession sessi if (error.get() != null) { getLogger().error("Failed to send all the records of the {} to Solr due to {}; routing to failure", - new Object[]{flowFile, error.get()}); + flowFile, error.get()); session.transfer(flowFile, REL_FAILURE); } else if (connectionError.get() != null) { getLogger().error("Failed to send {} to Solr due to {}; routing to connection_failure", - new Object[]{flowFile, connectionError.get()}); + flowFile, connectionError.get()); flowFile = session.penalize(flowFile); session.transfer(flowFile, REL_CONNECTION_FAILURE); } else { @@ -288,7 +288,7 @@ public void doOnTrigger(final ProcessContext context, final ProcessSession sessi } final long duration = timer.getDuration(TimeUnit.MILLISECONDS); - session.getProvenanceReporter().send(flowFile, transitUri.toString(), duration, true); + session.getProvenanceReporter().send(flowFile, transitUri.toString(), duration, true, REL_SUCCESS); getLogger().info("Successfully sent {} to Solr in {} millis", new Object[]{flowFile, duration}); session.transfer(flowFile, REL_SUCCESS); } diff --git a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/QuerySolr.java b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/QuerySolr.java index 76686c75ff13..0a6584f020ce 100644 --- a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/QuerySolr.java +++ b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/QuerySolr.java @@ -469,7 +469,7 @@ public void doOnTrigger(final ProcessContext context, final ProcessSession sessi } }); flowFileFacets = session.putAttribute(flowFileFacets, CoreAttributes.MIME_TYPE.key(), MIME_TYPE_JSON); - session.getProvenanceReporter().receive(flowFileFacets, transitUri.toString(), timer.getDuration(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().receive(flowFileFacets, transitUri.toString(), timer.getDuration(TimeUnit.MILLISECONDS), FACETS); session.transfer(flowFileFacets, 
FACETS); } @@ -484,7 +484,7 @@ public void doOnTrigger(final ProcessContext context, final ProcessSession sessi } }); flowFileStats = session.putAttribute(flowFileStats, CoreAttributes.MIME_TYPE.key(), MIME_TYPE_JSON); - session.getProvenanceReporter().receive(flowFileStats, transitUri.toString(), timer.getDuration(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().receive(flowFileStats, transitUri.toString(), timer.getDuration(TimeUnit.MILLISECONDS), STATS); session.transfer(flowFileStats, STATS); } processFacetsAndStats = false; @@ -495,7 +495,7 @@ public void doOnTrigger(final ProcessContext context, final ProcessSession sessi final Integer totalDocumentsReturned = solrQuery.getStart() + solrQuery.getRows(); if (totalDocumentsReturned < totalNumberOfResults) { solrQuery.setStart(totalDocumentsReturned); - session.getProvenanceReporter().receive(flowFileResponse, transitUri.toString(), timer.getDuration(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().receive(flowFileResponse, transitUri.toString(), timer.getDuration(TimeUnit.MILLISECONDS), RESULTS); session.transfer(flowFileResponse, RESULTS); flowFileResponse = session.create(flowFileResponse); } else { @@ -518,7 +518,7 @@ public void doOnTrigger(final ProcessContext context, final ProcessSession sessi } if (!flowFileResponse.isPenalized()) { - session.getProvenanceReporter().receive(flowFileResponse, transitUri.toString(), timer.getDuration(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().receive(flowFileResponse, transitUri.toString(), timer.getDuration(TimeUnit.MILLISECONDS), RESULTS); session.transfer(flowFileResponse, RESULTS); } diff --git a/nifi-nar-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/GetSplunk.java b/nifi-nar-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/GetSplunk.java index f6884103972f..f41ed4429ba5 100644 --- a/nifi-nar-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/GetSplunk.java +++ b/nifi-nar-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/GetSplunk.java @@ -504,12 +504,9 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro final InputStream exportSearch = export; FlowFile flowFile = session.create(); - flowFile = session.write(flowFile, new OutputStreamCallback() { - @Override - public void process(OutputStream rawOut) throws IOException { - try (BufferedOutputStream out = new BufferedOutputStream(rawOut)) { - IOUtils.copyLarge(exportSearch, out); - } + flowFile = session.write(flowFile, rawOut -> { + try (BufferedOutputStream out = new BufferedOutputStream(rawOut)) { + IOUtils.copyLarge(exportSearch, out); } }); @@ -519,7 +516,7 @@ public void process(OutputStream rawOut) throws IOException { attributes.put(QUERY_ATTR, query); flowFile = session.putAllAttributes(flowFile, attributes); - session.getProvenanceReporter().receive(flowFile, transitUri); + session.getProvenanceReporter().receive(flowFile, transitUri, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); getLogger().debug("Received {} from Splunk", new Object[] {flowFile}); diff --git a/nifi-nar-bundles/nifi-spring-bundle/nifi-spring-processors/src/main/java/org/apache/nifi/spring/SpringContextProcessor.java b/nifi-nar-bundles/nifi-spring-bundle/nifi-spring-processors/src/main/java/org/apache/nifi/spring/SpringContextProcessor.java index a1d7edcee4e1..014c09ea9def 100644 --- 
a/nifi-nar-bundles/nifi-spring-bundle/nifi-spring-processors/src/main/java/org/apache/nifi/spring/SpringContextProcessor.java +++ b/nifi-nar-bundles/nifi-spring-bundle/nifi-spring-processors/src/main/java/org/apache/nifi/spring/SpringContextProcessor.java @@ -308,7 +308,7 @@ private void sendToSpring(FlowFile flowFileToProcess, ProcessContext context, Pr try { sent = this.exchanger.send(payload, flowFileToProcess.getAttributes(), this.sendTimeout); if (sent) { - processSession.getProvenanceReporter().send(flowFileToProcess, this.applicationContextConfigFileName); + processSession.getProvenanceReporter().send(flowFileToProcess, this.applicationContextConfigFileName, null); processSession.remove(flowFileToProcess); } else { processSession.transfer(processSession.penalize(flowFileToProcess), REL_FAILURE); @@ -342,7 +342,7 @@ public void process(final OutputStream out) throws IOException { flowFileToProcess = processSession.putAllAttributes(flowFileToProcess, this.extractFlowFileAttributesFromMessageHeaders(msgFromSpring.getHeaders())); processSession.transfer(flowFileToProcess, REL_SUCCESS); - processSession.getProvenanceReporter().receive(flowFileToProcess, this.applicationContextConfigFileName); + processSession.getProvenanceReporter().receive(flowFileToProcess, this.applicationContextConfigFileName, REL_SUCCESS); } } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractExecuteSQL.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractExecuteSQL.java index 0816bbf163e6..2b4acd996294 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractExecuteSQL.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractExecuteSQL.java @@ -379,9 +379,11 @@ public void onTrigger(final ProcessContext context, final ProcessSession session // Report a FETCH event if there was an incoming flow file, or a RECEIVE event otherwise if (context.hasIncomingConnection()) { - session.getProvenanceReporter().fetch(resultSetFF, "Retrieved " + nrOfRows.get() + " rows", executionTimeElapsed + fetchTimeElapsed); + session.getProvenanceReporter() + .fetch(resultSetFF, "Retrieved " + nrOfRows.get() + " rows", executionTimeElapsed + fetchTimeElapsed, REL_SUCCESS); } else { - session.getProvenanceReporter().receive(resultSetFF, "Retrieved " + nrOfRows.get() + " rows", executionTimeElapsed + fetchTimeElapsed); + session.getProvenanceReporter() + .receive(resultSetFF, "Retrieved " + nrOfRows.get() + " rows", executionTimeElapsed + fetchTimeElapsed, REL_SUCCESS); } resultSetFlowFiles.add(resultSetFF); diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java index b4177b964eb8..db90fc1171b2 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java @@ -384,7 +384,7 @@ public void onTrigger(final ProcessContext context, final 
ProcessSessionFactory logger.debug("{} contains {} records; transferring to 'success'", new Object[]{fileToProcess, nrOfRows.get()}); - session.getProvenanceReporter().receive(fileToProcess, jdbcURL, stopWatch.getElapsed(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().receive(fileToProcess, jdbcURL, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS); resultSetFlowFiles.add(fileToProcess); // If we've reached the batch size, send out the flow files if (outputBatchSize > 0 && resultSetFlowFiles.size() >= outputBatchSize) { diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractRecordProcessor.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractRecordProcessor.java index 1ea70e208143..14a5142f057a 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractRecordProcessor.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractRecordProcessor.java @@ -191,12 +191,13 @@ public void process(final InputStream in, final OutputStream out) throws IOExcep if(!includeZeroRecordFlowFiles && recordCount.get() == 0){ session.remove(flowFile); } else { + session.getProvenanceReporter().modifyContent(flowFile, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } final int count = recordCount.get(); session.adjustCounter("Records Processed", count, false); - getLogger().info("Successfully converted {} records for {}", new Object[] {count, flowFile}); + getLogger().info("Successfully converted {} records for {}", count, flowFile); } protected abstract Record process(Record record, FlowFile flowFile, ProcessContext context, long count); diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java index 74f06e1dce29..59654e330742 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java @@ -147,11 +147,11 @@ public void process(InputStream in, OutputStream out) throws IOException { }); } - logger.info("Successfully {} {}", new Object[] {encode ? "encoded" : "decoded", flowFile}); - session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS)); + logger.info("Successfully {} {}", encode ? "encoded" : "decoded", flowFile); + session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } catch (ProcessException e) { - logger.error("Failed to {} {} due to {}", new Object[] {encode ? "encode" : "decode", flowFile, e}); + logger.error("Failed to {} {} due to {}", encode ? 
"encode" : "decode", flowFile, e); session.transfer(flowFile, REL_FAILURE); } } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java index dc83ca66865e..2205da53f7f9 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java @@ -445,10 +445,10 @@ public void process(final InputStream rawIn, final OutputStream rawOut) throws I logger.info("Successfully {}ed {} using {} compression format; size changed from {} to {} bytes", new Object[]{compressionMode.toLowerCase(), flowFile, compressionFormat, sizeBeforeCompression, sizeAfterCompression}); - session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getDuration(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getDuration(TimeUnit.MILLISECONDS), REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } catch (final ProcessException e) { - logger.error("Unable to {} {} using {} compression format due to {}; routing to failure", new Object[]{compressionMode.toLowerCase(), flowFile, compressionFormat, e}); + logger.error("Unable to {} {} using {} compression format due to {}; routing to failure", compressionMode.toLowerCase(), flowFile, compressionFormat, e); session.transfer(flowFile, REL_FAILURE); } } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java index ff6964b5610d..d00f0d8399a5 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java @@ -171,9 +171,9 @@ public void process(final InputStream rawIn, final OutputStream rawOut) throws I } }); - session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS); logger.info("successfully converted characters from {} to {} for {}", - new Object[]{inputCharset, outputCharset, flowFile}); + inputCharset, outputCharset, flowFile); session.transfer(flowFile, REL_SUCCESS); } catch (final Exception e) { throw new ProcessException(e); diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CryptographicHashAttribute.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CryptographicHashAttribute.java index cbc24c63af16..bb160ca72105 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CryptographicHashAttribute.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CryptographicHashAttribute.java @@ -223,7 
+223,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session String value = hashValue(algorithm, entry.getValue(), charset); session.putAttribute(flowFile, attributeToGeneratedNameMap.get(entry.getKey()), value); } - session.getProvenanceReporter().modifyAttributes(flowFile); + session.getProvenanceReporter().modifyAttributes(flowFile, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CryptographicHashContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CryptographicHashContent.java index f681ab882170..a348ad250dfb 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CryptographicHashContent.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CryptographicHashContent.java @@ -146,17 +146,17 @@ public void onTrigger(final ProcessContext context, final ProcessSession session // Determine the destination attribute name final String attributeName = "content_" + algorithmName; - logger.debug("Writing {} hash to attribute '{}'", new Object[]{algorithmName, attributeName}); + logger.debug("Writing {} hash to attribute '{}'", algorithmName, attributeName); // Write the attribute flowFile = session.putAttribute(flowFile, attributeName, hashValueHolder.get()); logger.info("Successfully added attribute '{}' to {} with a value of {}; routing to success", new Object[]{attributeName, flowFile, hashValueHolder.get()}); // Update provenance and route to success - session.getProvenanceReporter().modifyAttributes(flowFile); + session.getProvenanceReporter().modifyAttributes(flowFile, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } catch (ProcessException e) { - logger.error("Failed to process {} due to {}; routing to failure", new Object[]{flowFile, e}); + logger.error("Failed to process {} due to {}; routing to failure", flowFile, e); session.transfer(flowFile, REL_FAILURE); } } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java index 54874a2de267..e708bf70d160 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java @@ -151,7 +151,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session flowFile = session.write(flowFile, callback); getLogger().info("{} completed {}", encode ? "Encoding" : "Decoding", flowFile); - session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } catch (final Exception e) { getLogger().error("{} failed {}", encode ? 
"Encoding" : "Decoding", flowFile, e); diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java index 184190ed3fd7..93255c9632f0 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java @@ -623,11 +623,11 @@ public void onTrigger(final ProcessContext context, final ProcessSession session encryptor.updateAttributes(clonedAttributes); flowFile = session.putAllAttributes(flowFile, clonedAttributes); - logger.info("successfully {}crypted {}", new Object[]{encrypt ? "en" : "de", flowFile}); - session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS)); + logger.info("successfully {}crypted {}", encrypt ? "en" : "de", flowFile); + session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } catch (final ProcessException e) { - logger.error("Cannot {}crypt {} - ", new Object[]{encrypt ? "en" : "de", flowFile, e}); + logger.error("Cannot {}crypt {} - ", encrypt ? "en" : "de", flowFile, e); session.transfer(flowFile, REL_FAILURE); } } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java index b72a8718db56..dc728b812f5e 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java @@ -334,7 +334,7 @@ public void onTrigger(final ProcessContext processContext, final ProcessSession outputStream.write(resultRepresentation.getBytes(StandardCharsets.UTF_8)); } }); - processSession.getProvenanceReporter().modifyContent(flowFile, "Replaced content with result of expression " + jsonPathExp.getPath()); + processSession.getProvenanceReporter().modifyContent(flowFile, "Replaced content with result of expression " + jsonPathExp.getPath(), REL_MATCH); } } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java index f325b503c789..649815c4b000 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java @@ -369,11 +369,11 @@ public void onTrigger(final ProcessContext context, final ProcessSession session final Relationship destRel = xpathResults.isEmpty() ? 
REL_NO_MATCH : REL_MATCH; logger.info("XPath evaluation on {} completed with results [{}]: content updated", flowFile, xpathResults.size()); session.transfer(flowFile, destRel); - session.getProvenanceReporter().modifyAttributes(flowFile); + session.getProvenanceReporter().modifyAttributes(flowFile, destRel); } else if (DESTINATION_CONTENT.equals(destination)) { logger.info("XPath evaluation on {} completed: content updated", flowFile); session.transfer(flowFile, REL_MATCH); - session.getProvenanceReporter().modifyContent(flowFile); + session.getProvenanceReporter().modifyContent(flowFile, REL_MATCH); } } else { logger.error("XPath evaluation on {} failed", flowFile, error.get()); diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java index 8be53e256594..566e688fbdcb 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java @@ -352,7 +352,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session final Relationship destRel = xQueryResults.isEmpty() ? REL_NO_MATCH : REL_MATCH; logger.info("XQuery results found [{}] for {}", xQueryResults.size(), flowFile); session.transfer(flowFile, destRel); - session.getProvenanceReporter().modifyAttributes(flowFile); + session.getProvenanceReporter().modifyAttributes(flowFile, destRel); } else { // if (DESTINATION_CONTENT.equals(destination)) { if (!childrenFlowFiles.isEmpty()) { logger.info("XQuery results found [{}] for {} FlowFiles created [{}]", xQueryResults.size(), flowFile, childrenFlowFiles.size()); @@ -361,7 +361,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } else { logger.info("XQuery results found for {} content updated", flowFile); session.transfer(flowFile, REL_MATCH); - session.getProvenanceReporter().modifyContent(flowFile); + session.getProvenanceReporter().modifyContent(flowFile, REL_MATCH); } } } // end flowFileLoop diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java index 6ce06df09e9d..851fd853907a 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java @@ -314,7 +314,7 @@ public void process(final OutputStream flowFileOut) throws IOException { flowFile = session.putAllAttributes(flowFile, attributes); // All was good. Generate event and transfer FlowFile. 
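// The CREATE event below shows the pattern applied throughout this change: the
// provenance call names the same relationship that session.transfer() routes to,
// so the event records where the FlowFile went. A minimal sketch of the call shape,
// with "details" standing in for whatever string the processor wants to record:
//
//   session.getProvenanceReporter().create(flowFile, details, REL_SUCCESS);
//   session.transfer(flowFile, REL_SUCCESS);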
- session.getProvenanceReporter().create(flowFile, "Created from command: " + commandString); + session.getProvenanceReporter().create(flowFile, "Created from command: " + commandString, REL_SUCCESS); getLogger().info("Created {} and routed to success", flowFile); session.transfer(flowFile, REL_SUCCESS); } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractGrok.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractGrok.java index d3b1515f51ae..87299fcf5496 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractGrok.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractGrok.java @@ -322,7 +322,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } flowFile = session.putAllAttributes(flowFile, grokResults); - session.getProvenanceReporter().modifyAttributes(flowFile); + session.getProvenanceReporter().modifyAttributes(flowFile, REL_MATCH); session.transfer(flowFile, REL_MATCH); getLogger().info("Matched {} Grok Expressions and added attributes to FlowFile {}", grokResults.size(), flowFile); @@ -330,7 +330,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session case FLOWFILE_CONTENT: FlowFile conFlowfile = session.write(flowFile, outputStream -> objectMapper.writeValue(outputStream, captureMap)); conFlowfile = session.putAttribute(conFlowfile, CoreAttributes.MIME_TYPE.key(), APPLICATION_JSON); - session.getProvenanceReporter().modifyContent(conFlowfile, "Replaced content with parsed Grok fields and values", stopWatch.getElapsed(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().modifyContent(conFlowfile, "Replaced content with parsed Grok fields and values", stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_MATCH); session.transfer(conFlowfile, REL_MATCH); break; diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java index d25396983e87..b269f297bfd9 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java @@ -474,7 +474,7 @@ public void process(InputStream in) throws IOException { if (!regexResults.isEmpty()) { flowFile = session.putAllAttributes(flowFile, regexResults); - session.getProvenanceReporter().modifyAttributes(flowFile); + session.getProvenanceReporter().modifyAttributes(flowFile, REL_MATCH); session.transfer(flowFile, REL_MATCH); logger.info("Matched {} Regular Expressions and added attributes to FlowFile {}", regexResults.size(), flowFile); } else { diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFile.java index e16eec71892b..6ea66dae4ad6 100644 --- 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFile.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFile.java @@ -272,7 +272,8 @@ public void onTrigger(final ProcessContext context, final ProcessSession session return; } - session.getProvenanceReporter().fetch(flowFile, file.toURI().toString(), "Replaced content of FlowFile with contents of " + file.toURI(), stopWatch.getElapsed(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter() + .fetch(flowFile, file.toURI().toString(), "Replaced content of FlowFile with contents of " + file.toURI(), stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); // It is critical that we commit the session before we perform the Completion Strategy. Otherwise, we could have a case where we diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFileTransfer.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFileTransfer.java index 227904282d95..ce1e7ca5bbfd 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFileTransfer.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFileTransfer.java @@ -299,7 +299,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session flowFile = session.putAllAttributes(flowFile, attributes); // emit provenance event and transfer FlowFile - session.getProvenanceReporter().fetch(flowFile, protocolName + "://" + host + ":" + port + "/" + filename, stopWatch.getElapsed(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().fetch(flowFile, protocolName + "://" + host + ":" + port + "/" + filename, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); // it is critical that we commit the session before moving/deleting the remote file. Otherwise, we could have a situation where diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java index d8846c767d63..6ed760731358 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java @@ -254,16 +254,11 @@ public void onTrigger(final ProcessContext context, final ProcessSession session FlowFile flowFile = session.create(); final byte[] writtenData = uniqueData ? 
generateData(context) : data; if (writtenData.length > 0) { - flowFile = session.write(flowFile, new OutputStreamCallback() { - @Override - public void process(final OutputStream out) throws IOException { - out.write(writtenData); - } - }); + flowFile = session.write(flowFile, out -> out.write(writtenData)); } flowFile = session.putAllAttributes(flowFile, generatedAttributes); - session.getProvenanceReporter().create(flowFile); + session.getProvenanceReporter().create(flowFile, null, SUCCESS); session.transfer(flowFile, SUCCESS); } } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java index 3c37be84233d..e82d3f5403c0 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java @@ -451,9 +451,9 @@ public void onTrigger(final ProcessContext context, final ProcessSession session flowFile = session.putAllAttributes(flowFile, attributes); } - session.getProvenanceReporter().receive(flowFile, file.toURI().toString(), importMillis); + session.getProvenanceReporter().receive(flowFile, file.toURI().toString(), importMillis, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); - logger.info("added {} to flow", new Object[]{flowFile}); + logger.info("added {} to flow", flowFile); if (!isScheduled()) { // if processor stopped, put the rest of the files back on the queue. queueLock.lock(); diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFileTransfer.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFileTransfer.java index 6afbc41aa230..269a4ebb7a9b 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFileTransfer.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFileTransfer.java @@ -204,15 +204,15 @@ public void onTrigger(final ProcessContext context, final ProcessSession session flowFile = session.putAllAttributes(flowFile, attributes); - session.getProvenanceReporter().receive(flowFile, transfer.getProtocolName() + "://" + hostname + "/" + file.getFullPathFileName(), millis); + session.getProvenanceReporter().receive(flowFile, transfer.getProtocolName() + "://" + hostname + "/" + file.getFullPathFileName(), millis, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); logger.info("Successfully retrieved {} from {} in {} milliseconds at a rate of {} and transferred to success", - new Object[]{flowFile, hostname, millis, dataRate}); + flowFile, hostname, millis, dataRate); flowFilesReceived.put(flowFile, file.getFullPathFileName()); } catch (final IOException e) { context.yield(); - logger.error("Unable to retrieve file {} due to {}", new Object[]{file.getFullPathFileName(), e}); + logger.error("Unable to retrieve file {} due to {}", file.getFullPathFileName(), e); try { transfer.close(); } catch (IOException e1) { diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java 
b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java index 6fd55acf2a6a..5536a5506eaf 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java @@ -493,7 +493,7 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory final long flowFileSize = flowFile.getSize(); stopWatch.stop(); final String dataRate = stopWatch.calculateDataRate(flowFileSize); - session.getProvenanceReporter().receive(flowFile, url, stopWatch.getDuration(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().receive(flowFile, url, stopWatch.getDuration(TimeUnit.MILLISECONDS), REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); logger.info("Successfully received {} from {} at a rate of {}; transferred to success", new Object[]{flowFile, url, dataRate}); diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java index 189f8e2da01c..a17b60a33e72 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java @@ -773,7 +773,7 @@ private void forwardFlowFile(final ProcessSession session, final long start, fin final long receiveMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start); final String subjectDn = flowFile.getAttribute(CertificateAttribute.HTTP_SUBJECT_DN.getName()); session.getProvenanceReporter().receive(flowFile, HTTPUtils.getURI(flowFile.getAttributes()), - "Received from " + request.getRemoteAddr() + (subjectDn == null ? "" : " with DN=" + subjectDn), receiveMillis); + "Received from " + request.getRemoteAddr() + (subjectDn == null ? 
"" : " with DN=" + subjectDn), receiveMillis, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); getLogger().debug("Transferred {} to [{}] Remote Address [{}] ", flowFile, REL_SUCCESS, request.getRemoteAddr()); } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java index 96aa93a80351..a849d149d1ca 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java @@ -207,13 +207,13 @@ public void onTrigger(final ProcessContext context, final ProcessSession session try { contextMap.complete(contextIdentifier); } catch (final RuntimeException ce) { - getLogger().error("Failed to complete HTTP Transaction for {} due to {}", new Object[]{flowFile, ce}); + getLogger().error("Failed to complete HTTP Transaction for {} due to {}", flowFile, ce); session.transfer(flowFile, REL_FAILURE); return; } - session.getProvenanceReporter().send(flowFile, HTTPUtils.getURI(flowFile.getAttributes()), stopWatch.getElapsed(TimeUnit.MILLISECONDS)); - getLogger().info("Successfully responded to HTTP Request for {} with status code {}", new Object[]{flowFile, statusCode}); + session.getProvenanceReporter().send(flowFile, HTTPUtils.getURI(flowFile.getAttributes()), stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS); + getLogger().info("Successfully responded to HTTP Request for {} with status code {}", flowFile, statusCode); session.transfer(flowFile, REL_SUCCESS); } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java index b8519fde6df8..798d7a6deaf7 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java @@ -230,9 +230,9 @@ public void onTrigger(final ProcessContext context, final ProcessSession session // create group ID final String hashValue = DigestUtils.md5Hex(hashableValue.toString()); - logger.info("adding Hash Value {} to attributes for {} and routing to success", new Object[]{hashValue, flowFile}); + logger.info("adding Hash Value {} to attributes for {} and routing to success", hashValue, flowFile); flowFile = session.putAttribute(flowFile, context.getProperty(HASH_VALUE_ATTRIBUTE).getValue(), hashValue); - session.getProvenanceReporter().modifyAttributes(flowFile); + session.getProvenanceReporter().modifyAttributes(flowFile, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java index 52608b05e6ec..3a633454af17 100644 --- 
a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java @@ -155,7 +155,7 @@ public void process(final InputStream in) throws IOException { final String attributeName = context.getProperty(ATTRIBUTE_NAME).getValue(); flowFile = session.putAttribute(flowFile, attributeName, hashValueHolder.get()); logger.info("Successfully added attribute '{}' to {} with a value of {}; routing to success", new Object[]{attributeName, flowFile, hashValueHolder.get()}); - session.getProvenanceReporter().modifyAttributes(flowFile); + session.getProvenanceReporter().modifyAttributes(flowFile, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } catch (final ProcessException e) { logger.error("Failed to process {} due to {}; routing to failure", new Object[]{flowFile, e}); diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java index 5e68ce7416b2..48541d211991 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java @@ -74,7 +74,7 @@ *
<p> - * MIME Type detection is performed by Apache Tika; more information about detection is available at http://tika.apache.org. + * MIME Type detection is performed by Apache Tika; more information about detection is available at https://tika.apache.org. * <ul> * <li>application/flowfile-v3</li> * </ul>
@@ -252,7 +252,7 @@ public void process(final InputStream stream) throws IOException { logger.info("Identified {} as having MIME Type {}", new Object[]{flowFile, mimeType}); } - session.getProvenanceReporter().modifyAttributes(flowFile); + session.getProvenanceReporter().modifyAttributes(flowFile, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java index e0023fc82113..6dfcaecc5650 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java @@ -945,7 +945,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro logRequest(logger, httpRequest); if (httpRequest.body() != null) { - session.getProvenanceReporter().send(requestFlowFile, url.toExternalForm(), true); + session.getProvenanceReporter().send(requestFlowFile, url.toExternalForm(), true, RESPONSE); } final long startNanos = System.nanoTime(); @@ -1037,9 +1037,9 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro // emit provenance event final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); if (requestFlowFile != null) { - session.getProvenanceReporter().fetch(responseFlowFile, url.toExternalForm(), millis); + session.getProvenanceReporter().fetch(responseFlowFile, url.toExternalForm(), millis, RESPONSE); } else { - session.getProvenanceReporter().receive(responseFlowFile, url.toExternalForm(), millis); + session.getProvenanceReporter().receive(responseFlowFile, url.toExternalForm(), millis, RESPONSE); } } } @@ -1065,7 +1065,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro final long processingDuration = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); final String eventDetails = String.format("Response Body Attribute Added [%s] Processing Duration [%d ms]", attributeKey, processingDuration); - session.getProvenanceReporter().modifyAttributes(requestFlowFile, eventDetails); + session.getProvenanceReporter().modifyAttributes(requestFlowFile, eventDetails, null); // TODO: determine the appropriate Relationship for this attribute-only update } } finally { if (outputStreamToRequestAttribute != null) { diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/JmsConsumer.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/JmsConsumer.java index 2d8f96f5f4b0..70e57e2268a6 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/JmsConsumer.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/JmsConsumer.java @@ -206,10 +206,10 @@ public void process(final OutputStream rawOut) throws IOException { flowFile = session.putAllAttributes(flowFile, JmsFactory.createAttributeMap(message)); } - session.getProvenanceReporter().receive(flowFile, context.getProperty(URL).getValue()); + session.getProvenanceReporter().receive(flowFile, context.getProperty(URL).getValue(), REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS);
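// As elsewhere in this change, the RECEIVE event names the remote system (here the
// JMS broker URL) as the transit URI and carries the relationship the FlowFile is
// transferred to. A hedged sketch of the resulting call shape (URL is this
// processor's broker property):
//
//   final String transitUri = context.getProperty(URL).getValue();
//   session.getProvenanceReporter().receive(flowFile, transitUri, REL_SUCCESS);
//   session.transfer(flowFile, REL_SUCCESS);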
logger.info("Created {} from {} messages received from JMS Server and transferred to 'success'", - new Object[]{flowFile, msgsThisFlowFile.get()}); + flowFile, msgsThisFlowFile.get()); return new JmsProcessingSummary(flowFile.getSize(), message, flowFile); diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/JoltTransformJSON.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/JoltTransformJSON.java index feac10a5ffbc..42fef7004ace 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/JoltTransformJSON.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/JoltTransformJSON.java @@ -334,7 +334,7 @@ public void onTrigger(final ProcessContext context, ProcessSession session) thro final String transformType = context.getProperty(JOLT_TRANSFORM).getValue(); transformed = session.putAttribute(transformed, CoreAttributes.MIME_TYPE.key(), "application/json"); session.transfer(transformed, REL_SUCCESS); - session.getProvenanceReporter().modifyContent(transformed, "Modified With " + transformType, stopWatch.getElapsed(TimeUnit.MILLISECONDS)); + session.getProvenanceReporter().modifyContent(transformed, "Modified With " + transformType, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS); logger.info("Transform completed for {}", original); } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListDatabaseTables.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListDatabaseTables.java index 998ecf1a71a3..340a8f6ee9c6 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListDatabaseTables.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListDatabaseTables.java @@ -381,7 +381,7 @@ public void beginListing() { public void addToListing(final Map tableInformation, final String transitUri) { FlowFile flowFile = session.create(); flowFile = session.putAllAttributes(flowFile, tableInformation); - session.getProvenanceReporter().receive(flowFile, transitUri); + session.getProvenanceReporter().receive(flowFile, transitUri, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } @@ -459,7 +459,7 @@ public void finishListing() throws IOException { flowFile = session.putAllAttributes(flowFile, attributes); session.transfer(flowFile, REL_SUCCESS); - session.getProvenanceReporter().receive(flowFile, transitUri); + session.getProvenanceReporter().receive(flowFile, transitUri, REL_SUCCESS); } } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenRELP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenRELP.java index 86771dd656e6..42a625da5f15 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenRELP.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenRELP.java @@ -273,7 +273,7 @@ private void processEvents(final ProcessSession session, 
final Map<String, FlowFileEventBatch> batches) { final String senderHost = sender.startsWith("/") && sender.length() > 1 ? sender.substring(1) : sender; final String transitUri = new StringBuilder().append("tcp").append("://").append(senderHost).append(":").append(port).toString(); - session.getProvenanceReporter().receive(flowFile, transitUri); + session.getProvenanceReporter().receive(flowFile, transitUri, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDPRecord.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDPRecord.java index 2a20d5d45ee3..b079dacb84aa 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDPRecord.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDPRecord.java @@ -351,7 +351,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session session.transfer(flowFile, REL_SUCCESS); final String transitUri = getTransitUri(sender); - session.getProvenanceReporter().receive(flowFile, transitUri); + session.getProvenanceReporter().receive(flowFile, transitUri, REL_SUCCESS); } catch (final Exception e) { getLogger().error("Unable to properly complete record set due to: " + e.getMessage(), e); @@ -374,7 +374,7 @@ private void handleParseFailure(final StandardEvent event, final ProcessSession failureFlowFile = session.putAllAttributes(failureFlowFile, attributes); final String transitUri = getTransitUri(event.getSender()); - session.getProvenanceReporter().receive(failureFlowFile, transitUri); + session.getProvenanceReporter().receive(failureFlowFile, transitUri, REL_PARSE_FAILURE); session.transfer(failureFlowFile, REL_PARSE_FAILURE); diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java index e9d164f4c8fe..328c976cff4d 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java @@ -145,8 +145,8 @@ public void process(final InputStream in, final OutputStream out) throws IOExcep }); } - logger.info("Transferred {} to 'success'", new Object[]{ff}); - session.getProvenanceReporter().modifyContent(ff, stopWatch.getElapsed(TimeUnit.MILLISECONDS)); + logger.info("Transferred {} to 'success'", ff); + session.getProvenanceReporter().modifyContent(ff, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS); session.transfer(ff, REL_SUCCESS); } } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java index 2568f50e9b7f..b42ae7492c9e 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java @@ -318,7 +318,7 @@ public
void process(final OutputStream out) throws IOException { } }); - session.getProvenanceReporter().create(inactiveFlowFile); + session.getProvenanceReporter().create(inactiveFlowFile, null, REL_INACTIVE); session.transfer(inactiveFlowFile, REL_INACTIVE); logger.info("Transferred {} to 'inactive'", new Object[]{inactiveFlowFile}); } else { @@ -395,7 +395,7 @@ public void process(final OutputStream out) throws IOException { final byte[] outBytes = context.getProperty(ACTIVITY_RESTORED_MESSAGE).evaluateAttributeExpressions(activityRestoredFlowFile).getValue().getBytes(UTF8); activityRestoredFlowFile = session.write(activityRestoredFlowFile, out -> out.write(outBytes)); - session.getProvenanceReporter().create(activityRestoredFlowFile); + session.getProvenanceReporter().create(activityRestoredFlowFile, null, REL_ACTIVITY_RESTORED); session.transfer(activityRestoredFlowFile, REL_ACTIVITY_RESTORED); logger.info("Transferred {} to 'activity.restored'", new Object[]{activityRestoredFlowFile}); } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ParseCEF.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ParseCEF.java index 6590b3003994..d3eec00abe9a 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ParseCEF.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ParseCEF.java @@ -343,7 +343,7 @@ public void process(OutputStream out) throws IOException { flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), "application/json"); // Update the provenance for good measure - session.getProvenanceReporter().modifyContent(flowFile, "Replaced content with parsed CEF fields and values"); + session.getProvenanceReporter().modifyContent(flowFile, "Replaced content with parsed CEF fields and values", REL_SUCCESS); break; } diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java index 6258e370b9c9..bb5dfce6e1a1 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java @@ -781,10 +781,10 @@ public long getContentLength() { } logger.info("Successfully Posted {} to {} in {} at a rate of {}", - new Object[]{flowFileDescription, url, FormatUtils.formatMinutesSeconds(uploadMillis, TimeUnit.MILLISECONDS), uploadDataRate}); + flowFileDescription, url, FormatUtils.formatMinutesSeconds(uploadMillis, TimeUnit.MILLISECONDS), uploadDataRate); for (final FlowFile flowFile : toSend) { - session.getProvenanceReporter().send(flowFile, url, "Remote DN=" + httpClientContext.getAttribute(REMOTE_DN), uploadMillis, true); + session.getProvenanceReporter().send(flowFile, url, "Remote DN=" + httpClientContext.getAttribute(REMOTE_DN), uploadMillis, true, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } return; @@ -827,7 +827,7 @@ public long getContentLength() { final String holdReason = holdResponse.getStatusLine().getReasonPhrase(); if (holdStatusCode >= 300) { logger.error("Failed to delete Hold that 
destination placed on {}: got response code {}:{}; routing to failure", - new Object[]{flowFileDescription, holdStatusCode, holdReason}); + flowFileDescription, holdStatusCode, holdReason); for (FlowFile flowFile : toSend) { flowFile = session.penalize(flowFile); @@ -837,10 +837,10 @@ public long getContentLength() { } logger.info("Successfully Posted {} to {} in {} at a rate of {}", - new Object[]{flowFileDescription, url, FormatUtils.formatMinutesSeconds(uploadMillis, TimeUnit.MILLISECONDS), uploadDataRate}); + flowFileDescription, url, FormatUtils.formatMinutesSeconds(uploadMillis, TimeUnit.MILLISECONDS), uploadDataRate); for (final FlowFile flowFile : toSend) { - session.getProvenanceReporter().send(flowFile, url, "Remote DN=" + httpClientContext.getAttribute(REMOTE_DN), uploadMillis, true); + session.getProvenanceReporter().send(flowFile, url, "Remote DN=" + httpClientContext.getAttribute(REMOTE_DN), uploadMillis, true, REL_SUCCESS); session.transfer(flowFile, REL_SUCCESS); } return; diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutDatabaseRecord.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutDatabaseRecord.java index fdad3fe56f56..b199d94900ff 100644 --- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutDatabaseRecord.java +++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutDatabaseRecord.java @@ -503,7 +503,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } session.transfer(flowFile, REL_SUCCESS); - session.getProvenanceReporter().send(flowFile, getJdbcUrl(connection)); + session.getProvenanceReporter().send(flowFile, getJdbcUrl(connection), REL_SUCCESS); } catch (final Exception e) { // When an Exception is thrown, we want to route to 'retry' if we expect that attempting the same request again // might work. Otherwise, route to failure. SQLTransientException is a specific type that indicates that a retry may work. 
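// A hedged sketch of the retry-versus-failure routing described in the comment above;
// the catch structure and the REL_RETRY/REL_FAILURE names are illustrative, not part
// of this patch:
//
//   } catch (final SQLTransientException e) {
//       // transient: the same statement may succeed on a later attempt
//       session.transfer(session.penalize(flowFile), REL_RETRY);
//   } catch (final Exception e) {
//       // non-transient: retrying will not help
//       session.transfer(flowFile, REL_FAILURE);
//   }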
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
index a94037f2023c..26d5c0edcdc0 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
@@ -496,7 +496,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
             send(message);
-            session.getProvenanceReporter().send(flowFile, "mailto:" + message.getAllRecipients()[0].toString());
+            session.getProvenanceReporter().send(flowFile, "mailto:" + message.getAllRecipients()[0].toString(), REL_SUCCESS);
             session.transfer(flowFile, REL_SUCCESS);
             getLogger().debug("Sent email as a result of receiving {}", flowFile);
         } catch (final ProcessException | MessagingException | IOException e) {
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
index cdb7cba4d693..cbdacfe75cfc 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
@@ -393,7 +393,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
                 logger.info("Produced copy of {} at location {}", new Object[]{flowFile, finalCopyFile});
             }
-            session.getProvenanceReporter().send(flowFile, finalCopyFile.toFile().toURI().toString(), stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+            session.getProvenanceReporter().send(flowFile, finalCopyFile.toFile().toURI().toString(), stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
             session.transfer(flowFile, REL_SUCCESS);
         } catch (final Throwable t) {
             if (tempDotCopyFile != null) {
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFileTransfer.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFileTransfer.java
index afb6bb6058d4..350e5f7c8470 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFileTransfer.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFileTransfer.java
@@ -141,14 +141,14 @@ public void process(final InputStream in) throws IOException {
                 final String dataRate = stopWatch.calculateDataRate(flowFile.getSize());
                 final long millis = stopWatch.getDuration(TimeUnit.MILLISECONDS);
                 logger.info("Successfully transferred {} to {} on remote host {} in {} milliseconds at a rate of {}",
-                        new Object[]{flowFile, fullPathRef.get(), hostname, millis, dataRate});
+                        flowFile, fullPathRef.get(), hostname, millis, dataRate);
                 String fullPathWithSlash = fullPathRef.get();
                 if (!fullPathWithSlash.startsWith("/")) {
                     fullPathWithSlash = "/" + fullPathWithSlash;
                 }
                 final String destinationUri = transfer.getProtocolName() + "://" + hostname + fullPathWithSlash;
-                session.getProvenanceReporter().send(flowFile, destinationUri, millis);
+                session.getProvenanceReporter().send(flowFile, destinationUri, millis, conflictResult.getRelationship());
             }
             if (conflictResult.isPenalize()) {
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
index d489280ab774..fec69c0a0fe6 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
@@ -244,17 +244,21 @@ public void process(final InputStream in) throws IOException {
             }
             successfulFlowFiles.add(flowFile);
-            session.getProvenanceReporter().send(flowFile, context.getProperty(URL).getValue());
         }
         try {
             jmsSession.commit();
-
+            for (FlowFile flowFile : successfulFlowFiles) {
+                session.getProvenanceReporter().send(flowFile, context.getProperty(URL).getValue(), REL_SUCCESS);
+            }
             session.transfer(successfulFlowFiles, REL_SUCCESS);
             final String flowFileDescription = successfulFlowFiles.size() > 10 ? successfulFlowFiles.size() + " FlowFiles" : successfulFlowFiles.toString();
             logger.info("Sent {} to JMS Server and transferred to 'success'", new Object[]{flowFileDescription});
         } catch (JMSException e) {
             logger.error("Failed to commit JMS Session due to {} and transferred to 'failure'", new Object[]{e});
+            for (FlowFile flowFile : flowFiles) {
+                session.getProvenanceReporter().send(flowFile, context.getProperty(URL).getValue(), REL_FAILURE);
+            }
             session.transfer(flowFiles, REL_FAILURE);
             context.yield();
             wrappedProducer.close(logger);
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutRecord.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutRecord.java
index be56ea43df1f..132d33494ff6 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutRecord.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutRecord.java
@@ -159,7 +159,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro
             final long transmissionMillis = stopWatch.getElapsed(TimeUnit.MILLISECONDS);
             // Only record provenance if we sent any records
             if (writeResult.getRecordCount() > 0 || transmitZeroRecords) {
-                session.getProvenanceReporter().send(flowFile, recordSinkURL, transmissionMillis);
+                session.getProvenanceReporter().send(flowFile, recordSinkURL, transmissionMillis, REL_SUCCESS);
             }
         } catch (RetryableIOException rioe) {
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
index 3068f0758756..d14003d0be01 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
@@ -460,7 +460,7 @@ void apply(final ProcessContext context, final ProcessSession session, final Fun
             // Emit a Provenance SEND event
             final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - fc.startNanos);
             for (final FlowFile flowFile : sentFlowFiles) {
-                session.getProvenanceReporter().send(flowFile, url, transmissionMillis, true);
+                session.getProvenanceReporter().send(flowFile, url, transmissionMillis, true, REL_SUCCESS);
             }
         }
     };
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSyslog.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSyslog.java
index b9fcd2001f58..43c5b3f184df 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSyslog.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSyslog.java
@@ -255,7 +255,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
                 timer.stop();
                 final long duration = timer.getDuration(TimeUnit.MILLISECONDS);
-                session.getProvenanceReporter().send(flowFile, transitUri, duration, true);
+                session.getProvenanceReporter().send(flowFile, transitUri, duration, true, REL_SUCCESS);
                 getLogger().debug("Send Completed {}", flowFile);
                 session.transfer(flowFile, REL_SUCCESS);
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutTCP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutTCP.java
index cc2c5d177471..0ba778b83290 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutTCP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutTCP.java
@@ -141,7 +141,7 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory
             }
             final FlowFile processedFlowFile = session.putAttribute(flowFile, RECORD_COUNT_TRANSMITTED, Integer.toString(recordCount));
-            session.getProvenanceReporter().send(processedFlowFile, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+            session.getProvenanceReporter().send(processedFlowFile, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
             session.transfer(processedFlowFile, REL_SUCCESS);
             session.commitAsync();
         } catch (final Exception e) {
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutUDP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutUDP.java
index b36129c62ab5..40dc2aa101bc 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutUDP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutUDP.java
@@ -60,7 +60,7 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory
             final byte[] content = readContent(session, flowFile);
             eventSender.sendEvent(content);
-            session.getProvenanceReporter().send(flowFile, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+            session.getProvenanceReporter().send(flowFile, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
             session.transfer(flowFile, REL_SUCCESS);
             session.commitAsync();
         } catch (final Exception e) {
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryRecord.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryRecord.java
index 627202c1b712..cb823d62463d 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryRecord.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryRecord.java
@@ -353,7 +353,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
         final long elapsedMillis = stopWatch.getElapsed(TimeUnit.MILLISECONDS);
         if (transformedFlowFiles.size() > 0) {
-            session.getProvenanceReporter().fork(original, transformedFlowFiles.keySet(), elapsedMillis);
+            session.getProvenanceReporter().fork(original, transformedFlowFiles.keySet(), elapsedMillis, REL_ORIGINAL);
             for (final Map.Entry<FlowFile, Relationship> entry : transformedFlowFiles.entrySet()) {
                 final FlowFile transformed = entry.getKey();
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
index ecedd2dd1efb..a743a7da3eac 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
@@ -83,7 +83,7 @@
 @SystemResourceConsideration(resource = SystemResource.MEMORY)
 public class ReplaceText extends AbstractProcessor {
-    private static Pattern REPLACEMENT_NORMALIZATION_PATTERN = Pattern.compile("(\\$\\D)");
+    private static final Pattern REPLACEMENT_NORMALIZATION_PATTERN = Pattern.compile("(\\$\\D)");
     // Constants
     public static final String LINE_BY_LINE = "Line-by-Line";
@@ -375,8 +375,8 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
             return;
         }
-        logger.info("Transferred {} to 'success'", new Object[] {flowFile});
-        session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+        logger.info("Transferred {} to 'success'", flowFile);
+        session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
         session.transfer(flowFile, REL_SUCCESS);
     }
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
index 45a425a2f779..a2f767d9e210 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
@@ -209,8 +209,8 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
         flowFile = session.write(flowFile, new ReplaceTextCallback(context, flowFile, maxBufferSize));
-        logger.info("Transferred {} to 'success'", new Object[]{flowFile});
-        session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+        logger.info("Transferred {} to 'success'", flowFile);
+        session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
         session.transfer(flowFile, REL_SUCCESS);
     }
 }
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TailFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TailFile.java
index a68cac609ed4..6e09be9d3900 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TailFile.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TailFile.java
@@ -956,7 +956,7 @@ public void process(final OutputStream rawOut) throws IOException {
                 flowFile = session.putAllAttributes(flowFile, attributes);
                 session.getProvenanceReporter().receive(flowFile, file.toURI().toString(), "FlowFile contains bytes " + position + " through " + positionHolder.get() + " of source file",
-                        TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos));
+                        TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos), REL_SUCCESS);
                 session.transfer(flowFile, REL_SUCCESS);
                 getLogger().debug("Created {} and routed to success", flowFile);
             }
@@ -1487,7 +1487,7 @@ private boolean tailRolledFile(final ProcessContext context, final ProcessSessio
                 flowFile = session.putAllAttributes(flowFile, attributes);
                 session.getProvenanceReporter().receive(flowFile, fileToTail.toURI().toString(), "FlowFile contains bytes 0 through " + position + " of source file",
-                        TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos));
+                        TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos), REL_SUCCESS);
                 session.transfer(flowFile, REL_SUCCESS);
                 getLogger().debug("Created {} from rolled over file {} and routed to success", new Object[]{flowFile, fileToTail});
             }
@@ -1542,7 +1542,7 @@ private boolean tailRolledFile(final ProcessContext context, final ProcessSessio
     private TailFileState consumeFileFully(final File file, final ProcessContext context, final ProcessSession session, TailFileObject tfo) throws IOException {
         FlowFile flowFile = session.create();
-        try (final InputStream fis = new FileInputStream(file)) {
+        try (final InputStream fis = Files.newInputStream(file.toPath())) {
             flowFile = session.write(flowFile, out -> {
                 flushLinesBuffer(out, new CRC32());
                 StreamUtils.copy(fis, out);
@@ -1557,9 +1557,9 @@ private TailFileState consumeFileFully(final File file, final ProcessContext con
             attributes.put(CoreAttributes.MIME_TYPE.key(), "text/plain");
             attributes.put("tailfile.original.path", tfo.getState().getFilename());
             flowFile = session.putAllAttributes(flowFile, attributes);
-            session.getProvenanceReporter().receive(flowFile, file.toURI().toString());
+            session.getProvenanceReporter().receive(flowFile, file.toURI().toString(), REL_SUCCESS);
             session.transfer(flowFile, REL_SUCCESS);
-            getLogger().debug("Created {} from {} and routed to success", new Object[]{flowFile, file});
+            getLogger().debug("Created {} from {} and routed to success", flowFile, file);
             // use a timestamp of lastModified() + 1 so that we do not ingest this file again.
             cleanup(context);
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
index 2896e54dfe8c..35af80db42d9 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
@@ -319,7 +319,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
                 }
             });
             session.transfer(transformed, REL_SUCCESS);
-            session.getProvenanceReporter().modifyContent(transformed, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+            session.getProvenanceReporter().modifyContent(transformed, stopWatch.getElapsed(TimeUnit.MILLISECONDS), REL_SUCCESS);
             getLogger().info("Transformation Completed {}", original);
         } catch (final ProcessException e) {
             getLogger().error("Transformation Failed", original, e);
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateDatabaseTable.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateDatabaseTable.java
index 32e73ae4c1a8..1fd4ba848950 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateDatabaseTable.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateDatabaseTable.java
@@ -442,7 +442,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
                 }
                 attributes.put(ATTR_OUTPUT_TABLE, tableName);
                 flowFile = session.putAllAttributes(flowFile, attributes);
-                session.getProvenanceReporter().invokeRemoteProcess(flowFile, getJdbcUrl(connection));
+                session.getProvenanceReporter().invokeRemoteProcess(flowFile, getJdbcUrl(connection), REL_SUCCESS);
                 session.transfer(flowFile, REL_SUCCESS);
             }
         } catch (IOException | SQLException e) {
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ftp/commands/FtpCommandSTOR.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ftp/commands/FtpCommandSTOR.java
index 70714d152e9b..35e34505eab0 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ftp/commands/FtpCommandSTOR.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ftp/commands/FtpCommandSTOR.java
@@ -194,7 +194,7 @@ private void transferData(final DataConnection dataConnection, final FtpIoSessio
             processSession.putAttribute(flowFile, CoreAttributes.FILENAME.key(), ftpFile.getName());
             processSession.putAttribute(flowFile, CoreAttributes.PATH.key(), getPath(ftpFile));
-            processSession.getProvenanceReporter().modifyContent(flowFile);
+            processSession.getProvenanceReporter().modifyContent(flowFile, relationshipSuccess);
             processSession.transfer(flowFile, relationshipSuccess);
         } catch (Exception exception) {
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBin.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBin.java
index 36e1f2a1de82..050bd7f0c017 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBin.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBin.java
@@ -403,7 +403,7 @@ public void complete(final String completionReason) throws IOException {
             merged = session.putAllAttributes(merged, attributes);
             flowFiles.forEach(ff -> session.putAttribute(ff, MergeRecord.MERGE_UUID_ATTRIBUTE, merged.getAttribute(CoreAttributes.UUID.key())));
-            session.getProvenanceReporter().join(flowFiles, merged, "Records Merged due to: " + completionReason);
+            session.getProvenanceReporter().join(flowFiles, merged, "Records Merged due to: " + completionReason, MergeRecord.REL_MERGED);
             session.transfer(merged, MergeRecord.REL_MERGED);
             session.transfer(flowFiles, MergeRecord.REL_ORIGINAL);
             session.adjustCounter("Records Merged", writeResult.getRecordCount(), false);
@@ -411,7 +411,7 @@ public void complete(final String completionReason) throws IOException {
             if (logger.isDebugEnabled()) {
                 final List<String> ids = flowFiles.stream().map(ff -> "id=" + ff.getId()).collect(Collectors.toList());
-                logger.debug("Completed bin {} with {} records with Merged FlowFile {} using input FlowFiles {}", new Object[] {this, writeResult.getRecordCount(), merged, ids});
+                logger.debug("Completed bin {} with {} records with Merged FlowFile {} using input FlowFiles {}", this, writeResult.getRecordCount(), merged, ids);
             }
         } catch (final Exception e) {
             session.rollback(true);
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBinManager.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBinManager.java
index ac134e18e111..3b6d82ff4042 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBinManager.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBinManager.java
@@ -154,7 +154,7 @@ public void add(final String groupIdentifier, final FlowFile flowFile, final Rec
                 throw new RuntimeException("Attempted to add " + flowFile + " to a new bin but failed. This is unexpected. Will roll back session and try again.");
             }
-            logger.debug("Transferred id={} to {}", new Object[] {flowFile.getId(), bin});
+            logger.debug("Transferred id={} to {}", flowFile.getId(), bin);
             if (!bin.isComplete()) {
                 final int updatedBinCount = binCount.incrementAndGet();
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java
index a321ece9f18d..41e566d106fb 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java
@@ -386,7 +386,7 @@ private Set<FlowFile> handleRequest(final HttpServletRequest request, final Proc
                 flowFile = session.putAllAttributes(flowFile, attributes);
                 flowFile = saveRequestDetailsAsAttributes(request, session, foundSubject, foundIssuer, flowFile);
                 final String details = String.format("Remote DN=%s, Issuer DN=%s", foundSubject, foundIssuer);
-                session.getProvenanceReporter().receive(flowFile, request.getRequestURL().toString(), sourceSystemFlowFileIdentifier, details, transferMillis);
+                session.getProvenanceReporter().receive(flowFile, request.getRequestURL().toString(), sourceSystemFlowFileIdentifier, details, transferMillis, ListenHTTP.RELATIONSHIP_SUCCESS);
                 flowFileSet.add(flowFile);
                 if (holdUuid == null) {
diff --git a/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java b/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
index fa29a7bea2d5..04444a463314 100644
--- a/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
+++ b/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
@@ -541,7 +541,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
             }
             for(FlowFile toTransfer: flowFilesToTransfer) {
-                session.getProvenanceReporter().modifyAttributes(toTransfer);
+                session.getProvenanceReporter().modifyAttributes(toTransfer, REL_SUCCESS);
             }
             session.transfer(flowFilesToTransfer, REL_SUCCESS);
         }
@@ -717,7 +717,7 @@ private FlowFile executeActions(final ProcessSession session, final ProcessConte
                 final String namespace = uri.getScheme();
                 if (namespace != null) {
                     final String identifier = alternateIdentifierAdd.substring(Math.min(namespace.length() + 1, alternateIdentifierAdd.length() - 1));
-                    session.getProvenanceReporter().associate(flowfile, namespace, identifier);
+                    session.getProvenanceReporter().associate(flowfile, namespace, identifier, REL_SUCCESS);
                 }
             } catch (final URISyntaxException e) {
             }
diff --git a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/AbstractWebSocketGatewayProcessor.java b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/AbstractWebSocketGatewayProcessor.java
index 803ffc4b5917..b078500336e0 100644
--- a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/AbstractWebSocketGatewayProcessor.java
+++ b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/AbstractWebSocketGatewayProcessor.java
@@ -248,16 +248,17 @@ private void enqueueMessage(final WebSocketMessage incomingMessage) {
             );
         }
-        session.getProvenanceReporter().receive(messageFlowFile, getTransitUri(sessionInfo));
-
         if (incomingMessage instanceof WebSocketConnectedMessage) {
+            session.getProvenanceReporter().receive(messageFlowFile, getTransitUri(sessionInfo), REL_CONNECTED);
             session.transfer(messageFlowFile, REL_CONNECTED);
         } else {
             switch (Objects.requireNonNull(messageType)) {
                 case TEXT:
+                    session.getProvenanceReporter().receive(messageFlowFile, getTransitUri(sessionInfo), REL_MESSAGE_TEXT);
                     session.transfer(messageFlowFile, REL_MESSAGE_TEXT);
                     break;
                 case BINARY:
+                    session.getProvenanceReporter().receive(messageFlowFile, getTransitUri(sessionInfo), REL_MESSAGE_BINARY);
                     session.transfer(messageFlowFile, REL_MESSAGE_BINARY);
                     break;
             }
diff --git a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/PutWebSocket.java b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/PutWebSocket.java
index e910ae3187cc..0d18fb868fdb 100644
--- a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/PutWebSocket.java
+++ b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/PutWebSocket.java
@@ -230,7 +230,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession process
         final FlowFile updatedFlowFile = processSession.putAllAttributes(flowfile, attrs);
         final long transmissionMillis = System.currentTimeMillis() - startSending;
-        processSession.getProvenanceReporter().send(updatedFlowFile, transitUri.get(), transmissionMillis);
+        processSession.getProvenanceReporter().send(updatedFlowFile, transitUri.get(), transmissionMillis, REL_SUCCESS);
         processSession.transfer(updatedFlowFile, REL_SUCCESS);
diff --git a/nifi-nar-bundles/nifi-windows-event-log-bundle/nifi-windows-event-log-processors/src/main/java/org/apache/nifi/processors/windows/event/log/ConsumeWindowsEventLog.java b/nifi-nar-bundles/nifi-windows-event-log-bundle/nifi-windows-event-log-processors/src/main/java/org/apache/nifi/processors/windows/event/log/ConsumeWindowsEventLog.java
index e7698f9368da..e3d08adb5f44 100644
--- a/nifi-nar-bundles/nifi-windows-event-log-bundle/nifi-windows-event-log-processors/src/main/java/org/apache/nifi/processors/windows/event/log/ConsumeWindowsEventLog.java
+++ b/nifi-nar-bundles/nifi-windows-event-log-bundle/nifi-windows-event-log-processors/src/main/java/org/apache/nifi/processors/windows/event/log/ConsumeWindowsEventLog.java
@@ -338,7 +338,7 @@ private int processQueue(ProcessSession session) {
                 byte[] xmlBytes = xmlMessage.getBytes(StandardCharsets.UTF_8);
                 flowFile = session.write(flowFile, out -> out.write(xmlBytes));
                 flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), APPLICATION_XML);
-                session.getProvenanceReporter().receive(flowFile, provenanceUri);
+                session.getProvenanceReporter().receive(flowFile, provenanceUri, REL_SUCCESS);
                 session.transfer(flowFile, REL_SUCCESS);
             }
         } catch (final Throwable t) {
diff --git a/nifi-nar-bundles/nifi-workday-bundle/nifi-workday-processors/src/main/java/org/apache/nifi/processors/workday/GetWorkdayReport.java b/nifi-nar-bundles/nifi-workday-bundle/nifi-workday-processors/src/main/java/org/apache/nifi/processors/workday/GetWorkdayReport.java
index 53ce88228da8..0ed8c5130970 100644
--- a/nifi-nar-bundles/nifi-workday-bundle/nifi-workday-processors/src/main/java/org/apache/nifi/processors/workday/GetWorkdayReport.java
+++ b/nifi-nar-bundles/nifi-workday-bundle/nifi-workday-processors/src/main/java/org/apache/nifi/processors/workday/GetWorkdayReport.java
@@ -229,9 +229,9 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro
             if (responseFlowFile != null) {
                 responseFlowFile = session.putAllAttributes(responseFlowFile, commonAttributes);
                 if (flowFile == null) {
-                    session.getProvenanceReporter().receive(responseFlowFile, uri.toString(), elapsedTime);
+                    session.getProvenanceReporter().receive(responseFlowFile, uri.toString(), elapsedTime, SUCCESS);
                 } else {
-                    session.getProvenanceReporter().fetch(responseFlowFile, uri.toString(), elapsedTime);
+                    session.getProvenanceReporter().fetch(responseFlowFile, uri.toString(), elapsedTime, SUCCESS);
                 }
             }
diff --git a/nifi-nar-bundles/nifi-zendesk-bundle/nifi-zendesk-processors/src/main/java/org/apache/nifi/processors/zendesk/GetZendesk.java b/nifi-nar-bundles/nifi-zendesk-bundle/nifi-zendesk-processors/src/main/java/org/apache/nifi/processors/zendesk/GetZendesk.java
index 27f25a18f490..d9d1cfb8d36c 100644
--- a/nifi-nar-bundles/nifi-zendesk-bundle/nifi-zendesk-processors/src/main/java/org/apache/nifi/processors/zendesk/GetZendesk.java
+++ b/nifi-nar-bundles/nifi-zendesk-bundle/nifi-zendesk-processors/src/main/java/org/apache/nifi/processors/zendesk/GetZendesk.java
@@ -249,7 +249,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) {
         int recordCount = resultCount.get();
         if (recordCount > 0) {
             FlowFile updatedFlowFile = session.putAttribute(createdFlowFile, RECORD_COUNT_ATTRIBUTE_NAME, Integer.toString(recordCount));
-            session.getProvenanceReporter().receive(updatedFlowFile, uri.toString());
+            session.getProvenanceReporter().receive(updatedFlowFile, uri.toString(), REL_SUCCESS);
             session.transfer(updatedFlowFile, REL_SUCCESS);
         } else {
             session.remove(createdFlowFile);
diff --git a/nifi-stateless/nifi-stateless-bundle/nifi-stateless-engine/src/main/java/org/apache/nifi/stateless/repository/StatelessProvenanceRepository.java b/nifi-stateless/nifi-stateless-bundle/nifi-stateless-engine/src/main/java/org/apache/nifi/stateless/repository/StatelessProvenanceRepository.java
index 5eccd3e797cf..733959778155 100644
--- a/nifi-stateless/nifi-stateless-bundle/nifi-stateless-engine/src/main/java/org/apache/nifi/stateless/repository/StatelessProvenanceRepository.java
+++ b/nifi-stateless/nifi-stateless-bundle/nifi-stateless-engine/src/main/java/org/apache/nifi/stateless/repository/StatelessProvenanceRepository.java
@@ -19,7 +19,6 @@
 import org.apache.nifi.authorization.Authorizer;
 import org.apache.nifi.authorization.user.NiFiUser;
 import org.apache.nifi.events.EventReporter;
-import org.apache.nifi.provenance.AbstractProvenanceRepository;
 import org.apache.nifi.provenance.AsyncLineageSubmission;
 import org.apache.nifi.provenance.IdentifierLookup;
 import org.apache.nifi.provenance.ProvenanceAuthorizableFactory;
@@ -27,7 +26,9 @@
 import org.apache.nifi.provenance.ProvenanceEventRecord;
 import org.apache.nifi.provenance.ProvenanceEventRepository;
 import org.apache.nifi.provenance.ProvenanceEventType;
+import org.apache.nifi.provenance.ProvenanceRepository;
 import org.apache.nifi.provenance.StandardProvenanceEventRecord;
+import org.apache.nifi.provenance.UpdateableProvenanceEventRecord;
 import org.apache.nifi.provenance.lineage.ComputeLineageSubmission;
 import org.apache.nifi.provenance.search.Query;
 import org.apache.nifi.provenance.search.QuerySubmission;
@@ -42,7 +43,7 @@
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicLong;
-public class StatelessProvenanceRepository extends AbstractProvenanceRepository {
+public class StatelessProvenanceRepository implements ProvenanceRepository {
     public static String CONTAINER_NAME = "in-memory";
@@ -107,12 +108,7 @@ public Long getMaxEventId() {
     }
     public ProvenanceEventRecord getEvent(final String identifier) throws IOException {
-        final List<ProvenanceEventRecord> records = ringBuffer.getSelectedElements(new RingBuffer.Filter<ProvenanceEventRecord>() {
-            @Override
-            public boolean select(final ProvenanceEventRecord event) {
-                return identifier.equals(event.getFlowFileUuid());
-            }
-        }, 1);
+        final List<ProvenanceEventRecord> records = ringBuffer.getSelectedElements(event -> identifier.equals(event.getFlowFileUuid()), 1);
         return records.isEmpty() ? null : records.get(0);
     }
@@ -208,7 +204,7 @@ public String getContainerFileStoreName(String containerName) {
         return null;
     }
-    private static class IdEnrichedProvEvent implements ProvenanceEventRecord {
+    private static class IdEnrichedProvEvent implements UpdateableProvenanceEventRecord {
         private final ProvenanceEventRecord record;
         private final long id;
@@ -223,6 +219,13 @@ public long getEventId() {
             return id;
         }
+        @Override
+        public void setEventId(long eventId) {
+            if (record instanceof UpdateableProvenanceEventRecord) {
+                ((UpdateableProvenanceEventRecord) record).setEventId(eventId);
+            }
+        }
+
         @Override
         public List<Long> getPreviousEventIds() {
             return record.getPreviousEventIds();
@@ -230,7 +233,9 @@ public List<Long> getPreviousEventIds() {
         @Override
         public void setPreviousEventIds(List<Long> previousEventIds) {
-            record.setPreviousEventIds(previousEventIds);
+            if (record instanceof UpdateableProvenanceEventRecord) {
+                ((UpdateableProvenanceEventRecord) record).setPreviousEventIds(previousEventIds);
+            }
         }
         @Override
diff --git a/nifi-system-tests/nifi-system-test-extensions-bundle/nifi-system-test-extensions/src/main/java/org/apache/nifi/processors/tests/system/IngestFile.java b/nifi-system-tests/nifi-system-test-extensions-bundle/nifi-system-test-extensions/src/main/java/org/apache/nifi/processors/tests/system/IngestFile.java
index 844aed1788f4..658f8e600a5b 100644
--- a/nifi-system-tests/nifi-system-test-extensions-bundle/nifi-system-test-extensions/src/main/java/org/apache/nifi/processors/tests/system/IngestFile.java
+++ b/nifi-system-tests/nifi-system-test-extensions-bundle/nifi-system-test-extensions/src/main/java/org/apache/nifi/processors/tests/system/IngestFile.java
@@ -86,7 +86,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
         FlowFile flowFile = session.create();
         flowFile = session.importFrom(file.toPath(), true, flowFile);
         session.transfer(flowFile, REL_SUCCESS);
-        session.getProvenanceReporter().receive(flowFile, file.toURI().toString());
+        session.getProvenanceReporter().receive(flowFile, file.toURI().toString(), REL_SUCCESS);
         final boolean deleteFile = context.getProperty(DELETE_FILE).asBoolean();
diff --git a/nifi-system-tests/nifi-system-test-extensions-bundle/nifi-system-test-extensions/src/main/java/org/apache/nifi/processors/tests/system/ReplaceWithFile.java b/nifi-system-tests/nifi-system-test-extensions-bundle/nifi-system-test-extensions/src/main/java/org/apache/nifi/processors/tests/system/ReplaceWithFile.java
index 70fd09ecc639..f86914f882ae 100644
--- a/nifi-system-tests/nifi-system-test-extensions-bundle/nifi-system-test-extensions/src/main/java/org/apache/nifi/processors/tests/system/ReplaceWithFile.java
+++ b/nifi-system-tests/nifi-system-test-extensions-bundle/nifi-system-test-extensions/src/main/java/org/apache/nifi/processors/tests/system/ReplaceWithFile.java
@@ -73,7 +73,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
         }
         flowFile = session.importFrom(file.toPath(), true, flowFile);
-        session.getProvenanceReporter().fetch(flowFile, file.toURI().toString());
+        session.getProvenanceReporter().fetch(flowFile, file.toURI().toString(), REL_SUCCESS);
         session.transfer(flowFile, REL_SUCCESS);
         getLogger().info("Successfully imported replacement file {}", file.getAbsolutePath());
diff --git a/nifi-system-tests/nifi-system-test-extensions-bundle/nifi-system-test-extensions/src/main/java/org/apache/nifi/processors/tests/system/WriteToFile.java b/nifi-system-tests/nifi-system-test-extensions-bundle/nifi-system-test-extensions/src/main/java/org/apache/nifi/processors/tests/system/WriteToFile.java
index e5086cc9cd16..fc0eacce0937 100644
--- a/nifi-system-tests/nifi-system-test-extensions-bundle/nifi-system-test-extensions/src/main/java/org/apache/nifi/processors/tests/system/WriteToFile.java
+++ b/nifi-system-tests/nifi-system-test-extensions-bundle/nifi-system-test-extensions/src/main/java/org/apache/nifi/processors/tests/system/WriteToFile.java
@@ -98,6 +98,6 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
         session.transfer(flowFile, REL_SUCCESS);
         getLogger().info("Wrote one FlowFile of size {} to {}", flowFile.getSize(), file.getAbsolutePath());
-        session.getProvenanceReporter().send(flowFile, file.toURI().toString());
+        session.getProvenanceReporter().send(flowFile, file.toURI().toString(), REL_SUCCESS);
     }
 }
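A closing note for testers: the relationship-aware events can be observed with the standard nifi-mock TestRunner. The sketch below is illustrative only and assumes the hypothetical ExampleSendProcessor from the earlier note; whether nifi-mock itself needs updates for the new overloads is outside the scope shown here.

// Illustrative test sketch using nifi-mock. ExampleSendProcessor is the
// hypothetical processor from the earlier sketch, not part of this patch.
import org.apache.nifi.provenance.ProvenanceEventRecord;
import org.apache.nifi.provenance.ProvenanceEventType;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

public class ExampleSendProcessorCheck {
    public static void main(final String[] args) {
        final TestRunner runner = TestRunners.newTestRunner(ExampleSendProcessor.class);
        runner.enqueue("payload");
        runner.run();

        // The FlowFile should land on 'success' ...
        runner.assertAllFlowFilesTransferred(ExampleSendProcessor.REL_SUCCESS, 1);

        // ... and the reporter should have recorded exactly one SEND event
        // for the FlowFile routed there.
        final ProvenanceEventRecord event = runner.getProvenanceEvents().get(0);
        if (event.getEventType() != ProvenanceEventType.SEND) {
            throw new AssertionError("Expected a SEND event but saw " + event.getEventType());
        }
    }
}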