
fixed typos, improved logging
Issue #196
rsoika committed Sep 27, 2023
1 parent 5f14582 commit 88c3880
Showing 1 changed file with 25 additions and 27 deletions.
@@ -62,7 +62,7 @@
import jakarta.inject.Named;

/**
- * The CSVImportService reacts on DocumentImportEvent and importes a CSV file
+ * The CSVImportService reacts on DocumentImportEvent and imports a CSV file
* form a FTP data source.
* <p>
* The implementation is based on org.apache.commons.net.ftp
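The Javadoc above names org.apache.commons.net.ftp as the transport used to pull the CSV file. As background, the following is a minimal sketch of that pattern, fetching a remote file into an in-memory buffer with Apache Commons Net; host, port, credentials and the remote file name are placeholders, and the actual connection handling of CSVImportService is not part of this diff.

```java
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.commons.net.ftp.FTP;
import org.apache.commons.net.ftp.FTPClient;

public class FtpFetchSketch {

    // Download one remote file into memory via FTP.
    // Host, credentials and file name are placeholder parameters.
    public static byte[] fetch(String host, String user, String password, String remoteFile)
            throws IOException {
        FTPClient ftpClient = new FTPClient();
        try {
            ftpClient.connect(host, 21);
            if (!ftpClient.login(user, password)) {
                throw new IOException("FTP login failed for user " + user);
            }
            // binary transfer and passive mode are the usual settings for file downloads
            ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
            ftpClient.enterLocalPassiveMode();

            ByteArrayOutputStream out = new ByteArrayOutputStream();
            if (!ftpClient.retrieveFile(remoteFile, out)) {
                throw new IOException("unable to retrieve " + remoteFile);
            }
            return out.toByteArray();
        } finally {
            if (ftpClient.isConnected()) {
                ftpClient.logout();
                ftpClient.disconnect();
            }
        }
    }
}
```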
@@ -109,7 +109,7 @@ public void onEvent(@Observes DocumentImportEvent event) {

if (!"CSV".equalsIgnoreCase(event.getSource().getItemValueString("type"))) {
// ignore data source
logger.finest("...... type '" + event.getSource().getItemValueString("type") + "' skiped.");
logger.finest("...... type '" + event.getSource().getItemValueString("type") + "' skipped.");
return;
}
try {
@@ -198,13 +198,12 @@ public void onEvent(@Observes DocumentImportEvent event) {
try (ByteArrayOutputStream is = new ByteArrayOutputStream();) {

// because time stamps are not provided by all ftp servers and always in same
- // format
- // we store the checksum of the file to test if the file has changed since the
- // last import
+ // format we store the checksum of the file to test if the file has changed
+ // since the last import
ftpClient.retrieveFile(csvFilename, is);
byte[] rawData = is.toByteArray();
if (rawData != null && rawData.length > 0) {
logger.finest("......file '" + file.getName() + "' successfull read - bytes size = "
logger.finest("......file '" + file.getName() + "' successful read - bytes size = "
+ rawData.length);

String lastChecksum = event.getSource().getItemValueString("csv.checksum");
@@ -214,8 +213,8 @@ public void onEvent(@Observes DocumentImportEvent event) {
documentImportService.logMessage("...checksum=" + newChecksum, event);
if (lastChecksum.isEmpty() || !lastChecksum.equals(newChecksum)) {
// read data....
- InputStream imputStream = new ByteArrayInputStream(rawData);
- String log = importData(imputStream, encoding, type, keyField, event);
+ InputStream inputStream = new ByteArrayInputStream(rawData);
+ String log = importData(inputStream, encoding, type, keyField, event);
// update checksum
event.getSource().setItemValue("csv.checksum", newChecksum);
documentImportService.logMessage(log, event);
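The hunk above is the reason the checksum exists at all: FTP servers do not report reliable time stamps, so the service stores a checksum of the downloaded bytes in the item csv.checksum and re-imports only when that value changes. The digest routine itself is not part of this diff; a minimal sketch, assuming an MD5 digest over the raw bytes:

```java
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class ChecksumSketch {

    // Hex checksum over the raw file content. MD5 is an assumption made for this sketch;
    // the algorithm actually used by CSVImportService is not visible in the diff.
    public static String checksum(byte[] rawData) throws NoSuchAlgorithmException {
        MessageDigest md = MessageDigest.getInstance("MD5");
        byte[] digest = md.digest(rawData);
        return String.format("%032x", new BigInteger(1, digest));
    }

    // The import only runs when no checksum was stored yet or the value has changed.
    public static boolean hasChanged(String lastChecksum, String newChecksum) {
        return lastChecksum == null || lastChecksum.isEmpty() || !lastChecksum.equals(newChecksum);
    }
}
```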
@@ -294,7 +293,7 @@ public void onEvent(@Observes DocumentImportEvent event) {
* @return ErrorMessage or empty String
* @throws PluginException
*/
- public String importData(InputStream imputStream, String encoding, String type, String keyField,
+ public String importData(InputStream inputStream, String encoding, String type, String keyField,
DocumentImportEvent event) throws PluginException {

logger.fine("...starting csv data import...");
@@ -309,13 +308,13 @@ public String importData(InputStream imputStream, String encoding, String type,
int workitemsDeleted = 0;
int workitemsFailed = 0;
int blockSize = 0;

+ String csvFileName = event.getSource().getItemValueString("selector");
if (encoding == null) {
encoding = "UTF-8";
}

try {
- BufferedReader in = new BufferedReader(new InputStreamReader(imputStream, encoding));
+ BufferedReader in = new BufferedReader(new InputStreamReader(inputStream, encoding));

// read first line containing the object type
String header = in.readLine();
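importData starts by reading the first CSV line, which carries the column names later used as item names. The header parsing itself is not shown in this hunk; a small sketch under the assumption that the configured separator is a plain string such as ";":

```java
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

public class CsvHeaderSketch {

    // Turn the first CSV line into a list of lower-cased field names.
    // The separator (e.g. ";") is an assumption; its configuration is outside this diff.
    public static List<String> parseHeader(String header, String separator) {
        List<String> fieldnames = new ArrayList<>();
        if (header == null || header.isEmpty()) {
            return fieldnames;
        }
        for (String token : header.split(Pattern.quote(separator), -1)) {
            fieldnames.add(token.trim().toLowerCase());
        }
        return fieldnames;
    }
}
```

For a header line such as `Name;Street;City` this yields the field names `name`, `street`, `city`.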
@@ -355,13 +354,13 @@ public String importData(InputStream imputStream, String encoding, String type,

// store id into cache
if (idCache.contains(keyItemValue)) {
logger.warning("...WARNING dupplicate entry found: " + keyField + "=" + keyItemValue);
logger.warning("...WARNING duplicate entry found: " + keyField + "=" + keyItemValue);
documentImportService
.logMessage("...WARNING dupplicate entry found: " + keyField + "=" + keyItemValue, event);
.logMessage("...WARNING duplicate entry found: " + keyField + "=" + keyItemValue, event);
} else {
idCache.add(keyItemValue);
}
- // test if entity already exits....
+ // test if entity already exists....
ItemCollection oldEntity = findEntityByName(entity.getItemValueString("Name"), type);
if (oldEntity == null) {
// create new workitem
@@ -370,7 +369,7 @@ public String importData(InputStream imputStream, String encoding, String type,
} else {
// test if modified....
if (!isEqualEntity(oldEntity, entity, fields)) {
logger.fine("update exsting entity: " + oldEntity.getUniqueID());
logger.fine("update existing entity: " + oldEntity.getUniqueID());
// copy all entries from the import into the
// existing entity
oldEntity.replaceAllItems(entity.getAllItems());
@@ -381,7 +380,8 @@ public String importData(InputStream imputStream, String encoding, String type,

if (blockSize >= 100) {
blockSize = 0;
logger.info("..." + workitemsTotal + " entries read (" + workitemsUpdated + " updates)");
logger.info("..." + csvFileName + ": " + workitemsTotal + " entries read (" + workitemsUpdated
+ " updates)");
// flush lucene index!
indexUpdateService.updateIndex();
}
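The logging change above illustrates the batching pattern used throughout importData: rows are counted, and after every block of 100 the progress is logged per file name and the Lucene index is flushed via indexUpdateService.updateIndex(). A generic sketch of that pattern, with a hypothetical flushIndex() standing in for the index service:

```java
import java.util.List;
import java.util.logging.Logger;

public class BlockFlushSketch {

    private static final Logger logger = Logger.getLogger(BlockFlushSketch.class.getName());

    // Hypothetical stand-in for indexUpdateService.updateIndex().
    static void flushIndex() {
        // flush pending index updates here
    }

    // Count processed rows and flush the index after every block of 100, logging
    // progress in the "<file>: <n> entries read" style introduced by this commit.
    public static void process(String csvFileName, List<String> rows) {
        int blockSize = 0;
        int total = 0;
        for (String row : rows) {
            logger.finest("...importing row: " + row);
            total++;
            blockSize++;
            if (blockSize >= 100) {
                blockSize = 0;
                logger.info("..." + csvFileName + ": " + total + " entries read");
                flushIndex();
            }
        }
        logger.info("..." + csvFileName + ": " + total + " entries read in total");
    }
}
```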
@@ -400,16 +400,16 @@ public String importData(InputStream imputStream, String encoding, String type,
finally {
// Close the input stream
try {
- if (imputStream != null) {
- imputStream.close();
+ if (inputStream != null) {
+ inputStream.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}

// now we remove all existing entries not listed in the file
- workitemsDeleted = removeDeprecatedDocuments(idCache, type);
+ workitemsDeleted = removeDeprecatedDocuments(idCache, type, csvFileName);
log += "..." + workitemsTotal + " entries read -> " + workitemsImported + " new entries - " + workitemsUpdated
+ " updates - " + workitemsDeleted + " deletions - " + workitemsFailed + " errors";

@@ -423,12 +423,12 @@ public String importData(InputStream imputStream, String encoding, String type,
*
* @return count of deletions
*/
- private int removeDeprecatedDocuments(List<String> idCache, String type) {
+ private int removeDeprecatedDocuments(List<String> idCache, String type, String csvFileName) {
int deletions = 0;
int firstResult = 0;
int blockSize = 100;

logger.info("removing deprecated entries...");
logger.info("..." + csvFileName + ": delete deprecated entries...");
// now we remove all existing entries not listed in the file
String sQuery = "SELECT document FROM Document AS document WHERE document.type='" + type
+ "' ORDER BY document.created ASC";
@@ -446,13 +446,13 @@ private int removeDeprecatedDocuments(List<String> idCache, String type) {

if (entries.size() == blockSize) {
firstResult = firstResult + blockSize;
logger.info("..." + csvFileName + ": " + firstResult + " entries verified (" + deletions
+ " deletions)");
} else {
// end
break;
}

}

return deletions;
}
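removeDeprecatedDocuments now also receives the file name so its progress messages can be attributed to a concrete import source. Conceptually the method pages through all stored documents of the given type in blocks of 100 and deletes every entry whose key was not seen in the CSV file. A sketch of that cleanup loop follows; the DocumentStore interface and its two methods are hypothetical stand-ins for the JPQL query and the delete call used by the service:

```java
import java.util.List;
import java.util.logging.Logger;

public class CleanupSketch {

    private static final Logger logger = Logger.getLogger(CleanupSketch.class.getName());

    // Hypothetical data access; in the service this is backed by a JPQL query like
    // "SELECT document FROM Document AS document WHERE document.type='<type>'" plus a delete call.
    public interface DocumentStore {
        List<String> loadKeys(String type, int firstResult, int blockSize);

        void deleteByKey(String key);
    }

    // Remove every stored document of the given type whose key is not in the id cache
    // that was built while importing the CSV file.
    public static int removeDeprecated(DocumentStore store, String type, String csvFileName,
            List<String> idCache) {
        int deletions = 0;
        int firstResult = 0;
        int blockSize = 100;
        logger.info("..." + csvFileName + ": delete deprecated entries...");
        while (true) {
            List<String> keys = store.loadKeys(type, firstResult, blockSize);
            for (String key : keys) {
                if (!idCache.contains(key)) {
                    store.deleteByKey(key);
                    deletions++;
                }
            }
            if (keys.size() == blockSize) {
                firstResult = firstResult + blockSize;
                logger.info("..." + csvFileName + ": " + firstResult + " entries verified (" + deletions
                        + " deletions)");
            } else {
                break;
            }
        }
        return deletions;
    }
}
```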

@@ -479,10 +479,8 @@ private ItemCollection readEntity(String data, List<String> fieldnames, String t
// test if the token has content
itemValue = itemValue.trim();
if (itemValue != null && !itemValue.isEmpty()) {
- // create a itemvalue with the corresponding fieldname
-
+ // create a itemValue with the corresponding fieldName
result.replaceItemValue(fieldnames.get(iCol), itemValue);
- // searchstring += itemValue + " ";
} else {
// empty value
result.replaceItemValue(fieldnames.get(iCol), "");
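readEntity maps one data row onto the field names taken from the header: each token is trimmed and stored under its column name, and empty tokens become empty item values. A compact sketch of the same mapping, using a plain Map in place of the Imixs ItemCollection:

```java
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;

public class ReadEntitySketch {

    // Map one CSV data row onto the given field names.
    // A java.util.Map stands in for the ItemCollection used by readEntity().
    public static Map<String, String> readEntity(String data, List<String> fieldnames, String separator) {
        Map<String, String> result = new HashMap<>();
        String[] tokens = data.split(Pattern.quote(separator), -1);
        for (int iCol = 0; iCol < fieldnames.size(); iCol++) {
            // missing or empty tokens are stored as empty values, mirroring the code above
            String itemValue = iCol < tokens.length ? tokens[iCol].trim() : "";
            result.put(fieldnames.get(iCol), itemValue);
        }
        return result;
    }
}
```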
@@ -571,7 +569,7 @@ private boolean isEqualEntity(ItemCollection oldEntity, ItemCollection entity, L
* @param key - name of the object (name)
* @param type - type of the object
*
- * @return entity or null if no entity with the given name exits
+ * @return entity or null if no entity with the given name exists
*/
public ItemCollection findEntityByName(String key, String type) {

