Skip to content

Commit

Permalink
Merge pull request #85 from PhenoApps/issue_83
Browse files Browse the repository at this point in the history
Issue 83 - sanitize text columns for CSV output
  • Loading branch information
trife authored Oct 11, 2021
2 parents d32cf8d + ce41c2e commit 097b65f
Show file tree
Hide file tree
Showing 2 changed files with 21 additions and 20 deletions.
3 changes: 3 additions & 0 deletions app/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,9 @@ dependencies {
implementation "com.mikepenz:aboutlibraries:$about_version"
implementation "com.github.daniel-stoneuk:material-about-library:$daniel_version"

//apache
implementation "org.apache.commons:commons-csv:1.5"

//androidx
//noinspection GradleDependency
implementation "androidx.fragment:fragment-ktx:$fragment_version"
Expand Down
38 changes: 18 additions & 20 deletions app/src/main/java/org/phenoapps/prospector/utils/FileUtil.kt
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ import android.content.Context
import android.net.Uri
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext
import org.apache.commons.csv.CSVFormat
import org.apache.commons.csv.CSVPrinter
import org.phenoapps.prospector.R
import org.phenoapps.prospector.data.models.DeviceTypeExport

Expand Down Expand Up @@ -62,13 +64,9 @@ open class FileUtil(private val ctx: Context) {
@Suppress("BlockingMethodInNonBlockingContext")
suspend fun exportCsv(uri: Uri, exports: List<DeviceTypeExport>, convert: Boolean = false) = withContext(Dispatchers.IO) {

val newline = System.lineSeparator()

//false warning, this function is run always in Dispatchers.IO
ctx.contentResolver.openOutputStream(uri)?.let { stream ->

val writer = stream.buffered().writer()

val prefixHeaders = "${experimentNameHeader},$scanIdHeader,$scanDateHeader," +
"$deviceTypeHeader,$scanDeviceIdHeader,$operatorHeader," +
"$specLightSourceHeader,$scanNoteHeader,"
Expand Down Expand Up @@ -98,11 +96,10 @@ open class FileUtil(private val ctx: Context) {

first?.let { firstWave ->

val headers = prefixHeaders + (firstWave.map { it.first }).joinToString(",")

writer.write(headers)
val headers = (prefixHeaders + (firstWave.map { it.first }).joinToString(","))
.split(",").toTypedArray()

writer.write(newline)
val csvWriter = CSVPrinter(stream.buffered().writer(), CSVFormat.DEFAULT.withHeader(*headers))

exports.forEach { export ->

Expand Down Expand Up @@ -136,22 +133,22 @@ open class FileUtil(private val ctx: Context) {
it.first in deviceTypeMin..deviceTypeMax
}

writer.write("${data.joinToString(",")},${wave.map { it.second }.joinToString(",")}")
csvWriter.printRecord(*(data + wave.map { it.second.toString() }.toTypedArray()))
}

writer.write(newline)
csvWriter.flush()

}
csvWriter.close()
}

} else {

exports.firstOrNull()?.spectralData?.split(" ")?.size?.let { size ->

val headers = prefixHeaders + (1..size).joinToString(",")
val headers = (prefixHeaders + (1..size).joinToString(","))
.split(",").toTypedArray()

writer.write(headers)

writer.write(newline)
val csvWriter = CSVPrinter(stream.buffered().writer(), CSVFormat.DEFAULT.withHeader(*headers))

exports.forEach { e ->

Expand All @@ -166,17 +163,18 @@ open class FileUtil(private val ctx: Context) {
e.note
)

val frameData = e.spectralData.replace(" ", ",")
val frameData = e.spectralData.split(" ")

writer.write("${data.joinToString(",")},$frameData")
csvWriter.printRecord(*(data + frameData))

writer.write(newline)
}

csvWriter.flush()

csvWriter.close()
}
}

writer.close()

stream.close()
}
}
Expand Down

0 comments on commit 097b65f

Please sign in to comment.