diff --git a/api/build.gradle.kts b/api/build.gradle.kts index 47b11a299..05557ac63 100644 --- a/api/build.gradle.kts +++ b/api/build.gradle.kts @@ -24,6 +24,7 @@ dependencies { implementation(project(":nebulosa-log")) implementation(project(":nebulosa-lx200-protocol")) implementation(project(":nebulosa-nova")) + implementation(project(":nebulosa-pixinsight")) implementation(project(":nebulosa-sbd")) implementation(project(":nebulosa-simbad")) implementation(project(":nebulosa-siril")) diff --git a/api/src/main/kotlin/nebulosa/api/alignment/polar/darv/DARVTask.kt b/api/src/main/kotlin/nebulosa/api/alignment/polar/darv/DARVTask.kt index 65e4ea05c..73842019a 100644 --- a/api/src/main/kotlin/nebulosa/api/alignment/polar/darv/DARVTask.kt +++ b/api/src/main/kotlin/nebulosa/api/alignment/polar/darv/DARVTask.kt @@ -17,6 +17,7 @@ import nebulosa.indi.device.camera.Camera import nebulosa.indi.device.camera.CameraEvent import nebulosa.indi.device.camera.FrameType import nebulosa.indi.device.guide.GuideOutput +import nebulosa.indi.device.mount.Mount import nebulosa.log.loggerFor import java.nio.file.Files import java.time.Duration @@ -64,7 +65,7 @@ data class DARVTask( override fun execute(cancellationToken: CancellationToken) { LOG.info("DARV started. 
camera={}, guideOutput={}, request={}", camera, guideOutput, request) - camera.snoop(listOf(guideOutput)) + if (guideOutput is Mount) camera.snoop(camera.snoopedDevices.filter { it !is Mount } + guideOutput) val task = SplitTask(listOf(cameraCaptureTask, Task.of(delayTask, forwardGuidePulseTask, backwardGuidePulseTask)), executor) task.execute(cancellationToken) diff --git a/api/src/main/kotlin/nebulosa/api/alignment/polar/tppa/TPPATask.kt b/api/src/main/kotlin/nebulosa/api/alignment/polar/tppa/TPPATask.kt index b3d8e926c..192faa152 100644 --- a/api/src/main/kotlin/nebulosa/api/alignment/polar/tppa/TPPATask.kt +++ b/api/src/main/kotlin/nebulosa/api/alignment/polar/tppa/TPPATask.kt @@ -110,7 +110,7 @@ data class TPPATask( rightAscension = mount?.rightAscension ?: 0.0 declination = mount?.declination ?: 0.0 - camera.snoop(listOf(mount)) + camera.snoop(camera.snoopedDevices.filter { it !is Mount } + mount) cancellationToken.listenToPause(this) diff --git a/api/src/main/kotlin/nebulosa/api/autofocus/AutoFocusTask.kt b/api/src/main/kotlin/nebulosa/api/autofocus/AutoFocusTask.kt index d1231501b..17492d104 100644 --- a/api/src/main/kotlin/nebulosa/api/autofocus/AutoFocusTask.kt +++ b/api/src/main/kotlin/nebulosa/api/autofocus/AutoFocusTask.kt @@ -85,7 +85,7 @@ data class AutoFocusTask( var numberOfAttempts = 0 val maximumFocusPoints = request.capture.exposureAmount * request.initialOffsetSteps * 10 - // camera.snoop(listOf(focuser)) + camera.snoop(camera.snoopedDevices.filter { it !is Focuser } + focuser) while (!exited && !cancellationToken.isCancelled) { numberOfAttempts++ diff --git a/api/src/main/kotlin/nebulosa/api/calibration/CalibrationFrameService.kt b/api/src/main/kotlin/nebulosa/api/calibration/CalibrationFrameService.kt index 25ecd51d6..012a7dbd7 100644 --- a/api/src/main/kotlin/nebulosa/api/calibration/CalibrationFrameService.kt +++ b/api/src/main/kotlin/nebulosa/api/calibration/CalibrationFrameService.kt @@ -31,26 +31,37 @@ class CalibrationFrameService( fun 
calibrate(name: String, image: Image, createNew: Boolean = false): Image { return synchronized(image) { val darkFrame = findBestDarkFrames(name, image).firstOrNull() - val biasFrame = findBestBiasFrames(name, image).firstOrNull() + val biasFrame = if (darkFrame == null) findBestBiasFrames(name, image).firstOrNull() else null val flatFrame = findBestFlatFrames(name, image).firstOrNull() - if (darkFrame != null || biasFrame != null || flatFrame != null) { + val darkImage = darkFrame?.path?.fits()?.use(Image::open) + val biasImage = biasFrame?.path?.fits()?.use(Image::open) + var flatImage = flatFrame?.path?.fits()?.use(Image::open) + + if (darkImage != null || biasImage != null || flatImage != null) { var transformedImage = if (createNew) image.clone() else image - if (biasFrame != null) { - val calibrationImage = biasFrame.path!!.fits().use(Image::open) - transformedImage = transformedImage.transform(BiasSubtraction(calibrationImage)) + // If not using dark frames. + if (biasImage != null) { + // Subtract Master Bias from Flat Frames. + if (flatImage != null) { + flatImage = flatImage.transform(BiasSubtraction(biasImage)) + LOG.info("bias frame subtraction applied to flat frame. frame={}", biasFrame) + } + + // Subtract the Master Bias frame. + transformedImage = transformedImage.transform(BiasSubtraction(biasImage)) LOG.info("bias frame subtraction applied. frame={}", biasFrame) - } else { + } else if (darkFrame == null) { LOG.info( "no bias frames found. width={}, height={}, bin={}, gain={}", image.width, image.height, image.header.binX, image.header.gain ) } - if (darkFrame != null) { - val calibrationImage = darkFrame.path!!.fits().use(Image::open) - transformedImage = transformedImage.transform(DarkSubtraction(calibrationImage)) + // Subtract Master Dark frame. + if (darkImage != null) { + transformedImage = transformedImage.transform(DarkSubtraction(darkImage)) LOG.info("dark frame subtraction applied. 
frame={}", darkFrame) } else { LOG.info( @@ -59,9 +70,9 @@ class CalibrationFrameService( ) } - if (flatFrame != null) { - val calibrationImage = flatFrame.path!!.fits().use(Image::open) - transformedImage = transformedImage.transform(FlatCorrection(calibrationImage)) + // Divide the Dark-subtracted Light frame by the Master Flat frame to correct for variations in the optical path. + if (flatImage != null) { + transformedImage = transformedImage.transform(FlatCorrection(flatImage)) LOG.info("flat frame correction applied. frame={}", flatFrame) } else { LOG.info( @@ -177,7 +188,7 @@ class CalibrationFrameService( name: String, width: Int, height: Int, binX: Int, binY: Int, filter: String? ): List { - // TODO: Generate master from matched frames. + // TODO: Generate master from matched frames. (Subtract the master bias frame from each flat frame) return calibrationFrameRepository .flatFrames(name, filter, width, height, binX) } diff --git a/api/src/main/kotlin/nebulosa/api/cameras/CameraCaptureExecutor.kt b/api/src/main/kotlin/nebulosa/api/cameras/CameraCaptureExecutor.kt index 2f7db5453..572dd53cc 100644 --- a/api/src/main/kotlin/nebulosa/api/cameras/CameraCaptureExecutor.kt +++ b/api/src/main/kotlin/nebulosa/api/cameras/CameraCaptureExecutor.kt @@ -48,6 +48,14 @@ class CameraCaptureExecutor( } } + fun pause(camera: Camera) { + jobs.find { it.task.camera === camera }?.pause() + } + + fun unpause(camera: Camera) { + jobs.find { it.task.camera === camera }?.unpause() + } + fun stop(camera: Camera) { jobs.find { it.task.camera === camera }?.stop() } diff --git a/api/src/main/kotlin/nebulosa/api/cameras/CameraCaptureState.kt b/api/src/main/kotlin/nebulosa/api/cameras/CameraCaptureState.kt index 4052d019a..092c6a400 100644 --- a/api/src/main/kotlin/nebulosa/api/cameras/CameraCaptureState.kt +++ b/api/src/main/kotlin/nebulosa/api/cameras/CameraCaptureState.kt @@ -8,6 +8,8 @@ enum class CameraCaptureState { WAITING, SETTLING, DITHERING, + PAUSING, + PAUSED, 
EXPOSURE_FINISHED, CAPTURE_FINISHED, } diff --git a/api/src/main/kotlin/nebulosa/api/cameras/CameraCaptureTask.kt b/api/src/main/kotlin/nebulosa/api/cameras/CameraCaptureTask.kt index 4b78b7e59..d27ae0fdd 100644 --- a/api/src/main/kotlin/nebulosa/api/cameras/CameraCaptureTask.kt +++ b/api/src/main/kotlin/nebulosa/api/cameras/CameraCaptureTask.kt @@ -11,6 +11,7 @@ import nebulosa.api.tasks.SplitTask import nebulosa.api.tasks.delay.DelayEvent import nebulosa.api.tasks.delay.DelayTask import nebulosa.common.concurrency.cancel.CancellationToken +import nebulosa.common.concurrency.latch.PauseListener import nebulosa.guiding.Guider import nebulosa.indi.device.camera.Camera import nebulosa.indi.device.camera.CameraEvent @@ -20,6 +21,7 @@ import nebulosa.log.loggerFor import java.nio.file.Path import java.time.Duration import java.util.concurrent.Executor +import java.util.concurrent.atomic.AtomicBoolean data class CameraCaptureTask( @JvmField val camera: Camera, @@ -29,7 +31,7 @@ data class CameraCaptureTask( private val exposureMaxRepeat: Int = 0, private val executor: Executor? = null, private val calibrationFrameProvider: CalibrationFrameProvider? = null, -) : AbstractTask(), Consumer, CameraEventAware { +) : AbstractTask(), Consumer, PauseListener, CameraEventAware { private val delayTask = DelayTask(request.exposureDelay) private val waitForSettleTask = WaitForSettleTask(guider) @@ -54,6 +56,8 @@ data class CameraCaptureTask( @Volatile private var exposureRepeatCount = 0 @Volatile private var liveStacker: LiveStacker? 
= null + private val pausing = AtomicBoolean() + init { delayTask.subscribe(this) cameraExposureTask.subscribe(this) @@ -68,6 +72,14 @@ data class CameraCaptureTask( cameraExposureTask.handleCameraEvent(event) } + override fun onPause(paused: Boolean) { + pausing.set(paused) + + if (paused) { + sendEvent(CameraCaptureState.PAUSING) + } + } + private fun LiveStackingRequest.processCalibrationGroup(): LiveStackingRequest { return if (calibrationFrameProvider != null && enabled && !request.calibrationGroup.isNullOrBlank() && (dark == null || flat == null) @@ -107,6 +119,9 @@ data class CameraCaptureTask( cameraExposureTask.reset() + pausing.set(false) + cancellationToken.listenToPause(this) + if (liveStacker == null && request.liveStacking.enabled && (request.isLoop || request.exposureAmount > 1 || exposureMaxRepeat > 1) ) { @@ -126,6 +141,12 @@ data class CameraCaptureTask( ((exposureMaxRepeat > 0 && exposureRepeatCount < exposureMaxRepeat) || (exposureMaxRepeat <= 0 && (request.isLoop || exposureCount < request.exposureAmount))) ) { + if (cancellationToken.isPaused) { + pausing.set(false) + sendEvent(CameraCaptureState.PAUSED) + cancellationToken.waitForPause() + } + if (exposureCount == 0) { sendEvent(CameraCaptureState.CAPTURE_STARTED) @@ -160,6 +181,9 @@ data class CameraCaptureTask( } } + pausing.set(false) + cancellationToken.unlistenToPause(this) + sendEvent(CameraCaptureState.CAPTURE_FINISHED) liveStacker?.close() @@ -216,12 +240,14 @@ data class CameraCaptureTask( captureProgress = (estimatedCaptureTime - captureRemainingTime).toNanos().toDouble() / estimatedCaptureTime.toNanos() } + val isExposureFinished = state == CameraCaptureState.EXPOSURE_FINISHED + val event = CameraCaptureEvent( - this, camera, state, request.exposureAmount, exposureCount, + this, camera, if (pausing.get() && !isExposureFinished) CameraCaptureState.PAUSING else state, request.exposureAmount, exposureCount, captureRemainingTime, captureElapsedTime, captureProgress, stepRemainingTime, 
stepElapsedTime, stepProgress, savedPath, liveStackedPath, - if (state == CameraCaptureState.EXPOSURE_FINISHED) request else null + if (isExposureFinished) request else null ) onNext(event) @@ -237,6 +263,7 @@ data class CameraCaptureTask( delayAndWaitForSettleSplitTask.close() cameraExposureTask.close() ditherAfterExposureTask.close() + liveStacker?.close() super.close() } @@ -256,6 +283,7 @@ data class CameraCaptureTask( cameraExposureTask.reset() ditherAfterExposureTask.reset() + pausing.set(false) exposureRepeatCount = 0 } diff --git a/api/src/main/kotlin/nebulosa/api/cameras/CameraController.kt b/api/src/main/kotlin/nebulosa/api/cameras/CameraController.kt index 58f4e460b..38dd89fc6 100644 --- a/api/src/main/kotlin/nebulosa/api/cameras/CameraController.kt +++ b/api/src/main/kotlin/nebulosa/api/cameras/CameraController.kt @@ -61,6 +61,16 @@ class CameraController( @RequestBody body: CameraStartCaptureRequest, ) = cameraService.startCapture(camera, body) + @PutMapping("{camera}/capture/pause") + fun pauseCapture(camera: Camera) { + cameraService.pauseCapture(camera) + } + + @PutMapping("{camera}/capture/unpause") + fun unpauseCapture(camera: Camera) { + cameraService.unpauseCapture(camera) + } + @PutMapping("{camera}/capture/abort") fun abortCapture(camera: Camera) { cameraService.abortCapture(camera) diff --git a/api/src/main/kotlin/nebulosa/api/cameras/CameraService.kt b/api/src/main/kotlin/nebulosa/api/cameras/CameraService.kt index ddd92e97a..f92b075bf 100644 --- a/api/src/main/kotlin/nebulosa/api/cameras/CameraService.kt +++ b/api/src/main/kotlin/nebulosa/api/cameras/CameraService.kt @@ -43,6 +43,14 @@ class CameraService( cameraCaptureExecutor.execute(camera, request.copy(savePath = savePath)) } + fun pauseCapture(camera: Camera) { + cameraCaptureExecutor.pause(camera) + } + + fun unpauseCapture(camera: Camera) { + cameraCaptureExecutor.unpause(camera) + } + @Synchronized fun abortCapture(camera: Camera) { cameraCaptureExecutor.stop(camera) diff --git 
a/api/src/main/kotlin/nebulosa/api/livestacking/LiveStackerType.kt b/api/src/main/kotlin/nebulosa/api/livestacking/LiveStackerType.kt index 5e1d6426e..d5f5f6c09 100644 --- a/api/src/main/kotlin/nebulosa/api/livestacking/LiveStackerType.kt +++ b/api/src/main/kotlin/nebulosa/api/livestacking/LiveStackerType.kt @@ -2,4 +2,5 @@ package nebulosa.api.livestacking enum class LiveStackerType { SIRIL, + PIXINSIGHT, } diff --git a/api/src/main/kotlin/nebulosa/api/livestacking/LiveStackingRequest.kt b/api/src/main/kotlin/nebulosa/api/livestacking/LiveStackingRequest.kt index 9f26e144b..62dd939e9 100644 --- a/api/src/main/kotlin/nebulosa/api/livestacking/LiveStackingRequest.kt +++ b/api/src/main/kotlin/nebulosa/api/livestacking/LiveStackingRequest.kt @@ -3,6 +3,10 @@ package nebulosa.api.livestacking import com.fasterxml.jackson.databind.annotation.JsonDeserialize import nebulosa.api.beans.converters.angle.DegreesDeserializer import nebulosa.livestacking.LiveStacker +import nebulosa.pixinsight.livestacking.PixInsightLiveStacker +import nebulosa.pixinsight.script.PixInsightIsRunning +import nebulosa.pixinsight.script.PixInsightScriptRunner +import nebulosa.pixinsight.script.PixInsightStartup import nebulosa.siril.livestacking.SirilLiveStacker import org.jetbrains.annotations.NotNull import java.nio.file.Files @@ -15,8 +19,10 @@ data class LiveStackingRequest( @JvmField @field:NotNull val executablePath: Path? = null, @JvmField val dark: Path? = null, @JvmField val flat: Path? = null, + @JvmField val bias: Path? 
= null, @JvmField @field:JsonDeserialize(using = DegreesDeserializer::class) val rotate: Double = 0.0, @JvmField val use32Bits: Boolean = false, + @JvmField val slot: Int = 1, ) : Supplier { override fun get(): LiveStacker { @@ -24,6 +30,17 @@ data class LiveStackingRequest( return when (type) { LiveStackerType.SIRIL -> SirilLiveStacker(executablePath!!, workingDirectory, dark, flat, rotate, use32Bits) + LiveStackerType.PIXINSIGHT -> { + val runner = PixInsightScriptRunner(executablePath!!) + + if (!PixInsightIsRunning(slot).use { it.runSync(runner) }) { + if (!PixInsightStartup(slot).use { it.runSync(runner) }) { + throw IllegalStateException("unable to start PixInsight") + } + } + + PixInsightLiveStacker(runner, workingDirectory, dark, flat, bias, use32Bits, slot) + } } } diff --git a/api/src/main/kotlin/nebulosa/api/sequencer/SequencerController.kt b/api/src/main/kotlin/nebulosa/api/sequencer/SequencerController.kt index fd36445da..17f9d715a 100644 --- a/api/src/main/kotlin/nebulosa/api/sequencer/SequencerController.kt +++ b/api/src/main/kotlin/nebulosa/api/sequencer/SequencerController.kt @@ -5,6 +5,7 @@ import nebulosa.indi.device.camera.Camera import nebulosa.indi.device.filterwheel.FilterWheel import nebulosa.indi.device.focuser.Focuser import nebulosa.indi.device.mount.Mount +import nebulosa.indi.device.rotator.Rotator import org.springframework.web.bind.annotation.* @RestController @@ -16,9 +17,9 @@ class SequencerController( @PutMapping("{camera}/start") fun start( camera: Camera, - mount: Mount?, wheel: FilterWheel?, focuser: Focuser?, + mount: Mount?, wheel: FilterWheel?, focuser: Focuser?, rotator: Rotator?, @RequestBody @Valid body: SequencePlanRequest, - ) = sequencerService.start(camera, body, mount, wheel, focuser) + ) = sequencerService.start(camera, body, mount, wheel, focuser, rotator) @PutMapping("{camera}/stop") fun stop(camera: Camera) { diff --git a/api/src/main/kotlin/nebulosa/api/sequencer/SequencerExecutor.kt 
b/api/src/main/kotlin/nebulosa/api/sequencer/SequencerExecutor.kt index 48b10b2af..4f3bd05eb 100644 --- a/api/src/main/kotlin/nebulosa/api/sequencer/SequencerExecutor.kt +++ b/api/src/main/kotlin/nebulosa/api/sequencer/SequencerExecutor.kt @@ -16,6 +16,7 @@ import nebulosa.indi.device.filterwheel.FilterWheelEvent import nebulosa.indi.device.focuser.Focuser import nebulosa.indi.device.focuser.FocuserEvent import nebulosa.indi.device.mount.Mount +import nebulosa.indi.device.rotator.Rotator import org.greenrobot.eventbus.Subscribe import org.greenrobot.eventbus.ThreadMode import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor @@ -54,7 +55,7 @@ class SequencerExecutor( fun execute( camera: Camera, request: SequencePlanRequest, - mount: Mount? = null, wheel: FilterWheel? = null, focuser: Focuser? = null, + mount: Mount? = null, wheel: FilterWheel? = null, focuser: Focuser? = null, rotator: Rotator? = null, ) { check(camera.connected) { "${camera.name} Camera is not connected" } check(jobs.none { it.task.camera === camera }) { "${camera.name} Sequencer Job is already in progress" } @@ -67,7 +68,11 @@ class SequencerExecutor( check(jobs.none { it.task.focuser === focuser }) { "${camera.name} Sequencer Job is already in progress" } } - val task = SequencerTask(camera, request, guider, mount, wheel, focuser, threadPoolTaskExecutor, calibrationFrameService) + if (rotator != null && rotator.connected) { + check(jobs.none { it.task.rotator === rotator }) { "${camera.name} Sequencer Job is already in progress" } + } + + val task = SequencerTask(camera, request, guider, mount, wheel, focuser, rotator, threadPoolTaskExecutor, calibrationFrameService) task.subscribe(this) with(SequencerJob(task)) { diff --git a/api/src/main/kotlin/nebulosa/api/sequencer/SequencerService.kt b/api/src/main/kotlin/nebulosa/api/sequencer/SequencerService.kt index fe26e8e24..871fcd1f3 100644 --- a/api/src/main/kotlin/nebulosa/api/sequencer/SequencerService.kt +++ 
b/api/src/main/kotlin/nebulosa/api/sequencer/SequencerService.kt @@ -4,6 +4,7 @@ import nebulosa.indi.device.camera.Camera import nebulosa.indi.device.filterwheel.FilterWheel import nebulosa.indi.device.focuser.Focuser import nebulosa.indi.device.mount.Mount +import nebulosa.indi.device.rotator.Rotator import org.springframework.stereotype.Service import java.nio.file.Path import kotlin.io.path.exists @@ -18,13 +19,13 @@ class SequencerService( @Synchronized fun start( camera: Camera, request: SequencePlanRequest, - mount: Mount?, wheel: FilterWheel?, focuser: Focuser?, + mount: Mount?, wheel: FilterWheel?, focuser: Focuser?, rotator: Rotator?, ) { val savePath = request.savePath ?.takeIf { "$it".isNotBlank() && it.exists() && it.isDirectory() } ?: Path.of("$sequencesPath", (System.currentTimeMillis() / 1000).toString()) - sequencerExecutor.execute(camera, request.copy(savePath = savePath), mount, wheel, focuser) + sequencerExecutor.execute(camera, request.copy(savePath = savePath), mount, wheel, focuser, rotator) } @Synchronized diff --git a/api/src/main/kotlin/nebulosa/api/sequencer/SequencerTask.kt b/api/src/main/kotlin/nebulosa/api/sequencer/SequencerTask.kt index ecd8aca07..9b624c38d 100644 --- a/api/src/main/kotlin/nebulosa/api/sequencer/SequencerTask.kt +++ b/api/src/main/kotlin/nebulosa/api/sequencer/SequencerTask.kt @@ -19,6 +19,7 @@ import nebulosa.indi.device.filterwheel.FilterWheel import nebulosa.indi.device.filterwheel.FilterWheelEvent import nebulosa.indi.device.focuser.Focuser import nebulosa.indi.device.mount.Mount +import nebulosa.indi.device.rotator.Rotator import nebulosa.log.loggerFor import java.time.Duration import java.util.* @@ -37,6 +38,7 @@ data class SequencerTask( @JvmField val mount: Mount? = null, @JvmField val wheel: FilterWheel? = null, @JvmField val focuser: Focuser? = null, + @JvmField val rotator: Rotator? = null, private val executor: Executor? = null, private val calibrationFrameProvider: CalibrationFrameProvider? 
= null, ) : AbstractTask(), Consumer, CameraEventAware, WheelEventAware { @@ -131,7 +133,7 @@ data class SequencerTask( override fun execute(cancellationToken: CancellationToken) { LOG.info("Sequencer started. camera={}, mount={}, wheel={}, focuser={}, plan={}", camera, mount, wheel, focuser, plan) - camera.snoop(listOf(mount, wheel, focuser)) + camera.snoop(listOf(mount, wheel, focuser, rotator)) for (task in tasks) { if (cancellationToken.isCancelled) break diff --git a/api/src/main/kotlin/nebulosa/api/stardetection/ImageStarSerializer.kt b/api/src/main/kotlin/nebulosa/api/stardetection/ImageStarSerializer.kt new file mode 100644 index 000000000..f4e190ecd --- /dev/null +++ b/api/src/main/kotlin/nebulosa/api/stardetection/ImageStarSerializer.kt @@ -0,0 +1,24 @@ +package nebulosa.api.stardetection + +import com.fasterxml.jackson.core.JsonGenerator +import com.fasterxml.jackson.databind.SerializerProvider +import com.fasterxml.jackson.databind.ser.std.StdSerializer +import nebulosa.star.detection.ImageStar +import org.springframework.stereotype.Component + +@Component +class ImageStarSerializer : StdSerializer(ImageStar::class.java) { + + override fun serialize(star: ImageStar?, gen: JsonGenerator, provider: SerializerProvider) { + if (star == null) gen.writeNull() + else { + gen.writeStartObject() + gen.writeNumberField("x", star.x) + gen.writeNumberField("y", star.y) + gen.writeNumberField("hfd", star.hfd) + gen.writeNumberField("snr", star.snr) + gen.writeNumberField("flux", star.flux) + gen.writeEndObject() + } + } +} diff --git a/api/src/main/kotlin/nebulosa/api/stardetection/StarDetectionRequest.kt b/api/src/main/kotlin/nebulosa/api/stardetection/StarDetectionRequest.kt index dcdd0ab97..489d4feec 100644 --- a/api/src/main/kotlin/nebulosa/api/stardetection/StarDetectionRequest.kt +++ b/api/src/main/kotlin/nebulosa/api/stardetection/StarDetectionRequest.kt @@ -1,6 +1,10 @@ package nebulosa.api.stardetection import nebulosa.astap.star.detection.AstapStarDetector 
+import nebulosa.pixinsight.script.PixInsightIsRunning +import nebulosa.pixinsight.script.PixInsightScriptRunner +import nebulosa.pixinsight.script.PixInsightStartup +import nebulosa.pixinsight.star.detection.PixInsightStarDetector import nebulosa.star.detection.StarDetector import java.nio.file.Path import java.time.Duration @@ -11,10 +15,22 @@ data class StarDetectionRequest( @JvmField val executablePath: Path? = null, @JvmField val timeout: Duration = Duration.ZERO, @JvmField val minSNR: Double = 0.0, + @JvmField val slot: Int = 1, ) : Supplier> { override fun get() = when (type) { StarDetectorType.ASTAP -> AstapStarDetector(executablePath!!, minSNR) + StarDetectorType.PIXINSIGHT -> { + val runner = PixInsightScriptRunner(executablePath!!) + + if (!PixInsightIsRunning(slot).use { it.runSync(runner) }) { + if (!PixInsightStartup(slot).use { it.runSync(runner) }) { + throw IllegalStateException("unable to start PixInsight") + } + } + + PixInsightStarDetector(runner, slot, minSNR, timeout) + } } companion object { diff --git a/api/src/main/kotlin/nebulosa/api/stardetection/StarDetectorType.kt b/api/src/main/kotlin/nebulosa/api/stardetection/StarDetectorType.kt index 31ae2f97c..d34427b0b 100644 --- a/api/src/main/kotlin/nebulosa/api/stardetection/StarDetectorType.kt +++ b/api/src/main/kotlin/nebulosa/api/stardetection/StarDetectorType.kt @@ -1,5 +1,6 @@ package nebulosa.api.stardetection enum class StarDetectorType { - ASTAP + ASTAP, + PIXINSIGHT } diff --git a/desktop/camera.png b/desktop/camera.png index 8487d530d..c25ce3252 100644 Binary files a/desktop/camera.png and b/desktop/camera.png differ diff --git a/desktop/guider.png b/desktop/guider.png index 6c2bf9829..e37d31d52 100644 Binary files a/desktop/guider.png and b/desktop/guider.png differ diff --git a/desktop/indi.png b/desktop/indi.png index 14853441d..13db4d97e 100644 Binary files a/desktop/indi.png and b/desktop/indi.png differ diff --git a/desktop/sequencer.png b/desktop/sequencer.png index 
a3926c583..679d27ebc 100644 Binary files a/desktop/sequencer.png and b/desktop/sequencer.png differ diff --git a/desktop/src/app/alignment/alignment.component.html b/desktop/src/app/alignment/alignment.component.html index 3acb4adf8..fcdc2d66b 100644 --- a/desktop/src/app/alignment/alignment.component.html +++ b/desktop/src/app/alignment/alignment.component.html @@ -83,15 +83,15 @@
@if (pausingOrPaused) { + severity="success" size="small" [text]="true" /> } @else if(!running) { + label="Start" (onClick)="tppaStart()" icon="mdi mdi-play" severity="success" size="small" [text]="true" /> } - + + icon="mdi mdi-stop" severity="danger" size="small" [text]="true" />
@@ -124,15 +124,15 @@
+ icon="mdi mdi-play" severity="success" size="small" [text]="true" /> + icon="mdi mdi-stop" severity="danger" size="small" [text]="true" /> + styleClass="ml-4" pTooltip="View image" tooltipPosition="bottom" size="small" [text]="true" />
-
+
1. Locate a star near the south meridian and close to declination 0. 2. Start DARV and wait for routine to complete. 3. If you see V shaped track, adjust the Azimuth and repeat the step 2 till you get a line. diff --git a/desktop/src/app/app.module.ts b/desktop/src/app/app.module.ts index 0826d8f46..f0cd52ca6 100644 --- a/desktop/src/app/app.module.ts +++ b/desktop/src/app/app.module.ts @@ -49,6 +49,7 @@ import { HistogramComponent } from '../shared/components/histogram/histogram.com import { MapComponent } from '../shared/components/map/map.component' import { MenuItemComponent } from '../shared/components/menu-item/menu-item.component' import { MoonComponent } from '../shared/components/moon/moon.component' +import { PathChooserComponent } from '../shared/components/path-chooser/path-chooser.component' import { SlideMenuComponent } from '../shared/components/slide-menu/slide-menu.component' import { LocationDialog } from '../shared/dialogs/location/location.dialog' import { ScrollableNumberDirective } from '../shared/directives/input-number-scrollable' @@ -56,6 +57,7 @@ import { NoDropdownDirective } from '../shared/directives/no-dropdown.directive' import { StopPropagationDirective } from '../shared/directives/stop-propagation.directive' import { LocationInterceptor } from '../shared/interceptors/location.interceptor' import { AnglePipe } from '../shared/pipes/angle.pipe' +import { DropdownOptionsPipe } from '../shared/pipes/dropdown-options' import { EnumPipe } from '../shared/pipes/enum.pipe' import { EnvPipe } from '../shared/pipes/env.pipe' import { ExposureTimePipe } from '../shared/pipes/exposureTime.pipe' @@ -84,7 +86,6 @@ import { MountComponent } from './mount/mount.component' import { RotatorComponent } from './rotator/rotator.component' import { SequencerComponent } from './sequencer/sequencer.component' import { SettingsComponent } from './settings/settings.component' -import { PathChooserComponent } from 
'../shared/components/path-chooser/path-chooser.component' @NgModule({ declarations: [ @@ -101,6 +102,7 @@ import { PathChooserComponent } from '../shared/components/path-chooser/path-cho DeviceChooserComponent, DeviceListMenuComponent, DialogMenuComponent, + DropdownOptionsPipe, EnumPipe, EnvPipe, ExposureTimePipe, @@ -179,6 +181,7 @@ import { PathChooserComponent } from '../shared/components/path-chooser/path-cho AnglePipe, ConfirmationService, DialogService, + DropdownOptionsPipe, EnumPipe, EnvPipe, ExposureTimePipe, diff --git a/desktop/src/app/autofocus/autofocus.component.html b/desktop/src/app/autofocus/autofocus.component.html index 2e39c859f..bbad649d8 100644 --- a/desktop/src/app/autofocus/autofocus.component.html +++ b/desktop/src/app/autofocus/autofocus.component.html @@ -58,7 +58,7 @@
- @@ -66,8 +66,8 @@
- +
@@ -83,7 +83,7 @@
- @@ -120,9 +120,9 @@
+ icon="mdi mdi-play" severity="success" size="small" [text]="true" /> + icon="mdi mdi-stop" severity="danger" size="small" [text]="true" />
diff --git a/desktop/src/app/camera/camera.component.html b/desktop/src/app/camera/camera.component.html index 793d9b3b2..3accc3f0e 100644 --- a/desktop/src/app/camera/camera.component.html +++ b/desktop/src/app/camera/camera.component.html @@ -156,7 +156,7 @@
+ severity="info" size="small" pTooltip="Full size" tooltipPosition="bottom" [text]="true" />
@@ -203,13 +203,20 @@
+ @if (pausingOrPaused) { + + } @else if(!running) { + tooltipStyleClass="min-w-22rem flex justify-content-center" [text]="true" /> + } + + severity="danger" size="small" [text]="true" /> + severity="info" size="small" [text]="true" />
@@ -255,7 +262,7 @@
- @@ -263,7 +270,7 @@
32-bits (slower) -
@@ -282,5 +289,9 @@
+
+ +
\ No newline at end of file diff --git a/desktop/src/app/camera/camera.component.ts b/desktop/src/app/camera/camera.component.ts index 4dd5aa290..4aa9b489f 100644 --- a/desktop/src/app/camera/camera.component.ts +++ b/desktop/src/app/camera/camera.component.ts @@ -176,6 +176,14 @@ export class CameraComponent implements AfterContentInit, OnDestroy, Pingable { @ViewChild('cameraExposure') private readonly cameraExposure!: CameraExposureComponent + get status() { + return this.cameraExposure?.state ?? 'IDLE' + } + + get pausingOrPaused() { + return this.status === 'PAUSING' || this.status === 'PAUSED' + } + constructor( private app: AppComponent, private api: ApiService, @@ -532,7 +540,9 @@ export class CameraComponent implements AfterContentInit, OnDestroy, Pingable { const exposureAmount = this.exposureMode === 'LOOP' ? 0 : (this.exposureMode === 'FIXED' ? this.request.exposureAmount : 1) const savePath = this.mode !== 'CAPTURE' ? this.request.savePath : this.savePath - this.request.liveStacking.executablePath = this.preference.liveStackingRequest(this.request.liveStacking.type).get().executablePath + const liveStackingRequest = this.preference.liveStackingRequest(this.request.liveStacking.type).get() + this.request.liveStacking.executablePath = liveStackingRequest.executablePath + this.request.liveStacking.slot = liveStackingRequest.slot || 1 return { ...this.request, @@ -543,10 +553,23 @@ export class CameraComponent implements AfterContentInit, OnDestroy, Pingable { } async startCapture() { - await this.openCameraImage() - await this.api.cameraSnoop(this.camera, this.equipment) - await this.api.cameraStartCapture(this.camera, this.makeCameraStartCapture()) - this.preference.equipmentForDevice(this.camera).set(this.equipment) + try { + this.running = true + await this.openCameraImage() + await this.api.cameraSnoop(this.camera, this.equipment) + await this.api.cameraStartCapture(this.camera, this.makeCameraStartCapture()) + 
this.preference.equipmentForDevice(this.camera).set(this.equipment) + } catch { + this.running = false + } + } + + pauseCapture() { + return this.api.cameraPauseCapture(this.camera) + } + + unpauseCapture() { + return this.api.cameraUnpauseCapture(this.camera) } abortCapture() { diff --git a/desktop/src/app/filterwheel/filterwheel.component.html b/desktop/src/app/filterwheel/filterwheel.component.html index 9ae270e7d..eb8b6f42c 100644 --- a/desktop/src/app/filterwheel/filterwheel.component.html +++ b/desktop/src/app/filterwheel/filterwheel.component.html @@ -47,7 +47,7 @@
+ (onClick)="moveToSelectedFilter()" size="small" [text]="true" />
diff --git a/desktop/src/app/flat-wizard/flat-wizard.component.html b/desktop/src/app/flat-wizard/flat-wizard.component.html index 9c6bf6135..8fb92158c 100644 --- a/desktop/src/app/flat-wizard/flat-wizard.component.html +++ b/desktop/src/app/flat-wizard/flat-wizard.component.html @@ -67,9 +67,9 @@
+ severity="success" size="small" [text]="true" /> + severity="danger" size="small" [text]="true" />
\ No newline at end of file diff --git a/desktop/src/app/guider/guider.component.html b/desktop/src/app/guider/guider.component.html index afdf8c25b..ad692a1fe 100644 --- a/desktop/src/app/guider/guider.component.html +++ b/desktop/src/app/guider/guider.component.html @@ -1,8 +1,8 @@ -
+
-
+
- + + size="small" severity="info" [text]="true" />
-
-
{{ guideState | enum | lowercase }} {{ message }}
-
- - -
-
- - - - -
-
- - - - -
-
- - - - -
-
- - - - -
-
- - - - -
-
- - - - -
-
- - - - -
-
- - - - -
-
-
-
- -
- North - East -
-
- -
-
- - - - -
-
- - - - -
-
- - - - -
-
-
-
-
- - - -
+
+ + +
+
+ + + + +
+
+ + + + +
+
+ + + + +
+
+ + + + +
+
+ + + + +
+
+ + + + +
+
+ + + + +
+
+ + + + +
+
+
+
+ +
+ North + East +
+
+ +
+
+ + + + +
+
+ + + + +
+
+ + + + +
+
+
+
+
+ + + +
+
+
- +
-
- -
-
+
+
+ +
diff --git a/desktop/src/app/guider/guider.component.ts b/desktop/src/app/guider/guider.component.ts index 8b05c6572..318fb1a39 100644 --- a/desktop/src/app/guider/guider.component.ts +++ b/desktop/src/app/guider/guider.component.ts @@ -70,7 +70,7 @@ export class GuiderComponent implements AfterViewInit, OnDestroy, Pingable { } readonly chartData: ChartData = { - labels: Array.from({ length: 100 }), + labels: Array.from({ length: 100 }, (_, i) => `${i}`), datasets: [ // RA. { @@ -129,9 +129,8 @@ export class GuiderComponent implements AfterViewInit, OnDestroy, Pingable { const scale = barType ? this.phdDurationScale : 1.0 const y = context.parsed.y * scale const prefix = raType ? 'RA: ' : 'DEC: ' - const barSuffix = ' ms' const lineSuffix = this.yAxisUnit === 'ARCSEC' ? '"' : 'px' - const formattedY = prefix + (barType ? y.toFixed(0) + barSuffix : y.toFixed(2) + lineSuffix) + const formattedY = prefix + (barType ? y.toFixed(0) + ' ms' : y.toFixed(2) + lineSuffix) return formattedY } } @@ -188,7 +187,8 @@ export class GuiderComponent implements AfterViewInit, OnDestroy, Pingable { } }, x: { - stacked: false, + type: 'linear', + stacked: true, min: 0, max: 100, border: { @@ -196,13 +196,20 @@ export class GuiderComponent implements AfterViewInit, OnDestroy, Pingable { dash: [2, 4], }, ticks: { - autoSkip: true, + autoSkip: false, count: 11, maxRotation: 0, minRotation: 0, - callback: (value) => { + callback: (value, i, ticks) => { const a = value as number - return (a - Math.trunc(a) > 0) ? undefined : a.toFixed(0) + + if (i === 0) { + return a.toFixed(0) + } else if (ticks[i - 1]) { + if (Math.abs(Math.trunc(ticks[i - 1].value) - Math.trunc(a)) >= 1) { + return a.toFixed(0) + } + } } }, grid: { diff --git a/desktop/src/app/image/image.component.html b/desktop/src/app/image/image.component.html index ef6ee2804..c372fd401 100644 --- a/desktop/src/app/image/image.component.html +++ b/desktop/src/app/image/image.component.html @@ -10,9 +10,9 @@ - - + {{ (a.star ?? 
a.dso ?? a.minorPlanet) | skyObject:'name' }} @@ -85,71 +85,76 @@
- + -
- +
- +
-
-
-
+
-
-
+
- +
-
+
- +
-
+
-
-
- Simbad +
+ Simbad
- - - - + + + +
@@ -159,7 +164,8 @@
- + @@ -230,18 +236,18 @@
- - - -
- + @@ -295,7 +301,7 @@
+ [options]="'SCNR_PROTECTION_METHOD' | dropdownOptions" styleClass="p-inputtext-sm border-0" [autoDisplayFirst]="false">
{{ item | enum }} @@ -421,7 +427,7 @@
- @@ -457,7 +463,7 @@
+ value="{{ starDetection.computed.minFlux.toFixed(0) }} | {{ starDetection.computed.maxFlux.toFixed(0) }}" />
@@ -478,7 +484,7 @@
- +
@@ -490,7 +496,7 @@
- +
@@ -498,7 +504,7 @@
- + @@ -673,13 +679,13 @@
-
diff --git a/desktop/src/app/image/image.component.ts b/desktop/src/app/image/image.component.ts index 5a2a2f04f..58075f525 100644 --- a/desktop/src/app/image/image.component.ts +++ b/desktop/src/app/image/image.component.ts @@ -16,9 +16,9 @@ import { BrowserWindowService } from '../../shared/services/browser-window.servi import { ElectronService } from '../../shared/services/electron.service' import { PreferenceService } from '../../shared/services/preference.service' import { PrimeService } from '../../shared/services/prime.service' -import { Angle, AstronomicalObject, DeepSkyObject, EquatorialCoordinateJ2000, Star } from '../../shared/types/atlas.types' +import { Angle, EquatorialCoordinateJ2000 } from '../../shared/types/atlas.types' import { Camera } from '../../shared/types/camera.types' -import { DEFAULT_FOV, DetectedStar, EMPTY_IMAGE_SOLVED, FOV, IMAGE_STATISTICS_BIT_OPTIONS, ImageAnnotation, ImageAnnotationDialog, ImageChannel, ImageData, ImageFITSHeadersDialog, ImageFOVDialog, ImageInfo, ImageROI, ImageSCNRDialog, ImageSaveDialog, ImageSolved, ImageSolverDialog, ImageStatisticsBitOption, ImageStretchDialog, ImageTransformation, SCNR_PROTECTION_METHODS, StarDetectionDialog } from '../../shared/types/image.types' +import { AnnotationInfoDialog, DEFAULT_FOV, DetectedStar, EMPTY_IMAGE_SOLVED, FOV, IMAGE_STATISTICS_BIT_OPTIONS, ImageAnnotation, ImageAnnotationDialog, ImageChannel, ImageData, ImageFITSHeadersDialog, ImageFOVDialog, ImageInfo, ImageROI, ImageSCNRDialog, ImageSaveDialog, ImageSolved, ImageSolverDialog, ImageStatisticsBitOption, ImageStretchDialog, ImageTransformation, StarDetectionDialog } from '../../shared/types/image.types' import { Mount } from '../../shared/types/mount.types' import { CoordinateInterpolator, InterpolatedCoordinate } from '../../shared/utils/coordinate-interpolation' import { AppComponent } from '../app.component' @@ -59,7 +59,6 @@ export class ImageComponent implements AfterViewInit, OnDestroy { { name: 'Green', value: 
'GREEN' }, { name: 'Blue', value: 'BLUE' }, ] - readonly scnrMethods = Array.from(SCNR_PROTECTION_METHODS) readonly scnr: ImageSCNRDialog = { showDialog: false, amount: 0.5, @@ -93,14 +92,22 @@ export class ImageComponent implements AfterViewInit, OnDestroy { readonly annotation: ImageAnnotationDialog = { showDialog: false, + running: false, + visible: false, useStarsAndDSOs: true, useMinorPlanets: false, minorPlanetsMagLimit: 18.0, - useSimbad: false + useSimbad: false, + data: [] + } + + readonly annotationInfo: AnnotationInfoDialog = { + showDialog: false } readonly starDetection: StarDetectionDialog = { showDialog: false, + running: false, type: 'ASTAP', minSNR: 0, visible: false, @@ -122,22 +129,16 @@ export class ImageComponent implements AfterViewInit, OnDestroy { readonly solver: ImageSolverDialog = { showDialog: false, - solving: false, + running: false, blind: true, centerRA: '', centerDEC: '', radius: 4, solved: structuredClone(EMPTY_IMAGE_SOLVED), - types: ['ASTAP', 'ASTROMETRY_NET_ONLINE'], type: 'ASTAP' } crossHair = false - annotations: ImageAnnotation[] = [] - annotating = false - showAnnotationInfoDialog = false - annotationInfo?: AstronomicalObject & Partial - annotationIsVisible = false readonly fitsHeaders: ImageFITSHeadersDialog = { showDialog: false, @@ -342,7 +343,7 @@ export class ImageComponent implements AfterViewInit, OnDestroy { }, toggle: (event) => { event.originalEvent?.stopImmediatePropagation() - this.annotationIsVisible = event.checked + this.annotation.visible = event.checked }, } @@ -477,6 +478,11 @@ export class ImageComponent implements AfterViewInit, OnDestroy { toggle: (event) => { if (event.originalEvent) { this.showLiveStackedImage = !!event.checked + + if (this.showLiveStackedImage) { + this.disableCalibration(true) + } + this.loadImage(true) } }, @@ -532,6 +538,7 @@ export class ImageComponent implements AfterViewInit, OnDestroy { this.showLiveStackedImage = true this.app.topMenu[0].toggled = true 
this.app.topMenu[0].visible = true + this.disableCalibration(true) } } else if (!event.liveStackedPath) { this.showLiveStackedImage = undefined @@ -542,6 +549,7 @@ export class ImageComponent implements AfterViewInit, OnDestroy { this.imageData.path = event.savedPath this.imageData.liveStackedPath = event.liveStackedPath this.imageData.capture = event.capture + this.imageData.exposureCount = event.exposureCount this.clearOverlay() this.loadImage(true) @@ -591,12 +599,9 @@ export class ImageComponent implements AfterViewInit, OnDestroy { } private markCalibrationGroupItem(name?: string) { - this.calibrationMenuItem.items![1].checked = this.calibrationViaCamera - for (let i = 3; i < this.calibrationMenuItem.items!.length; i++) { const item = this.calibrationMenuItem.items![i] item.checked = item.label === (name ?? 'None') - item.disabled = this.calibrationViaCamera } } @@ -618,11 +623,12 @@ export class ImageComponent implements AfterViewInit, OnDestroy { return { label, icon, checked: this.transformation.calibrationGroup === name, - disabled: this.calibrationViaCamera, - command: async () => { - this.transformation.calibrationGroup = name - this.markCalibrationGroupItem(label) - await this.loadImage() + command: async (e) => { + if (!this.calibrationViaCamera) { + this.transformation.calibrationGroup = name + this.markCalibrationGroupItem(label) + await this.loadImage() + } }, } } @@ -638,9 +644,11 @@ export class ImageComponent implements AfterViewInit, OnDestroy { menu.push({ label: 'Camera', icon: 'mdi mdi-camera-iris', - checked: this.calibrationViaCamera, - command: () => { - this.calibrationViaCamera = !this.calibrationViaCamera + toggleable: true, + toggled: this.calibrationViaCamera, + toggle: (e) => { + e.originalEvent?.stopImmediatePropagation() + this.calibrationViaCamera = !!e.checked this.markCalibrationGroupItem(this.transformation.calibrationGroup) } }) @@ -753,8 +761,8 @@ export class ImageComponent implements AfterViewInit, OnDestroy { } private 
clearOverlay() { - this.annotations = [] - this.annotationIsVisible = false + this.annotation.data = [] + this.annotation.visible = false this.annotationMenuItem.toggleable = false this.starDetection.stars = [] @@ -778,7 +786,13 @@ export class ImageComponent implements AfterViewInit, OnDestroy { async detectStars() { const options = this.preference.starDetectionRequest(this.starDetection.type).get() options.minSNR = this.starDetection.minSNR - this.starDetection.stars = await this.api.detectStars(this.imagePath!, options) + + try { + this.starDetection.running = true + this.starDetection.stars = await this.api.detectStars(this.imagePath!, options) + } finally { + this.starDetection.running = false + } let hfd = 0 let snr = 0 @@ -822,6 +836,12 @@ export class ImageComponent implements AfterViewInit, OnDestroy { await this.loadImageFromPath(path) } + let extraInfo = '' + + if (this.imageData.exposureCount) { + extraInfo += ` · ${this.imageData.exposureCount}` + } + if (this.imageData.title) { this.app.subTitle = this.imageData.title } else if (this.imageData.camera) { @@ -831,6 +851,8 @@ export class ImageComponent implements AfterViewInit, OnDestroy { } else { this.app.subTitle = '' } + + this.app.subTitle += extraInfo } private async loadImageFromPath(path: string) { @@ -900,21 +922,21 @@ export class ImageComponent implements AfterViewInit, OnDestroy { async annotateImage() { try { - this.annotating = true - this.annotations = await this.api.annotationsOfImage(this.imagePath!, this.annotation.useStarsAndDSOs, + this.annotation.running = true + this.annotation.data = await this.api.annotationsOfImage(this.imagePath!, this.annotation.useStarsAndDSOs, this.annotation.useMinorPlanets, this.annotation.minorPlanetsMagLimit, this.annotation.useSimbad) - this.annotationIsVisible = true - this.annotationMenuItem.toggleable = this.annotations.length > 0 - this.annotationMenuItem.toggled = this.annotationMenuItem.toggleable + this.annotation.visible = 
this.annotation.data.length > 0 + this.annotationMenuItem.toggleable = this.annotation.visible + this.annotationMenuItem.toggled = this.annotation.visible this.annotation.showDialog = false } finally { - this.annotating = false + this.annotation.running = false } } showAnnotationInfo(annotation: ImageAnnotation) { - this.annotationInfo = annotation.star ?? annotation.dso ?? annotation.minorPlanet - this.showAnnotationInfoDialog = true + this.annotationInfo.info = annotation.star ?? annotation.dso ?? annotation.minorPlanet + this.annotationInfo.showDialog = true } private disableAutoStretch() { @@ -1031,7 +1053,7 @@ export class ImageComponent implements AfterViewInit, OnDestroy { } async solveImage() { - this.solver.solving = true + this.solver.running = true try { const solver = this.preference.plateSolverRequest(this.solver.type).get() @@ -1043,7 +1065,7 @@ export class ImageComponent implements AfterViewInit, OnDestroy { } catch { this.updateImageSolved(this.imageInfo?.solved) } finally { - this.solver.solving = false + this.solver.running = false if (this.solver.solved.solved) { this.retrieveCoordinateInterpolation() @@ -1220,9 +1242,9 @@ export class ImageComponent implements AfterViewInit, OnDestroy { private loadPreference() { const preference = this.preference.imagePreference.get() this.solver.radius = preference.solverRadius ?? this.solver.radius - this.solver.type = preference.solverType ?? this.solver.types[0] + this.solver.type = preference.solverType ?? 'ASTAP' this.starDetection.type = preference.starDetectionType ?? this.starDetection.type - this.starDetection.minSNR = this.preference.starDetectionRequest(this.starDetection.type).get().minSNR ?? this.starDetection.type + this.starDetection.minSNR = preference.starDetectionMinSNR ?? this.preference.starDetectionRequest(this.starDetection.type).get().minSNR ?? 
this.starDetection.minSNR this.fov.fovs = this.preference.imageFOVs.get() this.fov.fovs.forEach(e => { e.enabled = false; e.computed = undefined }) @@ -1233,6 +1255,7 @@ export class ImageComponent implements AfterViewInit, OnDestroy { preference.solverRadius = this.solver.radius preference.solverType = this.solver.type preference.starDetectionType = this.starDetection.type + preference.starDetectionMinSNR = this.starDetection.minSNR this.preference.imagePreference.set(preference) } diff --git a/desktop/src/app/indi/property/indi-property.component.html b/desktop/src/app/indi/property/indi-property.component.html index 7b5c9c3f6..84098b176 100644 --- a/desktop/src/app/indi/property/indi-property.component.html +++ b/desktop/src/app/indi/property/indi-property.component.html @@ -6,17 +6,17 @@
+ (onClick)="sendSwitch(item)" icon="pi" [severity]="item.value ? 'success' : 'danger'" size="small" [text]="true">
+ (onClick)="sendSwitch(item)" icon="mdi mdi-check" size="small" [text]="true" />
+ (onClick)="sendSwitch(item)" icon="pi" [severity]="item.value ? 'success' : 'danger'" size="small" [text]="true">
@@ -39,7 +39,7 @@
+ size="small" [text]="true" />
@@ -60,7 +60,8 @@
- +
diff --git a/desktop/src/app/mount/mount.component.html b/desktop/src/app/mount/mount.component.html index 471ac72fc..a21d943c1 100644 --- a/desktop/src/app/mount/mount.component.html +++ b/desktop/src/app/mount/mount.component.html @@ -226,7 +226,7 @@
- @@ -244,6 +244,12 @@
+
+ Used with the mobile app + Used with the desktop app +
diff --git a/desktop/src/app/sequencer/sequencer.component.html b/desktop/src/app/sequencer/sequencer.component.html index 52c823755..998f0830e 100644 --- a/desktop/src/app/sequencer/sequencer.component.html +++ b/desktop/src/app/sequencer/sequencer.component.html @@ -27,13 +27,8 @@ - - - - - - +
@@ -144,6 +139,8 @@ (deviceChange)="wheelChanged()" /> +
@@ -226,9 +223,9 @@
+ severity="success" size="small" [text]="true" /> + severity="danger" size="small" [text]="true" />
diff --git a/desktop/src/app/sequencer/sequencer.component.ts b/desktop/src/app/sequencer/sequencer.component.ts index c17bd92d8..43b06746c 100644 --- a/desktop/src/app/sequencer/sequencer.component.ts +++ b/desktop/src/app/sequencer/sequencer.component.ts @@ -13,6 +13,7 @@ import { JsonFile } from '../../shared/types/app.types' import { Camera, CameraCaptureEvent, CameraStartCapture } from '../../shared/types/camera.types' import { Focuser } from '../../shared/types/focuser.types' import { Mount } from '../../shared/types/mount.types' +import { Rotator } from '../../shared/types/rotator.types' import { EMPTY_SEQUENCE_PLAN, SEQUENCE_ENTRY_PROPERTIES, SequenceCaptureMode, SequenceEntryProperty, SequencePlan, SequencerEvent } from '../../shared/types/sequencer.types' import { FilterWheel } from '../../shared/types/wheel.types' import { deviceComparator } from '../../shared/utils/comparators' @@ -34,11 +35,13 @@ export class SequencerComponent implements AfterContentInit, OnDestroy, Pingable mounts: Mount[] = [] wheels: FilterWheel[] = [] focusers: Focuser[] = [] + rotators: Rotator[] = [] camera?: Camera mount?: Mount wheel?: FilterWheel focuser?: Focuser + rotator?: Rotator readonly captureModes: SequenceCaptureMode[] = ['FULLY', 'INTERLEAVED'] readonly plan = structuredClone(EMPTY_SEQUENCE_PLAN) @@ -177,43 +180,53 @@ export class SequencerComponent implements AfterContentInit, OnDestroy, Pingable }) electron.on('CAMERA.UPDATED', event => { - ngZone.run(() => { - const camera = this.cameras.find(e => e.id === event.device.id) + const camera = this.cameras.find(e => e.id === event.device.id) - if (camera) { + if (camera) { + ngZone.run(() => { Object.assign(camera, event.device) - } - }) + }) + } }) electron.on('MOUNT.UPDATED', event => { - ngZone.run(() => { - const mount = this.mounts.find(e => e.id === event.device.id) + const mount = this.mounts.find(e => e.id === event.device.id) - if (mount) { + if (mount) { + ngZone.run(() => { Object.assign(mount, 
event.device) - } - }) + }) + } }) electron.on('WHEEL.UPDATED', event => { - ngZone.run(() => { - const wheel = this.wheels.find(e => e.id === event.device.id) + const wheel = this.wheels.find(e => e.id === event.device.id) - if (wheel) { + if (wheel) { + ngZone.run(() => { Object.assign(wheel, event.device) - } - }) + }) + } }) electron.on('FOCUSER.UPDATED', event => { - ngZone.run(() => { - const focuser = this.focusers.find(e => e.id === event.device.id) + const focuser = this.focusers.find(e => e.id === event.device.id) - if (focuser) { + if (focuser) { + ngZone.run(() => { Object.assign(focuser, event.device) - } - }) + }) + } + }) + + electron.on('ROTATOR.UPDATED', event => { + const rotator = this.rotators.find(e => e.id === event.device.id) + + if (rotator) { + ngZone.run(() => { + Object.assign(rotator, event.device) + }) + } }) electron.on('SEQUENCER.ELAPSED', event => { @@ -246,6 +259,7 @@ export class SequencerComponent implements AfterContentInit, OnDestroy, Pingable this.mounts = (await this.api.mounts()).sort(deviceComparator) this.wheels = (await this.api.wheels()).sort(deviceComparator) this.focusers = (await this.api.focusers()).sort(deviceComparator) + this.rotators = (await this.api.rotators()).sort(deviceComparator) this.loadSavedJsonFileFromPathOrAddDefault() @@ -262,6 +276,7 @@ export class SequencerComponent implements AfterContentInit, OnDestroy, Pingable if (this.mount) this.api.mountListen(this.mount) if (this.focuser) this.api.focuserListen(this.focuser) if (this.wheel) this.api.wheelListen(this.wheel) + if (this.rotator) this.api.rotatorListen(this.rotator) } private enableOrDisableTopbarMenu(enable: boolean) { @@ -272,6 +287,7 @@ export class SequencerComponent implements AfterContentInit, OnDestroy, Pingable const camera = this.camera ?? this.cameras[0] // const wheel = this.wheel ?? this.wheels[0] // const focuser = this.focuser ?? this.focusers[0] + // const rotator = this.rotator ?? 
this.rotators[0] this.plan.entries.push({ enabled: true, @@ -301,7 +317,8 @@ export class SequencerComponent implements AfterContentInit, OnDestroy, Pingable type: 'SIRIL', executablePath: '', rotate: 0, - use32Bits: false + use32Bits: false, + slot: 1, }, }) @@ -355,8 +372,9 @@ export class SequencerComponent implements AfterContentInit, OnDestroy, Pingable this.camera = this.cameras.find(e => e.name === this.plan.camera?.name) ?? this.cameras[0] this.mount = this.mounts.find(e => e.name === this.plan.mount?.name) ?? this.mounts[0] - this.focuser = this.focusers.find(e => e.name === this.plan.focuser?.name) ?? this.focusers[0] this.wheel = this.wheels.find(e => e.name === this.plan.wheel?.name) ?? this.wheels[0] + this.focuser = this.focusers.find(e => e.name === this.plan.focuser?.name) ?? this.focusers[0] + this.rotator = this.rotators.find(e => e.name === this.plan.rotator?.name) ?? this.rotators[0] return plan.entries.length } @@ -376,16 +394,6 @@ export class SequencerComponent implements AfterContentInit, OnDestroy, Pingable } } - async chooseSavePath() { - const defaultPath = this.plan.savePath - const path = await this.electron.openDirectory({ defaultPath }) - - if (path) { - this.plan.savePath = path - this.savePlan() - } - } - async showCameraDialog(entry: CameraStartCapture) { if (await CameraComponent.showAsDialog(this.browserWindow, 'SEQUENCER', this.camera!, entry)) { this.savePlan() @@ -414,11 +422,16 @@ export class SequencerComponent implements AfterContentInit, OnDestroy, Pingable this.ping() } + rotatorChanged() { + this.ping() + } + savePlan() { this.plan.camera = this.camera this.plan.mount = this.mount this.plan.wheel = this.wheel this.plan.focuser = this.focuser + this.plan.rotator = this.rotator this.storage.set(SEQUENCER_PLAN_KEY, this.plan) this.savedPathWasModified = !!this.savedPath } diff --git a/desktop/src/app/settings/settings.component.html b/desktop/src/app/settings/settings.component.html index e7f18020c..38c8a9dd8 100644 --- 
a/desktop/src/app/settings/settings.component.html +++ b/desktop/src/app/settings/settings.component.html @@ -31,7 +31,7 @@
- @@ -78,12 +78,17 @@
-
-
+
+ +
+
Min SNR
-
+
Timeout (s)
-
- +
+ + + +
@@ -111,7 +120,7 @@
- @@ -121,6 +130,15 @@ [path]="liveStackers.get(liveStackerType)!.executablePath" label="Executable path" (pathChange)="liveStackers.get(liveStackerType)!.executablePath = $event; save()" />
+
+ + + + +
\ No newline at end of file diff --git a/desktop/src/app/settings/settings.component.ts b/desktop/src/app/settings/settings.component.ts index dfd22b51c..25ffcd1c6 100644 --- a/desktop/src/app/settings/settings.component.ts +++ b/desktop/src/app/settings/settings.component.ts @@ -62,8 +62,10 @@ export class SettingsComponent implements AfterViewInit, OnDestroy { this.plateSolvers.set('ASTROMETRY_NET_ONLINE', preference.plateSolverRequest('ASTROMETRY_NET_ONLINE').get()) this.starDetectors.set('ASTAP', preference.starDetectionRequest('ASTAP').get()) + this.starDetectors.set('PIXINSIGHT', preference.starDetectionRequest('PIXINSIGHT').get()) this.liveStackers.set('SIRIL', preference.liveStackingRequest('SIRIL').get()) + this.liveStackers.set('PIXINSIGHT', preference.liveStackingRequest('PIXINSIGHT').get()) } async ngAfterViewInit() { } @@ -130,7 +132,9 @@ export class SettingsComponent implements AfterViewInit, OnDestroy { this.preference.plateSolverRequest('ASTROMETRY_NET_ONLINE').set(this.plateSolvers.get('ASTROMETRY_NET_ONLINE')) this.preference.starDetectionRequest('ASTAP').set(this.starDetectors.get('ASTAP')) + this.preference.starDetectionRequest('PIXINSIGHT').set(this.starDetectors.get('PIXINSIGHT')) this.preference.liveStackingRequest('SIRIL').set(this.liveStackers.get('SIRIL')) + this.preference.liveStackingRequest('PIXINSIGHT').set(this.liveStackers.get('PIXINSIGHT')) } } \ No newline at end of file diff --git a/desktop/src/shared/components/camera-exposure/camera-exposure.component.ts b/desktop/src/shared/components/camera-exposure/camera-exposure.component.ts index c25094060..d413467c7 100644 --- a/desktop/src/shared/components/camera-exposure/camera-exposure.component.ts +++ b/desktop/src/shared/components/camera-exposure/camera-exposure.component.ts @@ -47,6 +47,8 @@ export class CameraExposureComponent { this.state = 'EXPOSURING' } else if (event.state === 'IDLE' || event.state === 'CAPTURE_FINISHED') { this.reset() + } else if (event.state === 'PAUSING' 
|| event.state === 'PAUSED') { + this.state = event.state } return this.state !== undefined diff --git a/desktop/src/shared/pipes/dropdown-options.ts b/desktop/src/shared/pipes/dropdown-options.ts new file mode 100644 index 000000000..915938d42 --- /dev/null +++ b/desktop/src/shared/pipes/dropdown-options.ts @@ -0,0 +1,33 @@ +import { Pipe, PipeTransform } from '@angular/core' +import { AutoFocusFittingMode, BacklashCompensationMode } from '../types/autofocus.type' +import { LiveStackerType } from '../types/camera.types' +import { Bitpix, ImageChannel, ImageFormat, SCNRProtectionMethod } from '../types/image.types' +import { PlateSolverType, StarDetectorType } from '../types/settings.types' +import { MountRemoteControlType } from '../types/mount.types' + +export type DropdownOptionType = 'STAR_DETECTOR' | 'PLATE_SOLVER' | 'LIVE_STACKER' + | 'AUTO_FOCUS_FITTING_MODE' | 'AUTO_FOCUS_BACKLASH_COMPENSATION_MODE' | 'SCNR_PROTECTION_METHOD' + | 'IMAGE_FORMAT' | 'IMAGE_BITPIX' | 'IMAGE_CHANNEL' | 'MOUNT_REMOTE_CONTROL_TYPE' + +export type DropdownOptionReturnType = StarDetectorType[] | PlateSolverType[] | LiveStackerType[] + | AutoFocusFittingMode[] | BacklashCompensationMode[] | SCNRProtectionMethod[] + | ImageFormat[] | Bitpix[] | ImageChannel[] | MountRemoteControlType[] + +@Pipe({ name: 'dropdownOptions' }) +export class DropdownOptionsPipe implements PipeTransform { + + transform(type: DropdownOptionType): DropdownOptionReturnType | undefined { + switch (type) { + case 'STAR_DETECTOR': return ['ASTAP', 'PIXINSIGHT'] + case 'PLATE_SOLVER': return ['ASTAP', 'ASTROMETRY_NET_ONLINE'] + case 'AUTO_FOCUS_FITTING_MODE': return ['TRENDLINES', 'PARABOLIC', 'TREND_PARABOLIC', 'HYPERBOLIC', 'TREND_HYPERBOLIC'] + case 'AUTO_FOCUS_BACKLASH_COMPENSATION_MODE': return ['NONE', 'ABSOLUTE', 'OVERSHOOT'] + case 'LIVE_STACKER': return ['SIRIL', 'PIXINSIGHT'] + case 'SCNR_PROTECTION_METHOD': return ['MAXIMUM_MASK', 'ADDITIVE_MASK', 'AVERAGE_NEUTRAL', 'MAXIMUM_NEUTRAL', 'MINIMUM_NEUTRAL'] 
+ case 'IMAGE_FORMAT': return ['FITS', 'XISF', 'PNG', 'JPG'] + case 'IMAGE_BITPIX': return ['BYTE', 'SHORT', 'INTEGER', 'FLOAT', 'DOUBLE'] + case 'IMAGE_CHANNEL': return ['RED', 'GREEN', 'BLUE', 'GRAY'] + case 'MOUNT_REMOTE_CONTROL_TYPE': return ['LX200', 'STELLARIUM'] + } + } +} diff --git a/desktop/src/shared/services/api.service.ts b/desktop/src/shared/services/api.service.ts index 4c4705545..c55a3ebe6 100644 --- a/desktop/src/shared/services/api.service.ts +++ b/desktop/src/shared/services/api.service.ts @@ -87,6 +87,14 @@ export class ApiService { return this.http.put(`cameras/${camera.id}/capture/start`, data) } + cameraPauseCapture(camera: Camera) { + return this.http.put(`cameras/${camera.id}/capture/pause`) + } + + cameraUnpauseCapture(camera: Camera) { + return this.http.put(`cameras/${camera.id}/capture/unpause`) + } + cameraAbortCapture(camera: Camera) { return this.http.put(`cameras/${camera.id}/capture/abort`) } diff --git a/desktop/src/shared/services/browser-window.service.ts b/desktop/src/shared/services/browser-window.service.ts index 37bb0d4d6..4a7e38b4e 100644 --- a/desktop/src/shared/services/browser-window.service.ts +++ b/desktop/src/shared/services/browser-window.service.ts @@ -95,7 +95,7 @@ export class BrowserWindowService { } openAlignment(options: OpenWindowOptions = {}) { - Object.assign(options, { icon: 'star', width: 415, height: 365 }) + Object.assign(options, { icon: 'star', width: 425, height: 365 }) this.openWindow({ ...options, id: 'alignment', path: 'alignment', data: undefined }) } @@ -115,8 +115,8 @@ export class BrowserWindowService { } openSettings(options: OpenWindowOptions = {}) { - Object.assign(options, { icon: 'settings', width: 340, height: 440 }) - this.openWindow({ ...options, id: 'settings', path: 'settings', data: undefined }) + Object.assign(options, { icon: 'settings', width: 400, height: 450 }) + this.openWindow({ ...options, id: 'settings', path: 'settings', data: undefined, autoResizable: false }) } 
openCalculator(options: OpenWindowOptions = {}) { diff --git a/desktop/src/shared/types/camera.types.ts b/desktop/src/shared/types/camera.types.ts index b7526cb8f..2da17ce42 100644 --- a/desktop/src/shared/types/camera.types.ts +++ b/desktop/src/shared/types/camera.types.ts @@ -13,7 +13,7 @@ export type AutoSubFolderMode = 'OFF' | 'NOON' | 'MIDNIGHT' export type ExposureMode = 'SINGLE' | 'FIXED' | 'LOOP' -export type LiveStackerType = 'SIRIL' +export type LiveStackerType = 'SIRIL' | 'PIXINSIGHT' export enum ExposureTimeUnit { MINUTE = 'm', @@ -199,6 +199,7 @@ export const EMPTY_CAMERA_START_CAPTURE: CameraStartCapture = { executablePath: "", rotate: 0, use32Bits: false, + slot: 1, } } @@ -234,7 +235,7 @@ export interface CameraCaptureEvent extends MessageEvent { capture?: CameraStartCapture } -export type CameraCaptureState = 'IDLE' | 'CAPTURE_STARTED' | 'EXPOSURE_STARTED' | 'EXPOSURING' | 'WAITING' | 'SETTLING' | 'EXPOSURE_FINISHED' | 'CAPTURE_FINISHED' +export type CameraCaptureState = 'IDLE' | 'CAPTURE_STARTED' | 'EXPOSURE_STARTED' | 'EXPOSURING' | 'WAITING' | 'SETTLING' | 'PAUSING' | 'PAUSED' | 'EXPOSURE_FINISHED' | 'CAPTURE_FINISHED' export interface CameraDialogInput { mode: CameraDialogMode @@ -288,13 +289,15 @@ export const EMPTY_CAMERA_CAPTURE_INFO: CameraCaptureInfo = { } export interface LiveStackingRequest { - enabled: boolean, - type: LiveStackerType, - executablePath: string, - dark?: string, - flat?: string, - rotate: number, - use32Bits: boolean, + enabled: boolean + type: LiveStackerType + executablePath: string + dark?: string + flat?: string + bias?: string + rotate: number + use32Bits: boolean + slot: number } export const EMPTY_LIVE_STACKING_REQUEST: LiveStackingRequest = { @@ -302,5 +305,6 @@ export const EMPTY_LIVE_STACKING_REQUEST: LiveStackingRequest = { type: 'SIRIL', executablePath: '', rotate: 0, - use32Bits: false + use32Bits: false, + slot: 1, } diff --git a/desktop/src/shared/types/image.types.ts 
b/desktop/src/shared/types/image.types.ts index 06346d2c0..d4533e86d 100644 --- a/desktop/src/shared/types/image.types.ts +++ b/desktop/src/shared/types/image.types.ts @@ -5,8 +5,7 @@ import { PlateSolverType, StarDetectorType } from './settings.types' export type ImageChannel = 'RED' | 'GREEN' | 'BLUE' | 'GRAY' -export const SCNR_PROTECTION_METHODS = ['MAXIMUM_MASK', 'ADDITIVE_MASK', 'AVERAGE_NEUTRAL', 'MAXIMUM_NEUTRAL', 'MINIMUM_NEUTRAL'] as const -export type SCNRProtectionMethod = (typeof SCNR_PROTECTION_METHODS)[number] +export type SCNRProtectionMethod = 'MAXIMUM_MASK' | 'ADDITIVE_MASK' | 'AVERAGE_NEUTRAL' | 'MAXIMUM_NEUTRAL' | 'MINIMUM_NEUTRAL' export type ImageSource = 'FRAMING' | 'PATH' | 'CAMERA' | 'FLAT_WIZARD' | 'SEQUENCER' | 'ALIGNMENT' | 'AUTO_FOCUS' @@ -117,6 +116,7 @@ export interface ImagePreference { solverType?: PlateSolverType savePath?: string starDetectionType?: StarDetectorType + starDetectionMinSNR?: number } export const EMPTY_IMAGE_PREFERENCE: ImagePreference = { @@ -132,6 +132,7 @@ export interface ImageData { source?: ImageSource title?: string capture?: CameraStartCapture + exposureCount?: number } export interface FOV { @@ -209,13 +210,12 @@ export interface ImageStretchDialog { export interface ImageSolverDialog { showDialog: boolean - solving: boolean + running: boolean blind: boolean centerRA: Angle centerDEC: Angle radius: number readonly solved: ImageSolved - readonly types: PlateSolverType[] type: PlateSolverType } @@ -260,10 +260,13 @@ export interface ImageTransformation { export interface ImageAnnotationDialog { showDialog: boolean + running: boolean + visible: boolean useStarsAndDSOs: boolean useMinorPlanets: boolean minorPlanetsMagLimit: number useSimbad: boolean + data: ImageAnnotation[] } export interface ROISelected { @@ -276,6 +279,7 @@ export interface ROISelected { export interface StarDetectionDialog { showDialog: boolean + running: boolean type: StarDetectorType minSNR: number visible: boolean @@ -283,3 +287,8 @@ 
export interface StarDetectionDialog { computed: Omit & { minFlux: number, maxFlux: number } selected: DetectedStar } + +export interface AnnotationInfoDialog { + showDialog: boolean + info?: AstronomicalObject & Partial +} diff --git a/desktop/src/shared/types/sequencer.types.ts b/desktop/src/shared/types/sequencer.types.ts index 58996386d..844a540b6 100644 --- a/desktop/src/shared/types/sequencer.types.ts +++ b/desktop/src/shared/types/sequencer.types.ts @@ -1,6 +1,7 @@ import { AutoSubFolderMode, Camera, CameraCaptureEvent, CameraStartCapture, Dither } from './camera.types' import { Focuser } from './focuser.types' import { Mount } from './mount.types' +import { Rotator } from './rotator.types' import { FilterWheel } from './wheel.types' export type SequenceCaptureMode = 'FULLY' | 'INTERLEAVED' @@ -39,6 +40,7 @@ export interface SequencePlan { mount?: Mount wheel?: FilterWheel focuser?: Focuser + rotator?: Rotator } export const EMPTY_SEQUENCE_PLAN: SequencePlan = { diff --git a/desktop/src/shared/types/settings.types.ts b/desktop/src/shared/types/settings.types.ts index b9226023d..c048dda1d 100644 --- a/desktop/src/shared/types/settings.types.ts +++ b/desktop/src/shared/types/settings.types.ts @@ -18,13 +18,14 @@ export const EMPTY_PLATE_SOLVER_OPTIONS: PlateSolverOptions = { timeout: 600, } -export type StarDetectorType = 'ASTAP' +export type StarDetectorType = 'ASTAP' | 'PIXINSIGHT' export interface StarDetectionOptions { type: StarDetectorType executablePath: string timeout: number minSNR: number + slot: number } export const EMPTY_STAR_DETECTION_OPTIONS: StarDetectionOptions = { @@ -32,4 +33,5 @@ export const EMPTY_STAR_DETECTION_OPTIONS: StarDetectionOptions = { executablePath: '', timeout: 600, minSNR: 0, + slot: 1, } diff --git a/nebulosa-common/src/main/kotlin/nebulosa/common/exec/CommandLine.kt b/nebulosa-common/src/main/kotlin/nebulosa/common/exec/CommandLine.kt index cf799f08a..550b722e6 100644 --- 
a/nebulosa-common/src/main/kotlin/nebulosa/common/exec/CommandLine.kt +++ b/nebulosa-common/src/main/kotlin/nebulosa/common/exec/CommandLine.kt @@ -31,6 +31,9 @@ data class CommandLine internal constructor( val pid get() = process?.pid() ?: -1L + val exitCode + get() = process?.takeIf { !it.isAlive }?.exitValue() ?: -1 + val writer = PrintStream(object : OutputStream() { override fun write(b: Int) { @@ -65,7 +68,12 @@ data class CommandLine internal constructor( @Synchronized fun start(timeout: Duration = Duration.ZERO): CommandLine { if (process == null) { - process = builder.start() + process = try { + builder.start() + } catch (e: Throwable) { + completeExceptionally(e) + return this + } if (listeners.isNotEmpty()) { inputReader = StreamLineReader(process!!.inputStream, false) @@ -84,9 +92,6 @@ data class CommandLine internal constructor( @Synchronized fun stop() { - process?.destroyForcibly() - process = null - waiter?.interrupt() waiter = null @@ -95,6 +100,10 @@ data class CommandLine internal constructor( errorReader?.interrupt() errorReader = null + + process?.destroyForcibly() + process?.waitFor() + process = null } fun get(timeout: Duration): Int { @@ -139,7 +148,7 @@ data class CommandLine internal constructor( private inner class StreamLineReader( stream: InputStream, private val isError: Boolean, - ) : Thread("Command Line Stream Line Reader") { + ) : Thread("Command Line ${if (isError) "Error" else "Input"} Stream Line Reader") { private val reader = stream.bufferedReader() private val completable = CompletableFuture() diff --git a/nebulosa-pixinsight/build.gradle.kts b/nebulosa-pixinsight/build.gradle.kts new file mode 100644 index 000000000..1819f5ff6 --- /dev/null +++ b/nebulosa-pixinsight/build.gradle.kts @@ -0,0 +1,24 @@ +plugins { + kotlin("jvm") + id("maven-publish") +} + +dependencies { + api(project(":nebulosa-common")) + api(project(":nebulosa-math")) + api(project(":nebulosa-plate-solving")) + api(project(":nebulosa-star-detection")) + 
api(project(":nebulosa-livestacking")) + api(libs.bundles.jackson) + api(libs.apache.codec) + implementation(project(":nebulosa-log")) + testImplementation(project(":nebulosa-test")) +} + +publishing { + publications { + create("pluginMaven") { + from(components["java"]) + } + } +} diff --git a/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/livestacking/PixInsightLiveStacker.kt b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/livestacking/PixInsightLiveStacker.kt new file mode 100644 index 000000000..6c4d52115 --- /dev/null +++ b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/livestacking/PixInsightLiveStacker.kt @@ -0,0 +1,131 @@ +package nebulosa.pixinsight.livestacking + +import nebulosa.livestacking.LiveStacker +import nebulosa.log.loggerFor +import nebulosa.pixinsight.script.* +import java.nio.file.Path +import java.util.concurrent.atomic.AtomicBoolean +import kotlin.io.path.copyTo +import kotlin.io.path.deleteIfExists +import kotlin.io.path.moveTo + +data class PixInsightLiveStacker( + private val runner: PixInsightScriptRunner, + private val workingDirectory: Path, + private val dark: Path? = null, + private val flat: Path? = null, + private val bias: Path? 
= null, + private val use32Bits: Boolean = false, + private val slot: Int = PixInsightScript.DEFAULT_SLOT, +) : LiveStacker { + + private val running = AtomicBoolean() + private val stacking = AtomicBoolean() + + override val isRunning + get() = running.get() + + override val isStacking + get() = stacking.get() + + @Volatile private var stackCount = 0 + + private val referencePath = Path.of("$workingDirectory", "reference.fits") + private val calibratedPath = Path.of("$workingDirectory", "calibrated.fits") + private val alignedPath = Path.of("$workingDirectory", "aligned.fits") + private val stackedPath = Path.of("$workingDirectory", "stacked.fits") + + @Synchronized + override fun start() { + if (!running.get()) { + val isPixInsightRunning = PixInsightIsRunning(slot).use { it.runSync(runner) } + + if (!isPixInsightRunning) { + try { + check(PixInsightStartup(slot).use { it.runSync(runner) }) + } catch (e: Throwable) { + throw IllegalStateException("unable to start PixInsight") + } + } + + stackCount = 0 + running.set(true) + } + } + + @Synchronized + override fun add(path: Path): Path? { + var targetPath = path + + return if (running.get()) { + stacking.set(true) + + // Calibrate. + val calibrated = if (dark == null && flat == null && bias == null) false else { + PixInsightCalibrate(slot, workingDirectory, targetPath, dark, flat, if (dark == null) bias else null).use { s -> + val outputPath = s.runSync(runner).outputImage ?: return@use false + LOG.info("live stacking calibrated. count={}, output={}", stackCount, outputPath) + outputPath.moveTo(calibratedPath, true) + true + } + } + + if (calibrated) { + targetPath = calibratedPath + } + + // TODO: Debayer, Resample? + + if (stackCount > 0) { + // Align. + val aligned = PixInsightAlign(slot, workingDirectory, referencePath, targetPath).use { s -> + val outputPath = s.runSync(runner).outputImage ?: return@use false + LOG.info("live stacking aligned. 
count={}, output={}", stackCount, outputPath) + outputPath.moveTo(alignedPath, true) + true + } + + if (aligned) { + targetPath = alignedPath + + // Stack. + val expressionRK = "({{0}} * $stackCount + {{1}}) / ${stackCount + 1}" + PixInsightPixelMath(slot, listOf(stackedPath, targetPath), stackedPath, expressionRK).use { s -> + s.runSync(runner).stackedImage?.also { + LOG.info("live stacking finished. count={}, output={}", stackCount++, it) + } + } + } + } else { + targetPath.copyTo(referencePath, true) + targetPath.copyTo(stackedPath, true) + LOG.info("live stacking started. target={}, reference={}, stacked={}", targetPath, referencePath, stackedPath) + stackCount = 1 + } + + stacking.set(false) + + stackedPath + } else { + path + } + } + + @Synchronized + override fun stop() { + running.set(false) + stackCount = 0 + } + + override fun close() { + referencePath.deleteIfExists() + calibratedPath.deleteIfExists() + alignedPath.deleteIfExists() + // stackedPath.deleteIfExists() + } + + companion object { + + @JvmStatic val LOG = loggerFor() + } +} diff --git a/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/AbstractPixInsightScript.kt b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/AbstractPixInsightScript.kt new file mode 100644 index 000000000..57468b766 --- /dev/null +++ b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/AbstractPixInsightScript.kt @@ -0,0 +1,83 @@ +package nebulosa.pixinsight.script + +import com.fasterxml.jackson.module.kotlin.jsonMapper +import com.fasterxml.jackson.module.kotlin.kotlinModule +import nebulosa.common.exec.CommandLine +import nebulosa.common.exec.LineReadListener +import nebulosa.common.json.PathDeserializer +import nebulosa.common.json.PathSerializer +import nebulosa.log.loggerFor +import org.apache.commons.codec.binary.Hex +import java.nio.file.Path +import java.util.concurrent.CompletableFuture + +abstract class AbstractPixInsightScript : PixInsightScript, LineReadListener, 
CompletableFuture() { + + override fun onInputRead(line: String) = Unit + + override fun onErrorRead(line: String) = Unit + + protected open fun beforeRun() = Unit + + protected abstract fun processOnComplete(exitCode: Int): T? + + protected open fun waitOnComplete() = Unit + + final override fun run(runner: PixInsightScriptRunner) = runner.run(this) + + final override fun startCommandLine(commandLine: CommandLine) { + commandLine.whenComplete { exitCode, exception -> + try { + LOG.info("{} script finished. done={}, exitCode={}", this::class.simpleName, isDone, exitCode, exception) + + waitOnComplete() + + if (isDone) return@whenComplete + else if (exception != null) completeExceptionally(exception) + else complete(processOnComplete(exitCode).also { LOG.info("script processed. output={}", it) }) + } finally { + commandLine.unregisterLineReadListener(this) + } + } + + commandLine.registerLineReadListener(this) + beforeRun() + commandLine.start() + } + + companion object { + + internal const val START_FILE = "@" + internal const val END_FILE = "#" + + @JvmStatic private val LOG = loggerFor>() + + @JvmStatic private val KOTLIN_MODULE = kotlinModule() + .addDeserializer(Path::class.java, PathDeserializer) + .addSerializer(PathSerializer) + + @JvmStatic internal val OBJECT_MAPPER = jsonMapper { + addModule(KOTLIN_MODULE) + } + + @JvmStatic + internal fun execute(slot: Int, scriptPath: Path, data: Any?): String { + return buildString { + if (slot > 0) append("$slot:") + append("\"$scriptPath") + + if (data != null) { + append(',') + + when (data) { + is Path, is CharSequence -> append("'$data'") + is Number -> append("$data") + else -> append(Hex.encodeHexString(OBJECT_MAPPER.writeValueAsString(data).toByteArray(Charsets.UTF_16BE))) + } + } + + append('"') + } + } + } +} diff --git a/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightAlign.kt b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightAlign.kt new file mode 100644 
index 000000000..2958d15b0 --- /dev/null +++ b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightAlign.kt @@ -0,0 +1,85 @@ +package nebulosa.pixinsight.script + +import nebulosa.io.resource +import nebulosa.io.transferAndClose +import java.nio.file.Files +import java.nio.file.Path +import kotlin.io.path.deleteIfExists +import kotlin.io.path.outputStream +import kotlin.io.path.readText + +data class PixInsightAlign( + private val slot: Int, + private val workingDirectory: Path, + private val referencePath: Path, + private val targetPath: Path, +) : AbstractPixInsightScript() { + + private data class Input( + @JvmField val referencePath: Path, + @JvmField val targetPath: Path, + @JvmField val outputDirectory: Path, + @JvmField val statusPath: Path, + ) + + data class Output( + @JvmField val success: Boolean = false, + @JvmField val errorMessage: String? = null, + @JvmField val outputImage: Path? = null, + @JvmField val outputMaskImage: Path? = null, + @JvmField val totalPairMatches: Int = 0, + @JvmField val inliers: Int = 0, + @JvmField val overlapping: Int = 0, + @JvmField val regularity: Double = 0.0, + @JvmField val quality: Double = 0.0, + @JvmField val rmsError: Double = 0.0, + @JvmField val rmsErrorDev: Double = 0.0, + @JvmField val peakErrorX: Double = 0.0, + @JvmField val peakErrorY: Double = 0.0, + @JvmField val h11: Double = 0.0, + @JvmField val h12: Double = 0.0, + @JvmField val h13: Double = 0.0, + @JvmField val h21: Double = 0.0, + @JvmField val h22: Double = 0.0, + @JvmField val h23: Double = 0.0, + @JvmField val h31: Double = 0.0, + @JvmField val h32: Double = 0.0, + @JvmField val h33: Double = 0.0, + ) { + + companion object { + + @JvmStatic val FAILED = Output() + } + } + + private val scriptPath = Files.createTempFile("pi-", ".js") + private val statusPath = Files.createTempFile("pi-", ".txt") + + init { + resource("pixinsight/Align.js")!!.transferAndClose(scriptPath.outputStream()) + } + + override val arguments = 
listOf("-x=${execute(slot, scriptPath, Input(referencePath, targetPath, workingDirectory, statusPath))}") + + override fun processOnComplete(exitCode: Int): Output { + if (exitCode == 0) { + repeat(30) { + val text = statusPath.readText() + + if (text.startsWith(START_FILE) && text.endsWith(END_FILE)) { + return OBJECT_MAPPER.readValue(text.substring(1, text.length - 1), Output::class.java) + } + + Thread.sleep(1000) + } + } + + return Output.FAILED + } + + override fun close() { + scriptPath.deleteIfExists() + statusPath.deleteIfExists() + } +} diff --git a/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightAutomaticBackgroundExtractor.kt b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightAutomaticBackgroundExtractor.kt new file mode 100644 index 000000000..a68dd5049 --- /dev/null +++ b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightAutomaticBackgroundExtractor.kt @@ -0,0 +1,64 @@ +package nebulosa.pixinsight.script + +import nebulosa.io.resource +import nebulosa.io.transferAndClose +import java.nio.file.Files +import java.nio.file.Path +import kotlin.io.path.deleteIfExists +import kotlin.io.path.outputStream +import kotlin.io.path.readText + +data class PixInsightAutomaticBackgroundExtractor( + private val slot: Int, + private val targetPath: Path, + private val outputPath: Path, +) : AbstractPixInsightScript() { + + private data class Input( + @JvmField val targetPath: Path, + @JvmField val outputPath: Path, + @JvmField val statusPath: Path, + ) + + data class Output( + @JvmField val success: Boolean = false, + @JvmField val errorMessage: String? = null, + @JvmField val outputImage: Path? 
= null, + ) { + + companion object { + + @JvmField val FAILED = Output() + } + } + + private val scriptPath = Files.createTempFile("pi-", ".js") + private val statusPath = Files.createTempFile("pi-", ".txt") + + init { + resource("pixinsight/ABE.js")!!.transferAndClose(scriptPath.outputStream()) + } + + override val arguments = listOf("-x=${execute(slot, scriptPath, Input(targetPath, outputPath, statusPath))}") + + override fun processOnComplete(exitCode: Int): Output { + if (exitCode == 0) { + repeat(30) { + val text = statusPath.readText() + + if (text.startsWith(START_FILE) && text.endsWith(END_FILE)) { + return OBJECT_MAPPER.readValue(text.substring(1, text.length - 1), Output::class.java) + } + + Thread.sleep(1000) + } + } + + return Output.FAILED + } + + override fun close() { + scriptPath.deleteIfExists() + statusPath.deleteIfExists() + } +} diff --git a/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightCalibrate.kt b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightCalibrate.kt new file mode 100644 index 000000000..53cd83351 --- /dev/null +++ b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightCalibrate.kt @@ -0,0 +1,75 @@ +package nebulosa.pixinsight.script + +import nebulosa.io.resource +import nebulosa.io.transferAndClose +import java.nio.file.Files +import java.nio.file.Path +import kotlin.io.path.deleteIfExists +import kotlin.io.path.outputStream +import kotlin.io.path.readText + +data class PixInsightCalibrate( + private val slot: Int, + private val workingDirectory: Path, + private val targetPath: Path, + private val dark: Path? = null, + private val flat: Path? = null, + private val bias: Path? 
= null, + private val compress: Boolean = false, + private val use32Bit: Boolean = false, +) : AbstractPixInsightScript() { + + private data class Input( + @JvmField val targetPath: Path, + @JvmField val outputDirectory: Path, + @JvmField val statusPath: Path, + @JvmField val masterDark: Path? = null, + @JvmField val masterFlat: Path? = null, + @JvmField val masterBias: Path? = null, + @JvmField val compress: Boolean = false, + @JvmField val use32Bit: Boolean = false, + ) + + data class Output( + @JvmField val success: Boolean = false, + @JvmField val errorMessage: String? = null, + @JvmField val outputImage: Path? = null, + ) { + + companion object { + + @JvmStatic val FAILED = Output() + } + } + + private val scriptPath = Files.createTempFile("pi-", ".js") + private val statusPath = Files.createTempFile("pi-", ".txt") + + init { + resource("pixinsight/Calibrate.js")!!.transferAndClose(scriptPath.outputStream()) + } + + override val arguments = + listOf("-x=${execute(slot, scriptPath, Input(targetPath, workingDirectory, statusPath, dark, flat, bias, compress, use32Bit))}") + + override fun processOnComplete(exitCode: Int): Output { + if (exitCode == 0) { + repeat(30) { + val text = statusPath.readText() + + if (text.startsWith(START_FILE) && text.endsWith(END_FILE)) { + return OBJECT_MAPPER.readValue(text.substring(1, text.length - 1), Output::class.java) + } + + Thread.sleep(1000) + } + } + + return Output.FAILED + } + + override fun close() { + scriptPath.deleteIfExists() + statusPath.deleteIfExists() + } +} diff --git a/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightDetectStars.kt b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightDetectStars.kt new file mode 100644 index 000000000..db7b9a684 --- /dev/null +++ b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightDetectStars.kt @@ -0,0 +1,95 @@ +package nebulosa.pixinsight.script + +import nebulosa.io.resource +import 
nebulosa.io.transferAndClose +import nebulosa.star.detection.ImageStar +import java.nio.file.Files +import java.nio.file.Path +import java.time.Duration +import kotlin.io.path.deleteIfExists +import kotlin.io.path.outputStream +import kotlin.io.path.readText + +data class PixInsightDetectStars( + private val slot: Int, + private val targetPath: Path, + private val minSNR: Double = 0.0, + private val invert: Boolean = false, + private val timeout: Duration = Duration.ZERO, +) : AbstractPixInsightScript() { + + private data class Input( + @JvmField val targetPath: Path, + @JvmField val statusPath: Path, + @JvmField val minSNR: Double = 0.0, + @JvmField val invert: Boolean = false, + ) + + data class Output( + @JvmField val success: Boolean = false, + @JvmField val errorMessage: String? = null, + @JvmField val stars: List = emptyList(), + ) { + + override fun toString() = "Output(success=$success, errorMessage=$errorMessage, stars=${stars.size})" + + companion object { + + @JvmStatic val FAILED = Output() + } + } + + data class Star( + override val x: Double = 0.0, + override val y: Double = 0.0, + override val flux: Double = 0.0, + @JvmField val size: Double = 0.0, + @JvmField val bkg: Double = 0.0, + @JvmField val x0: Int = 0, + @JvmField val y0: Int = 0, + @JvmField val x1: Int = 0, + @JvmField val y1: Int = 0, + @JvmField val nmax: Int = 0, + override val snr: Double = 0.0, + @JvmField val peak: Double = 0.0, + override val hfd: Double = 0.0, + ) : ImageStar + + private val scriptPath = Files.createTempFile("pi-", ".js") + private val statusPath = Files.createTempFile("pi-", ".txt") + + init { + resource("pixinsight/DetectStars.js")!!.transferAndClose(scriptPath.outputStream()) + } + + override val arguments = listOf("-x=${execute(slot, scriptPath, Input(targetPath, statusPath, minSNR, invert))}") + + override fun processOnComplete(exitCode: Int): Output { + val timeoutInMillis = timeout.toMillis() + + if (exitCode == 0) { + val startTime = 
System.currentTimeMillis() + + repeat(600) { + val text = statusPath.readText() + + if (text.startsWith(START_FILE) && text.endsWith(END_FILE)) { + return OBJECT_MAPPER.readValue(text.substring(1, text.length - 1), Output::class.java) + } + + if (timeoutInMillis == 0L || System.currentTimeMillis() - startTime < timeoutInMillis) { + Thread.sleep(500) + } else { + return@repeat + } + } + } + + return Output.FAILED + } + + override fun close() { + scriptPath.deleteIfExists() + statusPath.deleteIfExists() + } +} diff --git a/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightIsRunning.kt b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightIsRunning.kt new file mode 100644 index 000000000..d1a987ae9 --- /dev/null +++ b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightIsRunning.kt @@ -0,0 +1,55 @@ +package nebulosa.pixinsight.script + +import nebulosa.log.debug +import nebulosa.log.loggerFor + +data class PixInsightIsRunning(private val slot: Int) : AbstractPixInsightScript() { + + override val arguments = listOf(if (slot > 0) "-y=$slot" else "-y") + + private val slotIsNotRunning = "The requested application instance #$slot is not running" + private val slotCrashed = "The requested application instance #$slot has crashed" + private val yieldedExecutionInstance = "$YIELDED_EXECUTION_INSTANCE$slot" + + override fun onInputRead(line: String) { + processLine(line) + } + + override fun onErrorRead(line: String) { + processLine(line) + } + + private fun processLine(line: String) { + if (isDone) return + + if (slot > 0) { + if (line.contains(slotIsNotRunning, true) || line.contains(slotCrashed, true)) { + complete(false) + } else if (line.contains(yieldedExecutionInstance, true)) { + complete(true) + } else { + return + } + } else if (line.contains(YIELDED_EXECUTION_INSTANCE, true)) { + complete(true) + } else if (line.contains(NO_RUNNING_PROCESS, true)) { + complete(false) + } else { + return + } + + 
LOG.debug { line } + } + + override fun processOnComplete(exitCode: Int) = false + + override fun close() = Unit + + companion object { + + private const val NO_RUNNING_PROCESS = "There is no running (and alive) instance of the PixInsight" + private const val YIELDED_EXECUTION_INSTANCE = "Yielded execution to running application instance #" + + @JvmStatic private val LOG = loggerFor() + } +} diff --git a/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightPixelMath.kt b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightPixelMath.kt new file mode 100644 index 000000000..a4cd13ad8 --- /dev/null +++ b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightPixelMath.kt @@ -0,0 +1,71 @@ +package nebulosa.pixinsight.script + +import nebulosa.io.resource +import nebulosa.io.transferAndClose +import java.nio.file.Files +import java.nio.file.Path +import kotlin.io.path.deleteIfExists +import kotlin.io.path.outputStream +import kotlin.io.path.readText + +data class PixInsightPixelMath( + private val slot: Int, + private val inputPaths: List, + private val outputPath: Path, + private val expressionRK: String? = null, + private val expressionG: String? = null, + private val expressionB: String? = null, +) : AbstractPixInsightScript() { + + private data class Input( + @JvmField val statusPath: Path, + @JvmField val inputPaths: List, + @JvmField val outputPath: Path, + @JvmField val expressionRK: String? = null, + @JvmField val expressionG: String? = null, + @JvmField val expressionB: String? = null, + ) + + data class Output( + @JvmField val success: Boolean = false, + @JvmField val errorMessage: String? = null, + @JvmField val stackedImage: Path? 
= null, + ) { + + companion object { + + @JvmStatic val FAILED = Output() + } + } + + private val scriptPath = Files.createTempFile("pi-", ".js") + private val statusPath = Files.createTempFile("pi-", ".txt") + + init { + resource("pixinsight/PixelMath.js")!!.transferAndClose(scriptPath.outputStream()) + } + + override val arguments = + listOf("-x=${execute(slot, scriptPath, Input(statusPath, inputPaths, outputPath, expressionRK, expressionG, expressionB))}") + + override fun processOnComplete(exitCode: Int): Output? { + if (exitCode == 0) { + repeat(30) { + val text = statusPath.readText() + + if (text.startsWith(START_FILE) && text.endsWith(END_FILE)) { + return OBJECT_MAPPER.readValue(text.substring(1, text.length - 1), Output::class.java) + } + + Thread.sleep(1000) + } + } + + return Output.FAILED + } + + override fun close() { + scriptPath.deleteIfExists() + statusPath.deleteIfExists() + } +} diff --git a/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightScript.kt b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightScript.kt new file mode 100644 index 000000000..58a39d50e --- /dev/null +++ b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightScript.kt @@ -0,0 +1,25 @@ +package nebulosa.pixinsight.script + +import nebulosa.common.exec.CommandLine +import java.io.Closeable +import java.util.concurrent.Future + +interface PixInsightScript : Future, Closeable { + + val arguments: Iterable + + fun startCommandLine(commandLine: CommandLine) + + fun run(runner: PixInsightScriptRunner) + + fun runSync(runner: PixInsightScriptRunner): T { + run(runner) + return get() + } + + companion object { + + const val DEFAULT_SLOT = 256 + const val UNSPECIFIED_SLOT = 0 + } +} diff --git a/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightScriptRunner.kt b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightScriptRunner.kt new file mode 100644 index 000000000..6becf6d2b 
--- /dev/null +++ b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightScriptRunner.kt @@ -0,0 +1,26 @@ +package nebulosa.pixinsight.script + +import nebulosa.common.exec.commandLine +import nebulosa.log.loggerFor +import java.nio.file.Path + +data class PixInsightScriptRunner(private val executablePath: Path) { + + fun run(script: PixInsightScript<*>) { + val commandLine = commandLine { + executablePath(executablePath) + script.arguments.forEach(::putArg) + DEFAULT_ARGS.forEach(::putArg) + } + + LOG.info("running {} script: {}", script::class.simpleName, commandLine.command) + + script.startCommandLine(commandLine) + } + + companion object { + + @JvmStatic private val DEFAULT_ARGS = arrayOf("--automation-mode", "--no-startup-scripts") + @JvmStatic private val LOG = loggerFor() + } +} diff --git a/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightStartup.kt b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightStartup.kt new file mode 100644 index 000000000..e410a4ea9 --- /dev/null +++ b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/script/PixInsightStartup.kt @@ -0,0 +1,44 @@ +package nebulosa.pixinsight.script + +import nebulosa.io.resource +import nebulosa.io.transferAndClose +import java.nio.file.Files +import kotlin.concurrent.timer +import kotlin.io.path.deleteIfExists +import kotlin.io.path.outputStream +import kotlin.io.path.readText + +data class PixInsightStartup(private val slot: Int) : AbstractPixInsightScript() { + + private val scriptPath = Files.createTempFile("pi-", ".js") + private val outputPath = Files.createTempFile("pi-", ".txt") + + init { + resource("pixinsight/Startup.js")!!.transferAndClose(scriptPath.outputStream()) + } + + override val arguments = listOf("-r=${execute(0, scriptPath, outputPath)}", if (slot > 0) "-n=$slot" else "-n") + + override fun beforeRun() { + var count = 0 + + timer("PixInsight Startup Timer", true, 1000L, 500L) { + if (outputPath.readText() 
== "STARTED") { + complete(true) + cancel() + } else if (count >= 60) { + complete(false) + cancel() + } + + count++ + } + } + + override fun processOnComplete(exitCode: Int) = false + + override fun close() { + scriptPath.deleteIfExists() + outputPath.deleteIfExists() + } +} diff --git a/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/star/detection/PixInsightStarDetector.kt b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/star/detection/PixInsightStarDetector.kt new file mode 100644 index 000000000..b729ba3ab --- /dev/null +++ b/nebulosa-pixinsight/src/main/kotlin/nebulosa/pixinsight/star/detection/PixInsightStarDetector.kt @@ -0,0 +1,21 @@ +package nebulosa.pixinsight.star.detection + +import nebulosa.pixinsight.script.PixInsightDetectStars +import nebulosa.pixinsight.script.PixInsightScriptRunner +import nebulosa.star.detection.ImageStar +import nebulosa.star.detection.StarDetector +import java.nio.file.Path +import java.time.Duration + +data class PixInsightStarDetector( + private val runner: PixInsightScriptRunner, + private val slot: Int, + private val minSNR: Double = 0.0, + private val timeout: Duration = Duration.ZERO, +) : StarDetector { + + override fun detect(input: Path): List { + return PixInsightDetectStars(slot, input, minSNR, false, timeout) + .use { it.runSync(runner).stars.toList() } + } +} diff --git a/nebulosa-pixinsight/src/main/resources/pixinsight/ABE.js b/nebulosa-pixinsight/src/main/resources/pixinsight/ABE.js new file mode 100644 index 000000000..9ffb6e97c --- /dev/null +++ b/nebulosa-pixinsight/src/main/resources/pixinsight/ABE.js @@ -0,0 +1,74 @@ +function decodeParams(hex) { + const buffer = new Uint8Array(hex.length / 4) + + for (let i = 0; i < hex.length; i += 4) { + buffer[i / 4] = parseInt(hex.substr(i, 4), 16) + } + + return JSON.parse(String.fromCharCode.apply(null, buffer)) +} + +function abe() { + const data = { + success: true, + errorMessage: null, + outputImage: null, + } + + try { + const input = 
decodeParams(jsArguments[0]) + + const targetPath = input.targetPath + const outputPath = input.outputPath + const statusPath = input.statusPath + + console.writeln("targetPath=" + targetPath) + console.writeln("outputPath=" + outputPath) + console.writeln("statusPath=" + statusPath) + + const window = ImageWindow.open(targetPath)[0] + + const P = new AutomaticBackgroundExtractor + P.tolerance = 1.000 + P.deviation = 0.800 + P.unbalance = 1.800 + P.minBoxFraction = 0.050 + P.maxBackground = 1.0000 + P.minBackground = 0.0000 + P.useBrightnessLimits = false + P.polyDegree = 4 + P.boxSize = 5 + P.boxSeparation = 5 + P.modelImageSampleFormat = AutomaticBackgroundExtractor.prototype.f32 + P.abeDownsample = 2.00 + P.writeSampleBoxes = false + P.justTrySamples = false + P.targetCorrection = AutomaticBackgroundExtractor.prototype.Subtract + P.normalize = true + P.discardModel = true + P.replaceTarget = true + P.correctedImageId = "" + P.correctedImageSampleFormat = AutomaticBackgroundExtractor.prototype.SameAsTarget + P.verboseCoefficients = false + P.compareModel = false + P.compareFactor = 10.00 + + P.executeOn(window.mainView) + + window.saveAs(outputPath, false, false, false, false) + + window.forceClose() + + data.outputImage = outputPath + + console.writeln("abe finished") + } catch (e) { + data.success = false + data.errorMessage = e.message + console.writeln(data.errorMessage) + } finally { + File.writeTextFile(statusPath, "@" + JSON.stringify(data) + "#") + } +} + +abe() diff --git a/nebulosa-pixinsight/src/main/resources/pixinsight/Align.js b/nebulosa-pixinsight/src/main/resources/pixinsight/Align.js new file mode 100644 index 000000000..f8f93f38c --- /dev/null +++ b/nebulosa-pixinsight/src/main/resources/pixinsight/Align.js @@ -0,0 +1,155 @@ +function decodeParams(hex) { + const buffer = new Uint8Array(hex.length / 4) + + for (let i = 0; i < hex.length; i += 4) { + buffer[i / 4] = parseInt(hex.substr(i, 4), 16) + } + + return 
JSON.parse(String.fromCharCode.apply(null, buffer)) +} + +function alignment() { + const data = { + success: true, + errorMessage: null, + outputImage: null, + outputMaskImage: null, + totalPairMatches: 0, + inliers: 0, + overlapping: 0, + regularity: 0, + quality: 0, + rmsError: 0, + rmsErrorDev: 0, + peakErrorX: 0, + peakErrorY: 0, + h11: 0, + h12: 0, + h13: 0, + h21: 0, + h22: 0, + h23: 0, + h31: 0, + h32: 0, + h33: 0, + } + + try { + const input = decodeParams(jsArguments[0]) + + const referencePath = input.referencePath + const targetPath = input.targetPath + const outputDirectory = input.outputDirectory + const statusPath = input.statusPath + + console.writeln("referencePath=" + referencePath) + console.writeln("targetPath=" + targetPath) + console.writeln("outputDirectory=" + outputDirectory) + console.writeln("statusPath=" + statusPath) + + var P = new StarAlignment + P.structureLayers = 5 + P.noiseLayers = 0 + P.hotPixelFilterRadius = 1 + P.noiseReductionFilterRadius = 0 + P.minStructureSize = 0 + P.sensitivity = 0.50 + P.peakResponse = 0.50 + P.brightThreshold = 3.00 + P.maxStarDistortion = 0.60 + P.allowClusteredSources = false + P.localMaximaDetectionLimit = 0.75 + P.upperLimit = 1.000 + P.invert = false + P.distortionModel = "" + P.undistortedReference = false + P.distortionCorrection = false + P.distortionMaxIterations = 20 + P.distortionMatcherExpansion = 1.00 + P.splineOrder = 2 + P.splineSmoothness = 0.005 + P.matcherTolerance = 0.0500 + P.ransacMaxIterations = 2000 + P.ransacMaximizeInliers = 1.00 + P.ransacMaximizeOverlapping = 1.00 + P.ransacMaximizeRegularity = 1.00 + P.ransacMinimizeError = 1.00 + P.maxStars = 0 + P.fitPSF = StarAlignment.prototype.FitPSF_DistortionOnly + P.psfTolerance = 0.50 + P.useTriangles = false + P.polygonSides = 5 + P.descriptorsPerStar = 20 + P.restrictToPreviews = true + P.intersection = StarAlignment.prototype.MosaicOnly + P.useBrightnessRelations = false + P.useScaleDifferences = false + P.scaleTolerance = 0.100 + 
P.referenceImage = referencePath + P.referenceIsFile = true + P.targets = [ // enabled, isFile, image + [true, true, targetPath] + ] + P.inputHints = "" + P.outputHints = "" + P.mode = StarAlignment.prototype.RegisterMatch + P.writeKeywords = true + P.generateMasks = false + P.generateDrizzleData = false + P.generateDistortionMaps = false + P.inheritAstrometricSolution = false + P.frameAdaptation = false + P.randomizeMosaic = false + P.pixelInterpolation = StarAlignment.prototype.Auto + P.clampingThreshold = 0.30 + P.outputDirectory = outputDirectory + P.outputExtension = ".fits" + P.outputPrefix = "" + P.outputPostfix = "_a" + P.maskPostfix = "_m" + P.distortionMapPostfix = "_dm" + P.outputSampleFormat = StarAlignment.prototype.SameAsTarget + P.overwriteExistingFiles = true + P.onError = StarAlignment.prototype.Continue + P.useFileThreads = true + P.noGUIMessages = true + P.fileThreadOverload = 1.00 + P.maxFileReadThreads = 0 + P.maxFileWriteThreads = 0 + P.memoryLoadControl = true + P.memoryLoadLimit = 0.85 + + P.executeGlobal() + + data.outputImage = P.outputData[0][0] || null + data.outputMaskImage = P.outputData[0][1] || null + data.totalPairMatches = P.outputData[0][2] + data.inliers = P.outputData[0][3] + data.overlapping = P.outputData[0][4] + data.regularity = P.outputData[0][5] + data.quality = P.outputData[0][6] + data.rmsError = P.outputData[0][7] + data.rmsErrorDev = P.outputData[0][8] + data.peakErrorX = P.outputData[0][9] + data.peakErrorY = P.outputData[0][10] + data.h11 = P.outputData[0][11] + data.h12 = P.outputData[0][12] + data.h13 = P.outputData[0][13] + data.h21 = P.outputData[0][14] + data.h22 = P.outputData[0][15] + data.h23 = P.outputData[0][16] + data.h31 = P.outputData[0][17] + data.h32 = P.outputData[0][18] + data.h33 = P.outputData[0][19] + + console.writeln("alignment finished") + } catch (e) { + data.success = false + data.errorMessage = e.message + console.writeln(data.errorMessage) + } finally { + File.writeTextFile(statusPath, "@" 
+ JSON.stringify(data) + "#") + } +} + +alignment() diff --git a/nebulosa-pixinsight/src/main/resources/pixinsight/Calibrate.js b/nebulosa-pixinsight/src/main/resources/pixinsight/Calibrate.js new file mode 100644 index 000000000..814586b86 --- /dev/null +++ b/nebulosa-pixinsight/src/main/resources/pixinsight/Calibrate.js @@ -0,0 +1,121 @@ +function decodeParams(hex) { + const buffer = new Uint8Array(hex.length / 4) + + for (let i = 0; i < hex.length; i += 4) { + buffer[i / 4] = parseInt(hex.substr(i, 4), 16) + } + + return JSON.parse(String.fromCharCode.apply(null, buffer)) +} + +function calibrate() { + const data = { + success: true, + errorMessage: null, + outputImage: null, + } + + try { + const input = decodeParams(jsArguments[0]) + + const targetPath = input.targetPath + const outputDirectory = input.outputDirectory + const statusPath = input.statusPath + const masterDark = input.masterDark || "" + const masterFlat = input.masterFlat || "" + const masterBias = input.masterBias || "" + const compress = input.compress + const use32Bit = input.use32Bit + + console.writeln("targetPath=" + targetPath) + console.writeln("outputDirectory=" + outputDirectory) + console.writeln("statusPath=" + statusPath) + console.writeln("masterDark=" + masterDark) + console.writeln("masterFlat=" + masterFlat) + console.writeln("masterBias=" + masterBias) + console.writeln("compress=" + compress) + console.writeln("use32Bit=" + use32Bit) + + const P = new ImageCalibration + + P.targetFrames = [ // enabled, path + [true, targetPath] + ] + P.enableCFA = true + P.cfaPattern = ImageCalibration.prototype.Auto + P.inputHints = "fits-keywords normalize raw cfa signed-is-physical" + P.outputHints = "properties fits-keywords no-compress-data no-embedded-data no-resolution" + P.pedestal = 0 + P.pedestalMode = ImageCalibration.prototype.Keyword + P.pedestalKeyword = "" + P.overscanEnabled = false + P.overscanImageX0 = 0 + P.overscanImageY0 = 0 + P.overscanImageX1 = 0 + P.overscanImageY1 = 0 + 
P.overscanRegions = [ // enabled, sourceX0, sourceY0, sourceX1, sourceY1, targetX0, targetY0, targetX1, targetY1 + [false, 0, 0, 0, 0, 0, 0, 0, 0], + [false, 0, 0, 0, 0, 0, 0, 0, 0], + [false, 0, 0, 0, 0, 0, 0, 0, 0], + [false, 0, 0, 0, 0, 0, 0, 0, 0] + ] + P.masterBiasEnabled = !!masterBias + P.masterBiasPath = masterBias + P.masterDarkEnabled = !!masterDark + P.masterDarkPath = masterDark + P.masterFlatEnabled = !!masterFlat + P.masterFlatPath = masterFlat + P.calibrateBias = false + P.calibrateDark = false + P.calibrateFlat = false + P.optimizeDarks = false + P.darkOptimizationThreshold = 0.00000 + P.darkOptimizationLow = 3.0000 + P.darkOptimizationWindow = 0 + P.darkCFADetectionMode = ImageCalibration.prototype.DetectCFA + P.separateCFAFlatScalingFactors = true + P.flatScaleClippingFactor = 0.05 + P.evaluateNoise = false + P.noiseEvaluationAlgorithm = ImageCalibration.prototype.NoiseEvaluation_MRS + P.evaluateSignal = false + P.structureLayers = 5 + P.saturationThreshold = 1.00 + P.saturationRelative = false + P.noiseLayers = 1 + P.hotPixelFilterRadius = 1 + P.noiseReductionFilterRadius = 0 + P.minStructureSize = 0 + P.psfType = ImageCalibration.prototype.PSFType_Moffat4 + P.psfGrowth = 1.00 + P.maxStars = 24576 + P.outputDirectory = outputDirectory + P.outputExtension = ".fits" + P.outputPrefix = "" + P.outputPostfix = "_c" + P.outputSampleFormat = use32Bit ? 
ImageCalibration.prototype.f32 : ImageCalibration.prototype.i16 + P.outputPedestal = 0 + P.outputPedestalMode = ImageCalibration.prototype.OutputPedestal_Literal + P.autoPedestalLimit = 0.00010 + P.overwriteExistingFiles = true + P.onError = ImageCalibration.prototype.Continue + P.noGUIMessages = true + P.useFileThreads = true + P.fileThreadOverload = 1.00 + P.maxFileReadThreads = 0 + P.maxFileWriteThreads = 0 + + P.executeGlobal() + + data.outputImage = P.outputData[0][0] || null + + console.writeln("calibration finished") + } catch (e) { + data.success = false + data.errorMessage = e.message + console.writeln(data.errorMessage) + } finally { + File.writeTextFile(statusPath, "@" + JSON.stringify(data) + "#") + } +} + +calibrate() diff --git a/nebulosa-pixinsight/src/main/resources/pixinsight/DetectStars.js b/nebulosa-pixinsight/src/main/resources/pixinsight/DetectStars.js new file mode 100644 index 000000000..45b04f3e6 --- /dev/null +++ b/nebulosa-pixinsight/src/main/resources/pixinsight/DetectStars.js @@ -0,0 +1,841 @@ +// ____ __ _____ ____ +// / __ \ / // ___/ / __ \ +// / /_/ /__ / / \__ \ / /_/ / +// / ____// /_/ / ___/ // _, _/ PixInsight JavaScript Runtime +// /_/ \____/ /____//_/ |_| PJSR Version 1.0 +// ---------------------------------------------------------------------------- +// pjsr/StarDetector.jsh - Released 2024-02-28T16:25:35Z +// ---------------------------------------------------------------------------- +// This file is part of the PixInsight JavaScript Runtime (PJSR). +// PJSR is an ECMA-262-5 compliant framework for development of scripts on the +// PixInsight platform. +// +// Copyright (c) 2003-2024 Pleiades Astrophoto S.L. All Rights Reserved. +// +// Redistribution and use in both source and binary forms, with or without +// modification, is permitted provided that the following conditions are met: +// +// 1. 
All redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// +// 2. All redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// +// 3. Neither the names "PixInsight" and "Pleiades Astrophoto", nor the names +// of their contributors, may be used to endorse or promote products derived +// from this software without specific prior written permission. For written +// permission, please contact info@pixinsight.com. +// +// 4. All products derived from this software, in any form whatsoever, must +// reproduce the following acknowledgment in the end-user documentation +// and/or other materials provided with the product: +// +// "This product is based on software from the PixInsight project, developed +// by Pleiades Astrophoto and its contributors (https://pixinsight.com/)." +// +// Alternatively, if that is where third-party acknowledgments normally +// appear, this acknowledgment must be reproduced in the product itself. +// +// THIS SOFTWARE IS PROVIDED BY PLEIADES ASTROPHOTO AND ITS CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +// TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL PLEIADES ASTROPHOTO OR ITS +// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +// EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, BUSINESS +// INTERRUPTION; PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; AND LOSS OF USE, +// DATA OR PROFITS) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +// POSSIBILITY OF SUCH DAMAGE. 
+// ---------------------------------------------------------------------------- + +#include <pjsr/ImageOp.jsh> +#include <pjsr/MorphOp.jsh> + +function Star(pos, flux, bkg, rect, size, nmax, snr, peak) { + // Centroid position in pixels, image coordinates. This property is an + // object with x and y Number properties. + this.pos = pos + // Total flux, normalized intensity units. + this.flux = flux + // Mean local background, normalized intensity units. + this.bkg = bkg + // Detection region, image coordinates. + this.rect = rect + // Area of detected star structure in square pixels. + this.size = size + // Number of local maxima in the detection structure. A value greater than + // one denotes a double/multiple star or a crowded source. A value of zero + // signals that detection of local maxima has been disabled, either globally + // or for this particular structure. + this.nmax = nmax + + this.snr = snr + this.peak = peak +} + +function StarDetector() { + this.__base__ = Object + this.__base__() + + /* + * Number of wavelet layers for structure detection. (default=5) + */ + this.structureLayers = 5 + + /* + * Half size in pixels of a morphological median filter, for hot pixel + * removal. (default=1) + */ + this.hotPixelFilterRadius = 1 + + /* + * Whether the hot pixel filter removal should be applied to the image used + * for star detection, or only to the working image used to build the + * structure map. (default=false) + * + * By setting this parameter to true, the detection algorithm is completely + * robust to hot pixels (of sizes not larger than hotPixelFilterRadius), but + * it is also less sensitive, so less stars will in general be detected. + * With the default value of false, some hot pixels may be wrongly detected + * as stars but the number of true stars detected will generally be larger. + */ + this.applyHotPixelFilterToDetectionImage = false + + /* + * Half size in pixels of a Gaussian convolution filter applied for noise + * reduction. Useful for star detection in low-SNR images. 
(default=0) + * + * N.B. Setting the value of this parameter > 0 implies + * applyHotPixelFilterToDetectionImage=true. + */ + this.noiseReductionFilterRadius = 0 + + /* + * Sensitivity of the star detection device. + * + * Internally, the sensitivity of the star detection algorithm is expressed + * in signal-to-noise ratio units with respect to the evaluated dispersion + * of local background pixels for each detected structure. Given a source + * with estimated brightness s, local background b and local background + * dispersion n, sensitivity is the minimum value of (s - b)/n necessary to + * trigger star detection. + * + * To isolate the public interface of this class from its internal + * implementation, this parameter is normalized to the [0,1] range, where 0 + * and 1 represent minimum and maximum sensitivity, respectively. This + * abstraction allows us to change the star detection engine without + * breaking dependent tools and processes. + * + * Increase this value to favor detection of fainter stars. Decrease it to + * restrict detection to brighter stars. (default=0.5). + */ + this.sensitivity = 0.5 + + /*! + * Peak sensitivity of the star detection device. + * + * Internally, the peak response property of the star detection algorithm is + * expressed in kurtosis units. For each detected structure, kurtosis is + * evaluated from all significant pixels with values greater than the + * estimated mean local background. Peak response is the minimum value of + * kurtosis necessary to trigger star detection. + * + * To isolate the public interface of this class from its internal + * implementation, this parameter is normalized to the [0,1] range, where 0 + * and 1 represent minimum and maximum peak response, respectively. This + * abstraction allows us to change the star detection engine without + * breaking dependent tools and processes. + * + * If you decrease this parameter, stars will need to have stronger (or more + * prominent) peaks to be detected. 
This is useful to prevent detection of + * saturated stars, as well as small nonstellar features. By increasing this + * parameter, the star detection algorithm will be more sensitive to + * 'peakedness', and hence more tolerant with relatively flat image + * features. (default=0.5). + */ + this.peakResponse = 0.5 + + /*! + * If this parameter is false, a local maxima map will be generated to + * identify and prevent detection of multiple sources that are too close to + * be separated as individual structures, such as double and multiple stars. + * In general, barycenter positions cannot be accurately determined for + * sources with several local maxima. If this parameter is true, + * non-separable multiple sources will be detectable as single objects. + * (default=false) + */ + this.allowClusteredSources = false + + /* + * Half size in pixels of the local maxima detection filter. (default=2) + */ + this.localDetectionFilterRadius = 2 + + /*! + * This parameter is a normalized pixel value in the [0,1] range. Structures + * with pixels above this value will be excluded for local maxima detection. + * (default=0.75) + */ + this.localMaximaDetectionLimit = 0.75 + + /* + * Set this flag true to avoid detection of local maxima. (default=false) + * Setting this parameter to true implies allowClusteredSources = true. + */ + this.noLocalMaximaDetection = false + + /*! + * Maximum star distortion. + * + * Internally, star distortion is evaluated in units of coverage of a square + * region circumscribed to each detected structure. The coverage of a + * perfectly circular star is pi/4 (about 0.8). Lower values denote + * elongated or irregular sources. + * + * To isolate the public interface of this class from its internal + * implementation, this parameter is normalized to the [0,1] range, where 0 + * and 1 represent minimum and maximum distortion, respectively. 
This + * abstraction allows us to change the star detection engine without + * breaking dependent tools and processes. + * + * Use this parameter, if necessary, to control inclusion of elongated + * stars, complex clusters of stars, and nonstellar image features. + * (default=0.6) + */ + this.maxDistortion = 0.6 + + /*! + * Stars with measured SNR above this parameter in units of the minimum + * detection level (as defined by the sensitivity parameter) will always be + * detected, even if their profiles are too flat for the current peak + * response. This allows us to force inclusion of bright stars. (default=3) + */ + this.brightThreshold = 3 + + /* + * Minimum signal-to-noise ratio of a detectable star. + * + * Given a source with estimated brightness s, local background b and local + * background dispersion n, SNR is evaluated as (s - b)/n. Stars with + * measured SNR below this parameter won't be detected. (default=0) + * + * The value of this parameter can be increased to limit star detection to a + * subset of the brightest sources in the image adaptively, instead of + * imposing an arbitrary limit on the number of detected stars. + */ + this.minSNR = 0 + + /*! + * Minimum size of a detectable star structure in square pixels. + * + * This parameter can be used to prevent detection of small and bright image + * artifacts as stars. This can be useful to work with uncalibrated or + * wrongly calibrated data, especially demosaiced CFA frames where hot + * pixels have generated large bright artifacts that cannot be removed with + * a median filter, poorly focused images, and images with poor tracking. + * (default=1) + */ + this.minStructureSize = 1 + + /* + * Stars with peak values greater than this value won't be detected. + * (default=1) + */ + this.upperLimit = 1.0 + + /* + * Detect dark structures over a bright background, instead of bright + * structures over a dark background. 
(default=false) + */ + this.invert = false + + /* + * Optional callback progress function with the following signature: + * + * Boolean progressCallback( int count, int total ) + * + * If defined, this function will be called by the stars() method for each + * row of its target image. The count argument is the current number of + * processed pixel rows, and total is the height of the target image. If the + * function returns false, the star detection task will be aborted. If the + * function returns true, the task will continue. (default=undefined) + */ + this.progressCallback = undefined + + /* + * Optional mask image. If defined, star detection will be restricted to + * nonzero mask pixels. (default=undefined) + */ + this.mask = undefined + + /* + * Stretch factor for the barycenter search algorithm, in sigma units. + * Increase it to make the algorithm more robust to nearby structures, such + * as multiple/crowded stars and small nebular features. However, too large + * of a stretch factor will make the algorithm less accurate. (default=1.5) + */ + this.xyStretch = 1.5 + + /* + * Square structuring element + */ + function BoxStructure(size) { + let B = new Array(size * size) + for (let i = 0; i < B.length; ++i) + B[i] = 1 + let S = new Array + S.push(B) + return S + } + + /* + * Circular structuring element + */ + function CircularStructure(size) { + size |= 1 + let C = new Array(size * size) + let s2 = size >> 1 + let n2 = size / 2 + let n22 = n2 * n2 + for (let i = 0; i < s2; ++i) { + let di = i + 0.5 - n2 + let di2 = di * di + let i2 = i * size + let i1 = i2 + size - 1 + let i3 = (size - i - 1) * size + let i4 = i3 + size - 1 + for (let j = 0; j < s2; ++j) { + let dj = j + 0.5 - n2 + C[i1 - j] = C[i2 + j] = C[i3 + j] = C[i4 - j] = (di2 + dj * dj <= n22) ? 
1 : 0 + } + } + for (let i = 0; i < size; ++i) + C[i * size + s2] = C[s2 * size + i] = 1 + let S = new Array + S.push(C) + return S + } + + /* + * Hot pixel removal with a median filter + */ + this.hotPixelFilter = function (image) { + if (this.hotPixelFilterRadius > 0) + if (this.hotPixelFilterRadius > 1) + image.morphologicalTransformation(MorphOp_Median, CircularStructure(2 * this.hotPixelFilterRadius + 1)) + else + image.morphologicalTransformation(MorphOp_Median, BoxStructure(3)) + } + + /* + * Isolate star detection structures in an image. Replaces the specified map + * image with its binary star detection map. + */ + this.getStructureMap = function (map) { + // Flatten the image with a high-pass filter + let s = new Image(map) + let G = Matrix.gaussianFilterBySize(1 + (1 << this.structureLayers)) + s.convolveSeparable(G.rowVector(G.rows >> 1), G.rowVector(G.rows >> 1)) + map.apply(s, ImageOp_Sub) + s.free() + map.truncate() + map.rescale() + + // Strength the smallest structures with a dilation filter + map.morphologicalTransformation(MorphOp_Dilation, BoxStructure(3)) + + // Adaptive binarization + let m = map.median() + if (1 + m == 1) { + // Black background - probably a noiseless synthetic star field + let wasRangeClippingEnabled = map.rangeClippingEnabled + let wasRangeClipLow = map.rangeClipLow + let wasRangeClipHigh = map.rangeClipHigh + map.rangeClippingEnabled = true + map.rangeClipLow = 0 + map.rangeClipHigh = 1 + if (!wasRangeClippingEnabled || wasRangeClipLow != 0 || wasRangeClipHigh != 1) + m = map.median() + map.binarize(m + map.MAD(m)) + map.rangeClippingEnabled = wasRangeClippingEnabled + map.rangeClipLow = wasRangeClipLow + map.rangeClipHigh = wasRangeClipHigh + } + else { + // A "natural" image - binarize at 3*noise_stdDev + let n = map.noiseKSigma(1)[0] + map.binarize(m + 3 * n) + } + + // Remove noise residuals with an erosion filter + map.morphologicalTransformation(MorphOp_Erosion, BoxStructure(3)) + + // Optional star detection mask + 
if (this.mask != undefined) + map.apply(this.mask, ImageOp_Mul) + } + + /* + * Local maxima detection. + */ + this.getLocalMaximaMap = function (map) { + // We apply a dilation filter with a flat structuring element without its + // central element. Local maxima are those pixels in the input image with + // values greater than the dilated image. + // The localMaximaDetectionLimit parameter allows us to prevent detection of + // false multiple maxima on saturated or close to saturated structures. + let Bh = BoxStructure((this.localDetectionFilterRadius << 1) | 1) + Bh[0][Bh[0].length >> 1] = 0 + let l = new Image(map) + l.binarize(this.localMaximaDetectionLimit) + l.invert() + let t = new Image(map) + t.morphologicalTransformation(MorphOp_Dilation, Bh) + map.apply(t, ImageOp_Sub) + t.free() + map.binarize(0) + map.apply(l, ImageOp_Mul) + l.free() + } + + /* + * Compute star parameters + */ + this.starParameters = function (image, rect, starPoints, lmMap) { + let params = { + pos: { x: 0, y: 0 }, // barycenter image coordinates + rect: { x0: 0, y0: 0, x1: 0, y1: 0 }, // detection rectangle + bkg: 0, // local background + sigma: 0, // local background dispersion + flux: 0, // total flux + max: 0, // maximum pixel value + nmax: 0, // number of local maxima in structure + peak: 0, // robust peak value + kurt: 0, // kurtosis + count: 0, // sample length + size: 0 + } // structure size in square pixels + + // Mean local background and local background dispersion + for (let delta = 4, it = 0, m0 = 1; ; ++delta, ++it) { + let r = rect.inflatedBy(delta) + let a = [], b = [], c = [], d = [] + image.getSamples(a, new Rect(r.x0, r.y0, r.x1, rect.y0)) + image.getSamples(b, new Rect(r.x0, rect.y0, rect.x0, rect.y1)) + image.getSamples(c, new Rect(r.x0, rect.y1, r.x1, r.y1)) + image.getSamples(d, new Rect(rect.x1, rect.y0, r.x1, rect.y1)) + let B = a.concat(b, c, d) + let m = Math.median(B) + if (m > m0 || (m0 - m) / m0 < 0.01) { + params.bkg = m + params.sigma = Math.max(1.4826 * 
Math.MAD(B), Math.EPSILON32) + break + } + // Guard us against rare ill-posed conditions + if (it == 200) + return null + m0 = m + } + + // Detection region + params.rect = rect.inflatedBy(2).intersection(image.bounds) + + // Structure size + params.size = starPoints.length + + // Significant subset + let v = [] + for (let i = 0; i < starPoints.length; ++i) { + let p = starPoints[i] + let f = image.sample(p.x, p.y) + if (f > params.bkg) { + // Local maxima + if (!this.noLocalMaximaDetection) + if (lmMap.sample(p.x, p.y) != 0) + ++params.nmax + v.push(f) + // Total flux above local background + params.flux += f + } + } + + // Fail if no significant data + if (v.length == 0) + return null + + // Fail if we have multiple maxima and those are not allowed + if (params.nmax > 1) + if (!this.allowClusteredSources) + return null + + // Barycenter coordinates + let M = Matrix.fromImage(image, rect) + M.truncate(Math.range(M.median() + this.xyStretch * M.stdDev(), 0.0, 1.0), 1.0) + M.rescale() + let sx = 0, sy = 0, sz = 0 + for (let y = rect.y0, i = 0; i < M.rows; ++y, ++i) + for (let x = rect.x0, j = 0; j < M.cols; ++x, ++j) { + let z = M.at(i, j) + if (z > 0) { + sx += z * x + sy += z * y + sz += z + } + } + params.pos.x = sx / sz + 0.5 + params.pos.y = sy / sz + 0.5 + + // Sort significant pixels in decreasing flux order + v.sort((a, b) => (a < b) ? +1 : ((b < a) ? 
-1 : 0)) + // Maximum pixel value + params.max = v[0] + + // Find subset of significant high pixel values + let mn = 0 + for (let i = 0; i < v.length && (mn < 5 || v[i] == v[i - 1]); ++i, ++mn) { } + for (let i = 0; i < mn; ++i) + params.peak += v[i] + // Significant peak value + params.peak /= mn + // Significant sample length + params.count = v.length + + // Kurtosis + let s = Math.stdDev(v) + if (1 + s != 1) { + let m = params.flux / v.length + let k = 0 + for (let i = 0; i < v.length; ++i) { + let d = (v[i] - m) / s + d *= d + k += d * d + } + params.kurt = k / params.count + } + + return params + } + + /* + * Finds all the stars in an image. Returns an array of Star objects. + */ + this.stars = function (image) { + // We work on a duplicate of the source grayscale image, or on its HSI + // intensity component if it is a color image. + let wrk = Image.newFloatImage() + image.getIntensity(wrk) + + // Hot pixel removal, if applied to the image where we are going to find + // stars, not just to the image used to build the structure map. + // When noise reduction is enabled, always remove hot pixels first, or + // hot pixels would be promoted to "stars". + let alreadyFixedHotPixels = false + if (this.applyHotPixelFilterToDetectionImage || this.noiseReductionFilterRadius > 0) { + this.hotPixelFilter(wrk) + alreadyFixedHotPixels = true + } + + // If the invert flag is set, then we are looking for dark structures on + // a bright background. + if (this.invert) + wrk.invert() + + // Optional noise reduction + if (this.noiseReductionFilterRadius > 0) { + let G = Matrix.gaussianFilterBySize((this.noiseReductionFilterRadius << 1) | 1) + wrk.convolveSeparable(G.rowVector(G.rows >> 1), G.rowVector(G.rows >> 1)) + } + + // Structure map + let map = Image.newFloatImage() + map.assign(wrk) + // Hot pixel removal, if applied just to the image used to build the + // structure map. 
+ if (!alreadyFixedHotPixels) + this.hotPixelFilter(map) + this.getStructureMap(map) + + // Use matrices instead of images for faster access + let M = map.toMatrix() + map.free() + + // Local maxima map + let lmMap + if (!this.noLocalMaximaDetection) { + lmMap = Image.newFloatImage() + lmMap.assign(wrk) + this.getLocalMaximaMap(lmMap) + } + + /* + * Internal detection parameters + */ + // Signal detection threshold in local sigma units. + let snrThreshold = 0.1 + 4.8 * (1 - Math.range(this.sensitivity, 0, 1)) + // Peak detection threshold in kurtosis units. + let peakThreshold = 0.1 + 9.8 * (1 - Math.range(this.peakResponse, 0, 1)) + // Maximum distortion in coverage units. + let minCoverage = Math.PI4 * (1 - Math.range(this.maxDistortion, 0, 1)) + + // The detected stars + let S = new Array + + // Structure scanner + for (let y0 = 0, x1 = M.cols - 1, y1 = M.rows - 1; y0 < y1; ++y0) { + if (this.progressCallback != undefined) + if (!this.progressCallback(y0, M.rows)) + return null + + for (let x0 = 0; x0 < x1; ++x0) { + // Exclude background pixels and already visited pixels + if (M.at(y0, x0) == 0) + continue + + // Star pixel coordinates + let starPoints = new Array + + // Star bounding rectangle + let r = new Rect(x0, y0, x0 + 1, y0 + 1) + + // Grow star region downward + for (let y = y0, x = x0, xa, xb; ;) { + // Add this pixel to the current star + starPoints.push({ x: x, y: y }) + + // Explore the left segment of this row + for (xa = x; xa > 0;) { + if (M.at(y, xa - 1) == 0) + break + --xa + starPoints.push({ x: xa, y: y }) + } + + // Explore the right segment of this row + for (xb = x; xb < x1;) { + if (M.at(y, xb + 1) == 0) + break + ++xb + starPoints.push({ x: xb, y: y }) + } + + // xa and xb are now the left and right boundary limits, + // respectively, of this row in the current star. 
+ + if (xa < r.x0) // update left boundary + r.x0 = xa + + if (xb >= r.x1) // update right boundary + r.x1 = xb + 1 // bottom-right corner excluded (PCL-specific) + + // Prepare for next row + ++y + + // Decide whether we are done with this star now, or if + // there is at least one more row that must be explored. + + let nextRow = false + + // Explore the next row from left to right. We'll continue + // gathering pixels if we find at least one nonzero map pixel. + for (x = xa; x <= xb; ++x) + if (M.at(y, x) != 0) { + nextRow = true + break + } + + if (!nextRow) + break + + // Update bottom boundary + r.y1 = y + 1 // Rect *excludes* the bottom-right corner + + // Terminate if we reach the last row of the image + if (y == y1) + break + } + + /* + * If this is a reliable star, compute its barycenter coordinates + * and add it to the star list. + * + * Rejection criteria: + * + * * Stars whose peak values are greater than the upperLimit + * parameter are rejected. + * + * * If this structure is touching a border of the image, reject + * it. We cannot compute an accurate position for a clipped star. + * + * * Too small structures are rejected. This mainly prevents + * inclusion of hot (or cold) pixels. This condition is enforced + * by the hot pixel removal and noise reduction steps performed + * during the structure detection phase, and optionally by + * increasing the minStructureSize parameter. + * + * * Too large structures are rejected. This prevents inclusion of + * extended nonstellar objects and saturated bright stars. This + * is also part of the structure detection algorithm. + * + * * Too elongated stars are rejected. The minCoverage parameter + * determines the maximum distortion allowed. A perfect square + * has coverage = 1. The coverage of a perfect circle is pi/4. + * + * * Too sparse sources are rejected. This prevents detection of + * multiple stars where centroids cannot be well determined. + * + * * Too dim structures are rejected. 
The sensitivity parameter + * defines the sensitivity of the star detection algorithm in + * local sigma units. The minSNR parameter can be used to limit + * star detection to a subset of the brightest stars adaptively. + * + * * Too flat structures are rejected. The peakThreshold parameter + * defines the peak sensitivity of the star detection algorithm + * in kurtosis units. + */ + if (r.width > 1 && r.height > 1) + if (r.y0 > 0 && r.y1 <= y1 && r.x0 > 0 && r.x1 <= x1) + if (starPoints.length >= this.minStructureSize) { + let p = this.starParameters(wrk, r, starPoints, lmMap) + if (p != null) + if (p.max <= this.upperLimit) { + let d = Math.max(r.width, r.height) + if (p.count / d / d >= minCoverage) { + let ix = Math.trunc(p.pos.x) | 0 + let iy = Math.trunc(p.pos.y) | 0 + if (this.mask == undefined || this.mask.sample(ix, iy) != 0) { + let snr = (p.peak - p.bkg) / p.sigma + if (snr >= this.minSNR) { + let s1 = snr / snrThreshold + if (s1 >= 1) + if (s1 >= this.brightThreshold || p.kurt == 0 || p.kurt / peakThreshold >= 1) + S.push(new Star(p.pos, p.flux, p.bkg, p.rect, p.size, p.nmax, snr, p.peak)) + } + } + } + } + } + + // Erase this structure. + for (let i = 0; i < starPoints.length; ++i) { + let p = starPoints[i] + M.at(p.y, p.x, 0) + } + } + } + + // Sort the list of detected sources in descending brightness order. + S.sort((a, b) => (a.flux < b.flux) ? +1 : ((b.flux < a.flux) ? -1 : 0)) + + // Perform a soft garbage collection. This eases integration with very + // long batch tasks and has no measurable performance penalty. 
+ gc(false/*hard*/) + + if (this.progressCallback != undefined) + if (!this.progressCallback(M.rows, M.rows)) + return null + + return S + } +} + +StarDetector.prototype = new Object + +function computeHfr(image, s) { + let a = 0 + let b = 0 + + const r = Math.min(s.rect.y1 - s.rect.y0, s.rect.x1 - s.rect.x0) / 2 + + for (let y = s.rect.y0; y <= s.rect.y1; y++) { + for (let x = s.rect.x0; x <= s.rect.x1; x++) { + if (x >= 0 && x < image.width && y >= 0 && y < image.height) { + const d = Math.sqrt((x - s.pos.x) * (x - s.pos.x) + (y - s.pos.y) * (y - s.pos.y)) + + if (d <= r) { + const p = image.sample(x, y) + const v = p - s.bkg + a += v * d + b += v + } + } + } + } + + s.hfr = b > 0.0 ? a / b : 0.0 +} + +function decodeParams(hex) { + const buffer = new Uint8Array(hex.length / 4) + + for (let i = 0; i < hex.length; i += 4) { + buffer[i / 4] = parseInt(hex.substr(i, 4), 16) + } + + return JSON.parse(String.fromCharCode.apply(null, buffer)) +} + +function detectStars() { + const data = { + success: true, + errorMessage: null, + stars: [], + } + + try { + const input = decodeParams(jsArguments[0]) + + const targetPath = input.targetPath + const statusPath = input.statusPath + const minSNR = input.minSNR + const invert = input.invert + + console.writeln("targetPath=" + targetPath) + console.writeln("statusPath=" + statusPath) + console.writeln("minSNR=" + minSNR) + console.writeln("invert=" + invert) + + const P = new StarDetector + P.structureLayers = 5 + P.hotPixelFilterRadius = 1 + P.applyHotPixelFilterToDetectionImage = false + P.noiseReductionFilterRadius = 0 + P.sensitivity = 0.5 + P.peakResponse = 0.5 + P.allowClusteredSources = false + P.localDetectionFilterRadius = 2 + P.localMaximaDetectionLimit = 0.75 + P.noLocalMaximaDetection = false + P.maxDistortion = 0.6 + P.brightThreshold = 3 + P.minSNR = minSNR + P.minStructureSize = 1 + P.upperLimit = 1.0 + P.invert = invert + P.xyStretch = 1.5 + + const window = ImageWindow.open(targetPath) + const image = 
window[0].mainView.image + + const sl = P.stars(image) + + for (let i = 0; i < sl.length; i++) { + const s = sl[i] + computeHfr(image, s) + data.stars.push({ x: s.pos.x, y: s.pos.y, flux: s.flux * 65536, size: s.size, nmax: s.nmax, bkg: s.bkg, x0: s.rect.x0, y0: s.rect.y0, x1: s.rect.x1, y1: s.rect.y1, snr: s.snr, peak: s.peak, hfd: 2 * s.hfr }) + } + + window[0].forceClose() + + console.writeln("star detection finished. stars=", sl.length) + } catch (e) { + data.success = false + data.errorMessage = e.message + console.writeln(data.errorMessage) + } finally { + File.writeTextFile(statusPath, "@" + JSON.stringify(data) + "#") + } +} + +detectStars() diff --git a/nebulosa-pixinsight/src/main/resources/pixinsight/PixelMath.js b/nebulosa-pixinsight/src/main/resources/pixinsight/PixelMath.js new file mode 100644 index 000000000..7c335ff77 --- /dev/null +++ b/nebulosa-pixinsight/src/main/resources/pixinsight/PixelMath.js @@ -0,0 +1,102 @@ +function decodeParams(hex) { + const buffer = new Uint8Array(hex.length / 4) + + for (let i = 0; i < hex.length; i += 4) { + buffer[i / 4] = parseInt(hex.substr(i, 4), 16) + } + + return JSON.parse(String.fromCharCode.apply(null, buffer)) +} + +function pixelMath() { + const data = { + success: true, + errorMessage: null, + stackedImage: null, + } + + try { + const input = decodeParams(jsArguments[0]) + + const statusPath = input.statusPath + const inputPaths = input.inputPaths + const outputPath = input.outputPath + let expressionRK = input.expressionRK + let expressionG = input.expressionG + let expressionB = input.expressionB + + console.writeln("statusPath=" + statusPath) + console.writeln("inputPaths=" + inputPaths) + console.writeln("outputPath=" + outputPath) + + const windows = [] + + for (let i = 0; i < inputPaths.length; i++) { + windows.push(ImageWindow.open(inputPaths[i])[0]) + } + + for (let i = 0; i < windows.length; i++) { + if (expressionRK) { + expressionRK = expressionRK.replace("{{" + i + "}}", 
windows[i].mainView.id) + } + if (expressionG) { + expressionG = expressionG.replace("{{" + i + "}}", windows[i].mainView.id) + } + if (expressionB) { + expressionB = expressionB.replace("{{" + i + "}}", windows[i].mainView.id) + } + } + + console.writeln("expressionRK=" + expressionRK) + console.writeln("expressionG=" + expressionG) + console.writeln("expressionB=" + expressionB) + + var P = new PixelMath + P.expression = expressionRK || "" + P.expression1 = expressionG || "" + P.expression2 = expressionB || "" + P.expression3 = "" + P.useSingleExpression = false + P.symbols = "" + P.clearImageCacheAndExit = false + P.cacheGeneratedImages = false + P.generateOutput = true + P.singleThreaded = false + P.optimization = true + P.use64BitWorkingImage = false + P.rescale = false + P.rescaleLower = 0 + P.rescaleUpper = 1 + P.truncate = true + P.truncateLower = 0 + P.truncateUpper = 1 + P.createNewImage = false + P.showNewImage = false + P.newImageId = "" + P.newImageWidth = 0 + P.newImageHeight = 0 + P.newImageAlpha = false + P.newImageColorSpace = PixelMath.prototype.SameAsTarget + P.newImageSampleFormat = PixelMath.prototype.SameAsTarget + + P.executeOn(windows[0].mainView) + + windows[0].saveAs(outputPath, false, false, false, false) + + for (let i = 0; i < windows.length; i++) { + windows[i].forceClose() + } + + data.stackedImage = outputPath + + console.writeln("stacking finished") + } catch (e) { + data.success = false + data.errorMessage = e.message + console.writeln(data.errorMessage) + } finally { + File.writeTextFile(statusPath, "@" + JSON.stringify(data) + "#") + } +} + +pixelMath() diff --git a/nebulosa-pixinsight/src/main/resources/pixinsight/Startup.js b/nebulosa-pixinsight/src/main/resources/pixinsight/Startup.js new file mode 100644 index 000000000..3d9a7c5eb --- /dev/null +++ b/nebulosa-pixinsight/src/main/resources/pixinsight/Startup.js @@ -0,0 +1 @@ +File.writeTextFile(jsArguments[0], "STARTED") diff --git 
a/nebulosa-pixinsight/src/test/kotlin/PixInsightScriptTest.kt b/nebulosa-pixinsight/src/test/kotlin/PixInsightScriptTest.kt new file mode 100644 index 000000000..2bc11d4d2 --- /dev/null +++ b/nebulosa-pixinsight/src/test/kotlin/PixInsightScriptTest.kt @@ -0,0 +1,65 @@ +import io.kotest.core.annotation.EnabledIf +import io.kotest.engine.spec.tempdir +import io.kotest.engine.spec.tempfile +import io.kotest.matchers.booleans.shouldBeTrue +import io.kotest.matchers.doubles.plusOrMinus +import io.kotest.matchers.nulls.shouldNotBeNull +import io.kotest.matchers.paths.shouldExist +import io.kotest.matchers.shouldBe +import nebulosa.pixinsight.script.* +import nebulosa.test.AbstractFitsAndXisfTest +import nebulosa.test.NonGitHubOnlyCondition +import java.nio.file.Files +import java.nio.file.Path + +@EnabledIf(NonGitHubOnlyCondition::class) +class PixInsightScriptTest : AbstractFitsAndXisfTest() { + + init { + val runner = PixInsightScriptRunner(Path.of("PixInsight")) + val workingDirectory = tempdir("pi-").toPath() + + "startup" { + PixInsightStartup(PixInsightScript.DEFAULT_SLOT) + .use { it.runSync(runner).shouldBeTrue() } + } + "is running" { + PixInsightIsRunning(PixInsightScript.DEFAULT_SLOT) + .use { it.runSync(runner).shouldBeTrue() } + } + "calibrate" { + PixInsightCalibrate(PixInsightScript.UNSPECIFIED_SLOT, workingDirectory, PI_01_LIGHT, PI_DARK, PI_FLAT, PI_BIAS) + .use { it.runSync(runner).also(::println).outputImage.shouldNotBeNull().shouldExist() } + } + "align" { + PixInsightAlign(PixInsightScript.UNSPECIFIED_SLOT, workingDirectory, PI_01_LIGHT, PI_02_LIGHT) + .use { it.runSync(runner).also(::println).outputImage.shouldNotBeNull().shouldExist() } + } + "detect stars" { + PixInsightDetectStars(PixInsightScript.UNSPECIFIED_SLOT, PI_FOCUS_0) + .use { it.runSync(runner).also(::println).stars } + .map { it.hfd } + .average() shouldBe (8.43 plusOrMinus 1e-2) + + PixInsightDetectStars(PixInsightScript.UNSPECIFIED_SLOT, PI_FOCUS_30000) + .use { 
it.runSync(runner).also(::println).stars } + .map { it.hfd } + .average() shouldBe (1.85 plusOrMinus 1e-2) + + PixInsightDetectStars(PixInsightScript.UNSPECIFIED_SLOT, PI_FOCUS_100000) + .use { it.runSync(runner).also(::println).stars } + .map { it.hfd } + .average() shouldBe (18.35 plusOrMinus 1e-2) + } + "pixel math" { + val outputPath = Files.createTempFile("pi-stacked-", ".fits") + PixInsightPixelMath(PixInsightScript.UNSPECIFIED_SLOT, listOf(PI_01_LIGHT, PI_02_LIGHT), outputPath, "{{0}} + {{1}}") + .use { it.runSync(runner).also(::println).stackedImage.shouldNotBeNull().shouldExist() } + } + "abe" { + val outputPath = tempfile("pi-", ".fits").toPath() + PixInsightAutomaticBackgroundExtractor(PixInsightScript.UNSPECIFIED_SLOT, PI_01_LIGHT, outputPath) + .use { it.runSync(runner).also(::println).outputImage.shouldNotBeNull() } + } + } +} diff --git a/nebulosa-pixinsight/src/test/kotlin/PixInsightStarDetectorTest.kt b/nebulosa-pixinsight/src/test/kotlin/PixInsightStarDetectorTest.kt new file mode 100644 index 000000000..d14437843 --- /dev/null +++ b/nebulosa-pixinsight/src/test/kotlin/PixInsightStarDetectorTest.kt @@ -0,0 +1,19 @@ +import io.kotest.core.annotation.EnabledIf +import io.kotest.matchers.collections.shouldHaveSize +import nebulosa.pixinsight.script.PixInsightScriptRunner +import nebulosa.pixinsight.star.detection.PixInsightStarDetector +import nebulosa.test.AbstractFitsAndXisfTest +import nebulosa.test.NonGitHubOnlyCondition +import java.nio.file.Path + +@EnabledIf(NonGitHubOnlyCondition::class) +class PixInsightStarDetectorTest : AbstractFitsAndXisfTest() { + + init { + "detect stars" { + val runner = PixInsightScriptRunner(Path.of("PixInsight")) + val detectedStars = PixInsightStarDetector(runner, 0).detect(NGC3344_MONO_8_FITS) + detectedStars shouldHaveSize 15 + } + } +} diff --git a/nebulosa-test/src/main/kotlin/nebulosa/test/AbstractFitsAndXisfTest.kt b/nebulosa-test/src/main/kotlin/nebulosa/test/AbstractFitsAndXisfTest.kt index 
7fda567c9..f14f90738 100644 --- a/nebulosa-test/src/main/kotlin/nebulosa/test/AbstractFitsAndXisfTest.kt +++ b/nebulosa-test/src/main/kotlin/nebulosa/test/AbstractFitsAndXisfTest.kt @@ -109,6 +109,26 @@ abstract class AbstractFitsAndXisfTest : StringSpec() { protected val STAR_FOCUS_16 by lazy { download("STAR.FOCUS.16.fits", GITHUB_FITS_URL) } protected val STAR_FOCUS_17 by lazy { download("STAR.FOCUS.17.fits", GITHUB_FITS_URL) } + protected val PI_01_LIGHT by lazy { download("PI.01.LIGHT.fits", GITHUB_FITS_URL) } + protected val PI_02_LIGHT by lazy { download("PI.02.LIGHT.fits", GITHUB_FITS_URL) } + protected val PI_03_LIGHT by lazy { download("PI.03.LIGHT.fits", GITHUB_FITS_URL) } + protected val PI_04_LIGHT by lazy { download("PI.04.LIGHT.fits", GITHUB_FITS_URL) } + protected val PI_05_LIGHT by lazy { download("PI.05.LIGHT.fits", GITHUB_FITS_URL) } + protected val PI_06_LIGHT by lazy { download("PI.06.LIGHT.fits", GITHUB_FITS_URL) } + protected val PI_07_LIGHT by lazy { download("PI.07.LIGHT.fits", GITHUB_FITS_URL) } + protected val PI_08_LIGHT by lazy { download("PI.08.LIGHT.fits", GITHUB_FITS_URL) } + protected val PI_09_LIGHT by lazy { download("PI.09.LIGHT.fits", GITHUB_FITS_URL) } + protected val PI_10_LIGHT by lazy { download("PI.10.LIGHT.fits", GITHUB_FITS_URL) } + protected val PI_BIAS by lazy { download("PI.BIAS.fits", GITHUB_FITS_URL) } + protected val PI_DARK by lazy { download("PI.DARK.fits", GITHUB_FITS_URL) } + protected val PI_FLAT by lazy { download("PI.FLAT.fits", GITHUB_FITS_URL) } + + protected val PI_FOCUS_0 by lazy { download("PI.FOCUS.0.fits", GITHUB_FITS_URL) } + protected val PI_FOCUS_10000 by lazy { download("PI.FOCUS.10000.fits", GITHUB_FITS_URL) } + protected val PI_FOCUS_20000 by lazy { download("PI.FOCUS.20000.fits", GITHUB_FITS_URL) } + protected val PI_FOCUS_30000 by lazy { download("PI.FOCUS.30000.fits", GITHUB_FITS_URL) } + protected val PI_FOCUS_100000 by lazy { download("PI.FOCUS.100000.fits", GITHUB_FITS_URL) } + private val 
afterEach = AfterEach() init { diff --git a/settings.gradle.kts b/settings.gradle.kts index c9c622236..3a55fd073 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -77,6 +77,7 @@ include(":nebulosa-nasa") include(":nebulosa-netty") include(":nebulosa-nova") include(":nebulosa-phd2-client") +include(":nebulosa-pixinsight") include(":nebulosa-plate-solving") include(":nebulosa-retrofit") include(":nebulosa-sbd")