HLA-1145: New drizzlepac release candidate 3.6.2rc1 (#1685)
Co-authored-by: Nadia Dencheva <[email protected]>
Co-authored-by: Steve Goldman <[email protected]>
Co-authored-by: Zach Burnett <[email protected]>
Co-authored-by: Zach Burnett <[email protected]>
5 people authored Oct 27, 2023
1 parent 59c54bb commit 438c4dd
Showing 42 changed files with 337 additions and 206 deletions.
18 changes: 18 additions & 0 deletions .github/pull_request_template.md
@@ -0,0 +1,18 @@
<!-- If this PR closes a JIRA ticket, make sure the title starts with the JIRA issue number,
for example HLA-1234: <Fix a bug> -->
Resolves [HLA-nnnn](https://jira.stsci.edu/browse/HLA-nnnn)

<!-- If this PR closes a GitHub issue, reference it here by its number -->
Closes #

<!-- describe the changes comprising this PR here -->
This PR addresses ...

**Checklist for maintainers**
- [ ] added entry in `CHANGELOG.rst` within the relevant release section
- [ ] updated or added relevant tests
- [ ] updated relevant documentation
- [ ] added relevant milestone
- [ ] added relevant label(s)
- [ ] ran regression tests, post a link to the Jenkins job below.
[How to run regression tests on a PR](https://github.com/spacetelescope/jwst/wiki/Running-Regression-Tests-Against-PR-Branches)
23 changes: 23 additions & 0 deletions .github/workflows/build.yml
@@ -0,0 +1,23 @@
name: build

on:
  release:
    types: [ released ]
  pull_request:
  workflow_dispatch:

jobs:
  build:
    uses: OpenAstronomy/github-actions-workflows/.github/workflows/publish.yml@v1
    with:
      upload_to_pypi: ${{ (github.event_name == 'release') && (github.event.action == 'released') }}
      targets: |
        # Linux wheels
        - cp3*-manylinux_x86_64
        # MacOS wheels
        - cp3*-macosx_x86_64
        # Until we have arm64 runners, we can't automatically test arm64 wheels
        # - cp3*-macosx_arm64
      sdist: true
    secrets:
      pypi_token: ${{ secrets.PYPI_PASSWORD_STSCI_MAINTAINER }}
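The `targets` entries above are glob-style patterns over wheel build identifiers (cibuildwheel-style names such as `cp311-manylinux_x86_64`). As a rough standalone illustration — not part of the repository, and with a hand-picked, non-exhaustive identifier list — the sketch below uses Python's `fnmatch` to show which identifiers each pattern selects.

```python
# Illustrative only: which cibuildwheel-style identifiers do the target globs match?
# The identifier list below is an assumed sample, not the full build matrix.
from fnmatch import fnmatch

identifiers = [
    "cp39-manylinux_x86_64",
    "cp310-manylinux_x86_64",
    "cp311-manylinux_x86_64",
    "cp311-macosx_x86_64",
    "cp311-macosx_arm64",
]

for pattern in ("cp3*-manylinux_x86_64", "cp3*-macosx_x86_64"):
    matches = [name for name in identifiers if fnmatch(name, pattern)]
    print(f"{pattern} -> {matches}")
```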
1 change: 0 additions & 1 deletion .github/workflows/ci.yml
@@ -42,7 +42,6 @@ jobs:
with:
envs: |
- linux: check-style
- - linux: check-build
test:
uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1
needs: [ crds ]
43 changes: 0 additions & 43 deletions .github/workflows/publish-to-pypi.yml

This file was deleted.

3 changes: 1 addition & 2 deletions .readthedocs.yaml
@@ -25,9 +25,8 @@ conda:
environment: doc/.rtd-environment.yml

python:
- system_packages: false
install:
- method: pip
path: .
extra_requirements:
- - docs
+ - docs
4 changes: 4 additions & 0 deletions CHANGELOG.rst
@@ -21,6 +21,10 @@ number of the code change for that issue. These PRs can be viewed at:
3.6.2rc0 (unreleased)
=====================

+ - Added functionality to allow the use of a two-column poller file. This is used
+   to update the WFPC2 SVM aperture header keywords from the values in the poller
+   file.

- Removed the version restriction on matplotlib. [#1649]

- Forced a preferential order on the final selection of the WCS solution
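To make the new changelog entry concrete: below is a purely hypothetical sketch of what "use a two-column poller file to update WFPC2 SVM aperture header keywords" could look like. The column layout (filename, aperture), the `APERTURE` keyword name, and the example file name are assumptions for illustration, not drizzlepac's actual poller format or API.

```python
# Hypothetical illustration of a two-column poller file (<filename>,<aperture>)
# driving header updates; keyword name and file format are assumed, not drizzlepac's API.
import csv
from astropy.io import fits

def update_aperture_keywords(poller_path):
    with open(poller_path, newline="") as poller:
        for filename, aperture in csv.reader(poller):
            with fits.open(filename.strip(), mode="update") as hdul:
                hdul[0].header["APERTURE"] = aperture.strip()

# update_aperture_keywords("wfpc2_obs.out")  # example call with an assumed poller file name
```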
16 changes: 7 additions & 9 deletions JenkinsfileRT
@@ -22,10 +22,10 @@ bc1.nodetype = 'linux'
bc1.env_vars = ['TEST_BIGDATA=https://bytesalad.stsci.edu/artifactory']
bc1.name = '3.9'
bc1.conda_packages = ['python=3.9']
- bc1.build_cmds = ["pip install numpy astropy codecov pytest-cov ci-watson==0.5",
+ bc1.build_cmds = ["pip install numpy astropy codecov pytest-cov ci-watson",
"pip install --upgrade -e '.[test]'",
"pip freeze"]
- bc1.test_cmds = ["pytest --cov=./ --basetemp=tests_output --junitxml results.xml --bigdata",
+ bc1.test_cmds = ["pytest --cov=./ --basetemp=tests_output --junitxml=results.xml --bigdata",
"codecov"]
bc1.test_configs = [data_config]
bc1.failedFailureThresh = 0
@@ -38,20 +38,18 @@ bc3 = new BuildConfig()
bc3.runtime.add('CFLAGS=-std=gnu99')
bc3.nodetype = 'linux'
bc3.env_vars = ['TEST_BIGDATA=https://bytesalad.stsci.edu/artifactory']
- bc3.name = '3.10-dev'
+ bc3.name = '3.10'
bc3.conda_packages = ['python=3.10']
- bc3.build_cmds = ["pip install numpy astropy codecov pytest-cov ci-watson==0.5",
- "pip install -r requirements-dev.txt --upgrade -e '.[test]'",
+ bc3.build_cmds = ["pip install numpy astropy ci-watson",
+ "pip install --upgrade -e '.[test]'",
"pip freeze"]
bc3.test_cmds = ["pytest --cov=./ --basetemp=tests_output --bigdata",
"codecov"]
bc3.test_configs = [data_config]

bc4 = utils.copy(bc3)
- bc4.name = '3.11-dev'
+ bc4.name = '3.11'
bc4.conda_packages = ['python=3.11']

// Iterate over configurations that define the (distributed) build matrix.
// Spawn a host (or workdir) for each combination and run in parallel.
// Also apply the job configuration defined in `jobconfig` above.
- utils.run([bc1, bc3, bc4, jobconfig])
+ utils.run([bc1, bc2, bc3, bc4, jobconfig])
8 changes: 2 additions & 6 deletions README.md
@@ -1,7 +1,6 @@
# Drizzlepac

- [![Build Status](https://dev.azure.com/spacetelescope/drizzlepac/_apis/build/status/spacetelescope.drizzlepac?branchName=master)](https://dev.azure.com/spacetelescope/drizzlepac/_build/latest?definitionId=2&branchName=master)
- [![Build Status](https://ssbjenkins.stsci.edu/job/STScI/job/drizzlepac/job/master/badge/icon)](https://ssbjenkins.stsci.edu/job/STScI/job/drizzlepac/job/master/)
+ [![Build Status](https://github.com/spacetelescope/drizzlepac/actions/workflows/ci.yml/badge.svg)](https://github.com/spacetelescope/drizzlepac/actions/workflows/ci.yml)
[![Documentation Status](https://readthedocs.org/projects/drizzlepac/badge/?version=latest)](http://drizzlepac.readthedocs.io/en/latest/?badge=latest)
[![codecov](https://codecov.io/gh/spacetelescope/drizzlepac/branch/master/graph/badge.svg)](https://codecov.io/gh/spacetelescope/drizzlepac)
[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3743274.svg)](https://doi.org/10.5281/zenodo.3743274)
@@ -23,10 +22,7 @@ can be found at:

## Conda (Recommended)

- ```bash
- $ conda config --add channels http://ssb.stsci.edu/astroconda
- $ conda create -n astroconda stsci
- ```
+ `Drizzlepac` is installed when you install the `stenv` conda environment (a replacement for `astroconda`). Select your desired release and follow the instructions on the [`stenv` installation page](https://stenv.readthedocs.io/en/latest/getting_started.html).

## From Source

45 changes: 44 additions & 1 deletion doc/source/getting_started/installation.rst
@@ -1,4 +1,47 @@
Installation
------------

- *Coming soon!*
Conda (Recommended)
===================

``Drizzlepac`` is installed when you install the ``stenv`` conda environment (a replacement for ``astroconda``). Select your desired release and follow the instructions on the `installation page <https://stenv.readthedocs.io/en/latest/getting_started.html>`_.


From Source
===========

Clone this repository
*********************
.. code-block:: shell

    git clone https://github.com/spacetelescope/drizzlepac
    cd drizzlepac

Build the documentation
=======================

*Note:* If you intend to use ``drizzlepac``'s embedded help feature from within
an interactive ``python`` or ``ipython`` session, we recommend you do not skip
this step.


.. code-block:: shell

    cd doc/
    make html

Install drizzlepac
==================

.. code-block:: shell

    pip install .

The option ``--no-use-pep517`` MAY be required in order to correctly build
the C extensions with ``pip`` versions up to 22.2, after commenting out
the ``build-backend`` from the ``pyproject.toml`` config file.

**Support for installing using `pip` is still evolving, so use of this
command is provided on an experimental basis for now.**
6 changes: 3 additions & 3 deletions drizzlepac/align.py
@@ -367,7 +367,7 @@ def perform_align(
hdr0 = fits.getheader(imglist[0])
inst = hdr0.get("instrume")
if inst.lower() == "wfpc2" and "detector" not in hdr0:
det = "wfpc2"
det = "pc"
else:
det = hdr0.get("detector")
apars = get_default_pars(inst, det)
@@ -961,7 +961,7 @@ def determine_fit_quality(
if catalogs_remaining:
log.warning(
"Not enough cross matches found between astrometric"
" catalog and sources found in {}".format(image_name)
" catalog and sources found in {} ()".format(image_name, num_xmatches)
)
continue

@@ -1158,7 +1158,7 @@ def determine_fit_quality(
if not overall_valid:
log.info("The fit solution for some or all of the images is not valid.")
if max_rms_val > auto_good_rms or not overall_valid:
log.info("Try again with the next catalog")
log.info("Trying again with the next catalog, method, or geometry depending upon the current fitting cycle.")
else:
log.info("Fit calculations successful.")

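For context on the reworded log message above: when a fit fails the RMS or validity checks, alignment is retried with the next reference catalog, fit method, or fit geometry. The sketch below is a minimal, hypothetical rendering of that control flow; the names (`catalogs`, `methods`, `try_fit`) and the threshold are illustrative, not drizzlepac's API.

```python
# Hypothetical sketch of the retry strategy the log messages describe:
# keep trying catalog/method combinations until one passes the quality checks.
def align_with_retries(catalogs, methods, try_fit, max_rms=10.0):
    for catalog in catalogs:
        for method in methods:
            rms, valid = try_fit(catalog, method)
            if valid and rms <= max_rms:
                return catalog, method, rms   # first acceptable fit wins
    return None                               # every combination failed the checks
```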
10 changes: 5 additions & 5 deletions drizzlepac/hapsequencer.py
@@ -94,7 +94,7 @@
"SVM_CATALOG_WFC": 'on',
"SVM_CATALOG_UVIS": 'on',
"SVM_CATALOG_IR": 'on',
"SVM_CATALOG_WFPC2": 'on'}
"SVM_CATALOG_PC": 'on'}
envvar_cat_str = "SVM_CATALOG_{}"

# --------------------------------------------------------------------------------------------------------------
@@ -122,7 +122,7 @@ def create_catalog_products(total_obj_list, log_level, diagnostic_mode=False, ph
Specify which, if any, catalogs should be generated at all, based on detector. This dictionary
needs to contain values for all instruments; namely:
- SVM_CATALOG_HRC, SVM_CATALOG_SBC, SVM_CATALOG_WFC, SVM_CATALOG_UVIS, SVM_CATALOG_IR, SVM_CATALOG_WFPC2
+ SVM_CATALOG_HRC, SVM_CATALOG_SBC, SVM_CATALOG_WFC, SVM_CATALOG_UVIS, SVM_CATALOG_IR, SVM_CATALOG_PC
These variables can be defined with values of 'on'/'off'/'yes'/'no'/'true'/'false'.
@@ -765,10 +765,10 @@ def run_align_to_gaia(tot_obj, log_level=logutil.logging.INFO, diagnostic_mode=F
for exp_obj in tot_obj.edp_list:
if gaia_obj is None:
prod_list = exp_obj.info.split("_")
- prod_list[4] = "metawcs"
+ prod_list[5] = "metawcs"
gaia_obj = product.FilterProduct(prod_list[0], prod_list[1], prod_list[2],
prod_list[3], prod_list[4], "all",
prod_list[5][0:3], log_level)
prod_list[3], prod_list[4], prod_list[5], "all",
prod_list[6][0:3], log_level)
gaia_obj.configobj_pars = tot_obj.configobj_pars
gaia_obj.add_member(exp_obj)

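The `create_catalog_products` docstring in this diff lists the per-detector environment switches (`SVM_CATALOG_HRC` through `SVM_CATALOG_PC`) and the accepted values 'on'/'off'/'yes'/'no'/'true'/'false'. Below is a small, hypothetical sketch of how such a switch could be read; the helper name and default are assumptions, not the actual drizzlepac implementation.

```python
# Illustrative only: interpreting an SVM_CATALOG_<detector> switch from the environment.
import os

_TRUE_VALUES = {"on", "yes", "true"}

def catalog_enabled(detector, default="on"):
    value = os.environ.get(f"SVM_CATALOG_{detector.upper()}", default)
    return value.strip().lower() in _TRUE_VALUES

# catalog_enabled("pc") is True unless SVM_CATALOG_PC is set to 'off', 'no', or 'false'.
```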
2 changes: 1 addition & 1 deletion drizzlepac/haputils/align_utils.py
@@ -174,7 +174,7 @@ def __init__(self, input_list, clobber=False, dqname='DQ', process_type='',
hdr0 = fits.getheader(img)
instrume = hdr0.get('instrume')
if instrume.lower() == 'wfpc2' and 'detector' not in hdr0:
- detector = 'WFPC2'
+ detector = 'PC'
else:
detector = hdr0.get('detector')

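This change and the matching one in `drizzlepac/align.py` both fall back to the PC chip when a WFPC2 primary header carries no `DETECTOR` keyword, so detector-specific parameters can still be resolved. A minimal self-contained sketch of that fallback follows; the helper name and example file name are hypothetical.

```python
# Minimal sketch of the WFPC2 detector fallback shown in the surrounding diffs;
# helper and file names are hypothetical.
from astropy.io import fits

def resolve_detector(filename):
    hdr0 = fits.getheader(filename)                  # primary header
    instrument = (hdr0.get("instrume") or "").lower()
    if instrument == "wfpc2" and "detector" not in hdr0:
        return "pc"                                  # fall back to the PC chip defaults
    return hdr0.get("detector")

# detector = resolve_detector("u9x10101m_c0m.fits")  # assumed example file name
```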
17 changes: 9 additions & 8 deletions drizzlepac/haputils/astrometric_utils.py
@@ -167,6 +167,7 @@
'mag': 'mag', 'objID': 'objID', 'epoch': 'epoch'},
}

+ log.info(f'ASTROMETRIC_CATALOG_URL = {SERVICELOCATION}')

# CRBIT definitions
CRBIT = 4096
@@ -181,7 +182,7 @@ def create_astrometric_catalog(inputs, catalog="GAIAedr3", output="ref_cat.ecsv",
gaia_only=False, table_format="ascii.ecsv",
existing_wcs=None, num_sources=None,
use_footprint=False, full_catalog=False,
- user_epoch='match', log_level=logutil.logging.NOTSET):
+ user_epoch='match', log_level=logutil.logging.INFO):
"""Create an astrometric catalog that covers the inputs' field-of-view.
This function will return a table containing sources derived from the
@@ -451,7 +452,7 @@ def get_catalog(ra, dec, sr=0.1, epoch=None, catalog='GSC241'):
spec = base_spec + epoch_str.format(epoch) if epoch else base_spec

serviceUrl = '{}/{}?{}'.format(SERVICELOCATION, serviceType, spec)
log.debug("Getting catalog using: \n {}".format(serviceUrl))
log.info(f"Getting catalog using: \n {serviceUrl}")
rawcat = requests.get(serviceUrl, headers=headers)
r_contents = rawcat.content.decode() # convert from bytes to a String
rstr = r_contents.split('\r\n')
@@ -460,7 +461,7 @@ def get_catalog(ra, dec, sr=0.1, epoch=None, catalog='GSC241'):
if rstr[0].startswith('Error'):
# Try again without EPOCH
serviceUrl = '{}/{}?{}'.format(SERVICELOCATION, serviceType, base_spec)
log.debug("Getting catalog using: \n {}".format(serviceUrl))
log.warning(f"Problem accessing catalog service - getting catalog using: \n {serviceUrl}")
rawcat = requests.get(serviceUrl, headers=headers)
r_contents = rawcat.content.decode() # convert from bytes to a String
rstr = r_contents.split('\r\n')
@@ -522,7 +523,7 @@ def get_catalog_from_footprint(footprint, epoch=None, catalog='GSC241'):
spec = base_spec + epoch_str.format(epoch) if epoch else base_spec

serviceUrl = '{}/{}?{}'.format(SERVICELOCATION, serviceType, spec)
log.debug("Getting catalog using: \n {}".format(serviceUrl))
log.info(f"Getting catalog from footprint using: \n {serviceUrl}")

rawcat = requests.get(serviceUrl, headers=headers)
r_contents = rawcat.content.decode() # convert from bytes to a String
@@ -532,7 +533,7 @@ def get_catalog_from_footprint(footprint, epoch=None, catalog='GSC241'):
if rstr[0].startswith('Error'):
# Try again without EPOCH
serviceUrl = '{}/{}?{}'.format(SERVICELOCATION, serviceType, base_spec)
log.debug("Getting catalog using: \n {}".format(serviceUrl))
log.warning(f"Problem accessing catalog service - getting catalog from footprint using: \n {serviceUrl}")
rawcat = requests.get(serviceUrl, headers=headers)
r_contents = rawcat.content.decode() # convert from bytes to a String
rstr = r_contents.split('\r\n')
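The log-level changes in `get_catalog` and `get_catalog_from_footprint` above sit inside the same query/retry pattern: the catalog web service is queried with an epoch-qualified spec, and if the first response row starts with `Error`, the request is repeated without the epoch term. The sketch below is a stripped-down rendering of that pattern; the function name, the epoch query term, and the omission of request headers are assumptions, not the module's exact implementation.

```python
# Stripped-down sketch of the query-then-retry-without-epoch pattern visible in the
# surrounding hunks; the epoch query term and function name are illustrative.
import requests

def query_catalog(servicelocation, service_type, base_spec, epoch=None, timeout=60):
    spec = f"{base_spec}&EPOCH={epoch}" if epoch is not None else base_spec
    url = f"{servicelocation}/{service_type}?{spec}"
    rows = requests.get(url, timeout=timeout).content.decode().split("\r\n")
    if rows and rows[0].startswith("Error") and epoch is not None:
        # Retry without the epoch term, as the new warning message describes.
        url = f"{servicelocation}/{service_type}?{base_spec}"
        rows = requests.get(url, timeout=timeout).content.decode().split("\r\n")
    return rows
```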
@@ -719,7 +720,7 @@ def compute_2d_background(imgarr, box_size, win_size,

def build_auto_kernel(imgarr, whtarr, fwhm=3.0, threshold=None, source_box=7,
good_fwhm=[1.0, 4.0], num_fwhm=30,
- isolation_size=11, saturation_limit=70000., log_level=logutil.logging.NOTSET):
+ isolation_size=11, saturation_limit=70000., log_level=logutil.logging.INFO):
"""Build kernel for use in source detection based on image PSF
This algorithm looks for an isolated point-source that is non-saturated to use as a template
for the source detection kernel. Failing to find any suitable sources, it will return a
@@ -898,7 +899,7 @@ def find_fwhm(psf, default_fwhm):

def extract_point_sources(img, dqmask=None, fwhm=3.0, kernel=None,
nbright=1000,
- threshold=200.0, sigma=3.0, source_box=7, log_level=logutil.logging.NOTSET):
+ threshold=200.0, sigma=3.0, source_box=7, log_level=logutil.logging.INFO):
"""Use photutils to replicate the IRAF point-source catalogs"""
# Initialize logging for this user-callable function
log.setLevel(log_level)
@@ -961,7 +962,7 @@ def extract_sources(img, dqmask=None, fwhm=3.0, kernel=None, photmode=None,
dao_nsigma=3.0, source_box=7,
classify=True, centering_mode="starfind", nlargest=None,
outroot=None, plot=False, vmax=None, deblend=False,
- log_level=logutil.logging.NOTSET):
+ log_level=logutil.logging.INFO):
"""Use photutils to find sources in image based on segmentation.
Parameters
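Several hunks above change user-callable functions' default `log_level` from `logging.NOTSET` to `logging.INFO`. As general background (standard-library `logging`, not drizzlepac's `logutil`), a logger set to `NOTSET` defers to its parent's effective level — `WARNING` under the default root configuration — so `info` messages are silently dropped, while an explicit `INFO` default lets them through:

```python
# Standard-library illustration (not drizzlepac code) of NOTSET vs. INFO defaults.
import logging

logging.basicConfig(level=logging.WARNING)   # typical default root configuration

deferred = logging.getLogger("deferred")
deferred.setLevel(logging.NOTSET)            # inherits the root's WARNING level
deferred.info("hidden: effective level is WARNING")

explicit = logging.getLogger("explicit")
explicit.setLevel(logging.INFO)              # explicit INFO, like the new defaults
explicit.info("shown: INFO messages now reach the handler")
```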