Merge pull request #27 from ACTCollaboration/dev
Dev
mattyowl authored Oct 19, 2023
2 parents 1c0f16d + 4c06c35 commit 53f984b
Showing 6 changed files with 156 additions and 45 deletions.
16 changes: 9 additions & 7 deletions bin/zCluster
@@ -305,25 +305,25 @@ def runOnCatalog(catalog, retriever, retrieverOptions, photoRedshiftEngine, outD
gobj['r-z']=gobj['r']-gobj['z']

wantedKeys=['id', 'RADeg', 'decDeg', 'u', 'uErr', 'g', 'gErr', 'r', 'rErr', 'i',
'iErr', 'z', 'zErr', 'Ks', 'KsErr', 'zPhot', 'odds', 'r-i', 'r-z']
if os.path.exists(objOutDir+os.path.sep+"galaxyCatalog_%s.fits" % (obj['name'].replace(" ", "_"))) == True:
os.remove(objOutDir+os.path.sep+"galaxyCatalog_%s.fits" % (obj['name'].replace(" ", "_")))
'iErr', 'z', 'zErr', 'Ks', 'KsErr', 'w1', 'w1Err', 'w2', 'w2Err', 'r-i', 'r-z', 'zPhot', 'odds']
tab=atpy.Table()
for key in wantedKeys:
arr=[]
allSentinelVals=True
for gobj in galaxyCatalog:
try:
arr.append(gobj[key])
allSentinelVals=False
except:
arr.append(99)
tab.add_column(atpy.Column(np.array(arr), key))
if allSentinelVals == False:
tab[key]=arr
# NOTE: to cut down on disk space this takes, include only galaxies within some radius
# Chosen one is just over 1.5 Mpc at z = 0.1
tab.add_column(atpy.Column(astCoords.calcAngSepDeg(obj['RADeg'], obj['decDeg'],
np.array(tab['RADeg']), np.array(tab['decDeg'])), "rDeg"))
tab['rDeg']=astCoords.calcAngSepDeg(obj['RADeg'], obj['decDeg'], tab['RADeg'].data, tab['decDeg'].data)
#tab=tab[np.where(tab['rDeg'] < 14./60.0)]
tab.table_name="zCluster"
tab.write(objOutDir+os.path.sep+"galaxyCatalog_%s.fits" % (obj['name'].replace(" ", "_")))
tab.write(objOutDir+os.path.sep+"galaxyCatalog_%s.fits" % (obj['name'].replace(" ", "_")), overwrite = True)
try:
catalogs.catalog2DS9(galaxyCatalog,
objOutDir+os.path.sep+"galaxyCatalog_%s.reg" % (obj['name'].replace(" ", "_")),
@@ -637,6 +637,8 @@ if __name__ == '__main__':

if cacheDir is not None:
retrieverOptions['altCacheDir']=cacheDir

retrieverOptions['fetchAndCacheOnly']=args.fetchAndCacheOnly

photoRedshiftEngine=PhotoRedshiftEngine.PhotoRedshiftEngine(absMagCut, passbandSet = passbandSet,
ZPError = ZPError, ZPOffsets = ZPOffsets,
15 changes: 15 additions & 0 deletions bin/zField
@@ -16,6 +16,7 @@ from zCluster import retrievers
from zCluster import PhotoRedshiftEngine
from zCluster import clusters
from zCluster import catalogs
import pickle
import urllib
import time

@@ -77,6 +78,10 @@ def makeParser():
masses will be estimated at the given redshift. Otherwise, the stellar masses will be\
estimated at the maximum likelihood redshift of each galaxy (although that isn't\
actually implemented yet).")
parser.add_argument("-p", "--pickle-galaxy-catalog", dest="pickleGalaxyCatalog", default = False,
action = "store_true", help = "If given, pickle the galaxy catalog after estimating\
photo-zs. Use this if you want to e.g. read p(z) for each galaxy from disk for some\
other analysis.")

return parser

@@ -162,6 +167,16 @@ if __name__ == '__main__':
if args.stellarMassModelDir is not None:
photoRedshiftEngine.estimateStellarMasses(galaxyCatalog, args.stellarMassModelDir, z = z)

# Dump pickle file for p(z), optional
if args.pickleGalaxyCatalog is True:
pickleFileName=args.outDir+os.path.sep+"galaxyCatalog.pkl"
with open(pickleFileName, "wb") as pickleFile:
pickler=pickle.Pickler(pickleFile)
pickler.dump(galaxyCatalog)
# with open(pickleFileName, "rb") as pickleFile:
# unpickler=pickle.Unpickler(pickleFile)
# galaxyCatalog=unpickler.load()

# Write catalog and region file
wantedKeys=['id', 'RADeg', 'decDeg', 'zPhot', 'odds']
if args.stellarMassModelDir is not None:
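As a usage note for the new --pickle-galaxy-catalog option: a minimal sketch of reading the pickled catalog back in a separate analysis script. The output directory name and the per-galaxy p(z) key ('pz') are illustrative assumptions, not defined in this diff; only the galaxyCatalog.pkl filename and the list-of-dictionaries structure follow the code above.

import os
import pickle

outDir = "zFieldOutput"  # hypothetical output directory used for the zField run
pickleFileName = outDir + os.path.sep + "galaxyCatalog.pkl"

with open(pickleFileName, "rb") as pickleFile:
    unpickler = pickle.Unpickler(pickleFile)
    galaxyCatalog = unpickler.load()  # list of per-galaxy dictionaries

for gobj in galaxyCatalog[:5]:
    # 'zPhot' is the maximum likelihood redshift; the full p(z) array is
    # assumed here to be stored under 'pz' and may be named differently
    print(gobj['id'], gobj['RADeg'], gobj['decDeg'], gobj['zPhot'], 'pz' in gobj)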
26 changes: 26 additions & 0 deletions setup.cfg
@@ -1,3 +1,21 @@
[metadata]
name = zCluster
author = Matt Hilton + zCluster Contributors
author_email = [email protected]
url = https://github.com/ACTCollaboration/zCluster
description = A code for measuring galaxy cluster photometric redshifts
long_description = file: README.rst
keywords = astrophysics
license = GPL v3
classifiers =
Programming Language :: Python :: 3
Environment :: Console
Intended Audience :: Science/Research
License :: OSI Approved :: GNU General Public License v3 (GPLv3)
Natural Language :: English
Operating System :: POSIX
Topic :: Scientific/Engineering :: Astronomy

[versioneer]
VCS = git
style = pep440
@@ -6,3 +24,11 @@ versionfile_build = zCluster/_version.py
tag_prefix = v
parentdir_prefix = zCluster-

[options]
install_requires =
astropy >= 4.0
numpy >= 1.19
matplotlib >= 2.0
astLib >= 0.11.10
scipy >= 1.0
requests
23 changes: 1 addition & 22 deletions setup.py
@@ -4,35 +4,14 @@
import glob
from setuptools import setup
from setuptools import Extension
from setuptools.command.install import install
import stat
#from Cython.Distutils import build_ext
import numpy
import versioneer

cmdclass=versioneer.get_cmdclass()
#cmdclass['build_ext']=build_ext

setup(name='zCluster',
version=versioneer.get_version(),
cmdclass=cmdclass,
url="https://github.com/ACTCollaboration/zCluster",
author='Matt Hilton + zCluster contributors',
author_email='[email protected]',
classifiers=[],
description='A code for measuring galaxy cluster photometric redshifts.',
long_description="""A code for measuring galaxy cluster photometric redshifts. Runs on both large scale
public survey data (e.g., SDSS) and user-supplied photometric catalogs.""",
author_email='[email protected]',
packages=['zCluster'],
package_data={'zCluster': ['data/*', 'SED/CWW/*', 'SED/BR07/*', 'SED/EAZY_v1.0/*',
'passbands/*']},
scripts=['bin/zCluster', 'bin/zField', 'bin/zClusterBCG', 'bin/zClusterComparisonPlot'],
#ext_modules=[Extension("zClusterCython", ["zCluster/zClusterCython.pyx"], include_dirs=[numpy.get_include()])],
install_requires=["astropy >= 4.0",
"numpy >= 1.19",
"matplotlib >= 2.0",
"astLib >= 0.11.7",
"scipy >= 1.0",
#"cython",
"requests"]
)
37 changes: 24 additions & 13 deletions zCluster/PhotoRedshiftEngine.py
@@ -24,7 +24,12 @@ class PhotoRedshiftEngine:
"""

def __init__(self, absMagCut, passbandSet = 'SDSS+Ks', zMin = 0.01, zMax = 3.0, zStep = 0.01,
ZPError = 0.0, ZPOffsets = None, templatesDir = None):
ZPError = 0.0, ZPOffsets = None, templatesDir = None, EBMinusVList = [0.0],
emLinesScaleList = [0.0]):
# EBMinusVList = [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.4, 0.5],
# emLinesScaleList = [0.0, 1.0, 2.0, 4.0, 6.0, 8.0, 10.0]):
# EBMinusVList = list(np.linspace(0, 0.48, 13))):
# EBMinusVList = list(np.linspace(0, 1.5, 16)),):
"""Sets up the stuff we would otherwise calculate every time, i.e., the templates.
"""
@@ -133,25 +138,31 @@ def __init__(self, absMagCut, passbandSet = 'SDSS+Ks', zMin = 0.01, zMax = 3.0,
self.templateIndex=unpickler.load()
self.modelFlux2=self.modelFlux**2
else:
self.numModels=len(self.SEDFiles)
self.numModels=len(self.SEDFiles)*len(EBMinusVList)*len(emLinesScaleList)
i=0
t=0
self.templateIndex=[]
for f in self.SEDFiles:
s=astSED.SED()
s.loadFromFile(f)
t=t+1
for z in self.zRange:
s.redshift(z)
modelSEDDict=s.getSEDDict(self.passbandsList)
modelSEDDict['E(B-V)']=None
modelSEDDict['ageGyr']=0.0
modelSEDDict['z']=z
modelSEDDict['fileName']=f
modelSEDDict['modelListIndex']=i
modelSEDDict['SED']=s.copy()
self.modelSEDDictList.append(modelSEDDict)
self.templateIndex.append(t)
for emScaleFactor in emLinesScaleList:
if emScaleFactor > 0:
s.addEmissionLines(emScaleFactor)
for EBMinusV in EBMinusVList:
if EBMinusV > 0:
s.extinctionCalzetti(EBMinusV) # modifies z0flux, so this should be okay
for z in self.zRange:
s.redshift(z)
modelSEDDict=s.getSEDDict(self.passbandsList)
modelSEDDict['E(B-V)']=EBMinusV
modelSEDDict['ageGyr']=0.0
modelSEDDict['z']=z
modelSEDDict['fileName']=f
modelSEDDict['modelListIndex']=i
# modelSEDDict['SED']=s.copy() # Only add this if we need [e.g. rest frame colours] - uses lots of RAM
self.modelSEDDictList.append(modelSEDDict)
self.templateIndex.append(t)
i=i+1
del s
self.templateIndex=np.array(self.templateIndex)
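A minimal sketch of how the new EBMinusVList and emLinesScaleList arguments expand the template grid: every SED file is duplicated for each combination of E(B-V) and emission-line scaling, so numModels = len(SEDFiles) * len(EBMinusVList) * len(emLinesScaleList). The absMagCut value and list entries below are illustrative only; the defaults ([0.0] for both lists) reproduce the previous one-model-per-SED-file behaviour.

from zCluster import PhotoRedshiftEngine

engine = PhotoRedshiftEngine.PhotoRedshiftEngine(
    -24.0,                           # absMagCut (example value)
    passbandSet = 'SDSS+Ks',
    EBMinusVList = [0.0, 0.1, 0.2],  # Calzetti-reddened copies of each template
    emLinesScaleList = [0.0, 2.0])   # emission-line boosted copies

# With e.g. 10 SED files, the grid now holds 10 x 3 x 2 = 60 models,
# each redshifted over the full zRange when the model SED dictionaries are built.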
84 changes: 81 additions & 3 deletions zCluster/retrievers.py
@@ -19,6 +19,10 @@
import time
import zCluster
import requests
try:
from dl import queryClient as qc
except:
print("WARNING: Failed to import dl module - retrievers that use NOAO Data Lab will not work.")
from astropy.io.votable import parse_single_table

#-------------------------------------------------------------------------------------------------------------
@@ -111,7 +115,11 @@ def getRetriever(database, maxMagError = 0.2):
elif database == 'DELVEDR2':
retriever=DELVEDR2Retriever
retrieverOptions={'maxMagError': maxMagError}
elif database.find("DECaLS") != -1:
elif database == 'DL_DECaLSDR10':
retriever=DL_DECaLSDR10Retriever
passbandSet='DECaLS'
retrieverOptions={'maxMagError': maxMagError}
elif database.find("DECaLS") != -1 and database != 'DECaLSDR10DL':
if database == "DECaLS":
raise Exception("Specify either 'DECaLSDR8', 'DECaLSDR9', or 'DECaLSDR10' instead of DECaLS")
if database == 'DECaLSDR8':
@@ -924,7 +932,7 @@ def SDSSRetriever(RADeg, decDeg, halfBoxSizeDeg = 18.0/60.0, DR = 7, optionsDict
# For some reason, SDSS changed their whole web API in ~May 2016 without calling it a new DR
#url='http://skyserver.sdss.org/dr12/en/tools/search/x_sql.aspx'
#url='http://skyserver.sdss.org/dr12/en/tools/search/x_results.aspx'
url='http://skyserver.sdss.org/dr12/en/tools/search/x_results.aspx?searchtool=SQL&TaskName=Skyserver.Search.SQL&syntax=NoSyntax&ReturnHtml=false&'
url='https://skyserver.sdss.org/dr12/en/tools/search/x_results.aspx?searchtool=SQL&TaskName=Skyserver.Search.SQL&syntax=NoSyntax&ReturnHtml=false&'
outFileName=cacheDir+os.path.sep+"SDSSDR12_%.4f_%.4f_%.4f.csv" % (RADeg, decDeg, halfBoxSizeDeg)
lineSkip=2

@@ -988,7 +996,7 @@ def SDSSRetriever(RADeg, decDeg, halfBoxSizeDeg = 18.0/60.0, DR = 7, optionsDict
try:
response=urllib.request.urlopen(url+'%s' % (params))
except:
print("Network down? Waiting 30 sec...")
print("Network down? Waiting 30 sec... - if this persists, probably the query URL has changed.")
time.sleep(30)

# Some faffing about here because of python2 -> python3
@@ -1261,6 +1269,76 @@ def DECaLSDR10Retriever(RADeg, decDeg, halfBoxSizeDeg = 36.0/60.0, DR = None, op

return stuff

#-------------------------------------------------------------------------------------------------------------
def DL_DECaLSDR10Retriever(RADeg, decDeg, halfBoxSizeDeg = 36.0/60.0, DR = None, optionsDict = {}):
"""DECaLS DR10 retriever, using NOAO datalab.
"""

makeCacheDir()
if 'altCacheDir' in list(optionsDict.keys()):
cacheDir=optionsDict['altCacheDir']
else:
cacheDir=CACHE_DIR
if os.path.exists(cacheDir) == False:
os.makedirs(cacheDir, exist_ok = True)

outFileName=cacheDir+os.path.sep+"DL_DECaLSDR10_%.4f_%.4f_%.2f.fits" % (RADeg, decDeg, halfBoxSizeDeg)
if os.path.exists(outFileName) == False:
RAMin, RAMax, decMin, decMax=astCoords.calcRADecSearchBox(RADeg, decDeg, halfBoxSizeDeg)
try:
result=qc.query(sql='select objid, ra, dec, dered_mag_g, dered_mag_r, dered_mag_i, dered_mag_z, dered_mag_w1, dered_mag_w2,\
snr_g, snr_r, snr_i, snr_z, snr_w1, snr_w2, type, maskbits from ls_dr10.tractor where\
RA BETWEEN %.6f AND %.6f AND DEC BETWEEN %.6f and %.6f' % (RAMin, RAMax, decMin, decMax),
fmt = 'table')
result.write(outFileName, overwrite = True)
except:
result=None
print("... WARNING: datalab query failed to get %s" % (outFileName))
else:
if 'fetchAndCacheOnly' in optionsDict.keys() and optionsDict['fetchAndCacheOnly'] == True:
print("... already retrieved: %s ..." % (outFileName))
return None
print("... reading from cache: %s ..." % (outFileName))
result=atpy.Table().read(outFileName)

if result is None:
return None

# DECaLS redshifts go very wrong when there are stars bright in W1, W2 in the vicinity
# This should fix - we'll also throw out PSF-shaped sources too
tab=result
if len(tab) == 0:
return None
tab=tab[tab['maskbits'] != 2**1]
tab=tab[tab['maskbits'] < 2**11]
tab=tab[np.where(tab['type'] != 'PSF')]
tab=tab[np.where(tab['type'] != 'PSF ')] # Trailing space - probably not an issue on datalab

# WISE fluxes are available... i-band added in DECaLS DR10
bands=['g', 'r', 'i', 'z', "w1", "w2"]
catalog=[]
for row in tab:
photDict={}
photDict['id']=row['objid']
photDict['RADeg']=row['ra']
photDict['decDeg']=row['dec']
for b in bands:
if row['snr_%s' % (b)] > 0:
photDict[b]=row['dered_mag_%s' % (b)]
photDict[b+"Err"]=1/row['snr_%s' % (b)]
else:
photDict[b]=99.0
photDict[b+'Err']=99.0
if 'maxMagError' in list(optionsDict.keys()):
keep=checkMagErrors(photDict, optionsDict['maxMagError'], bands = bands)
else:
keep=True
if keep == True:
catalog.append(photDict)

return catalog

#-------------------------------------------------------------------------------------------------------------
def DELVEDR2Retriever(RADeg, decDeg, halfBoxSizeDeg = 36.0/60.0, DR = None, optionsDict = {}):
"""DELVE DR2 retriever, using NOAO datalab.
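A minimal sketch of calling the new retriever directly, including the fetchAndCacheOnly option that bin/zCluster now passes through in retrieverOptions. The coordinates and option values are illustrative; the signature and the returned list of photometry dictionaries follow the code added above. The initial query requires the datalab (dl) package to be importable.

from zCluster import retrievers

RADeg, decDeg = 150.0, 2.2  # example position

# Pre-fetch pass: positions that are already cached return None immediately
# instead of being re-read, so this can be looped cheaply over a target list
retrievers.DL_DECaLSDR10Retriever(RADeg, decDeg,
                                  optionsDict = {'maxMagError': 0.2,
                                                 'fetchAndCacheOnly': True})

# Normal pass: reads the cached FITS table and returns a list of dicts with
# keys 'id', 'RADeg', 'decDeg', 'g', 'gErr', ..., 'w2', 'w2Err'
catalog = retrievers.DL_DECaLSDR10Retriever(RADeg, decDeg,
                                            optionsDict = {'maxMagError': 0.2})
if catalog is not None:
    print(len(catalog), catalog[0]['r'], catalog[0]['rErr'])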
