Commit 21afff81 authored by Mohammad Akhlaghi's avatar Mohammad Akhlaghi

NoiseChisel on MUSE images for astrometry

Until now NoiseChisel was run on the HST images and the segmentation
map was used for the comparisons of both magnitude and
astrometry. While this is accurate for the former, it is not so good
for the latter, especially as the targets become fainter. The shape of
the segmentation map (from HST) will bias the shallower MUSE
astrometry. So for astrometry, we did the opposite: doing detection
on MUSE and using its segmentation map on both MUSE and HST. The final
comparison catalogs are now made with this commit and we can go onto
the statistics.
parent a5d7f860
......@@ -62,9 +62,12 @@ all: $(BSYM) description.pdf
# -----
#
# Note that we cannot simply include `reproduce/src/*.mk'. Because the
# order of reading them into Make actually matters in some cases.
include $(foreach m, preparations download degrade-hst input-cutouts \
catalog statistics description, reproduce/make/$(m).mk)
# order of reading them into Make actually matters in the case of
# variables especially. The names are mostly descriptive and following
# these makefiles in the same order that they are defined here can
# help in understanding the full process.
include $(foreach m, preparations download hst muse catalog statistics \
description, reproduce/make/$(m).mk)
......@@ -86,4 +89,4 @@ include $(foreach m, preparations download degrade-hst input-cutouts \
#
# To clean the outputs if necessary.
clean:
rm -rf $(BDIR)/* $(BSYM) tikz tex/pipeline.tex
rm -rf $(BDIR)/* $(BSYM) tikz tex/pipeline.tex *.aux *.bbl
# Broad-band photometry checks with MUSE generated broad-band images.
#
# Make a segmentation map and generate catalogs for each region.
# Generate the necessary catalogs for each region.
#
# Original author:
# Mohammad Akhlaghi <mohammad.akhlaghi@univ-lyon1.fr>
......@@ -36,7 +36,8 @@
ssdir = $(BDIR)/sky-and-std
skys-and-stds = $(foreach uid, 1 2 3 4 5 6 7 8 9 10, \
$(ssdir)/udf$(uid)-sky.fits)
$(skys-and-stds): $(ssdir)/%-sky.fits: $(segdir)/%-f606w.fits | $(ssdir)
genssname = $(ssdir)/$(word 1, $(subst -, ,$(1)))-$(2).fits
$(skys-and-stds): $(ssdir)/%-sky.fits: $(hsegdir)/%-f606w.fits | $(ssdir)
# The segmentation map is only 0 and positive values, so if we
# check the equality with a negative value, all the pixels
......@@ -49,8 +50,13 @@ $(skys-and-stds): $(ssdir)/%-sky.fits: $(segdir)/%-f606w.fits | $(ssdir)
# Generate the MUSE and HST catalogs
# ----------------------------------
# Magnitude catalogs
# ------------------
#
# For the magnitude comparison we are using a segmentation map from
# the HST images (to go to fainter magnitudes). For the
# astrometry/position comparison (another rule) we will be using
# segmentation maps defined by MUSE.
#
# The MUSE zeropoint values are calculated based on the following:
#
......@@ -82,16 +88,51 @@ $(skys-and-stds): $(ssdir)/%-sky.fits: $(segdir)/%-f606w.fits | $(ssdir)
# generated for both HST and MUSE in each field and filter, also all
# filters of one field have the same Sky file name. These functions
# are used to generate the proper name from the output's name.
catdir = $(BDIR)/catalogs
mcatdir = $(BDIR)/catalogs-magnitude
fieldfilter = $(word 1, $(subst -, ,$(1)))-$(word 2, $(subst -, ,$(1)))
catalogs = $(foreach uid, 1 2 3 4 5 6 7 8 9 10, \
$(foreach f, $(filters), \
$(foreach i, h m, $(catdir)/udf$(uid)-$(f)-$(i).txt) ) )
gensegname = $(segdir)/$(call fieldfilter, $(1)).fits
genssname = $(ssdir)/$(word 1, $(subst -, ,$(1)))-$(2).fits
$(catalogs): $(catdir)/%.txt: $(cutdir)/%.fits $$(call gensegname,%) \
$$(call genssname,%,sky) $(lambda-eff) $(mkcatalog) \
| $(catdir)
mag-catalogs = $(foreach uid, 1 2 3 4 5 6 7 8 9 10, \
$(foreach f, $(filters), \
$(foreach i, h m, $(mcatdir)/udf$(uid)-$(f)-$(i).txt) ) )
magsegname = $(hsegdir)/$(call fieldfilter, $(1)).fits
$(mag-catalogs): $(mcatdir)/%.txt: $(cutdir)/%.fits $$(call magsegname,%) \
$$(call genssname,%,sky) $(lambda-eff) $(mkcatalog) \
| $(mcatdir)
# Set the zeropoint value from the filter depending on the
# instrument.
filter=$(word 2, $(subst -, ,$*)); \
if [ $(lastword $(subst -, ,$*)) = "h" ]; then \
if [ $$filter = "f606w" ]; then zp=26.51; \
elif [ $$filter = "f775w" ]; then zp=25.69; \
elif [ $$filter = "f814w" ]; then zp=25.94; \
elif [ $$filter = "f850lp" ]; then zp=24.87; \
fi; \
else \
zp=$$(awk '{print 47.5908 - 5*log($$1)/log(10)}' \
$(leffdir)/$$filter".txt"); \
fi; \
astmkcatalog $< --zeropoint=$$zp --magnitude --dec --ra -o$@ \
--objlabs=$(call magsegname, $*) --objhdu=0 \
--skyfilename=$(call genssname, $*,sky) --skyhdu=0 \
--stdfilename=$(call genssname, $*,std) --stdhdu=0
# Astrometry catalogs
# -------------------
#
# We need to use the NoiseChisel segmentation maps derived from MUSE
# images for the astrometry/position catalogs.
acatdir = $(BDIR)/catalogs-astrometry
ast-catalogs = $(foreach uid, 1 2 3 4 5 6 7 8 9 10, \
$(foreach f, $(filters), \
$(foreach i, h m, $(acatdir)/udf$(uid)-$(f)-$(i).txt) ) )
astsegname = $(msegdir)/$(call fieldfilter, $(1)).fits
$(ast-catalogs): $(acatdir)/%.txt: $(cutdir)/%.fits $$(call astsegname,%) \
$$(call genssname,%,sky) $(lambda-eff) $(mkcatalog) \
| $(acatdir)
# Set the zeropoint value from the filter depending on the
# instrument.
......@@ -107,7 +148,7 @@ $(catalogs): $(catdir)/%.txt: $(cutdir)/%.fits $$(call gensegname,%) \
$(leffdir)/$$filter".txt"); \
fi; \
astmkcatalog $< --zeropoint=$$zp --magnitude --dec --ra -o$@ \
--objlabs=$(call gensegname, $*) --objhdu=0 \
--objlabs=$(call astsegname, $*) --objhdu=0 \
--skyfilename=$(call genssname, $*,sky) --skyhdu=0 \
--stdfilename=$(call genssname, $*,std) --stdhdu=0
......@@ -115,24 +156,25 @@ $(catalogs): $(catdir)/%.txt: $(cutdir)/%.fits $$(call gensegname,%) \
# Clean catalogs
# --------------
# Merge the MUSE and HST catalogs into one
# ----------------------------------------
#
# Put the necessary columns of the HST and MUSE catalogs into the same
# file and remove those that have NaN measurements.
ccatdir = $(BDIR)/catalogs-cleaned
cleancats = $(foreach uid, 1 2 3 4 5 6 7 8 9 10, \
$(foreach f, $(filters), $(ccatdir)/udf$(uid)-$(f).txt) )
$(cleancats): $(ccatdir)/%.txt: $(catdir)/%-h.txt $(catdir)/%-m.txt | $(ccatdir)
# Put the necessary columns of the HST and MUSE magnitude catalogs
# into the same file and remove those that have NaN measurements.
samecat = $(BDIR)/catalogs-
merged-cats = $(foreach uid, 1 2 3 4 5 6 7 8 9 10, \
$(foreach f, $(filters), \
$(foreach d, $(acatdir) $(mcatdir), $(d)/udf$(uid)-$(f).txt)))
$(merged-cats): $(samecat)%.txt: $(samecat)%-h.txt $(samecat)%-m.txt
# First merge the two catalogs of the same field and filter
# together into one file to make it easier to process them.
paste $(catdir)/$*-h.txt $(catdir)/$*-m.txt > $(ccatdir)/$*-pasted.txt
paste $^ > $(samecat)$*-pasted.txt
# Remove the comments and NaN magnitude results and keep the
# differences between the MUSE and HST positions and
# magnitudes.
subfield=$(subst udf,,$(word 1, $(subst -, ,$*))); \
subfield=$(subst udf,,$(word 1, $(subst -, ,$(notdir $*)))); \
awk 'BEGIN{ \
print "# Reproduction pipeline $(gitdescribe)."; \
print "# Column 1: Sub-field ID."; \
......@@ -147,10 +189,12 @@ $(cleancats): $(ccatdir)/%.txt: $(catdir)/%-h.txt $(catdir)/%-m.txt | $(ccatdir)
!/^#/ && $$4!="nan" && $$8!="nan" { \
printf("%-4d%-5d%-14.8f%-14.8f%-8.3f%-14.8f%-14.8f%-8.3f\n", \
'$$subfield', $$1, $$2, $$3, $$4, $$6-$$2, $$7-$$3, \
$$8-$$4) }' $(ccatdir)/$*-pasted.txt > $@
$$8-$$4) }' $(samecat)$*-pasted.txt > $@
# Clean up.
rm $(ccatdir)/$*-pasted.txt
rm $(samecat)$*-pasted.txt
......@@ -159,13 +203,11 @@ $(cleancats): $(ccatdir)/%.txt: $(catdir)/%-h.txt $(catdir)/%-m.txt | $(ccatdir)
#
# The cleaned catalogs were for one sub-field in each filter, so now
# we will merge the nine UDF subfields into one file. Note that the
# UDF10 field doesn't need any merging of catalogs, so the
# $(fullmosaic) rule is for the 9 subfields and $(fulludf) is just
# defined for easy dependencies at later steps.
fullmosaic = $(foreach f, $(filters), $(ccatdir)/udf-$(f).txt)
fulludf = $(fullmosaic) $(foreach f, $(filters), $(ccatdir)/udf10-$(f).txt)
$(fullmosaic): $(ccatdir)/udf-%.txt: $$(foreach i, 1 2 3 4 5 6 7 8 9, \
$(ccatdir)/udf$$(i)-%.txt )
# UDF10 field doesn't need any merging of catalogs.
fullmosaic = $(foreach f, $(filters), \
$(foreach d, $(acatdir) $(mcatdir), $(d)/udf-$(f).txt) )
$(fullmosaic): $(samecat)%.txt: $(foreach i, 1 2 3 4 5 6 7 8 9, \
$$(dir $$@)udf$(i)-$$(subst udf-,,$$(notdir $$*)).txt )
# The comments are similar to all the catalogs, so just put
# those of the first prerequisite.
......
......@@ -61,7 +61,9 @@ tex/pipeline.tex: $(foreach t, versions statistics, $(mtexdir)/$(t).tex)
#
# To build the BibLaTeX references, we need to run LaTeX once and then
# biber, the rule for building the final PDF will build the final PDF.
description.bbl: tex/ref.tex tex/pipeline.tex | tikz
description.bbl: tex/ref.tex tex/pipeline.tex | tikz
echo; echo "Wants to make bib"; echo; exit 1
if pdflatex -shell-escape -halt-on-error description.tex; then \
echo "LaTeX (in preparation for BibLaTeX) is done."; \
......@@ -79,11 +81,19 @@ description.bbl: tex/ref.tex tex/pipeline.tex | tikz
# Build final PDF
# ---------------
#
# Before building the pdf, we want to build all the data-products, so
# fill in the targets into the data products variable.
#
# Note that even though `tex/pipeline.tex' is in the `tex/' directory,
# `tex/*' will not necessarily detect it because it might not be
# present (it is a target to be built by Make).
description.pdf: description.tex tex/* tex/pipeline.tex description.bbl \
$(two-d-hists) $(deghst-demo) | tikz
data-products = $(fullmosaic)
description.pdf: $(data-products) description.tex tex/* tex/pipeline.tex \
description.bbl | tikz
echo; echo "Wants to make PDF"; echo; exit 1
# Delete some outputs for TeX to rebuild (if needed)
# rm tikz/description-figure0*
......
......@@ -336,7 +336,8 @@ $(hst-centered-star): $(hpsfdir)/star-%.fits: $(hpsfdir)/star-%.txt \
# Kernels to match HST and MUSE
# Kernels to match HST with MUSE
# ------------------------------
#
# Using the PSF images for MUSE and HST, now we want to find the
# kernel that the HST images should be convolved with to have similar
......@@ -356,11 +357,6 @@ $(kernels): $(kerneldir)/udf%.fits: $(mpsfdir)/udf%.fits \
# Convolve HST images with proper PSF
# -----------------------------------
#
......@@ -372,17 +368,134 @@ $(kernels): $(kerneldir)/udf%.fits: $(mpsfdir)/udf%.fits \
# things up better.
hst-convolved = $(foreach uid, 1 2 3 4 5 6 7 8 9 10, \
$(foreach f, $(filters), $(hdegdir)/udf$(uid)-$(f)-c.fits) )
$(hst-convolved): $(hdegdir)/%-c.fits: $(hdegdir)/%-o.fits $(kerneldir)/%.fits
# We want to subtract the Sky value, so run NoiseChisel first.
astnoisechisel $< -o$(hdegdir)/$*-nc.fits
# Subtract the Sky value.
astarithmetic $(hdegdir)/$*-nc.fits $(hdegdir)/$*-nc.fits - \
-h0 -h3 -o$(hdegdir)/$*-ss.fits
$(hst-convolved): $(hdegdir)/%-c.fits: $(hdegdir)/%-o.fits $(kerneldir)/%.fits \
$(convolve)
# Convolve the Sky subtracted image.
astconvolve --spatial $< --kernel=$(kerneldir)/$*.fits -o$@ -N1
# Clean up
rm $(hdegdir)/$*-nc.fits $(hdegdir)/$*-ss.fits
# Pixel scale conversion
# ----------------------
#
# For the final degrading of HST images, we also need the scaling
# factor between the HST and MUSE images, so read the MUSE pixel
# resolution from one of the aligned images, then also read the HST
# pixel resolution from one of the images and find scale factor
$(hdegdir)/pix-res-scale.txt: $(MUSEINPUTS)/muse-udf10-f606w.fits \
$(hdegdir)/udf1-f606w-o.fits
mres=$$(astheader $(MUSEINPUTS)/muse-udf10-f606w.fits -h1 \
| grep CD2_2 | awk '{print $$3}'); \
hres=$$(astheader $(hdegdir)/udf1-f606w-o.fits | grep PC2_2 \
| awk '{print $$3}'); \
echo | awk '{print '$$hres'/'$$mres'}' > $@
# HST images to MUSE resolution
# -----------------------------
#
# The HST images were convolved with the MUSE PSF for the same spatial
# resolution, now, we need to warp them to the MUSE pixel grid to
# easily use one segmentation map over both images.
h-to-m-pixres = $(foreach uid, 1 2 3 4 5 6 7 8 9 10, \
$(foreach f, $(filters), $(hdegdir)/udf$(uid)-$(f).fits) )
$(h-to-m-pixres): $(hdegdir)/%.fits: $(hdegdir)/%-c.fits \
$(hdegdir)/pix-res-scale.txt $(imgwarp)
# Warp the HST image to the MUSE pixel scale, first find the
# scale factor (sf), then warp the image.
scalefactor=$$(cat $(hdegdir)/pix-res-scale.txt); \
astimgwarp $< --scale=$$scalefactor -o$@ --numthreads=1
# One pixel kernel
# ----------------
#
# This kernel is created since in practice it means no convolution
# with NoiseChisel.
hsegdir = $(BDIR)/seg-hst
onepkernel = $(hsegdir)/one-pix-kernel.fits
$(onepkernel): | $(hsegdir)
echo "1" > $(hsegdir)/one-pix-kernel.txt
astconvertt $(hsegdir)/one-pix-kernel.txt -o$@
rm $(hsegdir)/one-pix-kernel.txt
# Create the segmentation map
# ---------------------------
#
# The first thing we need to do is to create a segmentation map that
# will be fed into Gnuastro's MakeCatalog to generate the final
# catalog. Unfortunately as it currently stands, the MUSE-generated
# broad-band image has too many artifacts at low surface
# brightnesses. So if we want to do detection over it, we are going to
# miss a lot of the fainter objects. Thus, the convolved and scaled
# HST image is our only choice.
#
# However, the HST image doesn't have much noise left (because of the
# huge kernel). So we will be convolving it with a 1 pixel kernel
# (effectively no convolution), and then using much looser
# NoiseChisel parameters to give a reasonable result.
hsegments = $(foreach uid, 1 2 3 4 5 6 7 8 9 10, \
$(foreach f, $(filters), $(hsegdir)/udf$(uid)-$(f).fits) )
$(hsegments): $(hsegdir)/%.fits: $(hdegdir)/%.fits $(onepkernel) \
$(hdegdir)/pix-res-scale.txt $(noisechisel)
# Generate a segmentation map on the convolved and rescaled
# HST image.
astnoisechisel $< -o$(@D)/$*-nc.fits --kernel=$(onepkernel) \
--minbfrac=0.0 --minmodeq=0.3 --qthresh=0.4 \
--dthresh=0.8 --detsnminarea=5 --minnumfalse=50 \
--segquant=0.5 --gthresh=1e10 --objbordersn=1e10
# Make the clumps image an objects image. Note that because we
# disabled growth and object separation, each "object" only
# has one clump. So simply setting all the non-1 valued pixels
# in the objects image to zero, will do the job for us.
astarithmetic $(@D)/$*-nc.fits $(@D)/$*-nc.fits 1 neq 0 where \
-h1 -h2 -o$@ --type=long
# Subtract the Sky value from the HST images
# ------------------------------------------
#
# Once NoiseChisel is run on the degraded HST images, we have the Sky
# value and we can subtract it from the input image to clean it up. As
# described in commit 526d8e9 (titled: `A description is written for
# the process and results'), the F814W image in particular shows
# strong sky residuals, so this step is necessary.
cutdir = $(BDIR)/cutouts
finalhstdeg = $(foreach uid, 1 2 3 4 5 6 7 8 9 10, \
$(foreach f, $(filters), $(cutdir)/udf$(uid)-$(f)-h.fits) )
$(finalhstdeg): $(cutdir)/%-h.fits: $(hsegdir)/%.fits | $(cutdir)
astarithmetic $(hsegdir)/$*-nc.fits $(hsegdir)/$*-nc.fits - -h0 -h3 -o$@
# Demonstration of HST's bad sky subtraction
# ------------------------------------------
#
# Convert the un-skysubtracted images of the f814w and f850lp degraded
# HST images into PDF (with the same scaling) for demonstration.
h814demodir = $(BDIR)/tex/f814w-demo
deghst-demo = $(h814demodir)/udf1-f814w.pdf $(h814demodir)/udf1-f850lp.pdf
$(deghst-demo): $(h814demodir)/%.pdf: $(hdegdir)/%.fits | $(h814demodir)
astconvertt $< -o$@ --fluxlow=-0.001 --fluxhigh=0.02 --noinvert
......@@ -24,13 +24,13 @@
# Align the MUSE UDF region
# -------------------------
#
# The original MUSE UDF Mosaic region is not aligned with the
# celestial coordinates. To make things easier, we are first aligning
# them to ease the processing.
# them to ease the cutout process. This is done here (rather than
# `cutout-muse.mk') because we need the corrected scale factor
audfdir=$(BDIR)/aligned-udf
udfaligned = $(foreach f, $(filters), $(audfdir)/muse-udf-$(f).fits)
$(udfaligned): $(audfdir)/muse-udf-%.fits: \
......@@ -40,10 +40,11 @@ $(udfaligned): $(audfdir)/muse-udf-%.fits: \
# From original MUSE images
# -------------------------
# Cutout desired regions
# ----------------------
#
# Cutout the original MUSE images.
# Cutout each field's region from the aligned MUSE images.
mcutdir = $(BDIR)/muse-cutouts
udf1-muse-cutouts = $(foreach f, $(filters), $(mcutdir)/udf1-$(f).fits)
$(udf1-muse-cutouts): $(mcutdir)/udf1-%.fits: $(audfdir)/muse-udf-%.fits \
......@@ -109,131 +110,17 @@ $(udf10-muse-cutouts): $(mcutdir)/udf10-%.fits: \
# Pixel scale conversion
# ----------------------
#
# For the final degrading of HST images, we also need the scaling
# factor between the HST and MUSE images, so read the MUSE pixel
# resolution from one of the aligned images, then also read the HST
# pixel resolution from one of the images and find scale factor
$(hdegdir)/pix-res-scale.txt: $(mcutdir)/udf1-f606w.fits \
$(hdegdir)/udf1-f606w-c.fits
mres=$$(astheader $(mcutdir)/udf1-f606w.fits | grep PC2_2 \
| awk '{print $$3}'); \
hres=$$(astheader $(hdegdir)/udf1-f606w-c.fits | grep PC2_2 \
| awk '{print $$3}'); \
echo | awk '{print '$$hres'/'$$mres'}' > $@
# HST images to MUSE resolution
# -----------------------------
#
# The HST images were convolved with the MUSE PSF for the same spatial
# resolution, now, we need to warp them to the MUSE pixel grid to
# easily use one segmentation map over both images.
h-to-m-pixres = $(foreach uid, 1 2 3 4 5 6 7 8 9 10, \
$(foreach f, $(filters), $(hdegdir)/udf$(uid)-$(f).fits) )
$(h-to-m-pixres): $(hdegdir)/%.fits: $(hdegdir)/%-c.fits \
$(hdegdir)/pix-res-scale.txt $(imgwarp)
# Warp the HST image to the MUSE pixel scale, first find the
# scale factor (sf), then warp the image.
scalefactor=$$(cat $(hdegdir)/pix-res-scale.txt); \
astimgwarp $< --scale=$$scalefactor -o$@ --numthreads=1
# One pixel kernel
# ----------------
#
# This kernel is created since in practice it means no convolution
# with NoiseChisel.
segdir = $(BDIR)/seg-maps
onepkernel = $(segdir)/one-pix-kernel.fits
$(onepkernel): | $(segdir)
echo "1" > $(segdir)/one-pix-kernel.txt
astconvertt $(segdir)/one-pix-kernel.txt -o$@
rm $(segdir)/one-pix-kernel.txt
# Create the segmentation map
# ---------------------------
#
# The first thing we need to do is to create a segmentation map that
# will be fed into Gnuastro's MakeCatalog to generate the final
# catalog. Unfortunately as it currently stands, the MUSE-generated
# broad-band image has too many artifacts at low surface
# brightnesses. So if we want to do detection over it, we are going to
# miss a lot of the fainter objects. Thus, the convolved and scaled
# HST image is our only choice.
#
# However, the HST image doesn't have much noise left (because of the
# huge kernel). So we will be convolving it with a 1 pixel kernel
# (effectively no convolution), and then using much looser
# NoiseChisel parameters to give a reasonable result.
segments = $(foreach uid, 1 2 3 4 5 6 7 8 9 10, \
$(foreach f, $(filters), $(segdir)/udf$(uid)-$(f).fits) )
$(segments): $(segdir)/%.fits: $(hdegdir)/%.fits $(onepkernel) \
$(hdegdir)/pix-res-scale.txt $(noisechisel)
# Generate a segmentation map on the convolved and rescaled
# HST image.
astnoisechisel $< -o$(@D)/$*-nc.fits --kernel=$(onepkernel) \
--minbfrac=0.0 --minmodeq=0.3 --qthresh=0.4 \
--dthresh=0.8 --detsnminarea=5 --minnumfalse=50 \
--segquant=0.5 --gthresh=1e10 --objbordersn=1e10
# Make the clumps image an objects image. Note that because we
# disabled growth and object separation, each "object" only
# has one clump. So simply setting all the non-1 valued pixels
# in the objects image to zero, will do the job for us.
astarithmetic $(@D)/$*-nc.fits $(@D)/$*-nc.fits 1 neq 0 where \
-h1 -h2 -o$@ --type=long
# Subtract the Sky value from the HST images
# ------------------------------------------
#
# Once NoiseChisel is run on the degraded HST images, we have the Sky
# value and we can subtract it from the input image to clean it up. As
# described in commit 526d8e9 (titled: `A description is written for
# the process and results'), the F814W image in particular shows
# strong sky residuals, so this step is necessary.
cutdir = $(BDIR)/cutouts
finalhstdeg = $(foreach uid, 1 2 3 4 5 6 7 8 9 10, \
$(foreach f, $(filters), $(cutdir)/udf$(uid)-$(f)-h.fits) )
$(finalhstdeg): $(cutdir)/%-h.fits: $(segdir)/%.fits | $(cutdir)
astarithmetic $(segdir)/$*-nc.fits $(segdir)/$*-nc.fits - -h0 -h3 -o$@
# Correct MUSE image size
# -----------------------
#
# To generate the catalog, all the inputs have to have the same
# size. However, due to differences in the WCS, after being warped,
# some fields might give a different size (by one pixel) compared to
# the MUSE cropped image. Since we will be using this warp again on an
# original HST image later at segmentation time, the only way would be
# to make the MUSE image the same size as the resampled HST
# image. This will only be the addition or removal of one row or
# column in the image, so it shouldn't be significant.
# the MUSE cropped image.
cutdir = $(BDIR)/cutouts
muse-corr = $(foreach uid, 1 2 3 4 5 6 7 8 9 10, \
$(foreach f, $(filters), $(cutdir)/udf$(uid)-$(f)-m.fits) )
$(muse-corr): $(cutdir)/%-m.fits: $(mcutdir)/%.fits $(cutdir)/%-h.fits \
| $(cutdir)
$(muse-corr): $(cutdir)/%-m.fits: $(mcutdir)/%.fits $(cutdir)/%-h.fits
# If the sizes are identical, then just copy the actual
# cropped MUSE image, otherwise, using ImageCrop's `--section'
......@@ -257,11 +144,29 @@ $(muse-corr): $(cutdir)/%-m.fits: $(mcutdir)/%.fits $(cutdir)/%-h.fits \
# Crop degraded HST for demo
# --------------------------
# NoiseChisel on MUSE images for astrometry comparison
# ----------------------------------------------------
#
# Crops from the HST images for a demonstration of bad F814W results.
h814demodir = $(BDIR)/tex/f814w-demo
deghst-demo = $(h814demodir)/udf1-f814w.pdf $(h814demodir)/udf1-f850lp.pdf
$(deghst-demo): $(h814demodir)/%.pdf: $(hdegdir)/%.fits | $(h814demodir)
astconvertt $< -o$@ --fluxlow=-0.001 --fluxhigh=0.02 --noinvert
# For magnitude comparison we used the NoiseChisel runs on HST images,
# since fainter objects can be detected there. But for astrometry, it
# is important to use the MUSE images for detection, since the result
# on a region with no MUSE signal will largely be determined by the
# aperture set with HST. So here, we will run NoiseChisel on the MUSE
# images and make a segmentation map to generate a catalog.
#
# The process is almost identical with the HST NoiseChisel run, look
# there for comments.
msegdir = $(BDIR)/seg-muse
seg-muse = $(foreach uid, 1 2 3 4 5 6 7 8 9 10, \
$(foreach f, $(filters), $(msegdir)/udf$(uid)-$(f).fits) )
$(seg-muse): $(msegdir)/%.fits: $(cutdir)/%-m.fits $(onepkernel) \
$(noisechisel) | $(msegdir)
astnoisechisel $< -o$(@D)/$*-nc.fits --kernel=$(onepkernel) \
--minbfrac=0.0 --minmodeq=0.3 --qthresh=0.40 \
--dthresh=0.8 --detsnminarea=5 --minnumfalse=30 \
--segsnminarea=5 --segquant=0.5 --gthresh=1e10 \
--objbordersn=1e10
astarithmetic $(@D)/$*-nc.fits $(@D)/$*-nc.fits 1 neq 0 where \
-h1 -h2 -o$@ --type=long
......@@ -41,7 +41,7 @@ two-d-hists = $(foreach field, udf udf10, \
$(foreach type, mag dist, \
$(twoddir)/$(field)-$(filter)-$(type).txt) ) )
$(two-d-hists): $(twoddir)/%.txt: \
$(ccatdir)/$$(word 1, $$(subst -, , $$*))-$$(word 2, $$(subst -, , $$*)).txt reproduce/scripts/two-dim-hist.awk reproduce/scripts/checks.awk \
$(mcatdir)/$$(word 1, $$(subst -, , $$*))-$$(word 2, $$(subst -, , $$*)).txt reproduce/scripts/two-dim-hist.awk reproduce/scripts/checks.awk \
| $(twoddir)
# See if the magnitude is required or the distance, then put
......@@ -86,7 +86,7 @@ histstdmultip = 3
statdir = $(BDIR)/statistics
mag-ave-std = $(foreach uid, 1 2 3 4 5 6 7 8 9 10, \
$(foreach f, $(filters), $(statdir)/udf$(uid)-$(f).txt) )
$(mag-ave-std): $(statdir)/%.txt: $(ccatdir)/%.txt \
$(mag-ave-std): $(statdir)/%.txt: $(ccatdir)/%-mag.txt \
reproduce/scripts/bin-ave-std.awk | $(statdir)
awk -vxmin=$(twodxmin) -vxmax=$(twodxmax) -vxcol=5 -vycol=8 \
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment