Add pyproject.toml and relax dependencies #117

Merged · 18 commits · Nov 12, 2024
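The pyproject.toml this PR adds is not shown in the rendered diff below, so here is a minimal sketch of what a Poetry 1.5 project file for pfb-imaging might look like. Everything in it is a placeholder except the package name, the Python range (taken from the CI matrix below), and the existence of a `pfb` entry point (the CI runs `poetry run pfb --help`); the module path for that entry point is an assumption.

```toml
# Hypothetical sketch only; the actual pyproject.toml in this PR is not
# shown in the diff. Versions, description, and authors are placeholders.
[tool.poetry]
name = "pfb-imaging"
version = "0.0.0"
description = "Placeholder description"
authors = ["Placeholder Author <dev@example.com>"]

[tool.poetry.dependencies]
python = ">=3.10,<3.12"  # matches the CI matrix below

[tool.poetry.scripts]
pfb = "pfb.workers.main:cli"  # assumed module path, not confirmed by the diff

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
```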
83 changes: 70 additions & 13 deletions .github/workflows/ci.yml
@@ -1,41 +1,93 @@
name: pfb-imaging Workflow
name: pfb-imaging CI Workflow

on:
push:
tags:
- 'v*'
pull_request:

env:
POETRY_VERSION: 1.5

jobs:
test:
runs-on: ubuntu-latest
runs-on: ${{ matrix.os }}
continue-on-error: true
if: "!contains(github.event.head_commit.message, '[skip ci]')"

env:
NUMBA_CACHE_DIR: /tmp/numba-cache

strategy:
matrix:
python-version: ["3.9", "3.10", "3.11"]
os: [ubuntu-20.04, ubuntu-22.04]
python-version: ["3.10", "3.11"]

steps:
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version}}

- name: Install poetry
uses: abatilo/actions-poetry@v2
with:
poetry-version: ${{ env.POETRY_VERSION }}

- name: Check poetry install
run: poetry --version

- name: Checkout source
uses: actions/checkout@v3
with:
fetch-depth: 1
fetch-depth: 0

- name: Restore repo times
uses: chetan/git-restore-mtime-action@v2

- name: Upgrade pip and setuptools
run: python -m pip install -U pip setuptools

# - name: Pin setuptools
# run: python -m pip install setuptools==65.5
- name: Create Key and Numba Cache Directory
id: numba-key
run: |
mkdir -p ${{ env.NUMBA_CACHE_DIR }}
echo "timestamp=$(/bin/date -u '+%Y%m%d%H%M%S')" >> $GITHUB_OUTPUT

- name: Cache Numba Kernels
uses: actions/cache@v3
with:
key: numba-cache-${{ matrix.python-version }}-${{ steps.numba-key.outputs.timestamp }}
restore-keys: numba-cache-${{ matrix.python-version }}-
path: ${{ env.NUMBA_CACHE_DIR }}

- name: List the measures directory
run: curl ftp://ftp.astron.nl/outgoing/Measures/ > measures_dir.txt

- name: Load cached CASA Measures Data
id: load-cached-casa-measures
uses: actions/cache@v3
with:
key: casa-measures-${{ hashFiles('measures_dir.txt')}}
path: |
~/measures
~/.casarc

- name: Download and install CASA Measures Data
if: steps.load-cached-casa-measures.outputs.cache-hit != 'true'
run: |
mkdir -p ~/measures
curl ftp://ftp.astron.nl/outgoing/Measures/WSRT_Measures.ztar | tar xvzf - -C ~/measures
echo "measures.directory: ~/measures" > ~/.casarc

- name: Install pfb-imaging
run: python -m pip install .[testing]
run: poetry install

- name: Run pfb-imaging
run: poetry run pfb --help

- name: Run tests
run: py.test -s -vvv tests/
run: poetry run pytest -v tests/

deploy:
needs: [test]
@@ -48,27 +100,32 @@ jobs:
with:
python-version: "3.10"

- name: Install latest setuptools, wheel, pip
run: python3 -m pip install -U pip setuptools wheel
- name: Install poetry
uses: abatilo/actions-poetry@v2
with:
poetry-version: ${{ env.POETRY_VERSION }}

- name: Check poetry install
run: poetry --version

- name: Checkout source
uses: actions/checkout@v4
with:
fetch-depth: 1

- name: Build distributions
run: python setup.py sdist bdist_wheel
run: poetry build

- name: Publish distribution to Test PyPI
uses: pypa/gh-action-pypi-publish@master
uses: pypa/gh-action-pypi-publish@1.8.6
with:
user: __token__
password: ${{ secrets.PYPI_TEST_API_TOKEN }}
repository_url: https://test.pypi.org/legacy/
continue-on-error: false

- name: Publish distribution 📦 to PyPI
uses: pypa/gh-action-pypi-publish@master
uses: pypa/gh-action-pypi-publish@1.8.6
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}
120 changes: 43 additions & 77 deletions pfb/opt/pcg.py
@@ -390,7 +390,6 @@ def pcg_psf(psfhat,

def pcg_dds(ds_name,
eta, # regularisation for Hessian approximation
sigma, # regularisation for preconditioner
mask=1.0,
use_psf=True,
residual_name='RESIDUAL',
@@ -399,6 +398,7 @@ def pcg_dds(ds_name,
epsilon=5e-4,
double_accum=True,
nthreads=1,
zero_model_outside_mask=False,
tol=1e-5,
maxit=500,
verbosity=1,
@@ -413,94 +413,64 @@ def pcg_dds(ds_name,
if not isinstance(ds_name, list):
ds_name = [ds_name]

# drop_vars = ['PSF']
# if not use_psf:
# drop_vars.append('PSFHAT')
drop_vars = None
drop_vars = ['PSF', 'PSFHAT']
ds = xds_from_list(ds_name, nthreads=nthreads,
drop_vars=drop_vars)[0]

if residual_name in ds:
j = getattr(ds, residual_name).values * mask * ds.BEAM.values
ds = ds.drop_vars(residual_name)
beam = mask * ds.BEAM.values
if zero_model_outside_mask:
if model_name not in ds:
raise RuntimeError(f"Asked to zero model outside mask but {model_name} not in dds")
model = getattr(ds, model_name).values
model = np.where(mask > 0, model, 0.0)
print("Zeroing model outside mask")
resid = ds.DIRTY.values - _hessian_slice(
model,
uvw=ds.UVW.values,
weight=ds.WEIGHT.values,
vis_mask=ds.MASK.values,
freq=ds.FREQ.values,
beam=ds.BEAM.values,
cell=ds.cell_rad,
x0=ds.x0,
y0=ds.y0,
do_wgridding=do_wgridding,
epsilon=epsilon,
double_accum=double_accum,
nthreads=nthreads)
j = resid * beam
else:
j = ds.DIRTY.values * mask * ds.BEAM.values
if model_name in ds:
model = getattr(ds, model_name).values
else:
model = np.zeros(mask.shape, dtype=float)

if residual_name in ds:
j = getattr(ds, residual_name).values * beam
ds = ds.drop_vars(residual_name)
else:
j = ds.DIRTY.values * beam

psf = ds.PSF.values
nx_psf, ny_psf = psf.shape
nx, ny = j.shape
wsum = np.sum(ds.WEIGHT.values * ds.MASK.values)
psf /= wsum
wsum = ds.wsum
j /= wsum

# downweight edges of field compared to center
# this allows the PCG to downweight the fit to the edges
# which may be contaminated by edge effects and also
# stabilises the preconditioner
width = np.minimum(int(0.1*nx), 32)
taperxy = taperf((nx, ny), width)
# eta /= taperxy

# set precond if PSF is present
if 'PSFHAT' in ds and use_psf:
psfhat = np.abs(ds.PSFHAT.values)/wsum
ds.drop_vars(('PSFHAT'))
nx_psf, nyo2 = psfhat.shape
ny_psf = 2*(nyo2-1) # is this always the case?
nxpadl = (nx_psf - nx)//2
nxpadr = nx_psf - nx - nxpadl
nypadl = (ny_psf - ny)//2
nypadr = ny_psf - ny - nypadl
if nx_psf != nx:
unpad_x = slice(nxpadl, -nxpadr)
else:
unpad_x = slice(None)
if ny_psf != ny:
unpad_y = slice(nypadl, -nypadr)
else:
unpad_y = slice(None)
xpad = empty_noncritical((nx_psf, ny_psf),
dtype=j.dtype)
xhat = empty_noncritical((nx_psf, nyo2),
dtype='c16')
xout = empty_noncritical((nx, ny),
dtype=j.dtype)
precond = partial(
hess_direct_slice,
xpad=xpad,
xhat=xhat,
xout=xout,
abspsf=psfhat,
taperxy=taperxy,
lastsize=ny_psf,
nthreads=nthreads,
eta=sigma,
mode='backward')

x0 = precond(j)

# get intrinsic resolution by deconvolving psf
upsf = precond(psf[unpad_x, unpad_y])
upsf /= upsf.max()
gaussparu = fitcleanbeam(upsf[None], level=0.25, pixsize=1.0)[0]
ds = ds.assign(**{
'UPSF': (('x', 'y'), upsf)
})
ds = ds.assign_attrs(gaussparu=gaussparu)
precond = None
if 'UPDATE' in ds:
x0 = ds.UPDATE.values * mask
else:
# print('Not using preconditioning')
precond = None
x0 = np.zeros_like(j)

hess = partial(_hessian_slice,
uvw=ds.UVW.values,
weight=ds.WEIGHT.values,
vis_mask=ds.MASK.values,
freq=ds.FREQ.values,
beam=ds.BEAM.values,
beam=beam,
cell=ds.cell_rad,
x0=ds.x0,
y0=ds.y0,
flip_u=ds.flip_u,
flip_v=ds.flip_v,
flip_w=ds.flip_w,
do_wgridding=do_wgridding,
epsilon=epsilon,
double_accum=double_accum,
@@ -520,11 +490,7 @@ def pcg_dds(ds_name,
backtrack=False,
return_resid=False)

if model_name in ds:
model = getattr(ds, model_name).values + x
else:
model = x

model += x

resid = ds.DIRTY.values - _hessian_slice(
model,
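Since `sigma` was dropped from `pcg_dds` and `zero_model_outside_mask` was added, a call site would now look roughly like the sketch below. This is a hypothetical invocation assuming only the signature visible in the hunks above; the dataset path and mask file are made-up, and the return value is left unnamed because it is not shown in the diff.

```python
# Hypothetical call sketch for the revised pcg_dds signature; the dds path
# and mask file are placeholders, not values taken from this PR.
import numpy as np
from pfb.opt.pcg import pcg_dds

mask = np.load("clean_mask.npy")  # placeholder: 1 inside the mask, 0 outside

result = pcg_dds(
    "out_dds/field0_band0.zarr",   # placeholder dataset name
    1e-5,                          # eta: regularisation for the Hessian approximation
    mask=mask,
    zero_model_outside_mask=True,  # new flag introduced by this PR
    epsilon=5e-4,
    tol=1e-5,
    maxit=500,
    nthreads=8,
)
```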
7 changes: 1 addition & 6 deletions pfb/parser/fluxtractor.yaml
@@ -30,14 +30,9 @@ inputs:
eta:
dtype: float
default: 1e-5
abbreviation: sinv
abbreviation: eta
info:
Standard deviation of assumed GRF prior
sigma:
dtype: float
default: 1
info:
The value that is added to |psfhat| for preconditioning.
model-name:
dtype: str
default: MODEL
7 changes: 6 additions & 1 deletion pfb/parser/init.yaml
@@ -121,11 +121,16 @@ inputs:
info:
Display progress.
Use --no-progressbar to deactivate.
memory_reporting:
memory-reporting:
dtype: bool
default: false
info:
Report worker memory as tasks complete
check_ants:
dtype: bool
default: false
info:
Check that ANTENNA1 and ANTENNA2 tables are consistent with the ANTENNA table.

_include:
- (.)out.yml
File renamed without changes.
6 changes: 3 additions & 3 deletions pfb/parser/uncabbedcabs.yml
@@ -25,14 +25,14 @@ pfb.degrid:
_include:
- (.)degrid.yaml

pfb.klean:
command: pfb.workers.klean.klean
pfb.kclean:
command: pfb.workers.kclean.kclean
flavour: python
policies:
pass_missing_as_none: true

_include:
- (.)klean.yaml
- (.)kclean.yaml

pfb.restore:
command: pfb.workers.restore.restore
6 changes: 0 additions & 6 deletions pfb/utils/misc.py
@@ -461,12 +461,6 @@ def construct_mappings(ms_name,
raise RuntimeError("Something went wrong constructing the "
"frequency mapping. sum(fchunks != nchan)")

# nfreq_chunks = nchan_in // cpit
# freq_chunks = (cpit,)*nfreq_chunks
# rem = nchan_in - nfreq_chunks * cpit
# if rem:
# freq_chunks += (rem,)

ms_chunks[ms].append({'row': row_chunks,
'chan': freq_chunks})

17 changes: 9 additions & 8 deletions pfb/utils/stokes2vis.py
@@ -164,14 +164,15 @@ def single_stokes(

# check that antpos gives the correct size table
antmax = allants.size
try:
assert antmax == nant
except Exception as e:
raise ValueError('Inconsistent ANTENNA table. '
'Shape does not match max number of antennas '
'as inferred from ant1 and ant2. '
f'Table size is {antpos.shape} but got {antmax}. '
f'{oname}')
if opts.check_ants:
try:
assert antmax == nant
except Exception as e:
raise ValueError('Inconsistent ANTENNA table. '
'Shape does not match max number of antennas '
'as inferred from ant1 and ant2. '
f'Table size is {antpos.shape} but got {antmax}. '
f'{oname}')

# relabel antennas by index
# this only works because allants is sorted in ascending order
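The change above makes the ANTENNA-table consistency check opt-in via the new `check_ants` flag. As a standalone illustration, here is a minimal sketch of the same check with hypothetical function and argument names (the worker's actual variables come from the measurement set, not from this API):

```python
# Minimal sketch of the consistency check gated by --check-ants; the
# function and argument names here are illustrative, not the worker's API.
import numpy as np

def check_antenna_table(ant1, ant2, antpos):
    """Raise if the ANTENNA table size disagrees with ANTENNA1/ANTENNA2."""
    allants = np.unique(np.concatenate((ant1, ant2)))  # antennas actually referenced
    antmax = allants.size
    nant = antpos.shape[0]  # number of rows in the ANTENNA table
    if antmax != nant:
        raise ValueError(
            "Inconsistent ANTENNA table. Shape does not match the number of "
            f"antennas inferred from ant1 and ant2: table size is {antpos.shape} "
            f"but got {antmax}."
        )
```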