Skip to content

Commit

Permalink
Merge pull request #49 from euroargodev/v0p1p16b
Browse files Browse the repository at this point in the history
V0p1p16b
  • Loading branch information
gaelforget authored Nov 21, 2023
2 parents 94dc243 + 72311bf commit 7221c63
Show file tree
Hide file tree
Showing 7 changed files with 25 additions and 197 deletions.
2 changes: 1 addition & 1 deletion Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "ArgoData"
uuid = "9eb831cf-c491-48dc-bed4-6aca718df73c"
authors = ["gaelforget <[email protected]>"]
version = "0.1.16"
version = "0.1.17"

[deps]
CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b"
Expand Down
9 changes: 5 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,11 @@ Analysis and Processing of

## Contents

- `GDAC` module to access and retrieve files from Argo server.
- `MITprof` format of [Forget, et al 2015](http://dx.doi.org/10.5194/gmd-8-3071-2015) for [standard depth data](https://doi.org/10.7910/DVN/EE3C40).
- `MITprof_plots` module (in `examples/`) for `MITprof`.
- `AnalysisMethods` for cost functions and geospatial statistics.
- `GDAC` module to access and retrieve files from Argo server
- `MITprof` format of [Forget, et al 2015](http://dx.doi.org/10.5194/gmd-8-3071-2015) for [standard depth data](https://doi.org/10.7910/DVN/EE3C40)
- `MITprofPlots` module (in `examples/MITprof_plots.jl`)
- `MITprofAnalysis` for cost functions and data manipulations
- `MITprofStat` for gridded geospatial statistics
- `Notebooks`
- [ArgoData](https://juliaocean.github.io/OceanRobots.jl/dev/examples/Float_Argo.html) 🚀 interactive visualization
- [MITprof](https://euroargodev.github.io/ArgoData.jl/dev/ArgoToMITprof.html) 🚀 simplified format
Expand Down
2 changes: 1 addition & 1 deletion src/ArgoData.jl
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ include("MITprof.jl")
include("MITprofAnalysis.jl")

export GDAC, ArgoTools, GriddedFields
export MITprof, MITprofAnalysis, MITprofStat, AnalysisMethods
export MITprof, MITprofAnalysis, MITprofStat
export ProfileNative, ProfileStandard, MITprofStandard

end # module
185 changes: 0 additions & 185 deletions src/MITprof.jl
Original file line number Diff line number Diff line change
Expand Up @@ -384,188 +384,3 @@ end

end

module AnalysisMethods

using Dates, MeshArrays, NCDatasets, Glob, DataFrames, CSV

import ArgoData.MITprofStandard

"""
    profile_positions(path,Γ,file="")

Create a table (`DataFrame`) of the positions and dates obtained by looping
through the netcdf files in `path` (or just one `file` if it is specified).
Additional information is included: float `ID`, position on the ECCO grid
`pos`, and the number of valid data points for T and S (`nbT`, `nbS`).

```
using ArgoData
path="MITprof/"
csv_file="csv/profile_positions.csv"

using MeshArrays
γ=GridSpec("LatLonCap",MeshArrays.GRID_LLC90)
Γ=GridLoad(γ)

df=AnalysisMethods.profile_positions(path,Γ)
CSV.write(csv_file, df)
```
"""
function profile_positions(path,Γ,file="")
    if isempty(file)
        list=glob("*.nc",path)
        nfiles=length(list)
    else
        list=[joinpath(path,file)]
        nfiles=1
    end

    y=fill(0.0,nfiles,2)
    d=fill(DataFrame(),nfiles)

    for ff in 1:nfiles
        output_file=list[ff]
        # progress report every 100 files
        mod(ff,100)==0 ? println("output_file $(ff) is "*output_file) : nothing

        mp=MITprofStandard(output_file)

        # mp.date counts days since year 0; convert to DateTime
        da=Dates.julian2datetime.(Dates.datetime2julian(DateTime(0,1,1)) .+mp.date)
        y[ff,1]=year(minimum(da))
        y[ff,2]=year(maximum(da))

        # nearest-neighbor lookup of each profile position on the ECCO grid
        # (fix: knn is a function call on Γ.XC,Γ.YC — original line was garbled)
        (f,i,j,c)=MeshArrays.knn(Γ.XC,Γ.YC,mp.lon[:],mp.lat[:])
        pos=[[f[ii],i[ii],j[ii]] for ii in 1:length(c)]

        # number of valid (non-missing) T and S values per profile
        nbT=sum((!ismissing).(mp.T[:,:]),dims=2)
        nbS=sum((!ismissing).(mp.S[:,:]),dims=2)

        d[ff]=DataFrame(ID=parse.(Int,mp.ID),lon=mp.lon,lat=mp.lat,
            date=da,pos=c[:],nbT=nbT[:],nbS=nbS[:])
    end

    # concatenate the per-file tables into one DataFrame
    nd=length(findall((!isempty).(d)))
    df=d[1]
    [append!(df,d[ff]) for ff in 2:nd]

    df
end

"""
    profile_variables(name::String)

Create an Array of all values for one variable, obtained by looping through
the files in `MITprof/`, in the same row order as `csv/profile_positions.csv`.

```
@everywhere using ArgoData, CSV, DataFrames
@everywhere list_v=("prof_T","prof_Testim","prof_Tweight","prof_S","prof_Sestim","prof_Sweight")
@distributed for v in list_v
    output_file="csv/"*v*".csv"
    tmp=AnalysisMethods.profile_variables(v)
    CSV.write(output_file,DataFrame(tmp,:auto))
end
```
"""
function profile_variables(name::String)
    path="MITprof/"
    csv_file="csv/profile_positions.csv"
    df=CSV.read(csv_file,DataFrame)

    list=glob("*.nc",path)
    nfiles=length(list)
    # 55 is the number of standard depth levels in the MITprof format
    x=Array{Union{Float64,Missing},2}(undef,size(df,1),55)
    n0=[0]
    for ff in 1:nfiles
        tmp=Dataset(list[ff],"r") do ds
            ds[name][:,:]
        end # ds is closed
        s=size(tmp)
        # stack this file's profiles below the previous ones
        x[n0[1]+1:n0[1]+s[1],:].=tmp
        n0[1]+=s[1]
    end

    x
end

"""
    profile_levels(k=0)

Create per-level csv files (`csv/k\$(k).csv`), one column per variable,
obtained by looping through the per-variable files in `csv/`.
If `k==0` (default) then all 55 levels are processed; otherwise only level `k`.
"""
function profile_levels(k=0)
    # levels to process: all 55 by default, or just the one requested
    k==0 ? kk=collect(1:55) : kk=[k]
    list_v=("prof_T","prof_Testim","prof_Tweight","prof_S","prof_Sestim","prof_Sweight")
    list_n=("T","Te","Tw","S","Se","Sw")

    csv_file="csv/profile_positions.csv"
    df0=CSV.read(csv_file,DataFrame)

    path="csv/"

    nfiles=length(list_v)
    for ff in 1:nfiles
        println(list_v[ff])
        df=CSV.read(path*list_v[ff]*".csv",DataFrame)
        name=list_n[ff]
        for klev in kk
            fil=path*"k$(klev).csv"
            # first variable creates the per-level file; later ones append columns
            if ff==1
                df1=DataFrame(date=df0.date)
            else
                df1=CSV.read(fil,DataFrame)
            end
            println("x$(klev)")
            df1[:,name]=df[:,Symbol("x$(klev)")]
            CSV.write(fil,df1)
        end
    end

end

"""
    profile_add_level!(df,k)

Add the six per-level columns (T,Te,Tw,S,Se,Sw) for level `k` to `df`, in place.

```
df=CSV.read("csv/profile_positions.csv",DataFrame)
AnalysisMethods.profile_add_level!(df,5)
```
"""
function profile_add_level!(df,k)
    df1=CSV.read("csv/k$(k).csv",DataFrame)
    list_n=("T","Te","Tw","S","Se","Sw")
    [df[:,Symbol(i)]=df1[:,Symbol(i)] for i in list_n]
end

"""
    profile_subset(df,lons,lats,dates)

Return the subset of rows of `df` with `lon` in `(lons[1],lons[2]]`,
`lat` in `(lats[1],lats[2]]`, and `date` in `(dates[1],dates[2]]`.

```
df=CSV.read("csv/profile_positions.csv",DataFrame)
d0=DateTime("2012-06-11T18:50:04")
d1=DateTime("2012-07-11T18:50:04")
tmp=AnalysisMethods.profile_subset(df,(0,10),(-5,5),(d0,d1))
```
"""
profile_subset(df,lons,lats,dates) =
    df[ (df.lon .> lons[1]) .& (df.lon .<= lons[2]) .&
        (df.lat .> lats[1]) .& (df.lat .<= lats[2]) .&
        (df.date .> dates[1]) .& (df.date .<= dates[2]) ,:]

"""
    profile_trim(df)

Return the subset of rows of `df` with valid (non-missing) T and S values,
strictly positive weights, and a date within the plausible range.
"""
profile_trim(df) = df[
    (!ismissing).(df.T) .& (!ismissing).(df.Te) .& (df.Tw.>0) .&
    (!ismissing).(df.S) .& (!ismissing).(df.Se) .& (df.Sw.>0) .&
    (df.date .> DateTime(1000,1,1)) .& (df.date .< DateTime(2022,4,1))
    ,:]

end
11 changes: 8 additions & 3 deletions src/MITprofAnalysis.jl
Original file line number Diff line number Diff line change
Expand Up @@ -86,9 +86,14 @@ df=MITprofAnalysis.csv_of_positions(path,Γ)
CSV.write(csv_file, df)
```
"""
function csv_of_positions(path,Γ)
list=glob("*.nc",path)
nfiles=length(list)
function csv_of_positions(path,Γ,file="")
if isempty(file)
list=glob("*.nc",path)
nfiles=length(list)
else
list=[joinpath(path,file)]
nfiles=1
end

y=fill(0.0,nfiles,2)
d=fill(DataFrame(),nfiles)
Expand Down
11 changes: 9 additions & 2 deletions src/tools.jl
Original file line number Diff line number Diff line change
Expand Up @@ -432,8 +432,15 @@ function prof_convert!(prof,meta)
meta["TPOTfromTINSITU"] ? ArgoTools.prof_TtoΘ!(prof) : nothing
end

function interp_z(x,y,xi)
jj=findall(isfinite.(y))
"""
    interp_z(x,y,xi; keep_mask=false)

Call `Interpolations.linear_interpolation` with `extrapolation_bc=Flat()`.
If `keep_mask=true` then retain NaNs that are sometimes used to indicate that the sea floor has been reached (e.g. in model output).
"""
function interp_z(x,y,xi; keep_mask=false)
# by default drop non-finite samples so they don't poison the interpolant;
# with keep_mask=true use every index so NaN "sea floor" markers propagate
!keep_mask ? jj=findall(isfinite.(y)) : jj=eachindex(y)
# Float64 conversion guards against integer/mixed-type inputs; Flat() clamps
# queries outside [minimum(x), maximum(x)] to the boundary values
interp_linear_extrap = linear_interpolation(Float64.(x[jj]), Float64.(y[jj]), extrapolation_bc=Flat())
return interp_linear_extrap(xi)
end
Expand Down
2 changes: 1 addition & 1 deletion test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ using ArgoData, MeshArrays, Test

γ=GridSpec("LatLonCap",MeshArrays.GRID_LLC90)
Γ=GridLoad(γ)
df=AnalysisMethods.profile_positions(pth,Γ,fil)
df=MITprofAnalysis.csv_of_positions(pth,Γ,fil)
@test isapprox(maximum(df.lat),6.859)

dates=[ArgoTools.DateTime(2011,1,10) ArgoTools.DateTime(2011,1,20)]
Expand Down

0 comments on commit 7221c63

Please sign in to comment.