From 655abebe526a3587c26429e41130d78f326fa524 Mon Sep 17 00:00:00 2001 From: Simen Kirkvik Date: Wed, 17 Dec 2025 17:41:11 +0100 Subject: [PATCH] fix: Update Arome to translate latlon to lambert Also: - Add test for reading uv from tile coords - Build with nix - Pin nix with npins - Remove .config tools manifest - Remove preview flag --- .config/dotnet-tools.json | 20 --- .envrc | 4 +- Oceanbox.FvcomKit.slnx | 2 +- default.nix | 19 +++ nix/default.nix | 146 +++++++++++++++++++ nix/projnet.fsharp.nix | 29 ++++ nix/sdslite.nix | 29 ++++ nix/sources.json | 23 +++ nix/sources.nix | 4 + shell.nix | 11 +- src/Adjoin.fs | 5 +- src/Arome.fs | 201 ++++++++++++++++++++++---- src/Fvcom.fs | 6 +- src/Grid.fs | 37 ++--- src/Interpol.fs | 3 +- src/NorKyst.fs | 5 +- src/NorShelf.fs | 3 + src/Oceanbox.FvcomKit.fsproj | 61 ++++---- src/Thredds.fs | 25 ++-- src/Types.fs | 1 + src/default.nix | 41 ++++++ src/deps.json | 272 +++++++++++++++++++++++++++++++++++ test/Tests.fs | 18 --- test/Tests.fsproj | 17 --- xtest/Arome.fs | 189 ++++++++++++++++++++++++ xtest/xtest.fsproj | 38 +++++ xtest/xunit.runner.json | 3 + 27 files changed, 1059 insertions(+), 153 deletions(-) delete mode 100644 .config/dotnet-tools.json create mode 100644 default.nix create mode 100644 nix/default.nix create mode 100644 nix/projnet.fsharp.nix create mode 100644 nix/sdslite.nix create mode 100644 nix/sources.json create mode 100644 nix/sources.nix create mode 100644 src/default.nix create mode 100644 src/deps.json delete mode 100644 test/Tests.fs delete mode 100644 test/Tests.fsproj create mode 100644 xtest/Arome.fs create mode 100644 xtest/xtest.fsproj create mode 100644 xtest/xunit.runner.json diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json deleted file mode 100644 index f85a6d3..0000000 --- a/.config/dotnet-tools.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "version": 1, - "isRoot": true, - "tools": { - "fantomas": { - "version": "7.0.1", - "commands": [ - "fantomas" - ], - "rollForward": false - }, - "dotnet-outdated-tool": { - "version": "4.6.7", - "commands": [ - "dotnet-outdated" - ], - "rollForward": false - } - } -} \ No newline at end of file diff --git a/.envrc b/.envrc index 4a4726a..30d7c0f 100644 --- a/.envrc +++ b/.envrc @@ -1 +1,3 @@ -use_nix +export NPINS_DIRECTORY="nix" + +use_nix \ No newline at end of file diff --git a/Oceanbox.FvcomKit.slnx b/Oceanbox.FvcomKit.slnx index 67c6667..4735a9c 100644 --- a/Oceanbox.FvcomKit.slnx +++ b/Oceanbox.FvcomKit.slnx @@ -12,5 +12,5 @@ - + diff --git a/default.nix b/default.nix new file mode 100644 index 0000000..155d075 --- /dev/null +++ b/default.nix @@ -0,0 +1,19 @@ +{ + sources ? import ./nix, + system ? builtins.currentSystem, + pkgs ? 
import sources.nixpkgs { + inherit system; + config = { }; + overlays = [ ]; + }, +}: +let + sdk = pkgs.dotnetCorePackages.sdk_9_0; + sdslite = pkgs.callPackage ./nix/sdslite.nix { dotnet-sdk = sdk; }; + projnetFsharp = pkgs.callPackage ./nix/projnet.fsharp.nix { dotnet-sdk = sdk; }; +in +pkgs.callPackage ./src { + SDSLite = sdslite; + projnet = projnetFsharp; + dotnet-sdk = sdk; +} \ No newline at end of file diff --git a/nix/default.nix b/nix/default.nix new file mode 100644 index 0000000..6592476 --- /dev/null +++ b/nix/default.nix @@ -0,0 +1,146 @@ +/* + This file is provided under the MIT licence: + + Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ +# Generated by npins. Do not modify; will be overwritten regularly +let + data = builtins.fromJSON (builtins.readFile ./sources.json); + version = data.version; + + # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295 + range = + first: last: if first > last then [ ] else builtins.genList (n: first + n) (last - first + 1); + + # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257 + stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1)); + + # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269 + stringAsChars = f: s: concatStrings (map f (stringToCharacters s)); + concatMapStrings = f: list: concatStrings (map f list); + concatStrings = builtins.concatStringsSep ""; + + # If the environment variable NPINS_OVERRIDE_${name} is set, then use + # the path directly as opposed to the fetched source. + # (Taken from Niv for compatibility) + mayOverride = + name: path: + let + envVarName = "NPINS_OVERRIDE_${saneName}"; + saneName = stringAsChars (c: if (builtins.match "[a-zA-Z0-9]" c) == null then "_" else c) name; + ersatz = builtins.getEnv envVarName; + in + if ersatz == "" then + path + else + # this turns the string into an actual Nix path (for both absolute and + # relative paths) + builtins.trace "Overriding path of \"${name}\" with \"${ersatz}\" due to set \"${envVarName}\"" ( + if builtins.substring 0 1 ersatz == "/" then + /. + ersatz + else + /. + builtins.getEnv "PWD" + "/${ersatz}" + ); + + mkSource = + name: spec: + assert spec ? 
type; + let + path = + if spec.type == "Git" then + mkGitSource spec + else if spec.type == "GitRelease" then + mkGitSource spec + else if spec.type == "PyPi" then + mkPyPiSource spec + else if spec.type == "Channel" then + mkChannelSource spec + else if spec.type == "Tarball" then + mkTarballSource spec + else + builtins.throw "Unknown source type ${spec.type}"; + in + spec // { outPath = mayOverride name path; }; + + mkGitSource = + { + repository, + revision, + url ? null, + submodules, + hash, + branch ? null, + ... + }: + assert repository ? type; + # At the moment, either it is a plain git repository (which has an url), or it is a GitHub/GitLab repository + # In the latter case, there we will always be an url to the tarball + if url != null && !submodules then + builtins.fetchTarball { + inherit url; + sha256 = hash; # FIXME: check nix version & use SRI hashes + } + else + let + url = + if repository.type == "Git" then + repository.url + else if repository.type == "GitHub" then + "https://github.com/${repository.owner}/${repository.repo}.git" + else if repository.type == "GitLab" then + "${repository.server}/${repository.repo_path}.git" + else + throw "Unrecognized repository type ${repository.type}"; + urlToName = + url: rev: + let + matched = builtins.match "^.*/([^/]*)(\\.git)?$" url; + + short = builtins.substring 0 7 rev; + + appendShort = if (builtins.match "[a-f0-9]*" rev) != null then "-${short}" else ""; + in + "${if matched == null then "source" else builtins.head matched}${appendShort}"; + name = urlToName url revision; + in + builtins.fetchGit { + rev = revision; + inherit name; + # hash = hash; + inherit url submodules; + }; + + mkPyPiSource = + { url, hash, ... }: + builtins.fetchurl { + inherit url; + sha256 = hash; + }; + + mkChannelSource = + { url, hash, ... }: + builtins.fetchTarball { + inherit url; + sha256 = hash; + }; + + mkTarballSource = + { + url, + locked_url ? url, + hash, + ... + }: + builtins.fetchTarball { + url = locked_url; + sha256 = hash; + }; +in +if version == 5 then + builtins.mapAttrs mkSource data.pins +else + throw "Unsupported format version ${toString version} in sources.json. 
Try running `npins upgrade`" diff --git a/nix/projnet.fsharp.nix b/nix/projnet.fsharp.nix new file mode 100644 index 0000000..31ec188 --- /dev/null +++ b/nix/projnet.fsharp.nix @@ -0,0 +1,29 @@ +{ + dotnet-sdk, + fetchFromGitLab, + buildDotnetModule +}: +let + src = fetchFromGitLab { + owner = "oceanbox"; + repo = "ProjNet.FSharp"; + # tag = "v5.2.0"; + rev = "722ce0c23fda6844a81e995afbb2d81cbd5f38ec"; + private = true; + forceFetchGit = true; + hash = "sha256-Rvnnf/D2x90pwgvTbXz307MJVBlVPK/cCf1hqj2VosE="; + }; +in +buildDotnetModule { + name = "ProjNet.FSharp"; + + src = src; + + dotnet-sdk = dotnet-sdk; + + projectFile = "src/ProjNet.FSharp.fsproj"; + nugetDeps = "${src}/src/deps.json"; + packNupkg = true; + + executables = [ ]; +} diff --git a/nix/sdslite.nix b/nix/sdslite.nix new file mode 100644 index 0000000..9cce51c --- /dev/null +++ b/nix/sdslite.nix @@ -0,0 +1,29 @@ +{ + dotnet-sdk, + fetchFromGitLab, + buildDotnetModule +}: +let + src = fetchFromGitLab { + owner = "oceanbox"; + repo = "SDSlite"; + # tag = "v2.8.0"; + rev = "8c1a158206c37bc57a5bd726a792bd6a9cd2ec01"; + private = true; + forceFetchGit = true; + hash = "sha256-i9pNrmH/VC0Q9FCldbWGdZHkqSL1cdYtAOs7vX+DlXM="; + }; +in +buildDotnetModule { + name = "Oceanbox.SDSLite"; + + src = src; + + dotnet-sdk = dotnet-sdk; + + projectFile = "ScientificDataSet/ScientificDataSet.csproj"; + nugetDeps = "${src}/ScientificDataSet/deps.json"; + packNupkg = true; + + executables = [ ]; +} \ No newline at end of file diff --git a/nix/sources.json b/nix/sources.json new file mode 100644 index 0000000..75810c5 --- /dev/null +++ b/nix/sources.json @@ -0,0 +1,23 @@ +{ + "pins": { + "nix-utils": { + "type": "Git", + "repository": { + "type": "Git", + "url": "https://git.sr.ht/~mrtz/nix-utils" + }, + "branch": "trunk", + "submodules": false, + "revision": "098f594425d2b9dde0657becad0f6498d074f8b3", + "url": null, + "hash": "0hh52w1fkpr1xx6j8cjm6g88j2352yv2ysqm1q51j59y6f583vyb" + }, + "nixpkgs": { + "type": "Channel", + "name": "nixpkgs-unstable", + "url": "https://releases.nixos.org/nixpkgs/nixpkgs-26.05pre905319.f720de590661/nixexprs.tar.xz", + "hash": "07n4hhch0j6n69b0zchdjg0l80z2xrdk7k57ykv90cvhklim5dz1" + } + }, + "version": 5 +} \ No newline at end of file diff --git a/nix/sources.nix b/nix/sources.nix new file mode 100644 index 0000000..83f7943 --- /dev/null +++ b/nix/sources.nix @@ -0,0 +1,4 @@ +{ + "ProjNet.FSharp" = "https://gitlab.com/api/v4/projects/35009572/packages/nuget/download"; + "Oceanbox.SDSLite" = "https://gitlab.com/api/v4/projects/34025102/packages/nuget/download"; +} diff --git a/shell.nix b/shell.nix index 01aee42..22eedd0 100644 --- a/shell.nix +++ b/shell.nix @@ -1,13 +1,18 @@ -with import { }; let - dotnet-sdk = dotnet-sdk_9; + sources = import ./nix; + pkgs = import sources.nixpkgs {}; + dotnet-sdk = pkgs.dotnetCorePackages.sdk_9_0; in +with import { }; mkShell rec { packages = [ bun dotnet-sdk fsautocomplete fantomas + npins + nixfmt + nuget-to-json ]; buildInputs = [ @@ -17,4 +22,4 @@ mkShell rec { DOTNET_ROOT = "${dotnet-sdk}/share/dotnet"; LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath buildInputs; -} +} \ No newline at end of file diff --git a/src/Adjoin.fs b/src/Adjoin.fs index 56371b2..d83b2fc 100644 --- a/src/Adjoin.fs +++ b/src/Adjoin.fs @@ -139,6 +139,7 @@ let private genOobIdx (proj: IProj) (tree: KdTree<_, _>) (cullIdx: (int * int)[] // cullIdx = cullIdx // oobIdx = genOobIdx proj tree cullIdx fPos rPos // } + let private mkFvcomAdjoint (proj: IProj) (fPos: PosVec) (bbox: BBox) ((rPos, wet): BiPos * Mask) = 
let box = mkCropBox bbox let pos' = reProject proj rPos @@ -147,7 +148,9 @@ let private mkFvcomAdjoint (proj: IProj) (fPos: PosVec) (bbox: BBox) ((rPos, wet let nearest = fPos |> Array.map (fun (x, y) -> tree.GetNearestNeighbours ([| x; y |], 1) |> Array.head |> (fun x -> x.Value)) - { adjoinIdx = nearest; cullIdx = cullIdx; oobIdx = genOobIdx proj tree cullIdx fPos rPos } + let oobIdx = genOobIdx proj tree cullIdx fPos rPos + + { adjoinIdx = nearest; cullIdx = cullIdx; oobIdx = oobIdx } let inline float2 x = bimap float float x diff --git a/src/Arome.fs b/src/Arome.fs index 0bc3b68..c5e60bb 100644 --- a/src/Arome.fs +++ b/src/Arome.fs @@ -1,20 +1,76 @@ module Oceanbox.FvcomKit.Arome -open FSharpPlus +open System + open Microsoft.Research.Science.Data + open ProjNet.FSharp open Serilog open Types - let trans = makeTransform CoordSys.WGS84 (CoordSys.LCCMet ()) +[] +type Pointf = { + x: float + y: float +} with + static member Zero = { x = 0.0; y = 0.0 } + static member OfTuple (x, y) = { x = x; y = y } + static member OfStructTuple struct (x, y) = { x = x; y = y } + +/// Single precision +[] +type Points = { + x: single + y: single +} with + static member Zero = { x = 0.0f; y = 0.0f } + static member OfTuple (x, y) = { x = x; y = y } + static member OfStructTuple struct (x, y) = { x = x; y = y } + +module Pointf = + let ofPoints (p: Points) : Pointf = { x = float p.x; y = float p.y } + + let getBBox (points: Pointf array) : BBox = + let minX = points |> Array.minBy _.x |> _.x + let maxX = points |> Array.maxBy _.x |> _.x + let minY = points |> Array.minBy _.y |> _.y + let maxY = points |> Array.maxBy _.y |> _.y + let center = (minX + (maxX - minX)) / 2., (minY + (maxY - minY)) / 2. + + { + minX = minX + maxX = maxX + minY = minY + maxY = maxY + center = center + } + +module Points = + let getBBox (points: Points array) : BBox = + let minX = points |> Array.minBy _.x |> _.x + let maxX = points |> Array.maxBy _.x |> _.x + let minY = points |> Array.minBy _.y |> _.y + let maxY = points |> Array.maxBy _.y |> _.y + let center = float (minX + (maxX - minX)) / 2., float (minY + (maxY - minY)) / 2. + + { + minX = float minX + maxX = float maxX + minY = float minY + maxY = float maxY + center = center + } + +[] type SquareGrid = { dimensions: int * int BBox: BBox squareSize: float projection: ProjNet.CoordinateSystems.CoordinateSystem + points: Points array } with member this.getBoundingBox() = this.BBox static member empty = { @@ -22,15 +78,93 @@ type SquareGrid = { BBox = BBox.empty squareSize = 0.0 projection = CoordSys.LCCMet () + points = Array.empty } -let getBBox (xs: float array) (ys: float array) : BBox = +let private square x = x * x + +let haversineDistance (earthRadius: float) (x0: float) (y0: float) (x1: float) (y1: float) : float = + let mutable lat1 = y0 + let mutable lat2 = y1 + let lon1 = x0 + let lon2 = x1 + + let dLat = Double.DegreesToRadians (lat2 - lat1) + let dLon = Double.DegreesToRadians (lon2 - lon1) + lat1 <- Double.DegreesToRadians lat1 + lat2 <- Double.DegreesToRadians lat2 + + let a = square (Math.Sin (dLat / 2.0)) + Math.Cos lat1 * Math.Cos lat2 * square (Math.Sin (dLon / 2.0)) + let c = 2.0 * Math.Asin (Math.Sqrt a) + + let result = earthRadius * c + + result + +let getGrid (ds: DataSet) : Result = try - let minX = Array.min xs - let maxX = Array.max xs - let minY = Array.min ys - let maxY = Array.max ys - let center = float (minX + (maxX - minX)) / 2., float (minY + (maxY - minY)) / 2. 
+ let dimensions = ds.Dimensions["x"].Length, ds.Dimensions["y"].Length + let longs : float array2d = ds["longitude"].GetData () :?> float[,] + let lats : float array2d = ds["latitude"].GetData () :?> float[,] + // NOTE: The netcdf file dimensions are defined as (y, x) + let width = Array2D.length2 longs + let height = Array2D.length1 lats + let points : Points array = + let result = Array.create (width * height) Points.Zero + for i in 0 .. height - 1 do + for j in 0 .. width - 1 do + let lat = lats[i, j] + let lon = longs[i, j] + let p = lon, lat + // NOTE(simkir): Convert to lambert projection + let x, y = trans.project p + result[i * width + j] <- Points.OfTuple (single x, single y) + result + + let bbox = Points.getBBox points + + if points.Length < 2 then + Error "The dataset must contain at least 1 square" + else + let p0 = points[0 * width + 0] + let p1 = points[0 * width + 1] + let p2 = points[1 * width + 0] + // let p3 = points[1 * width + 1] + let x1, x0 = if p1.x > p0.x then p1.x, p0.x else p0.x, p1.x + let y1, y0 = if p2.y > p1.y then p2.y, p1.y else p1.y, p2.y + let lengthX = x1 - x0 + let lengthY = y1 - y0 + let isSquare = lengthX = lengthY + + if not isSquare then + Log.Warning ( + "FvcomKit.Arome.getGrid grid is not square: {X1} - {X0} = {LengthX} = {LengthY} = {Y1} - {Y0}", + x1, + x0, + lengthX, + lengthY, + y1, + y0 + ) + + Ok { + SquareGrid.empty with + dimensions = dimensions + BBox = bbox + squareSize = float lengthX + points = points + } + with exn -> + Log.Error (exn, "Sorcerer.Arome.getAromeSquareGrid exception") + Error $"Error reading arome grid: {exn.Message}" + +let private getBBox (xs: float array) (ys: float array) : BBox = + try + let minX = xs |> Array.min + let maxX = xs |> Array.max + let minY = ys |> Array.min + let maxY = ys |> Array.max + let center = (minX + (maxX - minX)) / 2., (minY + (maxY - minY)) / 2. 
{ minX = minX @@ -43,7 +177,8 @@ let getBBox (xs: float array) (ys: float array) : BBox = Log.Error $"{e}" BBox.empty -let getGrid (ds: DataSet) : Result = +/// Depends on the netcdf having the 'x' and 'y' variables +let getSquareGrid (ds: DataSet) : Result = try let dimensions = ds.Dimensions["x"].Length, ds.Dimensions["y"].Length let xs = (ds["x"].GetData () :?> single[]) |> Array.map float @@ -77,27 +212,35 @@ let getGrid (ds: DataSet) : Result = Log.Error (exn, "FvcomKit.Arome.getGrid exception") Error $"Error reading arome grid: {exn.Message}" -let readUV (ds: DataSet) t x y = - let u = ds["x_wind_10m"].GetData ([| t; y; x |], [| 1; 1; 1 |]) :?> single[,,] - let v = ds["y_wind_10m"].GetData ([| t; y; x |], [| 1; 1; 1 |]) :?> single[,,] - u[0, 0, 0], v[0, 0, 0] +let readUV (ds: DataSet) (t: int) x y : single * single = + let xWind = ds["x_wind_10m"] + let yWind = ds["y_wind_10m"] + let origin = [| t; 0; y; x |] + let shape = [| 1; 1; 1; 1; |] + Log.Verbose("""Fetching NetCDF["x_wind_10m"]({Origin}, {Shape})""", origin, shape) + let us = xWind.GetData (origin, shape) :?> single[,,,] + let vs = yWind.GetData (origin, shape) :?> single[,,,] + + us[0, 0, 0, 0], vs[0, 0, 0, 0] /// Finds the index of a tile within a square grid, given its bounding box and square length -let tryFindIndex squareSize (wide, tall) (minX, minY) (maxX, maxY) (p0, p1) = - if (minX < p0 && p0 < maxX) && (minY < p1 && p1 < maxY) then - let dx = p0 - minX - let dy = p1 - minY - let xIdx = int (dx / squareSize) - let yIdx = int (dy / squareSize) +let tryFindIndex (grid: SquareGrid) (x0, y0) = + let wide, tall = grid.dimensions + let bbox = grid.BBox + if bbox.minX <= x0 && x0 < bbox.maxX && bbox.minY <= y0 && y0 < bbox.maxY then + let dx = x0 - bbox.minX + let dy = y0 - bbox.minY + let xIdx = int (dx / grid.squareSize) + let yIdx = int (dy / grid.squareSize) if xIdx < wide && yIdx < tall then Some (xIdx, yIdx) else Log.Warning ( "Got wrong indices within the bounding box of the archive: min {@Min}, max {@Max}, point {@Point}, delta {@Delta}m, indices {@Indices}", - (minX, minY), - (maxX, maxY), - (p0, p1), + (bbox.minX, bbox.minY), + (bbox.maxX, bbox.maxY), + (x0, y0), (dx, dy), (xIdx, yIdx) ) @@ -107,15 +250,11 @@ let tryFindIndex squareSize (wide, tall) (minX, minY) (maxX, maxY) (p0, p1) = /// Tries to get the closest x and y in the arome dataset based on position p let tryFind (grid: SquareGrid) (p: float * float) : (int * int) option = - let min = grid.BBox.minX, grid.BBox.minY - let max = grid.BBox.maxX, grid.BBox.maxY + tryFindIndex grid p - tryFindIndex grid.squareSize grid.dimensions min max p - -let tryFindWithProj (grid: SquareGrid) proj (p0: float, p1: float) : (int * int) option = - let trans = makeTransform proj grid.projection - let min = grid.BBox.minX, grid.BBox.minY - let max = grid.BBox.maxX, grid.BBox.maxY +let tryFindWithProj (proj: Projection) (grid: SquareGrid) (p0: float, p1: float) : (int * int) option = + let coordSys : ProjNet.CoordinateSystems.CoordinateSystem = Projection.ToCoordinateSystem proj + let trans = makeTransform coordSys grid.projection let p = trans.project ((p0, p1)) - tryFindIndex grid.squareSize grid.dimensions min max p \ No newline at end of file + tryFindIndex grid p \ No newline at end of file diff --git a/src/Fvcom.fs b/src/Fvcom.fs index 951707d..42ba83f 100644 --- a/src/Fvcom.fs +++ b/src/Fvcom.fs @@ -29,6 +29,7 @@ type FvcomGrid = { member this.getVertices() = this.Nodes member this.getCells() = this.Elem member this.getBoundingBox() = this.BBox + static member empty = 
{ Elem = Array.empty Nodes = Array.empty @@ -39,6 +40,7 @@ type FvcomGrid = { SiglayCenter = Array2D.zeroCreate 0 0 Siglev = Array2D.zeroCreate 0 0 } + member this.ToGrid() = { Elem = this.Elem; Nodes = this.Nodes; BBox = this.BBox } let getNumFrames (ds: DataSet) = @@ -76,7 +78,7 @@ let getTimeSpanSinceStart (ds: DataSet) n = let days = ds["Itime"].GetData () :?> int[] let msec = ds["Itime2"].GetData () :?> int[] let t0 = TimeSpan.FromDays days[n] - let t1 = TimeSpan.FromMilliseconds msec[n] + let t1 = TimeSpan.FromMilliseconds (float msec[n]) t0 + t1 |> Some with e -> Log.Error $"getTimeInDays exception: {e.Message}" @@ -526,7 +528,7 @@ module Singular = Log.Error $"{err}" 0f, 0f - let readTemp (ds: DataSet) n t d = + let readTemp (ds: DataSet) n t d : single = try let t = ds["temp"].GetData ([| t; d; n |], [| 1; 1; 1 |]) :?> single[,,] t[0, 0, 0] diff --git a/src/Grid.fs b/src/Grid.fs index 69da993..dd3ec0d 100644 --- a/src/Grid.fs +++ b/src/Grid.fs @@ -61,35 +61,35 @@ type NeighborIndex = { } with static member empty = { ElemsAroundNode = Map.empty; NodesAroundNode = Map.empty; ElemsAroundElem = Map.empty } -type private Ean = Map - // NOTE(SimenLK): The amount of items to be stored in the trees leafs // let treeLeafSize = LeafNodeSize 64 let private createTree (points: Leaf array) = let tree = KdTree (2, KdTree.Math.DoubleMath ()) - points - |> Array.iter (fun a -> tree.Add ([| fst a.Pos; snd a.Pos |], a.Data) |> ignore) + + do points |> Array.iter (fun a -> tree.Add ([| fst a.Pos; snd a.Pos |], a.Data) |> ignore) if points.Length > 0 then - tree.Balance () + do tree.Balance () else - Log.Warning $"Empty kd-tree" + do Log.Warning $"Empty kd-tree" + tree +type private Ean = Map + let private makeElemsSurroundingNodeMap (elem: Elem array) : ElemsAroundNode = - let addElIdx k v (nodes: Ean) = - Map.tryFind k nodes - |> Option.defaultWith (fun () -> []) - |> fun nds -> Map.add k (v :: nds) nodes - elem - |> Array.fold - (fun (n, acc) (a, b, c) -> - let acc' = acc |> addElIdx a n |> addElIdx b n |> addElIdx c n - n + 1, acc' - ) - (0, Map.empty) + let addElIdx k v (nodes: Ean) : Ean = + let nds = + Map.tryFind k nodes + |> Option.defaultValue [||] + nodes |> Map.add k (Array.append [|v|] nds) + let folder (n, acc) (a, b, c) = + let acc' = acc |> addElIdx a n |> addElIdx b n |> addElIdx c n + n + 1, acc' + + ((0, Map.empty), elem) + ||> Array.fold folder |> snd - |> Map.mapValues toArray let private makeElemsSurroundingNodeMap' (elem: Elem array) : ElemsAroundNode = let addElemIdx k v nodes = @@ -137,6 +137,7 @@ let getSurrounding (idx: Map) (a, b, c) = let makeNeighborIndex (grid: IGrid) = let elem = grid.getCells () let ean = makeElemsSurroundingNodeMap elem + { ElemsAroundNode = ean NodesAroundNode = makeNodesSurroudingNodeMap ean elem diff --git a/src/Interpol.fs b/src/Interpol.fs index b0e2a76..d660b43 100644 --- a/src/Interpol.fs +++ b/src/Interpol.fs @@ -21,9 +21,10 @@ let private calcInterpolationWeightedIdx (rn, fn) = let nearestIdx = Array.map (fun fzi -> findNearestZ fzi rn) fn nearestIdx |> Array.mapi (fun i j -> + let last = Array.last rn if fn[i] <= rn[0] then (0, 0), (1.0, 0.0) - elif fn[i] >= rn[^0] then + elif fn[i] >= last then (rn.Length - 1, 0), (1.0, 0.0) elif abs (fn[i] - rn[j]) < 9.999999975e-07 then (j, 0), (1.0, 0.0) diff --git a/src/NorKyst.fs b/src/NorKyst.fs index 1340ebb..833d209 100644 --- a/src/NorKyst.fs +++ b/src/NorKyst.fs @@ -1,18 +1,21 @@ module Oceanbox.FvcomKit.NorKyst open System + open Thredds let private getNorkystUrl (threddsUrl: string) (d: 
DateTime) = let url = threddsUrl let fmt (d: DateTime) = $"{d.Year}%02d{d.Month}%02d{d.Day}T00Z.nc" + $"{url}/fou-hi/new_norkyst800m/his/ocean_his.an.{fmt d}" let private getArchiveUrls urlf (t: DateTime) = let t = t.ToUniversalTime () let now = DateTime.Now.ToUniversalTime () let dDay = (now.Date - t.Date).Days + if dDay < 0 then // no data available [] else @@ -21,4 +24,4 @@ let private getArchiveUrls urlf (t: DateTime) = let tryGetArchive (threddsUrl: string) (t: DateTime) = getArchiveUrls (getNorkystUrl threddsUrl) t |> tryOpenThredds - |> Option.bind (fun (_, ds) -> tryGetTimeIndex ds t |> Option.map (fun idx -> (ds, idx))) \ No newline at end of file + |> Option.bind (fun (_, ds) -> tryGetTimeIndex ds t |> Option.map (fun idx -> ds, idx)) \ No newline at end of file diff --git a/src/NorShelf.fs b/src/NorShelf.fs index 8a33524..5c23b33 100644 --- a/src/NorShelf.fs +++ b/src/NorShelf.fs @@ -10,12 +10,14 @@ let private getNorshelfUrl (threddsUrl: string) (kind: Kind) (mode: Mode) (date: let fmt (d: DateTime) = $"{d.Year}%02d{d.Month}%02d{d.Day}T00Z.nc" let url = threddsUrl + "/sea_norshelf_files" + $"{url}/{date.Year}/%02d{date.Month}/norshelf_{kind}_{mode}_{fmt date}" let private getArchiveUrls urlf (t: DateTime) = let t = t.ToUniversalTime () let now = DateTime.Now.ToUniversalTime () let dDay = (now.Date - t.Date).Days + if dDay < -3 then // no data available [] elif dDay <= 0 then // forecast, count down from latest @@ -27,6 +29,7 @@ let private getArchiveUrls urlf (t: DateTime) = let tryGetArchive threddsUrl avg (t: DateTime) = let kind = if avg then Avg else Qck + getArchiveUrls (getNorshelfUrl threddsUrl kind) t |> tryOpenThredds |> Option.bind (fun (fname, ds) -> tryGetTimeIndex ds t |> Option.map (fun idx -> (fname, ds, idx))) diff --git a/src/Oceanbox.FvcomKit.fsproj b/src/Oceanbox.FvcomKit.fsproj index 003242a..227a1b3 100644 --- a/src/Oceanbox.FvcomKit.fsproj +++ b/src/Oceanbox.FvcomKit.fsproj @@ -3,41 +3,44 @@ Library net9.0 - true + Oceanbox AS + Oceanbox.FvcomKit linux-x64 - - 5.13.0 - preview + false + true - - - - - - - - - - - - + + + + + + + + + + + + - - - - - - - + + + + + + + - - - - - + + + + - + + + + + \ No newline at end of file diff --git a/src/Thredds.fs b/src/Thredds.fs index 02de7bf..deb74af 100644 --- a/src/Thredds.fs +++ b/src/Thredds.fs @@ -1,6 +1,7 @@ module Oceanbox.FvcomKit.Thredds open System + open Microsoft.Research.Science.Data open Microsoft.Research.Science.Data.NetCDF4 open Serilog @@ -21,20 +22,20 @@ type Kind = | Avg -> "avg" | Qck -> "qck" -let dateRange (start: DateTime) days = +let dateRange (start: DateTime) days : DateTime list = if days < 0 then List.unfold (fun d -> if d < days then None else Some (start.AddDays d, d - 1)) 0 else List.unfold (fun d -> if d > days then None else Some (start.AddDays d, d + 1)) 0 -let tryOpenArchive url = +let tryOpenArchive url : DataSet option = let uri = NetCDFUri () - uri.Url <- url + do uri.Url <- url try let ds = NetCDFDataSet.Open uri Some ds with e -> - Log.Debug e.Message + do Log.Debug e.Message None let rec tryOpenThredds = @@ -42,18 +43,22 @@ let rec tryOpenThredds = | [] -> None | x :: xs -> match tryOpenArchive x with + | None -> + tryOpenThredds xs | Some ds -> - Log.Debug $"thredds: {x}" + do Log.Debug $"thredds: {x}" Some (x, ds) - | None -> tryOpenThredds xs let tryGetTimeIndex (ds: DataSet) (t: DateTime) = let ot = ds["ocean_time"].GetData () :?> double[] - let t0 = DateTimeOffset.FromUnixTimeSeconds (ot[0] |> int64) - let t1 = DateTimeOffset.FromUnixTimeSeconds (ot[^0] |> int64) - 
Log.Debug $"t={t} t0={t0} tn={t1}" + let first = Array.head ot + let last = Array.last ot + let t0 = DateTimeOffset.FromUnixTimeSeconds (first |> int64) + let t1 = DateTimeOffset.FromUnixTimeSeconds (last |> int64) + do Log.Debug $"t={t} t0={t0} tn={t1}" + if t < t0.DateTime || t > t1.DateTime then - Log.Error "time is out of bounds" + do Log.Error "time is out of bounds" None else let dt = t - t0.DateTime diff --git a/src/Types.fs b/src/Types.fs index e63ec71..17825fc 100644 --- a/src/Types.fs +++ b/src/Types.fs @@ -4,6 +4,7 @@ open System type Vertex = float * float +[] type BBox = { minX: float maxX: float diff --git a/src/default.nix b/src/default.nix new file mode 100644 index 0000000..24e09e4 --- /dev/null +++ b/src/default.nix @@ -0,0 +1,41 @@ +{ + SDSLite, + projnet, + dotnet-sdk, + dotnet-runtime, + buildDotnetModule, +}: +let + name = "Oceanbox.Fvcomkit"; + projectFile = ./Oceanbox.FvcomKit.fsproj; + versionMatch = builtins.match ".*([^<]+).*" ( + builtins.readFile projectFile + ); + version = builtins.head versionMatch; +in +buildDotnetModule { + name = name; + pname = name; + version = version; + + src = ./.; + + buildInputs = [ + projnet + SDSLite + ]; + + projectFile = "Oceanbox.FvcomKit.fsproj"; + inherit + dotnet-sdk + dotnet-runtime + ; + + nugetDeps = ./deps.json; + + packNupkg = true; + + # NOTE(mrtz): Can't package nuget without it + # [ref](https://github.com/dotnet/fsharp/issues/12320) + dotnetFlags = "--property:TargetsForTfmSpecificContentInPackage="; +} \ No newline at end of file diff --git a/src/deps.json b/src/deps.json new file mode 100644 index 0000000..5f516b9 --- /dev/null +++ b/src/deps.json @@ -0,0 +1,272 @@ +[ + { + "pname": "DynamicInterop", + "version": "0.9.1", + "hash": "sha256-IB76dA0+K/y/2s/qYL7AfVOF0+6W2hVIBgf9YdZ1oJY=" + }, + { + "pname": "FSharp.Core", + "version": "9.0.303", + "hash": "sha256-AxR6wqodeU23KOTgkUfIgbavgbcSuzD4UBP+tiFydgA=" + }, + { + "pname": "FSharp.Data", + "version": "6.3.0", + "hash": "sha256-zhVkSfqCljqr6UR0IUMOHUBlR61PvwYKq63PQ09yJPM=" + }, + { + "pname": "FSharp.Data", + "version": "6.4.1", + "hash": "sha256-+Z7zbD8cKmhHJWg7Z8XHJ8IeJXCWr/kgRl+VbbsMFw8=" + }, + { + "pname": "FSharp.Data.Csv.Core", + "version": "6.3.0", + "hash": "sha256-JdOr3NDmLPohkPpZaWjKqssw0+Wr1lVDtJwTNJ/JhcY=" + }, + { + "pname": "FSharp.Data.Csv.Core", + "version": "6.4.1", + "hash": "sha256-oz040beVF7WMONi3n0dPQlZD5deQWnClSXKRijgnw/k=" + }, + { + "pname": "FSharp.Data.Html.Core", + "version": "6.3.0", + "hash": "sha256-tSstVvAT9o+0Pr6cIReJOvh0kcthOWgt1CPzgIRoYRQ=" + }, + { + "pname": "FSharp.Data.Html.Core", + "version": "6.4.1", + "hash": "sha256-QBbvE8WXUVjS/0mW3aohZBuyfr3M7UGw7kt1oSrlq+s=" + }, + { + "pname": "FSharp.Data.Http", + "version": "6.3.0", + "hash": "sha256-/PzzLT0ev4miFswct+YscFDwoaq05BSJATM4fPvxk8o=" + }, + { + "pname": "FSharp.Data.Http", + "version": "6.4.1", + "hash": "sha256-0YD/jSCppE1siXrUcTx0OmVdgsjMk+gn0pHp+3GS3V4=" + }, + { + "pname": "FSharp.Data.Json.Core", + "version": "6.3.0", + "hash": "sha256-MFe88psxmHWGQYoG8NXi4z33TlWO+dMwOV4NViaUmTM=" + }, + { + "pname": "FSharp.Data.Json.Core", + "version": "6.4.1", + "hash": "sha256-0Fmo0f1jC3s+Dime8j2oqLnOK+elqo1xWmktpEYrZlk=" + }, + { + "pname": "FSharp.Data.Runtime.Utilities", + "version": "6.3.0", + "hash": "sha256-psc/tsHLYrorjeBBBLviwwA57XMFXUP2ywZqLMzfxac=" + }, + { + "pname": "FSharp.Data.Runtime.Utilities", + "version": "6.4.1", + "hash": "sha256-rNo2XQMME1zrPaIezD15P0RoTu8wyhtiJyB99Qp1hcE=" + }, + { + "pname": "FSharp.Data.WorldBank.Core", + "version": "6.3.0", + "hash": 
"sha256-QdL5ylUCvvrhvnnSPWj4MfN7B78hMbb5IRmozK7oJjM=" + }, + { + "pname": "FSharp.Data.WorldBank.Core", + "version": "6.4.1", + "hash": "sha256-nUyyziwpY58UnBNpqFoe/1bgDfQIq6gOqtQIwAo7x/c=" + }, + { + "pname": "FSharp.Data.Xml.Core", + "version": "6.3.0", + "hash": "sha256-67ftkfQJZ3iD62YKFh8Tu9Fuusb96KlKWxgykP1Wd9U=" + }, + { + "pname": "FSharp.Data.Xml.Core", + "version": "6.4.1", + "hash": "sha256-ZiD2aiD5yUZR4CDZl6mh4ji1G3xvm4Yd9Gya/e+D32w=" + }, + { + "pname": "FSharpPlus", + "version": "1.5.0", + "hash": "sha256-jQUlF3hsi3xpg+AdTnQw2L+lzbvTh5BIyLXCdVT6u6M=" + }, + { + "pname": "FSharpPlus", + "version": "1.7.0", + "hash": "sha256-6hDoDOnMFXQC5Hrk6Fhd+Wj+PbPFXzL9+xLIqgILJuY=" + }, + { + "pname": "FsPickler", + "version": "5.3.2", + "hash": "sha256-hjtm55aPJllzcVMPjFP4KYiEEBYtCcrUhbVOR+34agg=" + }, + { + "pname": "KdTree", + "version": "1.4.1", + "hash": "sha256-R4+L26pJoliLiwMuxmJDoa3Vf16gBq417fN+iNCy7Yc=" + }, + { + "pname": "MathNet.Numerics", + "version": "5.0.0", + "hash": "sha256-RHJCVM6OxquJF7n5Mbe/oNbucBbkge6ULcbAczOgmVo=" + }, + { + "pname": "MathNet.Numerics.FSharp", + "version": "5.0.0", + "hash": "sha256-pPbh8JdmMjBgEu84c/qV4YJ+LLr4+c31C6t++u29qBs=" + }, + { + "pname": "MessagePack", + "version": "3.1.3", + "hash": "sha256-OBn7iltr/rdE7ZKmv0MCUQSS+6OJKUYtlHdTbhEwzzE=" + }, + { + "pname": "MessagePack.Annotations", + "version": "3.1.3", + "hash": "sha256-o+T3u+xaHtW1c7AeWysCmIDUfN8lRhes2LoW5iQBafs=" + }, + { + "pname": "MessagePackAnalyzer", + "version": "3.1.3", + "hash": "sha256-5t4Av4CQ8HI7y9aAw+2qcOp+fsY0/3PdaFPJeCEAXQ0=" + }, + { + "pname": "Microsoft.NET.StringTools", + "version": "17.11.4", + "hash": "sha256-lWfzY35WQ+iKS9TpuztDTljgF9CIORhFhFEm0p1dVBE=" + }, + { + "pname": "Microsoft.NETCore.Platforms", + "version": "1.1.0", + "hash": "sha256-FeM40ktcObQJk4nMYShB61H/E8B7tIKfl9ObJ0IOcCM=" + }, + { + "pname": "Microsoft.NETCore.Targets", + "version": "1.1.0", + "hash": "sha256-0AqQ2gMS8iNlYkrD+BxtIg7cXMnr9xZHtKAuN4bjfaQ=" + }, + { + "pname": "ProjNET", + "version": "2.0.0", + "hash": "sha256-GjBnuGXmdFagIw9mX51Kpu/nn4gXta6a0cK/dxOWaZY=" + }, + { + "pname": "runtime.any.System.IO", + "version": "4.3.0", + "hash": "sha256-vej7ySRhyvM3pYh/ITMdC25ivSd0WLZAaIQbYj/6HVE=" + }, + { + "pname": "runtime.any.System.Reflection", + "version": "4.3.0", + "hash": "sha256-ns6f++lSA+bi1xXgmW1JkWFb2NaMD+w+YNTfMvyAiQk=" + }, + { + "pname": "runtime.any.System.Reflection.Primitives", + "version": "4.3.0", + "hash": "sha256-LkPXtiDQM3BcdYkAm5uSNOiz3uF4J45qpxn5aBiqNXQ=" + }, + { + "pname": "runtime.any.System.Runtime", + "version": "4.3.0", + "hash": "sha256-qwhNXBaJ1DtDkuRacgHwnZmOZ1u9q7N8j0cWOLYOELM=" + }, + { + "pname": "runtime.any.System.Text.Encoding", + "version": "4.3.0", + "hash": "sha256-Q18B9q26MkWZx68exUfQT30+0PGmpFlDgaF0TnaIGCs=" + }, + { + "pname": "runtime.any.System.Threading.Tasks", + "version": "4.3.0", + "hash": "sha256-agdOM0NXupfHbKAQzQT8XgbI9B8hVEh+a/2vqeHctg4=" + }, + { + "pname": "runtime.native.System", + "version": "4.3.0", + "hash": "sha256-ZBZaodnjvLXATWpXXakFgcy6P+gjhshFXmglrL5xD5Y=" + }, + { + "pname": "runtime.unix.System.Private.Uri", + "version": "4.3.0", + "hash": "sha256-c5tXWhE/fYbJVl9rXs0uHh3pTsg44YD1dJvyOA0WoMs=" + }, + { + "pname": "Serilog", + "version": "4.2.0", + "hash": "sha256-7f3EpCsEbDxXgsuhE430KVI14p7oDUuCtwRpOCqtnbs=" + }, + { + "pname": "Serilog.Sinks.Console", + "version": "6.0.0", + "hash": "sha256-QH8ykDkLssJ99Fgl+ZBFBr+RQRl0wRTkeccQuuGLyro=" + }, + { + "pname": "Serilog.Sinks.File", + "version": "6.0.0", + "hash": 
"sha256-KQmlUpG9ovRpNqKhKe6rz3XMLUjkBqjyQhEm2hV5Sow=" + }, + { + "pname": "Serilog.Sinks.Seq", + "version": "9.0.0", + "hash": "sha256-NnAkRbxwQGdNXz6DDONRxorNh1nqH2TfAQtokbq5qDw=" + }, + { + "pname": "System.IO", + "version": "4.3.0", + "hash": "sha256-ruynQHekFP5wPrDiVyhNiRIXeZ/I9NpjK5pU+HPDiRY=" + }, + { + "pname": "System.Memory", + "version": "4.5.3", + "hash": "sha256-Cvl7RbRbRu9qKzeRBWjavUkseT2jhZBUWV1SPipUWFk=" + }, + { + "pname": "System.Numerics.Vectors", + "version": "4.5.0", + "hash": "sha256-qdSTIFgf2htPS+YhLGjAGiLN8igCYJnCCo6r78+Q+c8=" + }, + { + "pname": "System.Private.Uri", + "version": "4.3.0", + "hash": "sha256-fVfgcoP4AVN1E5wHZbKBIOPYZ/xBeSIdsNF+bdukIRM=" + }, + { + "pname": "System.Reflection", + "version": "4.3.0", + "hash": "sha256-NQSZRpZLvtPWDlvmMIdGxcVuyUnw92ZURo0hXsEshXY=" + }, + { + "pname": "System.Reflection.Emit.ILGeneration", + "version": "4.3.0", + "hash": "sha256-mKRknEHNls4gkRwrEgi39B+vSaAz/Gt3IALtS98xNnA=" + }, + { + "pname": "System.Reflection.Emit.Lightweight", + "version": "4.3.0", + "hash": "sha256-rKx4a9yZKcajloSZHr4CKTVJ6Vjh95ni+zszPxWjh2I=" + }, + { + "pname": "System.Reflection.Primitives", + "version": "4.3.0", + "hash": "sha256-5ogwWB4vlQTl3jjk1xjniG2ozbFIjZTL9ug0usZQuBM=" + }, + { + "pname": "System.Runtime", + "version": "4.3.0", + "hash": "sha256-51813WXpBIsuA6fUtE5XaRQjcWdQ2/lmEokJt97u0Rg=" + }, + { + "pname": "System.Text.Encoding", + "version": "4.3.0", + "hash": "sha256-GctHVGLZAa/rqkBNhsBGnsiWdKyv6VDubYpGkuOkBLg=" + }, + { + "pname": "System.Threading.Tasks", + "version": "4.3.0", + "hash": "sha256-Z5rXfJ1EXp3G32IKZGiZ6koMjRu0n8C1NGrwpdIen4w=" + } +] diff --git a/test/Tests.fs b/test/Tests.fs deleted file mode 100644 index f641c79..0000000 --- a/test/Tests.fs +++ /dev/null @@ -1,18 +0,0 @@ -module Tests - -open Expecto - -let server = - testList - "Server" - [ - testCase "Adding valid Todo" - <| fun _ -> - let expectedResult = Ok() - Expect.equal (Ok()) expectedResult "Result should be ok" - ] - -let all = testList "All" [ server ] - -[] -let main _ = runTests defaultConfig all \ No newline at end of file diff --git a/test/Tests.fsproj b/test/Tests.fsproj deleted file mode 100644 index 935c5f6..0000000 --- a/test/Tests.fsproj +++ /dev/null @@ -1,17 +0,0 @@ - - - - Exe - net9.0 - - - - - - - - - - - - \ No newline at end of file diff --git a/xtest/Arome.fs b/xtest/Arome.fs new file mode 100644 index 0000000..730855d --- /dev/null +++ b/xtest/Arome.fs @@ -0,0 +1,189 @@ +module Arome + +open System + +open Microsoft.Research.Science.Data + +open Xunit +open FsUnit.Xunit +open FsUnit.CustomMatchers +open Serilog +open ProjNet.FSharp + +open Oceanbox.FvcomKit + +let logger = + LoggerConfiguration() + .MinimumLevel.Is(Events.LogEventLevel.Verbose) + .WriteTo.Console() + .CreateLogger() + +do Log.Logger <- logger + +[] +let private path = "/data/archives/Arome/meps_det_sfc_20220102T00Z.nc" + +let trans = makeTransform CoordSys.WGS84 (CoordSys.LCCMet ()) + +// Use Netcdf to open dataset +let private openDataset (path: string) : DataSet = + let sw = Diagnostics.Stopwatch.StartNew() + let uri = NetCDF4.NetCDFUri() + do uri.FileName <- path + do uri.OpenMode <- ResourceOpenMode.ReadOnly + do uri.Deflate <- NetCDF4.DeflateLevel.Off + let ds = NetCDF4.NetCDFDataSet.Open uri + do Log.Debug $"openDataSet: {path} completed in {sw.ElapsedMilliseconds}ms" + + ds + +let private aromeGrid = { + Arome.SquareGrid.empty with + dimensions = 949, 1069 + BBox = { + minX = -18.12241427 + maxX = 54.24126163 + minY = 49.7653854 + maxY = 75.22869642 + center = 
27.12063081, 37.61434821 + } + squareSize = 2.444065489 +} + +[] +let ``Create grid``() = + use ds = openDataset path + let res = Arome.getGrid ds + + // "Should be able to create square grid from /data/archives/Arome/meps_det_sfc_20220102T00Z.nc" + res |> should be (ofCase <@ Result.Ok @>) + +[] +let ``test grid regularity``() = + let trans = makeTransform CoordSys.WGS84 (CoordSys.LCCMet ()) + use ds = openDataset path + let dimensions = ds.Dimensions["x"].Length, ds.Dimensions["y"].Length + let longs : float array2d = ds["longitude"].GetData () :?> float[,] + let lats : float array2d = ds["latitude"].GetData () :?> float[,] + // NOTE: The netcdf file dimensions are defined as (y, x) + let width = Array2D.length2 longs + let height = Array2D.length1 lats + let points : Arome.Points array = + let result = Array.create (width * height) Arome.Points.Zero + for i in 0 .. height - 1 do + for j in 0 .. width - 1 do + let lat = lats[i, j] + let lon = longs[i, j] + let p = lon, lat + // NOTE(simkir): Convert to lambert and undo all I've done... :( + let x, y = trans.project p + result[i * width + j] <- Arome.Points.OfTuple (single x, single y) + result + + let mutable acc = 0.0f + for i in 0 .. height - 1 do + for j in 0 .. width - 2 do + let p0 = points[i * width + j] + let p1 = points[i * width + j + 1] + let dx = if p0.x < p1.x then p1.x - p0.x else p0.x - p1.x + + acc <- acc + dx + + let count = width * height + let avgDiffX = acc / single count + Log.Debug("Avg. X distance: {Avg}m", avgDiffX) + + acc <- 0.0f + for i in 0 .. height - 2 do + for j in 0 .. width - 1 do + let p0 = points[i * width + j] + let p1 = points[(i + 1) * width + j] + let dy = if p0.y < p1.y then p1.y - p0.y else p0.y - p1.y + + acc <- acc + dy + let avgDiffY = acc / single count + Log.Debug("Avg. 
Y distance: {Avg}m", avgDiffY) + +[] +let ``point within first rect``() = + use ds = openDataset path + let res = Arome.getGrid ds + + match res with + | Ok grid -> + let width, _height = grid.dimensions + let p0 = grid.points[0 * width + 0] + let p1 = grid.points[1 * width + 1] + let rect = Arome.Rect.OfPoints p0 p1 + // { x = -1064984.25f y = -1353489.375f }, { x = -1062474.625f y = -1350985.875f } + let p2 = Arome.Points.OfTuple (-1063000.25f, -1353486.0f) + let within = Arome.Rect.pointWithin rect p2 + + within |> should equal true + | Error _ -> + failwith "Should create grid" + +[] +let ``Read uvs for index (1, 1)``() = + use ds = openDataset path + let res = Arome.getGrid ds + match res with + | Ok grid -> + let idx = + let lat = 50.35 + let lon = 0.304 + let p = trans.project((lon, lat)) + Arome.tryFindIndex grid p + + match idx with + | Some (i, j) -> + let u, v = Arome.readUV ds 0 i j + (u, v) |> should equal (8.530239f, 6.659096f) // "Find uv (8.530239, 6.659096) at index (1, 1)" + | None -> + failwith "Should find idx" + | Error _ -> + failwith "Should find grid" + +[] +let ``Try search index (1, 1069) based on map coords``() = + let lat = 72.800 + let lon = -17.850 + let p = trans.project((lon, lat)) + let res = + use ds = openDataset path + Arome.getGrid ds + + match res with + | Ok grid -> + let idx = Arome.tryFindIndex grid p + + idx |> should equal (Some (4, 1067)) // "Should find idx (1, 1069)" + | Error _ -> + failwith "Should find grid" + +[] +let ``Read uvs for pos (60.3562, 5.2178)``() = + let lat = 60.3562 + let lon = 5.2178 + let p = trans.project((lon, lat)) + use ds = openDataset path + let res = Arome.getGrid ds + match res with + | Ok grid -> + let idx = Arome.tryFindIndex grid p + match idx with + | Some (i, j) -> + let u, v = Arome.readUV ds 0 i j + // "Find uv (2.367153168, -0.3955917358) at pos (60.3562, 5.2178) almost in Bergen" + (u, v) |> should equal (-0.3750343323f, 7.744056702f) + | None -> + failwith "Should find idx" + | Error _ -> + failwith "Should find grid" + +[] +let ``Read uvs for t 23 x 43 y 144``() = + use ds = openDataset path + let u, v = Arome.readUV ds 23 43 144 + // "Find uv (2.367153168, -0.3955917358) at pos (60.3562, 5.2178) almost in Bergen" + (u, v) |> should equal (11.77926254f, 5.764093399f) \ No newline at end of file diff --git a/xtest/xtest.fsproj b/xtest/xtest.fsproj new file mode 100644 index 0000000..3a8bdea --- /dev/null +++ b/xtest/xtest.fsproj @@ -0,0 +1,38 @@ + + + + enable + Exe + xtest + net9.0 + linux-x64 + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/xtest/xunit.runner.json b/xtest/xunit.runner.json new file mode 100644 index 0000000..249d815 --- /dev/null +++ b/xtest/xunit.runner.json @@ -0,0 +1,3 @@ +{ + "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" +}