Compare commits

...

37 Commits

Author SHA1 Message Date
77257fbc5f Bump fvcomkit 5.13.0 -> 6.0.0-alpha.1 2026-01-07 10:12:15 +01:00
cdc0659aea Add rossby public ip to atlantis tilt instance 2026-01-07 10:12:15 +01:00
f4ca37cc56 Codex progress
- Link fix
- Atmo list
- New addGroupArchive endpoint
2026-01-07 10:12:15 +01:00
fafc30c288 Clean up DataAgent pkg refs 2026-01-07 10:12:15 +01:00
db54d632e4 Test archmaester url on ArchiveAgent creation
As in it will throw exceptions if the provided url is wrong :)
2026-01-07 10:12:15 +01:00
beb33033f5 Small DataAgent refactorings 2026-01-07 10:12:15 +01:00
19a03541cf Make DataAgent start/end -time check more robust
I think it was in the arome case that could fail. In a sense, we should
maybe assume that the files are ordered correctly? But then again, the
function checks start end times, and just that, which maybe should not
impose ordering.
2026-01-07 10:12:15 +01:00
e725db9d0a fix(DataAgent): Dispose db ctx on tryAddArchive 2026-01-07 10:12:15 +01:00
c5a711a863 fix(DataAgent): Delete archive attribs for proper cascading
Since both archives and files refer to attribs, both will be deleted by
deleting attribs.
2026-01-07 10:12:15 +01:00
2927b9b6c3 fix(DataAgent): Do not enforce contiguous for atmo 2026-01-07 10:12:15 +01:00
9c582d1329 fix(DataAgent): Dispose db ctx in Archivist.withDb 2026-01-07 10:12:15 +01:00
ac70ba77f9 Explicitly reference OB pkgs 2026-01-07 10:12:15 +01:00
9118cb91e1 feat(Sorcerer): Working Arome with new fvcomkit
On rossby, where we have to translate lat long to lambert projection in
memory.
2026-01-07 10:12:14 +01:00
6e96ec0153 The holy bug 2026-01-07 10:12:14 +01:00
4dd266f4c4 Allow wildcard in cors allow origin 2026-01-07 10:12:14 +01:00
892380f10b Refactor serilog and asp net logging startup config 2026-01-07 10:12:14 +01:00
74067a6e3b Fix sorcerer tests 2026-01-07 10:12:14 +01:00
f36e4c4f69 wip: Try to reproduce netcdf id error in tests 2026-01-07 10:12:14 +01:00
ca36543e3e Add netcdf to sorcerer ld lib path in shell.nix 2026-01-07 10:12:14 +01:00
ce27975f03 Make async mailbox variants 2026-01-07 10:12:14 +01:00
4c6226f938 Update Sorcerer.Arome for new Fvcom API 2026-01-07 10:12:14 +01:00
0df78c81fd Add sorcerer tests 2026-01-07 10:12:14 +01:00
d58ad1ec1d fix(Atlantis): Make internal inventory more trusting
In that if you reach internal endpoints with correct credentials, do not
require correct OpenFGA tuples. This is mostly for being able to use the
archivist cli with the plaintext auth, without needing to handle that
auth as an actual user.

This is mostly temporary ;)
2026-01-07 10:12:14 +01:00
f7978d92cf Allow arome add to fail continuity check 2026-01-07 10:12:14 +01:00
9f01b60528 Add saveFreq to index.json
So that we can interpret a subset of the frames actually in the files
2026-01-07 10:11:53 +01:00
8c1d1cc99d Switch serilogger console theme 2026-01-07 10:11:53 +01:00
13eed6f12e feat(cli): Add archivist check continuity cmd
Tests whether there are holes in the archive. It collects all the errors
instead of stopping at the first one.
2026-01-07 10:11:53 +01:00
ca3abeb275 fix(DataAgent): Do not wrap opening netcdf in Error
Just let it throw an exception, and the caller is therefore responsible
for catching it. Makes the usage more ergonomic.
2026-01-07 10:11:53 +01:00
179dee52f2 feat: Add check freq command to archivist cli 2026-01-07 10:11:53 +01:00
c6a4f86a90 Pass auth explicitly 2026-01-07 10:11:53 +01:00
141cacdb6d Refactor Sorcerer/Arome.fs 2026-01-07 10:11:53 +01:00
c37fe7195d Having fun with print formatting 2026-01-07 10:11:53 +01:00
Simen Lund Kirkvik
61f012c5d7 Add more logging 2026-01-07 10:11:53 +01:00
Simen Lund Kirkvik
329ea61519 Refactor Archivist Cli addArchive 2026-01-07 10:11:53 +01:00
Simen Lund Kirkvik
3f73cb3131 Format Archivist/Cli/ArchiveIndex.fs 2026-01-07 10:11:53 +01:00
Simen Lund Kirkvik
530ad4d501 Add Archivist.Cli.Tests 2026-01-07 10:11:53 +01:00
f562c71e8a Use Console.WriteLine for pretty printing
Idk, if printfn can do this
2026-01-07 10:11:53 +01:00
47 changed files with 1984 additions and 1323 deletions

View File

@@ -12,16 +12,23 @@
<PackageVersion Include="Thoth.Json.Giraffe" Version="6.0.0" />
<PackageVersion Include="Thoth.Json.Net" Version="12.0.0" />
<PackageVersion Include="Serilog" Version="4.2.0" />
<PackageVersion Include="Serilog.Sinks.Console" Version="6.0.0"/>
<PackageVersion Include="Serilog.Sinks.Console" Version="6.0.0" />
<PackageVersion Include="Drifters.Api" Version="6.22.0" />
<PackageVersion Include="Oceanbox.FvcomKit" Version="6.0.0-alpha.1" />
<!-- Tests -->
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
<PackageVersion Include="xunit.v3" Version="3.2.1" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
<PackageVersion Include="FsUnit" Version="7.1.1" />
<PackageVersion Include="FsUnit.xUnit" Version="7.1.1" />
<!-- Client -->
<PackageVersion Include="Fable.Browser.IndexedDB" Version="2.2.0" />
<PackageVersion Include="Fable.Browser.ResizeObserver" Version="1.0.0" />
<PackageVersion Include="Fable.Browser.WebGL" Version="1.3.0" />
<PackageVersion Include="Fable.Core" Version="4.4.0"/>
<PackageVersion Include="Fable.Core" Version="4.4.0" />
<PackageVersion Include="Fable.Elmish" Version="4.2.0" />
<PackageVersion Include="Fable.Fetch" Version="2.7.0" />
<PackageVersion Include="Fable.FontAwesome.Free" Version="3.0.0"/>
<PackageVersion Include="Fable.FontAwesome.Free" Version="3.0.0" />
<PackageVersion Include="Fable.Lit.Elmish" Version="1.6.2-oceanbox" />
<PackageVersion Include="Fable.Lit.React" Version="1.6.2-oceanbox" />
<PackageVersion Include="Fable.Lit" Version="1.6.2-oceanbox" />
@@ -32,17 +39,17 @@
<PackageVersion Include="Fable.Remoting.MsgPack" Version="1.24.0" />
<PackageVersion Include="Fable.SignalR.Elmish" Version="2.1.0" />
<PackageVersion Include="Fable.SimpleHttp" Version="3.6.0" />
<PackageVersion Include="Feliz.Router" Version="4.0.0"/>
<PackageVersion Include="Feliz.Router" Version="4.0.0" />
<PackageVersion Include="Feliz" Version="2.9.0" />
<PackageVersion Include="Feliz.UseElmish" Version="2.5.0" />
<PackageVersion Include="Feliz.CompilerPlugins" Version="2.2.0" />
<PackageVersion Include="Matplotlib.ColorMaps" Version="3.0.1" />
<PackageVersion Include="Thoth.Fetch" Version="3.0.1" />
<PackageVersion Include="Thoth.Json" Version="10.4.1"/>
<PackageVersion Include="Thoth.Json" Version="10.4.1" />
<!-- Serverpack -->
<PackageVersion Include="OpenFga.Sdk" Version="0.7.0"/>
<PackageVersion Include="FSharp.SystemTextJson" Version="1.3.13"/>
<PackageVersion Include="Saturn.OpenTelemetry" Version="0.6.0-alpha"/>
<PackageVersion Include="OpenFga.Sdk" Version="0.7.0" />
<PackageVersion Include="FSharp.SystemTextJson" Version="1.3.13" />
<PackageVersion Include="Saturn.OpenTelemetry" Version="0.6.0-alpha" />
<!-- Atlantis -->
<PackageVersion Include="Argu" Version="6.2.5" />
<PackageVersion Include="AspNetCore.Serilog.RequestLoggingMiddleware" Version="1.0.2" />
@@ -64,7 +71,6 @@
<PackageVersion Include="Microsoft.AspNetCore.Authentication.OpenIdConnect" Version="9.0.2" />
<PackageVersion Include="Microsoft.AspNetCore.DataProtection.StackExchangeRedis" Version="9.0.2" />
<PackageVersion Include="Microsoft.Extensions.Http.Polly" Version="9.0.2" />
<PackageVersion Include="Oceanbox.FvcomKit" Version="5.13.0" />
<PackageVersion Include="prometheus-net.AspNetCore" Version="8.2.1" />
<PackageVersion Include="Saturn" Version="0.17.0" />
<PackageVersion Include="SecurityCodeScan" Version="3.5.4">
@@ -83,15 +89,15 @@
<PackageVersion Include="ProjNet.FSharp" Version="5.2.0" />
<!-- Dapperizer -->
<PackageVersion Include="Oceanbox.SDSLite" Version="2.8.0" />
<PackageVersion Include="Dapper.FSharp" Version="4.9.0"/>
<PackageVersion Include="Microsoft.Extensions.Logging.Console" Version="9.0.2"/>
<PackageVersion Include="NetTopologySuite" Version="2.5.0"/>
<PackageVersion Include="Dapper.FSharp" Version="4.9.0" />
<PackageVersion Include="Microsoft.Extensions.Logging.Console" Version="9.0.2" />
<PackageVersion Include="NetTopologySuite" Version="2.5.0" />
<PackageVersion Include="Npgsql" Version="9.0.2" />
<PackageVersion Include="Npgsql.NetTopologySuite" Version="9.0.2"/>
<PackageVersion Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="9.0.2"/>
<PackageVersion Include="Npgsql.EntityFrameworkCore.PostgreSQL.NetTopologySuite" Version="9.0.2"/>
<PackageVersion Include="Npgsql.NetTopologySuite" Version="9.0.2" />
<PackageVersion Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="9.0.2" />
<PackageVersion Include="Npgsql.EntityFrameworkCore.PostgreSQL.NetTopologySuite" Version="9.0.2" />
<!-- Entity -->
<PackageVersion Include="Microsoft.EntityFrameworkCore" Version="9.0.1"/>
<PackageVersion Include="Microsoft.EntityFrameworkCore" Version="9.0.1" />
<PackageVersion Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.1">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>

View File

@@ -5,6 +5,7 @@
<Platform Name="x86" />
</Configurations>
<Folder Name="/Archivist/">
<Project Path="src/Archivist/src/Cli.Tests/Cli.Tests.fsproj" />
<Project Path="src/Archivist/src/Cli/Archivist.fsproj" />
<Project Path="src/Archivist/src/Client/Client.fsproj" />
</Folder>
@@ -52,5 +53,6 @@
</Folder>
<Folder Name="/Sorcerer/">
<Project Path="src/Sorcerer/src/Server/Sorcerer.fsproj" />
<Project Path="src\Sorcerer\src\Sorcerer.Tests\Sorcerer.Tests.fsproj" />
</Folder>
</Solution>

View File

@@ -0,0 +1,29 @@
namespace Oceanbox.Archivist.Cli

module ArchiveIndex =

    module EnsureContiguous =
        open Xunit

        // Shared fixture: every test uses the same archive start time and a
        // one-hour save frequency.
        let private archiveStart = System.DateTime (2024, 1, 1)
        let private hourly = System.TimeSpan.FromSeconds 3600.0

        /// A single file is trivially contiguous.
        [<Fact>]
        let ``one file`` () =
            let entries = [| "testfile1.nc", 1, archiveStart |]
            let outcome = ArchiveIndex.ensureContiguous archiveStart hourly entries
            Assert.True outcome.IsOk

        /// Two one-frame files exactly one save interval apart are contiguous.
        [<Fact>]
        let ``two files`` () =
            let entries =
                [|
                    "testfile1.nc", 1, archiveStart
                    // NOTE(simkir): Add one hour
                    "testfile2.nc", 1, System.DateTime (2024, 1, 1, 1, 0, 0)
                |]
            let outcome = ArchiveIndex.ensureContiguous archiveStart hourly entries
            Assert.True outcome.IsOk

View File

@@ -0,0 +1,35 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<Nullable>enable</Nullable>
<OutputType>Exe</OutputType>
<RootNamespace>Cli.Tests</RootNamespace>
<TargetFramework>net9.0</TargetFramework>
<!--
This template uses native xUnit.net command line options when using 'dotnet run' and
VSTest by default when using 'dotnet test'. For more information on how to enable support
for Microsoft Testing Platform, please visit:
https://xunit.net/docs/getting-started/v3/microsoft-testing-platform
-->
</PropertyGroup>
<ItemGroup>
<Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" />
</ItemGroup>
<ItemGroup>
<Compile Include="ArchiveIndex.fs" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="xunit.v3" />
<PackageReference Include="xunit.runner.visualstudio" />
<PackageReference Include="FSharp.Core" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Cli\Archivist.fsproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,3 @@
{
"$schema": "https://xunit.net/schema/current/xunit.runner.schema.json"
}

View File

@@ -40,29 +40,34 @@ let getArchiveBasePath (args: AddArchiveArgs) =
Log.Error e
failwith e
let getModelAreaArchives modelId =
withCliAuth (fun auth ->
let getModelAreaArchives auth modelId =
async {
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
let cli = api.inventoryApi ()
cli.getModelAreaArchives (modelId, ArchiveType.FromString "*:*:*"))
let! res = cli.getModelAreaArchives (modelId, ArchiveType.FromString "*:*:*")
return res
}
let getArchive archiveId =
withCliAuth (fun auth ->
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
let cli = api.adminApi ()
cli.getArchiveDto archiveId)
cli.getArchiveDto archiveId
)
let getFiles archiveId =
withCliAuth (fun auth ->
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
let cli = api.adminApi ()
cli.getFiles archiveId)
cli.getFiles archiveId
)
let getAllFiles archiveId =
withCliAuth (fun auth ->
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
let cli = api.adminApi ()
cli.getAllFiles archiveId)
cli.getAllFiles archiveId
)
/// <summary>
/// Send request to Archmaester for it to add new archive entry
@@ -74,27 +79,32 @@ let getAllFiles archiveId =
/// <param name="reverse">Frames are in reverse order</param>
/// <param name="json">Additional info in JSON</param>
/// <param name="published">Publish archive</param>
let postArchive (idx, modelArea, basePath, files, reverse, json, published) =
let args = idx, modelArea, basePath, files, reverse, json, published
withCliAuth (fun auth ->
let postArchive auth (associated: ArchiveId array) (dto: ArchiveDto) : Async<Result<unit array, string array>> =
async {
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
let adminApi = api.adminApi ()
let sw = Diagnostics.Stopwatch.StartNew ()
instantiateArchiveDto args
|> Result.map (fun dto ->
async {
let! r1 = adminApi.newArchive dto
sw.Restart ()
Log.Information ("Posting new archive {Name}", dto.props.name)
let! r1 = adminApi.newArchive dto
Log.Information ("Posting new archive completed in {Elapsed}ms", sw.ElapsedMilliseconds)
let! (r2: Result<unit, string>[]) =
idx.associated
|> Array.map (fun a -> adminApi.addAssociation (idx.archiveId, a))
|> sequence
let (r: Result<unit list, string>) = r1 :: List.ofArray r2 |> sequence
return r |> Result.map ignore
})
|> Result.defaultWith (Error >> async.Return))
sw.Restart ()
Log.Information ("Posting {Count} new associations", associated.Length)
let! (r2: Result<unit, string> array) =
associated
|> Array.map (fun a -> adminApi.addAssociation (dto.props.archiveId, a))
|> Async.Sequential
Log.Information (
"Posting {Count} new associations completed in {Elapsed}ms",
associated.Length,
sw.ElapsedMilliseconds
)
let results =
Array.append [| r1 |] r2 |> FsToolkit.ErrorHandling.Array.sequenceResultA
return results
}
/// <summary>
/// Send request to Archmaester for it to add a new sub archive entry
@@ -105,7 +115,8 @@ let postSubArchive (force: bool) (dto: SubArchiveDef) =
withCliAuth (fun auth ->
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
let cli = api.archiveApi ()
cli.addSubArchive dto)
cli.addSubArchive dto
)
/// <summary>
/// Send request to Archmaester for it to update an archive entry
@@ -132,7 +143,8 @@ let postArchiveUpdate (idx: ArchiveIndex) =
}
return! archiveApi.updateArchive (idx.archiveId, form)
})
}
)
let modifyArchive (args: ModifyArchiveArgs) =
let aid = args.ArchiveId
@@ -165,7 +177,8 @@ let modifyArchive (args: ModifyArchiveArgs) =
if y.Length = 2 then
(single y[0], single y[1]) |> Some
else
None)
None
)
|> sequence
|> fun y ->
Log.Debug $"%A{y}"
@@ -177,7 +190,8 @@ let modifyArchive (args: ModifyArchiveArgs) =
return! archiveApi.updateArchive (aid, form)
with exn ->
return Error exn.Message
})
}
)
|> execAsync
let modifyArchiveAttribs (args: ModifyArchiveAttribsArgs) =
@@ -217,7 +231,8 @@ let modifyArchiveAttribs (args: ModifyArchiveAttribsArgs) =
return! adminApi.updateArchiveAttribs (aid, form)
with exn ->
return Error exn.Message
})
}
)
|> execAsync
let deleteArchives (archive: string) = archive |> retireArchive |> ignore
@@ -229,64 +244,90 @@ let readDriftersInputJson file =
Log.Error $"Drifters input not found: {file}"
""
let rec addArchive (args: AddArchiveArgs) =
let basePath = getArchiveBasePath args
Log.Information $"BasePath: %s{basePath}"
type private Reverse = { reverse: bool option }
let idx =
match args.Index with
| Some ix -> ix
| None -> $"{basePath}/index.json"
|> readArchiveIdx
type private ParticleFile = { simulation: Reverse }
let files = getArchiveFiles idx.archiveType basePath (args.Files |> Array.ofList)
type private FieldFile = { analysis: Reverse }
let modelArea = initiateModelArea idx basePath
let private getJson basePath (archiveType: ArchiveType) : string * bool =
let inputPath = Path.Join [| basePath; "input.json" |]
let tryFindJson (f: 'T -> bool) decode =
let str = inputPath |> readDriftersInputJson
let res = str |> decode
match res with
| Ok sim -> str, f sim
| Error e -> failwith e
let json, reverse =
match idx.archiveType with
| Drifters (_, DriftersFormat.Particle) ->
let s = readDriftersInputJson (Path.Join [| basePath; "input.json" |])
match archiveType with
| Drifters (_, Particle) ->
let tryDecode = Thoth.Json.Net.Decode.Auto.fromString<ParticleFile>
let getReverse (sim: ParticleFile) =
sim.simulation.reverse |> Option.defaultValue false
let r =
Thoth.Json.Net.Decode.Auto.fromString<{| simulation: {| reverse: bool option |} |}> s
|> Result.map (fun x ->
match x.simulation.reverse with
| Some reverse -> reverse
| None -> false)
tryFindJson getReverse tryDecode
| Drifters (_, DriftersFormat.Field2D)
| Drifters (_, DriftersFormat.Field3D) ->
let tryDecode = Thoth.Json.Net.Decode.Auto.fromString<FieldFile>
let getReverse (sim: FieldFile) =
sim.analysis.reverse |> Option.defaultValue false
match r with
| Ok x -> s, x
| Error e -> failwith e
| Drifters (_, DriftersFormat.Field2D)
| Drifters (_, DriftersFormat.Field3D) ->
let s = readDriftersInputJson (Path.Join [| basePath; "input.json" |])
tryFindJson getReverse tryDecode
| _ -> "", false
let r =
Thoth.Json.Net.Decode.Auto.fromString<{| analysis: {| reverse: bool option |} |}> s
|> Result.map (fun x ->
match x.analysis.reverse with
| Some reverse -> reverse
| None -> false)
let addArchive (auth: string) (args: AddArchiveArgs) =
async {
let sw = Diagnostics.Stopwatch.StartNew ()
let basePath = getArchiveBasePath args
let indexPath =
match args.Index with
| Some ix -> ix
| None -> $"{basePath}/index.json"
match r with
| Ok x -> s, x
| Error e -> failwith e
| _ -> "", false
Log.Information ("Add archive: {BasePath}", basePath)
let published = args.Published |> Option.defaultValue true
let idx = indexPath |> readArchiveIdx
let saveRes =
try
postArchive (idx, modelArea, basePath, files, reverse, json, published)
|> Async.RunSynchronously
with e ->
Log.Error (e, "Archivist.addArchive {ArchmaesterUrl}", Settings.archmaesterUrl)
Error "Could not add archive"
Log.Information ("Reading archive files: {BasePath}", basePath)
let files = args.Files |> Array.ofList |> getArchiveFiles idx.archiveType basePath
Log.Information ("Reading archive files: {BasePath} completed", basePath)
match saveRes with
| Ok () -> Log.Information $"Successfully added archive %s{idx.name}"
| Error err -> Log.Error $"Error: {err}"
Log.Information ("Initializing model area", basePath)
let! modelAreaRes = initiateModelArea auth idx basePath
match modelAreaRes with
| Ok modelArea ->
Log.Information ("Found model area", modelArea)
let json, reverse = getJson basePath idx.archiveType
// TODO(simkir): Default to false?
let published = args.Published |> Option.defaultValue true
Log.Information ("Creating DTO from {FileCount} files", files.Length)
let dtoRes =
instantiateArchiveDto published reverse modelArea basePath json idx files
match dtoRes with
| Ok dto ->
Log.Information ("Creating DTO completed in {Elapsed}ms", sw.ElapsedMilliseconds)
try
// TODO: Confirm associated exists before posting, maybe.
let! res = postArchive auth idx.associated dto
match res with
| Ok _ -> Log.Information $"Successfully added archive %s{idx.name}"
| Error errs ->
Log.Error ("[Archivist] Error adding archive to {ArchmaesterUrl}", Settings.archmaesterUrl)
for err in errs do
Log.Error (" - {Error}", err)
with e ->
Log.Error (e, "[Archivist] Exception adding archive to {ArchmaesterUrl}", Settings.archmaesterUrl)
| Error errs ->
Log.Error ("[Archivist] Error creating Archive DTO:")
for err in errs do
Log.Error (" - {Error}", err)
| Error err ->
Log.Error (
"[Archivist] Error validating model area when adding archive to {ArchmaesterUrl}: {Error}",
Settings.archmaesterUrl,
err
)
}
let createSubArchive (args: SubArchiveArgs) =
let fetchRefArchive = getArchive >> Async.RunSynchronously
@@ -322,27 +363,44 @@ let getAcl archiveId =
withCliAuth (fun auth ->
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
let cli = api.aclApi ()
cli.getAcl archiveId)
cli.getAcl archiveId
)
let printAcl acl =
let formatAcl label (x: string[]) =
if x.Length > 0 then
let str = Array.intersperse "\n " x |> Array.fold (fun a x -> a ++ x) ""
$"{label}:\n {str}\n"
else
""
let print label (entries: string array) =
if (Array.isEmpty >> not) entries then
printfn label
for entry in entries do
printfn " - %s" entry
acl.owners |> formatAcl "owners" |> (fun s -> printf $"{s}")
acl.users |> formatAcl "users" |> (fun s -> printf $"{s}")
acl.groups |> formatAcl "groups" |> (fun s -> printf $"{s}")
printfn "ACL"
acl.owners |> print "owners"
acl.users |> print "users"
acl.groups |> print "groups"
printfn ""
let printFiles (files: ArchiveFile array) =
let fmt = "yyyy-MM-dd HH:mm"
printfn "Files"
printfn "%4s | %-32s | %-16s | %-16s" "#" "Name" "Start time" "End time"
files
|> Array.iteri (fun i file ->
let name = String.truncate 32 file.name
let startTime = file.startTime.ToString fmt
let endTime = file.endTime.ToString fmt
printfn "%4i | %32s | %16s | %16s" i name startTime endTime
)
printfn ""
let showArchive (args: ShowArchiveArgs) =
let archiveId = args.ArchiveId
let fmt = "yyyy-MM-dd HH:mm"
async {
match! getArchive archiveId with
| Ok a ->
| Ok _archive ->
let! files =
if args.All then getAllFiles archiveId
elif args.Files then getFiles archiveId
@@ -358,14 +416,10 @@ let showArchive (args: ShowArchiveArgs) =
| Ok a -> printAcl a
| Error e -> Log.Error e
f.series
|> Array.iteri (fun n x ->
let name = String.truncate 32 x.name |> fun y -> y.PadRight (32, ' ')
printfn $" %-3d{n}: {name} | {x.startTime.ToString (fmt)} - {x.endTime.ToString (fmt)}")
printFiles f.series
| Error e -> Log.Error e
| Error e -> Log.Error e
}
|> Async.RunSynchronously
let showRelatedArchives (args: ShowArchiveArgs) =
let archiveId = args.ArchiveId
@@ -375,7 +429,8 @@ let showRelatedArchives (args: ShowArchiveArgs) =
withCliAuth (fun auth ->
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
let cli = api.inventoryApi ()
cli.getAssociated (archiveId, ArchiveType.Any))
cli.getAssociated (archiveId, ArchiveType.Any)
)
// cli.getRelated(archiveId, Atmo(AtmoVariant.Any, AtmoFormat.Any)))
match r with
| Ok ra -> ra |> Array.iter (fun y -> printfn $"{y.name}: {y.archiveId}")
@@ -385,7 +440,7 @@ let showRelatedArchives (args: ShowArchiveArgs) =
let augmentArchive (args: AugmentArgs) =
let aid = args.Archive
let fnames = args.Files |> List.map Path.GetFullPath
let fnames = args.Files |> Array.ofList |> Array.map Path.GetFullPath
withCliAuth (fun auth ->
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
@@ -397,14 +452,14 @@ let augmentArchive (args: AugmentArgs) =
let reverse = archive.files.series[0].reverse // assume if one file is reversed, all files are
let a = archiveTypeToIArchive archive.props.archiveType
let files = fnames |> Array.map (readDataSet a)
let fs =
fnames
|> traverse (readDataSet a)
|> Result.bind (fun files ->
let _, _, t = List.head files
let startUtc = t.ToUniversalTime ()
let f = Array.ofList files
ensureContiguous startUtc archive.props.freq f |> Result.map fst)
let _, _, t = Array.head files
let startUtc = t.ToUniversalTime ()
let f = files
let saveFreq = TimeSpan.FromSeconds (float archive.props.freq)
let res = ensureContiguous startUtc saveFreq f |> Result.map fst
res |> Result.mapError Array.head
match fs with
| Ok files ->
@@ -425,7 +480,8 @@ let augmentArchive (args: AugmentArgs) =
Log.Error $"{err}"
return Error err
})
}
)
|> execAsync
let resizeArchive (args: ResizeArgs) =
@@ -436,19 +492,115 @@ let resizeArchive (args: ResizeArgs) =
withCliAuth (fun auth ->
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
let archmaester = api.archiveApi ()
async { return! archmaester.resizeArchive (aid, first, last) })
async { return! archmaester.resizeArchive (aid, first, last) }
)
|> execAsync
let addType (t: string) =
withCliAuth (fun auth ->
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
let adminApi = api.adminApi ()
async { return! adminApi.addType t })
async { return! adminApi.addType t }
)
|> execAsync
let deleteType (t: string) =
withCliAuth (fun auth ->
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
let adminApi = api.adminApi ()
async { return! adminApi.removeType t })
|> execAsync
async { return! adminApi.removeType t }
)
|> execAsync
let private readArchiveSaveFreq (archive: IArchive) (netCdfFile: string) =
async {
use ds = DatasetAgent.openDataSet netCdfFile
let saveFreq = archive.getSaveFreq ds
Log.Information ("Save freq. is {Hours}h {Min}m", saveFreq.Hours, saveFreq.Minutes)
}
type ArchiveCheckInfo = { Index: ArchiveIndex; Archive: IArchive; Files: FileInfo array }
let private findArchiveInfo path : Result<ArchiveCheckInfo, string> =
if isDir path then
Log.Information ("Checking directory {Archive}", path)
let dir = DirectoryInfo path
let indexFile =
let path = Path.Join [| dir.FullName; "index.json" |]
FileInfo path
let index = readArchiveIdx indexFile.FullName
let archive = archiveTypeToIArchive index.archiveType
let files =
getArchiveFiles index.archiveType dir.FullName [| "" |] |> Array.map FileInfo
Ok { Index = index; Archive = archive; Files = files }
else
let file = FileInfo path
let dir = file.Directory
match file.Extension with
| ".nc" ->
Log.Information ("Checking netcdf file {Archive}", path)
let indexFile =
let path = Path.Join [| dir.FullName; "index.json" |]
FileInfo path
let index = readArchiveIdx indexFile.FullName
let archive = archiveTypeToIArchive index.archiveType
Ok { Index = index; Archive = archive; Files = [| file |] }
| ".json" ->
Log.Information ("Checking index file {Archive}", path)
let index = readArchiveIdx file.FullName
let archive = archiveTypeToIArchive index.archiveType
let files =
getArchiveFiles index.archiveType dir.FullName [| "" |] |> Array.map FileInfo
Ok { Index = index; Archive = archive; Files = files }
| _ -> Error (sprintf "Unrecognized file %s" file.Extension)
let checkFreq (args: CheckFreqArgs) =
async {
try
let res = findArchiveInfo args.Archive
match res with
| Ok info ->
let first = info.Files |> Array.head
do! readArchiveSaveFreq info.Archive first.FullName
| Error err -> Log.Error ("Error in archive: {Error}", err)
with e ->
Log.Error (e, "Exception checking archive")
}
let private countError res =
match res with
| Ok _ -> 0
| Error _ -> 1
let checkCont (args: CheckContArgs) =
async {
try
let res = findArchiveInfo args.Archive
match res with
| Ok info ->
let files = info.Files |> Array.skip args.SkipFiles
let firstNetCdfFile = files |> Array.head
use ds = DatasetAgent.openDataSet firstNetCdfFile.FullName
let startTime = info.Archive.getStartTime ds
let saveFreq = info.Archive.getSaveFreq ds
let framesPerFile =
info.Index.frameCount
|> Option.orElse args.FrameCount
|> Option.defaultValue (info.Archive.getNumFrames ds)
let! ct = Async.CancellationToken
Log.Debug ("CancellationToken {Hash}", ct.GetHashCode())
let! results =
files
|> testContinuity args.SkipFiles startTime saveFreq framesPerFile info.Archive
let errorCount = results |> Array.sumBy countError
Log.Information ("Archive has {Count} errors", errorCount)
| Error err -> Log.Error ("Error checking archive: {Error}", err)
with e ->
Log.Error (e, "Exception checking archive")
}

View File

@@ -1,11 +1,13 @@
module ArchiveIndex
open FSharpPlus
open Microsoft.Research.Science.Data
open ProjNet.FSharp
open Serilog
open System
open System.IO
open Microsoft.Research.Science.Data
open FsToolkit.ErrorHandling
open ProjNet.FSharp
open Serilog
open Thoth.Json.Net
open Archmaester.Dto
@@ -58,6 +60,7 @@ type ArchiveIndex = {
center: (single * single) option
initialZoom: float option
startTime: DateTime
frameCount: int option
owners: string array option
users: string[]
groups: string[]
@@ -71,11 +74,12 @@ type ArchiveIndex = {
modelArea = None
name = ""
description = ""
archiveType = Fvcom(FvcomVariant.Any, FvcomFormat.Any)
archiveType = Fvcom (FvcomVariant.Any, FvcomFormat.Any)
projection = ""
center = None
initialZoom = None
startTime = DateTime.UnixEpoch
frameCount = None
owners = None
users = [||]
groups = [||]
@@ -104,6 +108,7 @@ module ArchiveIndex =
startTime =
get.Optional.Field "startTime" Decode.datetimeUtc
|> Option.defaultValue DateTime.UnixEpoch
frameCount = get.Optional.Field "frameCount" Decode.int
owners = get.Optional.Field "owners" (Decode.array Decode.string)
users = get.Required.Field "users" (Decode.array Decode.string)
groups = get.Required.Field "groups" (Decode.array Decode.string)
@@ -114,19 +119,19 @@ module ArchiveIndex =
let inline withCliAuth f =
match Settings.cliAuth with
| None ->
Log.Fatal "You must provide ARCHMAESTER_AUTH"
Error "You are not logged in" |> async.Return
| Some auth ->
Log.Debug "You are logged in"
f auth
| None ->
Log.Fatal "You must provide ARCHMAESTER_AUTH"
Error "You are not logged in" |> async.Return
let private (|NumSeq|_|) x =
let rex = Text.RegularExpressions.Regex @".+[-_]([0-9]+)\..+$"
let m = rex.Match x
if m.Groups.Count > 1 then
Some(int m.Groups[1].Value, x)
Some (int m.Groups[1].Value, x)
else
None
@@ -136,10 +141,13 @@ let private numSort (fs: string array) =
|> Array.sortBy fst
|> Array.collect (fun (_, grp) ->
grp
|> Array.map (function
| NumSeq(n, f) -> (n, f)
| x -> (-1, x))
|> Array.sortBy fst)
|> Array.map (
function
| NumSeq (n, f) -> n, f
| x -> -1, x
)
|> Array.sortBy fst
)
|> Array.filter (fun (n, _) -> n >= 0)
|> Array.map snd
@@ -151,23 +159,28 @@ let getBasePath: string -> string = Path.GetFullPath >> Path.GetDirectoryName
let stripBasePath basePath path =
let p = Path.GetFullPath path
Path.GetRelativePath(basePath, p)
Path.GetRelativePath (basePath, p)
let readIndexFile (f: string) =
let index = File.ReadAllText f
let readIndexFile (path: string) =
let sw = Diagnostics.Stopwatch.StartNew ()
let index = File.ReadAllText path
Log.Information ("Reading index file {File} completed in {Elapsed}ms", path, sw.ElapsedMilliseconds)
Decode.fromString ArchiveIndex.decoder index
let readArchiveIdx (path: string) =
let a = File.GetAttributes path
let indexPath =
if a.HasFlag FileAttributes.Directory then
Path.Combine (path, "index.json")
else
path
if a.HasFlag FileAttributes.Directory then
Path.Combine(path, "index.json")
else
path
indexPath
|> readIndexFile
|> Result.defaultWith (fun e ->
Log.Error e
failwith $"{e}")
failwith $"{e}"
)
let readModelAreaFile (f: string) =
let index = File.ReadAllText f
@@ -177,7 +190,7 @@ let readModelAreaIdx (path: string) =
let a = File.GetAttributes path
if a.HasFlag FileAttributes.Directory then
Path.Combine(path, "model.json")
Path.Combine (path, "model.json")
else
path
|> readModelAreaFile
@@ -197,140 +210,146 @@ let tryGetTimesDimension (ds: DataSet) : int =
type IArchive =
abstract getStartTime: DataSet -> DateTime
abstract getSaveFreq: DataSet -> int
abstract getSaveFreq: DataSet -> TimeSpan
abstract getNumFrames: DataSet -> int
let getAromeTime (ds: DataSet) n : TimeSpan option =
let getAromeTime (ds: DataSet) (n: int) : TimeSpan option =
try
let times = ds["time"].GetData() :?> float[]
times[n] |> TimeSpan.FromDays |> Some
let sw = Diagnostics.Stopwatch.StartNew()
let variable = ds["time"]
let times = variable.GetData([| n |], [| 1 |]) :?> float array
Log.Verbose("Read {Count} time entries in {Elapsed}ms", times.Length, sw.ElapsedMilliseconds)
times |> Array.tryHead |> Option.map TimeSpan.FromSeconds
with exn ->
Log.Error(exn, "ArchiveIndex.getAromeFileStartTime exception: {exn.Message}")
Log.Error (exn, "ArchiveIndex.getAromeFileStartTime exception")
None
let getAromeDateTime (ds: DataSet) n : DateTime option =
getAromeTime ds n
|> Option.map (fun time ->
let startTime = Settings.julianStart + time
startTime.ToUniversalTime())
getAromeTime ds n |> Option.map (fun time -> DateTime.UnixEpoch + time)
let FvcomArchive =
{ new IArchive with
/// Get the start time of the first file in the sequence, assuming it is a netcdf file with the field Itimes
member this.getStartTime ds =
member _.getStartTime ds =
Oceanbox.FvcomKit.Fvcom.getTimeSpanSinceStart ds 0
|> Option.map (fun t -> (Settings.julianStart + t).ToUniversalTime())
|> Option.map (fun t -> (Settings.julianStart + t).ToUniversalTime ())
|> Option.defaultValue DateTime.UtcNow
/// Find the save frequency by checking the distance between the two first entries
member this.getSaveFreq ds =
member _.getSaveFreq ds =
try
let t0 = Oceanbox.FvcomKit.Fvcom.getTimeSpanSinceStart ds 0 |> Option.get
let t1 = Oceanbox.FvcomKit.Fvcom.getTimeSpanSinceStart ds 1 |> Option.get
let dt = t1 - t0
dt.TotalSeconds |> int
dt
with _ ->
Log.Warning "Could not determine time step, defaulting to 3600 seconds"
3600
TimeSpan.FromHours 1.0
/// Find the frames per file count by looking into the first entry
member this.getNumFrames ds =
member _.getNumFrames ds =
let numFrames = Oceanbox.FvcomKit.Fvcom.getNumFrames ds
numFrames
}
let AromeArchive =
{ new IArchive with
member this.getStartTime ds =
member _.getStartTime ds =
getAromeDateTime ds 0 |> Option.defaultValue DateTime.UtcNow
member this.getSaveFreq ds =
getAromeTime ds 0
|> Option.bind (fun t0 ->
getAromeTime ds 1
|> Option.map (fun t1 ->
member _.getSaveFreq ds =
let opt =
option {
let! t0 = getAromeDateTime ds 0
let! t1 = getAromeDateTime ds 1
let dt = t1 - t0
dt.TotalSeconds |> int))
|> Option.defaultValue -1
return dt
}
member this.getNumFrames ds = tryGetTimesDimension ds
opt |> Option.defaultValue (TimeSpan.FromHours 1)
member _.getNumFrames ds = tryGetTimesDimension ds
}
let DriftersArchive =
{ new IArchive with
member x.getStartTime ds =
member _.getStartTime ds =
try
ds.Metadata["startTime"] :?> string |> DateTime.Parse
with _ ->
Log.Error $"Could not get start time from drifters"
exit 1
member x.getSaveFreq ds =
member _.getSaveFreq ds =
try
ds.Metadata["saveFreq"] :?> string |> int
ds.Metadata["saveFreq"] :?> string |> float |> TimeSpan.FromSeconds
with _ ->
Log.Error "Could not get save frequency from drifters"
-1
TimeSpan.MinValue
member x.getNumFrames ds = tryGetTimesDimension ds
member _.getNumFrames ds = tryGetTimesDimension ds
}
let getDsTimes (ds: DataSet) =
try
ds["times"].GetData() :?> float[] |> Array.map TimeSpan.FromDays
ds["times"].GetData () :?> float[] |> Array.map TimeSpan.FromDays
with exn ->
Log.Error(exn, $"ArchiveIndex.getDsTimes(): {exn.Message}")
Log.Error (exn, $"ArchiveIndex.getDsTimes(): {exn.Message}")
exit 1
let getDsDateTime (ds: DataSet) n =
let times = getDsTimes ds
let startTime = Settings.julianStart + times[n]
startTime.ToUniversalTime()
startTime.ToUniversalTime ()
let FvStatsArchive =
{ new IArchive with
member x.getStartTime ds =
member _.getStartTime ds =
if ds.Metadata.ContainsKey "years" then
let y = ds.Metadata["years"] :?> string |> String.split [ ","; ";" ] |> Seq.head
DateTime.Parse($"{y}-01-01")
let yearsStr = ds.Metadata["years"] :?> string
let y = yearsStr.Split [| ','; ';' |] |> Array.head
DateTime.Parse $"{y}-01-01"
else
DateTime.Parse($"{DateTime.UtcNow.Year}-01-01")
DateTime.Parse $"{DateTime.UtcNow.Year}-01-01"
member x.getSaveFreq ds = 3600 * 24 * 30 // month
member x.getNumFrames ds = 12 // year
member _.getSaveFreq ds = TimeSpan.FromDays 30 // month
member _.getNumFrames ds = 12 // year
}
let getArchiveFiles format basePath (files: string array) =
let fs =
let getArchiveFiles (format: ArchiveType) basePath (files: string array) : string array =
let readFiles (files: string array) : string array =
if files.Length = 1 then
Directory.GetFiles(basePath, "*.nc", SearchOption.AllDirectories)
Directory.GetFiles (basePath, "*.nc", SearchOption.AllDirectories)
else
files
let f =
let sort: string array -> string array =
match format with
| Atmo _
| Drifters _
| FvStats _ -> fs
| Fvcom _ -> numSort fs
| Any -> failwith "not possible"
| Atmo _ -> Array.sort
| Drifters _ -> id
| FvStats _ -> id
| Fvcom _ -> numSort
| Any -> id
f
files
|> readFiles
|> sort
|> Utils.tap (Array.iteri (fun idx path -> Log.Debug (" {Index,4}. {FilePath}", idx, path)))
let updateCoordinateProjection (p: string option) (file: string) =
let proj =
p
|> Option.map Projection.FromString // sanitize
|> Option.defaultValue (Projection.UTM 33) // default to UTM33 for now
|> fun x -> x.ToString()
|> Option.defaultValue (UTM 33) // default to UTM33 for now
|> fun x -> x.ToString ()
let ds = openDataSetRw file |> Result.get
use ds = openDataSetRw file
ds.Metadata["CoordinateProjection"] <- proj
ds.Commit()
ds.Commit ()
// TODO(simkir): Create an actual parser?
let readBoundingPolygon projection path : Result<(single * single) array, string> =
let readBoundingPolygon projection path : Result<(single * single) array, string array> =
let polyFileLines path =
if File.Exists path then
try
@@ -342,40 +361,44 @@ let readBoundingPolygon projection path : Result<(single * single) array, string
else
[]
let verify (lines: string list) : Result<string array, string> =
let verify (lines: string list) : Result<string array, string array> =
match lines with
| [] -> Error "The file is empty"
| [] -> Error [| "The file is empty" |]
| countLine :: coordinateLines ->
countLine.Split(separator = ' ', options = StringSplitOptions.RemoveEmptyEntries)
|> List.ofArray
countLine.Split (separator = ' ', options = StringSplitOptions.RemoveEmptyEntries)
|> function
| [ polygon; countStr ] ->
try
int countStr |> Ok
with _ ->
Error "The count could not be converted to an int"
| [| polygon; countStr |] ->
let countRes =
try
int countStr |> Ok
with _ ->
Error [| "The count could not be converted to an int" |]
countRes
|> Result.bind (fun _ ->
if polygon = "polygon" then
coordinateLines |> Array.ofList |> Ok
else
Error "Missing polygon signature from bounding.poly")
| _ -> Error "The first line should be 'polygon [count]'"
Error [| "Missing polygon signature from bounding.poly" |]
)
| _ -> Error [| "The first line should be 'polygon [count]'" |]
let readCoordinates (lines: string array) : Result<(single * single) array, string> =
let readCoordinates (lines: string array) : Result<(single * single) array, string array> =
match lines with
| [||] -> Error "The bounding.poly file is empty"
| [||] -> Error [| "The bounding.poly file is empty" |]
| coordinateLines ->
coordinateLines
|> traverse (fun line ->
|> Array.traverseResultA (fun line ->
let coordinates =
line.Split(separator = ' ', options = StringSplitOptions.RemoveEmptyEntries)
line.Split (separator = ' ', options = StringSplitOptions.RemoveEmptyEntries)
if coordinates.Length = 2 then
Ok(single coordinates[0], single coordinates[1])
Ok (single coordinates[0], single coordinates[1])
else
Error "There should only be two numbers per line")
Error "There should only be two numbers per line"
)
monad' {
result {
let polyfile = polyFileLines path
if polyfile.Length > 0 then
@@ -386,48 +409,147 @@ let readBoundingPolygon projection path : Result<(single * single) array, string
| Some p ->
let prj = p |> Projection.FromString |> Projection.ToCoordinateSystem
let toLatLon = makeTransform prj CoordSys.WGS84
cx |> Array.map toLatLon.project
| None -> cx
return cx |> Array.map toLatLon.project
| None -> return cx
else
[||]
return [||]
}
let testContinuity
(skipFiles: int)
(startTime: DateTime)
(saveFreq: TimeSpan)
(framesPerFile: int)
(archive: IArchive)
(files: FileInfo array)
: Async<Result<unit, string> array> =
async {
let mutable currentTime = startTime
let res =
files
|> Array.mapi (fun idx file ->
use ds = openDataSet file.FullName
let fileStartTime = archive.getStartTime ds
let fileDuration = saveFreq * float framesPerFile
if currentTime = fileStartTime then
let nextExpectedStartTime = currentTime + fileDuration
Log.Information (
"{Index,4}: {File} starts at {FileStartTime} and next should start at {ExpectedStartTime}",
skipFiles + idx,
file.Name,
fileStartTime,
nextExpectedStartTime
)
do currentTime <- nextExpectedStartTime
Ok ()
else
let nextExpectedStartTime = fileStartTime + fileDuration
Log.Error (
"{File} start time {FileStartTime} does not equal the expected start time {CurrentTime}",
file.Name,
fileStartTime,
currentTime
)
do currentTime <- nextExpectedStartTime
Error $"{file} start time {fileStartTime} does not equal the expected start time {currentTime}"
)
return res
}
/// <summary>
/// Tests whether all the files are contiguous. Meaning, that for each file, that its endtime and the next files
/// starttime match. In other words, no holes in the archives.
/// </summary>
let ensureContiguous startTime saveFreq files =
let ensureContiguous
(startTime: DateTime)
(saveFreq: TimeSpan)
(files: (string * int * DateTime) array)
: Result<(string * int * DateTime) array * DateTime, string array> =
let filesRes, endDate =
files
|> Array.fold
(fun (files, currentTime: DateTime) (fileName, frames, fileStartTime) ->
let fileDuration = saveFreq * frames |> float |> TimeSpan.FromSeconds
let nextExpectedStartTime = currentTime + fileDuration
(([], startTime), files)
||> Array.fold (fun (files, currentTime: DateTime) (fileName, frames, fileStartTime) ->
let fileDuration = saveFreq * float frames
let nextExpectedStartTime = currentTime + fileDuration
let fileRes =
Log.Debug $"%s{fileName} started at {fileStartTime}"
let fileRes =
Log.Debug $"%s{fileName} started at {fileStartTime}"
if currentTime = fileStartTime then
Ok(fileName, frames, currentTime)
else
Log.Error
$"File {fileName} start time {fileStartTime} does not equal the expected start time {currentTime}"
if currentTime = fileStartTime then
Ok (fileName, frames, currentTime)
else
Error
$"File {fileName} start time {fileStartTime} does not equal the expected start time {currentTime}"
Error
$"File {fileName} start time {fileStartTime} does not equal the expected start time {currentTime}"
fileRes :: files, nextExpectedStartTime)
([], startTime)
fileRes :: files, nextExpectedStartTime
)
filesRes
|> List.rev
|> Array.ofList
|> sequence
|> Array.sequenceResultA
|> Result.map (fun files -> files, endDate)
let private collectArchiveFiles
basePath
(archiveStartTime: DateTime)
(saveFreq: TimeSpan)
(frameCountOpt: int option)
reverse
(archive: IArchive)
(files: FileInfo array)
: DateTime * ArchiveFile list =
let sw = Diagnostics.Stopwatch.StartNew()
let _, endTime, archiveFiles =
((0, archiveStartTime, []), files)
||> Array.fold (fun (idx, currentTime, files) file ->
do sw.Restart()
use ds = openDataSet file.FullName
let frames = frameCountOpt |> Option.defaultValue (archive.getNumFrames ds)
let fileStartTime = archive.getStartTime ds
let fileDuration = saveFreq * float frames
let fileEndTime = currentTime + fileDuration
let archiveFile = {
name = stripBasePath basePath file.Name
frames = frames
ordering = idx // intermediate ordering, server might change it later
startTime = fileStartTime
endTime = fileEndTime
reverse = reverse
}
let nextExpectedStartTime =
if currentTime = fileStartTime then
Log.Debug (
"{Index,4}: {File} starts at {FileStartTime} and next should start at {ExpectedStartTime} ({Elapsed}ms)",
idx,
file.Name,
fileStartTime,
fileEndTime,
sw.ElapsedMilliseconds
)
fileEndTime
else
Log.Error (
"{Index,4}: {File} start time {FileStartTime} does not equal the expected start time {CurrentTime} ({Elapsed}ms)",
idx,
file.Name,
fileStartTime,
currentTime,
sw.ElapsedMilliseconds
)
fileStartTime + fileDuration
idx + 1, nextExpectedStartTime, archiveFile :: files
)
endTime, archiveFiles
let tryAddModelArea (model: ModelAreaIndex) basePath =
withCliAuth (fun auth ->
let api = Remoting.v1.InternalApi(Settings.archmaesterUrl, auth)
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
let modelAreaApi = api.modelAreaApi ()
let adminApi = api.adminApi ()
@@ -454,90 +576,97 @@ let tryAddModelArea (model: ModelAreaIndex) basePath =
let! m = adminApi.addModelArea newModelArea
return m |> Result.map (fun _ -> newModelArea)
})
}
)
let initiateModelArea (idx: ArchiveIndex) (basePath: string) =
withCliAuth (fun auth ->
let api = Remoting.v1.InternalApi(Settings.archmaesterUrl, auth)
let inventoryApi = api.inventoryApi ()
let private getParentModelAreaId (api: Remoting.v1.InternalApi) rid =
async {
let adminApi = api.adminApi ()
try
let! parent = adminApi.getArchiveDto rid
match parent with
| Ok p -> return Ok p.props.modelArea
| Error e -> return failwith $"Could not get model area id: {e}"
with e ->
return failwith $"Failed to get parent archive: %A{rid} with %A{e.Message}"
}
let private checkModelArea (api: Remoting.v1.InternalApi) (reference: ArchiveId option) (modelAreaId: Guid option) =
async {
let modelAreaApi = api.modelAreaApi ()
let inventoryApi = api.inventoryApi ()
let failBadly () =
Log.Error "Either modelArea or reference must be specified!"
Environment.Exit 1
failwith "Missing modelArea or reference."
let getParentModelAreaId rid =
async {
try
let! parent = adminApi.getArchiveDto rid
match parent with
| Ok p ->
return Ok p.props.modelArea
| Error e -> return failwith $"Could not get model area id: {e}"
with e -> return failwith $"Failed to get parent archive: %A{rid} with %A{e.Message}"
}
match modelAreaId with
| Some mid ->
let! modelArea = modelAreaApi.getModelArea mid
return modelArea
| None ->
match reference with
| Some refId ->
match! inventoryApi.getArchive refId with
| Ok ref -> return! modelAreaApi.getModelArea ref.modelArea
| Error e ->
Log.Error $"{e}"
return None
| None -> return failBadly ()
}
let checkModelArea () =
let failBadly () =
Log.Error "Either modelArea or reference must be specified!"
Environment.Exit 1
failwith "Missing modelArea or reference."
match idx.modelArea with
// TODO: Verify model area valid/exists?
let initiateModelArea (auth: string) (idx: ArchiveIndex) (basePath: string) : Async<Result<ModelAreaId, string>> =
let initAtmo (api: Remoting.v1.InternalApi) =
async {
let modelAreaApi = api.modelAreaApi ()
// NOTE(simkir): For now, arome takes one model area which it is an archive in. In this example only
// world, which basically covers the nordics, which is where most (if not all) model areas are in.
match idx.modelArea with
| Some mid ->
let! modelAreaOpt = modelAreaApi.getModelArea mid
match modelAreaOpt with
| Some m -> return Ok m.modelAreaId
// TODO(simkir): Does it have to?
| None -> return Error "Arome must specify a model area"
| None -> return Error "Arome must specify a model area"
}
let initFvcom (api: Remoting.v1.InternalApi) =
async {
let modelAreaApi = api.modelAreaApi ()
let! res = modelAreaApi.getModelAreaId idx.name
match res with
| Ok id ->
Log.Debug $"Existing model area: %A{res}"
return Ok id
| Error _ ->
match! checkModelArea api idx.reference idx.modelArea with
| Some ma ->
Log.Debug $"New model area: %A{ma}"
return Ok ma.modelAreaId
| None ->
if idx.reference.IsSome then
async {
match! inventoryApi.getArchive idx.reference.Value with
| Ok ref -> return! modelAreaApi.getModelArea ref.modelArea
| Error e ->
Log.Error $"{e}"
return None
}
else
failBadly ()
| Some mid -> modelAreaApi.getModelArea mid
// match model with
// | ModelAreaId mid -> modelAreaApi.getModelArea mid
// | ModelArea m -> tryAddModelArea m basePath
// | ModelAreaFile f ->
// async {
// match readModelAreaFile f with
// | Ok m -> return! tryAddModelArea m basePath
// | Error e -> return Error e
// }
Log.Error "Error"
return Error "Error"
}
async {
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
match idx.archiveType with
| FvStats _
| Atmo _ ->
// NOTE(simkir): For now, arome takes one model area which it is an archive in. In this example only world,
// which basically covers the nordics, which is where most (if not all) model areas are in.
idx.modelArea
|> function
| Some mid ->
modelAreaApi.getModelArea mid
|> Async.map (
function
| Some m -> Ok m.modelAreaId
| None -> Error "Arome must specify a model area")
| None -> Error "Arome must specify a model area" |> async.Return
let! res = initAtmo api
return res
| Fvcom _ ->
async {
let! model = modelAreaApi.getModelAreaId idx.name
match model with
| Ok m ->
Log.Debug $"Existing model area: %A{model}"
return Ok m
| Error _ ->
match! checkModelArea () with
| Some ma ->
Log.Debug $"New model area: %A{ma}"
return Ok ma.modelAreaId
| None ->
Log.Error "Error"
return Error "Error"
}
let! res = initFvcom api
return res
| _ ->
match idx.reference with
| Some rid -> getParentModelAreaId rid
| None -> failwith $"Missing archive reference for non-base archive {idx.archiveType}")
|> Async.RunSynchronously
|> Result.defaultValue HelloWorld
| Some rid ->
let! res = getParentModelAreaId api rid
return res
| None -> return failwith $"Missing archive reference for non-base archive {idx.archiveType}"
}
let archiveTypeToIArchive t =
match t with
@@ -549,85 +678,165 @@ let archiveTypeToIArchive t =
printfn "instantiateArchiveDto: not possbile"
exit 1
let readDataSet (a: IArchive) f : Result<string * int * DateTime, string> =
openDataSet f
|> Result.map (fun ds ->
let framesPerFile = a.getNumFrames ds
let startTime = a.getStartTime ds
let startTimeUtc = startTime.ToUniversalTime()
f, framesPerFile, startTimeUtc)
let readDataSet (a: IArchive) f : string * int * DateTime =
let sw = Diagnostics.Stopwatch.StartNew ()
use ds = openDataSet f
let framesPerFile = a.getNumFrames ds
let startTime = a.getStartTime ds
Log.Debug (
"Read {FileName} frames per file {FramesPerFile} with startTime {StartTime} in {Elapsed}ms ",
f,
framesPerFile,
startTime,
sw.ElapsedMilliseconds
)
let instantiateArchiveDto (idx, modelArea, basePath, files, reverse, json, published) : Result<ArchiveDto, string> =
let firstFile = Array.head files
// Read meta data from Fvcom or drifters
let polygonPath = Path.Combine [| basePath; "bounding.poly" |]
let a = archiveTypeToIArchive idx.archiveType
f, framesPerFile, startTime
monad {
let! ds = openDataSet firstFile
let saveFreq = a.getSaveFreq ds
let instantiateArchiveDto
(reverse: bool)
(published: bool)
(modelArea: ModelAreaId)
(basePath: string)
(json: string)
(idx: ArchiveIndex)
(files: string array)
: Result<ArchiveDto, string array> =
result {
let sw = Diagnostics.Stopwatch.StartNew ()
// Read meta data from Fvcom or drifters
let polygonPath = Path.Combine [| basePath; "bounding.poly" |]
let archive = archiveTypeToIArchive idx.archiveType
let archiveStartTime, archiveSaveFreq =
let firstFile = Array.head files
use ds = openDataSet firstFile
Log.Information (
"Opening first NetCDF file {FirstFile} completed in {Elapsed}ms",
firstFile,
sw.ElapsedMilliseconds
)
sw.Restart ()
let! (fs: (string * int * DateTime) array, endTimeUtc) =
files
|> traverse (readDataSet a)
|> Result.bind (fun f ->
let _, _, startTime = f[0]
ensureContiguous startTime saveFreq f)
let startTime = archive.getStartTime ds
Log.Information (
"Getting start time from {FirstFile} completed with {StartTime} in {Elapsed}ms",
firstFile,
startTime,
sw.ElapsedMilliseconds
)
sw.Restart ()
let saveFreq = archive.getSaveFreq ds
let _, _, startTime = fs[0]
Log.Information $"Archive %s{idx.name} start at {startTime} and ends at {endTimeUtc}"
Log.Debug $"fs: %A{fs}"
let proj = if idx.projection = "" then None else Some idx.projection
let! boundingPoly = readBoundingPolygon proj polygonPath
startTime, saveFreq
let files' = {
basePath = basePath
series =
sw.Restart ()
match idx.archiveType with
| Atmo _ ->
let archiveEndTime, seriesList =
files
|> Array.map FileInfo
|> collectArchiveFiles basePath archiveStartTime archiveSaveFreq idx.frameCount reverse archive
let series = Array.ofList seriesList
return {
props = {
archiveId = idx.archiveId
reference = idx.reference
modelArea = modelArea
name = idx.name
description = idx.description
archiveType = idx.archiveType
projection = idx.projection
focalPoint = idx.center |> Option.defaultValue (0f, 0f)
defaultZoom = idx.initialZoom |> Option.defaultValue 9.0 |> single
freq = int archiveSaveFreq.TotalSeconds
frames = series |> Array.sumBy _.frames
startTime = archiveStartTime
endTime = archiveEndTime
expires = None
created = DateTime.MinValue
owner = idx.owners |> Option.bind Array.tryHead |> Option.defaultValue ""
isPublic = false
isPublished = published
polygon = None
json = ""
}
acl = {
owners = idx.owners |> Option.defaultValue Array.empty
users = idx.users
groups = idx.groups
shares = [||] // TODO: fix
}
files = { basePath = basePath; series = series }
polygon = [||]
retired = false
json = json
}
| _ ->
Log.Information "Reading all files and validating contiguous time"
let! (fs: (string * int * DateTime) array, endTimeUtc) =
files
|> Array.map (readDataSet archive)
|> ensureContiguous archiveStartTime archiveSaveFreq
Log.Information (
"Reading all files and validating contiguous time completed in {Elapsed}ms",
sw.ElapsedMilliseconds
)
Log.Information $"Archive %s{idx.name} start at {archiveStartTime} and ends at {endTimeUtc}"
Log.Debug $"fs: %A{fs}"
let proj = if idx.projection = "" then None else Some idx.projection
let! boundingPoly = readBoundingPolygon proj polygonPath
sw.Restart ()
Log.Information "Creating file DTOs"
let series =
fs
|> Array.mapi (fun n (path, frames, startT) -> {
name = stripBasePath basePath path
frames = frames
ordering = n // intermediate ordering, server might change it later
startTime = startT
endTime = startT.AddSeconds(saveFreq * frames |> float)
endTime = startT.AddSeconds (archiveSaveFreq.TotalSeconds * float frames)
reverse = reverse
})
}
let files' = { basePath = basePath; series = series }
Log.Information ("Creating file DTOs completed in {Elapsed}ms", sw.ElapsedMilliseconds)
return {
props = {
archiveId = idx.archiveId
reference = idx.reference
modelArea = modelArea
name = idx.name
description = idx.description
archiveType = idx.archiveType
projection = idx.projection
focalPoint = idx.center |> Option.defaultValue (0f, 0f)
defaultZoom = idx.initialZoom |> Option.defaultValue 9.0 |> single
freq = saveFreq
frames = fs |> Array.fold (fun a (_, f, _) -> a + f) 0
startTime = startTime
endTime = endTimeUtc
expires = None
created = DateTime.MinValue
owner = idx.owners |> Option.bind Array.tryHead |> Option.defaultValue ""
isPublic = false
isPublished = published
polygon = None
json = ""
return {
props = {
archiveId = idx.archiveId
reference = idx.reference
modelArea = modelArea
name = idx.name
description = idx.description
archiveType = idx.archiveType
projection = idx.projection
focalPoint = idx.center |> Option.defaultValue (0f, 0f)
defaultZoom = idx.initialZoom |> Option.defaultValue 9.0 |> single
freq = int archiveSaveFreq.TotalSeconds
frames = fs |> Array.fold (fun a (_, f, _) -> a + f) 0
startTime = archiveStartTime
endTime = endTimeUtc
expires = None
created = DateTime.MinValue
owner = idx.owners |> Option.bind Array.tryHead |> Option.defaultValue ""
isPublic = false
isPublished = published
polygon = None
json = ""
}
acl = {
owners = idx.owners |> Option.defaultValue Array.empty
users = idx.users
groups = idx.groups
shares = [||] // TODO: fix
}
files = files'
polygon = boundingPoly
retired = false
json = json
}
acl = {
owners = idx.owners |> Option.defaultValue Array.empty
users = idx.users
groups = idx.groups
shares = [||] // TODO: fix
}
files = files'
polygon = boundingPoly
retired = false
json = json
}
}
let retireArchive (archive: string) =
@@ -639,7 +848,7 @@ let retireArchive (archive: string) =
readArchiveIdx archive |> fun x -> x.archiveId
withCliAuth (fun auth ->
let api = Remoting.v1.InternalApi(Settings.archmaesterUrl, auth)
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
let adminApi = api.adminApi ()
let archiveApi = api.archiveApi ()
@@ -668,16 +877,18 @@ let retireArchive (archive: string) =
try
do! delFromDb aid
return Ok()
return Ok ()
with e ->
Log.Error $"{e}"
return Error e.Message
})
}
)
|> Async.RunSynchronously
let deleteModelArea (mid: ModelAreaId) =
// TODO: retire all dependent archies
withCliAuth (fun auth ->
let api = Remoting.v1.InternalApi(Settings.archmaesterUrl, auth)
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
let adminApi = api.adminApi ()
async { return! adminApi.deleteModelArea mid })
async { return! adminApi.deleteModelArea mid }
)

View File

@@ -9,6 +9,7 @@
<Version>7.1.0</Version>
</PropertyGroup>
<ItemGroup>
<Compile Include="Utils.fs"/>
<Compile Include="Settings.fs"/>
<Compile Include="Args.fs"/>
<Compile Include="ArchiveIndex.fs"/>
@@ -22,6 +23,7 @@
<PackageReference Include="Fargo.CmdLine" />
<PackageReference Include="FSharp.Data" />
<PackageReference Include="FSharpPlus" />
<PackageReference Include="FsToolkit.ErrorHandling" />
<PackageReference Include="Microsoft.EntityFrameworkCore" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
@@ -36,6 +38,10 @@
<PackageReference Include="Thoth.Json.Net" />
<PackageReference Include="FSharp.Core" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Oceanbox.FvcomKit" />
<PackageReference Include="ProjNet.FSharp" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\DataAgent\src\DataAgent\Oceanbox.DataAgent.fsproj"/>
<ProjectReference Include="..\..\..\DataAgent\src\Entity\Entity.csproj"/>

View File

@@ -17,6 +17,7 @@ let showVersion () =
type CmdType =
| ListCmd
| ShowCmd
| CheckCmd
| AddCmd
| DeleteCmd
| ModifyCmd
@@ -32,6 +33,11 @@ type ShowType =
| Archive
| Model
type CheckType =
| Freq
/// Contigous archive
| Cont
type AddType =
| Archive
| Sub
@@ -54,6 +60,16 @@ type ModifyType =
| ArchiveAttribs
| Model
type CheckContArgs = {
Archive: string
FrameCount: int option
SkipFiles: int
}
type CheckFreqArgs = {
Archive: string
}
type ListArchiveArgs = {
All: bool
Retired: bool
@@ -145,6 +161,8 @@ type AugmentArgs = { Archive: ArchiveId; Files: string list }
type ResizeArgs = { From: int; To: int option; Archive: ArchiveId }
type Command =
| CheckFreq of CheckFreqArgs
| CheckCont of CheckContArgs
| ListArchives of ListArchiveArgs
| ListModels of ListModelArgs
| ListTypes of int
@@ -170,6 +188,13 @@ type Command =
| Resize of ResizeArgs
| Version of int
let tryParseInt (str: string) : Result<int, string> =
try
Int32.Parse str
|> Ok
with e ->
Error $"Invalid int {e.Message}"
let parseArchiveId (s: string) =
match ArchiveId.TryParse s with
| true, g -> Ok g
@@ -206,20 +231,22 @@ let argParser: Arg<Command * int> =
if version then
return Version logLevel, logLevel
else
match!
cmd "list" "ls" "List entities" |>> CmdType.ListCmd
<|> (cmd "show" "s" "Show entity details" |>> CmdType.ShowCmd)
<|> (cmd "add" "a" "Add entities" |>> CmdType.AddCmd)
<|> (cmd "delete" "rm" "Delete entities" |>> CmdType.DeleteCmd)
<|> (cmd "modify" "m" "Modify entities" |>> CmdType.ModifyCmd)
<|> (cmd "augment" "au" "Augment archive" |>> CmdType.AugmentCmd)
<|> (cmd "resize" "r" "Resize archive" |>> CmdType.ResizeCmd)
with
let! res =
cmd "list" "ls" "List entities" |>> ListCmd
<|> (cmd "show" "s" "Show entity details" |>> ShowCmd)
<|> (cmd "check" "c" "Check archives" |>> CheckCmd)
<|> (cmd "add" "a" "Add entities" |>> AddCmd)
<|> (cmd "delete" "rm" "Delete entities" |>> DeleteCmd)
<|> (cmd "modify" "m" "Modify entities" |>> ModifyCmd)
<|> (cmd "augment" "au" "Augment archive" |>> AugmentCmd)
<|> (cmd "resize" "r" "Resize archive" |>> ResizeCmd)
match res with
| CmdType.ListCmd ->
match!
cmd "archives" "a" "List archives" |>> ListType.Archives
<|> (cmd "models" "m" "List models" |>> ListType.Models)
<|> (cmd "types" "t" "List types" |>> ListType.Types)
cmd "archives" "a" "List archives" |>> Archives
<|> (cmd "models" "m" "List models" |>> Models)
<|> (cmd "types" "t" "List types" |>> Types)
with
| ListType.Archives ->
let! all = flag "all" null "List all archives"
@@ -285,6 +312,31 @@ let argParser: Arg<Command * int> =
and! modelId = arg "model-id" "Model ID" |> reqArg |> parse parseModelAreaId
return ShowModel { Verbose = verbose; Json = json; ModelId = modelId }, logLevel
| CheckCmd ->
let! res =
cmd "freq" null "Save frequency" |>> CheckType.Freq
<|> (cmd "cont" null "Whether archive is contiguous" |>> CheckType.Cont)
match res with
| CheckType.Freq ->
let! path = arg "path" "Path to archive you want to check" |> reqArg
let cmd' = CheckFreq {
Archive = path
}
return cmd', logLevel
| CheckType.Cont ->
let! frameCount = opt "frame-count" null "count" "Archive frames per file count" |> optParse tryParseInt
and! skipFiles = opt "skip-files" null "count" "Archive files to skip" |> optParse tryParseInt
and! path = arg "path" "Path to archive you want to check" |> reqArg
let cmd' = CheckCont {
Archive = path
FrameCount = frameCount
SkipFiles = skipFiles |> Option.defaultValue 0
}
return cmd', logLevel
| CmdType.AddCmd ->
match!
cmd "archive" "a" "Add archive" |>> AddType.Archive

View File

@@ -15,86 +15,105 @@ let getArchives archiveIds =
|> Async.RunSynchronously
|> sequence
let listModels verbose (filterM: ModelArea -> bool) (filterA: ArchiveProps -> bool) =
ModelAreaCli.getBaseModelAreas System.Guid.Empty
|> Result.map (
Array.filter filterM
>> Array.iter (fun m ->
let archives =
ArchiveCli.getModelAreaArchives m.modelAreaId |> Async.RunSynchronously
match archives with
| Error e -> Log.Error e
| Ok arch ->
let ax = arch |> Array.filter filterA
let listModels auth verbose (filterM: ModelArea -> bool) (filterA: ArchiveProps -> bool) : Async<unit> =
async {
let! ct = Async.CancellationToken
Log.Debug("Cancel token {Hash}", ct.GetHashCode())
try
let worldId = System.Guid.Empty
let! world = ModelAreaCli.getModelArea auth worldId
Log.Information "Fetching world completed"
let! worldArchives = ArchiveCli.getModelAreaArchives auth worldId
Utils.printModelTerse (world |> Result.defaultWith failwith)
Utils.printArchivesTerse (worldArchives |> Result.defaultWith failwith)
if verbose then
Utils.printModel false m
Utils.printArchives false ax
else
Utils.printModelTerse m
Utils.printArchivesTerse ax
printfn "")
)
|> Result.defaultWith (fun e -> Log.Error ("Listing.listModels error fetching model areas {Error}", e))
Log.Information $"Fetching world model areas:"
let! modelAreas = ModelAreaCli.getBaseModelAreas auth worldId
let filtered = modelAreas |> Array.filter filterM
let listArchives (args: ListArchiveArgs) =
let verbose = args.Verbose
let retired = args.Retired
for m in filtered do
let! archives = ArchiveCli.getModelAreaArchives auth m.modelAreaId
match archives with
| Ok arch ->
let ax = arch |> Array.filter filterA
let matchO (a: ArchiveDto) =
match args.Owner with
| None -> true
| Some owner ->
let pat = owner
a.acl.owners |> Array.exists (fun y -> Regex.IsMatch (y, pat))
if verbose then
Utils.printModel false m
Utils.printArchives false ax
else
Utils.printModelTerse m
Utils.printArchivesTerse ax
let matchU (a: ArchiveDto) =
match args.User with
| None -> true
| Some user ->
let pat = user
a.acl.users |> Array.exists (fun y -> Regex.IsMatch (y, pat))
|| a.acl.owners |> Array.exists (fun y -> Regex.IsMatch (y, pat))
printfn ""
| Error e ->
Log.Error e
with ex ->
Log.Error(ex, "Listing.listModels error fetching model areas")
}
let matchG (a: ArchiveDto) =
match args.Group with
| None -> true
| Some group ->
let pat = group
a.acl.groups |> Array.exists (fun y -> Regex.IsMatch (y, pat))
let listArchives (auth: string) (args: ListArchiveArgs) =
async {
let verbose = args.Verbose
let retired = args.Retired
let matchM (m: ModelArea) =
match args.ModelName with
| None -> true
| Some modelName ->
let pat = modelName
Regex.IsMatch (m.name, pat)
let matchO (a: ArchiveDto) =
match args.Owner with
| None -> true
| Some owner ->
let pat = owner
a.acl.owners |> Array.exists (fun y -> Regex.IsMatch (y, pat))
let matchA (a: ArchiveProps) =
match args.ArchiveName with
| None -> true
| Some archiveName ->
let pat = archiveName
printfn $"List Archives pattern %s{pat}"
Regex.IsMatch (a.name, pat)
let matchU (a: ArchiveDto) =
match args.User with
| None -> true
| Some user ->
let pat = user
a.acl.users |> Array.exists (fun y -> Regex.IsMatch (y, pat))
|| a.acl.owners |> Array.exists (fun y -> Regex.IsMatch (y, pat))
// NOTE: Only checks for archive props
let matchBox (a: ArchiveProps) = matchA a //&& matchO a && matchU a && matchG a
let matchG (a: ArchiveDto) =
match args.Group with
| None -> true
| Some group ->
let pat = group
a.acl.groups |> Array.exists (fun y -> Regex.IsMatch (y, pat))
if args.All then
listModels verbose matchM matchA
elif args.Type.IsSome then
let t =
let spec = args.Type |> Option.get
let matchM (m: ModelArea) =
match args.ModelName with
| None -> true
| Some modelName ->
let pat = modelName
Regex.IsMatch (m.name, pat)
try
ArchiveType.FromString spec
with _ ->
Log.Warning $"Unknown archive type: %s{spec}"
ArchiveType.Any
let matchA (a: ArchiveProps) =
match args.ArchiveName with
| None -> true
| Some archiveName ->
let pat = archiveName
printfn $"List Archives pattern %s{pat}"
Regex.IsMatch (a.name, pat)
let archiveF (a: ArchiveProps) = a.archiveType = t && matchBox a
listModels verbose matchM archiveF
else
let archiveF (a: ArchiveProps) = a.reference.IsNone && matchBox a
listModels verbose matchM archiveF
// NOTE: Only checks for archive props
let matchBox (a: ArchiveProps) = matchA a //&& matchO a && matchU a && matchG a
if args.All then
do! listModels auth verbose matchM matchA
return ()
elif args.Type.IsSome then
let t =
let spec = args.Type |> Option.get
try
ArchiveType.FromString spec
with _ ->
Log.Warning $"Unknown archive type: %s{spec}"
ArchiveType.Any
let archiveF (a: ArchiveProps) = a.archiveType = t && matchBox a
do! listModels auth verbose matchM archiveF
return ()
else
let archiveF (a: ArchiveProps) = a.reference.IsNone && matchBox a
do! listModels auth verbose matchM archiveF
return ()
}

View File

@@ -1,11 +1,16 @@
module Archivist
open System
open Fargo
open Serilog
open Serilog.Events
open Serilog.Sinks
open Args
exception MissingAuth
let configureSerilog level =
let n =
match level with
@@ -17,18 +22,32 @@ let configureSerilog level =
LoggerConfiguration()
.MinimumLevel.Is(n)
.WriteTo.Console(theme = Serilog.Sinks.SystemConsole.Themes.ConsoleTheme.None)
.WriteTo.Console(
formatProvider = Settings.culture,
theme = SystemConsole.Themes.AnsiConsoleTheme.Sixteen
)
.CreateLogger ()
let executeCommand (ct: System.Threading.CancellationToken) (cmd: Command * int) =
let executeCommand (ct: Threading.CancellationToken) (cmd: Command * int) =
task {
let logLevel = snd cmd
Log.Logger <- configureSerilog logLevel
printfn $"Archivist connection string: %s{Settings.archmaesterUrl}"
Log.Debug("Current culture info is {CultureInfo}", Globalization.CultureInfo.CurrentCulture.Name)
Globalization.CultureInfo.CurrentCulture <- Settings.culture
Log.Information("Archivist connection string: {ConnectionString}", Settings.archmaesterUrl)
let auth = Settings.cliAuth |> Option.defaultWith (fun () -> raise MissingAuth)
Log.Debug ("[Main] CancellationToken {Hash}", ct.GetHashCode())
match fst cmd with
| CheckFreq args ->
do! Async.StartAsTask (ArchiveCli.checkFreq args, cancellationToken = ct)
return 0
| CheckCont args ->
do! Async.StartAsTask(ArchiveCli.checkCont args, cancellationToken = ct)
return 0
| ModifyArchive args ->
ArchiveCli.modifyArchive args
return 0
@@ -43,13 +62,13 @@ let executeCommand (ct: System.Threading.CancellationToken) (cmd: Command * int)
ArchiveCli.showRelatedArchives args
return 0
else
ArchiveCli.showArchive args
do! Async.StartAsTask (ArchiveCli.showArchive args, cancellationToken = ct)
return 0
| ShowModel args ->
ModelAreaCli.showModel args
return 0
| ListArchives args ->
Listing.listArchives args
do! Async.StartAsTask (Listing.listArchives auth args, cancellationToken = ct)
return 0
| ListModels args ->
ModelAreaCli.listModels args
@@ -59,7 +78,7 @@ let executeCommand (ct: System.Threading.CancellationToken) (cmd: Command * int)
// ArchiveCli.c args
return 0
| AddArchive args ->
ArchiveCli.addArchive args
do! Async.StartAsTask (ArchiveCli.addArchive auth args, cancellationToken = ct)
return 0
| AddSub args ->
ArchiveCli.createSubArchive args

View File

@@ -65,35 +65,33 @@ let listModelArchivesAdmin (mid, filter: ArchiveFilter) =
return Error exn.Message
})
let getModelArea modelId =
withCliAuth (fun auth ->
let getModelArea auth modelId =
async {
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
let archmaester = api.modelAreaApi ()
async {
let! m = archmaester.getModelArea modelId
return m |> Option.toResultWith "error"
})
let! m = archmaester.getModelArea modelId
return m |> Option.toResultWith "Model area does not exist"
}
let getBaseModelArea () =
withCliAuth (fun auth ->
let getBaseModelArea (auth: string) =
async {
let api = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
let archmaester = api.modelAreaApi ()
async {
try
let! region = archmaester.getModelArea HelloWorld
return region |> Option.toResultWith "erro"
with exn ->
return Error exn.Message
})
try
let! region = archmaester.getModelArea HelloWorld
return region |> Option.toResultWith "erro"
with exn ->
return Error exn.Message
}
let getBaseModelAreas (helloWorld: Guid) : Result<ModelArea[], string> =
Log.Information $"Fetching model area {helloWorld} model areas:"
withCliAuth (fun auth ->
let getBaseModelAreas (auth: string) (helloWorld: Guid) : Async<ModelArea array> =
async {
let intra = Remoting.v1.InternalApi (Settings.archmaesterUrl, auth)
let api = intra.modelAreaApi ()
api.getSubModelAreas helloWorld |> Async.map Ok)
|> Async.RunSynchronously
let! modelAreas = api.getSubModelAreas helloWorld
return modelAreas
}
let addModel (args: AddModelArgs) =
let file = args.File |> Path.GetFullPath

View File

@@ -5,12 +5,11 @@ open System
let base64enc (s: string) =
Text.ASCIIEncoding.UTF8.GetBytes s |> Convert.ToBase64String
let culture = new Globalization.CultureInfo "en-GB"
let tryGetEnv =
Environment.GetEnvironmentVariable
>> function
| null
| "" -> None
| x -> Some x
>> Utils.tryStr
let archmaesterUrl =
tryGetEnv "ARCHMAESTER_URL" |> Option.defaultValue "http://localhost:8085"

View File

@@ -0,0 +1,7 @@
module Utils
let strNull = System.String.IsNullOrWhiteSpace
let strNotNull = strNull >> not
let tryStr str = if strNotNull str then Some str else None
let tap f x = f x; x

View File

@@ -42,6 +42,15 @@
"FSharp.Core": "6.0.6"
}
},
"FsToolkit.ErrorHandling": {
"type": "Direct",
"requested": "[5.0.1, )",
"resolved": "5.0.1",
"contentHash": "93oG3WSogK05H4gkikAmx5pBf30TQJfO1Jky+o/N/nv+RTP3nfOfjlmCHzuyUjQCRFOQog/xQabcky+WBWceeQ==",
"dependencies": {
"FSharp.Core": "9.0.300"
}
},
"Microsoft.EntityFrameworkCore": {
"type": "Direct",
"requested": "[9.0.1, )",
@@ -84,6 +93,38 @@
"Microsoft.EntityFrameworkCore.Design": "9.0.1"
}
},
"Oceanbox.FvcomKit": {
"type": "Direct",
"requested": "[6.0.0-alpha.1, )",
"resolved": "6.0.0-alpha.1",
"contentHash": "VBvQjHiSV1aBPNlTti7XyUileo0SP/Wn84ABGFiCD8Lt0PcsjtZhmkvj+3X/geh0NziB5tF5gscravYT+G+A1Q==",
"dependencies": {
"FSharp.Core": "9.0.303",
"FSharp.Data": "6.4.1",
"FSharpPlus": "1.7.0",
"FsPickler": "5.3.2",
"KDTree": "1.4.1",
"MathNet.Numerics.FSharp": "5.0.0",
"MessagePack": "3.1.3",
"Oceanbox.SDSLite": "2.8.0",
"ProjNet.FSharp": "5.2.0",
"Serilog": "4.2.0",
"Serilog.Sinks.Console": "6.0.0",
"Serilog.Sinks.Seq": "9.0.0"
}
},
"ProjNet.FSharp": {
"type": "Direct",
"requested": "[5.2.0, )",
"resolved": "5.2.0",
"contentHash": "sYSePg/0sVo16Fk3r7okVSga6i9GAN0kkjt1haEXVw25SF8A4S3Gcpf5+6lgknBGdYiZBmJ+3S6v5g1WSSCp2g==",
"dependencies": {
"FSharp.Core": "8.0.100",
"FSharp.Data": "6.3.0",
"FSharpPlus": "1.5.0",
"ProjNet": "2.0.0"
}
},
"Serilog": {
"type": "Direct",
"requested": "[4.2.0, )",
@@ -785,13 +826,13 @@
"FSharp.Data": "[6.4.1, )",
"FSharpPlus": "[1.7.0, )",
"Fable.Remoting.DotnetClient": "[3.35.0, )",
"FsToolkit.ErrorHandling": "[5.0.1, )",
"Microsoft.EntityFrameworkCore": "[9.0.1, )",
"Microsoft.EntityFrameworkCore.Relational": "[9.0.1, )",
"NetTopologySuite": "[2.5.0, )",
"Npgsql.EntityFrameworkCore.PostgreSQL": "[9.0.2, )",
"Npgsql.EntityFrameworkCore.PostgreSQL.NetTopologySuite": "[9.0.2, )",
"Npgsql.NetTopologySuite": "[9.0.2, )",
"Oceanbox.FvcomKit": "[5.13.0, )",
"Oceanbox.SDSLite": "[2.8.0, )",
"Serilog.Sinks.Console": "[6.0.0, )",
"Thoth.Json.Net": "[12.0.0, )"
@@ -963,27 +1004,6 @@
"Npgsql": "9.0.2"
}
},
"Oceanbox.FvcomKit": {
"type": "CentralTransitive",
"requested": "[5.13.0, )",
"resolved": "5.13.0",
"contentHash": "6uVL3fLhRf4OU1hWygGpVex4pI5YB+GaWrKZUgoL/LkGmdFv0qU8Y7v+meHNM3E9bjR7xKinCVfrw5SXsF6C8g==",
"dependencies": {
"FSharp.Core": "9.0.201",
"FSharp.Data": "6.4.1",
"FSharpPlus": "1.7.0",
"FsPickler": "5.3.2",
"KDTree": "1.4.1",
"MathNet.Numerics.FSharp": "5.0.0",
"MessagePack": "3.1.3",
"Oceanbox.SDSLite": "2.8.0",
"ProjNet.FSharp": "5.2.0",
"Serilog": "4.2.0",
"Serilog.Sinks.Console": "6.0.0",
"Serilog.Sinks.Seq": "9.0.0",
"Thoth.Json.Net": "12.0.0"
}
},
"Oceanbox.SDSLite": {
"type": "CentralTransitive",
"requested": "[2.8.0, )",
@@ -992,18 +1012,6 @@
"dependencies": {
"DynamicInterop": "0.9.1"
}
},
"ProjNet.FSharp": {
"type": "CentralTransitive",
"requested": "[5.2.0, )",
"resolved": "5.2.0",
"contentHash": "sYSePg/0sVo16Fk3r7okVSga6i9GAN0kkjt1haEXVw25SF8A4S3Gcpf5+6lgknBGdYiZBmJ+3S6v5g1WSSCp2g==",
"dependencies": {
"FSharp.Core": "8.0.100",
"FSharp.Data": "6.3.0",
"FSharpPlus": "1.5.0",
"ProjNet": "2.0.0"
}
}
},
"net9.0/linux-x64": {

View File

@@ -971,6 +971,14 @@ module Handlers =
getArchivePolygon = fun aid -> requireViewer user aid (fun () -> getArchivePolygon aid)
}
let internalInventoryHandlers (ctx: HttpContext) : Api.Inventory = {
getModelAreaArchives = getModelAreaArchives ctx
getArchive = getArchiveProps ctx
getRefArchives = getRefArchives ctx
getAssociated = getAssociatedArchives ctx
getArchivePolygon = getArchivePolygon
}
let aclHandlers (ctx: HttpContext) : Api.Acl =
let user = ctx.User.Identity.Name
@@ -1078,7 +1086,7 @@ module Endpoints =
let inventoryEndpoints: HttpHandler =
Remoting.createApi ()
|> Remoting.fromContext Handlers.inventoryHandlers
|> Remoting.fromContext Handlers.internalInventoryHandlers
|> Remoting.withRouteBuilder Api.internalRouteBuilder
|> Remoting.buildHttpHandler

View File

@@ -328,11 +328,11 @@
},
"Oceanbox.FvcomKit": {
"type": "Direct",
"requested": "[5.13.0, )",
"resolved": "5.13.0",
"contentHash": "6uVL3fLhRf4OU1hWygGpVex4pI5YB+GaWrKZUgoL/LkGmdFv0qU8Y7v+meHNM3E9bjR7xKinCVfrw5SXsF6C8g==",
"requested": "[6.0.0-alpha.1, )",
"resolved": "6.0.0-alpha.1",
"contentHash": "VBvQjHiSV1aBPNlTti7XyUileo0SP/Wn84ABGFiCD8Lt0PcsjtZhmkvj+3X/geh0NziB5tF5gscravYT+G+A1Q==",
"dependencies": {
"FSharp.Core": "9.0.201",
"FSharp.Core": "9.0.303",
"FSharp.Data": "6.4.1",
"FSharpPlus": "1.7.0",
"FsPickler": "5.3.2",
@@ -343,8 +343,7 @@
"ProjNet.FSharp": "5.2.0",
"Serilog": "4.2.0",
"Serilog.Sinks.Console": "6.0.0",
"Serilog.Sinks.Seq": "9.0.0",
"Thoth.Json.Net": "12.0.0"
"Serilog.Sinks.Seq": "9.0.0"
}
},
"prometheus-net.AspNetCore": {
@@ -1593,13 +1592,13 @@
"FSharp.Data": "[6.4.1, )",
"FSharpPlus": "[1.7.0, )",
"Fable.Remoting.DotnetClient": "[3.35.0, )",
"FsToolkit.ErrorHandling": "[5.0.1, )",
"Microsoft.EntityFrameworkCore": "[9.0.1, )",
"Microsoft.EntityFrameworkCore.Relational": "[9.0.1, )",
"NetTopologySuite": "[2.5.0, )",
"Npgsql.EntityFrameworkCore.PostgreSQL": "[9.0.2, )",
"Npgsql.EntityFrameworkCore.PostgreSQL.NetTopologySuite": "[9.0.2, )",
"Npgsql.NetTopologySuite": "[9.0.2, )",
"Oceanbox.FvcomKit": "[5.13.0, )",
"Oceanbox.SDSLite": "[2.8.0, )",
"Serilog.Sinks.Console": "[6.0.0, )",
"Thoth.Json.Net": "[12.0.0, )"

View File

@@ -57,7 +57,7 @@ ingress:
annotations:
cert-manager.io/cluster-issuer: letsencrypt-staging
nginx.ingress.kubernetes.io/proxy-buffer-size: 128k
nginx.ingress.kubernetes.io/whitelist-source-range: 10.0.0.0/8,172.16.0.0/12,192.168.0.0/16
nginx.ingress.kubernetes.io/whitelist-source-range: 10.0.0.0/8,172.16.0.0/12,192.168.0.0/16,195.43.37.33/32
hosts:
- host: <x>-atlantis.dev.oceanbox.io
paths:

View File

@@ -130,7 +130,7 @@ module Archive =
prop.key object
prop.children [
Html.a [
prop.href (Router.format("user", object))
prop.href (Router.format("users", object))
prop.text object
]
]
@@ -744,4 +744,4 @@ module Archive =
| None ->
Html.h1 "Archive not found"
]
]

View File

@@ -231,6 +231,19 @@ module Archives =
]
]
[<ReactComponent>]
let private AtmoList () =
let archiveType = Archmaester.Dto.ArchiveType.FromString "atmo:*:*"
Html.div [
prop.classes [ "archives-list" ]
prop.children [
Html.h2 "Atmo"
Archives.List(archiveType)
]
]
[<ReactComponent>]
let View () =
React.fragment [
@@ -247,6 +260,8 @@ module Archives =
DriftersList ()
StatsList ()
AtmoList ()
]
]
]
]

View File

@@ -6,6 +6,9 @@ module GroupArchive =
open Feliz
open Feliz.Router
// TODO
let private postViewPermission (group: string) = ()
[<ReactComponent>]
let private DeleteRelationButton onDelete (tuple: Remoting.Tuple) =
let handleDelete (ev: Types.Event) =
@@ -41,7 +44,10 @@ module GroupArchive =
Html.div [
prop.classes [ "flex-row-center" ]
prop.children [
Html.div [ prop.classes [ "grow" ]; prop.children [ Html.b "View Term" ] ]
Html.div [
prop.classes [ "grow" ]
prop.children [ Html.b "View Term" ]
]
DeleteRelationButton onDelete tuple
]
@@ -99,10 +105,20 @@ module GroupArchive =
[<ReactComponent>]
let private PermissionForm (permissions: OpenFGA.Types.ArchiveRelation array) (group: string) =
let adding, setAdding = React.useState false
let addView, setAddView = React.useState false
let addExec, setAddExec = React.useState false
let handleAddClick (ev: Types.Event) = setAdding true
let handleCancelClick (ev: Types.Event) = setAdding false
let handleCheckView (ev: Types.Event) = setAddView (not addView)
let handleCheckExec (ev: Types.Event) = setAddExec (not addExec)
let handleClickSave (ev: Types.Event) =
console.debug("Adding permissions")
if addView then
postViewPermission group
let hasViewTerm = permissions |> Array.exists _.IsViewTerm
let hasExecTicket = permissions |> Array.exists _.IsExecTicket
@@ -114,14 +130,20 @@ module GroupArchive =
]
prop.children [
if adding then
Html.button [ prop.onClick handleAddClick; prop.text "Save" ]
Html.button [
prop.onClick handleClickSave
prop.text "Save"
]
Html.button [
prop.onClick handleCancelClick
prop.text "Cancel"
]
else
Html.button [ prop.onClick handleAddClick; prop.text "Add" ]
Html.button [
prop.onClick handleAddClick
prop.text "Add"
]
]
]
@@ -129,14 +151,35 @@ module GroupArchive =
Html.div [
prop.id "group-archive-exec-form"
prop.classes [
"flex-row"
"flex-row-start"
"gap-32"
]
prop.children [
if not hasViewTerm then
Html.div [
prop.classes [
"flex-column"
"gap-8"
"shadow"
"brad-8"
"m-8"
"p-16"
]
prop.children [
Html.b "View"
Html.div [
prop.children [
Html.input [
prop.id "view-checkbox"
prop.type'.checkbox
prop.onChange handleCheckView
prop.custom("checked", addView)
]
Html.label [
prop.htmlFor "view-checkbox"
prop.children (Html.b "View")
]
]
]
Groups.ViewForm (Remoting.ViewTerm.empty, ignore)
]
]
@@ -155,7 +198,20 @@ module GroupArchive =
style.flexBasis (length.px 512)
]
prop.children [
Html.b "Exec"
Html.div [
prop.children [
Html.input [
prop.id "exec-checkbox"
prop.type'.checkbox
prop.onChange handleCheckExec
prop.custom("checked", addExec)
]
Html.label [
prop.htmlFor "exec-checkbox"
prop.children (Html.b "Exec")
]
]
]
Groups.ExecForm (Remoting.ExecTicket.empty, ignore)
]
]
@@ -259,4 +315,4 @@ module GroupArchive =
]
]
| None -> Html.h1 (sprintf "Group %s / Archive %O not found" group archiveId)
]
]

View File

@@ -15,9 +15,9 @@ module GroupArchiveAddForm =
return Error (sprintf "Error adding archive to groups: %s" e.Message)
}
// TODO: Promote to elmish
// TODO(simkir): Promote to elmish
[<ReactComponent>]
let View key (onAdd: Archmaester.Dto.ArchiveProps -> unit) (group: string) =
let View key (onAdd: Archmaester.Dto.ArchiveProps -> unit) (group: string) =
let adding, setAdding = React.useState false
let selecting, setSelecting = React.useState false
let searchTerm, setSearchTerm = React.useState ""
@@ -231,4 +231,4 @@ module GroupArchiveAddForm =
]
| None -> ()
]
]
]

View File

@@ -32,9 +32,24 @@ module Admin =
let fgaUser = sprintf "user:%s" user
let fgaGroup = sprintf "group:%s" req.Group
[|
{ Object = fgaUser; Relation = "active"; User = fgaUser; Condition = None }
{ Object = fgaUser; Relation = "registered"; User = fgaUser; Condition = None }
{ Object = fgaGroup; Relation = "member"; User = fgaUser; Condition = None }
{
Object = fgaUser
Relation = "active"
User = fgaUser
Condition = None
}
{
Object = fgaUser
Relation = "registered"
User = fgaUser
Condition = None
}
{
Object = fgaGroup
Relation = "member"
User = fgaUser
Condition = None
}
|]
)
let req = OpenFGA.Queries.write tuples
@@ -69,6 +84,25 @@ module Admin =
return! Error (sprintf "Error deleting users from OpenFGA: %s" e.Message)
}
/// Creates write tuples for adding an archive to a group
let private createGroupPermissionTuples
(archiveId: Archmaester.Dto.ArchiveId)
(viewOpt: Remoting.ViewTerm option)
(execOpt: Remoting.ExecTicket option)
(groups: string array)
: Model.ClientTupleKey array =
groups
|> Array.collect (fun group -> [|
match viewOpt with
| Some view -> OpenFGA.Group.viewArchive archiveId view group
| None -> ()
match execOpt with
| Some exec -> OpenFGA.Group.execArchive archiveId exec group
| None -> ()
|])
/// Adds a collection of groups permissions to an archive
let addArchiveGroups (ctx: HttpContext) (req: Remoting.AddArchiveGroupsRequest) =
async {
let user = ctx.User.Identity.Name
@@ -78,16 +112,61 @@ module Admin =
let db = ctx.GetService<Entity.ArchiveContext> ()
let fga = ctx.GetService<OpenFgaClient> ()
let! created = Archmaester.EFCore.addArchiveGroups db req.Id req.Groups
// NOTE: Find the groups that do not have this archives added
let! existing =
req.Groups
|> Array.map (fun group ->
async {
let! opt = Archmaester.EFCore.queryGroupArchive db req.Id group
let res =
match opt with
| Some _archive -> None
| None -> Some group
return res
}
)
|> Async.Sequential
// NOTE: The groups missing this archive which we will now add
let adding: string array = existing |> Array.choose id
let! created = adding |> Archmaester.EFCore.addArchiveGroups db req.Id
do logger.LogInformation ("Added {CreatedCount} archive group entries", created)
let req = OpenFGA.Group.addArchive req
let req =
createGroupPermissionTuples req.Id req.View req.Exec req.Groups
|> OpenFGA.Queries.write'
let! fgaResp = fga.Write req |> Async.AwaitTask
do logger.LogInformation ("OpenFGA write responded with: {JSON}", fgaResp.ToJson ())
return Ok ()
with e ->
do logger.LogError(e, "Error adding group to archive")
do logger.LogError (e, "Error adding group to archive")
return Error (sprintf "Error adding archive groups: %s" e.Message)
}
/// Adds permissions to an archive for a group
let addGroupArchive (ctx: HttpContext) (req: Remoting.AddGroupArchiveRequest) =
async {
let user = ctx.User.Identity.Name
let logger = ctx.GetLogger<Remoting.Api.Admin> ()
do logger.LogInformation ("Add archive to group from {User}: {Request}", user, req)
try
let db = ctx.GetService<Entity.ArchiveContext> ()
let fga = ctx.GetService<OpenFgaClient> ()
let! exists = Archmaester.EFCore.queryGroupArchive db req.ArchiveId req.Group
if exists.IsNone then
let! created = Archmaester.EFCore.addArchiveGroups db req.ArchiveId [| req.Group |]
do logger.LogInformation ("Added {CreatedCount} archive group entries", created)
let req =
createGroupPermissionTuples req.ArchiveId req.View req.Exec [| req.Group |]
|> OpenFGA.Queries.write'
let! fgaResp = fga.Write req |> Async.AwaitTask
do logger.LogInformation ("OpenFGA write responded with: {JSON}", fgaResp.ToJson ())
return Ok ()
with e ->
do logger.LogError (e, "Error adding group to archive")
return Error (sprintf "Error adding archive groups: %s" e.Message)
}
@@ -199,7 +278,8 @@ module Admin =
let dtos: Archmaester.Dto.ArchiveProps array =
archives
|> Array.map (fun (archive, type') ->
let archiveType = Archmaester.Dto.ArchiveType.FromDbType(type'.kind, type'.variant, type'.format)
let archiveType =
Archmaester.Dto.ArchiveType.FromDbType (type'.kind, type'.variant, type'.format)
{
Archmaester.Dto.ArchiveProps.empty with
archiveId = archive.id
@@ -242,10 +322,10 @@ module Admin =
do logger.LogInformation ("getArchiveCount from {User}", user)
let db = ctx.GetService<Entity.ArchiveContext> ()
let! entities = Archmaester.EFCore.queryArchiveTypes db
let dtos : Archmaester.Dto.ArchiveType array =
let dtos: Archmaester.Dto.ArchiveType array =
entities
|> Array.map (fun entity ->
Archmaester.Dto.ArchiveType.FromDbType(entity.Kind, entity.Variant, entity.Format)
Archmaester.Dto.ArchiveType.FromDbType (entity.Kind, entity.Variant, entity.Format)
)
return dtos
}
@@ -309,6 +389,7 @@ module Admin =
let private impl (ctx: HttpContext) : Remoting.Api.Admin = {
addUsers = Handler.addUsers ctx
addArchiveGroups = Handler.addArchiveGroups ctx
addGroupArchive = Handler.addGroupArchive ctx
deleteArchive = Handler.deleteArchive ctx
getAllGroups = Handler.getAllGroups ctx
getArchive = Handler.getArchive ctx
@@ -325,4 +406,4 @@ module Admin =
|> Remoting.withErrorHandler Utils.rpcErrorHandler
|> Remoting.fromContext impl
|> Remoting.withRouteBuilder Remoting.routeBuilder
|> Remoting.buildHttpHandler
|> Remoting.buildHttpHandler

View File

@@ -407,6 +407,24 @@ module Archmaester =
return entities
}
let queryGroupArchive (db: Entity.ArchiveContext) (archiveId: System.Guid) (groupName: string) : Async<Entity.Archive option> =
async {
try
let! entity =
db.Archives
.AsNoTracking()
.Include(_.Attribs)
.SingleAsync(fun archive ->
archive.ArchiveId = archiveId
&& archive.Groups.Any (fun archiveGroup -> archiveGroup.Group.Name.Contains groupName)
)
|> Async.AwaitTask
return Some entity
with :? System.InvalidOperationException ->
return None
}
let queryGroupArchives (db: Entity.ArchiveContext) (groupName: string) : Async<Entity.Archive array> =
async {
let! entities =
@@ -420,4 +438,4 @@ module Archmaester =
|> Async.AwaitTask
return entities
}
}

View File

@@ -186,7 +186,7 @@ module OpenFGA =
result
let write' (writes: ClientTupleKey array) =
let write' (writes: ClientTupleKey array) : ClientWriteRequest =
let result = ClientWriteRequest ()
do result.Writes <- ResizeArray writes
@@ -222,20 +222,6 @@ module OpenFGA =
tuple
/// Creates write tuples for adding an archive to a group
let addArchive (req: Remoting.AddArchiveGroupsRequest) : ClientWriteRequest =
req.Groups
|> Array.collect (fun group -> [|
match req.View with
| Some view -> viewArchive req.Id view group
| None -> ()
match req.Exec with
| Some exec -> execArchive req.Id exec group
| None -> ()
|])
|> Queries.write'
module private Handlers =
let check (ctx: HttpContext) (req: Remoting.CheckRequest) =
async {
@@ -329,4 +315,4 @@ module OpenFGA =
Remoting.createApi ()
|> Remoting.withRouteBuilder Remoting.routeBuilder
|> Remoting.fromContext impl
|> Remoting.buildHttpHandler
|> Remoting.buildHttpHandler

View File

@@ -228,15 +228,6 @@
"FSharp.Data.Runtime.Utilities": "6.4.1"
}
},
"FsPickler": {
"type": "Transitive",
"resolved": "5.3.2",
"contentHash": "LFtxXpQNor8az1ez3rN9oz2cqf/06i9yTrPyJ9R83qLEpFAU7Of0WL2hoSXzLHer4lh+6mO1NV4VQFiBzNRtjw==",
"dependencies": {
"FSharp.Core": "4.3.2",
"System.Reflection.Emit.Lightweight": "4.3.0"
}
},
"Giraffe.ViewEngine": {
"type": "Transitive",
"resolved": "1.4.0",
@@ -285,35 +276,6 @@
"resolved": "6.0.0",
"contentHash": "eVHCR7a6m/dm5RFcBzE3qs/Jg5j9R5Rjpu8aTOv9e4AFvaQtBXb5ah7kmwU+YwA0ufRwz4wf1hnIvsD2hSnI4g=="
},
"KdTree": {
"type": "Transitive",
"resolved": "1.4.1",
"contentHash": "yWbb35v/V9y88SLLMUPTlAN3pQEoPhDfZf9PApFnlU4kLtwVQ75U9vW5mW4/alQnLBuLKWBKcy4W5xK95mYsuA=="
},
"MathNet.Numerics": {
"type": "Transitive",
"resolved": "5.0.0",
"contentHash": "pg1W2VwaEQMAiTpGK840hZgzavnqjlCMTVSbtVCXVyT+7AX4mc1o89SPv4TBlAjhgCOo9c1Y+jZ5m3ti2YgGgA=="
},
"MathNet.Numerics.FSharp": {
"type": "Transitive",
"resolved": "5.0.0",
"contentHash": "lKYhd68fReW5odX/q+Uzxw3357Duq3zmvkYvnZVqqcc2r/EmrYGDoOdUGuHnhfr8yj9V34js5gQH/7IWcxZJxg==",
"dependencies": {
"FSharp.Core": "6.0.2",
"MathNet.Numerics": "5.0.0"
}
},
"MessagePack.Annotations": {
"type": "Transitive",
"resolved": "3.1.3",
"contentHash": "XTy4njgTAf6UVBKFj7c7ad5R0WVKbvAgkbYZy4f00kplzX2T3VOQ34AUke/Vn/QgQZ7ETdd34/IDWS3KBInSGA=="
},
"MessagePackAnalyzer": {
"type": "Transitive",
"resolved": "3.1.3",
"contentHash": "19u1oVNv2brCs5F/jma8O8CnsKMMpYwNqD0CAEDEzvqwDTAhqC9r7xHZP4stPb3APs/ryO/zVn7LvjoEHfvs7Q=="
},
"Microsoft.AspNetCore.Cryptography.Internal": {
"type": "Transitive",
"resolved": "9.0.2",
@@ -591,11 +553,6 @@
"resolved": "3.0.1",
"contentHash": "s/s20YTVY9r9TPfTrN5g8zPF1YhwxyqO6PxUkrYTGI2B+OGPe9AdajWZrLhFqXIvqIW23fnUE4+ztrUWNU1+9g=="
},
"Microsoft.NET.StringTools": {
"type": "Transitive",
"resolved": "17.11.4",
"contentHash": "mudqUHhNpeqIdJoUx2YDWZO/I9uEDYVowan89R6wsomfnUJQk6HteoQTlNjZDixhT2B4IXMkMtgZtoceIjLRmA=="
},
"NetTopologySuite.IO.PostGis": {
"type": "Transitive",
"resolved": "2.1.0",
@@ -713,15 +670,6 @@
"System.IO.Pipelines": "5.0.1"
}
},
"ProjNET": {
"type": "Transitive",
"resolved": "2.0.0",
"contentHash": "iMJG8qpGJ8SjFrB044O8wgo0raAWCdG1Bvly0mmVcjzsrexDHhC+dUct6Wb1YwQtupMBjSTWq7Fn00YeNErprA==",
"dependencies": {
"System.Memory": "4.5.3",
"System.Numerics.Vectors": "4.5.0"
}
},
"Sentry": {
"type": "Transitive",
"resolved": "5.11.0",
@@ -738,23 +686,6 @@
"Sentry": "5.11.0"
}
},
"Serilog.Sinks.File": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "lxjg89Y8gJMmFxVkbZ+qDgjl+T4yC5F7WSLTvA+5q0R04tfKVLRL/EHpYoJ/MEQd2EeCKDuylBIVnAYMotmh2A==",
"dependencies": {
"Serilog": "4.0.0"
}
},
"Serilog.Sinks.Seq": {
"type": "Transitive",
"resolved": "9.0.0",
"contentHash": "aNU8A0K322q7+voPNmp1/qNPH+9QK8xvM1p72sMmCG0wGlshFzmtDW9QnVSoSYCj0MgQKcMOlgooovtBhRlNHw==",
"dependencies": {
"Serilog": "4.2.0",
"Serilog.Sinks.File": "6.0.0"
}
},
"StackExchange.Redis": {
"type": "Transitive",
"resolved": "2.7.27",
@@ -877,13 +808,13 @@
"FSharp.Data": "[6.4.1, )",
"FSharpPlus": "[1.7.0, )",
"Fable.Remoting.DotnetClient": "[3.35.0, )",
"FsToolkit.ErrorHandling": "[5.0.1, )",
"Microsoft.EntityFrameworkCore": "[9.0.1, )",
"Microsoft.EntityFrameworkCore.Relational": "[9.0.1, )",
"NetTopologySuite": "[2.5.0, )",
"Npgsql.EntityFrameworkCore.PostgreSQL": "[9.0.2, )",
"Npgsql.EntityFrameworkCore.PostgreSQL.NetTopologySuite": "[9.0.2, )",
"Npgsql.NetTopologySuite": "[9.0.2, )",
"Oceanbox.FvcomKit": "[5.13.0, )",
"Oceanbox.SDSLite": "[2.8.0, )",
"Serilog.Sinks.Console": "[6.0.0, )",
"Thoth.Json.Net": "[12.0.0, )"
@@ -1144,17 +1075,6 @@
"Microsoft.AspNetCore.Authentication.OpenIdConnect": "6.0.0"
}
},
"MessagePack": {
"type": "CentralTransitive",
"requested": "[3.1.3, )",
"resolved": "3.1.3",
"contentHash": "UiNv3fknvPzh5W+S0VV96R17RBZQQU71qgmsMnjjRZU2rtQM/XcTnOB+klT2dA6T1mxjnNKYrEm164AoXvGmYg==",
"dependencies": {
"MessagePack.Annotations": "3.1.3",
"MessagePackAnalyzer": "3.1.3",
"Microsoft.NET.StringTools": "17.11.4"
}
},
"Microsoft.AspNetCore.Authentication.JwtBearer": {
"type": "CentralTransitive",
"requested": "[9.0.2, )",
@@ -1263,27 +1183,6 @@
"Npgsql": "9.0.2"
}
},
"Oceanbox.FvcomKit": {
"type": "CentralTransitive",
"requested": "[5.13.0, )",
"resolved": "5.13.0",
"contentHash": "6uVL3fLhRf4OU1hWygGpVex4pI5YB+GaWrKZUgoL/LkGmdFv0qU8Y7v+meHNM3E9bjR7xKinCVfrw5SXsF6C8g==",
"dependencies": {
"FSharp.Core": "9.0.201",
"FSharp.Data": "6.4.1",
"FSharpPlus": "1.7.0",
"FsPickler": "5.3.2",
"KDTree": "1.4.1",
"MathNet.Numerics.FSharp": "5.0.0",
"MessagePack": "3.1.3",
"Oceanbox.SDSLite": "2.8.0",
"ProjNet.FSharp": "5.2.0",
"Serilog": "4.2.0",
"Serilog.Sinks.Console": "6.0.0",
"Serilog.Sinks.Seq": "9.0.0",
"Thoth.Json.Net": "12.0.0"
}
},
"Oceanbox.SDSLite": {
"type": "CentralTransitive",
"requested": "[2.8.0, )",
@@ -1304,18 +1203,6 @@
"System.Diagnostics.DiagnosticSource": "9.0.9"
}
},
"ProjNet.FSharp": {
"type": "CentralTransitive",
"requested": "[5.2.0, )",
"resolved": "5.2.0",
"contentHash": "sYSePg/0sVo16Fk3r7okVSga6i9GAN0kkjt1haEXVw25SF8A4S3Gcpf5+6lgknBGdYiZBmJ+3S6v5g1WSSCp2g==",
"dependencies": {
"FSharp.Core": "8.0.100",
"FSharp.Data": "6.3.0",
"FSharpPlus": "1.5.0",
"ProjNet": "2.0.0"
}
},
"Saturn": {
"type": "CentralTransitive",
"requested": "[0.17.0, )",

View File

@@ -130,6 +130,14 @@ module Remoting =
Exec: ExecTicket option
}
[<Struct>]
type AddGroupArchiveRequest = {
Group: string
ArchiveId: Archmaester.Dto.ArchiveId
View: ViewTerm option
Exec: ExecTicket option
}
[<Struct>]
type AddUsersRequest = {
Group: string
@@ -143,6 +151,7 @@ module Remoting =
type Admin = {
addUsers: AddUsersRequest -> Async<Result<unit, string>>
addArchiveGroups: AddArchiveGroupsRequest -> Async<Result<unit, string>>
addGroupArchive: AddGroupArchiveRequest -> Async<Result<unit, string>>
deleteArchive: Archmaester.Dto.ArchiveId -> Async<Result<bool, string>>
getAllGroups: Async<string array>
getArchive: Archmaester.Dto.ArchiveId -> Async<Result<Archmaester.Dto.ArchiveProps, string>>
@@ -160,4 +169,4 @@ module Remoting =
ListObjects: ListObjectsRequest -> Async<Result<string array, string>>
ListUsers: ListUsersRequest -> Async<Result<string array, string>>
Read: ReadRequest -> Async<Result<ReadResponse, string>>
}
}

View File

@@ -20,6 +20,13 @@ type private Inbox = MailboxProcessor<Msg>
type ArchiveAgent(arch: ArchiveService, ?expiry) =
let expiry = 1000 * defaultArg expiry 60
let uri = Uri arch.url
let ip =
uri.DnsSafeHost
|> Net.Dns.GetHostEntry
|> _.AddressList
|> Array.head
let rec expunge (inbox: Inbox) =
async {
Threading.Thread.Sleep expiry
@@ -38,8 +45,27 @@ type ArchiveAgent(arch: ArchiveService, ?expiry) =
if ttl.TotalSeconds < 0.0 then Map.remove k m else m)
state
let handlePruneAsync state expiry =
async {
let res =
(state, state)
||> Map.fold (fun m k v ->
let expireTime = v.atime.AddMilliseconds (float expiry)
let ttl = expireTime - DateTime.Now
Log.Debug $"ArchiveAgent: {k} ttl: {ttl.TotalSeconds}"
if ttl.TotalSeconds < 0.0 then Map.remove k m else m
)
return res
}
let handleAdd state (aid: Guid) dto =
Map.add aid { archive = dto; atime = DateTime.Now } state
state |> Map.add aid { archive = dto; atime = DateTime.Now }
let handleAddAsync state (aid: Guid) dto =
async {
let res = state |> Map.add aid { archive = dto; atime = DateTime.Now }
return res
}
let handleGet state (reply: Reply) aid =
match Map.tryFind aid state with
@@ -47,26 +73,67 @@ type ArchiveAgent(arch: ArchiveService, ?expiry) =
reply.Reply (Some arch)
state
| None ->
Log.Debug $"ArchiveAgent: handleGet: not in cache: {aid}, trying archmaester."
Log.Debug $"ArchiveAgent: handleGet: not in cache: {aid}."
Log.Debug $"ArchiveAgent: handleGet: trying archmaester ({uri.DnsSafeHost} = {ip})."
let api = Remoting.v1.InternalApi (arch.url, arch.credentials)
let adminApi = api.adminApi ()
let arch = adminApi.getArchiveDto aid |> Async.RunSynchronously
let res = adminApi.getArchiveDto aid |> Async.RunSynchronously
match arch with
match res with
| Ok a ->
Log.Debug $"ArchiveAgent: handleGet: trying archmaester ({arch.url}) completed with {a.props.name}."
Some { archive = a; atime = DateTime.Now } |> reply.Reply
handleAdd state aid a
| Error e ->
Log.Error $"ArchiveAgent: handleGet: {e}"
Log.Error(e, "ArchiveAgent: handleGet")
None |> reply.Reply
state
let handleGetAsync state (reply: Reply) aid =
async {
match Map.tryFind aid state with
| Some arch ->
do reply.Reply (Some arch)
return state
| None ->
Log.Debug $"ArchiveAgent: handleGet: not in cache: {aid}."
let uri = Uri arch.url
let ip =
uri.DnsSafeHost
|> Net.Dns.GetHostEntry
|> _.AddressList
|> Array.head
Log.Debug $"ArchiveAgent: handleGet: trying archmaester ({uri.DnsSafeHost} = {ip})."
let api = Remoting.v1.InternalApi (arch.url, arch.credentials)
let adminApi = api.adminApi ()
try
match! adminApi.getArchiveDto aid with
| Ok a ->
Log.Debug $"ArchiveAgent: handleGet: trying archmaester ({arch.url}) completed with {a.props.name}."
do Some { archive = a; atime = DateTime.Now } |> reply.Reply
return handleAdd state aid a
| Error e ->
Log.Error $"ArchiveAgent: handleGet: Error {e}"
None |> reply.Reply
return state
with ex ->
Log.Error(ex, "ArchiveAgent: handleGet: exn")
do reply.Reply None
return state
}
let update msg state =
match msg with
| Get (aid, reply) -> handleGet state reply aid
| Add (aid, ds) -> handleAdd state aid ds
| Prune -> handlePrune state expiry
let updateAsync msg state =
match msg with
| Get (aid, reply) -> handleGetAsync state reply aid
| Add (aid, ds) -> handleAddAsync state aid ds
| Prune -> handlePruneAsync state expiry
member val private inbox: Inbox =
Inbox.Start (fun (inbox: Inbox) ->
expunge inbox
@@ -74,13 +141,21 @@ type ArchiveAgent(arch: ArchiveService, ?expiry) =
let rec loop state =
async {
let! msg = inbox.Receive ()
let state' = update msg state
let! state' = updateAsync msg state
return! loop state'
}
loop Map.empty)
loop Map.empty
)
member x.addArchive(aid, dto) = x.inbox.Post (Add (aid, dto))
member x.tryGetArchive aid =
x.inbox.PostAndReply (fun r -> Get (aid, r)) |> Option.map (fun x -> x.archive)
x.inbox.PostAndReply (fun r -> Get (aid, r)) |> Option.map (fun x -> x.archive)
member x.tryGetArchiveAsync aid =
async {
let! opt = x.inbox.PostAndAsyncReply (fun r -> Get (aid, r))
let res = opt |> Option.map (fun x -> x.archive)
return res
}

View File

@@ -4,13 +4,13 @@ open System
open System.Collections
open System.Linq
open FSharpPlus
open FSharp.Linq.NullableOperators
open Microsoft.FSharp.Core
open FSharpPlus
open Microsoft.EntityFrameworkCore
open NetTopologySuite.Geometries
open Npgsql
open Serilog
open Microsoft.EntityFrameworkCore
open Archmaester.Dto
@@ -230,7 +230,7 @@ let retireDanglingAttribs (ctx: Entity.ArchiveContext) =
type Archivist(dataSource: NpgsqlDataSource) =
let withDb qry =
try
let ctx = new Entity.ArchiveContext(dataSource, true)
use ctx = new Entity.ArchiveContext(dataSource, true)
qry ctx |> Ok
with e ->
Log.Error $"DataAgent.Archives.Archivist.withDb: {e}"
@@ -264,9 +264,16 @@ type Archivist(dataSource: NpgsqlDataSource) =
e.Name <- x
e
let verifyStartAndEndTimes startTime endTime (files: ArchiveFile[]) =
let n = files.Length - 1
startTime = files[0].startTime && endTime = files[n].endTime
let verifyStartAndEndTimes (startTime: DateTime) (endTime: DateTime) (files: ArchiveFile array) =
let first = files |> Array.minBy _.startTime
let last = files |> Array.maxBy _.endTime
let firstStart = first.startTime
let lastEnd = last.endTime
if startTime = firstStart && endTime = lastEnd then
Ok ()
else
Error $"Start and end times don't match file series: prop start {startTime} vs. first file start {firstStart} and prop end {endTime} vs. last file end {lastEnd}"
let verifyContiguousSeries (files: ArchiveFile[]) =
let checkTime (n, cont) (f: ArchiveFile) =
@@ -288,6 +295,22 @@ type Archivist(dataSource: NpgsqlDataSource) =
else
Error $"Archive series is not contiguous at:\n{files[n]}"
member private this.verifyArchiveExists item =
if this.archiveExists item.props.archiveId then
let msg = $"Archive {item.props.name} already exists with id {item.props.archiveId}"
Log.Error msg
Error msg
else
Ok ()
member private this.verifyModelAreaExists item =
if not (this.modelAreaExists item.props.modelArea) then
let msg = $"ModelArea {item.props.modelArea} does not exist"
Log.Error msg
Error msg
else
Ok ()
member private x.instantiateAttribs'(ctx: Entity.ArchiveContext, props: Entity.Attribs, item: ArchiveDto) =
let atype =
let k, v, f = item.props.archiveType.ToDbType ()
@@ -346,29 +369,29 @@ type Archivist(dataSource: NpgsqlDataSource) =
Log.Debug $"Archives.updateAttribs error: {e}"
Error $"Could not update archive props: {e.Message}"
member x.tryMigrate() =
member _.tryMigrate() =
Log.Debug "Running database migrations... "
match withDb (fun ctx -> ctx.Database.Migrate ()) with
| Ok _ -> Log.Debug "migrations done."
| Error e -> Log.Error $"exception in Db.tryMigrate: \n{e}"
member x.archiveExists(aid: Guid) =
member _.archiveExists(aid: Guid) =
let f (ctx: Entity.ArchiveContext) =
ctx.Archives.AsNoTracking().Any (fun a -> a.ArchiveId = aid)
withDb f |> Result.get
member x.checkModelAreaId(mid) =
match x.getModelArea mid with
member this.checkModelAreaId(mid) =
match this.getModelArea mid with
| Error err ->
Log.Debug $"getModelArea returned error: {err}. Using HelloWorld."
false
| Ok _ -> true
member private x.addArchiveGroups'(ctx: Entity.ArchiveContext, aid: ArchiveId, groups: string[]) =
member private this.addArchiveGroups'(ctx: Entity.ArchiveContext, aid: ArchiveId, groups: string[]) =
if groups.Length > 0 then
x.addGroups' (ctx, groups) |> ignore
this.addGroups' (ctx, groups) |> ignore
let gx = ctx.Groups.Where (fun u -> groups.Contains u.Name) |> Seq.toList
let ax =
@@ -844,41 +867,31 @@ type Archivist(dataSource: NpgsqlDataSource) =
ctx.SaveChanges () |> ignore
}
member x.tryAddArchive(item: ArchiveDto) =
member this.tryAddArchive(item: ArchiveDto) =
monad {
do!
if not (x.modelAreaExists item.props.modelArea) then
let msg = $"ModelArea {item.props.modelArea} does not exist"
Log.Error msg
Error msg
else
Ok ()
do!
if x.archiveExists item.props.archiveId then
let msg = $"Archive {item.props.name} already exists with id {item.props.archiveId}"
Log.Error msg
Error msg
else
Ok ()
do! this.verifyModelAreaExists item
do! this.verifyArchiveExists item
let p = item.props
let files = item.files.series
if verifyStartAndEndTimes p.startTime p.endTime files |> not then
do! Error "Start and end times don't match file series"
do! verifyStartAndEndTimes p.startTime p.endTime files
let! _ = verifyContiguousSeries files
// NOTE(simkir): Atmo archives do not need to be contiguous
let! _ =
match item.props.archiveType with
| Atmo _ -> Ok [||]
| _ -> verifyContiguousSeries files
Log.Information $"Adding new archive with Guid: {p.archiveId}"
let ctx = new Entity.ArchiveContext (dataSource)
use ctx = new Entity.ArchiveContext (dataSource)
// let transaction = ctx.Database.BeginTransaction()
do! x.addAcl' (ctx, item.acl)
do! this.addAcl' (ctx, item.acl)
Log.Information $"Adding new archive item: {(p.name, item.files.basePath)}"
Log.Debug $"Adding new archive item\n%A{item}"
match x.addArchive' (ctx, item) with
match this.addArchive' (ctx, item) with
| Ok a -> return! Ok a
// return a |> tap (tryCommit transaction)
| Error e ->
@@ -1263,11 +1276,14 @@ type Archivist(dataSource: NpgsqlDataSource) =
member x.deleteArchive(aid: ArchiveId) =
let f (ctx: Entity.ArchiveContext) =
let archive =
ctx.Archives.Where(fun a -> a.ArchiveId = aid).Include(_.RefArchives).First ()
ctx.Archives
.Include(_.RefArchives)
.Include(fun archive -> archive.Attribs)
.Single(fun a -> a.ArchiveId = aid)
try
Log.Debug $"removing {archive.Name}:{archive.ArchiveId}"
ctx.Remove archive |> ignore
ctx.Remove archive.Attribs |> ignore
ctx.SaveChanges () |> ignore
retireDanglingAttribs ctx
ctx.SaveChanges () |> ignore
@@ -1325,22 +1341,23 @@ type Archivist(dataSource: NpgsqlDataSource) =
member _.tryGetArchive aid =
withDb (fun ctx ->
ctx.Archives
.AsNoTracking()
.Where(fun y -> y.ArchiveId = aid)
.Include(_.Attribs)
.ThenInclude(_.Type)
.Include(_.Ref)
.Include(_.Owners)
.Include(_.Users)
.Include(_.Groups)
.Include(fun y -> y.Files.OrderBy _.File.Ordering)
.ToArray ()
|> fun y ->
if y.Length = 0 then
Error $"archive {aid} not found"
else
archiveToDto ctx y[0] |> Ok)
let archives =
ctx.Archives
.AsNoTracking()
.Where(fun y -> y.ArchiveId = aid)
.Include(_.Attribs)
.ThenInclude(_.Type)
.Include(_.Ref)
.Include(_.Owners)
.Include(_.Users)
.Include(_.Groups)
.Include(fun y -> y.Files.OrderBy _.File.Ordering)
.ToArray ()
if archives.Length = 0 then
Error $"archive {aid} not found"
else
archiveToDto ctx archives[0] |> Ok
)
|> Result.flatten
member x.getRefArchivesProps(aid: Guid, atype: ArchiveType, user: string, groups: string[]) =

View File

@@ -2,9 +2,13 @@ module Oceanbox.DataAgent.DatasetAgent
open System
open System.IO
open Microsoft.Research.Science.Data
open FsToolkit.ErrorHandling
open FSharpPlus
open Serilog
open Archmaester.Dto
type private DatasetState = { ds: DataSet; atime: DateTime }
@@ -66,22 +70,13 @@ let private getFrameIndex (files: ArchiveFile array) t =
let inline openDataSetWithMode mode fname =
let sw = Diagnostics.Stopwatch.StartNew()
Log.Debug $"openDataSet: {fname}"
let uri = NetCDF4.NetCDFUri ()
uri.FileName <- fname
uri.OpenMode <- mode
if File.Exists fname then
let uri = NetCDF4.NetCDFUri ()
uri.FileName <- fname
uri.OpenMode <- mode
try
let ds = NetCDF4.NetCDFDataSet.Open uri
Log.Debug $"openDataSet: {fname} completed in {sw.ElapsedMilliseconds}ms"
Ok ds
with e ->
Log.Error $"%A{e}"
reraise ()
else
Error $"File '{fname}' does not exist"
let ds = NetCDF4.NetCDFDataSet.Open uri
Log.Verbose $"openDataSet: {fname} completed in {sw.ElapsedMilliseconds}ms"
ds
let openDataSet = openDataSetWithMode ResourceOpenMode.ReadOnly
@@ -138,13 +133,13 @@ type DatasetAgent(archives: ArchiveAgent.ArchiveAgent, ?expiry) =
Log.Error $"withDataSet: {e}"
reraise ()
[<TailCall>]
let rec expunge (inbox: Inbox) =
async {
Threading.Thread.Sleep expiry
inbox.Post Prune
return expunge inbox
do! Async.Sleep expiry
do inbox.Post Prune
return! expunge inbox
}
|> Async.Start
let update msg state =
match msg with
@@ -159,7 +154,7 @@ type DatasetAgent(archives: ArchiveAgent.ArchiveAgent, ?expiry) =
member val private inbox: Inbox =
Inbox.Start (fun (inbox: Inbox) ->
expunge inbox
expunge inbox |> Async.Start
let rec loop state =
async {
@@ -168,7 +163,8 @@ type DatasetAgent(archives: ArchiveAgent.ArchiveAgent, ?expiry) =
return! loop state'
}
loop Map.empty)
loop Map.empty
)
member private x.dispose() = x.inbox.Post Dispose
@@ -197,13 +193,9 @@ type DatasetAgent(archives: ArchiveAgent.ArchiveAgent, ?expiry) =
| None ->
// NOTE: The file is not in the cache, so open from disk
try
match openDataSet file with
| Ok ds ->
this.addDataSet (file, ds)
withDataSet ds tIdx f |> Ok
| Error e ->
Log.Error $"DatasetAgent.tryUseDataset: {e}"
Error e
let ds = openDataSet file
this.addDataSet (file, ds)
withDataSet ds tIdx f |> Ok
with e ->
Log.Error $"DatasetAgent.tryUseDataset: {e}"
Error e.Message
@@ -211,14 +203,17 @@ type DatasetAgent(archives: ArchiveAgent.ArchiveAgent, ?expiry) =
Log.Error "DatasetAgent.tryUseDataset: time out of bounds"
Error "time out of bounds"
member this.getFrameCount aid =
archives.tryGetArchive aid
|> Option.map (fun archive ->
archive.files.series |> Array.sumBy _.frames
)
|> Option.defaultValue 0
member _.getFrameCount aid =
async {
match! archives.tryGetArchiveAsync aid with
| Some archive ->
let frames = archive.files.series |> Array.sumBy _.frames
return frames
| None ->
return 0
}
member this.eval(f, aid, ?t) =
member this.eval(f, aid, ?t) : Result<'T, string> =
let t = defaultArg t 0
let result = archives.tryGetArchive aid
@@ -226,6 +221,21 @@ type DatasetAgent(archives: ArchiveAgent.ArchiveAgent, ?expiry) =
| Some archive -> this.tryUseDataset archive t f
| None -> Error "DataSet.eval: ArchiveAgent failed"
member this.evalAsync(f, aid, ?t) : Async<Result<'T, string>> =
asyncResult {
let t = defaultArg t 0
match! archives.tryGetArchiveAsync aid with
| Some archive ->
let! res = this.tryUseDataset archive t f
return res
| None ->
return! Error "DataSet.eval: ArchiveAgent failed"
}
member this.evalDto(f, archive, ?t) =
let t = defaultArg t 0
this.tryUseDataset archive t f
member _.run(ds, f, ?t) =
let t = defaultArg t 0
withDataSet ds t f

View File

@@ -30,13 +30,13 @@
<PackageReference Include="Fable.Remoting.DotnetClient" />
<PackageReference Include="FSharp.Data" />
<PackageReference Include="FSharpPlus" />
<PackageReference Include="FsToolkit.ErrorHandling" />
<PackageReference Include="Microsoft.EntityFrameworkCore" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Relational" />
<PackageReference Include="NetTopologySuite" />
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" />
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.NetTopologySuite" />
<PackageReference Include="Npgsql.NetTopologySuite" />
<PackageReference Include="Oceanbox.FvcomKit" />
<PackageReference Include="Oceanbox.SDSLite" />
<PackageReference Include="Serilog.Sinks.Console" />
<PackageReference Include="Thoth.Json.Net" />

View File

@@ -32,43 +32,42 @@ module v1 =
// shitshow to allow insecure ssl certificates
let httpClientHandler = new HttpClientHandler ()
let httpClient = new HttpClient (httpClientHandler)
do httpClient.Timeout <- TimeSpan.FromMinutes 0.5
let token64 () =
Text.ASCIIEncoding.UTF8.GetBytes (token) |> Convert.ToBase64String
do
httpClientHandler.ServerCertificateCustomValidationCallback <-
do httpClientHandler.ServerCertificateCustomValidationCallback <-
HttpClientHandler.DangerousAcceptAnyServerCertificateValidator
httpClient.DefaultRequestHeaders.Remove ("Authorization") |> ignore
httpClient.DefaultRequestHeaders.Add ("Authorization", $"{TokenAuthTag} {token}")
do httpClient.DefaultRequestHeaders.Remove ("Authorization") |> ignore
do httpClient.DefaultRequestHeaders.Add ("Authorization", $"{TokenAuthTag} {token}")
member x.inventoryApi() =
member _.inventoryApi() =
Remoting.createApi url
|> Remoting.withHttpClient httpClient
|> Remoting.withRouteBuilder Api.internalRouteBuilder
|> Remoting.buildProxy<Api.Inventory>
member x.aclApi() =
member _.aclApi() =
Remoting.createApi url
|> Remoting.withHttpClient httpClient
|> Remoting.withRouteBuilder Api.internalRouteBuilder
|> Remoting.buildProxy<Api.Acl>
member x.archiveApi() =
member _.archiveApi() =
Remoting.createApi url
|> Remoting.withHttpClient httpClient
|> Remoting.withRouteBuilder Api.internalRouteBuilder
|> Remoting.buildProxy<Api.Archive>
member x.modelAreaApi() =
member _.modelAreaApi() =
Remoting.createApi url
|> Remoting.withHttpClient httpClient
|> Remoting.withRouteBuilder Api.internalRouteBuilder
|> Remoting.buildProxy<Api.ModelArea>
member x.adminApi() =
member _.adminApi() =
Remoting.createApi url
|> Remoting.withHttpClient httpClient
|> Remoting.withRouteBuilder Api.internalRouteBuilder

View File

@@ -44,12 +44,28 @@ let rec printArchives asJson (archives: ArchiveProps[]) =
let printArchiveTerse n (p: ArchiveProps) =
let startDate = p.startTime.ToString (format = "yyyy-MM-dd HH:mm")
let days = $"%3.1f{(p.endTime - p.startTime).TotalDays}".PadLeft (5, ' ')
let typ = String.truncate 20 ((string p.archiveType).PadRight (20, ' '))
let name = String.truncate 32 p.name
printfn $" %-3d{n + 1} | {p.archiveId} | {startDate} | {days} | {typ} | {name}"
let totalDays = (p.endTime - p.startTime).TotalDays
Console.WriteLine(
"{0,5} | {1,36} | {2,16} | {3,10:F2} | {4,-15} | {5}",
n + 1,
p.archiveId,
startDate,
totalDays,
p.archiveType,
p.name
)
let printArchivesTerse (archives: ArchiveProps[]) =
Console.WriteLine(
"{0,5} | {1,-36} | {2,-16} | {3,-10:F2} | {4,-15} | {5}",
"#",
"Id",
"Start date",
"Total days",
"Type",
"Name"
)
archives |> Array.iteri printArchiveTerse
let printArchiveProp asJson (archive: ArchiveProps) =

View File

@@ -75,6 +75,15 @@
"FSharp.Core": "6.0.6"
}
},
"FsToolkit.ErrorHandling": {
"type": "Direct",
"requested": "[5.0.1, )",
"resolved": "5.0.1",
"contentHash": "93oG3WSogK05H4gkikAmx5pBf30TQJfO1Jky+o/N/nv+RTP3nfOfjlmCHzuyUjQCRFOQog/xQabcky+WBWceeQ==",
"dependencies": {
"FSharp.Core": "9.0.300"
}
},
"Microsoft.EntityFrameworkCore": {
"type": "Direct",
"requested": "[9.0.1, )",
@@ -140,27 +149,6 @@
"Npgsql": "9.0.2"
}
},
"Oceanbox.FvcomKit": {
"type": "Direct",
"requested": "[5.13.0, )",
"resolved": "5.13.0",
"contentHash": "6uVL3fLhRf4OU1hWygGpVex4pI5YB+GaWrKZUgoL/LkGmdFv0qU8Y7v+meHNM3E9bjR7xKinCVfrw5SXsF6C8g==",
"dependencies": {
"FSharp.Core": "9.0.201",
"FSharp.Data": "6.4.1",
"FSharpPlus": "1.7.0",
"FsPickler": "5.3.2",
"KDTree": "1.4.1",
"MathNet.Numerics.FSharp": "5.0.0",
"MessagePack": "3.1.3",
"Oceanbox.SDSLite": "2.8.0",
"ProjNet.FSharp": "5.2.0",
"Serilog": "4.2.0",
"Serilog.Sinks.Console": "6.0.0",
"Serilog.Sinks.Seq": "9.0.0",
"Thoth.Json.Net": "12.0.0"
}
},
"Oceanbox.SDSLite": {
"type": "Direct",
"requested": "[2.8.0, )",
@@ -308,15 +296,6 @@
"FSharp.Data.Runtime.Utilities": "6.4.1"
}
},
"FsPickler": {
"type": "Transitive",
"resolved": "5.3.2",
"contentHash": "LFtxXpQNor8az1ez3rN9oz2cqf/06i9yTrPyJ9R83qLEpFAU7Of0WL2hoSXzLHer4lh+6mO1NV4VQFiBzNRtjw==",
"dependencies": {
"FSharp.Core": "4.3.2",
"System.Reflection.Emit.Lightweight": "4.3.0"
}
},
"Google.Api.CommonProtos": {
"type": "Transitive",
"resolved": "2.17.0",
@@ -352,35 +331,6 @@
"Grpc.Core.Api": "2.71.0"
}
},
"KdTree": {
"type": "Transitive",
"resolved": "1.4.1",
"contentHash": "yWbb35v/V9y88SLLMUPTlAN3pQEoPhDfZf9PApFnlU4kLtwVQ75U9vW5mW4/alQnLBuLKWBKcy4W5xK95mYsuA=="
},
"MathNet.Numerics": {
"type": "Transitive",
"resolved": "5.0.0",
"contentHash": "pg1W2VwaEQMAiTpGK840hZgzavnqjlCMTVSbtVCXVyT+7AX4mc1o89SPv4TBlAjhgCOo9c1Y+jZ5m3ti2YgGgA=="
},
"MathNet.Numerics.FSharp": {
"type": "Transitive",
"resolved": "5.0.0",
"contentHash": "lKYhd68fReW5odX/q+Uzxw3357Duq3zmvkYvnZVqqcc2r/EmrYGDoOdUGuHnhfr8yj9V34js5gQH/7IWcxZJxg==",
"dependencies": {
"FSharp.Core": "6.0.2",
"MathNet.Numerics": "5.0.0"
}
},
"MessagePack.Annotations": {
"type": "Transitive",
"resolved": "3.1.3",
"contentHash": "XTy4njgTAf6UVBKFj7c7ad5R0WVKbvAgkbYZy4f00kplzX2T3VOQ34AUke/Vn/QgQZ7ETdd34/IDWS3KBInSGA=="
},
"MessagePackAnalyzer": {
"type": "Transitive",
"resolved": "3.1.3",
"contentHash": "19u1oVNv2brCs5F/jma8O8CnsKMMpYwNqD0CAEDEzvqwDTAhqC9r7xHZP4stPb3APs/ryO/zVn7LvjoEHfvs7Q=="
},
"Microsoft.EntityFrameworkCore.Abstractions": {
"type": "Transitive",
"resolved": "9.0.1",
@@ -525,21 +475,6 @@
"resolved": "9.0.8",
"contentHash": "tizSIOEsIgSNSSh+hKeUVPK7xmTIjR8s+mJWOu1KXV3htvNQiPMFRMO17OdI1y/4ZApdBVk49u/08QGC9yvLug=="
},
"Microsoft.NET.StringTools": {
"type": "Transitive",
"resolved": "17.11.4",
"contentHash": "mudqUHhNpeqIdJoUx2YDWZO/I9uEDYVowan89R6wsomfnUJQk6HteoQTlNjZDixhT2B4IXMkMtgZtoceIjLRmA=="
},
"Microsoft.NETCore.Platforms": {
"type": "Transitive",
"resolved": "1.1.0",
"contentHash": "kz0PEW2lhqygehI/d6XsPCQzD7ff7gUJaVGPVETX611eadGsA3A877GdSlU0LRVMCTH/+P3o2iDTak+S08V2+A=="
},
"Microsoft.NETCore.Targets": {
"type": "Transitive",
"resolved": "1.1.0",
"contentHash": "aOZA3BWfz9RXjpzt0sRJJMjAscAUm3Hoa4UWAfceV9UTYxgwZ1lZt5nO2myFf+/jetYQo4uTP7zS8sJY67BBxg=="
},
"NetTopologySuite.IO.PostGis": {
"type": "Transitive",
"resolved": "2.1.0",
@@ -548,126 +483,11 @@
"NetTopologySuite": "[2.0.0, 3.0.0-A)"
}
},
"ProjNET": {
"type": "Transitive",
"resolved": "2.0.0",
"contentHash": "iMJG8qpGJ8SjFrB044O8wgo0raAWCdG1Bvly0mmVcjzsrexDHhC+dUct6Wb1YwQtupMBjSTWq7Fn00YeNErprA==",
"dependencies": {
"System.Memory": "4.5.3",
"System.Numerics.Vectors": "4.5.0"
}
},
"Serilog.Sinks.File": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "lxjg89Y8gJMmFxVkbZ+qDgjl+T4yC5F7WSLTvA+5q0R04tfKVLRL/EHpYoJ/MEQd2EeCKDuylBIVnAYMotmh2A==",
"dependencies": {
"Serilog": "4.0.0"
}
},
"Serilog.Sinks.Seq": {
"type": "Transitive",
"resolved": "9.0.0",
"contentHash": "aNU8A0K322q7+voPNmp1/qNPH+9QK8xvM1p72sMmCG0wGlshFzmtDW9QnVSoSYCj0MgQKcMOlgooovtBhRlNHw==",
"dependencies": {
"Serilog": "4.2.0",
"Serilog.Sinks.File": "6.0.0"
}
},
"System.IO": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "3qjaHvxQPDpSOYICjUoTsmoq5u6QJAFRUITgeT/4gqkF1bajbSmb1kwSxEA8AHlofqgcKJcM8udgieRNhaJ5Cg==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0",
"Microsoft.NETCore.Targets": "1.1.0",
"System.Runtime": "4.3.0",
"System.Text.Encoding": "4.3.0",
"System.Threading.Tasks": "4.3.0"
}
},
"System.Memory": {
"type": "Transitive",
"resolved": "4.5.4",
"contentHash": "1MbJTHS1lZ4bS4FmsJjnuGJOu88ZzTT2rLvrhW7Ygic+pC0NWA+3hgAen0HRdsocuQXCkUTdFn9yHJJhsijDXw=="
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.5.0",
"contentHash": "QQTlPTl06J/iiDbJCiepZ4H//BVraReU4O4EoRw1U02H5TLUIT7xn3GnDp9AXPSlJUDyFs4uWjWafNX6WrAojQ=="
},
"System.Reflection": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "KMiAFoW7MfJGa9nDFNcfu+FpEdiHpWgTcS2HdMpDvt9saK3y/G4GwprPyzqjFH9NTaGPQeWNHU+iDlDILj96aQ==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0",
"Microsoft.NETCore.Targets": "1.1.0",
"System.IO": "4.3.0",
"System.Reflection.Primitives": "4.3.0",
"System.Runtime": "4.3.0"
}
},
"System.Reflection.Emit.ILGeneration": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "59tBslAk9733NXLrUJrwNZEzbMAcu8k344OYo+wfSVygcgZ9lgBdGIzH/nrg3LYhXceynyvTc8t5/GD4Ri0/ng==",
"dependencies": {
"System.Reflection": "4.3.0",
"System.Reflection.Primitives": "4.3.0",
"System.Runtime": "4.3.0"
}
},
"System.Reflection.Emit.Lightweight": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "oadVHGSMsTmZsAF864QYN1t1QzZjIcuKU3l2S9cZOwDdDueNTrqq1yRj7koFfIGEnKpt6NjpL3rOzRhs4ryOgA==",
"dependencies": {
"System.Reflection": "4.3.0",
"System.Reflection.Emit.ILGeneration": "4.3.0",
"System.Reflection.Primitives": "4.3.0",
"System.Runtime": "4.3.0"
}
},
"System.Reflection.Primitives": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "5RXItQz5As4xN2/YUDxdpsEkMhvw3e6aNveFXUn4Hl/udNTCNhnKp8lT9fnc3MhvGKh1baak5CovpuQUXHAlIA==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0",
"Microsoft.NETCore.Targets": "1.1.0",
"System.Runtime": "4.3.0"
}
},
"System.Runtime": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "JufQi0vPQ0xGnAczR13AUFglDyVYt4Kqnz1AZaiKZ5+GICq0/1MH/mO/eAJHt/mHW1zjKBJd7kV26SrxddAhiw==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0",
"Microsoft.NETCore.Targets": "1.1.0"
}
},
"System.Text.Encoding": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "BiIg+KWaSDOITze6jGQynxg64naAPtqGHBwDrLaCtixsa5bKiR8dpPOHA7ge3C0JJQizJE+sfkz1wV+BAKAYZw==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0",
"Microsoft.NETCore.Targets": "1.1.0",
"System.Runtime": "4.3.0"
}
},
"System.Threading.Tasks": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "LbSxKEdOUhVe8BezB/9uOGGppt+nZf6e1VFyw6v3DN6lqitm0OSn2uXMOdtP0M3W4iMcqcivm2J6UgqiwwnXiA==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0",
"Microsoft.NETCore.Targets": "1.1.0",
"System.Runtime": "4.3.0"
}
},
"entity": {
"type": "Project",
"dependencies": {
@@ -722,17 +542,6 @@
"FSharp.Core": "4.7.2"
}
},
"MessagePack": {
"type": "CentralTransitive",
"requested": "[3.1.3, )",
"resolved": "3.1.3",
"contentHash": "UiNv3fknvPzh5W+S0VV96R17RBZQQU71qgmsMnjjRZU2rtQM/XcTnOB+klT2dA6T1mxjnNKYrEm164AoXvGmYg==",
"dependencies": {
"MessagePack.Annotations": "3.1.3",
"MessagePackAnalyzer": "3.1.3",
"Microsoft.NET.StringTools": "17.11.4"
}
},
"Newtonsoft.Json": {
"type": "CentralTransitive",
"requested": "[13.0.3, )",
@@ -748,155 +557,13 @@
"Microsoft.Extensions.Logging.Abstractions": "8.0.2"
}
},
"ProjNet.FSharp": {
"type": "CentralTransitive",
"requested": "[5.2.0, )",
"resolved": "5.2.0",
"contentHash": "sYSePg/0sVo16Fk3r7okVSga6i9GAN0kkjt1haEXVw25SF8A4S3Gcpf5+6lgknBGdYiZBmJ+3S6v5g1WSSCp2g==",
"dependencies": {
"FSharp.Core": "8.0.100",
"FSharp.Data": "6.3.0",
"FSharpPlus": "1.5.0",
"ProjNet": "2.0.0"
}
},
"Serilog": {
"type": "CentralTransitive",
"requested": "[4.2.0, )",
"resolved": "4.2.0",
"contentHash": "gmoWVOvKgbME8TYR+gwMf7osROiWAURterc6Rt2dQyX7wtjZYpqFiA/pY6ztjGQKKV62GGCyOcmtP1UKMHgSmA=="
"resolved": "4.0.0",
"contentHash": "2jDkUrSh5EofOp7Lx5Zgy0EB+7hXjjxE2ktTb1WVQmU00lDACR2TdROGKU0K1pDTBSJBN1PqgYpgOZF8mL7NJw=="
}
},
"net9.0/linux-x64": {
"runtime.any.System.IO": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "SDZ5AD1DtyRoxYtEcqQ3HDlcrorMYXZeCt7ZhG9US9I5Vva+gpIWDGMkcwa5XiKL0ceQKRZIX2x0XEjLX7PDzQ=="
},
"runtime.any.System.Reflection": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "hLC3A3rI8jipR5d9k7+f0MgRCW6texsAp0MWkN/ci18FMtQ9KH7E2vDn/DH2LkxsszlpJpOn9qy6Z6/69rH6eQ=="
},
"runtime.any.System.Reflection.Primitives": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "Nrm1p3armp6TTf2xuvaa+jGTTmncALWFq22CpmwRvhDf6dE9ZmH40EbOswD4GnFLrMRS0Ki6Kx5aUPmKK/hZBg=="
},
"runtime.any.System.Runtime": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "fRS7zJgaG9NkifaAxGGclDDoRn9HC7hXACl52Or06a/fxdzDajWb5wov3c6a+gVSlekRoexfjwQSK9sh5um5LQ==",
"dependencies": {
"System.Private.Uri": "4.3.0"
}
},
"runtime.any.System.Text.Encoding": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "+ihI5VaXFCMVPJNstG4O4eo1CfbrByLxRrQQTqOTp1ttK0kUKDqOdBSTaCB2IBk/QtjDrs6+x4xuezyMXdm0HQ=="
},
"runtime.any.System.Threading.Tasks": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "OhBAVBQG5kFj1S+hCEQ3TUHBAEtZ3fbEMgZMRNdN8A0Pj4x+5nTELEqL59DU0TjKVE6II3dqKw4Dklb3szT65w=="
},
"runtime.native.System": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "c/qWt2LieNZIj1jGnVNsE2Kl23Ya2aSTBuXMD6V7k9KWr6l16Tqdwq+hJScEpWER9753NWC8h96PaVNY5Ld7Jw==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0",
"Microsoft.NETCore.Targets": "1.1.0"
}
},
"runtime.unix.System.Private.Uri": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "ooWzobr5RAq34r9uan1r/WPXJYG1XWy9KanrxNvEnBzbFdQbMG7Y3bVi4QxR7xZMNLOxLLTAyXvnSkfj5boZSg==",
"dependencies": {
"runtime.native.System": "4.3.0"
}
},
"System.IO": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "3qjaHvxQPDpSOYICjUoTsmoq5u6QJAFRUITgeT/4gqkF1bajbSmb1kwSxEA8AHlofqgcKJcM8udgieRNhaJ5Cg==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0",
"Microsoft.NETCore.Targets": "1.1.0",
"System.Runtime": "4.3.0",
"System.Text.Encoding": "4.3.0",
"System.Threading.Tasks": "4.3.0",
"runtime.any.System.IO": "4.3.0"
}
},
"System.Private.Uri": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "I4SwANiUGho1esj4V4oSlPllXjzCZDE+5XXso2P03LW2vOda2Enzh8DWOxwN6hnrJyp314c7KuVu31QYhRzOGg==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0",
"Microsoft.NETCore.Targets": "1.1.0",
"runtime.unix.System.Private.Uri": "4.3.0"
}
},
"System.Reflection": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "KMiAFoW7MfJGa9nDFNcfu+FpEdiHpWgTcS2HdMpDvt9saK3y/G4GwprPyzqjFH9NTaGPQeWNHU+iDlDILj96aQ==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0",
"Microsoft.NETCore.Targets": "1.1.0",
"System.IO": "4.3.0",
"System.Reflection.Primitives": "4.3.0",
"System.Runtime": "4.3.0",
"runtime.any.System.Reflection": "4.3.0"
}
},
"System.Reflection.Primitives": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "5RXItQz5As4xN2/YUDxdpsEkMhvw3e6aNveFXUn4Hl/udNTCNhnKp8lT9fnc3MhvGKh1baak5CovpuQUXHAlIA==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0",
"Microsoft.NETCore.Targets": "1.1.0",
"System.Runtime": "4.3.0",
"runtime.any.System.Reflection.Primitives": "4.3.0"
}
},
"System.Runtime": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "JufQi0vPQ0xGnAczR13AUFglDyVYt4Kqnz1AZaiKZ5+GICq0/1MH/mO/eAJHt/mHW1zjKBJd7kV26SrxddAhiw==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0",
"Microsoft.NETCore.Targets": "1.1.0",
"runtime.any.System.Runtime": "4.3.0"
}
},
"System.Text.Encoding": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "BiIg+KWaSDOITze6jGQynxg64naAPtqGHBwDrLaCtixsa5bKiR8dpPOHA7ge3C0JJQizJE+sfkz1wV+BAKAYZw==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0",
"Microsoft.NETCore.Targets": "1.1.0",
"System.Runtime": "4.3.0",
"runtime.any.System.Text.Encoding": "4.3.0"
}
},
"System.Threading.Tasks": {
"type": "Transitive",
"resolved": "4.3.0",
"contentHash": "LbSxKEdOUhVe8BezB/9uOGGppt+nZf6e1VFyw6v3DN6lqitm0OSn2uXMOdtP0M3W4iMcqcivm2J6UgqiwwnXiA==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0",
"Microsoft.NETCore.Targets": "1.1.0",
"System.Runtime": "4.3.0",
"runtime.any.System.Threading.Tasks": "4.3.0"
}
}
}
"net9.0/linux-x64": {}
}
}

View File

@@ -378,6 +378,7 @@ module Dto =
projection: string
focalPoint: single * single
defaultZoom: single
/// Length of frame in seconds
freq: int
frames: int
startTime: DateTime
@@ -535,4 +536,4 @@ module Dto =
json = ""
isPublished = false
isPublic = true
}
}

View File

@@ -9,6 +9,8 @@ in
pkgs.mkShellNoCC {
inputsFrom = [ baseShell ];
LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath [ pkgs.netcdf ];
LOG_LEVEL = "verbose";
CLIENT_PORT = port + 80;

View File

@@ -551,8 +551,9 @@ module Atmo =
Log.Information("sorcerer: wind: user {username} -> {RequestPath}", ctx.User.Identity.Name, ctx.Request.Path)
let uid = ctx.User.Identity.Name
let observer = ctx.GetService<ObserverFactory>().Create(uid, tag="Atmo.wind")
{
WindTile = fun aid t n z x y -> Arome.windTile aid t n z x y |> async.Return
WindTile = Arome.windTile
GetBarbSigns = fun () -> Settings.barbsJson |> async.Return
}

View File

@@ -1,10 +1,12 @@
module Arome
open FSharpPlus
open Microsoft.Research.Science.Data
open Serilog
open System
open Microsoft.Research.Science.Data
open FsToolkit.ErrorHandling
open Serilog
open Oceanbox
open Oceanbox.DataAgent
open Sorcerer.Types
@@ -15,58 +17,121 @@ open Remoting
// let aromeNorwaySample = DatasetAgent.openDataSet "/data/archives/arome/norway/arome-weather-sample.nc"
let private getGrid aid =
let f ds _ =
Log.Debug("Evaluating read grid function")
FvcomKit.Arome.getGrid ds
Settings.dataAgent.eval(f, aid)
|> Result.flatten
let private getGrid aid : Async<Result<FvcomKit.Arome.SquareGrid, string>> =
asyncResult {
let f ds _ =
Log.Debug ("Evaluating read grid function")
FvcomKit.Arome.getGrid ds
let! result = dataAgent.eval (f, aid)
return! result
}
let private readUV (x, y) (ds: DataSet) t =
try
Log.Verbose $"Arome.readUV: {ds.URI} {t}"
FvcomKit.Arome.readUV ds t x y
|> Ok
with exn ->
Log.Error(exn, "Arome.readUV excetion with arguments {Time} {XIdx} {YIdx}", t, x, y)
Error "Exception reading netcdf dataset"
let private gridCache = CacheAgent.CacheAgent<Guid * FvcomKit.Arome.SquareGrid * int>(300)
let private gridCache =
CacheAgent.CacheAgent<Guid * FvcomKit.Arome.SquareGrid * int> (300)
let private toVec (x, y) = { X = x; Y = y }
let private getWindArchiveId aid =
Log.Debug $"getWindArchiveId: {aid}"
Settings.withAuth (fun auth ->
let api = InternalApi(Settings.appsettings.archiveSvc, auth)
async {
let auth = Settings.archmaesterAuth
let api = InternalApi (Settings.appsettings.archiveSvc, auth)
let args = aid, Dto.ArchiveType.Atmo (Dto.AtmoVariant.Any, Dto.AtmoFormat.Any)
async {
let inventory = api.inventoryApi()
match! inventory.getAssociated(args) with
| Ok ax ->
if ax.Length > 0 then
let wind = Array.head ax
return Some wind.archiveId
else
let! a = api.inventoryApi().getArchive(aid)
let a = Result.get a
let args = a.modelArea, Dto.ArchiveType.Atmo (Dto.AtmoVariant.Any, Dto.AtmoFormat.Any)
match! inventory.getModelAreaArchives(args) with
| Ok ax ->
if ax.Length > 0 then
let wind = Array.head ax
return Some wind.archiveId
else
return None
| Error e ->
Log.Error $"getWindArchiveId: {e}"
return None
| Error e ->
Log.Error $"getWindArchiveId: {e}"
return None
} |> Async.RunSynchronously
Log.Debug $"getWindArchiveId: {aid}"
let inventory = api.inventoryApi ()
match! inventory.getAssociated (args) with
| Ok ax ->
if ax.Length > 0 then
let wind = Array.tryHead ax |> Option.map _.archiveId
return wind
else
Log.Warning("[Arome] Wind archive not fount as associate. Looking in model area archives instead.")
let! a = api.inventoryApi().getArchive (aid)
let a = a |> Result.defaultWith failwith
let args =
a.modelArea, Dto.ArchiveType.Atmo (Dto.AtmoVariant.Any, Dto.AtmoFormat.Any)
match! inventory.getModelAreaArchives (args) with
| Ok ax ->
let wind = Array.tryHead ax |> Option.map _.archiveId
return wind
| Error e ->
Log.Error $"getWindArchiveId: {e}"
return None
| Error e ->
Log.Error $"getWindArchiveId: {e}"
return None
}
/// Split the web tile (x, y, z) into numBoxes x numBoxes sub-boxes and return
/// the centre position of each box as a flat array. Rows are emitted from the
/// bottom of the tile upwards because map y-coordinates grow opposite to the
/// y-axis of a Canvas. The caller knows how many positions there are per tile.
let findTilePositions (numBoxes: int) (x: int) (y: int) (z: int) : (float * float) array =
    let xf = float x
    let yf = float y
    let zf = float z
    let n = float numBoxes
    // Tile bounding box: north-west corner from (x, y), south-east from (x+1, y+1).
    let lat_n, lng_w = tileToLatLng xf yf zf
    let lat_s, lng_e = tileToLatLng (xf + 1.) (yf + 1.) zf
    let lo = toVec (lng_w, lat_s)
    let hi = toVec (lng_e, lat_n)
    // Side lengths of one sub-box.
    let boxLenX = (hi.X - lo.X) / n
    let boxLenY = (hi.Y - lo.Y) / n
    // Iterate rows backwards (see note above), columns forwards; flatten row-major.
    [| (n - 1.0) .. -1.0 .. 0.0 |]
    |> Array.collect (fun row ->
        [| 0.0 .. (n - 1.0) |]
        |> Array.map (fun col ->
            // Lower corner of this box, then its midpoint.
            let bx = lo.X + col * boxLenX
            let by = lo.Y + row * boxLenY
            (bx + (bx + boxLenX)) * 0.5, (by + (by + boxLenY)) * 0.5
        )
    )
/// Read the (u, v) wind components from dataset ds at time step t and grid
/// index (x, y). The index comes first so it can be partially applied.
let readUV idx ds t =
    let x, y = idx
    FvcomKit.Arome.readUV ds t x y
// WGS84 coordinate reference used when locating lat/lng positions in the
// arome grid (passed to FvcomKit.Arome.tryFindWithProj in tryReadUV).
let private proj = ProjNet.FSharp.Projections.WGS84
/// Look up the grid index for projected position p and read the (u, v) wind
/// components for the given time frame via the dataset agent.
/// Returns Error when p falls outside the grid or when the read throws.
let tryReadUV aid (agent: DatasetAgent.DatasetAgent) (timeFrame: int) (grid: FvcomKit.Arome.SquareGrid) (p: float * float) : Async<Result<single * single, string>> =
    asyncResult {
        // Translate p into a grid index, if it lies inside the grid at all.
        match FvcomKit.Arome.tryFindWithProj proj grid p with
        | Some (x, y) ->
            try
                // evalAsync runs readUV against archive `aid` at the given time step.
                let! coord = agent.evalAsync (readUV (x, y), aid, t = timeFrame)
                return coord
            with ex ->
                // Log the indices so a failing read can be reproduced by hand.
                Log.Error (ex, "Arome.readUV exception with arguments {Time} {XIdx} {YIdx}", timeFrame, x, y)
                return! Error "Exception reading arome uv"
        | None ->
            return! Error "Could not find idx"
    }
/// Resolve the wind archive id, arome grid and frame count for archive `aid`,
/// consulting gridCache first to avoid repeated lookups.
/// asyncOption short-circuits: any step yielding None makes the whole result None.
let tryGetGrid aid : Async<(Guid * FvcomKit.Arome.SquareGrid * int) option> =
    asyncOption {
        match gridCache.tryGet aid with
        | Some (wid, g, maxFrame) -> return wid, g, maxFrame
        | None ->
            // Cache miss: find the associated wind archive, then load its grid
            // and frame count.
            let! wid = getWindArchiveId aid
            let! g = getGrid wid |> Async.map Result.toOption
            let! maxFrame = dataAgent.getFrameCount wid
            // Cache under the requesting archive id, not the wind archive id.
            do gridCache.add (aid, (wid, g, maxFrame))
            return wid, g, maxFrame
    }
/// <summary>
/// Reads an arome NetCDF archive based on an OpenLayers web tile coordinate. The coordinate is translated to longitude
/// and latitude coordinates, which are looked up in the arome dataset.
@@ -78,74 +143,29 @@ let private getWindArchiveId aid =
/// <param name="x">web tile x coordinate</param>
/// <param name="y">web tile y coordinate</param>
/// <returns>A flat array of wind velocity options, as the tile might be outside the bounds of the arome archive</returns>
let windTile (aid: Guid) timeFrame numBoxes z x y : V2<single> option array =
Log.Verbose $"windTile: {aid}, {timeFrame}"
let xgrid =
gridCache.tryGet aid
|> Option.orElseWith (fun () ->
getWindArchiveId aid
|> Option.bind (fun wid ->
getGrid wid
|> Result.either
(fun g ->
let maxFrame = Settings.dataAgent.getFrameCount wid
gridCache.add(aid, (wid, g, maxFrame))
Some (wid, g, maxFrame))
(fun _ -> None)))
let windTile (aid: Guid) (timeFrame: int) (numBoxes: int) (z: int) (x: int) (y: int) : Async<V2<single> option array> =
async {
Log.Verbose $"windTile: {aid}, {timeFrame}"
match! tryGetGrid aid with
| Some (id, grid, maxFrames) ->
if timeFrame < maxFrames then
let positions = findTilePositions numBoxes x y z
let! results = positions |> Array.map (tryReadUV id dataAgent timeFrame grid) |> Async.Sequential
let velocities: V2<single> option array =
results
|> Array.map (
function
| Ok (u, v) -> Some { X = u; Y = v }
| Error err ->
Log.Error ("[Arome] windTile error: {ErrorMsg}", err)
None
)
// NOTE(simkir): Calculate the bbox of the given tile
let lat_n, lng_w = tileToLatLng x y z
let lat_s, lng_e = tileToLatLng (x + 1.) (y + 1.) z
let min = (lng_w, lat_s) |> toVec
let max = (lng_e, lat_n) |> toVec
// NOTE(simkir): Split the tile into a given amount of boxes, and find
// the element closest to the middle of each of those boxes
let boxLenX = (max.X - min.X) / numBoxes
let boxLenY = (max.Y - min.Y) / numBoxes
let findVelocities (aid, grid: FvcomKit.Arome.SquareGrid) : V2<single> option array =
let readUV' p =
Settings.dataAgent.eval(readUV p, aid, timeFrame)
|> Result.flatten
|> Result.map toVec
|> Result.toOption
// NOTE(simkir): Iterate the y-axis backwards because y-coordinates on
// the map is opposite from the y-axis on a Canvas
[| (numBoxes - 1.0) .. -1.0 .. 0.0 |]
|> Array.map (fun row ->
[| 0.0 .. (numBoxes - 1.0) |]
|> Array.map (fun col ->
let boxMin = {
X = min.X + (col * boxLenX)
Y = min.Y + (row * boxLenY)
}
let boxMax = {
X = boxMin.X + boxLenX
Y = boxMin.Y + boxLenY
}
let boxMid = {
X = (boxMin.X + boxMax.X) * 0.5
Y = (boxMin.Y + boxMax.Y) * 0.5
}
let pos = boxMid.X, boxMid.Y
pos
|> FvcomKit.Arome.tryFind grid
|> Option.bind readUV'
)
)
|> Array.concat // NOTE: Make the array of velocities flat. The caller knows how many arrows there are per tile
xgrid
|> Option.map (fun (wid, grid, maxFrame) ->
if timeFrame < maxFrame then
findVelocities(wid, grid)
else
[||])
|> Option.defaultWith (fun err ->
Log.Error("Arome.windInTile error: {Error}", err)
[||]
)
return velocities
else
Log.Error ("Arome.windInTile error: Outside of timeframe")
return [||]
| None ->
Log.Error ("Arome.windInTile error: Could not find grid")
return [||]
}

View File

@@ -4,6 +4,7 @@ open System
open System.IO
open System.Text
open System.Text.RegularExpressions
open Microsoft.AspNetCore.Builder
open Microsoft.AspNetCore.Http
open Microsoft.AspNetCore.Authentication
@@ -11,15 +12,16 @@ open Microsoft.AspNetCore.Authentication.Cookies
open Microsoft.AspNetCore.Cors.Infrastructure
open Microsoft.Extensions.DependencyInjection
open Microsoft.Extensions.Logging
open Argu
open Dapr.Actors
open Dapr.Actors.Client
open Prometheus
open Giraffe
open Prometheus
open Saturn
open Saturn.Dapr
open Saturn.OpenFga
open Saturn.Observer
open Saturn.OpenFga
open Saturn.OpenTelemetry
open Sentry
open Sentry.AspNetCore
@@ -42,33 +44,37 @@ type Arguments =
| Log_Level _ -> "Verbose=0, Debug=1, Information=2, Warning=3, Error=4, Fatal=5"
| Port _ -> "listen port (default 8085)"
let configureSerilog (builder: ILoggingBuilder) =
let configureSerilog () : unit =
let minInfo = if logLevel < LogEventLevel.Information then LogEventLevel.Information else logLevel
let minWarn = if logLevel < LogEventLevel.Warning then LogEventLevel.Warning else logLevel
LoggerConfiguration()
.MinimumLevel.Is(logLevel)
.MinimumLevel.Override("Microsoft", minInfo)
.MinimumLevel.Override("System", minInfo)
.MinimumLevel.Override("Giraffe", minWarn)
.Filter.ByExcluding("RequestPath like '/health%'")
.Filter.ByExcluding("RequestPath like '/metrics'")
.WriteTo.Console()
.WriteTo.OpenTelemetry(fun opt ->
opt.Endpoint <- appsettings.otelCollector
opt.IncludedData <-
IncludedData.TraceIdField
// ||| IncludedData.SpanIdField
// ||| IncludedData.SourceContextAttribute
opt.ResourceAttributes <-
dict [
"service.name", box appsettings.appName
"pod.name", box Environment.MachineName
]
)
.Enrich.FromLogContext()
// .Enrich.WithCorrelationIdHeader("traceparent")
.CreateLogger()
|> fun logger -> Log.Logger <- logger
let logger =
LoggerConfiguration()
.MinimumLevel.Is(logLevel)
.MinimumLevel.Override("Microsoft", minInfo)
.MinimumLevel.Override("System", minInfo)
.MinimumLevel.Override("Giraffe", minWarn)
.Filter.ByExcluding("RequestPath like '/health%'")
.Filter.ByExcluding("RequestPath like '/metrics'")
.WriteTo.Console()
.WriteTo.OpenTelemetry(fun opt ->
opt.Endpoint <- appsettings.otelCollector
opt.IncludedData <-
IncludedData.TraceIdField
// ||| IncludedData.SpanIdField
// ||| IncludedData.SourceContextAttribute
opt.ResourceAttributes <-
dict [
"service.name", box appsettings.appName
"pod.name", box Environment.MachineName
]
)
.Enrich.FromLogContext()
// .Enrich.WithCorrelationIdHeader("traceparent")
.CreateLogger()
Log.Logger <- logger
/// Route all ASP.NET Core logging through Serilog, removing the default providers.
let configureLogging (builder: ILoggingBuilder) =
    builder.ClearProviders().AddSerilog()
    |> ignore
@@ -79,7 +85,8 @@ let corsPolicy (policy: CorsPolicyBuilder) =
.AllowAnyHeader()
.AllowAnyMethod()
.WithOrigins(appsettings.allowedOrigins)
|> ignore
.SetIsOriginAllowedToAllowWildcardSubdomains()
|> ignore
let configureServices (service: IServiceCollection) =
service
@@ -296,14 +303,15 @@ let app port =
settings otelConfig
use_redis
use_openfga
})
}
)
use_sentry sentryOptions
use_multiauth multiAuthSettings
// use_cookie_sso appsettings.sso
use_router webApp
with_fga appsettings.fga
app_config configureApp
logging configureSerilog
logging configureLogging
service_config configureServices
use_dapr configureActors
with_observer appsettings.appName
@@ -329,11 +337,20 @@ let main argv =
args.TryGetResult Log_Level
|> Option.iter (fun level -> Environment.SetEnvironmentVariable("LOG_LEVEL", level))
configureSerilog ()
let port = args.GetResult(Port, defaultValue = Settings.port)
if not (Directory.Exists appsettings.cacheDir) then
Directory.CreateDirectory appsettings.cacheDir
|> ignore
let uri = Uri appsettings.archiveSvc
let ip =
let entry = Net.Dns.GetHostEntry uri.DnsSafeHost
entry.AddressList |> Array.head
Log.Information("Archmaester is {Url} ({Ip})", uri, ip)
run (app port)
0

View File

@@ -3,6 +3,7 @@ module Settings
open System
open System.IO
open System.Reflection
open Dapr.Client
open Azure.Identity
open Azure.Security.KeyVault.Secrets
@@ -10,6 +11,7 @@ open Thoth.Json.Net
open Serilog
open Serilog.Events
open FSharpPlus
open Oceanbox.DataAgent
open Oceanbox.ServerPack.MultiAuth
open Oceanbox.ServerPack.Fga
@@ -32,13 +34,6 @@ type Settings = {
cacheDir: string
}
let tryGetEnv =
Environment.GetEnvironmentVariable
>> function
| null
| "" -> None
| x -> Some x
let private settings: Settings =
let settings =
File.ReadAllText "appsettings.json"
@@ -56,12 +51,6 @@ let private settings: Settings =
|> Option.defaultValue settings.plainAuthUsers
{ settings with plainAuthUsers = users }
let private isInt n =
try
int n |> ignore
true
with _ -> false
let private secretStore = tryGetEnv "SECRETSTORE" |> Option.defaultValue "secretstore"
let private keyVault = tryGetEnv "KEYVAULT" |> Option.defaultValue "azure-keyvault"
let private configStore = tryGetEnv "CONFIGSTORE" |> Option.defaultValue "configstore"
@@ -90,7 +79,9 @@ let tryGetSecret key =
Log.Error msg
printf $">>> ERROR: {msg}\n %A{exn}"
return None
} |> Async.AwaitTask |> Async.RunSynchronously
}
|> Async.AwaitTask
|> Async.RunSynchronously
let private vault =
settings.sso.keyVault
@@ -106,7 +97,8 @@ let tryGetFromVault key =
printfn $"ERROR: tryGetFromVault(): {key}"
None
let private configureEnv () =
let configureEnv () =
printfn "Configuring env"
let setupAppEnv () =
monad {
let! secret = tryGetSecret appEnvSecret
@@ -151,7 +143,9 @@ let private configureEnv () =
let! secret = Dictionary.tryGetValue "AZURE_CLIENT_SECRET" azure
Environment.SetEnvironmentVariable("AZURE_CLIENT_SECRET", secret)
} |> ignore
printfn ">>> Configuring runtime environment from secret store"
match tryGetEnv "LOCAL_DAPR_RUNTIME" with
| Some x when x = "1" || x = "true" ->
printfn " > Waiting for Dapr..."
@@ -159,6 +153,7 @@ let private configureEnv () =
setupAzureEnv ()
printfn $" > Azure Keyvault credentials in {keyVault}"
| Some _ | None -> ()
setupEnvFromVault ()
printfn $" > App envrionment in {appEnvSecret}"
printfn "<<< Done configuring runtime environment"
@@ -238,6 +233,7 @@ let appsettings =
Assembly
.GetEntryAssembly()
.GetCustomAttribute<AssemblyInformationalVersionAttribute>().InformationalVersion)
{
settings with
oidc.clientSecret = oidcClientSecret
@@ -265,7 +261,8 @@ let archmaesterAuth =
|> Option.defaultWith (fun () ->
printfn "ERROR: envrionment ARCHMEISTER_AUTH not set"
Environment.Exit 1
"")
""
)
let inline withAuth f = f archmaesterAuth

View File

@@ -42,13 +42,13 @@
<PackageReference Include="Fable.Remoting.Server" />
<PackageReference Include="FSharp.Data" />
<PackageReference Include="FSharpPlus" />
<PackageReference Include="FsToolkit.ErrorHandling" />
<PackageReference Include="Giraffe" />
<PackageReference Include="IdentityModel.AspNetCore" />
<PackageReference Include="MessagePack" />
<PackageReference Include="Microsoft.AspNetCore.DataProtection.StackExchangeRedis" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.OpenIdConnect" />
<PackageReference Include="Newtonsoft.Json" />
<PackageReference Include="Oceanbox.FvcomKit" />
<PackageReference Include="prometheus-net.AspNetCore" />
<PackageReference Include="Saturn" />
<PackageReference Include="Saturn.OpenTelemetry" />
@@ -66,6 +66,9 @@
<PackageReference Include="Thoth.Json.Net" />
<PackageReference Include="FSharp.Core" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Oceanbox.FvcomKit" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\Interfaces\Archmaester\Archmaester.Api.fsproj" />
<ProjectReference Include="..\..\..\Interfaces\Sorcerer\Sorcerer.Api.fsproj" />

View File

@@ -2,7 +2,7 @@ module Utils
open ProjNet.FSharp
open System
open FSharpPlus
/// Apply f to both elements of a pair.
let bimap f (a, b) =
    let first = f a
    let second = f b
    first, second
@@ -26,3 +26,12 @@ let lngLatToLCC =
/// Scale distortion of the Mercator projection at the given latitude in
/// degrees: the secant of the latitude, 1 / cos(lat).
let mercatorScaleFactor (lat: float) =
    let radians = lat * Math.PI / 180.0
    1.0 / Math.Cos radians
/// Some str when str is non-null and non-empty, otherwise None.
let tryStr str =
    match str with
    | null
    | "" -> None
    | s -> Some s

/// Read an environment variable, mapping unset or empty values to None.
let tryGetEnv = Environment.GetEnvironmentVariable >> tryStr

View File

@@ -209,6 +209,15 @@
"FSharp.Core": "6.0.6"
}
},
"FsToolkit.ErrorHandling": {
"type": "Direct",
"requested": "[5.0.1, )",
"resolved": "5.0.1",
"contentHash": "93oG3WSogK05H4gkikAmx5pBf30TQJfO1Jky+o/N/nv+RTP3nfOfjlmCHzuyUjQCRFOQog/xQabcky+WBWceeQ==",
"dependencies": {
"FSharp.Core": "9.0.300"
}
},
"Giraffe": {
"type": "Direct",
"requested": "[7.0.2, )",
@@ -270,11 +279,11 @@
},
"Oceanbox.FvcomKit": {
"type": "Direct",
"requested": "[5.13.0, )",
"resolved": "5.13.0",
"contentHash": "6uVL3fLhRf4OU1hWygGpVex4pI5YB+GaWrKZUgoL/LkGmdFv0qU8Y7v+meHNM3E9bjR7xKinCVfrw5SXsF6C8g==",
"requested": "[6.0.0-alpha.1, )",
"resolved": "6.0.0-alpha.1",
"contentHash": "VBvQjHiSV1aBPNlTti7XyUileo0SP/Wn84ABGFiCD8Lt0PcsjtZhmkvj+3X/geh0NziB5tF5gscravYT+G+A1Q==",
"dependencies": {
"FSharp.Core": "9.0.201",
"FSharp.Core": "9.0.303",
"FSharp.Data": "6.4.1",
"FSharpPlus": "1.7.0",
"FsPickler": "5.3.2",
@@ -285,8 +294,7 @@
"ProjNet.FSharp": "5.2.0",
"Serilog": "4.2.0",
"Serilog.Sinks.Console": "6.0.0",
"Serilog.Sinks.Seq": "9.0.0",
"Thoth.Json.Net": "12.0.0"
"Serilog.Sinks.Seq": "9.0.0"
}
},
"prometheus-net.AspNetCore": {
@@ -1364,13 +1372,13 @@
"FSharp.Data": "[6.4.1, )",
"FSharpPlus": "[1.7.0, )",
"Fable.Remoting.DotnetClient": "[3.35.0, )",
"FsToolkit.ErrorHandling": "[5.0.1, )",
"Microsoft.EntityFrameworkCore": "[9.0.1, )",
"Microsoft.EntityFrameworkCore.Relational": "[9.0.1, )",
"NetTopologySuite": "[2.5.0, )",
"Npgsql.EntityFrameworkCore.PostgreSQL": "[9.0.2, )",
"Npgsql.EntityFrameworkCore.PostgreSQL.NetTopologySuite": "[9.0.2, )",
"Npgsql.NetTopologySuite": "[9.0.2, )",
"Oceanbox.FvcomKit": "[5.13.0, )",
"Oceanbox.SDSLite": "[2.8.0, )",
"Serilog.Sinks.Console": "[6.0.0, )",
"Thoth.Json.Net": "[12.0.0, )"

View File

@@ -0,0 +1,99 @@
namespace Oceanbox.Sorcerer

module Arome =

    open System
    open Microsoft.Research.Science.Data
    open Xunit
    open FSharpPlus
    open FsUnit.Xunit
    open FsUnit.CustomMatchers
    open ProjNet.FSharp
    open Serilog
    open Oceanbox
    open Oceanbox.DataAgent

    // Known arome NetCDF fixture; tests fail with a clear message when absent.
    let path = "/data/archives/Arome/meps_det_sfc_20220102T00Z.nc"

    // WGS84 -> Lambert conformal conic (met.no) transform matching the arome grid.
    let trans = makeTransform CoordSys.WGS84 (CoordSys.LCCMet ())

    // Archmaester endpoint and credentials must be supplied via the environment;
    // Option.get fails fast when they are missing.
    let url = Utils.tryGetEnv "ARCHMAESTER_URL" |> Option.get
    let creds = Utils.tryGetEnv "ARCHMAESTER_AUTH" |> Option.get

    let archiveAgent = ArchiveAgent.ArchiveAgent { url = url; credentials = creds }
    let dataAgent = new DatasetAgent.DatasetAgent(archiveAgent)

    let logger =
        LoggerConfiguration()
            .MinimumLevel.Is(Events.LogEventLevel.Verbose)
            .WriteTo.Console()
            .CreateLogger()

    do Log.Logger <- logger

    [<Fact>]
    let ``tile coord to index`` () =
        use ds: DataSet = DatasetAgent.openDataSet path

        match FvcomKit.Arome.getGrid ds with
        | Ok grid ->
            let lat, lng = Utils.tileToLatLng 514 331 10
            let p = trans.project ((lng, lat))
            let idx = FvcomKit.Arome.tryFindIndex grid p
            idx |> should equal (Some (44, 139))
        | Error _ ->
            failwithf "Expecting arome .nc %s to be present" path

    [<Fact>]
    let ``read tile coord uv`` () =
        // FIX(review): these tests previously returned the async without
        // starting it — xUnit awaits Task, not F# Async, so the assertions
        // never ran. Run the workflow synchronously instead.
        async {
            use ds: DataSet = DatasetAgent.openDataSet path

            match FvcomKit.Arome.getGrid ds with
            | Ok grid ->
                let lat, lng = Utils.tileToLatLng 514 331 10
                let p = trans.project ((lng, lat))

                match FvcomKit.Arome.tryFindIndex grid p with
                | Some (x, y) ->
                    let u, v = dataAgent.run (ds, Arome.readUV (x, y), 23)
                    (u, v) |> should equal (11.44918442f, 5.609796524f)
                | None ->
                    failwithf "Expecting idx to be found"
            | Error _ ->
                failwithf "Expecting arome .nc %s to be present" path
        }
        |> Async.RunSynchronously

    [<Fact>]
    let ``read [23, 0, 400, 182]`` () =
        async {
            use ds: DataSet = DatasetAgent.openDataSet path
            let u, v = dataAgent.run (ds, Arome.readUV (182, 400), 23)
            (u, v) |> should equal (5.09469223f, 8.497491837f)
        }
        |> Async.RunSynchronously

    [<Fact>]
    let ``wind tile [at] 514, 331, 10`` () =
        async {
            let aid = Guid "36633129-cecb-45b9-bcdb-4ce5dc11237c"
            use ds: DataSet = DatasetAgent.openDataSet path

            match FvcomKit.Arome.getGrid ds with
            | Ok grid ->
                // One box per tile -> exactly one sampled position.
                let positions = Arome.findTilePositions 1 514 331 10
                let! results = positions |> Array.map (Arome.tryReadUV aid dataAgent 0 grid) |> Async.Sequential

                let velocities: (single * single) array =
                    results
                    |> Array.choose (
                        function
                        | Ok (u, v) -> Some (u, v)
                        | Error err ->
                            Log.Error ("[Arome] windTile error: {ErrorMsg}", err)
                            None
                    )

                velocities |> should equal [| (11.43746567f, 6.017023087f) |]
            | Error err ->
                failwithf "Expecting Grid to be found: %s" err
        }
        |> Async.RunSynchronously

View File

@@ -0,0 +1,41 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<Nullable>enable</Nullable>
<OutputType>Exe</OutputType>
<RootNamespace>Sorcerer.Server.Tests</RootNamespace>
<TargetFramework>net9.0</TargetFramework>
<!--
This template uses native xUnit.net command line options when using 'dotnet run' and
VSTest by default when using 'dotnet test'. For more information on how to enable support
for Microsoft Testing Platform, please visit:
https://xunit.net/docs/getting-started/v3/microsoft-testing-platform
-->
</PropertyGroup>
<ItemGroup>
<Content Include="appsettings.json" CopyToOutputDirectory="PreserveNewest" />
<Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" />
</ItemGroup>
<ItemGroup>
<Compile Include="Arome.fs" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FsUnit" />
<PackageReference Include="FsUnit.xUnit" />
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<!-- <PackageReference Include="Oceanbox.FvcomKit" /> -->
<PackageReference Include="xunit.v3" />
<PackageReference Include="xunit.runner.visualstudio" />
<PackageReference Include="Oceanbox.SDSLite" />
<PackageReference Include="FSharp.Core" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Server\Sorcerer.fsproj" />
<ProjectReference Include="..\..\..\DataAgent\src\DataAgent\Oceanbox.DataAgent.fsproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,71 @@
{
"oidc": {
"issuer": "https://auth.oceanbox.io/realms/oceanbox",
"authorization_endpoint": "https://auth.oceanbox.io/realms/oceanbox/protocol/openid-connect/auth",
"token_endpoint": "https://auth.oceanbox.io/realms/oceanbox/protocol/openid-connect/token",
"jwks_uri": "https://auth.oceanbox.io/realms/oceanbox/protocol/openid-connect/certs",
"userinfo_endpoint": "https://auth.oceanbox.io/realms/oceanbox/protocol/openid-connect/userinfo",
"end_session_endpoint": "https://auth.oceanbox.io/realms/oceanbox/protocol/openid-connect/logout",
"device_authorization_endpoint": "https://auth.oceanbox.io/realms/oceanbox/protocol/openid-connect/auth/device",
"clientId": "sorcerer_dev",
"clientSecret": "",
"scopes": [
"openid",
"email",
"offline_access",
"profile"
],
"audiences": [
"atlantis",
"atlantis_dev",
"sorcerer",
"sorcerer_dev"
]
},
"sso": {
"cookieDomain": ".oceanbox.io",
"cookieName": ".obx.staging",
"ttl": 12.0,
"signedOutRedirectUri": "https://atlantis.local.oceanbox.io/",
"realm": "atlantis",
"environment": "staging",
"keyStore": {
"kind": "azure",
"uri": "https://atlantis.blob.core.windows.net",
"key": "dataprotection-keys"
},
"keyVault": {
"kind": "azure",
"uri": "https://atlantisvault.vault.azure.net",
"key": "dataencryption-keys"
}
},
"plainAuthUsers": [],
"fga": {
"apiUrl": "https://openfga.dev.oceanbox.io",
"apiKey": "",
"storeId": "01JH65JAW80D06GYBN7A8TBZRG",
"modelId": ""
},
"sentryUrl": "",
"redis": "localhost:6379,user=default,password=secret",
"allowedOrigins": [
"http://localhost:8085",
"http://localhost:8080",
"https://localhost:8080",
"https://maps.oceanbox.io",
"https://atlantis.dev.oceanbox.io",
"https://jonas-atlantis.dev.oceanbox.io",
"https://atlantis.svc.oceanbox.io",
"https://a.local.oceanbox.io:8080"
],
"appName": "sorcerer",
"appEnv": "staging",
"appNamespace": "staging-sorcerer",
"appVersion": "0.0.0",
"otelCollector": "http://10.255.241.12:4317",
"archiveSvc": "https://atlantis.dev.oceanbox.io",
"dataDir": "/data/archives",
"cacheDir": "/data/archives/cache",
"authDomain": "staging"
}

View File

@@ -0,0 +1,3 @@
{
"$schema": "https://xunit.net/schema/current/xunit.runner.schema.json"
}