chore: Format with Fantomas and Cleanup Helm
@@ -18,7 +18,7 @@ let versionFile = Path.getFullName ".version"
Target.create "Clean" (fun _ -> Shell.cleanDir distPath)

Target.create "InstallClient" (fun _ ->
run npm "install" "."
run bun "install" "."
run dotnet "tool restore" "."
)

@@ -72,16 +72,16 @@ let createProcess exe arg dir =
|> CreateProcess.ensureExitCode

let dotnet = createProcess "dotnet"
let npm =
let npmPath =
match ProcessUtils.tryFindFileOnPath "npm" with
let bun =
let bunPath =
match ProcessUtils.tryFindFileOnPath "bun" with
| Some path -> path
| None ->
"npm was not found in path. Please install it and make sure it's available from your path. " +
"bun was not found in path. Please install it and make sure it's available from your path. " +
"See https://safe-stack.github.io/docs/quickstart/#install-pre-requisites for more info"
|> failwith

createProcess npmPath
createProcess bunPath

let run proc arg dir =
proc arg dir
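Note: the hunks above only show fragments of Build.fsx. Below is a minimal self-contained sketch of how these helpers typically fit together in a FAKE build script; the bodies elided by the diff are assumed, not confirmed.

open Fake.Core

// Build a process description that fails the build on a non-zero exit code.
let createProcess exe arg dir =
    CreateProcess.fromRawCommandLine exe arg
    |> CreateProcess.withWorkingDirectory dir
    |> CreateProcess.ensureExitCode

// Run the configured process synchronously and discard its result.
let run proc arg dir = proc arg dir |> Proc.run |> ignore

// Resolve the bun executable from PATH, mirroring the lookup shown above.
let bun =
    let bunPath =
        match ProcessUtils.tryFindFileOnPath "bun" with
        | Some path -> path
        | None -> failwith "bun was not found in path."
    createProcess bunPath

// Usage, matching the InstallClient target in the first hunk:
Target.create "InstallClient" (fun _ -> run bun "install" ".")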
@@ -1,30 +1,32 @@
FROM mcr.microsoft.com/dotnet/sdk:9.0

# Add keys and sources lists
RUN apt-get update && apt-get install -y ca-certificates gnupg
RUN mkdir -p /etc/apt/keyrings
RUN curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
ENV NODE_MAJOR=20
RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
# Bun version
ARG BUN_INSTALL=/usr/local
ARG BUN_VERSION=bun-v1.2.16

# Install node, 7zip, yarn, git, process tools
RUN apt-get update \
&& apt-get install -y nodejs p7zip-full git procps ssh-client
&& apt-get install -y p7zip-full git procps ssh-client unzip

# Install Bun
RUN set -eux; \
curl -fsSL https://bun.sh/install > /usr/local/bin/install-bun \
&& chmod +x /usr/local/bin/install-bun \
&& /usr/local/bin/install-bun $BUN_VERSION debug-info

ENV BUN_INSTALL=/usr/local

# Clean up
RUN apt-get autoremove -y \
&& apt-get clean -y \
&& rm -rf /var/lib/apt/lists/*

# Install dotnet tools
RUN dotnet tool install fable -g

# Trouble brewing
RUN rm /etc/ssl/openssl.cnf

# add dotnet tools to path to pick up fake and paket installation
# Add dotnet tools to path to pick up fake and paket installation
ENV PATH="/root/.dotnet/tools:${PATH}"

# Copy endpoint specific user settings into container to specify
# .NET Core should be used as the runtime.
COPY settings.vscode.json /root/.vscode-remote/data/Machine/settings.json
@@ -7,26 +7,29 @@ charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = false

[*.nix]
indent_size = 2

[*.fs]
max_line_length=120
max_line_length= 120
# Feliz style
fsharp_single_argument_web_mode=true
fsharp_space_before_colon=false
fsharp_max_if_then_else_short_width=60
fsharp_max_infix_operator_expression=50
fsharp_max_record_width=70
fsharp_max_record_number_of_items=1
fsharp_max_array_or_list_width=70
fsharp_max_array_or_list_number_of_items=1
fsharp_max_value_binding_width=70
fsharp_max_function_binding_width=40
fsharp_max_dot_get_expression_width=50
fsharp_multiline_block_brackets_on_same_column=true
fsharp_newline_between_type_definition_and_members=false
fsharp_max_elmish_width=40
fsharp_align_function_signature_to_indentation=false
fsharp_alternative_long_member_definitions=false
fsharp_multi_line_lambda_closing_newline=false
fsharp_disable_elmish_syntax=false
fsharp_keep_indent_in_branch=false
fsharp_blank_lines_around_nested_multiline_expressions=false
fsharp_single_argument_web_mode = true
fsharp_space_before_colon = false
fsharp_max_if_then_else_short_width = 60
fsharp_max_infix_operator_expression = 50
fsharp_max_record_width = 70
fsharp_max_record_number_of_items = 1
fsharp_max_array_or_list_width = 70
fsharp_max_array_or_list_number_of_items = 1
fsharp_max_value_binding_width = 70
fsharp_max_function_binding_width = 40
fsharp_max_dot_get_expression_width = 50
fsharp_multiline_block_brackets_on_same_column = true
fsharp_newline_between_type_definition_and_members = false
fsharp_align_function_signature_to_indentation = false
fsharp_alternative_long_member_definitions = false
fsharp_multi_line_lambda_closing_newline = true
fsharp_keep_indent_in_branch = false
fsharp_blank_lines_around_nested_multiline_expressions = false
fsharp_multiline_bracket_style = stroustrup
fsharp_newline_before_multiline_computation_expression = true
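For illustration, roughly what the new Fantomas settings above produce: with fsharp_multiline_bracket_style = stroustrup the opening brace of a multiline record stays on the binding line, and fsharp_multi_line_lambda_closing_newline = true puts the closing parenthesis of a multiline lambda on its own line. The record below is taken from the NetCDF.fs hunks later in this diff; the function around it is an invented example.

open System

type Filelist = {
    Path: string[]
    FvcomTime: float32[]
    DTime: DateTime[]
    Index: int[]
    ArrayInd: int[]
}

let describePaths (fl: Filelist) =
    fl.Path
    |> Array.iteri (fun i p ->
        // multiline lambda body; the closing ")" lands on its own line
        printfn "%d: %s" i p
    )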
.envrc: 3 lines (new file)
@@ -0,0 +1,3 @@
#!/usr/bin/env bash
# the shebang is ignored, but nice for editors
use nix
.gitignore: 3 lines (vendored)
@@ -16,3 +16,6 @@ dist/
.ionide/
*.db
build.fsx.lock
.direnv/
result*
NuGet.Config
@@ -1,8 +1,11 @@
variables:
SDK_VERSION: 9.0
SKIP_TESTS: "true"

include:
- project: oceanbox/gitlab-ci
ref: main
ref: v4.1
file: Dotnet.gitlab-ci.yml

inputs:
project-name: fvcomstats
project-dir: .
@@ -1,6 +1,12 @@
FROM mcr.microsoft.com/dotnet/runtime:9.0
RUN rm /etc/ssl/openssl.cnf

RUN apt update \
&& apt install -y libnetcdf-dev \
&& apt autoremove -y \
&& apt clean -y \
&& rm -rf /var/lib/apt/lists/*

COPY deploy/ /app

WORKDIR /app
@@ -1,78 +0,0 @@
|
||||
|
||||
Microsoft Visual Studio Solution File, Format Version 12.00
|
||||
# Visual Studio 15
|
||||
VisualStudioVersion = 15.0.27004.2005
|
||||
MinimumVisualStudioVersion = 15.0.26124.0
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{984D61E1-48FB-4098-9019-C1E99B524F58}"
|
||||
ProjectSection(SolutionItems) = preProject
|
||||
README.md = README.md
|
||||
LICENSE = LICENSE
|
||||
Dockerfile = Dockerfile
|
||||
.gitlab-ci.yml = .gitlab-ci.yml
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "Build", "Build.fsproj", "{3E61AD37-F1A6-4B40-B466-A2B501321E2D}"
|
||||
EndProject
|
||||
Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "src", "src/FvcomStats.fsproj", "{C0576A34-91FA-49DA-97CE-3EBB7DFAA667}"
|
||||
EndProject
|
||||
Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "test", "test\Tests.fsproj", "{31724F29-E4E2-45C5-BB0D-1E2776D10A1A}"
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
Debug|Any CPU = Debug|Any CPU
|
||||
Debug|x64 = Debug|x64
|
||||
Debug|x86 = Debug|x86
|
||||
Release|Any CPU = Release|Any CPU
|
||||
Release|x64 = Release|x64
|
||||
Release|x86 = Release|x86
|
||||
EndGlobalSection
|
||||
GlobalSection(SolutionProperties) = preSolution
|
||||
HideSolutionNode = FALSE
|
||||
EndGlobalSection
|
||||
GlobalSection(ProjectConfigurationPlatforms) = postSolution
|
||||
{3E61AD37-F1A6-4B40-B466-A2B501321E2D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{3E61AD37-F1A6-4B40-B466-A2B501321E2D}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{3E61AD37-F1A6-4B40-B466-A2B501321E2D}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{3E61AD37-F1A6-4B40-B466-A2B501321E2D}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{3E61AD37-F1A6-4B40-B466-A2B501321E2D}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{3E61AD37-F1A6-4B40-B466-A2B501321E2D}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{3E61AD37-F1A6-4B40-B466-A2B501321E2D}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{3E61AD37-F1A6-4B40-B466-A2B501321E2D}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{3E61AD37-F1A6-4B40-B466-A2B501321E2D}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{3E61AD37-F1A6-4B40-B466-A2B501321E2D}.Release|x64.Build.0 = Release|Any CPU
|
||||
{3E61AD37-F1A6-4B40-B466-A2B501321E2D}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{3E61AD37-F1A6-4B40-B466-A2B501321E2D}.Release|x86.Build.0 = Release|Any CPU
|
||||
|
||||
{C0576A34-91FA-49DA-97CE-3EBB7DFAA667}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{C0576A34-91FA-49DA-97CE-3EBB7DFAA667}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{C0576A34-91FA-49DA-97CE-3EBB7DFAA667}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{C0576A34-91FA-49DA-97CE-3EBB7DFAA667}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{C0576A34-91FA-49DA-97CE-3EBB7DFAA667}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{C0576A34-91FA-49DA-97CE-3EBB7DFAA667}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{C0576A34-91FA-49DA-97CE-3EBB7DFAA667}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{C0576A34-91FA-49DA-97CE-3EBB7DFAA667}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{C0576A34-91FA-49DA-97CE-3EBB7DFAA667}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{C0576A34-91FA-49DA-97CE-3EBB7DFAA667}.Release|x64.Build.0 = Release|Any CPU
|
||||
{C0576A34-91FA-49DA-97CE-3EBB7DFAA667}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{C0576A34-91FA-49DA-97CE-3EBB7DFAA667}.Release|x86.Build.0 = Release|Any CPU
|
||||
|
||||
{31724F29-E4E2-45C5-BB0D-1E2776D10A1A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{31724F29-E4E2-45C5-BB0D-1E2776D10A1A}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{31724F29-E4E2-45C5-BB0D-1E2776D10A1A}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{31724F29-E4E2-45C5-BB0D-1E2776D10A1A}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{31724F29-E4E2-45C5-BB0D-1E2776D10A1A}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{31724F29-E4E2-45C5-BB0D-1E2776D10A1A}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{31724F29-E4E2-45C5-BB0D-1E2776D10A1A}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{31724F29-E4E2-45C5-BB0D-1E2776D10A1A}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{31724F29-E4E2-45C5-BB0D-1E2776D10A1A}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{31724F29-E4E2-45C5-BB0D-1E2776D10A1A}.Release|x64.Build.0 = Release|Any CPU
|
||||
{31724F29-E4E2-45C5-BB0D-1E2776D10A1A}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{31724F29-E4E2-45C5-BB0D-1E2776D10A1A}.Release|x86.Build.0 = Release|Any CPU
|
||||
|
||||
EndGlobalSection
|
||||
GlobalSection(NestedProjects) = preSolution
|
||||
EndGlobalSection
|
||||
GlobalSection(ExtensibilityGlobals) = postSolution
|
||||
SolutionGuid = {2E1D05DC-0707-48E8-83A9-A7D8FA0A1E04}
|
||||
EndGlobalSection
|
||||
EndGlobal
|
||||
FvcomStats.slnx: 16 lines (new file)
@@ -0,0 +1,16 @@
<Solution>
<Configurations>
<Platform Name="Any CPU" />
<Platform Name="x64" />
<Platform Name="x86" />
</Configurations>
<Folder Name="/Solution Items/">
<File Path=".gitlab-ci.yml" />
<File Path="Dockerfile" />
<File Path="LICENSE" />
<File Path="README.md" />
</Folder>
<Project Path="Build.fsproj" />
<Project Path="src/FvcomStats.fsproj" />
<Project Path="test/Tests.fsproj" />
</Solution>
LICENSE: 21 lines (deleted)
@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2020 Serit Tromsø AS

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
default.nix: 22 lines (new file)
@@ -0,0 +1,22 @@
{
sources ? import ./nix,
pkgs ? import sources.nixpkgs { },
}:
let
dotnet-sdk = pkgs.dotnetCorePackages.sdk_9_0;
in
{
shell = pkgs.mkShell {
packages = with pkgs; [
npins
bun

fantomas
fsautocomplete
dotnet-sdk
];

DOTNET_ROOT = "${dotnet-sdk}/share/dotnet";
NPINS_DIRECTORY = "./nix";
};
}
@@ -1,22 +0,0 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/
@@ -1,21 +0,0 @@
apiVersion: v2
name: fvcom-stats
description: A Helm chart for Kubernetes

# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application

# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
version: 0.2.0

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application.
appVersion: 1.10.0
@@ -1,4 +0,0 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
resources:
- base.yaml
@@ -1,3 +0,0 @@
{
"ConnString" : "Data Source=/data/data.db"
}
@@ -1,25 +0,0 @@
|
||||
- op: test
|
||||
path: /spec/rules/0/http/paths/0/path
|
||||
value: /
|
||||
|
||||
# - op: replace
|
||||
# path: /spec/rules/0/http/paths/0/path
|
||||
# value:
|
||||
# path: /app
|
||||
|
||||
# - op: add
|
||||
# path: /spec/rules/-
|
||||
# value:
|
||||
# host: FvcomStats.oceanbox.io
|
||||
# http:
|
||||
# paths:
|
||||
# - path: /
|
||||
# pathType: Prefix
|
||||
# backend:
|
||||
# service:
|
||||
# name: <deploy_name>
|
||||
# port:
|
||||
# number: 80
|
||||
# - op: add
|
||||
# path: /spec/tls/0/hosts/-
|
||||
# value: FvcomStats.oceanbox.io
|
||||
@@ -1,16 +0,0 @@
|
||||
namespace: <deploy_namespace>
|
||||
secretGenerator:
|
||||
- name: <deploy_name>
|
||||
files:
|
||||
- appsettings.json
|
||||
generatorOptions:
|
||||
disableNameSuffixHash: true
|
||||
patchesJson6902:
|
||||
- target:
|
||||
group: networking.k8s.io
|
||||
version: v1
|
||||
kind: Ingress
|
||||
name: <deploy_name>
|
||||
path: ingress_patch.yaml
|
||||
bases:
|
||||
- ../base
|
||||
@@ -1,5 +0,0 @@
|
||||
ingress:
|
||||
annotations:
|
||||
cert-manager.io/cluster-issuer: letsencrypt-production
|
||||
nginx.ingress.kubernetes.io/whitelist-source-range: 0.0.0.0/0
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
{
|
||||
"ConnString" : "Data Source=/data/data.db"
|
||||
}
|
||||
@@ -1,25 +0,0 @@
|
||||
- op: test
|
||||
path: /spec/rules/0/http/paths/0/path
|
||||
value: /
|
||||
|
||||
# - op: replace
|
||||
# path: /spec/rules/0/http/paths/0/path
|
||||
# value:
|
||||
# path: /app
|
||||
|
||||
# - op: add
|
||||
# path: /spec/rules/-
|
||||
# value:
|
||||
# host: FvcomStats.dev.oceanbox.io
|
||||
# http:
|
||||
# paths:
|
||||
# - path: /
|
||||
# pathType: Prefix
|
||||
# backend:
|
||||
# service:
|
||||
# name: <deploy_name>
|
||||
# port:
|
||||
# number: 80
|
||||
# - op: add
|
||||
# path: /spec/tls/0/hosts/-
|
||||
# value: FvcomStats.dev.oceanbox.io
|
||||
@@ -1,16 +0,0 @@
|
||||
namespace: <deploy_namespace>
|
||||
secretGenerator:
|
||||
- name: <deploy_name>
|
||||
files:
|
||||
- appsettings.json
|
||||
generatorOptions:
|
||||
disableNameSuffixHash: true
|
||||
patchesJson6902:
|
||||
- target:
|
||||
group: networking.k8s.io
|
||||
version: v1
|
||||
kind: Ingress
|
||||
name: <deploy_name>
|
||||
path: ingress_patch.yaml
|
||||
bases:
|
||||
- ../base
|
||||
@@ -1,4 +0,0 @@
|
||||
ingress:
|
||||
annotations:
|
||||
cert-manager.io/cluster-issuer: letsencrypt-production
|
||||
nginx.ingress.kubernetes.io/whitelist-source-range: 10.0.0.0/8,172.16.0.0/12,192.168.0.0/16
|
||||
@@ -1,21 +0,0 @@
|
||||
1. Get the application URL by running these commands:
|
||||
{{- if .Values.ingress.enabled }}
|
||||
{{- range $host := .Values.ingress.hosts }}
|
||||
{{- range .paths }}
|
||||
http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ . }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- else if contains "NodePort" .Values.service.type }}
|
||||
export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "FvcomStats.fullname" . }})
|
||||
export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
|
||||
echo http://$NODE_IP:$NODE_PORT
|
||||
{{- else if contains "LoadBalancer" .Values.service.type }}
|
||||
NOTE: It may take a few minutes for the LoadBalancer IP to be available.
|
||||
You can watch the status of by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "FvcomStats.fullname" . }}'
|
||||
export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "FvcomStats.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
|
||||
echo http://$SERVICE_IP:{{ .Values.service.port }}
|
||||
{{- else if contains "ClusterIP" .Values.service.type }}
|
||||
export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "FvcomStats.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
|
||||
echo "Visit http://127.0.0.1:8080 to use your application"
|
||||
kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:80
|
||||
{{- end }}
|
||||
@@ -1,65 +0,0 @@
|
||||
{{/* vim: set filetype=mustache: */}}
|
||||
{{/*
|
||||
Expand the name of the chart.
|
||||
*/}}
|
||||
{{- define "FvcomStats.name" -}}
|
||||
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}}
|
||||
{{- end -}}
|
||||
|
||||
{{/*
|
||||
Create a default fully qualified app name.
|
||||
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
|
||||
If release name contains chart name it will be used as a full name.
|
||||
*/}}
|
||||
{{- define "FvcomStats.fullname" -}}
|
||||
{{- if .Values.fullnameOverride -}}
|
||||
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}}
|
||||
{{- else -}}
|
||||
{{- $name := default .Chart.Name .Values.nameOverride -}}
|
||||
{{- if contains $name .Release.Name -}}
|
||||
{{- .Release.Name | trunc 63 | trimSuffix "-" -}}
|
||||
{{- else -}}
|
||||
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}}
|
||||
{{- end -}}
|
||||
{{- end -}}
|
||||
{{- end -}}
|
||||
|
||||
{{/*
|
||||
Create chart name and version as used by the chart label.
|
||||
*/}}
|
||||
{{- define "FvcomStats.chart" -}}
|
||||
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}}
|
||||
{{- end -}}
|
||||
|
||||
{{/*
|
||||
Common labels
|
||||
*/}}
|
||||
{{- define "FvcomStats.labels" -}}
|
||||
helm.sh/chart: {{ include "FvcomStats.chart" . }}
|
||||
{{ include "FvcomStats.selectorLabels" . }}
|
||||
{{- if .Chart.AppVersion }}
|
||||
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
|
||||
{{- end }}
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
{{- end -}}
|
||||
|
||||
{{/*
|
||||
Selector labels
|
||||
*/}}
|
||||
{{- define "FvcomStats.selectorLabels" -}}
|
||||
app: {{ include "FvcomStats.name" . }}
|
||||
instance: {{ .Release.Name }}
|
||||
app.kubernetes.io/name: {{ include "FvcomStats.name" . }}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
{{- end -}}
|
||||
|
||||
{{/*
|
||||
Create the name of the service account to use
|
||||
*/}}
|
||||
{{- define "FvcomStats.serviceAccountName" -}}
|
||||
{{- if .Values.serviceAccount.create -}}
|
||||
{{ default (include "FvcomStats.fullname" .) .Values.serviceAccount.name }}
|
||||
{{- else -}}
|
||||
{{ default "default" .Values.serviceAccount.name }}
|
||||
{{- end -}}
|
||||
{{- end -}}
|
||||
@@ -1,119 +0,0 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: {{ include "FvcomStats.fullname" . }}
|
||||
annotations:
|
||||
{{- if .Values.ci.environment }}
|
||||
app.gitlab.com/env: {{ default "" .Values.ci.environment }}
|
||||
app.gitlab.com/app: {{ default "" .Values.ci.projectPath }}
|
||||
{{- end }}
|
||||
labels:
|
||||
{{- include "FvcomStats.labels" . | nindent 4 }}
|
||||
spec:
|
||||
replicas: {{ .Values.replicaCount }}
|
||||
selector:
|
||||
matchLabels:
|
||||
{{- include "FvcomStats.selectorLabels" . | nindent 6 }}
|
||||
template:
|
||||
metadata:
|
||||
annotations:
|
||||
{{- if .Values.ci.environment }}
|
||||
app.gitlab.com/env: {{ default "" .Values.ci.environment }}
|
||||
app.gitlab.com/app: {{ default "" .Values.ci.projectPath }}
|
||||
{{- end }}
|
||||
labels:
|
||||
{{- include "FvcomStats.selectorLabels" . | nindent 8 }}
|
||||
spec:
|
||||
{{- with .Values.imagePullSecrets }}
|
||||
imagePullSecrets:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
serviceAccountName: {{ include "FvcomStats.serviceAccountName" . }}
|
||||
securityContext:
|
||||
{{- toYaml .Values.podSecurityContext | nindent 8 }}
|
||||
containers:
|
||||
- name: {{ .Chart.Name }}
|
||||
securityContext:
|
||||
{{- toYaml .Values.securityContext | nindent 12 }}
|
||||
image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}"
|
||||
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: 8085
|
||||
protocol: TCP
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: http
|
||||
{{- if .Values.service.https }}
|
||||
scheme: HTTPS
|
||||
{{- end }}
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: http
|
||||
{{- if .Values.service.https }}
|
||||
scheme: HTTPS
|
||||
{{- end }}
|
||||
{{- if .Values.service.https }}
|
||||
env:
|
||||
- name: ASPNETCORE_Kestrel__Certificates__Default__Path
|
||||
value: "/app/tls/kestrel.pfx"
|
||||
- name: SERVER_USE_HTTPS
|
||||
value: "1"
|
||||
{{- else }}
|
||||
env: []
|
||||
{{- end }}
|
||||
resources:
|
||||
{{- toYaml .Values.resources | nindent 12 }}
|
||||
volumeMounts:
|
||||
- name: data
|
||||
mountPath: /data
|
||||
- name: appsettings
|
||||
mountPath: /app/appsettings.json
|
||||
subPath: appsettings.json
|
||||
readOnly: true
|
||||
{{- if .Values.service.https }}
|
||||
- name: tls-certificates
|
||||
mountPath: /app/tls
|
||||
readOnly: true
|
||||
{{- end }}
|
||||
initContainers:
|
||||
- name: init
|
||||
image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}"
|
||||
command: [ "/bin/sh", "-c", "true"]
|
||||
volumeMounts:
|
||||
- name: data
|
||||
mountPath: /data
|
||||
- name: appsettings
|
||||
mountPath: /app/appsettings.json
|
||||
subPath: appsettings.json
|
||||
readOnly: true
|
||||
volumes:
|
||||
- name: data
|
||||
{{- if .Values.persistence.enabled }}
|
||||
persistentVolumeClaim:
|
||||
claimName: {{ .Values.persistence.existingClaim | default (include "FvcomStats.fullname" .) }}
|
||||
{{- else }}
|
||||
emptyDir: {}
|
||||
{{- end }}
|
||||
- name: appsettings
|
||||
secret:
|
||||
secretName: {{ template "FvcomStats.fullname" . }}
|
||||
{{- if .Values.service.https }}
|
||||
- name: tls-certificates
|
||||
secret:
|
||||
secretName: {{ .Values.service.secretName }}
|
||||
{{- end }}
|
||||
{{- with .Values.nodeSelector }}
|
||||
nodeSelector:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.affinity }}
|
||||
affinity:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.tolerations }}
|
||||
tolerations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
@@ -1,64 +0,0 @@
|
||||
{{- if .Values.ingress.enabled -}}
|
||||
{{- $fullName := include "FvcomStats.fullname" . -}}
|
||||
{{- $svcPort := .Values.service.port -}}
|
||||
apiVersion: networking.k8s.io/v1
|
||||
kind: Ingress
|
||||
metadata:
|
||||
name: {{ $fullName }}
|
||||
labels:
|
||||
{{- include "FvcomStats.labels" . | nindent 4 }}
|
||||
annotations:
|
||||
kubernetes.io/ingress.class: nginx
|
||||
nginx.ingress.kubernetes.io/ssl-redirect: "true"
|
||||
{{- if .Values.service.https }}
|
||||
nginx.ingress.kubernetes.io/backend-protocol: HTTPS
|
||||
{{- else }}
|
||||
nginx.ingress.kubernetes.io/backend-protocol: HTTP
|
||||
{{- end }}
|
||||
{{- with .Values.ingress.annotations }}
|
||||
{{- toYaml . | nindent 4 }}
|
||||
{{- end }}
|
||||
spec:
|
||||
tls:
|
||||
{{- if .Values.ci.deployHost }}
|
||||
- hosts:
|
||||
- {{ .Values.ci.deployHost }}
|
||||
secretName: {{ .Release.Name }}-tls
|
||||
{{- end }}
|
||||
{{- if .Values.ingress.tls }}
|
||||
{{- range .Values.ingress.tls }}
|
||||
- hosts:
|
||||
{{- range .hosts }}
|
||||
- {{ . | quote }}
|
||||
{{- end }}
|
||||
secretName: {{ .secretName }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
rules:
|
||||
{{- if .Values.ci.deployHost }}
|
||||
- host: {{ .Values.ci.deployHost }}
|
||||
http:
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: {{ $fullName }}
|
||||
port:
|
||||
number: {{ $svcPort }}
|
||||
{{- end }}
|
||||
{{- range .Values.ingress.hosts }}
|
||||
- host: {{ .host | quote }}
|
||||
http:
|
||||
paths:
|
||||
{{- range .paths }}
|
||||
- path: {{ . }}
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: {{ $fullName }}
|
||||
port:
|
||||
number: {{ $svcPort }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
@@ -1,25 +0,0 @@
|
||||
{{- if and .Values.persistence.enabled (not .Values.persistence.existingClaim) }}
|
||||
kind: PersistentVolumeClaim
|
||||
apiVersion: v1
|
||||
metadata:
|
||||
name: {{ template "FvcomStats.fullname" . }}
|
||||
{{- with .Values.persistence.annotations }}
|
||||
annotations:
|
||||
{{ toYaml . | indent 4 }}
|
||||
{{- end }}
|
||||
labels:
|
||||
{{ include "FvcomStats.labels" . | indent 4 }}
|
||||
spec:
|
||||
accessModes:
|
||||
- {{ .Values.persistence.accessMode | quote }}
|
||||
resources:
|
||||
requests:
|
||||
storage: {{ .Values.persistence.size | quote }}
|
||||
{{- if .Values.persistence.storageClass }}
|
||||
{{- if (eq "-" .Values.persistence.storageClass) }}
|
||||
storageClassName: ""
|
||||
{{- else }}
|
||||
storageClassName: "{{ .Values.persistence.storageClass }}"
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
@@ -1,15 +0,0 @@
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: {{ include "FvcomStats.fullname" . }}
|
||||
labels:
|
||||
{{- include "FvcomStats.labels" . | nindent 4 }}
|
||||
spec:
|
||||
type: {{ .Values.service.type }}
|
||||
ports:
|
||||
- port: {{ .Values.service.port }}
|
||||
targetPort: http
|
||||
protocol: TCP
|
||||
name: http
|
||||
selector:
|
||||
{{- include "FvcomStats.selectorLabels" . | nindent 4 }}
|
||||
@@ -1,8 +0,0 @@
|
||||
{{- if .Values.serviceAccount.create -}}
|
||||
apiVersion: v1
|
||||
kind: ServiceAccount
|
||||
metadata:
|
||||
name: {{ include "FvcomStats.serviceAccountName" . }}
|
||||
labels:
|
||||
{{ include "FvcomStats.labels" . | nindent 4 }}
|
||||
{{- end -}}
|
||||
@@ -1,73 +0,0 @@
|
||||
# Default values for FvcomStats.
|
||||
# This is a YAML-formatted file.
|
||||
# Declare variables to be passed into your templates.
|
||||
|
||||
replicaCount: 1
|
||||
|
||||
image:
|
||||
repository: overwritten by CD
|
||||
tag: latest
|
||||
pullPolicy: IfNotPresent
|
||||
|
||||
imagePullSecrets:
|
||||
- name: gitlab-pull-secret
|
||||
|
||||
nameOverride: ""
|
||||
fullnameOverride: ""
|
||||
|
||||
serviceAccount:
|
||||
# Specifies whether a service account should be created
|
||||
create: true
|
||||
# The name of the service account to use.
|
||||
# If not set and create is true, a name is generated using the fullname template
|
||||
name:
|
||||
|
||||
podSecurityContext:
|
||||
fsGroup: 2000
|
||||
|
||||
securityContext:
|
||||
capabilities:
|
||||
drop:
|
||||
- ALL
|
||||
readOnlyRootFilesystem: false
|
||||
runAsNonRoot: true
|
||||
runAsUser: 1000
|
||||
|
||||
service:
|
||||
type: ClusterIP
|
||||
port: 80
|
||||
https: false
|
||||
secretName: kestrel-tls
|
||||
|
||||
ingress:
|
||||
enabled: true
|
||||
|
||||
persistence:
|
||||
enabled: false
|
||||
size: 1G
|
||||
storageClass: ""
|
||||
accessMode: ReadWriteOnce
|
||||
|
||||
ci:
|
||||
environment: ""
|
||||
projectPath: ""
|
||||
deployHost: ""
|
||||
|
||||
resources: {}
|
||||
# We usually recommend not to specify default resources and to leave this as a conscious
|
||||
# choice for the user. This also increases chances charts run on environments with little
|
||||
# resources, such as Minikube. If you do want to specify resources, uncomment the following
|
||||
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
|
||||
# limits:
|
||||
# cpu: 100m
|
||||
# memory: 128Mi
|
||||
# requests:
|
||||
# cpu: 100m
|
||||
# memory: 128Mi
|
||||
|
||||
nodeSelector: {}
|
||||
|
||||
tolerations: []
|
||||
|
||||
affinity: {}
|
||||
|
||||
nix/default.nix: 146 lines (new file)
@@ -0,0 +1,146 @@
|
||||
/*
|
||||
This file is provided under the MIT licence:
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
*/
|
||||
# Generated by npins. Do not modify; will be overwritten regularly
|
||||
let
|
||||
data = builtins.fromJSON (builtins.readFile ./sources.json);
|
||||
version = data.version;
|
||||
|
||||
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295
|
||||
range =
|
||||
first: last: if first > last then [ ] else builtins.genList (n: first + n) (last - first + 1);
|
||||
|
||||
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257
|
||||
stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1));
|
||||
|
||||
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269
|
||||
stringAsChars = f: s: concatStrings (map f (stringToCharacters s));
|
||||
concatMapStrings = f: list: concatStrings (map f list);
|
||||
concatStrings = builtins.concatStringsSep "";
|
||||
|
||||
# If the environment variable NPINS_OVERRIDE_${name} is set, then use
|
||||
# the path directly as opposed to the fetched source.
|
||||
# (Taken from Niv for compatibility)
|
||||
mayOverride =
|
||||
name: path:
|
||||
let
|
||||
envVarName = "NPINS_OVERRIDE_${saneName}";
|
||||
saneName = stringAsChars (c: if (builtins.match "[a-zA-Z0-9]" c) == null then "_" else c) name;
|
||||
ersatz = builtins.getEnv envVarName;
|
||||
in
|
||||
if ersatz == "" then
|
||||
path
|
||||
else
|
||||
# this turns the string into an actual Nix path (for both absolute and
|
||||
# relative paths)
|
||||
builtins.trace "Overriding path of \"${name}\" with \"${ersatz}\" due to set \"${envVarName}\"" (
|
||||
if builtins.substring 0 1 ersatz == "/" then
|
||||
/. + ersatz
|
||||
else
|
||||
/. + builtins.getEnv "PWD" + "/${ersatz}"
|
||||
);
|
||||
|
||||
mkSource =
|
||||
name: spec:
|
||||
assert spec ? type;
|
||||
let
|
||||
path =
|
||||
if spec.type == "Git" then
|
||||
mkGitSource spec
|
||||
else if spec.type == "GitRelease" then
|
||||
mkGitSource spec
|
||||
else if spec.type == "PyPi" then
|
||||
mkPyPiSource spec
|
||||
else if spec.type == "Channel" then
|
||||
mkChannelSource spec
|
||||
else if spec.type == "Tarball" then
|
||||
mkTarballSource spec
|
||||
else
|
||||
builtins.throw "Unknown source type ${spec.type}";
|
||||
in
|
||||
spec // { outPath = mayOverride name path; };
|
||||
|
||||
mkGitSource =
|
||||
{
|
||||
repository,
|
||||
revision,
|
||||
url ? null,
|
||||
submodules,
|
||||
hash,
|
||||
branch ? null,
|
||||
...
|
||||
}:
|
||||
assert repository ? type;
|
||||
# At the moment, either it is a plain git repository (which has an url), or it is a GitHub/GitLab repository
|
||||
# In the latter case, there we will always be an url to the tarball
|
||||
if url != null && !submodules then
|
||||
builtins.fetchTarball {
|
||||
inherit url;
|
||||
sha256 = hash; # FIXME: check nix version & use SRI hashes
|
||||
}
|
||||
else
|
||||
let
|
||||
url =
|
||||
if repository.type == "Git" then
|
||||
repository.url
|
||||
else if repository.type == "GitHub" then
|
||||
"https://github.com/${repository.owner}/${repository.repo}.git"
|
||||
else if repository.type == "GitLab" then
|
||||
"${repository.server}/${repository.repo_path}.git"
|
||||
else
|
||||
throw "Unrecognized repository type ${repository.type}";
|
||||
urlToName =
|
||||
url: rev:
|
||||
let
|
||||
matched = builtins.match "^.*/([^/]*)(\\.git)?$" url;
|
||||
|
||||
short = builtins.substring 0 7 rev;
|
||||
|
||||
appendShort = if (builtins.match "[a-f0-9]*" rev) != null then "-${short}" else "";
|
||||
in
|
||||
"${if matched == null then "source" else builtins.head matched}${appendShort}";
|
||||
name = urlToName url revision;
|
||||
in
|
||||
builtins.fetchGit {
|
||||
rev = revision;
|
||||
inherit name;
|
||||
# hash = hash;
|
||||
inherit url submodules;
|
||||
};
|
||||
|
||||
mkPyPiSource =
|
||||
{ url, hash, ... }:
|
||||
builtins.fetchurl {
|
||||
inherit url;
|
||||
sha256 = hash;
|
||||
};
|
||||
|
||||
mkChannelSource =
|
||||
{ url, hash, ... }:
|
||||
builtins.fetchTarball {
|
||||
inherit url;
|
||||
sha256 = hash;
|
||||
};
|
||||
|
||||
mkTarballSource =
|
||||
{
|
||||
url,
|
||||
locked_url ? url,
|
||||
hash,
|
||||
...
|
||||
}:
|
||||
builtins.fetchTarball {
|
||||
url = locked_url;
|
||||
sha256 = hash;
|
||||
};
|
||||
in
|
||||
if version == 5 then
|
||||
builtins.mapAttrs mkSource data.pins
|
||||
else
|
||||
throw "Unsupported format version ${toString version} in sources.json. Try running `npins upgrade`"
|
||||
nix/sources.json: 11 lines (new file)
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"pins": {
|
||||
"nixpkgs": {
|
||||
"type": "Channel",
|
||||
"name": "nixpkgs-unstable",
|
||||
"url": "https://releases.nixos.org/nixpkgs/nixpkgs-25.11pre885943.e99366c665bd/nixexprs.tar.xz",
|
||||
"hash": "1cl8gphr2r9yz5qxn4r85bcgdba23igjc269dv4jajw9r50xgij6"
|
||||
}
|
||||
},
|
||||
"version": 5
|
||||
}
|
||||
src/Counts.fs: 170 lines changed
@@ -6,49 +6,44 @@ open Oceanbox.FvcomKit
|
||||
open Oceanbox.FvcomKit.Fvcom
|
||||
open ProjNet.FSharp
|
||||
|
||||
let qfloat = [|0.05; 0.25; 0.50; 0.75; 0.95; 0.99|]
|
||||
let qstring = [|"spQ05"; "spQ25"; "spQ50"; "spQ75"; "spQ95"; "spQ99"|]
|
||||
let qfloat = [| 0.05; 0.25; 0.50; 0.75; 0.95; 0.99 |]
|
||||
let qstring = [| "spQ05"; "spQ25"; "spQ50"; "spQ75"; "spQ95"; "spQ99" |]
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
let addSectors (count: int [,,]) =
|
||||
let c = Array2D.zeroCreate<float32> (Array3D.length1 count) (Array3D.length2 count)
|
||||
let addSectors (count: int[,,]) =
|
||||
let c =
|
||||
Array2D.zeroCreate<float32> (Array3D.length1 count) (Array3D.length2 count)
|
||||
//let sectorInds = [| for i in 0..((Array3D.length3 count) - 1) -> i |]
|
||||
|
||||
c
|
||||
|> Array2D.iteri (fun i j _ -> let s = Array.sum(count[i, j, *]) |> float32
|
||||
c[i, j] <- s
|
||||
)
|
||||
|> Array2D.iteri (fun i j _ ->
|
||||
let s = Array.sum (count[i, j, *]) |> float32
|
||||
c[i, j] <- s
|
||||
)
|
||||
c
|
||||
|
||||
// TODO: Work in progress
|
||||
let countsToTimeseries (counts: float32[]) (bins: float32[]) = 0
|
||||
|
||||
let countsToTimeseries (counts: float32 []) (bins: float32 []) =
|
||||
0
|
||||
|
||||
|
||||
let computeQuantile (counts: float32 []) (bins: float32 []) (q: float32)=
|
||||
let computeQuantile (counts: float32[]) (bins: float32[]) (q: float32) =
|
||||
let N = Array.sum counts |> float32
|
||||
let qN = round(q * N)
|
||||
let qN = round (q * N)
|
||||
|
||||
let binSumsAccumulated =
|
||||
Array.scan (+) 0.f counts
|
||||
let binSumsAccumulated = Array.scan (+) 0.f counts
|
||||
|
||||
let qInd =
|
||||
binSumsAccumulated
|
||||
|> Array.findIndex (fun elm -> elm >= qN)
|
||||
printfn "%d %f" qInd q
|
||||
bins[qInd-1]
|
||||
bins[qInd - 1]
|
||||
|
||||
let computeMean (counts: float32 []) (bins: float32 []) =
|
||||
let computeMean (counts: float32[]) (bins: float32[]) =
|
||||
let N = Array.sum counts
|
||||
let sumSp =
|
||||
Array.zip counts bins
|
||||
|> Array.map (fun (ci, spi) -> ci * spi)
|
||||
|> Array.sum
|
||||
|
||||
sumSp/N
|
||||
sumSp / N
|
||||
|
||||
let readCounts (nc: Microsoft.Research.Science.Data.DataSet) =
|
||||
let nele = nc.Dimensions["nele"].Length
|
||||
@@ -58,123 +53,124 @@ let readCounts (nc: Microsoft.Research.Science.Data.DataSet) =
|
||||
let counts = Array3D.zeroCreate nele nBins nSectors
|
||||
for i in 0 .. (nSectors - 1) do
|
||||
printfn "%d" i
|
||||
let c = nc["counts"].GetData([|0; 0; i|], [|nele; nBins; 1|]) :?> int [,,]
|
||||
let c =
|
||||
nc["counts"].GetData([| 0; 0; i |], [| nele; nBins; 1 |]) :?> int[,,]
|
||||
counts[*, *, i] <- c[*, *, 0]
|
||||
|
||||
counts
|
||||
|
||||
|
||||
let findMonthFromName (fname: string) =
|
||||
let ind1 = (fname.LastIndexOf "_") + 1
|
||||
let ind2 = (fname.LastIndexOf ".") - 1
|
||||
int(fname[ind1..ind2])
|
||||
int (fname[ind1..ind2])
|
||||
|
||||
|
||||
let spStatsMonth (fname: string) (outfile: string) (month: int)=
|
||||
let nc = NetCDFDataSet.Open(fname, openMode=Microsoft.Research.Science.Data.ResourceOpenMode.ReadOnly)
|
||||
let bins = nc["bins"].GetData() :?> float32 []
|
||||
let spStatsMonth (fname: string) (outfile: string) (month: int) =
|
||||
let nc =
|
||||
NetCDFDataSet.Open(fname, openMode = Microsoft.Research.Science.Data.ResourceOpenMode.ReadOnly)
|
||||
let bins = nc["bins"].GetData() :?> float32[]
|
||||
let counts = readCounts nc |> addSectors
|
||||
nc.Dispose()
|
||||
|
||||
let out = NetCDFDataSet.Open outfile
|
||||
|
||||
let q2nc (qs: string) (q: float32 []) =
|
||||
let q2nc (qs: string) (q: float32[]) =
|
||||
let a = Array2D.zeroCreate<float32> 1 q.Length
|
||||
|
||||
q
|
||||
|> Array.iteri (fun i qi -> a[0, i] <- qi)
|
||||
q |> Array.iteri (fun i qi -> a[0, i] <- qi)
|
||||
|
||||
out[qs].PutData([|(month-1); 0|], a) |> ignore
|
||||
out[qs].PutData([| (month - 1); 0 |], a) |> ignore
|
||||
|
||||
[|for i in 0 .. ((Array2D.length1 counts) - 1) -> i|]
|
||||
|> Array.map (fun ind -> let c = counts[ind, *]
|
||||
computeMean c bins)
|
||||
[| for i in 0 .. ((Array2D.length1 counts) - 1) -> i |]
|
||||
|> Array.map (fun ind ->
|
||||
let c = counts[ind, *]
|
||||
computeMean c bins
|
||||
)
|
||||
|> q2nc "spMonthlyMean"
|
||||
|
||||
let qfloat = [|0.05f; 0.25f; 0.50f; 0.75f; 0.95f; 0.99f|]
|
||||
let qstring = [|"spMonthlyQ05"; "spMonthlyQ25"; "spMonthlyQ50"; "spMonthlyQ75"; "spMonthlyQ95"; "spMonthlyQ99"|]
|
||||
let qfloat = [| 0.05f; 0.25f; 0.50f; 0.75f; 0.95f; 0.99f |]
|
||||
let qstring = [|
|
||||
"spMonthlyQ05"
|
||||
"spMonthlyQ25"
|
||||
"spMonthlyQ50"
|
||||
"spMonthlyQ75"
|
||||
"spMonthlyQ95"
|
||||
"spMonthlyQ99"
|
||||
|]
|
||||
|
||||
Array.zip qfloat qstring
|
||||
|> Array.iter (fun (qi, qsi) -> [|for i in 0 .. ((Array2D.length1 counts) - 1) -> i|]
|
||||
|> Array.map (fun ind -> let c = counts[ind, *]
|
||||
computeQuantile c bins qi)
|
||||
|> q2nc qsi |> ignore
|
||||
)
|
||||
|> Array.iter (fun (qi, qsi) ->
|
||||
[| for i in 0 .. ((Array2D.length1 counts) - 1) -> i |]
|
||||
|> Array.map (fun ind ->
|
||||
let c = counts[ind, *]
|
||||
computeQuantile c bins qi
|
||||
)
|
||||
|> q2nc qsi
|
||||
|> ignore
|
||||
)
|
||||
|
||||
out.Commit()
|
||||
out.Dispose()
|
||||
|
||||
|
||||
counts
|
||||
|
||||
|
||||
let spStatsFull (outfile: string) (counts: float32 [,]) (bins: float32 [])=
|
||||
let spStatsFull (outfile: string) (counts: float32[,]) (bins: float32[]) =
|
||||
|
||||
let out = NetCDFDataSet.Open outfile
|
||||
|
||||
let q2nc (qs: string) (q: float32 []) =
|
||||
let q2nc (qs: string) (q: float32[]) =
|
||||
let a = Array2D.zeroCreate<float32> 1 q.Length
|
||||
|
||||
q
|
||||
|> Array.iteri (fun i qi -> a[0, i] <- qi)
|
||||
|
||||
q |> Array.iteri (fun i qi -> a[0, i] <- qi)
|
||||
out[qs].PutData q |> ignore
|
||||
|
||||
[|for i in 0 .. ((Array2D.length1 counts) - 1) -> i|]
|
||||
|> Array.map (fun ind -> let c = counts[ind, *]
|
||||
computeMean c bins)
|
||||
[| for i in 0 .. ((Array2D.length1 counts) - 1) -> i |]
|
||||
|> Array.map (fun ind ->
|
||||
let c = counts[ind, *]
|
||||
computeMean c bins
|
||||
)
|
||||
|> q2nc "spMean"
|
||||
|
||||
let qfloat = [|0.05f; 0.25f; 0.50f; 0.75f; 0.95f; 0.99f|]
|
||||
let qstring = [|"spQ05"; "spQ25"; "spQ50"; "spQ75"; "spQ95"; "spQ99"|]
|
||||
let qfloat = [| 0.05f; 0.25f; 0.50f; 0.75f; 0.95f; 0.99f |]
|
||||
let qstring = [| "spQ05"; "spQ25"; "spQ50"; "spQ75"; "spQ95"; "spQ99" |]
|
||||
|
||||
Array.zip qfloat qstring
|
||||
|> Array.iter (fun (qi, qsi) -> [|for i in 0 .. ((Array2D.length1 counts) - 1) -> i|]
|
||||
|> Array.map (fun ind -> let c = counts[ind, *]
|
||||
computeQuantile c bins qi)
|
||||
|> q2nc qsi |> ignore
|
||||
)
|
||||
|> Array.iter (fun (qi, qsi) ->
|
||||
[| for i in 0 .. ((Array2D.length1 counts) - 1) -> i |]
|
||||
|> Array.map (fun ind ->
|
||||
let c = counts[ind, *]
|
||||
computeQuantile c bins qi
|
||||
)
|
||||
|> q2nc qsi
|
||||
|> ignore
|
||||
)
|
||||
|
||||
out.Commit()
|
||||
out.Dispose()
|
||||
0
|
||||
|
||||
// TODO: Work in progress
|
||||
let sectorSpStatsMonth fname = 0
|
||||
let spStatsMultipleFiles files = 0
|
||||
let spSectorStatsMultipleFiles files = 0
|
||||
|
||||
|
||||
|
||||
let sectorSpStatsMonth fname =
|
||||
0
|
||||
|
||||
let spStatsMultipleFiles files =
|
||||
0
|
||||
|
||||
let spSectorStatsMultipleFiles files =
|
||||
0
|
||||
|
||||
|
||||
|
||||
let addCountArrays (c1: float32 [,]) (c2: float32 [,]) =
|
||||
let addCountArrays (c1: float32[,]) (c2: float32[,]) =
|
||||
printfn "%d %d %d %d" (Array2D.length1 c1) (Array2D.length1 c2) (Array2D.length2 c1) (Array2D.length2 c2)
|
||||
c1
|
||||
|> Array2D.mapi (fun i j ci1 -> ci1 + c2[i, j])
|
||||
c1 |> Array2D.mapi (fun i j ci1 -> ci1 + c2[i, j])
|
||||
|
||||
let count2stats (files: string []) (outfile: string) =
|
||||
let months =
|
||||
files
|
||||
|> Array.map findMonthFromName
|
||||
let count2stats (files: string[]) (outfile: string) =
|
||||
let months = files |> Array.map findMonthFromName
|
||||
|
||||
let nc = NetCDFDataSet.Open files[0]
|
||||
let nele = nc.Dimensions["nele"].Length
|
||||
let bins_lenght = nc.Dimensions["bins"].Length
|
||||
let bins = nc["bins"].GetData() :?> float32 []
|
||||
let bins = nc["bins"].GetData() :?> float32[]
|
||||
nc.Dispose()
|
||||
|
||||
let totalCounts = Array2D.zeroCreate<float32> nele bins_lenght
|
||||
|
||||
Array.zip files months
|
||||
|> Array.iter (fun (f, m) -> let counts = spStatsMonth f outfile m
|
||||
addCountArrays totalCounts counts
|
||||
|> Array2D.iteri (fun i j ci -> totalCounts[i, j] <- ci)
|
||||
)
|
||||
|> Array.iter (fun (f, m) ->
|
||||
let counts = spStatsMonth f outfile m
|
||||
addCountArrays totalCounts counts
|
||||
|> Array2D.iteri (fun i j ci -> totalCounts[i, j] <- ci)
|
||||
)
|
||||
spStatsFull outfile totalCounts bins |> ignore
|
||||
0
|
||||
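The computeQuantile helper reformatted in the Counts.fs hunks above reads a quantile off a per-element speed histogram via a cumulative sum. A small self-contained sketch with invented counts/bins values, just to show the arithmetic:

let computeQuantile (counts: float32[]) (bins: float32[]) (q: float32) =
    let N = Array.sum counts |> float32
    let qN = round (q * N)                       // rank of the requested quantile
    let cumulative = Array.scan (+) 0.0f counts  // running totals, e.g. [| 0; 2; 5; 10 |]
    let qInd = cumulative |> Array.findIndex (fun c -> c >= qN)
    bins[qInd - 1]

// 10 samples split over three bins; q = 0.5 gives qN = 5, the first running
// total >= 5 sits at index 2, so the median falls in the second bin (0.2):
let counts = [| 2.0f; 3.0f; 5.0f |]
let bins = [| 0.1f; 0.2f; 0.3f |]
computeQuantile counts bins 0.5f |> printfn "%f"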
src/Main.fs: 93 lines changed
@@ -21,10 +21,7 @@ let configureSerilog level =
|
||||
| 2 -> LogEventLevel.Information
|
||||
| 3 -> LogEventLevel.Debug
|
||||
| _ -> LogEventLevel.Verbose
|
||||
LoggerConfiguration()
|
||||
.MinimumLevel.Is(n)
|
||||
.WriteTo.Console()
|
||||
.CreateLogger()
|
||||
LoggerConfiguration().MinimumLevel.Is(n).WriteTo.Console().CreateLogger()
|
||||
|
||||
let colorizer =
|
||||
function
|
||||
@@ -34,19 +31,19 @@ let colorizer =
|
||||
let errorHandler = ProcessExiter(colorizer = colorizer)
|
||||
|
||||
type NCArgs =
|
||||
// | Log_Level of level: int
|
||||
// | Log_Level of level: int
|
||||
| TemplateFile of string: string
|
||||
| Variable of string: string
|
||||
| [<MainCommand; ExactlyOnce; Last>] File of file: string
|
||||
interface IArgParserTemplate with
|
||||
member this.Usage =
|
||||
match this with
|
||||
// | Log_Level _ -> "0=Error, 1=Warning, 2=Info, 3=Debug, 4=Verbose"
|
||||
// | Log_Level _ -> "0=Error, 1=Warning, 2=Info, 3=Debug, 4=Verbose"
|
||||
| TemplateFile _ -> "Path(s) to FVCOM output file"
|
||||
| Variable _ -> "Variable to analyze (temperature, salinity or uv)"
|
||||
| File _ -> "Name of output file"
|
||||
type TSArgs =
|
||||
// | Log_Level of level: int
|
||||
// | Log_Level of level: int
|
||||
| DataFoldersTS of string: string
|
||||
| StartTimeTS of string: string
|
||||
| StopTimeTS of string: string
|
||||
@@ -58,7 +55,7 @@ type TSArgs =
|
||||
interface IArgParserTemplate with
|
||||
member this.Usage =
|
||||
match this with
|
||||
// | Log_Level _ -> "0=Error, 1=Warning, 2=Info, 3=Debug, 4=Verbose"
|
||||
// | Log_Level _ -> "0=Error, 1=Warning, 2=Info, 3=Debug, 4=Verbose"
|
||||
| DataFoldersTS _ -> "Path(s) to FVCOM output files"
|
||||
| StartTimeTS _ -> "Crop before start time (YYYY-MM-DD)"
|
||||
| StopTimeTS _ -> "Crop after start time (YYYY-MM-DD)"
|
||||
@@ -69,7 +66,7 @@ type TSArgs =
|
||||
| FileTS _ -> "Name of output file"
|
||||
|
||||
type UVArgs =
|
||||
// | Log_Level of level: int
|
||||
// | Log_Level of level: int
|
||||
| DataFoldersUV of string: string
|
||||
| StartTimeUV of string: string
|
||||
| StopTimeUV of string: string
|
||||
@@ -80,7 +77,7 @@ type UVArgs =
|
||||
interface IArgParserTemplate with
|
||||
member this.Usage =
|
||||
match this with
|
||||
// | Log_Level _ -> "0=Error, 1=Warning, 2=Info, 3=Debug, 4=Verbose"
|
||||
// | Log_Level _ -> "0=Error, 1=Warning, 2=Info, 3=Debug, 4=Verbose"
|
||||
| DataFoldersUV _ -> "Path(s) to FVCOM output files"
|
||||
| StartTimeUV _ -> "Crop before start time (YYYY-MM-DD)"
|
||||
| StopTimeUV _ -> "Crop after start time (YYYY-MM-DD)"
|
||||
@@ -90,7 +87,7 @@ type UVArgs =
|
||||
| FileUV _ -> "Name of output file"
|
||||
|
||||
type BOTTOMArgs =
|
||||
// | Log_Level of level: int
|
||||
// | Log_Level of level: int
|
||||
| DataFoldersB of string: string
|
||||
| StartTimeB of string: string
|
||||
| StopTimeB of string: string
|
||||
@@ -101,7 +98,7 @@ type BOTTOMArgs =
|
||||
interface IArgParserTemplate with
|
||||
member this.Usage =
|
||||
match this with
|
||||
// | Log_Level _ -> "0=Error, 1=Warning, 2=Info, 3=Debug, 4=Verbose"
|
||||
// | Log_Level _ -> "0=Error, 1=Warning, 2=Info, 3=Debug, 4=Verbose"
|
||||
| DataFoldersB _ -> "Path(s) to FVCOM output files"
|
||||
| StartTimeB _ -> "Crop before start time (YYYY-MM-DD)"
|
||||
| StopTimeB _ -> "Crop after start time (YYYY-MM-DD)"
|
||||
@@ -110,14 +107,14 @@ type BOTTOMArgs =
|
||||
//| ChunkSizeUV _ -> "Number of nodes/cells to be processed simultaneously"
|
||||
| FileB _ -> "Name of output file"
|
||||
type COUNTArgs =
|
||||
// | Log_Level of level: int
|
||||
// | Log_Level of level: int
|
||||
| DataFoldersC of string: string
|
||||
| TemplateFileC of string: string
|
||||
| [<MainCommand; ExactlyOnce; Last>] FileC of file: string
|
||||
interface IArgParserTemplate with
|
||||
member this.Usage =
|
||||
match this with
|
||||
// | Log_Level _ -> "0=Error, 1=Warning, 2=Info, 3=Debug, 4=Verbose"
|
||||
// | Log_Level _ -> "0=Error, 1=Warning, 2=Info, 3=Debug, 4=Verbose"
|
||||
| DataFoldersC _ -> "Path(s) to count files"
|
||||
| TemplateFileC _ -> "Path to fvcom data file"
|
||||
| FileC _ -> "Name of output file"
|
||||
@@ -154,9 +151,15 @@ let main argv =
|
||||
let variable = ncArgs.GetResult Variable
|
||||
let fvGrid = readFvcomGrid templateFile
|
||||
match variable with
|
||||
| "temperature" -> createOutputFileTS variable templateFile fvGrid saveName |> ignore
|
||||
| "salinity" -> createOutputFileTS variable templateFile fvGrid saveName |> ignore
|
||||
| _ -> createOutputFileUV templateFile fvGrid saveName |> ignore
|
||||
| "temperature" ->
|
||||
createOutputFileTS variable templateFile fvGrid saveName
|
||||
|> ignore
|
||||
| "salinity" ->
|
||||
createOutputFileTS variable templateFile fvGrid saveName
|
||||
|> ignore
|
||||
| _ ->
|
||||
createOutputFileUV templateFile fvGrid saveName
|
||||
|> ignore
|
||||
|
||||
if args.Contains TS then
|
||||
let tsArgs = args.GetResult TS
|
||||
@@ -177,7 +180,8 @@ let main argv =
|
||||
let fvGrid = readFvcomGrid fList.Path[0]
|
||||
let chunkSizeChecked = checkIndicesNode fvGrid startNode chunkSize //|> ignore
|
||||
printfn "%d %d %d" startNode chunkSize chunkSizeChecked
|
||||
analyzeNodeSubset var (startNode, chunkSizeChecked) fList fArray fvGrid outFile |> ignore
|
||||
analyzeNodeSubset var (startNode, chunkSizeChecked) fList fArray fvGrid outFile
|
||||
|> ignore
|
||||
|
||||
elif args.Contains UV then
|
||||
let uvArgs = args.GetResult UV
|
||||
@@ -194,7 +198,8 @@ let main argv =
|
||||
let fArray = createFileArray fList
|
||||
let fvGrid = readFvcomGrid fList.Path[0]
|
||||
let chunkSizeChecked = checkIndicesCell fvGrid startCell chunkSize //|> ignore
|
||||
analyzeCellSubset (startCell, chunkSizeChecked) fList fArray fvGrid outFile |> ignore
|
||||
analyzeCellSubset (startCell, chunkSizeChecked) fList fArray fvGrid outFile
|
||||
|> ignore
|
||||
|
||||
elif args.Contains B then
|
||||
let bArgs = args.GetResult B
|
||||
@@ -212,18 +217,26 @@ let main argv =
|
||||
let sectors = createSectors
|
||||
let subsets = makeCellSubsets fvGrid 200000
|
||||
|
||||
let months = [|for i in 1 .. 12 -> i|]
|
||||
let months = [| for i in 1..12 -> i |]
|
||||
|
||||
months
|
||||
|> Array.iter (fun m ->
|
||||
let fl = tryFindMonth fList m
|
||||
if fl.IsSome then
|
||||
let saveName = saveFolder + "/counts_" + prefix + "_" + string(m) + ".nc"
|
||||
createCountsFile saveName fvGrid bins sectors|> ignore
|
||||
let b, s, counts = countSp fvGrid fl.Value bins sectors
|
||||
writeCounts2NC saveName counts subsets
|
||||
else
|
||||
())
|
||||
let fl = tryFindMonth fList m
|
||||
if fl.IsSome then
|
||||
let saveName =
|
||||
saveFolder
|
||||
+ "/counts_"
|
||||
+ prefix
|
||||
+ "_"
|
||||
+ string (m)
|
||||
+ ".nc"
|
||||
createCountsFile saveName fvGrid bins sectors
|
||||
|> ignore
|
||||
let b, s, counts = countSp fvGrid fl.Value bins sectors
|
||||
writeCounts2NC saveName counts subsets
|
||||
else
|
||||
()
|
||||
)
|
||||
|
||||
elif args.Contains C then
|
||||
let cArgs = args.GetResult C
|
||||
@@ -232,9 +245,9 @@ let main argv =
|
||||
let outfile = cArgs.GetResult FileC
|
||||
let fvGrid = readFvcomGrid templateFile
|
||||
let sectors = createSectors
|
||||
createOutputFileBottomSp templateFile fvGrid outfile sectors |> ignore
|
||||
let files =
|
||||
tryFindCountFiles countFolder
|
||||
createOutputFileBottomSp templateFile fvGrid outfile sectors
|
||||
|> ignore
|
||||
let files = tryFindCountFiles countFolder
|
||||
|
||||
if files.IsNone then
|
||||
printfn "%s" "No count files found"
|
||||
@@ -242,17 +255,9 @@ let main argv =
|
||||
count2stats files.Value outfile |> ignore
|
||||
|
||||
()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//printfn "%A" counts[50, *, *]
|
||||
//printfn "%d %d %d" (Array3D.length1 counts) (Array3D.length2 counts) (Array3D.length3 counts)
|
||||
//printfn "%A" bins
|
||||
// let chunkSizeChecked = checkIndicesCell fvGrid startCell chunkSize //|> ignore
|
||||
// analyzeCellSubset (startCell, chunkSizeChecked) fList fArray fvGrid outFile |> ignore
|
||||
//printfn "%A" counts[50, *, *]
|
||||
//printfn "%d %d %d" (Array3D.length1 counts) (Array3D.length2 counts) (Array3D.length3 counts)
|
||||
//printfn "%A" bins
|
||||
//let chunkSizeChecked = checkIndicesCell fvGrid startCell chunkSize //|> ignore
|
||||
//analyzeCellSubset (startCell, chunkSizeChecked) fList fArray fvGrid outFile |> ignore
|
||||
0
|
||||
src/NetCDF.fs: 368 lines changed
@@ -9,32 +9,29 @@ open Oceanbox.FvcomKit.Fvcom
|
||||
open ProjNet.FSharp
|
||||
open Variables
|
||||
|
||||
type gridDims =
|
||||
{
|
||||
TemplateFile: string
|
||||
Node: int
|
||||
Nele: int
|
||||
Siglay: int
|
||||
type gridDims = {
|
||||
TemplateFile: string
|
||||
Node: int
|
||||
Nele: int
|
||||
Siglay: int
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
type Filelist =
|
||||
{
|
||||
Path: string []
|
||||
FvcomTime: float32 []
|
||||
DTime : DateTime []
|
||||
Index: int []
|
||||
ArrayInd: int []
|
||||
}
|
||||
type Filelist = {
|
||||
Path: string[]
|
||||
FvcomTime: float32[]
|
||||
DTime: DateTime[]
|
||||
Index: int[]
|
||||
ArrayInd: int[]
|
||||
}
|
||||
|
||||
type fvcomFile =
|
||||
{
|
||||
type fvcomFile = {
|
||||
Path: string
|
||||
startIndex: int
|
||||
stopIndex: int
|
||||
startArrayInd: int
|
||||
stopArrayInd: int
|
||||
}
|
||||
}
|
||||
|
||||
let tryFindMonth (fl: Filelist) (month: int) =
|
||||
printfn "%d" month
|
||||
@@ -52,81 +49,87 @@ let tryFindMonth (fl: Filelist) (month: int) =

let tryReadNcFile fname =
try
let nc = NetCDFDataSet.Open(fname, openMode=Microsoft.Research.Science.Data.ResourceOpenMode.ReadOnly)
let t = nc["time"].GetData() :?> float32 []
let nc =
NetCDFDataSet.Open(fname, openMode = Microsoft.Research.Science.Data.ResourceOpenMode.ReadOnly)
let t = nc["time"].GetData() :?> float32[]
nc.Dispose()
1
with
| _ -> 0
with _ ->
0

let readFvcomGrid (ncfile:string) =
let nc = NetCDFDataSet.Open(ncfile, openMode=Microsoft.Research.Science.Data.ResourceOpenMode.ReadOnly)
let readFvcomGrid (ncfile: string) =
let nc =
NetCDFDataSet.Open(ncfile, openMode = Microsoft.Research.Science.Data.ResourceOpenMode.ReadOnly)
let dims = nc.Dimensions

let node = dims["node"].Length
let nele = dims["nele"].Length
let siglay = dims["siglay"].Length

{
TemplateFile = ncfile
Node = node
Nele = nele
Siglay = siglay
}
{ TemplateFile = ncfile; Node = node; Nele = nele; Siglay = siglay }

let readFoldersList (fname: string) =
let lines = File.ReadAllLines fname
lines

let readTime (ncfile: string) =
let nc = NetCDFDataSet.Open(ncfile, openMode=Microsoft.Research.Science.Data.ResourceOpenMode.ReadOnly)
let fvcomTime = nc[ "time" ].GetData() :?> float32 []
let nc =
NetCDFDataSet.Open(ncfile, openMode = Microsoft.Research.Science.Data.ResourceOpenMode.ReadOnly)
let fvcomTime = nc["time"].GetData() :?> float32[]
let fvcomRef = DateTime(1858, 11, 17).Ticks

let iTime = nc[ "Itime" ].GetData() :?> int []
|> Array.map int64
|> Array.map (fun t -> (t * (int64(3600) * int64(24) * int64(10000000)) + int64(fvcomRef)))
let iTime =
nc["Itime"].GetData() :?> int[]
|> Array.map int64
|> Array.map (fun t ->
(t * (int64 (3600) * int64 (24) * int64 (10000000))
+ int64 (fvcomRef))
)

let iTime2 = nc[ "Itime2" ].GetData() :?> int []
|> Array.map int64
|> Array.map (fun t -> t * int64(10000))
let iTime2 =
nc["Itime2"].GetData() :?> int[]
|> Array.map int64
|> Array.map (fun t -> t * int64 (10000))

let dTime = Array.zip iTime iTime2
|> Array.map (fun (t1, t2) -> t1 + t2)
|> Array.map (fun t -> DateTime(t))
let dTime =
Array.zip iTime iTime2
|> Array.map (fun (t1, t2) -> t1 + t2)
|> Array.map (fun t -> DateTime(t))

nc.Dispose()
let ind = [| for i in 0 .. (fvcomTime.Length-1) -> i |]
let ind = [| for i in 0 .. (fvcomTime.Length - 1) -> i |]
let path = Array.create ind.Length ncfile

Array.zip3 (Array.zip fvcomTime dTime) path ind
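The Itime/Itime2 arithmetic in readTime converts FVCOM's modified-Julian-day convention into .NET ticks. A minimal sketch of the same conversion, assuming Itime holds whole days since 1858-11-17 and Itime2 holds milliseconds into that day (names below are illustrative, not part of the commit):

    // Sketch only: one .NET tick is 100 ns, so a day is 24 * 3600 * 10_000_000 ticks
    // and a millisecond is 10_000 ticks, matching the factors used in readTime.
    let mjdEpochTicks = System.DateTime(1858, 11, 17).Ticks

    let fvcomToDateTime (itime: int) (itime2: int) =
        let dayTicks = int64 itime * 24L * 3600L * 10_000_000L
        let msTicks = int64 itime2 * 10_000L
        System.DateTime(mjdEpochTicks + dayTicks + msTicks)

    // fvcomToDateTime 0 0 gives 1858-11-17 00:00; fvcomToDateTime 1 43_200_000 gives 1858-11-18 12:00.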
let sortFileListByTime (fl: (float32 * string * int) []) =
fl |> Array.sortBy (fun (t, _, _ ) -> t)
let sortFileListByTime (fl: (float32 * string * int)[]) = fl |> Array.sortBy (fun (t, _, _) -> t)

let removeOverlap (fl: ((float32 * DateTime) * string * int) []) =
let ind = [| for i in 1 .. (fl.Length-1) -> i|] |> Array.rev
let t, _, _ = Array.unzip3 fl
let removeOverlap (fl: ((float32 * DateTime) * string * int)[]) =
let ind =
[| for i in 1 .. (fl.Length - 1) -> i |]
|> Array.rev
let t, _, _ = Array.unzip3 fl
let fvcomTime, _ = Array.unzip t
let latest = [|fvcomTime[fvcomTime.Length-1]|]
let noOverlap =
[|
fl[fl.Length-1]
let latest = [| fvcomTime[fvcomTime.Length - 1] |]
let noOverlap = [|
fl[fl.Length - 1]
for i in ind do
if fvcomTime[i-1] < latest[0] then
latest[0] <- fvcomTime[i-1]
fl[i-1]
|]
if fvcomTime[i - 1] < latest[0] then
latest[0] <- fvcomTime[i - 1]
fl[i - 1]
|]

Array.rev noOverlap
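removeOverlap walks the time-sorted list from the end and keeps an entry only while the timestamps keep decreasing, so when two result folders cover the same period the later file wins. A hedged sketch of that idea on plain timestamps (the helper name and sample values are illustrative); the one-element array stands in for a mutable cell because F# array expressions cannot capture mutable locals:

    // Sketch only, assuming the input is sorted ascending and a later file
    // supersedes earlier files that cover the same times.
    let dropOverlaps (times: float32[]) =
        let latest = [| times[times.Length - 1] |]
        [|
            times[times.Length - 1]
            for i in Array.rev [| 1 .. times.Length - 1 |] do
                if times[i - 1] < latest[0] then
                    latest[0] <- times[i - 1]
                    times[i - 1]
        |]
        |> Array.rev

    // dropOverlaps [| 0.f; 1.f; 2.f; 1.5f; 2.5f |] keeps [| 0.f; 1.f; 1.5f; 2.5f |].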
let getFilesInFolder (folder: string) (prefix: string) =
|
||||
let files =
|
||||
IO.Directory.GetFiles folder
|
||||
|> Array.filter (fun s -> let sLenght = String.length s
|
||||
s[(sLenght-3) .. sLenght] = ".nc")
|
||||
|> Array.filter (fun s ->
|
||||
let sLenght = String.length s
|
||||
s[(sLenght - 3) .. sLenght] = ".nc"
|
||||
)
|
||||
|> Array.filter (fun s -> s.Contains prefix)
|
||||
|> Array.filter (fun s -> not (s.Contains "restart"))
|
||||
|> Array.filter (fun s -> not (s.Contains "ngrd"))
|
||||
@@ -135,8 +138,10 @@ let getFilesInFolder (folder: string) (prefix: string) =
|
||||
files
|
||||
|
||||
let writeFileListToFile (saveName: string) (fl: Filelist) =
|
||||
let a = Array.zip3 fl.FvcomTime fl.Path fl.Index
|
||||
a |> Array.map (fun (x, y, z) -> $"{x}; {y}; {z}") |> fun s -> System.IO.File.WriteAllLines (saveName, s)
|
||||
let a = Array.zip3 fl.FvcomTime fl.Path fl.Index
|
||||
a
|
||||
|> Array.map (fun (x, y, z) -> $"{x}; {y}; {z}")
|
||||
|> fun s -> System.IO.File.WriteAllLines(saveName, s)
|
||||
|
||||
|
||||
let createFileList (foldersList: string) (prefix: string) =
|
||||
@@ -151,19 +156,19 @@ let createFileList (foldersList: string) (prefix: string) =
|
||||
files
|
||||
|> Array.map readTime
|
||||
|> Array.fold Array.append [||]
|
||||
|> Array.sortBy (fun ((t, _), _, _ ) -> t)
|
||||
|> Array.sortBy (fun ((t, _), _, _) -> t)
|
||||
|> removeOverlap
|
||||
|
||||
let t, p, i = Array.unzip3 flArray
|
||||
let fvTime, dTime = Array.unzip t
|
||||
let arrayInd = [|for n in 0 .. (fvTime.Length-1) -> n|]
|
||||
let arrayInd = [| for n in 0 .. (fvTime.Length - 1) -> n |]
|
||||
let fl = {
|
||||
Path = p
|
||||
FvcomTime = fvTime
|
||||
DTime = dTime
|
||||
Index = i
|
||||
ArrayInd = arrayInd
|
||||
}
|
||||
Path = p
|
||||
FvcomTime = fvTime
|
||||
DTime = dTime
|
||||
Index = i
|
||||
ArrayInd = arrayInd
|
||||
}
|
||||
writeFileListToFile "fileList.txt" fl
|
||||
fl
|
||||
|
||||
@@ -191,55 +196,53 @@ let dateStr2DateTime (ds: string) =
|
||||
|
||||
let cropFileList (fl: Filelist) (startTime: string) (stopTime: string) =
|
||||
let fl1 =
|
||||
let start = dateStr2DateTime startTime
|
||||
let t = Array.zip fl.FvcomTime fl.DTime
|
||||
let path = fl.Path
|
||||
let ind = fl.Index
|
||||
let start = dateStr2DateTime startTime
|
||||
let t = Array.zip fl.FvcomTime fl.DTime
|
||||
let path = fl.Path
|
||||
let ind = fl.Index
|
||||
|
||||
let cropped =
|
||||
Array.zip3 t path ind
|
||||
|> Array.filter (fun ((t, dt), p, i) -> dt.Date >= start)
|
||||
let cropped =
|
||||
Array.zip3 t path ind
|
||||
|> Array.filter (fun ((t, dt), p, i) -> dt.Date >= start)
|
||||
|
||||
let timeCropped, pathCropped, indCropped = Array.unzip3 cropped
|
||||
let tCropped, dtCropped = Array.unzip timeCropped
|
||||
let timeCropped, pathCropped, indCropped = Array.unzip3 cropped
|
||||
let tCropped, dtCropped = Array.unzip timeCropped
|
||||
|
||||
{
|
||||
Path = pathCropped
|
||||
FvcomTime = tCropped
|
||||
DTime = dtCropped
|
||||
Index = indCropped
|
||||
ArrayInd = fl.ArrayInd[0..indCropped.Length]
|
||||
}
|
||||
{
|
||||
Path = pathCropped
|
||||
FvcomTime = tCropped
|
||||
DTime = dtCropped
|
||||
Index = indCropped
|
||||
ArrayInd = fl.ArrayInd[0 .. indCropped.Length]
|
||||
}
|
||||
|
||||
let fl2 =
|
||||
let stop = dateStr2DateTime stopTime
|
||||
let t = Array.zip fl1.FvcomTime fl1.DTime
|
||||
let path = fl1.Path
|
||||
let ind = fl1.Index
|
||||
let stop = dateStr2DateTime stopTime
|
||||
let t = Array.zip fl1.FvcomTime fl1.DTime
|
||||
let path = fl1.Path
|
||||
let ind = fl1.Index
|
||||
|
||||
let cropped =
|
||||
Array.zip3 t path ind
|
||||
|> Array.filter (fun ((t, dt), p, i) -> dt.Date <= stop)
|
||||
let cropped =
|
||||
Array.zip3 t path ind
|
||||
|> Array.filter (fun ((t, dt), p, i) -> dt.Date <= stop)
|
||||
|
||||
let timeCropped, pathCropped, indCropped = Array.unzip3 cropped
|
||||
let tCropped, dtCropped = Array.unzip timeCropped
|
||||
let timeCropped, pathCropped, indCropped = Array.unzip3 cropped
|
||||
let tCropped, dtCropped = Array.unzip timeCropped
|
||||
|
||||
{
|
||||
Path = pathCropped
|
||||
FvcomTime = tCropped
|
||||
DTime = dtCropped
|
||||
Index = indCropped
|
||||
ArrayInd = fl1.ArrayInd[0..(indCropped.Length-1)]
|
||||
}
|
||||
{
|
||||
Path = pathCropped
|
||||
FvcomTime = tCropped
|
||||
DTime = dtCropped
|
||||
Index = indCropped
|
||||
ArrayInd = fl1.ArrayInd[0 .. (indCropped.Length - 1)]
|
||||
}
|
||||
|
||||
fl2
|
||||
|
||||
|
||||
let createFileArray (fl: Filelist) =
|
||||
let uniqueFiles = Array.distinct fl.Path
|
||||
let fileArray =
|
||||
uniqueFiles
|
||||
|> Array.map (fun f -> findFile f fl)
|
||||
let fileArray = uniqueFiles |> Array.map (fun f -> findFile f fl)
|
||||
|
||||
fileArray
|
||||
|
||||
@@ -326,41 +329,43 @@ let create3DVariable (nc: Microsoft.Research.Science.Data.DataSet) (grd: gridDim
|
||||
|> Array.iter (fun (s1, s2) -> v.Metadata[s1] <- s2)
|
||||
let zeroMatrix = createEmpty3DArray grd var.Dimensions
|
||||
|
||||
for i in 0 .. ((Array3D.length1 zeroMatrix)-1) do
|
||||
for i in 0 .. ((Array3D.length1 zeroMatrix) - 1) do
|
||||
//printfn "%d" i
|
||||
let array = Array3D.zeroCreate<float32> 1 (Array3D.length2 zeroMatrix) (Array3D.length3 zeroMatrix)
|
||||
nc[var.Name].PutData([|i; 0; 0|], array) |> ignore
|
||||
let array =
|
||||
Array3D.zeroCreate<float32> 1 (Array3D.length2 zeroMatrix) (Array3D.length3 zeroMatrix)
|
||||
nc[var.Name].PutData([| i; 0; 0 |], array)
|
||||
|> ignore
|
||||
|
||||
// let emptyArray = createEmpty3DArray grd var.Dimensions
|
||||
// let v = nc.AddVariable<float32>(var.Name, emptyArray, var.Dimensions)
|
||||
// var.Attributes
|
||||
// |> Array.iter (fun (s1, s2) -> v.Metadata[s1] <- s2) |> ignore
|
||||
// try
|
||||
// let emptyArray = createEmpty3DArray grd var.Dimensions
|
||||
// let v = nc.AddVariable<float32>(var.Name, emptyArray, var.Dimensions)
|
||||
// var.Attributes
|
||||
// |> Array.iter (fun (s1, s2) -> v.Metadata[s1] <- s2) |> ignore
|
||||
// nc.Commit()
|
||||
//
|
||||
// with
|
||||
// | :? System.OverflowException ->
|
||||
// printfn "%s" (var.Name + ": Too large array. Write piecewise")
|
||||
// let emptyArray = Array3D.zeroCreate<float32> 1 1 1
|
||||
// let v = nc.AddVariable<float32>(var.Name, emptyArray, var.Dimensions)
|
||||
// var.Attributes
|
||||
// |> Array.iter (fun (s1, s2) -> v.Metadata[s1] <- s2)
|
||||
// let zeroMatrix = createEmpty3DArray grd var.Dimensions
|
||||
//
|
||||
// for i in 0 .. ((Array3D.length1 zeroMatrix)-1) do
|
||||
// printfn "%d" i
|
||||
// let array = Array3D.zeroCreate<float32> 1 (Array3D.length2 zeroMatrix) (Array3D.length3 zeroMatrix)
|
||||
// nc[var.Name].PutData([|i; 0; 0|], array) |> ignore
|
||||
// let emptyArray = createEmpty3DArray grd var.Dimensions
|
||||
// let v = nc.AddVariable<float32>(var.Name, emptyArray, var.Dimensions)
|
||||
// var.Attributes
|
||||
// |> Array.iter (fun (s1, s2) -> v.Metadata[s1] <- s2) |> ignore
|
||||
// try
|
||||
// let emptyArray = createEmpty3DArray grd var.Dimensions
|
||||
// let v = nc.AddVariable<float32>(var.Name, emptyArray, var.Dimensions)
|
||||
// var.Attributes
|
||||
// |> Array.iter (fun (s1, s2) -> v.Metadata[s1] <- s2) |> ignore
|
||||
// nc.Commit()
|
||||
//
|
||||
// with
|
||||
// | :? System.OverflowException ->
|
||||
// printfn "%s" (var.Name + ": Too large array. Write piecewise")
|
||||
// let emptyArray = Array3D.zeroCreate<float32> 1 1 1
|
||||
// let v = nc.AddVariable<float32>(var.Name, emptyArray, var.Dimensions)
|
||||
// var.Attributes
|
||||
// |> Array.iter (fun (s1, s2) -> v.Metadata[s1] <- s2)
|
||||
// let zeroMatrix = createEmpty3DArray grd var.Dimensions
|
||||
//
|
||||
// for i in 0 .. ((Array3D.length1 zeroMatrix)-1) do
|
||||
// printfn "%d" i
|
||||
// let array = Array3D.zeroCreate<float32> 1 (Array3D.length2 zeroMatrix) (Array3D.length3 zeroMatrix)
|
||||
// nc[var.Name].PutData([|i; 0; 0|], array) |> ignore
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//fillWithZeros nc var grd |> ignore
|
||||
//fillWithZeros nc var grd |> ignore
|
||||
|
||||
nc.Commit()
|
||||
|
||||
@@ -369,21 +374,21 @@ let create4DVariable (nc: Microsoft.Research.Science.Data.DataSet) (grd: gridDim
|
||||
let v = nc.AddVariable<float32>(var.Name, emptyArray, var.Dimensions)
|
||||
var.Attributes
|
||||
|> Array.iter (fun (s1, s2) -> v.Metadata[s1] <- s2)
|
||||
for i in 0 .. 11 do
|
||||
for j in 0 .. 17 do
|
||||
for i in 0..11 do
|
||||
for j in 0..17 do
|
||||
//printfn "%d %d" i j
|
||||
let array = Array4D.zeroCreate<float32> 1 1 grd.Siglay grd.Nele
|
||||
nc[var.Name].PutData([|i; j; 0; 0|], array)
|
||||
// for k in 0 .. (grd.Siglay-1) do
|
||||
// printfn "%d %d %d" i j k
|
||||
// let array = Array4D.zeroCreate<float32> 1 1 1 grd.Nele
|
||||
// nc[var.Name].PutData([|i; j; k; 0|], array)
|
||||
nc[var.Name].PutData([| i; j; 0; 0 |], array)
|
||||
// for k in 0 .. (grd.Siglay-1) do
|
||||
// printfn "%d %d %d" i j k
|
||||
// let array = Array4D.zeroCreate<float32> 1 1 1 grd.Nele
|
||||
// nc[var.Name].PutData([|i; j; k; 0|], array)
|
||||
nc.Commit()
|
||||
|
||||
|
||||
let createOutputFileTS (var: string) (templateFile: string) (grd: gridDims) (saveFile: string) =
|
||||
if IO.File.Exists saveFile then
|
||||
IO.File.Delete saveFile
|
||||
IO.File.Delete saveFile
|
||||
|
||||
let fvcomGrid = readFvcomGrid templateFile
|
||||
let saveFileNC = new NetCDFDataSet(saveFile)
|
||||
@@ -400,7 +405,8 @@ let createOutputFileTS (var: string) (templateFile: string) (grd: gridDims) (sav
|
||||
// Create variables
|
||||
statVars
|
||||
|> Array.filter (fun v -> v.Dimensions.Length = 2)
|
||||
|> Array.iter (fun v -> create2DVariable saveFileNC grd v) |> ignore
|
||||
|> Array.iter (fun v -> create2DVariable saveFileNC grd v)
|
||||
|> ignore
|
||||
|
||||
statVars
|
||||
|> Array.filter (fun v -> v.Dimensions.Length = 3)
|
||||
@@ -410,16 +416,15 @@ let createOutputFileTS (var: string) (templateFile: string) (grd: gridDims) (sav
|
||||
0
|
||||
|
||||
let createSectorMidpoints =
|
||||
let sectorMidpoints =
|
||||
[|
|
||||
for n in 10f .. 20f .. 350f do
|
||||
n
|
||||
|]
|
||||
sectorMidpoints
|
||||
let sectorMidpoints = [|
|
||||
for n in 10f..20f..350f do
|
||||
n
|
||||
|]
|
||||
sectorMidpoints
|
||||
|
||||
let createOutputFileUV (templateFile: string) (grd: gridDims) (saveFile: string) =
|
||||
if IO.File.Exists saveFile then
|
||||
IO.File.Delete saveFile
|
||||
IO.File.Delete saveFile
|
||||
|
||||
let fvcomGrid = readFvcomGrid templateFile
|
||||
let saveFileNC = new NetCDFDataSet(saveFile)
|
||||
@@ -444,14 +449,15 @@ let createOutputFileUV (templateFile: string) (grd: gridDims) (saveFile: string)
|
||||
|
||||
|
||||
let s = createSectorMidpoints
|
||||
saveFileNC.AddVariable<float32>("sector_midpoints", s, "sector") |> ignore
|
||||
saveFileNC.AddVariable<float32>("sector_midpoints", s, "sector")
|
||||
|> ignore
|
||||
|
||||
saveFileNC.Commit()
|
||||
0
|
||||
|
||||
let createOutputFileBottomSp (templateFile: string) (grd: gridDims) (saveFile: string) (sectors: float32 []) =
|
||||
let createOutputFileBottomSp (templateFile: string) (grd: gridDims) (saveFile: string) (sectors: float32[]) =
|
||||
if IO.File.Exists saveFile then
|
||||
IO.File.Delete saveFile
|
||||
IO.File.Delete saveFile
|
||||
|
||||
let fvcomGrid = readFvcomGrid templateFile
|
||||
let saveFileNC = new NetCDFDataSet(saveFile)
|
||||
@@ -474,12 +480,13 @@ let createOutputFileBottomSp (templateFile: string) (grd: gridDims) (saveFile: s
|
||||
|> Array.filter (fun v -> v.Dimensions.Length = 3)
|
||||
|> Array.iter (fun v -> create3DVariable saveFileNC grd v)
|
||||
|
||||
saveFileNC.AddVariable<float32>("sector_midpoints", sectors, "sector") |> ignore
|
||||
saveFileNC.AddVariable<float32>("sector_midpoints", sectors, "sector")
|
||||
|> ignore
|
||||
|
||||
saveFileNC.Commit()
|
||||
0
|
||||
|
||||
let writeCounts2NC (fileName: string) (counts: int[,,]) (subsets: (int * int) []) =
|
||||
let writeCounts2NC (fileName: string) (counts: int[,,]) (subsets: (int * int)[]) =
|
||||
let nc = NetCDFDataSet.Open fileName
|
||||
let nele = Array3D.length1 counts
|
||||
let nBins = Array3D.length2 counts
|
||||
@@ -494,24 +501,24 @@ let writeCounts2NC (fileName: string) (counts: int[,,]) (subsets: (int * int) []
|
||||
// nc["counts"].PutData([|start; 0; 0|], c)
|
||||
// )
|
||||
|
||||
for i in 0 .. (nBins-1) do
|
||||
for i in 0 .. (nBins - 1) do
|
||||
printfn "%d" i
|
||||
let array = Array3D.zeroCreate<int> nele 1 nSectors
|
||||
array[*, 0, *] <- counts[*, i, *]
|
||||
nc["counts"].PutData([|0; i; 0|], array)
|
||||
nc["counts"].PutData([| 0; i; 0 |], array)
|
||||
|
||||
// for i in 0 .. (nBins-1) do
|
||||
// printfn "%d" i
|
||||
// for j in 0 .. (nSectors-1) do
|
||||
// let array = Array3D.zeroCreate<int> nele 1 1
|
||||
// array[*, 0, 0] <- counts[*, i, j]
|
||||
// nc["counts"].PutData([|0; i; j|], array)
|
||||
// for i in 0 .. (nBins-1) do
|
||||
// printfn "%d" i
|
||||
// for j in 0 .. (nSectors-1) do
|
||||
// let array = Array3D.zeroCreate<int> nele 1 1
|
||||
// array[*, 0, 0] <- counts[*, i, j]
|
||||
// nc["counts"].PutData([|0; i; j|], array)
|
||||
nc.Commit()
|
||||
nc.Dispose()
|
||||
|
||||
let createCountsFile (saveFile: string) (grd: gridDims) (bins: float32 []) (sectors: float32 [])=
|
||||
let createCountsFile (saveFile: string) (grd: gridDims) (bins: float32[]) (sectors: float32[]) =
|
||||
if IO.File.Exists saveFile then
|
||||
IO.File.Delete saveFile
|
||||
IO.File.Delete saveFile
|
||||
|
||||
let saveFileNC = new NetCDFDataSet(saveFile)
|
||||
let nele = grd.Nele
|
||||
@@ -523,18 +530,24 @@ let createCountsFile (saveFile: string) (grd: gridDims) (bins: float32 []) (sect
|
||||
saveFileNC.CreateDimension("bins", nBins)
|
||||
saveFileNC.CreateDimension("sectors", nSectors)
|
||||
|
||||
let binsa = Array.append bins [|0.f|]
|
||||
let binsa = Array.append bins [| 0.f |]
|
||||
printfn "%d" nBins
|
||||
printfn "%d" binsa.Length
|
||||
let b = saveFileNC.AddVariable<float32>("bins", binsa, "bins") |> ignore
|
||||
let s = saveFileNC.AddVariable<float32>("sectors", sectors, "sectors") |> ignore
|
||||
let b =
|
||||
saveFileNC.AddVariable<float32>("bins", binsa, "bins")
|
||||
|> ignore
|
||||
let s =
|
||||
saveFileNC.AddVariable<float32>("sectors", sectors, "sectors")
|
||||
|> ignore
|
||||
let emptyArray = Array3D.zeroCreate<int> 1 1 1
|
||||
let c = saveFileNC.AddVariable<int>("counts", emptyArray, [|"nele"; "bins"; "sectors"|]) |> ignore
|
||||
let c =
|
||||
saveFileNC.AddVariable<int>("counts", emptyArray, [| "nele"; "bins"; "sectors" |])
|
||||
|> ignore
|
||||
|
||||
for i in 0 .. (nSectors-1) do
|
||||
for i in 0 .. (nSectors - 1) do
|
||||
printfn "%d" i
|
||||
let array = Array3D.zeroCreate<int> grd.Nele nBins 1
|
||||
saveFileNC["counts"].PutData([|0; 0; i|], array)
|
||||
saveFileNC["counts"].PutData([| 0; 0; i |], array)
|
||||
|
||||
saveFileNC.Commit()
|
||||
saveFileNC.Dispose()
|
||||
@@ -545,19 +558,21 @@ let createCountsFile (saveFile: string) (grd: gridDims) (bins: float32 []) (sect
|
||||
let tryFindCountFiles (folder: string) =
|
||||
let files =
|
||||
IO.Directory.GetFiles folder
|
||||
|> Array.filter (fun s -> let sLenght = String.length s
|
||||
s[(sLenght-3) .. sLenght] = ".nc")
|
||||
|> Array.filter (fun s ->
|
||||
let sLenght = String.length s
|
||||
s[(sLenght - 3) .. sLenght] = ".nc"
|
||||
)
|
||||
|> Array.filter (fun s -> (s.Contains "counts"))
|
||||
|
||||
let months =
|
||||
files
|
||||
|> Array.map (fun f -> let ind1 = (f.LastIndexOf "_") + 1
|
||||
let ind2 = (f.LastIndexOf ".") - 1
|
||||
int(f[ind1..ind2]))
|
||||
|> Array.map (fun f ->
|
||||
let ind1 = (f.LastIndexOf "_") + 1
|
||||
let ind2 = (f.LastIndexOf ".") - 1
|
||||
int (f[ind1..ind2])
|
||||
)
|
||||
let fm = Array.zip files months
|
||||
let sortedFiles =
|
||||
fm
|
||||
|> Array.sortBy(fun (_, m) -> m)
|
||||
let sortedFiles = fm |> Array.sortBy (fun (_, m) -> m)
|
||||
|
||||
printfn "%A" sortedFiles
|
||||
|
||||
@@ -565,5 +580,4 @@ let tryFindCountFiles (folder: string) =
|
||||
let f, m = Array.unzip sortedFiles
|
||||
Some f
|
||||
else
|
||||
None
|
||||
|
||||
None
|
||||
156 src/Program.fs
@@ -1,80 +1,76 @@
|
||||
|
||||
open NetCDF
|
||||
open Timeseries
|
||||
open Variables
|
||||
|
||||
|
||||
|
||||
let resultsFolder = "/home/frankgaa/Buksnes/BuksnesArtificial/"
|
||||
let resultsFolder2 = "/home/frankgaa/Buksnes/BuksnesArtificial2/"
|
||||
let rFolders = [|resultsFolder; resultsFolder2|]
|
||||
|
||||
let resultsFolder1 = "/home/frankgaa/Buksnes/"
|
||||
let prefix = "Buksnes_"
|
||||
let outfile = "test.nc"
|
||||
|
||||
let makeNodeSubsets (grd: gridDims) (chunkSize: int) =
|
||||
let nFullSubsets = grd.Node / chunkSize
|
||||
let remainingPoints = grd.Node - (nFullSubsets*chunkSize)
|
||||
|
||||
let nSubsets =
|
||||
match remainingPoints with
|
||||
| 0 -> nFullSubsets
|
||||
| _ -> nFullSubsets + 1
|
||||
|
||||
let subsets =
|
||||
[|
|
||||
for i in 0..(nSubsets-1) do
|
||||
if i < nSubsets-1 then
|
||||
i*chunkSize, chunkSize
|
||||
else
|
||||
i*chunkSize, remainingPoints
|
||||
|]
|
||||
subsets
|
||||
|
||||
let makeCellSubsets (grd: gridDims) (chunkSize: int) =
|
||||
let nFullSubsets = grd.Node / chunkSize
|
||||
let remainingPoints = grd.Nele - (nFullSubsets*chunkSize)
|
||||
|
||||
let nSubsets =
|
||||
match remainingPoints with
|
||||
| 0 -> nFullSubsets
|
||||
| _ -> nFullSubsets + 1
|
||||
|
||||
let subsets =
|
||||
[|
|
||||
for i in 0..(nSubsets-1) do
|
||||
if i < nSubsets-1 then
|
||||
i*chunkSize, chunkSize
|
||||
else
|
||||
i*chunkSize, remainingPoints
|
||||
|]
|
||||
subsets
|
||||
|
||||
let varNamesNode = ["temp"; "salinity"]
|
||||
let varNamesCell = ["u"; "v"]
|
||||
|
||||
[<EntryPoint>]
|
||||
let main argv =
|
||||
let fList = createFileList rFolders prefix
|
||||
let fvGrid = readFvcomGrid fList.Path[0]
|
||||
let nodeSubsets = makeNodeSubsets fvGrid 10000
|
||||
let cellSubsets = makeCellSubsets fvGrid 10000
|
||||
createOutputFile fList.Path[0] fvGrid outfile |> ignore
|
||||
// varNamesNode
|
||||
// |> List.iter (fun var -> nodeSubsets
|
||||
// |> Array.iter (fun (s, n) -> //printfn "%s" var
|
||||
// analyzeNodeSubset var (s, n) fList fvGrid outfile
|
||||
// |> ignore
|
||||
// )
|
||||
// )
|
||||
|
||||
// varNamesCell
|
||||
// |> List.iter (fun var ->
|
||||
cellSubsets
|
||||
|> Array.iter (fun (s, n) -> //printfn "%s" var
|
||||
analyzeCellSubset (s, n) fList fvGrid outfile
|
||||
|> ignore
|
||||
)
|
||||
|
||||
0
|
||||
open NetCDF
|
||||
open Timeseries
|
||||
open Variables
|
||||
|
||||
|
||||
let resultsFolder = "/home/frankgaa/Buksnes/BuksnesArtificial/"
|
||||
let resultsFolder2 = "/home/frankgaa/Buksnes/BuksnesArtificial2/"
|
||||
let rFolders = [| resultsFolder; resultsFolder2 |]
|
||||
|
||||
let resultsFolder1 = "/home/frankgaa/Buksnes/"
|
||||
let prefix = "Buksnes_"
|
||||
let outfile = "test.nc"
|
||||
|
||||
let makeNodeSubsets (grd: gridDims) (chunkSize: int) =
|
||||
let nFullSubsets = grd.Node / chunkSize
|
||||
let remainingPoints = grd.Node - (nFullSubsets * chunkSize)
|
||||
|
||||
let nSubsets =
|
||||
match remainingPoints with
|
||||
| 0 -> nFullSubsets
|
||||
| _ -> nFullSubsets + 1
|
||||
|
||||
let subsets = [|
|
||||
for i in 0 .. (nSubsets - 1) do
|
||||
if i < nSubsets - 1 then
|
||||
i * chunkSize, chunkSize
|
||||
else
|
||||
i * chunkSize, remainingPoints
|
||||
|]
|
||||
subsets
|
||||
|
||||
let makeCellSubsets (grd: gridDims) (chunkSize: int) =
|
||||
let nFullSubsets = grd.Node / chunkSize
|
||||
let remainingPoints = grd.Nele - (nFullSubsets * chunkSize)
|
||||
|
||||
let nSubsets =
|
||||
match remainingPoints with
|
||||
| 0 -> nFullSubsets
|
||||
| _ -> nFullSubsets + 1
|
||||
|
||||
let subsets = [|
|
||||
for i in 0 .. (nSubsets - 1) do
|
||||
if i < nSubsets - 1 then
|
||||
i * chunkSize, chunkSize
|
||||
else
|
||||
i * chunkSize, remainingPoints
|
||||
|]
|
||||
subsets
|
||||
|
||||
let varNamesNode = [ "temp"; "salinity" ]
|
||||
let varNamesCell = [ "u"; "v" ]
|
||||
|
||||
[<EntryPoint>]
|
||||
let main argv =
|
||||
let fList = createFileList rFolders prefix
|
||||
let fvGrid = readFvcomGrid fList.Path[0]
|
||||
let nodeSubsets = makeNodeSubsets fvGrid 10000
|
||||
let cellSubsets = makeCellSubsets fvGrid 10000
|
||||
createOutputFile fList.Path[0] fvGrid outfile
|
||||
|> ignore
|
||||
// varNamesNode
|
||||
// |> List.iter (fun var -> nodeSubsets
|
||||
// |> Array.iter (fun (s, n) -> //printfn "%s" var
|
||||
// analyzeNodeSubset var (s, n) fList fvGrid outfile
|
||||
// |> ignore
|
||||
// )
|
||||
// )
|
||||
// varNamesCell
|
||||
// |> List.iter (fun var ->
|
||||
cellSubsets
|> Array.iter (fun (s, n) -> //printfn "%s" var
analyzeCellSubset (s, n) fList fvGrid outfile
|> ignore
)

0
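makeNodeSubsets and makeCellSubsets above split the grid into fixed-size chunks plus one remainder chunk. A worked restatement of that arithmetic with illustrative numbers (25 000 nodes, chunks of 10 000; not part of the commit):

    let node, chunkSize = 25_000, 10_000
    let nFullSubsets = node / chunkSize                      // 2
    let remainingPoints = node - (nFullSubsets * chunkSize)  // 5_000
    let nSubsets = if remainingPoints = 0 then nFullSubsets else nFullSubsets + 1
    let subsets = [|
        for i in 0 .. (nSubsets - 1) do
            if i < nSubsets - 1 then
                i * chunkSize, chunkSize
            else
                i * chunkSize, remainingPoints
    |]
    // subsets = [| (0, 10000); (10000, 10000); (20000, 5000) |]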
@@ -20,4 +20,4 @@ let appsettings =
| Ok s -> s
| Error e -> failwith e

sprintf "AppSettings: %A" appsettings |> Log.Debug
sprintf "AppSettings: %A" appsettings |> Log.Debug
162 src/Stats.fs
@@ -5,47 +5,39 @@ open FSharp.Stats.Quantile
|
||||
open FSharp.Stats.SummaryStats
|
||||
open FSharp.Stats
|
||||
|
||||
type Stats =
|
||||
{
|
||||
VarName: string
|
||||
Ind: int
|
||||
IndZ: int
|
||||
Mean: float32
|
||||
Std: float32
|
||||
Var: float32
|
||||
Q05: float32
|
||||
Q25: float32
|
||||
Q50: float32
|
||||
Q75: float32
|
||||
Q95: float32
|
||||
Q99: float32
|
||||
}
|
||||
type StatsMonthly =
|
||||
{
|
||||
VarName: string
|
||||
month: int
|
||||
Ind: int
|
||||
IndZ: int
|
||||
Mean: float32
|
||||
Std: float32
|
||||
Var: float32
|
||||
Q05: float32
|
||||
Q25: float32
|
||||
Q50: float32
|
||||
Q75: float32
|
||||
Q95: float32
|
||||
Q99: float32
|
||||
}
|
||||
type Stats = {
|
||||
VarName: string
|
||||
Ind: int
|
||||
IndZ: int
|
||||
Mean: float32
|
||||
Std: float32
|
||||
Var: float32
|
||||
Q05: float32
|
||||
Q25: float32
|
||||
Q50: float32
|
||||
Q75: float32
|
||||
Q95: float32
|
||||
Q99: float32
|
||||
}
|
||||
type StatsMonthly = {
|
||||
VarName: string
|
||||
month: int
|
||||
Ind: int
|
||||
IndZ: int
|
||||
Mean: float32
|
||||
Std: float32
|
||||
Var: float32
|
||||
Q05: float32
|
||||
Q25: float32
|
||||
Q50: float32
|
||||
Q75: float32
|
||||
Q95: float32
|
||||
Q99: float32
|
||||
}
|
||||
|
||||
type WaterTransport =
|
||||
{
|
||||
SectorInd: int
|
||||
CellInd: int
|
||||
IndZ: int
|
||||
Value: float32
|
||||
}
|
||||
type WaterTransport = { SectorInd: int; CellInd: int; IndZ: int; Value: float32 }
|
||||
|
||||
let computeQuantiles (data: float []) (quantiles: float []) =
|
||||
let computeQuantiles (data: float[]) (quantiles: float[]) =
|
||||
let q =
|
||||
quantiles
|
||||
|> Array.map (fun qi -> california qi data)
|
||||
@@ -54,15 +46,15 @@ let computeQuantiles (data: float []) (quantiles: float []) =
|
||||
|
||||
|
||||
|
||||
let uv2dr (u: float32 []) (v: float32 []) =
|
||||
0
|
||||
let uv2dr (u: float32[]) (v: float32[]) = 0
|
||||
|
||||
let computeStats (varName: string) (ind: int) (indZ: int) (data: float[]) =
|
||||
let mean = Seq.average data
|
||||
//printfn "%A" data
|
||||
let std = Seq.stDevPopulation data
|
||||
let variance =Seq.varPopulation data
|
||||
let q05, q25, q50, q75, q95, q99 = computeQuantiles data [|0.05; 0.25; 0.5; 0.75; 0.95; 0.99|]
|
||||
let variance = Seq.varPopulation data
|
||||
let q05, q25, q50, q75, q95, q99 =
|
||||
computeQuantiles data [| 0.05; 0.25; 0.5; 0.75; 0.95; 0.99 |]
|
||||
{
|
||||
VarName = varName
|
||||
Ind = ind
|
||||
@@ -82,8 +74,9 @@ let computeStats (varName: string) (ind: int) (indZ: int) (data: float[]) =
|
||||
let computeMonthlyStats (varName: string) (month: int) (ind: int) (indZ: int) (data: float[]) =
|
||||
let mean = Seq.average data
|
||||
let std = Seq.stDevPopulation data
|
||||
let variance =Seq.varPopulation data
|
||||
let q05, q25, q50, q75, q95, q99 = computeQuantiles data [|0.05; 0.25; 0.5; 0.75; 0.95; 0.99|]
|
||||
let variance = Seq.varPopulation data
|
||||
let q05, q25, q50, q75, q95, q99 =
|
||||
computeQuantiles data [| 0.05; 0.25; 0.5; 0.75; 0.95; 0.99 |]
|
||||
{
|
||||
VarName = varName
|
||||
month = month
|
||||
@@ -100,29 +93,27 @@ let computeMonthlyStats (varName: string) (month: int) (ind: int) (indZ: int) (d
|
||||
Q99 = float32 q99
|
||||
}
|
||||
|
||||
let analyseScalarTimeSeries (varName: string) (data: float32 [,,]) (startInd: int) (nNodes: int) =
|
||||
let analyseScalarTimeSeries (varName: string) (data: float32[,,]) (startInd: int) (nNodes: int) =
|
||||
let nSiglays = (Array3D.length2 data) - 1
|
||||
let m =
|
||||
[| for z in 0..nSiglays do
|
||||
for n in startInd..(startInd+nNodes-1) do
|
||||
n, z, data[*, z, n-startInd]
|
||||
|> Array.map float
|
||||
|]
|
||||
let m = [|
|
||||
for z in 0..nSiglays do
|
||||
for n in startInd .. (startInd + nNodes - 1) do
|
||||
n, z, data[*, z, n - startInd] |> Array.map float
|
||||
|]
|
||||
|
||||
let stats =
|
||||
m
|
||||
|> Array.Parallel.map (fun (ind, indZ, a) -> computeStats varName ind indZ a)
|
||||
stats
|
||||
|
||||
let analyseScalarTimeSeriesMonthly (varName: string) (month:int) (data: float32 [,,]) (startInd: int) (nNodes: int) =
|
||||
let analyseScalarTimeSeriesMonthly (varName: string) (month: int) (data: float32[,,]) (startInd: int) (nNodes: int) =
|
||||
let nSiglays = (Array3D.length2 data) - 1
|
||||
//printfn "%A" data[*, 0, 0]
|
||||
let m =
|
||||
[| for z in 0..nSiglays do
|
||||
for n in startInd..(startInd+nNodes-1) do
|
||||
n, z, data[*, z, n-startInd]
|
||||
|> Array.map float
|
||||
|]
|
||||
let m = [|
|
||||
for z in 0..nSiglays do
|
||||
for n in startInd .. (startInd + nNodes - 1) do
|
||||
n, z, data[*, z, n - startInd] |> Array.map float
|
||||
|]
|
||||
|
||||
let stats =
|
||||
m
|
||||
@@ -131,12 +122,9 @@ let analyseScalarTimeSeriesMonthly (varName: string) (month:int) (data: float32
|
||||
|
||||
|
||||
let inSector (lower: float32) (upper: float32) (dr: float32) =
|
||||
if (dr >= lower && dr < upper) then
|
||||
true
|
||||
else
|
||||
false
|
||||
if (dr >= lower && dr < upper) then true else false
|
||||
|
||||
let waterTransportInSector (d: int*int*float32[]*float32[]) (s: float32*float32) (i: int) =
|
||||
let waterTransportInSector (d: int * int * float32[] * float32[]) (s: float32 * float32) (i: int) =
|
||||
let cellInd, indZ, sp, dr = d
|
||||
let nTotal = sp.Length |> float32
|
||||
let lower, upper = s
|
||||
@@ -144,50 +132,48 @@ let waterTransportInSector (d: int*int*float32[]*float32[]) (s: float32*float32)
|
||||
|
||||
let insideSector =
|
||||
Array.zip sp dr
|
||||
|> Array.filter(fun (_, dri) -> inSector lower upper dri)
|
||||
|> Array.filter (fun (_, dri) -> inSector lower upper dri)
|
||||
|> Array.map (fun (spi, _) -> spi)
|
||||
|
||||
let nValuesInside = insideSector.Length
|
||||
let v =
|
||||
if nValuesInside > 0 then
|
||||
(3600.f * (Array.sum insideSector) / nTotal) |> float32
|
||||
(3600.f * (Array.sum insideSector) / nTotal)
|
||||
|> float32
|
||||
else
|
||||
0f
|
||||
|
||||
{
|
||||
SectorInd = i
|
||||
CellInd = cellInd
|
||||
IndZ =
|
||||
indZ
|
||||
Value = v
|
||||
}
|
||||
{ SectorInd = i; CellInd = cellInd; IndZ = indZ; Value = v }
|
||||
|
||||
|
||||
|
||||
let computeWaterTransport (sp: float32 [,,]) (dr: float32 [,,]) (startInd: int) (nNodes: int) =
|
||||
let computeWaterTransport (sp: float32[,,]) (dr: float32[,,]) (startInd: int) (nNodes: int) =
|
||||
let nSiglays = (Array3D.length2 sp) - 1
|
||||
let m =
|
||||
[| for z in 0..(nSiglays-1) do
|
||||
for n in startInd..(startInd+nNodes-1) do
|
||||
n, z, sp[*, z, n-startInd], dr[*, z, n-startInd]
|
||||
|]
|
||||
let m = [|
|
||||
for z in 0 .. (nSiglays - 1) do
|
||||
for n in startInd .. (startInd + nNodes - 1) do
|
||||
n, z, sp[*, z, n - startInd], dr[*, z, n - startInd]
|
||||
|]
|
||||
//printfn "%d %d %d" (Array3D.length1 sp) (Array3D.length2 sp) (Array3D.length3 sp)
|
||||
let twoPi = 2.f * (Math.PI |> float32)
|
||||
let sectors =
|
||||
[|
|
||||
for n in 0f .. 20f .. 340f do
|
||||
let sectors = [|
|
||||
for n in 0f..20f..340f do
|
||||
(twoPi / 360.f) * n, (twoPi / 360.f) * (n + 20f)
|
||||
|]
|
||||
let sectorInd = [|for i in 0 .. (sectors.Length-1) -> i|]
|
||||
|]
|
||||
let sectorInd = [| for i in 0 .. (sectors.Length - 1) -> i |]
|
||||
|
||||
let waterTransportStats =
|
||||
Array.zip sectorInd sectors
|
||||
|> Array.map (fun (i, s) -> m
|
||||
|> Array.Parallel.map (fun d -> waterTransportInSector d s i))
|
||||
|> Array.map (fun (i, s) ->
|
||||
m
|
||||
|> Array.Parallel.map (fun d -> waterTransportInSector d s i)
|
||||
)
|
||||
|> Array.fold Array.append [||]

let waterTransportMatrix = Array3D.zeroCreate sectors.Length nSiglays nNodes
let waterTransportMatrix =
Array3D.zeroCreate sectors.Length nSiglays nNodes
waterTransportStats
|> Array.iter (fun stat -> //printfn "%d %d %d" stat.SectorInd stat.IndZ stat.CellInd |> ignore
waterTransportMatrix[stat.SectorInd, stat.IndZ, (stat.CellInd-startInd)] <- stat.Value)
waterTransportMatrix[stat.SectorInd, stat.IndZ, (stat.CellInd - startInd)] <- stat.Value
)
waterTransportMatrix
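computeWaterTransport bins flow directions into eighteen 20-degree sectors expressed in radians and accumulates a per-sector value for every cell and layer. A small sketch of the sector lookup, assuming the direction is already wrapped into [0, 2*pi) (the helper name is illustrative):

    let twoPi = 2.f * float32 System.Math.PI
    // Same sector edges as computeWaterTransport: 0-20, 20-40, ..., 340-360 degrees.
    let sectors = [|
        for n in 0f .. 20f .. 340f do
            (twoPi / 360.f) * n, (twoPi / 360.f) * (n + 20f)
    |]
    let sectorIndexOf (dr: float32) =
        sectors |> Array.findIndex (fun (lower, upper) -> dr >= lower && dr < upper)
    // sectorIndexOf (twoPi / 4.f) = 4, i.e. a 90-degree direction lands in the 80-100 sector.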
@@ -4,54 +4,52 @@ open NetCDF
|
||||
open Oceanbox.FvcomKit.Fvcom
|
||||
|
||||
let parseFolders (folders: string) =
|
||||
if folders.Contains "," then
|
||||
folders.Split(",")
|
||||
else
|
||||
[|folders|]
|
||||
if folders.Contains "," then
|
||||
folders.Split(",")
|
||||
else
|
||||
[| folders |]
|
||||
|
||||
let makeNodeSubsets (grd: gridDims) (chunkSize: int) =
|
||||
let nFullSubsets = grd.Node / chunkSize
|
||||
let remainingPoints = grd.Node - (nFullSubsets*chunkSize)
|
||||
let remainingPoints = grd.Node - (nFullSubsets * chunkSize)
|
||||
|
||||
let nSubsets =
|
||||
match remainingPoints with
|
||||
| 0 -> nFullSubsets
|
||||
| _ -> nFullSubsets + 1
|
||||
|
||||
let subsets =
|
||||
[|
|
||||
for i in 0..(nSubsets-1) do
|
||||
if i < nSubsets-1 then
|
||||
i*chunkSize, chunkSize
|
||||
let subsets = [|
|
||||
for i in 0 .. (nSubsets - 1) do
|
||||
if i < nSubsets - 1 then
|
||||
i * chunkSize, chunkSize
|
||||
else
|
||||
i*chunkSize, remainingPoints
|
||||
|]
|
||||
i * chunkSize, remainingPoints
|
||||
|]
|
||||
subsets
|
||||
|
||||
let makeCellSubsets (grd: gridDims) (chunkSize: int) =
|
||||
let nFullSubsets = grd.Nele / chunkSize
|
||||
let remainingPoints = grd.Nele - (nFullSubsets*chunkSize)
|
||||
let remainingPoints = grd.Nele - (nFullSubsets * chunkSize)
|
||||
|
||||
let nSubsets =
|
||||
match remainingPoints with
|
||||
| 0 -> nFullSubsets
|
||||
| _ -> nFullSubsets + 1
|
||||
|
||||
let subsets =
|
||||
[|
|
||||
for i in 0..(nSubsets-1) do
|
||||
if i < nSubsets-1 then
|
||||
i*chunkSize, chunkSize
|
||||
let subsets = [|
|
||||
for i in 0 .. (nSubsets - 1) do
|
||||
if i < nSubsets - 1 then
|
||||
i * chunkSize, chunkSize
|
||||
else
|
||||
i*chunkSize, remainingPoints
|
||||
|]
|
||||
i * chunkSize, remainingPoints
|
||||
|]
|
||||
subsets
|
||||
|
||||
|
||||
let checkIndicesNode (grd: gridDims) (startInd: int) (nNodes: int) =
|
||||
if startInd >= grd.Node then
|
||||
printfn "Start index cannot be higher than number of nodes"
|
||||
exit(-1)
|
||||
exit (-1)
|
||||
if startInd + nNodes >= grd.Node then
|
||||
let nNodesNew = (grd.Node) - startInd
|
||||
nNodesNew
|
||||
@@ -61,7 +59,7 @@ let checkIndicesNode (grd: gridDims) (startInd: int) (nNodes: int) =
|
||||
let checkIndicesCell grd startInd nCells =
|
||||
if startInd >= grd.Nele then
|
||||
printfn "Start index cannot be higher than number of nodes"
|
||||
exit(-1)
|
||||
exit (-1)
|
||||
if startInd + nCells >= grd.Nele then
|
||||
let nElemNew = (grd.Nele) - startInd
|
||||
nElemNew
|
||||
|
||||
@@ -12,53 +12,41 @@ open Oceanbox.FvcomKit.ROMS
|
||||
|
||||
|
||||
type Indices = {
|
||||
Jan: int [] option
|
||||
Feb: int [] option
|
||||
Mar: int [] option
|
||||
Apr: int [] option
|
||||
May: int [] option
|
||||
Jun: int [] option
|
||||
Jul: int [] option
|
||||
Aug: int [] option
|
||||
Sep: int [] option
|
||||
Oct: int [] option
|
||||
Nov: int [] option
|
||||
Dec: int [] option
|
||||
}
|
||||
Jan: int[] option
|
||||
Feb: int[] option
|
||||
Mar: int[] option
|
||||
Apr: int[] option
|
||||
May: int[] option
|
||||
Jun: int[] option
|
||||
Jul: int[] option
|
||||
Aug: int[] option
|
||||
Sep: int[] option
|
||||
Oct: int[] option
|
||||
Nov: int[] option
|
||||
Dec: int[] option
|
||||
}
|
||||
|
||||
let statistics = [
|
||||
"Mean"
|
||||
"Std"
|
||||
"Var"
|
||||
"Q05"
|
||||
"Q25"
|
||||
"Q50"
|
||||
"Q75"
|
||||
"Q95"
|
||||
"Q99"
|
||||
]
|
||||
let statistics = [ "Mean"; "Std"; "Var"; "Q05"; "Q25"; "Q50"; "Q75"; "Q95"; "Q99" ]
|
||||
|
||||
let createNodeIndArray (grd: gridDims) =
|
||||
let nodeInds = [|
|
||||
for i in 0..(grd.Node-1) do
|
||||
for j in 0..(grd.Siglay-1) do
|
||||
for i in 0 .. (grd.Node - 1) do
|
||||
for j in 0 .. (grd.Siglay - 1) do
|
||||
i, j
|
||||
|]
|
||||
|]
|
||||
nodeInds
|
||||
|
||||
let createNeleIndArray (grd: gridDims) =
|
||||
let neleInds = [|
|
||||
for i in 0..(grd.Nele-1) do
|
||||
for j in 0..(grd.Siglay-1) do
|
||||
for i in 0 .. (grd.Nele - 1) do
|
||||
for j in 0 .. (grd.Siglay - 1) do
|
||||
i, j
|
||||
|]
|
||||
|]
|
||||
neleInds
|
||||
|
||||
let trySearchMonth (dt: DateTime []) (month: int) =
|
||||
let months =
|
||||
dt
|
||||
|> Array.map (fun v -> v.Month)
|
||||
let monthInds = [|for i in 0..(months.Length-1) -> i|]
|
||||
let trySearchMonth (dt: DateTime[]) (month: int) =
|
||||
let months = dt |> Array.map (fun v -> v.Month)
|
||||
let monthInds = [| for i in 0 .. (months.Length - 1) -> i |]
|
||||
let inds =
|
||||
Array.zip monthInds months
|
||||
|> Array.filter (fun (_, m) -> m = month)
|
||||
@@ -69,42 +57,44 @@ let trySearchMonth (dt: DateTime []) (month: int) =
|
||||
| _ -> Some(inds)
|
||||
|
||||
|
||||
let getMonthlyInds (dt: DateTime []) =
|
||||
let months = [|for i in 1 .. 12 do i|]
|
||||
let inds =
|
||||
months
|
||||
|> Array.map (fun m -> trySearchMonth dt m)
|
||||
let getMonthlyInds (dt: DateTime[]) =
|
||||
let months = [|
|
||||
for i in 1..12 do
|
||||
i
|
||||
|]
|
||||
let inds = months |> Array.map (fun m -> trySearchMonth dt m)
|
||||
|
||||
inds
|
||||
inds
|
||||
|
||||
let writeToNc =
|
||||
0
|
||||
let writeToNc = 0
|
||||
|
||||
let readVarOld (ncFile: string) (var: string) (spaceInd: int) (nSiglays: int) (nodeRange: int) (timeInd: int) =
|
||||
let nc = NetCDFDataSet.Open ncFile
|
||||
let i = spaceInd
|
||||
let data = nc[var].GetData([|timeInd; 0; i|], [|1; nSiglays; nodeRange|]) :?> float32 [,,]
|
||||
let data1 = nc[var].GetData() :?> float32 [,,]
|
||||
let data =
|
||||
nc[var].GetData([| timeInd; 0; i |], [| 1; nSiglays; nodeRange |]) :?> float32[,,]
|
||||
let data1 = nc[var].GetData() :?> float32[,,]
|
||||
|
||||
nc.Dispose()
|
||||
let d = data[0, *, *]
|
||||
let d1 = data1[timeInd, 0..(nSiglays), i..(nodeRange)]
|
||||
let d1 = data1[timeInd, 0 .. (nSiglays), i .. (nodeRange)]
|
||||
d
|
||||
|
||||
let swapDimensions (data: float32 [,]) =
|
||||
let swapDimensions (data: float32[,]) =
|
||||
let l1 = Array2D.length1 data
|
||||
let l2 = Array2D.length2 data
|
||||
//let l3 = Array3D.length3 data
|
||||
|
||||
let dataSwapped = Array2D.zeroCreate l2 l1
|
||||
// for i in 0..l3 do
|
||||
// dataSwapped[i, *, *] <- data[
|
||||
// for i in 0..l3 do
|
||||
// dataSwapped[i, *, *] <- data[
|
||||
|
||||
dataSwapped
|
||||
|
||||
|
||||
let readVar (fvcom: fvcomFile) (var: string) (spaceInd: int) (nSiglays: int) (nodeRange: int) =
|
||||
let nc = NetCDFDataSet.Open(fvcom.Path, openMode=Microsoft.Research.Science.Data.ResourceOpenMode.ReadOnly)
|
||||
let nc =
|
||||
NetCDFDataSet.Open(fvcom.Path, openMode = Microsoft.Research.Science.Data.ResourceOpenMode.ReadOnly)
|
||||
|
||||
//printfn "%A" nc
|
||||
let i = spaceInd
|
||||
@@ -112,7 +102,8 @@ let readVar (fvcom: fvcomFile) (var: string) (spaceInd: int) (nSiglays: int) (no
|
||||
|
||||
|
||||
//printfn "%d" nodeRange
|
||||
let data = nc[var].GetData([|fvcom.startIndex; 0; i|], [|indRange; nSiglays; nodeRange|]) :?> float32 [,,]
|
||||
let data =
|
||||
nc[var].GetData([| fvcom.startIndex; 0; i |], [| indRange; nSiglays; nodeRange |]) :?> float32[,,]
|
||||
//printfn "%s" var
|
||||
//let data1 = nc[var].GetData() :?> float32 [,,]
|
||||
//printfn "%s" fvcom.Path
|
||||
@@ -121,19 +112,20 @@ let readVar (fvcom: fvcomFile) (var: string) (spaceInd: int) (nSiglays: int) (no
|
||||
//let d1 = data1[fvcom.startIndex..fvcom.stopIndex, 0..(nSiglays), i..(i-1+nodeRange)]
|
||||
data
|
||||
|
||||
let readTimeSeries (var: string) (fl:Filelist) (fArray: fvcomFile []) (nSiglays: int) (spaceInd: int) (nodeRange: int) =
|
||||
let readTimeSeries (var: string) (fl: Filelist) (fArray: fvcomFile[]) (nSiglays: int) (spaceInd: int) (nodeRange: int) =
|
||||
//let fileListArray = Array.zip3 fl.Path fl.Index fl.ArrayInd
|
||||
let data = Array3D.zeroCreate fl.FvcomTime.Length nSiglays nodeRange
|
||||
//printfn "%d %d %d" (Array3D.length1 data) (Array3D.length2 data) (Array3D.length3 data)
|
||||
// fileListArray
|
||||
// |> Array.iter (fun (p, timeInd, arrayInd) -> if timeInd = 0 then
|
||||
// printfn "%s %d" p timeInd
|
||||
// data[arrayInd, *, *] <- readVar p var spaceInd nSiglays nodeRange timeInd)
|
||||
// fileListArray
|
||||
// |> Array.iter (fun (p, timeInd, arrayInd) -> if timeInd = 0 then
|
||||
// printfn "%s %d" p timeInd
|
||||
// data[arrayInd, *, *] <- readVar p var spaceInd nSiglays nodeRange timeInd)
|
||||
fArray
|
||||
|> Array.iter (fun f -> //printfn "%d %d" f.startArrayInd f.stopArrayInd
|
||||
//let d = readVar f var spaceInd nSiglays nodeRange
|
||||
//printfn "%d %d %d" (Array3D.length1 d) (Array3D.length2 d) (Array3D.length3 d))
|
||||
data[f.startArrayInd..(f.stopArrayInd), *, *] <- readVar f var spaceInd nSiglays nodeRange)
|
||||
//let d = readVar f var spaceInd nSiglays nodeRange
|
||||
//printfn "%d %d %d" (Array3D.length1 d) (Array3D.length2 d) (Array3D.length3 d))
|
||||
data[f.startArrayInd .. (f.stopArrayInd), *, *] <- readVar f var spaceInd nSiglays nodeRange
|
||||
)
|
||||
|
||||
data
|
||||
|
||||
@@ -152,9 +144,11 @@ let exportStatsToNc (nc: Microsoft.Research.Science.Data.DataSet) (stat: Stats)
|
||||
| _ -> None
|
||||
|
||||
statistics
|
||||
|> List.iter (fun s -> let data = getData s
|
||||
if data.IsSome then
|
||||
nc[stat.VarName + s].PutData([|stat.IndZ; stat.Ind|], Array2D.create 1 1 data.Value))
|
||||
|> List.iter (fun s ->
|
||||
let data = getData s
|
||||
if data.IsSome then
|
||||
nc[stat.VarName + s].PutData([| stat.IndZ; stat.Ind |], Array2D.create 1 1 data.Value)
|
||||
)
|
||||
()
|
||||
let getData (stat: Stats) (s: string) =
|
||||
match s with
|
||||
@@ -182,102 +176,126 @@ let getDataMonthly (stat: StatsMonthly) (s: string) =
|
||||
| "Q99" -> Some stat.Q99
|
||||
| _ -> None
|
||||
|
||||
let extractMonthlyData (mInds: int []) (data: float32 [,,]) =
|
||||
let dataMonth = Array3D.zeroCreate mInds.Length (Array3D.length2 data) (Array3D.length3 data)
|
||||
let extractMonthlyData (mInds: int[]) (data: float32[,,]) =
|
||||
let dataMonth =
|
||||
Array3D.zeroCreate mInds.Length (Array3D.length2 data) (Array3D.length3 data)
|
||||
mInds
|
||||
|> Array.iteri (fun i ii -> dataMonth[i, *, *] <- data[ii, *, *])
|
||||
dataMonth
|
||||
|
||||
let statsToArrayM (s: string) (statsData: StatsMonthly []) (startInd: int) (nodeRange: int) (grd: gridDims) =
|
||||
let statsToArrayM (s: string) (statsData: StatsMonthly[]) (startInd: int) (nodeRange: int) (grd: gridDims) =
|
||||
//let dataArrayM = Array3D.zeroCreate<float32> grd.Siglay nodeRange 12
|
||||
let dataArrayM = Array3D.zeroCreate<float32> 12 grd.Siglay nodeRange
|
||||
statsData
|
||||
|> Array.iter (fun stat -> let k = stat.IndZ
|
||||
let i = stat.Ind
|
||||
let d = getDataMonthly stat s
|
||||
if Option.isSome d then
|
||||
dataArrayM[stat.month, k, i-startInd] <- d.Value
|
||||
)
|
||||
|> Array.iter (fun stat ->
|
||||
let k = stat.IndZ
|
||||
let i = stat.Ind
|
||||
let d = getDataMonthly stat s
|
||||
if Option.isSome d then
|
||||
dataArrayM[stat.month, k, i - startInd] <- d.Value
|
||||
)
|
||||
dataArrayM
|
||||
|
||||
let rec writeMonthlyToNc (outfile: Microsoft.Research.Science.Data.DataSet) (stats: StatsMonthly []) (startInd: int) (nodeRange: int) (grd : gridDims) =
|
||||
let rec writeMonthlyToNc
|
||||
(outfile: Microsoft.Research.Science.Data.DataSet)
|
||||
(stats: StatsMonthly[])
|
||||
(startInd: int)
|
||||
(nodeRange: int)
|
||||
(grd: gridDims)
|
||||
=
|
||||
statistics
|
||||
|> List.iter (fun s -> let arr = statsToArrayM s stats startInd nodeRange grd
|
||||
outfile[stats[0].VarName + "Monthly" + s].PutData([|0; 0; startInd|], arr) |> ignore
|
||||
)
|
||||
|> List.iter (fun s ->
|
||||
let arr = statsToArrayM s stats startInd nodeRange grd
|
||||
outfile[stats[0].VarName + "Monthly" + s].PutData([| 0; 0; startInd |], arr)
|
||||
|> ignore
|
||||
)
|
||||
|
||||
0
|
||||
|
||||
|
||||
|
||||
let rec tryOpenNcFile (fname:string) =
|
||||
let rec tryOpenNcFile (fname: string) =
|
||||
try
|
||||
let nc = NetCDFDataSet.Open fname
|
||||
nc
|
||||
with
|
||||
| :? Microsoft.Research.Science.Data.DataSetCreateException ->
|
||||
printfn "%s" (fname + "could not be opended. Waiting ....")
|
||||
Threading.Thread.Sleep 10000
|
||||
tryOpenNcFile fname
|
||||
with :? Microsoft.Research.Science.Data.DataSetCreateException ->
|
||||
printfn "%s" (fname + "could not be opended. Waiting ....")
|
||||
Threading.Thread.Sleep 10000
|
||||
tryOpenNcFile fname
|
||||
|
||||
|
||||
|
||||
|
||||
let analyzeNodeSubset (varName: string) (subset: int * int) (fList: Filelist) (fArray: fvcomFile []) (grd: gridDims) (outfile: string) =
|
||||
let analyzeNodeSubset
|
||||
(varName: string)
|
||||
(subset: int * int)
|
||||
(fList: Filelist)
|
||||
(fArray: fvcomFile[])
|
||||
(grd: gridDims)
|
||||
(outfile: string)
|
||||
=
|
||||
//let ncout = NetCDFDataSet.Open outfile
|
||||
let startInd, nodeRange = subset
|
||||
printfn "%d" startInd
|
||||
let data = readTimeSeries varName fList (fArray: fvcomFile []) grd.Siglay startInd nodeRange
|
||||
let data =
|
||||
readTimeSeries varName fList (fArray: fvcomFile[]) grd.Siglay startInd nodeRange
|
||||
//printfn "%A" data[*, 0, 0]
|
||||
// Analysis of full length dataset
|
||||
let statsData = analyseScalarTimeSeries varName data startInd nodeRange
|
||||
let data2NcArray = Array2D.zeroCreate<float32> grd.Siglay nodeRange
|
||||
let ncout = tryOpenNcFile outfile
|
||||
statistics
|
||||
|> List.iter (fun s -> statsData
|
||||
|> Array.iter (fun stat -> let k = stat.IndZ
|
||||
let i = stat.Ind
|
||||
let d = getData stat s
|
||||
if Option.isSome d then
|
||||
data2NcArray[k, i-startInd] <- d.Value)
|
||||
//let ncout = tryOpenNcFile outfile
|
||||
ncout[varName + s].PutData([|0; startInd|], data2NcArray))
|
||||
|> List.iter (fun s ->
|
||||
statsData
|
||||
|> Array.iter (fun stat ->
|
||||
let k = stat.IndZ
|
||||
let i = stat.Ind
|
||||
let d = getData stat s
|
||||
if Option.isSome d then
|
||||
data2NcArray[k, i - startInd] <- d.Value
|
||||
)
|
||||
//let ncout = tryOpenNcFile outfile
|
||||
ncout[varName + s].PutData([| 0; startInd |], data2NcArray)
|
||||
)
|
||||
ncout.Commit()
|
||||
ncout.Dispose()
|
||||
|
||||
// Monthly stats
|
||||
let monthlyInds = getMonthlyInds fList.DTime
|
||||
let statsM =
|
||||
(Array.zip [|for j in 0..11 -> j|] monthlyInds)
|
||||
|> Array.choose (fun (i, mi) -> if Option.isSome mi then
|
||||
let dataM = extractMonthlyData mi.Value data
|
||||
let sM = analyseScalarTimeSeriesMonthly varName i dataM startInd nodeRange
|
||||
Some sM
|
||||
else
|
||||
None
|
||||
)
|
||||
(Array.zip [| for j in 0..11 -> j |] monthlyInds)
|
||||
|> Array.choose (fun (i, mi) ->
|
||||
if Option.isSome mi then
|
||||
let dataM = extractMonthlyData mi.Value data
|
||||
let sM = analyseScalarTimeSeriesMonthly varName i dataM startInd nodeRange
|
||||
Some sM
|
||||
else
|
||||
None
|
||||
)
|
||||
|> Array.fold Array.append [||]
|
||||
let ncoutM = tryOpenNcFile outfile
|
||||
writeMonthlyToNc ncoutM statsM startInd nodeRange grd |> ignore
|
||||
writeMonthlyToNc ncoutM statsM startInd nodeRange grd
|
||||
|> ignore
|
||||
ncoutM.Commit()
|
||||
ncoutM.Dispose()
|
||||
//printfn "%A" statsM[0]
|
||||
|
||||
0
|
||||
|
||||
let uvToSpeed (u: float32 [,,]) (v: float32 [,,]) =
|
||||
let uvToSpeed (u: float32[,,]) (v: float32[,,]) =
|
||||
let l1 = Array3D.length1 u
|
||||
let l2 = Array3D.length2 u
|
||||
let l3 = Array3D.length3 u
|
||||
|
||||
let sp = Array3D.zeroCreate l1 l2 l3
|
||||
u
|
||||
|> Array3D.iteri (fun i j k ui -> let vi = v[i, j, k]
|
||||
sp[i, j, k] <- sqrt((ui*ui) + (vi*vi))
|
||||
)
|
||||
|> Array3D.iteri (fun i j k ui ->
|
||||
let vi = v[i, j, k]
|
||||
sp[i, j, k] <- sqrt ((ui * ui) + (vi * vi))
|
||||
)
|
||||
sp
|
||||
|
||||
let uvToPolar (u: float32[,,]) (v: float32 [,,]) =
|
||||
let uvToPolar (u: float32[,,]) (v: float32[,,]) =
|
||||
let l1 = Array3D.length1 u
|
||||
let l2 = Array3D.length2 u
|
||||
let l3 = Array3D.length3 u
|
||||
@@ -286,21 +304,19 @@ let uvToPolar (u: float32[,,]) (v: float32 [,,]) =
|
||||
|
||||
let uvComplex =
|
||||
u
|
||||
|> Array3D.mapi (fun i j k ui -> let c1 = ui |> float
|
||||
let c2 = v[i, j, k] |> float
|
||||
MathNet.Numerics.Complex.mkRect (c1, c2)
|
||||
)
|
||||
|> Array3D.mapi (fun i j k ui ->
|
||||
let c1 = ui |> float
|
||||
let c2 = v[i, j, k] |> float
|
||||
MathNet.Numerics.Complex.mkRect (c1, c2)
|
||||
)
|
||||
let polarComplex =
|
||||
uvComplex
|
||||
|> Array3D.iteri (fun i j k c -> let magnitude = c.Magnitude
|
||||
let phase =
|
||||
if c.Phase > 0 then
|
||||
c.Phase
|
||||
else
|
||||
c.Phase + 2.0 * Math.PI
|
||||
sp[i, j, k] <- (magnitude |> float32)
|
||||
dr[i, j, k] <- (phase |> float32)
|
||||
)
|
||||
|> Array3D.iteri (fun i j k c ->
|
||||
let magnitude = c.Magnitude
|
||||
let phase = if c.Phase > 0 then c.Phase else c.Phase + 2.0 * Math.PI
|
||||
sp[i, j, k] <- (magnitude |> float32)
|
||||
dr[i, j, k] <- (phase |> float32)
|
||||
)
|
||||
sp, dr
|
||||
|
||||
|
||||
@@ -308,18 +324,18 @@ let uvToPolar (u: float32[,,]) (v: float32 [,,]) =
let uv2polarSingle (u: float32) (v: float32) =
let c1 = float u
let c2 = float v
let uvComplex = MathNet.Numerics.Complex.mkRect (c1, c2)
let uvComplex = MathNet.Numerics.Complex.mkRect (c1, c2)
let magnitude = uvComplex.Magnitude
let phase =
if uvComplex.Phase > 0 then
uvComplex.Phase
else
uvComplex.Phase + 2.0 * Math.PI
if uvComplex.Phase > 0 then
uvComplex.Phase
else
uvComplex.Phase + 2.0 * Math.PI
let sp = magnitude |> float32
let dr = phase |> float32
sp, dr
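uv2polarSingle turns a velocity component pair into a speed and a direction wrapped into [0, 2*pi). An equivalent sketch using System.Numerics instead of MathNet, with one worked value (name and library choice are assumptions, not part of the commit):

    let uvToPolarSketch (u: float32) (v: float32) =
        let c = System.Numerics.Complex(float u, float v)
        // Complex.Phase lies in (-pi, pi]; shift negative phases up by 2*pi.
        let phase = if c.Phase > 0.0 then c.Phase else c.Phase + 2.0 * System.Math.PI
        float32 c.Magnitude, float32 phase

    // uvToPolarSketch 0.f (-1.f) gives speed 1.0f and direction ~4.712f (3*pi/2).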
|
||||
let uvToPolar2D (u: float32 []) (v: float32 []) =
|
||||
let uvToPolar2D (u: float32[]) (v: float32[]) =
|
||||
let pol =
|
||||
Array.zip u v
|
||||
|> Array.map (fun (ui, vi) -> uv2polarSingle ui vi)
|
||||
@@ -328,9 +344,9 @@ let uvToPolar2D (u: float32 []) (v: float32 []) =
|
||||
|
||||
let rad2deg (rad: float32) =
|
||||
let pi = Math.PI |> float32
|
||||
(rad /(2.f * pi)) * 360.f
|
||||
(rad / (2.f * pi)) * 360.f
|
||||
|
||||
let analyzeCellSubset (subset: int * int) (fList: Filelist) (fArray: fvcomFile []) (grd: gridDims) (outfile: string) =
|
||||
let analyzeCellSubset (subset: int * int) (fList: Filelist) (fArray: fvcomFile[]) (grd: gridDims) (outfile: string) =
|
||||
//let ncout = NetCDFDataSet.Open outfile
|
||||
let startInd, nodeRange = subset
|
||||
printfn "%d" startInd
|
||||
@@ -344,27 +360,32 @@ let analyzeCellSubset (subset: int * int) (fList: Filelist) (fArray: fvcomFile [
|
||||
let uData2NcArray = Array2D.zeroCreate<float32> grd.Siglay nodeRange
|
||||
let ncout = tryOpenNcFile outfile
|
||||
statistics
|
||||
|> List.iter (fun s -> uStatsData
|
||||
|> Array.iter (fun stat -> let k = stat.IndZ
|
||||
let i = stat.Ind
|
||||
let d = getData stat s
|
||||
if Option.isSome d then
|
||||
uData2NcArray[k, i-startInd] <- d.Value)
|
||||
//let ncout = tryOpenNcFile outfile
|
||||
ncout["u" + s].PutData([|0; startInd|], uData2NcArray))
|
||||
|> List.iter (fun s ->
|
||||
uStatsData
|
||||
|> Array.iter (fun stat ->
|
||||
let k = stat.IndZ
|
||||
let i = stat.Ind
|
||||
let d = getData stat s
|
||||
if Option.isSome d then
|
||||
uData2NcArray[k, i - startInd] <- d.Value
|
||||
)
|
||||
//let ncout = tryOpenNcFile outfile
|
||||
ncout["u" + s].PutData([| 0; startInd |], uData2NcArray)
|
||||
)
|
||||
ncout.Commit()
|
||||
ncout.Dispose()
|
||||
// Monthly stats
|
||||
let monthlyInds = getMonthlyInds fList.DTime
|
||||
let uStatsM =
|
||||
(Array.zip [|for j in 0..11 -> j|] monthlyInds)
|
||||
|> Array.choose (fun (i, mi) -> if Option.isSome mi then
|
||||
let dataM = extractMonthlyData mi.Value data1
|
||||
let sM = analyseScalarTimeSeriesMonthly "u" i dataM startInd nodeRange
|
||||
Some sM
|
||||
else
|
||||
None
|
||||
)
|
||||
(Array.zip [| for j in 0..11 -> j |] monthlyInds)
|
||||
|> Array.choose (fun (i, mi) ->
|
||||
if Option.isSome mi then
|
||||
let dataM = extractMonthlyData mi.Value data1
|
||||
let sM = analyseScalarTimeSeriesMonthly "u" i dataM startInd nodeRange
|
||||
Some sM
|
||||
else
|
||||
None
|
||||
)
|
||||
|> Array.fold Array.append [||]
|
||||
//let ncoutM = tryOpenNcFile outfile
|
||||
//writeMonthlyToNc ncout uStatsM startInd nodeRange grd |> ignore
|
||||
@@ -373,35 +394,42 @@ let analyzeCellSubset (subset: int * int) (fList: Filelist) (fArray: fvcomFile [
|
||||
let vData2NcArray = Array2D.zeroCreate<float32> grd.Siglay nodeRange
|
||||
let ncoutV = tryOpenNcFile outfile
|
||||
statistics
|
||||
|> List.iter (fun s -> vStatsData
|
||||
|> Array.iter (fun stat -> let k = stat.IndZ
|
||||
let i = stat.Ind
|
||||
let d = getData stat s
|
||||
if Option.isSome d then
|
||||
vData2NcArray[k, i-startInd] <- d.Value)
|
||||
|> List.iter (fun s ->
|
||||
vStatsData
|
||||
|> Array.iter (fun stat ->
|
||||
let k = stat.IndZ
|
||||
let i = stat.Ind
|
||||
let d = getData stat s
|
||||
if Option.isSome d then
|
||||
vData2NcArray[k, i - startInd] <- d.Value
|
||||
)
|
||||
|
||||
ncoutV["v" + s].PutData([|0; startInd|], vData2NcArray))
|
||||
ncoutV["v" + s].PutData([| 0; startInd |], vData2NcArray)
|
||||
)
|
||||
ncoutV.Commit()
|
||||
ncoutV.Dispose()
|
||||
// Monthly stats
|
||||
//let monthlyInds = getMonthlyInds fList.DTime
|
||||
let vStatsM =
|
||||
(Array.zip [|for j in 0..11 -> j|] monthlyInds)
|
||||
|> Array.choose (fun (i, mi) -> if Option.isSome mi then
|
||||
let dataM = extractMonthlyData mi.Value data2
|
||||
let sM = analyseScalarTimeSeriesMonthly "v" i dataM startInd nodeRange
|
||||
Some sM
|
||||
else
|
||||
None
|
||||
)
|
||||
(Array.zip [| for j in 0..11 -> j |] monthlyInds)
|
||||
|> Array.choose (fun (i, mi) ->
|
||||
if Option.isSome mi then
|
||||
let dataM = extractMonthlyData mi.Value data2
|
||||
let sM = analyseScalarTimeSeriesMonthly "v" i dataM startInd nodeRange
|
||||
Some sM
|
||||
else
|
||||
None
|
||||
)
|
||||
|> Array.fold Array.append [||]
|
||||
//writeMonthlyToNc ncout vStatsM startInd nodeRange grd |> ignore
|
||||
|
||||
// Replace u and v in data1 and data2 with speed and direction repectively
|
||||
data1
|
||||
|> Array3D.iteri (fun i j k c -> let sp, dr = uv2polarSingle c data2[i, j, k]
|
||||
data1[i, j, k] <- sp
|
||||
data2[i, j, k] <- dr)
|
||||
|> Array3D.iteri (fun i j k c ->
|
||||
let sp, dr = uv2polarSingle c data2[i, j, k]
|
||||
data1[i, j, k] <- sp
|
||||
data2[i, j, k] <- dr
|
||||
)
|
||||
|
||||
|
||||
//printfn "%A" data2[*, 0, 0]
|
||||
@@ -412,79 +440,89 @@ let analyzeCellSubset (subset: int * int) (fList: Filelist) (fArray: fvcomFile [
|
||||
let spData2NcArray = Array2D.zeroCreate<float32> grd.Siglay nodeRange
|
||||
let ncoutSP = tryOpenNcFile outfile
|
||||
statistics
|
||||
|> List.iter (fun s -> spStatsData
|
||||
|> Array.iter (fun stat -> let k = stat.IndZ
|
||||
let i = stat.Ind
|
||||
let d = getData stat s
|
||||
if Option.isSome d then
|
||||
spData2NcArray[k, i-startInd] <- d.Value)
|
||||
|> List.iter (fun s ->
|
||||
spStatsData
|
||||
|> Array.iter (fun stat ->
|
||||
let k = stat.IndZ
|
||||
let i = stat.Ind
|
||||
let d = getData stat s
|
||||
if Option.isSome d then
|
||||
spData2NcArray[k, i - startInd] <- d.Value
|
||||
)
|
||||
|
||||
ncoutSP["sp" + s].PutData([|0; startInd|], spData2NcArray))
|
||||
ncoutSP["sp" + s].PutData([| 0; startInd |], spData2NcArray)
|
||||
)
|
||||
|
||||
ncoutSP.Commit()
|
||||
ncoutSP.Dispose()
|
||||
// Monthly stats
|
||||
//let monthlyInds = getMonthlyInds fList.DTime
|
||||
let spStatsM =
|
||||
(Array.zip [|for j in 0..11 -> j|] monthlyInds)
|
||||
|> Array.choose (fun (i, mi) -> if Option.isSome mi then
|
||||
let dataM = extractMonthlyData mi.Value data1
|
||||
let sM = analyseScalarTimeSeriesMonthly "sp" i dataM startInd nodeRange
|
||||
Some sM
|
||||
else
|
||||
None
|
||||
)
|
||||
(Array.zip [| for j in 0..11 -> j |] monthlyInds)
|
||||
|> Array.choose (fun (i, mi) ->
|
||||
if Option.isSome mi then
|
||||
let dataM = extractMonthlyData mi.Value data1
|
||||
let sM = analyseScalarTimeSeriesMonthly "sp" i dataM startInd nodeRange
|
||||
Some sM
|
||||
else
|
||||
None
|
||||
)
|
||||
|> Array.fold Array.append [||]
|
||||
let ncoutM = tryOpenNcFile outfile
|
||||
writeMonthlyToNc ncoutM uStatsM startInd nodeRange grd |> ignore
|
||||
writeMonthlyToNc ncoutM vStatsM startInd nodeRange grd |> ignore
|
||||
writeMonthlyToNc ncoutM spStatsM startInd nodeRange grd |> ignore
|
||||
writeMonthlyToNc ncoutM uStatsM startInd nodeRange grd
|
||||
|> ignore
|
||||
writeMonthlyToNc ncoutM vStatsM startInd nodeRange grd
|
||||
|> ignore
|
||||
writeMonthlyToNc ncoutM spStatsM startInd nodeRange grd
|
||||
|> ignore
|
||||
ncoutM.Commit()
|
||||
ncoutM.Dispose()

printfn "%s" "Start wt stats"
let waterTransportStats = computeWaterTransport data1 data2 startInd nodeRange
let waterTransportStats =
computeWaterTransport data1 data2 startInd nodeRange
let ncoutWT = tryOpenNcFile outfile
ncoutWT["waterTransport"].PutData([|0; 0; startInd|], waterTransportStats)
ncoutWT["waterTransport"].PutData([| 0; 0; startInd |], waterTransportStats)
ncoutWT.Commit()
ncoutWT.Dispose()

let inds = [|for j in 0..11 -> j|]
let inds = [| for j in 0..11 -> j |]
let m = Array.zip inds monthlyInds
m
|> Array.choose (fun (i, mi) -> if Option.isSome mi then
let dataM_sp = extractMonthlyData mi.Value data1
let dataM_dr = extractMonthlyData mi.Value data2
let sM = computeWaterTransport dataM_sp dataM_dr startInd nodeRange
let ncoutWTM= tryOpenNcFile outfile
let m = Array4D.zeroCreate 1 (Array3D.length1 sM) (Array3D.length2 sM) (Array3D.length3 sM)
m[0, *, *, *] <- sM
ncoutWTM["waterTransportMonthly"].PutData([|i; 0; 0; 0|], m)
ncoutWTM.Commit()
ncoutWTM.Dispose()
None
else
None
)
|> Array.choose (fun (i, mi) ->
if Option.isSome mi then
let dataM_sp = extractMonthlyData mi.Value data1
let dataM_dr = extractMonthlyData mi.Value data2
let sM = computeWaterTransport dataM_sp dataM_dr startInd nodeRange
let ncoutWTM = tryOpenNcFile outfile
let m =
Array4D.zeroCreate 1 (Array3D.length1 sM) (Array3D.length2 sM) (Array3D.length3 sM)
m[0, *, *, *] <- sM
ncoutWTM["waterTransportMonthly"].PutData([| i; 0; 0; 0 |], m)
ncoutWTM.Commit()
ncoutWTM.Dispose()
None
else
None
)
|> ignore

0

let multiply (a: float32 []) (n: float32) =
a
|> Array.map (fun x -> n * x)
let multiply (a: float32[]) (n: float32) = a |> Array.map (fun x -> n * x)

let multiply2D (a: float32 [,]) (n: float32) =
a
|> Array2D.map (fun x -> n * x)
let multiply2D (a: float32[,]) (n: float32) = a |> Array2D.map (fun x -> n * x)

let readBottomSpDr (ncfile: string) (timeInd: int) (nele: int) (nSiglays: int) =
printfn "%s%s%d" ncfile " " timeInd
let nc = NetCDFDataSet.Open(ncfile, openMode=Microsoft.Research.Science.Data.ResourceOpenMode.ReadOnly)
let u3d = nc["u"].GetData([|timeInd; (nSiglays-1); 0|], [|1; 1; nele|]) :?> float32 [,,]
let nc =
NetCDFDataSet.Open(ncfile, openMode = Microsoft.Research.Science.Data.ResourceOpenMode.ReadOnly)
let u3d =
nc["u"].GetData([| timeInd; (nSiglays - 1); 0 |], [| 1; 1; nele |]) :?> float32[,,]
let u = multiply u3d[0, 0, *] 100.f
let v3d = nc["v"].GetData([|timeInd; (nSiglays-1); 0|], [|1; 1; nele|]) :?> float32 [,,]
let v3d =
nc["v"].GetData([| timeInd; (nSiglays - 1); 0 |], [| 1; 1; nele |]) :?> float32[,,]
let v = multiply v3d[0, 0, *] 100.f
nc.Dispose()

@@ -506,75 +544,70 @@ let readBottomSpDr (ncfile: string) (timeInd: int) (nele: int) (nSiglays: int) =
// //printfn "%A" vel
// vel

let findBin (bins: float32 []) (sp: float32) =
let findBin (bins: float32[]) (sp: float32) =
let ind =
bins
|> Array.tryFindIndex(fun elm -> abs(elm-sp) <= 1.5f)
|> Array.tryFindIndex (fun elm -> abs (elm - sp) <= 1.5f)

if ind.IsNone then
bins.Length
else
ind.Value
if ind.IsNone then bins.Length else ind.Value

let findSector (sectors: float32 []) (dr: float32) =
let findSector (sectors: float32[]) (dr: float32) =
let ind =
sectors
|> Array.findIndex(fun elm -> abs(elm-rad2deg(dr)) <= 9.f)
|> Array.findIndex (fun elm -> abs (elm - rad2deg (dr)) <= 9.f)
ind

let createBins =
let bins = [|for i in 1.5 .. 3.0 .. 200 -> i|] |> Array.map float32
let bins =
[| for i in 1.5..3.0..200 -> i |]
|> Array.map float32
let lBins = bins.Length
bins[lBins-1] <- 200.f
bins[lBins - 1] <- 200.f
bins

let createSectors =
[|for i in 9 .. 18 .. 360 -> i|] |> Array.map float32
[| for i in 9..18..360 -> i |]
|> Array.map float32

let addCountsTimeStep (counts: int [,,]) (bins: float32 []) (sectors: float32 []) (vel: (float32 * float32) []) =
let addCountsTimeStep (counts: int[,,]) (bins: float32[]) (sectors: float32[]) (vel: (float32 * float32)[]) =
vel
|> Array.iteri (fun i (spi, dri) -> let indBin = findBin bins spi
let indSector = findSector sectors dri
counts[i, indBin, indSector] <- (counts[i, indBin, indSector] + 1)
printfn "%d" counts[i, indBin, indSector])
|> Array.iteri (fun i (spi, dri) ->
let indBin = findBin bins spi
let indSector = findSector sectors dri
counts[i, indBin, indSector] <- (counts[i, indBin, indSector] + 1)
printfn "%d" counts[i, indBin, indSector]
)

let findBinsAndSectors (bins: float32 []) (sectors: float32 []) (vel: (float32* float32) []) =
let findBinsAndSectors (bins: float32[]) (sectors: float32[]) (vel: (float32 * float32)[]) =
let bs =
vel
|> Array.map (fun (spi, dri) -> let b = findBin bins spi
let s = findSector sectors dri
b, s)
|> Array.map (fun (spi, dri) ->
let b = findBin bins spi
let s = findSector sectors dri
b, s
)
bs
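
Taken together, createBins gives speed-bin centres 1.5, 4.5, ... (bins 3 units wide, hence findBin's 1.5f tolerance, with an overflow slot at index bins.Length, cf. the bins.Length + 1 sizing in countSp below), and createSectors gives direction-sector centres every 18 degrees (hence findSector's 9.f tolerance on the rad2deg-converted direction). A small usage sketch with illustrative values, not taken from this commit:

// Sketch, not part of this commit: classifying one (speed, direction) sample.
let exampleBins = createBins
let exampleSectors = createSectors
let binIndex = findBin exampleBins 11.0f              // 11.0 is within 1.5 of centre 10.5 -> index 3
let sectorIndex = findSector exampleSectors 0.7854f   // ~45 degrees in radians -> sector centred at 45 -> index 2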

let countSp (grd: gridDims) (fList: (string * DateTime * int) []) (bins: float32 []) (sectors: float32 []) =
let counts = Array3D.zeroCreate grd.Nele (bins.Length+1) sectors.Length
let countSp (grd: gridDims) (fList: (string * DateTime * int)[]) (bins: float32[]) (sectors: float32[]) =
let counts = Array3D.zeroCreate grd.Nele (bins.Length + 1) sectors.Length
//let addCounts = addCountsTimeStep counts bins sectors

let inds =
fList
|> Array.map (fun (p, _, i) -> readBottomSpDr p i grd.Nele grd.Siglay)
|> Array.map (fun vel -> findBinsAndSectors bins sectors vel)
//|> Array.iter (fun inds -> Array.iteri (fun ni (bi, si) -> counts[ni, bi, si] <- counts[ni, bi, si] + 1)|> ignore)
//|> Array.iter (fun inds -> Array.iteri (fun ni (bi, si) -> counts[ni, bi, si] <- counts[ni, bi, si] + 1)|> ignore)

inds
|> Array.iter (fun ii -> ii
|> Array.iteri (fun ni (bi, si) -> counts[ni, bi, si] <- (counts[ni, bi, si] + 1))|> ignore)
|> Array.iter (fun ii ->
ii
|> Array.iteri (fun ni (bi, si) -> counts[ni, bi, si] <- (counts[ni, bi, si] + 1))
|> ignore
)

//printfn "%A" counts[10, *, *]
bins, sectors, counts

bins, sectors, counts
1879 src/Variables.fs
File diff suppressed because it is too large