Compare commits
167 Commits
SHA1 | Author | Date | |
---|---|---|---|
08b2f5301f | |||
3f3278b73e | |||
9943452ff9 | |||
8feb7b8d56 | |||
b61010b99a | |||
a3c2d17c69 | |||
a73acb8b93 | |||
d88b67ea2f | |||
6925598bf2 | |||
2e84afa87f | |||
784d586f48 | |||
1117882ced | |||
37ed8bfa83 | |||
0d748fb0bf | |||
05e1539134 | |||
ea97070fea | |||
dae9c1cf45 | |||
4c3b736efb | |||
cb923b96c9 | |||
c2ae42bd7a | |||
c1512e67d1 | |||
d359d8bfab | |||
a34f83e684 | |||
eaaf0f8931 | |||
0da6c3db75 | |||
33b52f69ec | |||
cc3434c7e3 | |||
10e854583f | |||
71a074ca05 | |||
09a695d1d9 | |||
37333fc120 | |||
a6fe8fe799 | |||
d815419c7b | |||
88fe584fe1 | |||
27ab3711a6 | |||
2fd1764cce | |||
204ba078a0 | |||
9033f62a59 | |||
3ceb355773 | |||
b08d025567 | |||
2bee7f6bea | |||
52c58ea436 | |||
32c9d13e71 | |||
7440fc2610 | |||
f1a56b0968 | |||
452695c8f5 | |||
614b4426c2 | |||
21e9525e76 | |||
8df03c455c | |||
486281525a | |||
4caed45f96 | |||
55fd39f09d | |||
38f8831275 | |||
4eec426853 | |||
1fdc1fc456 | |||
4eb3a9d2a7 | |||
01933df053 | |||
8910fced57 | |||
20a4e1a89d | |||
8094d1c063 | |||
93fb0b93f0 | |||
69510991dc | |||
c4afd77777 | |||
c34a76981e | |||
ba9b37f512 | |||
4b38af88a4 | |||
6e0049c4d2 | |||
a38bd8a4d7 | |||
2288f4edd0 | |||
eab06e2f6c | |||
432a9472fe | |||
c3ea064bf0 | |||
04b27dc5df | |||
dfd2a00541 | |||
3086885655 | |||
bf79e97462 | |||
06f3247b08 | |||
d3495f874e | |||
bcf4e768ff | |||
a6426da6e1 | |||
2c9e178554 | |||
ba19115390 | |||
2c1e1de7d1 | |||
88be30485f | |||
71b54c1660 | |||
9b82a2c4e2 | |||
1460d2285b | |||
52c083a3d9 | |||
e604806f81 | |||
c25857fff9 | |||
025f265705 | |||
80eae5d82e | |||
d2f96e40ee | |||
2a84dbb1dc | |||
7e23d869a8 | |||
8550f59ee6 | |||
903c71c457 | |||
d60eb180e2 | |||
b65d3479db | |||
c21571c913 | |||
d790793e21 | |||
fa2901c4bb | |||
56aebe7a62 | |||
e9c2fe63de | |||
35ae742615 | |||
1cb2d264ff | |||
f11ae4cfd7 | |||
cb37477e7b | |||
48402ae6f9 | |||
9a357ff79c | |||
e8a59924ef | |||
d6a10ac539 | |||
6480f793c4 | |||
7991864bf4 | |||
3582e65927 | |||
53321d9640 | |||
30de25b577 | |||
aee0bf1515 | |||
d3a52373f1 | |||
39f29fd029 | |||
5f7db0b4d0 | |||
d6c2679416 | |||
75c89c39ec | |||
a9fd9f3624 | |||
56afd5f2cf | |||
75580d564d | |||
c89e63c39f | |||
9e2afc4916 | |||
4d34cbdb00 | |||
59459ac604 | |||
f18db1c5fa | |||
03246f306f | |||
3fb0c8238c | |||
fc58827d7e | |||
024a7d52d7 | |||
080e1b56c5 | |||
7189583c49 | |||
29940da09c | |||
93bcf45054 | |||
19e6ec994b | |||
0474d4561b | |||
d7d53d9a77 | |||
b7445a668a | |||
3a4f023df2 | |||
afaeae7019 | |||
a55498eaed | |||
0888ae2045 | |||
50020e7a44 | |||
70496ddc36 | |||
3a33bed423 | |||
6df89a7a38 | |||
4c1540685d | |||
ab46f7d595 | |||
6d816a17bc | |||
0bcea916a2 | |||
cf61b1504c | |||
8b98b8fe2f | |||
77be7652a5 | |||
e3881c7df2 | |||
611e80f73c | |||
6448db8f2c | |||
369cb7fde5 | |||
cf49268a25 | |||
336b6b7ca4 | |||
c7a6650fb4 | |||
29b9dfbae2 | |||
012292ed77 |
@@ -5,16 +5,34 @@ jobs:
  lint:
    name: golangci-lint
    runs-on: ubuntu-latest

    container:
      env:
        SSH_AUTH_SOCK: /tmp/ssh.sock
        ENVIRONMENT: dev
      volumes:
        - "/etc/timezone:/etc/timezone:ro"
        - "/etc/localtime:/etc/localtime:ro"
        - "/etc/gitconfig:/etc/gitconfig:ro"
        - "/${{ env.SSH_AUTH_SOCK }}:/tmp/ssh.sock"
        - "/${{ vars.GITEA_WORKSPACE }}:/go/src"
        - "/opt/gitea/act_runner/.ssh/known_hosts:/root/.ssh/known_hosts"
        - "/opt/gitea/act_runner/.gitconfig:/root/.gitconfig:ro"
        - "/opt/gitea/act_runner/.git-credentials:/root/.git-credentials:ro"
        - "/etc/ssl/certs:/etc/ssl/certs:ro"
      options: --cpus 20
    steps:
      - uses: actions/setup-go@v4
        with:
          go-version: "1.21.1"
          go-version: "1.22.1"
          cache: false

      - name: Check out code
        uses: actions/checkout@v3

      - run: apt-get -y update && apt-get -y install libluajit-5.1-dev
      - name: Get packages
        run: go get ./...

      - name: Lint
        uses: golangci/golangci-lint-action@v3
        with:
@@ -1,18 +1,86 @@
name: Releases

on:
  push:
  create:
    tags:
      - '*'
      - 'v*'

jobs:

  build:
  build-fedora:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    container:
      image: fedora:latest
      env:
        GOPATH: /
        SSH_AUTH_SOCK: /tmp/ssh.sock
        ENVIRONMENT: dev
      volumes:
        - "/etc/timezone:/etc/timezone:ro"
        - "/etc/localtime:/etc/localtime:ro"
        - "/etc/gitconfig:/etc/gitconfig:ro"
        - "/opt/gitea/act_runner/.ssh/known_hosts:/root/.ssh/known_hosts"
        - "/opt/gitea/act_runner/.gitconfig:/root/.gitconfig:ro"
        - "/opt/gitea/act_runner/.git-credentials:/root/.git-credentials:ro"
        - "/etc/ssl/certs:/etc/ssl/certs:ro"
      options: --cpus 4
    steps:
      - uses: actions/checkout@v3
      - uses: ncipollo/release-action@v1
        with:
          artifacts: "jx"
      - run: dnf install -y nodejs git make
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - run: dnf install -y dnf-plugins-core rpm-build rpmdevtools
      - run: echo "$(pwd)/go/bin" >> $GITHUB_PATH
      - run: dnf builddep -y build/jx.spec
      - run: make build
      - run: make rpm
      - name: Archive binary
        uses: actions/upload-artifact@v3
        with:
          name: "jx-fedora-40"
          path: "jx"
      - uses: actions/upload-artifact@v3
        with:
          name: "RPMS"
          path: "/root/rpmbuild/**/*.rpm"
      - uses: ncipollo/release-action@v1
        with:
          artifacts: "jx,/root/rpmbuild/**/*.rpm"
      - run: echo "This job's status is ${{ job.status }}."
  build-ubuntu-focal:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    container:
      image: ubuntu:focal
      env:
        GOPATH: /
        SSH_AUTH_SOCK: /tmp/ssh.sock
        ENVIRONMENT: dev
        DEBIAN_FRONTEND: noninteractive
        TZ: America/Los_Angeles
      volumes:
        - "/etc/gitconfig:/etc/gitconfig:ro"
        - "/opt/gitea/act_runner/.ssh/known_hosts:/root/.ssh/known_hosts"
        - "/opt/gitea/act_runner/.gitconfig:/root/.gitconfig:ro"
        - "/opt/gitea/act_runner/.git-credentials:/root/.git-credentials:ro"
      options: --cpus 4
    steps:
      - run: apt-get -y update && apt-get install -y tzdata
      - run: apt-get install -y nodejs git make
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - run: echo "$(pwd)/go/bin" >> $GITHUB_PATH
      - run: make ubuntu-deps
      - run: make build
      - run: make deb
      - uses: actions/upload-artifact@v3
        with:
          name: "jx-ubuntu-focal"
          path: "jx"
      - uses: ncipollo/release-action@v1
        with:
          artifacts: "jx"
@@ -9,18 +9,20 @@ jobs:
  test:
    runs-on: ubuntu-latest
    container:
      image: registry.cv.mazarbul.net/declarative/build-golang:1.21.1-alpine-x86_64
      image: rosskeenhouse/build-golang:1.22.6-alpine
      env:
        GOPATH: /
        SSH_AUTH_SOCK: /tmp/ssh.sock
        ENVIRONMENT: dev
      volumes:
        - ${{ env.HOME }}/.ssh/known_hosts:/root/.ssh/known_hosts
        - ${{ env.SSH_AUTH_SOCK }}:/tmp/ssh.sock
        - /etc/gitconfig:/etc/gitconfig
        - /etc/ssl/certs:/etc/ssl/certs
        - ${{ vars.GITEA_WORKSPACE }}:/go/src
      options: --cpus 1
        - "/etc/timezone:/etc/timezone:ro"
        - "/etc/localtime:/etc/localtime:ro"
        - "/etc/gitconfig:/etc/gitconfig:ro"
        - "/opt/gitea/act_runner/.ssh/known_hosts:/root/.ssh/known_hosts"
        - "/opt/gitea/act_runner/.gitconfig:/root/.gitconfig:ro"
        - "/opt/gitea/act_runner/.git-credentials:/root/.git-credentials:ro"
        - "/etc/ssl/certs:/etc/ssl/certs:ro"
      options: --cpus 10
    steps:
      - run: apk add --no-cache nodejs
      - name: Check out repository code
@@ -44,11 +46,11 @@ jobs:
      - name: Archive code coverage results
        uses: actions/upload-artifact@v3
        with:
          name: code-coverage-report
          path: artifacts/code-coverage.html
          name: "code-coverage-report"
          path: "artifacts/code-coverage.html"
      - name: Archive binary
        uses: actions/upload-artifact@v3
        with:
          name: decl
          path: decl
          name: "jx-alpine"
          path: "jx"
      - run: echo "This job's status is ${{ job.status }}."
38 Makefile
@@ -1,6 +1,10 @@
export PATH := $(PATH):$(HOME)/go/bin
IMAGE?=fedora:latest
#LDFLAGS?=--ldflags '-extldflags "-static -ldl -lc -lm"' --ldflags="-X 'main.commit=$(shell git rev-parse HEAD)' -X 'main.version=$(shell git describe --tags)' -X 'main.date=$(shell date '+%Y-%m-%d %T.%s%z')'"
export CC?=muscl-gcc
LDFLAGS?=--ldflags '-extldflags "-static"' --ldflags="-X 'main.commit=$(shell git rev-parse HEAD)' -X 'main.version=$(shell git describe --tags)' -X 'main.date=$(shell date '+%Y-%m-%d %T.%s%z')'"
export CGO_ENABLED=0

export CGO_ENABLED=1
VERSION?=$(shell git describe --tags | sed -e 's/^v//' -e 's/-/_/g')
.PHONY=jx-cli

build: jx-cli
@@ -11,3 +15,33 @@ jx-cli:
test: jx-cli
	go test -coverprofile=artifacts/coverage.profile ./...
	go tool cover -html=artifacts/coverage.profile -o artifacts/code-coverage.html
fedora-deps:
	./jx apply build/rpm.jx.yaml
	spectool -g -R build/jx.spec
	curl -L -o - https://go.dev/dl/go1.22.3.linux-amd64.tar.gz | tar -zxvf -
rpm: fedora-deps
	echo Building $(VERSION)
	rpmbuild -ba --define='version $(VERSION)' build/jx.spec
ubuntu-deps:
	apt-get update -y
	apt-get install -y git make libluajit-5.1-dev iproute2 gcc curl
	curl -L -o - https://go.dev/dl/go1.22.3.linux-amd64.tar.gz | tar -zxvf -
deb: ubuntu-deps
	:
run:
	docker run -it -v $(HOME)/.git-credentials:/root/.git-credentials -v $(HOME)/.gitconfig:/root/.gitconfig -v /var/run/docker.sock:/var/run/docker.sock -v $(shell pwd):/src $(IMAGE) sh
run-alpine:
	docker run -it -v $(HOME)/.git-credentials:/root/.git-credentials -v $(HOME)/.gitconfig:/root/.gitconfig -v /var/run/docker.sock:/var/run/docker.sock -v $(shell pwd):/src golang:1.22.6-alpine sh
build-container:
	docker run -it -v $(HOME)/.git-credentials:/root/.git-credentials -v $(HOME)/.gitconfig:/root/.gitconfig -v /var/run/docker.sock:/var/run/docker.sock -v $(shell pwd):/src -w /src rosskeenhouse/build-golang:1.22.6-alpine sh
clean:
	go clean -modcache
	rm jx
go-deps:
	go install google.golang.org/protobuf/cmd/protoc-gen-go@latest
	go install golang.org/x/vuln/cmd/govulncheck@latest
lint:
	golangci-lint run --verbose ./...
vulncheck:
	govulncheck ./...
	go vet ./...
77 README.md
@@ -8,16 +8,49 @@ These tools work with YAML descriptions of resources (E.g. files, users, containers

* Go >= 1.21.1

# Releases

**<span style="color:red">v0 releases are unstable and changes may be made to interfaces and specifications.</span>**

Use at your own risk.

# JX Documents

The JX YAML specification is a simple way to describe system resources. The two main components are `configurations` and `resources`.

## Configurations

```
configurations:
  - name: myhttpconfig
    values:
      http_user: jex
      http_password: sample
  - name: myhttptoken
    values:
      authorization_token: abcde123456789
```

## Resources

```
resources:
  - type: http
    config: myhttptoken
    transition: read
    attributes:
      endpoint: https://myserver/v1/api
```
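
A complete document that can be handed to `jx apply` is just such a `resources` list. For instance, the file resource from `examples/file.jx.yaml` later in this change (values copied from that example):

```
resources:
  - type: file
    transition: create
    attributes:
      path: /tmp/foo.txt
      owner: nobody
      group: nobody
      mode: 0644
      state: present
```

Saved as, say, `file.jx.yaml`, it is provisioned with `jx apply file.jx.yaml`.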

# Testing

Testing the current version involves checking out main and building.
Testing the current version involves checking out main and building inside of the alpine go build container.

```
git clone https://gitea.rosskeen.house/doublejynx/jx.git

make test
make build-container

make build
make test
```

# Command-line
@@ -30,6 +63,9 @@ Create the resources specified in a resource document HTTP endpoint.

`jx apply http://localhost/resources`

Convert a tar archive into resource definitions and apply them (extracts the contents of a tar).

`jx apply https://gitea.rosskeen.house/doublejynx/jx/archive/v0.2.1.tar.gz`

# Read resource state

@@ -37,23 +73,44 @@ Read the state of an existing resource (URI) and generate a YAML representation

`jx import -resource file://COPYRIGHT`

![Import Resource](md-images/import-resource.gif)
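
The emitted document follows the same resource schema, so importing a file yields roughly the following shape (a sketch only; the owner, group, and mode values shown here are illustrative, the real output reflects the file on disk):

```
resources:
  - type: file
    attributes:
      # illustrative values, not actual command output
      path: COPYRIGHT
      owner: root
      group: root
      mode: 0644
```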
## Importing resources from different sources

JX supports importing resource data from various source types, including filesystem directories, tar archive contents, containers, iptables chains, and installed packages.

Import system packages using the debian package type, and output the resource documents in JSON format.

`jx import --output json:// package://?type=deb`

Import the contents of a tar archive into a resource document.

`jx import ./test.tgz`

Import resource documents from multiple sources.

`jx import repo/packages/build.jx.yaml ./gopkgs.tar.gz`

![Import Doc](md-images/jx-import.gif)

Read a resource document from an http endpoint.

`jx import http://localhost/resources`

# Diff resource documents

![Diff Resources](md-images/jx-diff.gif)
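
The `diff` subcommand accepts one or two document sources; per the `DiffSubCommand` logic in `cmd/cli/main.go` below, two sources are compared against each other, while a single source is compared against the resource state read back from the system. As a sketch, `jx diff examples/file.jx.yaml modified.jx.yaml` (the second file name and the mode change are illustrative) would report the attributes that differ against a variant such as:

```
resources:
  - type: file
    transition: create
    attributes:
      path: /tmp/foo.txt
      owner: nobody
      group: nobody
      mode: 0755   # changed from 0644 in examples/file.jx.yaml
      state: present
```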
# Examples

Resources:

* [file](examples/file.yaml) [schema](internal/resource/schemas/file.jsonschema)
* [http](examples/http.yaml) [schema](internal/resource/schemas/http.jsonschema)
* [user](examples/user.yaml) [schema](internal/resource/schemas/user.jsonschema)
* [package](examples/package.yaml) [schema](internal/resource/schemas/package.jsonschema)
* [container](examples/container.yaml) [schema](internal/resource/schemas/container.jsonschema)
* [network_route](examples/network_route.yaml) [schema](internal/resource/schemas/network_route.jsonschema)
* [container](examples/container.jx.yaml) [schema](internal/resource/schemas/container.schema.json)
* [container-image](examples/container-image.jx.yaml) [schema](internal/resource/schemas/container-image.schema.json)
* [container-network](examples/container-network.jx.yaml) [schema](internal/resource/schemas/container-network.schema.json)
* [exec](examples/exec.jx.yaml) [schema](internal/resource/schemas/exec.schema.json)
* [file](examples/file.jx.yaml) [schema](internal/resource/schemas/file.schema.json)
* [group](examples/group.jx.yaml) [schema](internal/resource/schemas/group.schema.json)
* [http](examples/http.jx.yaml) [schema](internal/resource/schemas/http.schema.json)
* [iptable](examples/iptable.jx.yaml) [schema](internal/resource/schemas/iptable.schema.json)
* [network_route](examples/network_route.jx.yaml) [schema](internal/resource/schemas/network_route.schema.json)
* [package](examples/package.jx.yaml) [schema](internal/resource/schemas/package.schema.json)
* [user](examples/user.jx.yaml) [schema](internal/resource/schemas/user.schema.json)
16 build/docker/container-image-build.jx.yaml (new file)
@@ -0,0 +1,16 @@
---
imports:
  - /etc/jx/dockerhub.jx.yaml
resources:
  - type: container-image
    config: dockerhub
    transition: update
    attributes:
      name: rosskeenhouse/build-golang:1.22.6-alpine
      push: true
      dockerfile: |-
        FROM golang:1.22.6-alpine
        COPY . /opt/build
        WORKDIR /opt/build
        RUN ./jx apply ./alpine.jx.yaml
      contextref: file://build/docker/golang/build
36 build/docker/golang/build/alpine.jx.yaml (new file)
@@ -0,0 +1,36 @@
imports:
  - file://common.jx.yaml
resources:
  - type: package
    transition: create
    attributes:
      name: musl-dev
  - type: package
    transition: create
    attributes:
      name: luajit
      version: =~2.2
  - type: package
    transition: create
    attributes:
      name: luajit-dev
  - type: package
    transition: create
    attributes:
      name: protobuf
  - type: package
    transition: create
    attributes:
      name: openjdk8
  - type: package
    transition: create
    attributes:
      name: docker
  - type: package
    transition: create
    attributes:
      name: openssh-client
  - type: package
    transition: create
    attributes:
      name: golangci-lint
45 build/docker/golang/build/common.jx.yaml (new file)
@@ -0,0 +1,45 @@
resources:
  - type: file
    transition: create
    attributes:
      path: /usr/local/bin/antlr-4.10-complete.jar
      sourceref: https://www.antlr.org/download/antlr-4.10-complete.jar
      owner: root
      group: root
      mode: 0755
  - type: package
    transition: create
    attributes:
      name: make
  - type: package
    transition: create
    attributes:
      name: openssl
  - type: package
    transition: create
    attributes:
      name: curl
  - type: package
    transition: create
    attributes:
      name: git
  - type: package
    transition: create
    attributes:
      name: gcc
  - type: exec
    transition: create
    attributes:
      create:
        path: go
        args:
          - install
          - google.golang.org/protobuf/cmd/protoc-gen-go@latest
  - type: exec
    transition: create
    attributes:
      create:
        path: go
        args:
          - install
          - golang.org/x/vuln/cmd/govulncheck@latest
42 build/jx.spec (new file)
@@ -0,0 +1,42 @@
Name: jx
Version: %{version}
Release: %{!?rel:1}%{?dist}
Summary: Provision resources using a declarative YAML syntax.

License: https://gitea.rosskeen.house/doublejynx/jx/src/branch/main/LICENSE
URL: https://gitea.rosskeen.house/doublejynx/jx/
Source0: https://gitea.rosskeen.house/doublejynx/jx/archive/v0.2.2.tar.gz

BuildRequires: luajit-devel
BuildRequires: make
BuildRequires: golang >= 1.22.1
Requires: luajit

%description

%global debug_package %{nil}
%undefine _missing_build_ids_terminate_build
%global _missing_build_ids_terminate_build 0

%prep
%autosetup -n jx


%build
LDFLAGS=
%make_build


%install
mkdir -p %{buildroot}/usr/bin
cp jx %{buildroot}/usr/bin/jx

%files
%license LICENSE
#%doc add-docs-here
%{_bindir}/jx


%changelog
* Sun May 26 2024 Matthew Rich <matthewrich.conf@gmail.com> v0.2.2
-
234 cli_test.go
@@ -12,14 +12,35 @@ import (
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"fmt"
|
||||
"decl/internal/tempdir"
|
||||
"archive/tar"
|
||||
"io"
|
||||
"log"
|
||||
"bytes"
|
||||
)
|
||||
|
||||
var TempDir tempdir.Path = "testcli"
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
err := TempDir.Create()
|
||||
if err != nil || TempDir == "" {
|
||||
slog.Error("Failed creating temp dir", "error", err)
|
||||
}
|
||||
|
||||
rc := m.Run()
|
||||
|
||||
TempDir.Remove()
|
||||
os.Exit(rc)
|
||||
}
|
||||
|
||||
func TestCli(t *testing.T) {
|
||||
if _, e := os.Stat("./jx"); errors.Is(e, os.ErrNotExist) {
|
||||
t.Skip("cli not built")
|
||||
}
|
||||
yaml, cliErr := exec.Command("./jx", "import", "--resource", "file://COPYRIGHT").Output()
|
||||
slog.Info("TestCli", "err", cliErr)
|
||||
if cliErr != nil {
|
||||
slog.Info("Debug CLI error", "error", cliErr, "stderr", cliErr.(*exec.ExitError).Stderr)
|
||||
}
|
||||
assert.Nil(t, cliErr)
|
||||
assert.NotEqual(t, "", string(yaml))
|
||||
assert.Greater(t, len(yaml), 0)
|
||||
@ -47,7 +68,218 @@ resources:
|
||||
defer ts.Close()
|
||||
|
||||
yaml, cliErr := exec.Command("./jx", "import", "--resource", ts.URL).Output()
|
||||
if cliErr != nil {
|
||||
slog.Info("Debug CLI error", "error", cliErr, "stderr", cliErr.(*exec.ExitError).Stderr)
|
||||
}
|
||||
assert.Nil(t, cliErr)
|
||||
assert.NotEqual(t, "", string(yaml))
|
||||
assert.Greater(t, len(yaml), 0)
|
||||
}
|
||||
|
||||
func TestCliConfigSource(t *testing.T) {
|
||||
if _, e := os.Stat("./jx"); errors.Is(e, os.ErrNotExist) {
|
||||
t.Skip("cli not built")
|
||||
}
|
||||
|
||||
configYaml := `
|
||||
configurations:
|
||||
- name: myhttpconnection
|
||||
values:
|
||||
http_user: foo
|
||||
http_pass: bar
|
||||
`
|
||||
|
||||
configPath := fmt.Sprintf("%s/testconfig.jx.yaml", TempDir)
|
||||
f, err := os.Create(configPath)
|
||||
assert.Nil(t, err)
|
||||
defer f.Close()
|
||||
_, writeErr := f.Write([]byte(configYaml))
|
||||
assert.Nil(t, writeErr)
|
||||
|
||||
yaml, cliErr := exec.Command("./jx", "import", "--config", configPath, "--resource", "file://COPYRIGHT").Output()
|
||||
if cliErr != nil {
|
||||
slog.Info("Debug CLI error", "error", cliErr, "stderr", cliErr.(*exec.ExitError).Stderr)
|
||||
}
|
||||
assert.Nil(t, cliErr)
|
||||
slog.Info("TestConfigSource", "yaml", yaml)
|
||||
}
|
||||
|
||||
func TestCliConfigCommand(t *testing.T) {
|
||||
if _, e := os.Stat("./jx"); errors.Is(e, os.ErrNotExist) {
|
||||
t.Skip("cli not built")
|
||||
}
|
||||
|
||||
yaml, cliErr := exec.Command("./jx", "config", "file://examples/config/file.jx.yaml").Output()
|
||||
if cliErr != nil {
|
||||
slog.Info("Debug CLI error", "error", cliErr, "stderr", cliErr.(*exec.ExitError).Stderr)
|
||||
}
|
||||
assert.Nil(t, cliErr)
|
||||
slog.Info("TestConfigCommand", "yaml", yaml)
|
||||
assert.NotEqual(t, "", string(yaml))
|
||||
assert.Greater(t, len(yaml), 0)
|
||||
}
|
||||
|
||||
func TestCliImportPackageCommand(t *testing.T) {
|
||||
if _, e := os.Stat("./jx"); errors.Is(e, os.ErrNotExist) {
|
||||
t.Skip("cli not built")
|
||||
}
|
||||
|
||||
yaml, cliErr := exec.Command("./jx", "import", "package://").Output()
|
||||
if cliErr != nil {
|
||||
slog.Info("Debug CLI error", "error", cliErr, "stderr", cliErr.(*exec.ExitError).Stderr)
|
||||
}
|
||||
assert.Nil(t, cliErr)
|
||||
assert.NotEqual(t, "", string(yaml))
|
||||
assert.Greater(t, len(yaml), 0)
|
||||
}
|
||||
|
||||
func TestCliExportTar(t *testing.T) {
|
||||
if _, e := os.Stat("./jx"); errors.Is(e, os.ErrNotExist) {
|
||||
t.Skip("cli not built")
|
||||
}
|
||||
assert.Nil(t, TempDir.Mkdir("tar", 0755))
|
||||
assert.Nil(t, TempDir.CreateFile("tar/foo", "data"))
|
||||
assert.Nil(t, TempDir.CreateFile("tar/bar", "data"))
|
||||
|
||||
cmdArgs := []string{"import", "--output", "tar://-", fmt.Sprintf("file://%s/tar", TempDir)}
|
||||
slog.Info("TestCliExportTar()", "cmd", cmdArgs)
|
||||
cmd := exec.Command("./jx", cmdArgs...)
|
||||
slog.Info("TestCliExportTar()", "cmd", cmd)
|
||||
stderr, errerr := cmd.StderrPipe()
|
||||
assert.Nil(t, errerr)
|
||||
stdout, outerr := cmd.StdoutPipe()
|
||||
assert.Nil(t, outerr)
|
||||
|
||||
assert.Nil(t, cmd.Start())
|
||||
|
||||
errOutput, _ := io.ReadAll(stderr)
|
||||
tarData, _ := io.ReadAll(stdout)
|
||||
|
||||
assert.Nil(t, cmd.Wait())
|
||||
|
||||
slog.Info("TestCliExportTar()", "stderr", errOutput)
|
||||
|
||||
assert.Greater(t, len(tarData), 0)
|
||||
|
||||
tr := tar.NewReader(bytes.NewBuffer(tarData))
|
||||
|
||||
files := []string{fmt.Sprintf("%s/tar/foo", TempDir), fmt.Sprintf("%s/tar/bar", TempDir)}
|
||||
for {
|
||||
hdr, err := tr.Next()
|
||||
if err == io.EOF {
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
assert.Contains(t, files, hdr.Name)
|
||||
contents, err := io.ReadAll(tr)
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, []byte("data"), contents)
|
||||
}
|
||||
}
|
||||
|
||||
func TestResourcesRead(t *testing.T) {
|
||||
if _, e := os.Stat("./jx"); errors.Is(e, os.ErrNotExist) {
|
||||
t.Skip("cli not built")
|
||||
}
|
||||
|
||||
|
||||
server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
|
||||
defer req.Body.Close()
|
||||
assert.Equal(t, req.URL.String(), "/resource/user")
|
||||
_, err := io.ReadAll(req.Body)
|
||||
assert.Nil(t, err)
|
||||
userdecl := []byte(`
|
||||
type: "user"
|
||||
attributes:
|
||||
name: "foo"
|
||||
gecos: "foo user"
|
||||
`)
|
||||
|
||||
_, writeErr := rw.Write(userdecl)
|
||||
assert.Nil(t, writeErr)
|
||||
}))
|
||||
defer server.Close()
|
||||
|
||||
|
||||
assert.Nil(t, TempDir.CreateFile("testread", "data"))
|
||||
|
||||
resources := fmt.Sprintf(`
|
||||
resources:
|
||||
- type: file
|
||||
transition: read
|
||||
attributes:
|
||||
path: %s
|
||||
- type: user
|
||||
transition: read
|
||||
attributes:
|
||||
name: nobody
|
||||
- type: group
|
||||
transition: read
|
||||
attributes:
|
||||
name: wheel
|
||||
- type: container
|
||||
transition: read
|
||||
attributes:
|
||||
name: builder
|
||||
- type: container-network
|
||||
transition: read
|
||||
attributes:
|
||||
name: default
|
||||
- type: container-image
|
||||
transition: read
|
||||
attributes:
|
||||
name: nginx:latest
|
||||
- type: http
|
||||
transition: read
|
||||
attributes:
|
||||
endpoint: %s/resource/user
|
||||
- type: route
|
||||
transition: read
|
||||
attributes:
|
||||
to: 0.0.0.0
|
||||
gateway: 172.17.0.1
|
||||
interface: eth0
|
||||
proto: static
|
||||
scope: global
|
||||
rtid: all
|
||||
routetype: local
|
||||
metric: 100
|
||||
`, TempDir.FilePath("testread"), server.URL)
|
||||
|
||||
assert.Nil(t, TempDir.CreateFile("resources.jx.yaml", resources))
|
||||
|
||||
yaml, cliErr := exec.Command("./jx", "apply", TempDir.FilePath("resources.jx.yaml")).Output()
|
||||
if cliErr != nil {
|
||||
slog.Info("Debug CLI error", "error", cliErr, "stderr", cliErr.(*exec.ExitError).Stderr)
|
||||
}
|
||||
assert.Nil(t, cliErr)
|
||||
assert.NotEqual(t, "", string(yaml))
|
||||
assert.Greater(t, len(yaml), 0)
|
||||
}
|
||||
|
||||
func TestFailedResources(t *testing.T) {
|
||||
if _, e := os.Stat("./jx"); errors.Is(e, os.ErrNotExist) {
|
||||
t.Skip("cli not built")
|
||||
}
|
||||
os.Unsetenv("JX_DEBUG")
|
||||
resources := `
|
||||
resources:
|
||||
- type: package
|
||||
transition: create
|
||||
attributes:
|
||||
name: foobarbaz
|
||||
`
|
||||
|
||||
assert.Nil(t, TempDir.CreateFile("err.jx.yaml", resources))
|
||||
|
||||
yaml, cliErr := exec.Command("./jx", "apply", TempDir.FilePath("err.jx.yaml")).Output()
|
||||
if cliErr != nil {
|
||||
slog.Info("Debug CLI error", "error", cliErr, "stderr", cliErr.(*exec.ExitError).Stderr)
|
||||
}
|
||||
|
||||
assert.NotNil(t, cliErr)
|
||||
assert.NotEqual(t, "", string(yaml))
|
||||
assert.Contains(t, string(cliErr.(*exec.ExitError).Stderr), "Document errors: 1")
|
||||
}
|
||||
|
254 cmd/cli/main.go
@@ -4,17 +4,20 @@ package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"os"
|
||||
"flag"
|
||||
"log"
|
||||
"log/slog"
|
||||
"decl/internal/data"
|
||||
_ "decl/internal/config"
|
||||
"decl/internal/folio"
|
||||
_ "decl/internal/resource"
|
||||
_ "decl/internal/fan"
|
||||
"decl/internal/builtin"
|
||||
_ "errors"
|
||||
"flag"
|
||||
"fmt"
|
||||
_ "gopkg.in/yaml.v3"
|
||||
"decl/internal/resource"
|
||||
"decl/internal/source"
|
||||
"decl/internal/target"
|
||||
_ "gopkg.in/yaml.v3"
|
||||
"io"
|
||||
"log/slog"
|
||||
"os"
|
||||
"decl/internal/client"
|
||||
)
|
||||
|
||||
const (
|
||||
@ -24,10 +27,12 @@ const (
|
||||
|
||||
var (
|
||||
version string
|
||||
commit string
|
||||
date string
|
||||
commit string
|
||||
date string
|
||||
)
|
||||
|
||||
var Client *client.App = client.NewClient()
|
||||
|
||||
var GlobalOformat *string
|
||||
var GlobalOutput string
|
||||
var GlobalQuiet *bool
|
||||
@ -35,6 +40,11 @@ var GlobalQuiet *bool
|
||||
var ImportMerge *bool
|
||||
var ImportResource *string
|
||||
|
||||
var ApplyDelete *bool
|
||||
|
||||
var ConfigPath string
|
||||
|
||||
var ConfigDoc data.Document = folio.DocumentRegistry.NewDocument("")
|
||||
|
||||
var ctx context.Context = context.Background()
|
||||
|
||||
@ -42,21 +52,25 @@ type RunCommand func(cmd *flag.FlagSet, output io.Writer) error
|
||||
|
||||
type SubCommand struct {
|
||||
Name string
|
||||
Run RunCommand
|
||||
Run RunCommand
|
||||
}
|
||||
|
||||
var jxSubCommands = []SubCommand {
|
||||
var jxSubCommands = []SubCommand{
|
||||
{
|
||||
Name: "diff",
|
||||
Run: DiffSubCommand,
|
||||
Run: DiffSubCommand,
|
||||
},
|
||||
{
|
||||
Name: "apply",
|
||||
Run: ApplySubCommand,
|
||||
Run: ApplySubCommand,
|
||||
},
|
||||
{
|
||||
Name: "import",
|
||||
Run: ImportSubCommand,
|
||||
Run: ImportSubCommand,
|
||||
},
|
||||
{
|
||||
Name: "config",
|
||||
Run: ConfigSubCommand,
|
||||
},
|
||||
}
|
||||
|
||||
@ -71,27 +85,26 @@ func LoggerConfig() {
|
||||
var programLevel = new(slog.LevelVar)
|
||||
logger := slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{Level: programLevel}))
|
||||
slog.SetDefault(logger)
|
||||
if debugLogging,ok := os.LookupEnv("DECL_DEBUG"); ok && debugLogging != "" {
|
||||
if debugLogging, ok := os.LookupEnv("JX_DEBUG"); ok && debugLogging != "" {
|
||||
programLevel.Set(slog.LevelDebug)
|
||||
} else {
|
||||
programLevel.Set(slog.LevelError)
|
||||
}
|
||||
}
|
||||
|
||||
func LoadSourceURI(uri string) []*resource.Document {
|
||||
slog.Info("loading ", "uri", uri)
|
||||
if uri != "" {
|
||||
ds, err := source.SourceTypes.New(uri)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
extractDocuments, extractErr := ds.ExtractResources(nil)
|
||||
if extractErr != nil {
|
||||
log.Fatal(extractErr)
|
||||
}
|
||||
return extractDocuments
|
||||
func ConfigSubCommand(cmd *flag.FlagSet, output io.Writer) (err error) {
|
||||
if err = cmd.Parse(os.Args[2:]); err != nil {
|
||||
return
|
||||
}
|
||||
return []*resource.Document{ resource.NewDocument() }
|
||||
|
||||
if err = Client.SetOutput(GlobalOutput); err == nil {
|
||||
if configErr := Client.SystemConfiguration(ConfigPath); configErr != nil {
|
||||
slog.Info("Main.Import - SystemConfiguration", "config", ConfigPath, "error", configErr)
|
||||
}
|
||||
err = Client.ConfigCmd(cmd.Args(), true)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func ImportSubCommand(cmd *flag.FlagSet, output io.Writer) (err error) {
|
||||
@ -101,172 +114,42 @@ func ImportSubCommand(cmd *flag.FlagSet, output io.Writer) (err error) {
|
||||
if e != nil { // returns ErrHelp
|
||||
return e
|
||||
}
|
||||
|
||||
merged := resource.NewDocument()
|
||||
documents := make([]*resource.Document, 0, 100)
|
||||
for _,source := range cmd.Args() {
|
||||
loaded := LoadSourceURI(source)
|
||||
if loaded != nil {
|
||||
documents = append(documents, loaded...)
|
||||
if err = Client.SetOutput(GlobalOutput); err == nil {
|
||||
if configErr := Client.SystemConfiguration(ConfigPath); configErr != nil {
|
||||
slog.Info("Main.Import - SystemConfiguration", "config", ConfigPath, "error", configErr)
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
switch *GlobalOformat {
|
||||
case FormatYaml:
|
||||
encoder = resource.NewYAMLEncoder(output)
|
||||
case FormatJson:
|
||||
encoder = resource.NewJSONEncoder(output)
|
||||
err = Client.ImportCmd(ctx, cmd.Args(), *ImportResource, *GlobalQuiet, *ImportMerge)
|
||||
}
|
||||
*/
|
||||
|
||||
slog.Info("main.ImportResource", "args", os.Args, "output", GlobalOutput)
|
||||
outputTarget, err := target.TargetTypes.New(GlobalOutput)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
if len(documents) == 0 {
|
||||
documents = append(documents, resource.NewDocument())
|
||||
}
|
||||
|
||||
for _,d := range documents {
|
||||
if d != nil {
|
||||
|
||||
if *ImportResource != "" {
|
||||
slog.Info("ImportResource", "resource", ImportResource)
|
||||
if addResourceErr := d.AddResource(*ImportResource); addResourceErr != nil {
|
||||
log.Fatal(addResourceErr)
|
||||
}
|
||||
}
|
||||
|
||||
if *GlobalQuiet {
|
||||
for _, dr := range d.Resources() {
|
||||
if _,e := output.Write([]byte(dr.Resource().URI())); e != nil {
|
||||
return e
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if *ImportMerge {
|
||||
merged.ResourceDecls = append(merged.ResourceDecls, d.ResourceDecls...)
|
||||
} else {
|
||||
slog.Info("main.ImportResource", "outputTarget", outputTarget, "type", outputTarget.Type())
|
||||
if outputErr := outputTarget.EmitResources([]*resource.Document{d}, nil); outputErr != nil {
|
||||
return outputErr
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if *ImportMerge {
|
||||
if outputErr := outputTarget.EmitResources([]*resource.Document{merged}, nil); outputErr != nil {
|
||||
return outputErr
|
||||
}
|
||||
}
|
||||
return err
|
||||
return
|
||||
}
|
||||
|
||||
func ApplySubCommand(cmd *flag.FlagSet, output io.Writer) (err error) {
|
||||
ApplyDelete = cmd.Bool("delete", false, "Delete resources defined in the available documents.")
|
||||
if e := cmd.Parse(os.Args[2:]); e != nil {
|
||||
return e
|
||||
}
|
||||
var encoder resource.Encoder
|
||||
documents := make([]*resource.Document, 0, 100)
|
||||
for _,source := range cmd.Args() {
|
||||
loaded := LoadSourceURI(source)
|
||||
if loaded != nil {
|
||||
documents = append(documents, loaded...)
|
||||
}
|
||||
}
|
||||
|
||||
slog.Info("main.Apply()", "documents", documents)
|
||||
for _,d := range documents {
|
||||
slog.Info("main.Appl()", "doc", d)
|
||||
if e := d.Apply(); e != nil {
|
||||
return e
|
||||
if err = Client.SetOutput(GlobalOutput); err == nil {
|
||||
if configErr := Client.SystemConfiguration(ConfigPath); configErr != nil {
|
||||
slog.Info("Main.Import - SystemConfiguration", "config", ConfigPath, "error", configErr)
|
||||
}
|
||||
|
||||
switch *GlobalOformat {
|
||||
case FormatYaml:
|
||||
encoder = resource.NewYAMLEncoder(output)
|
||||
case FormatJson:
|
||||
encoder = resource.NewJSONEncoder(output)
|
||||
}
|
||||
if *GlobalQuiet {
|
||||
for _, dr := range d.Resources() {
|
||||
if _,e := output.Write([]byte(dr.Resource().URI())); e != nil {
|
||||
return e
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if documentGenerateErr := encoder.Encode(d); documentGenerateErr != nil {
|
||||
return documentGenerateErr
|
||||
}
|
||||
}
|
||||
err = Client.ApplyCmd(ctx, cmd.Args(), *GlobalQuiet, *ApplyDelete)
|
||||
}
|
||||
return err
|
||||
return
|
||||
}
|
||||
|
||||
func DiffSubCommand(cmd *flag.FlagSet, output io.Writer) (err error) {
|
||||
if e := cmd.Parse(os.Args[2:]); e != nil {
|
||||
return e
|
||||
}
|
||||
leftSource := cmd.Arg(0)
|
||||
rightSource := cmd.Arg(1)
|
||||
leftDocuments := make([]*resource.Document, 0, 100)
|
||||
rightDocuments := make([]*resource.Document, 0, 100)
|
||||
|
||||
slog.Info("jx diff subcommand", "left", leftSource, "right", rightSource, "flagset", cmd)
|
||||
|
||||
if rightSource == "" {
|
||||
rightDocuments = append(rightDocuments, LoadSourceURI(leftSource)...)
|
||||
slog.Info("jx diff clone", "docs", rightDocuments)
|
||||
for i, doc := range rightDocuments {
|
||||
if doc != nil {
|
||||
leftDocuments = append(leftDocuments, doc.Clone())
|
||||
for _,resourceDeclaration := range leftDocuments[i].Resources() {
|
||||
if _, e := resourceDeclaration.Resource().Read(ctx); e != nil {
|
||||
slog.Info("jx diff ", "err", e)
|
||||
//return e
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
leftDocuments = append(leftDocuments, LoadSourceURI(leftSource)...)
|
||||
rightDocuments = append(rightDocuments, LoadSourceURI(rightSource)...)
|
||||
}
|
||||
|
||||
slog.Info("jx diff ", "right", rightDocuments, "left", leftDocuments)
|
||||
index := 0
|
||||
for {
|
||||
if index >= len(rightDocuments) && index >= len(leftDocuments) {
|
||||
break
|
||||
}
|
||||
if index >= len(rightDocuments) {
|
||||
if _,e := leftDocuments[index].Diff(resource.NewDocument(), output); e != nil {
|
||||
return e
|
||||
}
|
||||
index++
|
||||
continue
|
||||
}
|
||||
if index >= len(leftDocuments) {
|
||||
if _,e := resource.NewDocument().Diff(rightDocuments[index], output); e != nil {
|
||||
return e
|
||||
}
|
||||
index++
|
||||
continue
|
||||
}
|
||||
if _,e := leftDocuments[index].Diff(rightDocuments[index], output); e != nil {
|
||||
return e
|
||||
}
|
||||
index++
|
||||
}
|
||||
return err
|
||||
|
||||
return Client.DiffCmd(cmd.Args())
|
||||
}
|
||||
|
||||
func main() {
|
||||
|
||||
func main() {
|
||||
LoggerConfig()
|
||||
|
||||
if len(os.Args) < 2 {
|
||||
@ -274,8 +157,19 @@ func main() {
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
for _,subCmd := range jxSubCommands {
|
||||
DefaultConfigurations, configErr := builtin.BuiltInDocuments()
|
||||
if configErr != nil {
|
||||
slog.Warn("Failed loading default configuration", "error", configErr)
|
||||
}
|
||||
|
||||
ConfigDoc.AppendConfigurations(DefaultConfigurations)
|
||||
|
||||
for _, subCmd := range jxSubCommands {
|
||||
cmdFlagSet := flag.NewFlagSet(subCmd.Name, flag.ExitOnError)
|
||||
|
||||
cmdFlagSet.StringVar(&ConfigPath, "config", "/etc/jx/conf.d", "Config file path")
|
||||
cmdFlagSet.StringVar(&ConfigPath, "c", "/etc/jx/conf.d", "Config file path")
|
||||
|
||||
GlobalOformat = cmdFlagSet.String("oformat", "yaml", "Output serialization format")
|
||||
cmdFlagSet.StringVar(&GlobalOutput, "output", "-", "Output target (default stdout)")
|
||||
cmdFlagSet.StringVar(&GlobalOutput, "o", "-", "Output target (default stdout)")
|
||||
@ -300,10 +194,18 @@ func main() {
|
||||
cmdFlagSet.PrintDefaults()
|
||||
VersionUsage()
|
||||
}
|
||||
case "config":
|
||||
cmdFlagSet.Usage = func() {
|
||||
fmt.Println("jx config source...")
|
||||
cmdFlagSet.PrintDefaults()
|
||||
VersionUsage()
|
||||
}
|
||||
}
|
||||
slog.Info("CLI", "cmd", subCmd.Name)
|
||||
if os.Args[1] == subCmd.Name {
|
||||
if e := subCmd.Run(cmdFlagSet, os.Stdout); e != nil {
|
||||
log.Fatal(e)
|
||||
slog.Error("Failed running command", "command", os.Args[1], "error", e)
|
||||
os.Exit(1)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
26 cmd/cli/main_test.go (new file)
@@ -0,0 +1,26 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package main

import (
	_ "github.com/stretchr/testify/assert"
	"testing"
	_ "decl/internal/folio"
	_ "decl/internal/data"
	_ "log/slog"
)

func TestLoadSourceURIConverter(t *testing.T) {
/*
	var uri folio.URI = "file://../../examples/file.jx.yaml"
	docs, err := LoadSourceURIConverter(uri)
	assert.Nil(t, err)
	assert.Greater(t, len(docs), 0)
	slog.Info("TestLoadSourceURIConverter", "doc", docs[0], "resource", docs[0].(*folio.Document).ResourceDeclarations[0].Attributes)
	resDecl := docs[0].(*folio.Document).ResourceDeclarations[0]
	assert.Equal(t, "file", resDecl.Attributes.Type())
	v, ok := docs[0].Get("file:///tmp/foo.txt")
	assert.True(t, ok)
	assert.Equal(t, "/tmp/foo.txt", v.(data.Declaration).Resource().(data.FileResource).FilePath())
*/
}
15 examples/certificate.jx.yaml (new file)
@@ -0,0 +1,15 @@
resources:
  - type: pki
    transition: create
    config: myca
    attributes:
      privatekeyref: file://myca_privkey.pem
      publickeyref: file://myca_pubkey.pem
      certificateref: file://myca_cert.pem
  - type: pki
    transition: update
    attributes:
      signedbyref: pki://myca_privkey.pem
      privatekeyref: file://mycert_key.pem
      publickeyref: file://mycert_pubkey.pem
      certificateref: file://mycert.pem
43 examples/config/cert.cfg.jx.yaml (new file)
@@ -0,0 +1,43 @@
configurations:
  - name: myca
    type: certificate
    values:
      certtemplate:
        serialnumber: 2024
        subject:
          organization:
            - RKH
          country:
            - US
          province:
            - CA
          locality:
            - San Francisco
          streetaddress:
            - 0 cert st
          postalcode:
            - 94101
        notbefore: 2024-07-10
        notafter: 2025-07-10
        basicconstraintsvalid: true
        isca: true
  - name: mycert
    type: certificate
    values:
      certtemplate:
        serialnumber: 2025
        subject:
          organization:
            - RKH
          country:
            - US
          province:
            - CA
          locality:
            - San Francisco
          streetaddress:
            - 0 cert st
          postalcode:
            - 94101
        notbefore: 2024-07-10
        notafter: 2025-07-10
4 examples/config/file.jx.yaml (new file)
@@ -0,0 +1,4 @@
configurations:
  - name: myfiles
    values:
      prefix: /home/testuser
5 examples/container-image.jx.yaml (new file)
@@ -0,0 +1,5 @@
resources:
  - type: container-image
    transition: read
    attributes:
      name: nginx:latest

5 examples/fedora.jx.yaml (new file)
@@ -0,0 +1,5 @@
resources:
  - type: container-image
    transition: read
    attributes:
      name: "fedora:latest"
@@ -1,8 +1,8 @@
resources:
  - type: file
    transition: create
    attributes:
      path: /tmp/foo.txt
      owner: nobody
      group: nobody
      mode: 0644
      state: present
6 examples/golang.jx.yaml (new file)
@@ -0,0 +1,6 @@
resources:
  - type: file
    transition: create
    attributes:
      path: go1.22.5.linux-amd64.tar.gz
      sourceref: https://go.dev/dl/go1.22.5.linux-amd64.tar.gz
12 examples/golangci-lint.jx.yaml (new file)
@@ -0,0 +1,12 @@
resources:
  - type: file
    transition: create
    attributes:
      path: golangci-lint-1.55.2-linux-amd64.deb
      sourceref: https://github.com/golangci/golangci-lint/releases/download/v1.55.2/golangci-lint-1.55.2-linux-amd64.deb
  - type: package
    transition: create
    attributes:
      name: golangci-lint
      source: golangci-lint-1.55.2-linux-amd64.deb
      type: deb
6 examples/group.jx.yaml (new file)
@@ -0,0 +1,6 @@
resources:
  - type: group
    transition: create
    attributes:
      name: "testgroup"
      gid: "12001"
11 examples/iptable.jx.yaml (new file)
@@ -0,0 +1,11 @@
resources:
  - type: iptable
    transition: create
    attributes:
      id: 1
      table: filter
      chain: INPUT
      jump: LIBVIRT_INP
      state: present
      resourcetype: rule
9 examples/package.jx.yaml (new file)
@@ -0,0 +1,9 @@
resources:
  - type: package
    transition: create
    attributes:
      name: zip
      version: 3.0-12build2
      type: apt
      state: present
@@ -1,7 +1,7 @@
resources:
  - type: user
    transition: create
    attributes:
      name: "testuser"
      uid: "12001"
      home: "/home/testuser"
      state: present
19 go.mod
@@ -1,9 +1,12 @@
module decl

go 1.21.1
go 1.22.5

require (
	github.com/docker/docker v25.0.5+incompatible
	// gitea.rosskeen.house/pylon/luaruntime v0.0.0-20240513200425-f413d8adf7b3
	gitea.rosskeen.house/rosskeen.house/machine v0.0.0-20240520193117-1835255b6d02
	github.com/docker/docker v27.0.3+incompatible
	github.com/docker/go-connections v0.5.0
	github.com/opencontainers/image-spec v1.1.0
	github.com/sters/yaml-diff v1.3.2
	github.com/stretchr/testify v1.9.0
@@ -11,13 +14,16 @@ require (
	gopkg.in/yaml.v3 v3.0.1
)

require google.golang.org/protobuf v1.33.0

require (
	gitea.rosskeen.house/rosskeen.house/machine v0.0.0-20240404204346-6c7c3faf2814 // indirect
	gitea.rosskeen.house/pylon/luaruntime v0.0.0-20240924031921-4d00743b53e1 // indirect
	github.com/Microsoft/go-winio v0.4.14 // indirect
	github.com/ProtonMail/go-crypto v1.0.0 // indirect
	github.com/cloudflare/circl v1.3.3 // indirect
	github.com/containerd/log v0.1.0 // indirect
	github.com/davecgh/go-spew v1.1.1 // indirect
	github.com/distribution/reference v0.5.0 // indirect
	github.com/docker/go-connections v0.5.0 // indirect
	github.com/docker/go-units v0.5.0 // indirect
	github.com/fatih/color v1.16.0 // indirect
	github.com/felixge/httpsnoop v1.0.4 // indirect
@@ -27,6 +33,7 @@ require (
	github.com/gogo/protobuf v1.3.2 // indirect
	github.com/mattn/go-colorable v0.1.13 // indirect
	github.com/mattn/go-isatty v0.0.20 // indirect
	github.com/moby/docker-image-spec v1.3.1 // indirect
	github.com/moby/term v0.5.0 // indirect
	github.com/morikuni/aec v1.0.0 // indirect
	github.com/opencontainers/go-digest v1.0.0 // indirect
@@ -40,7 +47,9 @@ require (
	go.opentelemetry.io/otel/metric v1.25.0 // indirect
	go.opentelemetry.io/otel/sdk v1.25.0 // indirect
	go.opentelemetry.io/otel/trace v1.25.0 // indirect
	golang.org/x/sys v0.18.0 // indirect
	golang.org/x/crypto v0.24.0 // indirect
	golang.org/x/net v0.26.0 // indirect
	golang.org/x/sys v0.21.0 // indirect
	golang.org/x/time v0.5.0 // indirect
	golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect
	gotest.tools/v3 v3.5.1 // indirect
64 go.sum
@@ -1,11 +1,20 @@
|
||||
gitea.rosskeen.house/rosskeen.house/machine v0.0.0-20240404204346-6c7c3faf2814 h1:nPMHPc3NB+jsd1OFQlrEiRKPXT7KHd6fYyWItt4jCVs=
|
||||
gitea.rosskeen.house/rosskeen.house/machine v0.0.0-20240404204346-6c7c3faf2814/go.mod h1:5J2OFjFIBaCfsjcC9kSyycbIL8g/qAJH2A8BnbIig+Y=
|
||||
gitea.rosskeen.house/pylon/luaruntime v0.0.0-20240513200425-f413d8adf7b3 h1:ge74Hmzxp+bqVwSK9hOOBlZB9KeL3xuwMIXAYLPHBxA=
|
||||
gitea.rosskeen.house/pylon/luaruntime v0.0.0-20240513200425-f413d8adf7b3/go.mod h1:9sKIXsGDcf1uBnHhY29wi38Vll8dpVNUOxkXphN2KEk=
|
||||
gitea.rosskeen.house/pylon/luaruntime v0.0.0-20240924031921-4d00743b53e1 h1:UT79l0TvkYjlAbJrsFIm6R0tL+Rl/814ThKbjOgrTPo=
|
||||
gitea.rosskeen.house/pylon/luaruntime v0.0.0-20240924031921-4d00743b53e1/go.mod h1:9sKIXsGDcf1uBnHhY29wi38Vll8dpVNUOxkXphN2KEk=
|
||||
gitea.rosskeen.house/rosskeen.house/machine v0.0.0-20240520193117-1835255b6d02 h1:FLRmUvu0mz8Ac+/VZf/P4yuv2e6++SSkKOcEIHSlpAI=
|
||||
gitea.rosskeen.house/rosskeen.house/machine v0.0.0-20240520193117-1835255b6d02/go.mod h1:5J2OFjFIBaCfsjcC9kSyycbIL8g/qAJH2A8BnbIig+Y=
|
||||
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8=
|
||||
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
|
||||
github.com/Microsoft/go-winio v0.4.14 h1:+hMXMk01us9KgxGb7ftKQt2Xpf5hH/yky+TDA+qxleU=
|
||||
github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA=
|
||||
github.com/ProtonMail/go-crypto v1.0.0 h1:LRuvITjQWX+WIfr930YHG2HNfjR1uOfyf5vE0kC2U78=
|
||||
github.com/ProtonMail/go-crypto v1.0.0/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0=
|
||||
github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0=
|
||||
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
|
||||
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
|
||||
github.com/cloudflare/circl v1.3.3 h1:fE/Qz0QdIGqeWfnwq0RE0R7MI51s0M2E4Ga9kq5AEMs=
|
||||
github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA=
|
||||
github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
|
||||
github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
@ -13,8 +22,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/distribution/reference v0.5.0 h1:/FUIFXtfc/x2gpa5/VGfiGLuOIdYa1t65IKK2OFGvA0=
|
||||
github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
|
||||
github.com/docker/docker v25.0.5+incompatible h1:UmQydMduGkrD5nQde1mecF/YnSbTOaPeFIeP5C4W+DE=
|
||||
github.com/docker/docker v25.0.5+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/docker v27.0.3+incompatible h1:aBGI9TeQ4MPlhquTQKq9XbK79rKFVwXNUAYz9aXyEBE=
|
||||
github.com/docker/docker v27.0.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c=
|
||||
github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc=
|
||||
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
|
||||
@ -54,6 +63,8 @@ github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovk
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
|
||||
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
|
||||
github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
|
||||
github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
|
||||
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
|
||||
@ -88,6 +99,7 @@ github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17
|
||||
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
|
||||
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw=
|
||||
go.opentelemetry.io/otel v1.25.0 h1:gldB5FfhRl7OJQbUHt/8s0a7cE8fbsPAtdpRaApKy4k=
|
||||
@ -107,38 +119,68 @@ go.opentelemetry.io/proto/otlp v1.1.0/go.mod h1:GpBHCBWiqvVLDqmHZsoMM3C5ySeKTC7e
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.7.0 h1:AvwMYaRytfdeVt3u6mLaxYtErKYjxA2OXjJ1HHq6t3A=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
|
||||
golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU=
|
||||
golang.org/x/crypto v0.24.0 h1:mnl8DM0o513X8fdIkmyFE/5hTYxbwYOjDS/+rK6qpRI=
|
||||
golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM=
|
||||
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs=
golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ=
golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws=
golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4=
golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI=
golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
69
internal/builtin/builtin.go
Normal file
@ -0,0 +1,69 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package builtin

import (
    _ "context"
    _ "encoding/json"
    "fmt"
    _ "gopkg.in/yaml.v3"
    _ "net/url"
    _ "regexp"
    _ "strings"
    _ "os"
    _ "io"
    _ "compress/gzip"
    _ "archive/tar"
    _ "errors"
    _ "path/filepath"
    _ "decl/internal/codec"
    "decl/internal/data"
    "decl/internal/fs"
    "decl/internal/folio"
    _ "decl/internal/resource"
    _ "decl/internal/config"
    _ "decl/internal/fan"
    "embed"
    "log/slog"
)

//go:embed documents/*.jx.yaml
var documentFiles embed.FS

func Load(uri folio.URI) (documents []data.Document, err error) {
    var extractor data.Converter
    var sourceResource data.Resource
    if extractor, err = folio.DocumentRegistry.ConverterTypes.New(string(uri)); err == nil {
        slog.Info("builtin.Load() extractor", "uri", uri, "error", err)

        targetDeclaration := folio.NewDeclaration()
        if err = targetDeclaration.NewResource((*string)(&uri)); err == nil {
            slog.Info("builtin.Load() extract many", "resource", sourceResource, "error", err, "uri", uri, "extractor", extractor)
            sourceResource = targetDeclaration.Attributes
            sourceResource.(data.FileResource).SetFS(documentFiles)
            documents, err = extractor.(data.ManyExtractor).ExtractMany(sourceResource, nil)
            slog.Info("builtin.Load() extract many", "resource", sourceResource, "error", err)
        }
    }
    return
}

func BuiltInDocuments() (documents []data.Document, err error) {

    docFs := fs.NewWalkDir(documentFiles, "", func(fsys fs.FS, path string, file fs.DirEntry) (walkErr error) {
        u := folio.URI(fmt.Sprintf("file://%s", path))
        slog.Info("BuiltInDocuments()", "file", u)

        if ! file.IsDir() {
            if loadDocs, loadErr := Load(u); loadErr == nil {
                documents = append(documents, loadDocs...)
            } else {
                err = loadErr
            }
        }
        return
    })

    docFs.Walk(nil)
    return documents, err
}
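For orientation, a minimal sketch of how a caller could pull in these embedded documents; the `decl/internal/builtin` import path and the `GetURI()` accessor are taken from the files in this diff, while the `main` wrapper and the log handling are illustrative assumptions only.

```go
package main

import (
    "fmt"
    "log"

    "decl/internal/builtin"
)

func main() {
    // Load every document embedded under documents/*.jx.yaml.
    docs, err := builtin.BuiltInDocuments()
    if err != nil {
        log.Printf("some built-in documents failed to load: %v", err)
    }
    for _, d := range docs {
        fmt.Println(d.GetURI())
    }
}
```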
59
internal/builtin/builtin_test.go
Normal file
@ -0,0 +1,59 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package builtin

import (
    "github.com/stretchr/testify/assert"
    "log/slog"
    "os"
    "testing"
    "decl/internal/tempdir"
    "decl/internal/folio"
    "errors"
)

var TempDir tempdir.Path = "testbuiltin"

func TestMain(m *testing.M) {
    err := TempDir.Create()
    if err != nil || TempDir == "" {
        slog.Error("Failed creating temp dir", "error", err)
    }

    rc := m.Run()

    TempDir.Remove()
    os.Exit(rc)
}

func TestBuiltInLoad(t *testing.T) {
    docs, err := Load("file://documents/facter.jx.yaml")
    if ! errors.Is(err, os.ErrNotExist) {
        assert.Nil(t, err)
        assert.Greater(t, len(docs), 0)
    }
}

func TestBuiltInDocuments(t *testing.T) {
    docs, err := BuiltInDocuments()
    assert.Greater(t, len(docs), 0)
    if ! errors.Is(err, os.ErrNotExist) {
        assert.Nil(t, err)
        config, ok := folio.DocumentRegistry.GetDocument("file://documents/facter.jx.yaml")

        assert.True(t, ok)

        slog.Info("TestBuiltInDocuments()", "document", config)

        assert.True(t, config.HasConfig("facts"))
        c := config.GetConfig("facts")
        v, e := c.GetValue("virtual")
        assert.Nil(t, e)
        assert.Equal(t, "physical", v)
    }

    systemConfig, systemExists := folio.DocumentRegistry.GetDocument("file://documents/system.jx.yaml")
    assert.True(t, systemExists)

    assert.True(t, systemConfig.HasConfig("system"))
}
54
internal/builtin/documents/config.jx.yaml
Normal file
@ -0,0 +1,54 @@
configurations:
  - name: confdir
    values:
      prefix: /etc/jx
resources:
  - type: group
    transition: create
    onerror: stop
    attributes:
      name: "jx"
  - type: file
    transition: update
    attributes:
      path: "/etc/jx"
      owner: "root"
      group: "root"
      mode: "0755"
      filetype: directory
  - type: file
    transition: update
    config: confdir
    attributes:
      path: "conf.d"
      owner: "root"
      group: "jx"
      mode: "0770"
      filetype: directory
  - type: file
    transition: update
    config: confdir
    attributes:
      path: "lib"
      owner: "root"
      group: "jx"
      mode: "0770"
      filetype: directory
  - type: file
    transition: update
    config: confdir
    attributes:
      path: "pki"
      owner: "root"
      group: "jx"
      mode: "0770"
      filetype: directory
  - type: file
    transition: update
    config: confdir
    attributes:
      path: "pki/ca"
      owner: "root"
      group: "jx"
      mode: "0770"
      filetype: directory
9
internal/builtin/documents/facter.jx.yaml
Normal file
@ -0,0 +1,9 @@
configurations:
  - name: facts
    type: exec
    values:
      path: /usr/bin/facter
      args:
        - "-j"
      format: "json"
16
internal/builtin/documents/install.jx.yaml
Normal file
@ -0,0 +1,16 @@
imports:
  - file://documents/config.jx.yaml
configurations:
  - name: bindir
    values:
      prefix: /usr/local/bin
resources:
  - type: file
    transition: update
    config: bindir
    attributes:
      path: "jx"
      owner: "root"
      group: "root"
      mode: "0755"
      sourceref: file://jx
4
internal/builtin/documents/system.jx.yaml
Normal file
@ -0,0 +1,4 @@
configurations:
  - name: system
    type: system
    values: {}
390
internal/client/client.go
Normal file
@ -0,0 +1,390 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package client

import (
    "decl/internal/data"
    "decl/internal/folio"
    _ "decl/internal/fan"
    _ "decl/internal/config"
    _ "decl/internal/resource"
    "decl/internal/fs"
    "decl/internal/builtin"
    "errors"
    "fmt"
    "context"
    "log/slog"
    "os"
)

var (
    ErrFailedResources error = errors.New("Failed Resources")
    ErrFailedDocuments error = errors.New("Document errors")
)

type App struct {
    Target folio.URI
    ImportedMap map[folio.URI]data.Document
    Documents []data.Document
    emitter data.Converter
    merged data.Document
    Config data.Document
}

func NewClient() *App {
    a := &App{ ImportedMap: make(map[folio.URI]data.Document), Documents: make([]data.Document, 0, 100) }
    return a
}

// Load compiled-in config documents.
func (a *App) BuiltInConfiguration() (err error) {
    var defaultConfigurations []data.Document
    if defaultConfigurations, err = builtin.BuiltInDocuments(); len(defaultConfigurations) > 0 {
        slog.Info("Client.BuiltInConfiguration()", "documents", defaultConfigurations, "error", err)
        a.Config.AppendConfigurations(defaultConfigurations)
    }
    return
}

// Load config documents from default system config path. Ignore if missing.
func (a *App) SystemConfiguration(configPath string) (err error) {
    var extractor data.Converter
    var sourceResource data.Resource
    if a.Config == nil {
        a.Config = folio.DocumentRegistry.NewDocument("file:///etc/jx/runtimeconfig.jx.yaml")
    }
    if configPath != "" {
        //configURI := folio.URI(configPath)
        var loaded []data.Document
        docFs := fs.NewWalkDir(os.DirFS(configPath), configPath, func(fsys fs.FS, path string, file fs.DirEntry) (loadErr error) {
            u := folio.URI(fmt.Sprintf("file://%s", path))

            if ! file.IsDir() {
                slog.Info("Client.SystemConfiguration()", "uri", u)
                if extractor, loadErr = folio.DocumentRegistry.ConverterTypes.New(string(u)); loadErr == nil {
                    if sourceResource, loadErr = u.NewResource(nil); loadErr == nil {
                        if loaded, loadErr = extractor.(data.ManyExtractor).ExtractMany(sourceResource, nil); loadErr == nil {
                            a.Config.AppendConfigurations(loaded)
                        }
                    }
                }
            }
            return
        })
        err = docFs.Walk(nil)
    }
    return
}

func (a *App) MergeDocuments() {
    a.merged = folio.DocumentRegistry.NewDocument("file://-")
    for _, d := range a.Documents {
        for _, declaration := range d.(*folio.Document).ResourceDeclarations {
            a.merged.AddDeclaration((data.Declaration)(declaration))
        }
    }
}

func (a *App) SetOutput(uri string) (err error) {
    if uri == "-" {
        uri = "jx://-"
    }
    a.Target = folio.URI(uri)
    if a.emitter, err = folio.DocumentRegistry.ConverterTypes.New(uri); err != nil {
        return fmt.Errorf("Failed opening target: %s, %w", uri, err)
    }
    slog.Info("Client.SetOutput()", "uri", uri, "emitter", a.emitter)
    return
}

// Each document has an `imports` keyword which can be used to load dependencies
func (a *App) LoadDocumentImports() error {
    for i, d := range a.Documents {
        importedDocs := d.ImportedDocuments()
        for _, importedDocument := range importedDocs {
            docURI := folio.URI(importedDocument.GetURI())
            if _, ok := a.ImportedMap[docURI]; !ok {
                a.ImportedMap[docURI] = importedDocument
                a.Documents = append(a.Documents, nil)
                copy(a.Documents[i+1:], a.Documents[i:])
                a.Documents[i] = importedDocument
                /*
                if _, outputErr := a.emitter.Emit(importedDocument, nil); outputErr != nil {
                    return outputErr
                }
                */
            }
        }
    }
    return nil
}

func (a *App) ImportResource(ctx context.Context, uri string) (err error) {
    if len(a.Documents) < 1 {
        a.Documents = append(a.Documents, folio.DocumentRegistry.NewDocument(""))
    }
    resourceURI := folio.URI(uri)
    u := resourceURI.Parse().URL()
    if u == nil {
        return fmt.Errorf("Failed adding resource: %s", uri)
    }
    if u.Scheme == "" {
        u.Scheme = "file"
    }

    for _, d := range a.Documents {
        if newResource, newResourceErr := d.NewResource(uri); newResourceErr == nil {
            if _, err = newResource.Read(ctx); err != nil {
                return
            }
        } else {
            return newResourceErr
        }
    }
    return
}

func (a *App) ImportSource(uri string) (loadedDocuments []data.Document, err error) {
    if source := folio.URI(uri).Parse().URL(); source != nil {
        if source.Scheme == "" {
            source.Scheme = "file"
        }

        slog.Info("Client.ImportSource()", "uri", uri, "source", source, "error", err)
        if loadedDocuments, err = folio.DocumentRegistry.LoadFromParsedURI(source); err == nil && loadedDocuments != nil {
            a.Documents = append(a.Documents, loadedDocuments...)
        }
    } else {
        err = folio.ErrInvalidURI
    }

    slog.Info("Client.ImportSource()", "uri", uri, "error", err)
    return
}

func (a *App) Import(docs []string) (err error) {
    for _, source := range docs {
        if _, err = a.ImportSource(source); err != nil {
            return
        }
    }
    return
}

func (a *App) Apply(ctx context.Context, deleteResources bool) (err error) {
    var errorsCount int = 0
    for _, d := range a.Documents {
        d.SetConfig(a.Config)

        var overrideState string = ""
        if deleteResources {
            overrideState = "delete"
        }
        d.ResolveIds(ctx)

        _ = d.Apply("stat")

        if ! d.CheckConstraints() {
            slog.Info("Client.Apply() document constraints failed", "requires", d)
            d.AddError(fmt.Errorf("%w: %s", folio.ErrConstraintFailure, d.GetURI()))
            errorsCount++
            continue
        }

        slog.Info("Client.Apply()", "uri", d.GetURI(), "document", d, "state", overrideState, "error", err)
        if e := d.(*folio.Document).Apply(overrideState); e != nil {
            slog.Info("Client.Apply() error", "error", e)
            return e
        }
        if d.Failures() > 0 {
            d.AddError(fmt.Errorf("%w: %d, %w", ErrFailedResources, d.Failures(), err))
            errorsCount++
        }
    }

    if errorsCount > 0 {
        return fmt.Errorf("%w: %d", ErrFailedDocuments, errorsCount)
    }
    return
}

func (a *App) ImportCmd(ctx context.Context, docs []string, resourceURI string, quiet bool, merge bool) (err error) {
    defer a.Close()
    if err = a.Import(docs); err != nil {
        return
    }

    if err = a.LoadDocumentImports(); err != nil {
        return
    }

    if len(resourceURI) > 0 {
        if err = a.ImportResource(ctx, resourceURI); err != nil {
            return
        }
    }

    if quiet {
        err = a.Quiet()
    } else {
        if merge {
            a.MergeDocuments()
        }
        err = a.Emit()
        if err != nil {
            return
        }
    }
    return
}

func (a *App) ApplyCmd(ctx context.Context, docs []string, quiet bool, deleteResources bool) (err error) {
    defer a.Close()
    var failedResources error
    if err = a.Import(docs); err != nil {
        return
    }

    if err = a.LoadDocumentImports(); err != nil {
        return
    }

    if failedResources = a.Apply(ctx, deleteResources); failedResources != nil {
        slog.Info("Client.ApplyCmd()", "client", a, "error", failedResources)
        if ! errors.Is(failedResources, ErrFailedResources) && ! errors.Is(failedResources, ErrFailedDocuments) {
            return failedResources
        }
    }

    if quiet {
        err = a.Quiet()
    } else {
        err = a.Emit()
    }

    if failedResources != nil {
        if err != nil {
            return fmt.Errorf("%w %w", failedResources, err)
        } else {
            return failedResources
        }
    }

    return
}

func (a *App) Diff(left []data.Document, right []data.Document) (err error) {
    output := os.Stdout
    slog.Info("jx diff ", "right", right, "left", left)
    index := 0
    for {
        if index >= len(right) && index >= len(left) {
            break
        }
        if index >= len(right) {
            if _, err = left[index].Diff(folio.DocumentRegistry.NewDocument(""), output); err != nil {
                return
            }
            index++
            continue
        }
        if index >= len(left) {
            if _, err = folio.DocumentRegistry.NewDocument("").Diff(right[index], output); err != nil {
                return
            }
            index++
            continue
        }
        if _, err = left[index].Diff(right[index], output); err != nil {
            return
        }
        index++
    }
    return
}

func (a *App) DiffCmd(docs []string) (err error) {
    output := os.Stdout

    var leftDocuments, rightDocuments []data.Document
    var rightSource folio.URI

    //leftSource := folio.URI(docs[0])
    if len(docs) > 1 {
        rightSource = folio.URI(docs[1])
    }

    if leftDocuments, err = a.ImportSource(docs[0]); err == nil {
        if rightSource.IsEmpty() {
            for _, doc := range leftDocuments {
                _, err = doc.DiffState(output)
            }
        } else {
            if rightDocuments, err = a.ImportSource(docs[1]); err == nil {
                err = a.Diff(leftDocuments, rightDocuments)
            }
        }
    }
    return err
}

func (a *App) ConfigCmd(docs []string, includeSystemConfig bool) (err error) {
    defer a.Close()
    if err = a.BuiltInConfiguration(); err != nil {
        slog.Warn("BuiltInConfiguration()", "error", err)
    }

    if err = a.Import(docs); err != nil {
        return
    }

    if err = a.LoadDocumentImports(); err != nil {
        return
    }

    if includeSystemConfig {
        if _, err = a.emitter.Emit(a.Config, nil); err != nil {
            return
        }
    }
    _, err = a.emitter.(data.ManyEmitter).EmitMany(a.Documents, nil)
    return
}

func (a *App) Quiet() (err error) {
    output := os.Stdout
    for _, d := range a.Documents {
        for _, dr := range d.Declarations() {
            if _, err = output.Write([]byte(fmt.Sprintf("%s\n", dr.Resource().URI()))); err != nil {
                return
            }
        }
    }
    return
}

func (a *App) Emit() (err error) {
    if a.merged == nil {
        for _, d := range a.Documents {
            slog.Info("Client.Emit() document", "document", d)
            if _, err = a.emitter.Emit(d, nil); err != nil {
                return
            }
        }
    } else {
        if _, err = a.emitter.Emit(a.merged, nil); err != nil {
            return
        }
    }
    return
}

func (a *App) Close() (err error) {
    if a.emitter != nil {
        slog.Info("Client.Close() emitter", "emitter", a.emitter)
        return a.emitter.Close()
    }
    return
}
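Read end to end, the App type implies a short import-then-apply flow. The sketch below strings the exported methods together in roughly the order ApplyCmd uses them; the example document path is hypothetical and the error handling via log.Fatal is an illustrative assumption, not repository code.

```go
package main

import (
    "context"
    "log"

    "decl/internal/client"
)

func main() {
    app := client.NewClient()
    defer app.Close()

    // Emit the resulting state to stdout, as the CLI would with "-".
    if err := app.SetOutput("-"); err != nil {
        log.Fatal(err)
    }

    // Import one or more documents, pull in their `imports`, then apply.
    if err := app.Import([]string{"file://./examples/file.jx.yaml"}); err != nil {
        log.Fatal(err)
    }
    if err := app.LoadDocumentImports(); err != nil {
        log.Fatal(err)
    }
    if err := app.Apply(context.Background(), false); err != nil {
        log.Fatal(err)
    }
    if err := app.Emit(); err != nil {
        log.Fatal(err)
    }
}
```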
323
internal/client/client_test.go
Normal file
@ -0,0 +1,323 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package client
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"os"
|
||||
"os/user"
|
||||
"os/exec"
|
||||
"testing"
|
||||
"decl/internal/tempdir"
|
||||
"log"
|
||||
"decl/internal/folio"
|
||||
_ "decl/internal/fan"
|
||||
"decl/internal/codec"
|
||||
"decl/internal/data"
|
||||
"decl/internal/ext"
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"archive/tar"
|
||||
"compress/gzip"
|
||||
"bytes"
|
||||
"io"
|
||||
)
|
||||
|
||||
var programLevel = new(slog.LevelVar)
|
||||
|
||||
var TempDir tempdir.Path = "jx_client"
|
||||
|
||||
var ProcessTestUserName string
|
||||
var ProcessTestGroupName string
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
LoggerConfig()
|
||||
err := TempDir.Create()
|
||||
if err != nil || TempDir == "" {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
ProcessTestUserName, ProcessTestGroupName = ProcessUserName()
|
||||
rc := m.Run()
|
||||
|
||||
TempDir.Remove()
|
||||
os.Exit(rc)
|
||||
}
|
||||
|
||||
func LoggerConfig() {
|
||||
logger := slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{Level: programLevel}))
|
||||
slog.SetDefault(logger)
|
||||
programLevel.Set(slog.LevelDebug)
|
||||
}
|
||||
|
||||
func ProcessUserName() (string, string) {
|
||||
processUser, userErr := user.Current()
|
||||
if userErr != nil {
|
||||
panic(userErr)
|
||||
}
|
||||
processGroup, groupErr := user.LookupGroupId(processUser.Gid)
|
||||
if groupErr != nil {
|
||||
panic(groupErr)
|
||||
}
|
||||
return processUser.Username, processGroup.Name
|
||||
}
|
||||
|
||||
func ExitError(e error) string {
|
||||
if e != nil {
|
||||
switch v := e.(type) {
|
||||
case *exec.ExitError:
|
||||
return string(v.Stderr)
|
||||
default:
|
||||
return e.Error()
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// jx import <docuri>...
|
||||
func TestClientImport(t *testing.T) {
|
||||
c := NewClient()
|
||||
assert.NotNil(t, c)
|
||||
|
||||
importDocuments := []string{
|
||||
"file://../../examples/file.jx.yaml",
|
||||
"file://../../examples/user.jx.yaml",
|
||||
}
|
||||
|
||||
assert.Nil(t, c.Import(importDocuments))
|
||||
|
||||
for index, uri := range importDocuments {
|
||||
u := folio.URI(uri)
|
||||
r, readerErr := u.ContentReaderStream()
|
||||
assert.Nil(t, readerErr)
|
||||
assert.NotNil(t, r)
|
||||
|
||||
doc := folio.DocumentRegistry.NewDocument(folio.URI(uri))
|
||||
assert.Nil(t, doc.LoadReader(r, codec.FormatYaml))
|
||||
|
||||
imported := c.Documents[index]
|
||||
assert.NotNil(t, imported)
|
||||
assert.Equal(t, uri, imported.GetURI())
|
||||
assert.Equal(t, doc.Len(), imported.Len())
|
||||
}
|
||||
}
|
||||
|
||||
// jx import --resource <resource>
|
||||
func TestClientImportResource(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
c := NewClient()
|
||||
assert.NotNil(t, c)
|
||||
|
||||
importResources := []string{
|
||||
"file://../../COPYRIGHT",
|
||||
}
|
||||
|
||||
for _, uri := range importResources {
|
||||
assert.Nil(t, c.ImportResource(ctx, uri))
|
||||
}
|
||||
|
||||
imported := c.Documents[0]
|
||||
assert.NotNil(t, imported)
|
||||
for _, uri := range importResources {
|
||||
assert.NotNil(t, imported.(*folio.Document).GetResource(uri))
|
||||
}
|
||||
}
|
||||
|
||||
func TestClientEmit(t *testing.T) {
|
||||
//ctx := context.Background()
|
||||
c := NewClient()
|
||||
assert.NotNil(t, c)
|
||||
|
||||
importDocuments := []string{
|
||||
"file://../../examples/file.jx.yaml",
|
||||
"file://../../examples/user.jx.yaml",
|
||||
}
|
||||
|
||||
assert.Nil(t, c.Import(importDocuments))
|
||||
targetFile := TempDir.FilePath("jx_emit_output.jx.yaml")
|
||||
targetFileURI := fmt.Sprintf("file://%s", targetFile)
|
||||
assert.Nil(t, c.SetOutput(targetFile))
|
||||
assert.Nil(t, c.Emit())
|
||||
|
||||
assert.FileExists(t, targetFile)
|
||||
|
||||
|
||||
u := folio.URI(targetFileURI)
|
||||
r, readerErr := u.ContentReaderStream()
|
||||
assert.Nil(t, readerErr)
|
||||
assert.NotNil(t, r)
|
||||
|
||||
extractor, err := folio.DocumentRegistry.ConverterTypes.New(targetFileURI)
|
||||
assert.Nil(t, err)
|
||||
assert.NotNil(t, extractor)
|
||||
|
||||
targetResource, resErr := u.NewResource(nil)
|
||||
assert.Nil(t, resErr)
|
||||
docs, exErr := extractor.(data.ManyExtractor).ExtractMany(targetResource, nil)
|
||||
assert.Nil(t, exErr)
|
||||
|
||||
assert.Equal(t, 2, len(docs))
|
||||
|
||||
assert.Equal(t, 1, docs[1].Len())
|
||||
}
|
||||
|
||||
func BenchmarkClientSystemConfigurations(b *testing.B) {
|
||||
assert.Nil(b, TempDir.Mkdir("benchconfig", 0700))
|
||||
ConfDir := tempdir.Path(TempDir.FilePath("benchconfig"))
|
||||
assert.Nil(b, ConfDir.CreateFile("cfg.jx.yaml", `
|
||||
configurations:
|
||||
- name: files
|
||||
values:
|
||||
prefix: /usr
|
||||
`))
|
||||
|
||||
configDirURI := fmt.Sprintf("file://%s", ConfDir)
|
||||
|
||||
programLevel.Set(slog.LevelError)
|
||||
b.Run("systemconfiguration", func(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
c := NewClient()
|
||||
_ = c.SystemConfiguration(configDirURI)
|
||||
}
|
||||
})
|
||||
programLevel.Set(slog.LevelDebug)
|
||||
}
|
||||
|
||||
func TestClientSystemConfiguration(t *testing.T) {
|
||||
c := NewClient()
|
||||
assert.NotNil(t, c)
|
||||
|
||||
assert.Nil(t, TempDir.Mkdir("config", 0700))
|
||||
|
||||
ConfDir := tempdir.Path(TempDir.FilePath("config"))
|
||||
assert.Nil(t, ConfDir.CreateFile("cfg.jx.yaml", `
|
||||
configurations:
|
||||
- name: files
|
||||
values:
|
||||
prefix: /usr
|
||||
`))
|
||||
|
||||
//configDirURI := fmt.Sprintf("file://%s", ConfDir)
|
||||
configErr := c.SystemConfiguration(string(ConfDir))
|
||||
assert.Nil(t, configErr)
|
||||
|
||||
assert.NotNil(t, c.Config)
|
||||
|
||||
slog.Info("TestClientSystemConfiguration", "config", c.Config)
|
||||
cfg := c.Config.GetConfig("files")
|
||||
assert.NotNil(t, cfg)
|
||||
|
||||
value, valueErr := cfg.GetValue("prefix")
|
||||
assert.Nil(t, valueErr)
|
||||
assert.Equal(t, "/usr", value.(string))
|
||||
}
|
||||
|
||||
func TestClientApply(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
c := NewClient()
|
||||
assert.NotNil(t, c)
|
||||
|
||||
assert.Nil(t, TempDir.Mkdir("apply", 0700))
|
||||
ApplyDir := tempdir.Path(TempDir.FilePath("apply"))
|
||||
|
||||
DocSource := ApplyDir.FilePath("res.jx.yaml")
|
||||
TestFile := ApplyDir.FilePath("testfile.txt")
|
||||
assert.Nil(t, ApplyDir.CreateFile("res.jx.yaml", fmt.Sprintf(`
|
||||
resources:
|
||||
- type: file
|
||||
transition: create
|
||||
attributes:
|
||||
path: %s
|
||||
content: |
|
||||
a test string
|
||||
owner: %s
|
||||
group: %s
|
||||
mode: 0644
|
||||
`, TestFile, ProcessTestUserName, ProcessTestGroupName)))
|
||||
|
||||
|
||||
assert.Nil(t, c.Import([]string{DocSource}))
|
||||
assert.Nil(t, c.LoadDocumentImports())
|
||||
|
||||
assert.Nil(t, c.Apply(ctx, false))
|
||||
|
||||
assert.FileExists(t, TestFile)
|
||||
|
||||
assert.Nil(t, c.Apply(ctx, true))
|
||||
|
||||
assert.NoFileExists(t, TestFile)
|
||||
}
|
||||
|
||||
|
||||
|
||||
var tarArchiveBuffer bytes.Buffer
|
||||
|
||||
func TarArchive(compress bool) (err error) {
|
||||
var fileWriter io.WriteCloser
|
||||
|
||||
if compress {
|
||||
gz := gzip.NewWriter(&tarArchiveBuffer)
|
||||
defer gz.Close()
|
||||
fileWriter = gz
|
||||
} else {
|
||||
fileWriter = ext.WriteNopCloser(&tarArchiveBuffer)
|
||||
}
|
||||
|
||||
tw := tar.NewWriter(fileWriter)
|
||||
|
||||
fileContent := "test file content"
|
||||
|
||||
if err = tw.WriteHeader(&tar.Header{
|
||||
Name: "testfile",
|
||||
Mode: 0600,
|
||||
Size: int64(len(fileContent)),
|
||||
}); err == nil {
|
||||
_, err = tw.Write([]byte(fileContent))
|
||||
}
|
||||
tw.Close()
|
||||
return
|
||||
}
|
||||
|
||||
func TestClientConverters(t *testing.T) {
|
||||
for _, v := range []struct { Expected data.TypeName; URI string } {
|
||||
{ Expected: data.TypeName("dir"), URI: "file:///tmp" },
|
||||
{ Expected: data.TypeName("http"), URI: "https://localhost/test" },
|
||||
{ Expected: data.TypeName("iptable"), URI: "iptable://filter/INPUT" },
|
||||
{ Expected: data.TypeName("jx"), URI: "file:///tmp/test.jx.yaml" },
|
||||
{ Expected: data.TypeName("package"), URI: "package://" },
|
||||
{ Expected: data.TypeName("container"), URI: "container://" },
|
||||
{ Expected: data.TypeName("user"), URI: "user://" },
|
||||
{ Expected: data.TypeName("group"), URI: "group://" },
|
||||
{ Expected: data.TypeName("tar"), URI: "tar://" },
|
||||
{ Expected: data.TypeName("tar"), URI: "file:///tmp/foo.tar" },
|
||||
{ Expected: data.TypeName("tar"), URI: "file:///tmp/foo.tar.gz" },
|
||||
{ Expected: data.TypeName("tar"), URI: "file:///tmp/foo.tgz" },
|
||||
} {
|
||||
c, e := folio.DocumentRegistry.ConverterTypes.New(v.URI)
|
||||
assert.Nil(t, e)
|
||||
assert.NotNil(t, c)
|
||||
assert.Equal(t, v.Expected, c.Type())
|
||||
}
|
||||
}
|
||||
|
||||
func TestClientImportTar(t *testing.T) {
|
||||
c := NewClient()
|
||||
assert.NotNil(t, c)
|
||||
|
||||
e := TarArchive(true)
|
||||
assert.Nil(t, e)
|
||||
assert.Greater(t, tarArchiveBuffer.Len(), 0)
|
||||
|
||||
path, err := TempDir.CreateFileFromReader("test.tar.gz", &tarArchiveBuffer)
|
||||
assert.Nil(t, err)
|
||||
uri := fmt.Sprintf("file://%s", path)
|
||||
|
||||
d := folio.NewDeclaration()
|
||||
assert.Nil(t, d.NewResource(&uri))
|
||||
|
||||
docs, importErr := c.ImportSource(uri)
|
||||
|
||||
assert.Nil(t, importErr)
|
||||
assert.Greater(t, len(docs), 0)
|
||||
}
|
70
internal/codec/decoder.go
Normal file
@ -0,0 +1,70 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package codec

import (
    "encoding/json"
    _ "fmt"
    _ "github.com/xeipuuv/gojsonschema"
    "gopkg.in/yaml.v3"
    "io"
    "log/slog"
    "strings"
    "google.golang.org/protobuf/proto"
)

//type JSONDecoder json.Decoder

type Decoder interface {
    Decode(v any) error
}

func NewDecoder(r io.Reader, format Format) Decoder {
    switch format {
    case FormatYaml:
        return NewYAMLDecoder(r)
    case FormatJson:
        return NewJSONDecoder(r)
    case FormatProtoBuf:
        return NewProtoBufDecoder(r)
    }
    return nil
}

func NewStringDecoder(s string, format Format) Decoder {
    return NewDecoder(strings.NewReader(s), format)
}

func NewJSONDecoder(r io.Reader) Decoder {
    return json.NewDecoder(r)
}

func NewJSONStringDecoder(s string) Decoder {
    return json.NewDecoder(strings.NewReader(s))
}

func NewYAMLDecoder(r io.Reader) Decoder {
    slog.Info("NewYAMLDecoder()", "reader", r)
    return yaml.NewDecoder(r)
}

func NewYAMLStringDecoder(s string) Decoder {
    return yaml.NewDecoder(strings.NewReader(s))
}

type ProtoDecoder struct {
    reader io.Reader
}

func (p *ProtoDecoder) Decode(v any) (err error) {
    var protoData []byte
    protoData, err = io.ReadAll(p.reader)
    if err == nil {
        err = proto.Unmarshal(protoData, v.(proto.Message))
    }
    return
}

func NewProtoBufDecoder(r io.Reader) Decoder {
    return &ProtoDecoder{ reader: r }
}
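A small usage sketch of the decoder constructors above; the User struct mirrors the shape used in the tests that follow and is only for illustration.

```go
package main

import (
    "fmt"
    "strings"

    "decl/internal/codec"
)

type User struct {
    Name string `yaml:"name" json:"name"`
    Home string `yaml:"home" json:"home"`
}

func main() {
    // Pick a decoder by format; FormatYaml selects the yaml.v3 decoder.
    dec := codec.NewDecoder(strings.NewReader("name: jx\nhome: /home/jx\n"), codec.FormatYaml)
    var u User
    if err := dec.Decode(&u); err != nil {
        fmt.Println("decode failed:", err)
        return
    }
    fmt.Println(u.Name, u.Home)
}
```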
134
internal/codec/decoder_test.go
Normal file
@ -0,0 +1,134 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package codec
|
||||
|
||||
import (
|
||||
_ "fmt"
|
||||
"github.com/stretchr/testify/assert"
|
||||
_ "log"
|
||||
"strings"
|
||||
"testing"
|
||||
"github.com/xeipuuv/gojsonschema"
|
||||
"io"
|
||||
"bytes"
|
||||
"google.golang.org/protobuf/proto"
|
||||
)
|
||||
|
||||
type TestUser struct {
|
||||
Name string `json:"name" yaml:"name" protobuf:"bytes,1,opt,name=name"`
|
||||
Uid string `json:"uid" yaml:"uid" protobuf:"bytes,2,opt,name=uid"`
|
||||
Group string `json:"group" yaml:"group" protobuf:"bytes,3,opt,name=group"`
|
||||
Home string `json:"home" yaml:"home" protobuf:"bytes,4,opt,name=home"`
|
||||
State string `json:"state" yaml:"state" protobuf:"bytes,5,opt,name=state"`
|
||||
}
|
||||
|
||||
func TestNewYAMLDecoder(t *testing.T) {
|
||||
e := NewYAMLDecoder(strings.NewReader(""))
|
||||
assert.NotNil(t, e)
|
||||
}
|
||||
|
||||
func TestNewDecoderDecodeJSON(t *testing.T) {
|
||||
schema:=`
|
||||
{
|
||||
"$id": "user",
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "user",
|
||||
"type": "object",
|
||||
"required": [ "name" ],
|
||||
"properties": {
|
||||
"path": {
|
||||
"type": "string",
|
||||
"description": "user name",
|
||||
"minLength": 1
|
||||
}
|
||||
}
|
||||
}
|
||||
`
|
||||
decl := `{
|
||||
"name": "testuser",
|
||||
"uid": "12001",
|
||||
"group": "12001",
|
||||
"home": "/home/testuser",
|
||||
"state": "present"
|
||||
}`
|
||||
|
||||
jsonReader := strings.NewReader(decl)
|
||||
user := &TestUser{}
|
||||
|
||||
e := NewJSONDecoder(jsonReader)
|
||||
assert.NotNil(t, e)
|
||||
docErr := e.Decode(user)
|
||||
assert.Nil(t, docErr)
|
||||
|
||||
schemaLoader := gojsonschema.NewStringLoader(schema)
|
||||
loader := gojsonschema.NewStringLoader(decl)
|
||||
result, validateErr := gojsonschema.Validate(schemaLoader, loader)
|
||||
assert.True(t, result.Valid())
|
||||
assert.Nil(t, validateErr)
|
||||
}
|
||||
|
||||
func TestNewJSONStringDecoder(t *testing.T) {
|
||||
decl := `{
|
||||
"name": "testuser",
|
||||
"uid": "12001",
|
||||
"group": "12001",
|
||||
"home": "/home/testuser",
|
||||
"state": "present"
|
||||
}`
|
||||
|
||||
e := NewJSONStringDecoder(decl)
|
||||
assert.NotNil(t, e)
|
||||
docErr := e.Decode(&TestUser{})
|
||||
assert.Nil(t, docErr)
|
||||
}
|
||||
|
||||
func TestNewDecoder(t *testing.T) {
|
||||
pbData, err := proto.Marshal(&TestPBUser{ Name: "pb", Uid: "15001", Group: "15005", Home: "/home/pb", State: "present" })
|
||||
assert.Nil(t, err)
|
||||
for _, v := range []struct{ reader io.Reader; format Format; expectedhome string } {
|
||||
{ reader: strings.NewReader(`{
|
||||
"name": "testuser",
|
||||
"uid": "12001",
|
||||
"group": "12001",
|
||||
"home": "/home/testuser",
|
||||
"state": "present" }`), format: FormatJson, expectedhome: "/home/testuser" },
|
||||
{ reader: strings.NewReader(`
|
||||
name: "testuser"
|
||||
uid: "12001"
|
||||
group: "12001"
|
||||
home: "/home/test"
|
||||
state: "present"
|
||||
`), format: FormatYaml, expectedhome: "/home/test" },
|
||||
{ reader: bytes.NewReader(pbData), format: FormatProtoBuf, expectedhome: "/home/pb" },
|
||||
} {
|
||||
|
||||
decoder := NewDecoder(v.reader, v.format)
|
||||
assert.NotNil(t, decoder)
|
||||
u := &TestPBUser{}
|
||||
assert.Nil(t, decoder.Decode(u))
|
||||
assert.Equal(t, v.expectedhome, u.Home )
|
||||
}
|
||||
}
|
||||
|
||||
func TestNewDecoderError(t *testing.T) {
|
||||
pbData, err := proto.Marshal(&TestPBUser{ Name: "pb", Uid: "15001", Group: "15005", Home: "/home/pb", State: "present" })
|
||||
assert.Nil(t, err)
|
||||
|
||||
decoder := NewDecoder(bytes.NewReader(pbData), Format("foo"))
|
||||
assert.Nil(t, decoder)
|
||||
}
|
||||
|
||||
func TestNewStringDecoder(t *testing.T) {
|
||||
jsonDoc := `{
|
||||
"name": "testuser",
|
||||
"uid": "12001",
|
||||
"group": "12001",
|
||||
"home": "/home/testuser",
|
||||
"state": "present" }`
|
||||
decoder := NewStringDecoder(jsonDoc, FormatJson)
|
||||
assert.NotNil(t, decoder)
|
||||
u := &TestUser{}
|
||||
assert.Nil(t, decoder.Decode(u))
|
||||
assert.Equal(t, "testuser", u.Name)
|
||||
|
||||
}
|
76
internal/codec/encoder.go
Normal file
@ -0,0 +1,76 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package codec

import (
    "encoding/json"
    _ "fmt"
    _ "github.com/xeipuuv/gojsonschema"
    "gopkg.in/yaml.v3"
    "io"
    _ "log"
    "errors"
    "google.golang.org/protobuf/proto"
)

var ErrInvalidWriter error = errors.New("Invalid writer")

type JSONEncoder json.Encoder

type Encoder interface {
    Encode(v any) error
    Close() error
}

func NewEncoder(w io.Writer, format Format) Encoder {
    switch format {
    case FormatYaml:
        return NewYAMLEncoder(w)
    case FormatJson:
        return NewJSONEncoder(w)
    case FormatProtoBuf:
        return NewProtoBufEncoder(w)
    }
    return nil
}

func NewJSONEncoder(w io.Writer) Encoder {
    return (*JSONEncoder)(json.NewEncoder(w))
}

func NewYAMLEncoder(w io.Writer) Encoder {
    return yaml.NewEncoder(w)
}

type ProtoEncoder struct {
    writer io.Writer
}

func (p *ProtoEncoder) Encode(v any) (err error) {
    var encoded []byte
    encoded, err = proto.Marshal(v.(proto.Message))
    if err != nil {
        return
    }

    _, err = p.writer.Write(encoded)
    return
}

func (p *ProtoEncoder) Close() error {
    return nil
}

func NewProtoBufEncoder(w io.Writer) Encoder {
    if w != nil {
        return &ProtoEncoder{ writer: w }
    }
    return nil
}

func (j *JSONEncoder) Encode(v any) error {
    return (*json.Encoder)(j).Encode(v)
}
func (j *JSONEncoder) Close() error {
    return nil
}
110
internal/codec/encoder_test.go
Normal file
@ -0,0 +1,110 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package codec
|
||||
|
||||
import (
|
||||
_ "fmt"
|
||||
"github.com/stretchr/testify/assert"
|
||||
_ "log"
|
||||
"strings"
|
||||
"testing"
|
||||
"github.com/xeipuuv/gojsonschema"
|
||||
"io"
|
||||
"bytes"
|
||||
"google.golang.org/protobuf/proto"
|
||||
)
|
||||
|
||||
type TestFile struct {
|
||||
Path string `json:"path" yaml:"path"`
|
||||
}
|
||||
|
||||
func TestNewYAMLEncoder(t *testing.T) {
|
||||
var yamlDoc strings.Builder
|
||||
e := NewYAMLEncoder(&yamlDoc)
|
||||
assert.NotNil(t, e)
|
||||
}
|
||||
|
||||
func TestNewEncoderEncodeJSON(t *testing.T) {
|
||||
schema:=`
|
||||
{
|
||||
"$id": "file",
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "file",
|
||||
"type": "object",
|
||||
"required": [ "path" ],
|
||||
"properties": {
|
||||
"path": {
|
||||
"type": "string",
|
||||
"description": "file path",
|
||||
"minLength": 1
|
||||
}
|
||||
}
|
||||
}
|
||||
`
|
||||
|
||||
var jsonDoc strings.Builder
|
||||
file := &TestFile{}
|
||||
file.Path = "foo"
|
||||
|
||||
e := NewJSONEncoder(&jsonDoc)
|
||||
assert.NotNil(t, e)
|
||||
docErr := e.Encode(file)
|
||||
assert.Nil(t, docErr)
|
||||
|
||||
schemaLoader := gojsonschema.NewStringLoader(schema)
|
||||
loader := gojsonschema.NewStringLoader(jsonDoc.String())
|
||||
result, err := gojsonschema.Validate(schemaLoader, loader)
|
||||
|
||||
assert.Nil(t, err)
|
||||
|
||||
assert.True(t, result.Valid())
|
||||
}
|
||||
|
||||
func TestNewEncoder(t *testing.T) {
|
||||
|
||||
pb := &TestPBUser{ Name: "pb", Uid: "15001", Group: "15005", Home: "/home/pb", State: "present" }
|
||||
jx := &TestUser{ Name: "jx", Uid: "17001", Group: "17005", Home: "/home/jx", State: "present" }
|
||||
|
||||
pbData, pbErr := proto.Marshal(pb)
|
||||
assert.Nil(t, pbErr)
|
||||
|
||||
for _, v := range []struct{ writer io.Writer; testuser any; format Format; expected []byte} {
|
||||
{ writer: &bytes.Buffer{}, testuser: jx, expected: []byte(`{"name":"jx","uid":"17001","group":"17005","home":"/home/jx","state":"present"}
|
||||
`), format: FormatJson },
|
||||
{ writer: &bytes.Buffer{}, testuser: jx, expected: []byte(`name: jx
|
||||
uid: "17001"
|
||||
group: "17005"
|
||||
home: /home/jx
|
||||
state: present
|
||||
`), format: FormatYaml },
|
||||
{ writer: &bytes.Buffer{}, testuser: pb, expected: pbData , format: FormatProtoBuf },
|
||||
} {
|
||||
encoder := NewEncoder(v.writer, v.format)
|
||||
assert.NotNil(t, encoder)
|
||||
assert.Nil(t, encoder.Encode(v.testuser))
|
||||
assert.Equal(t, string(v.expected), v.writer.(*bytes.Buffer).String())
|
||||
assert.Equal(t, v.expected, v.writer.(*bytes.Buffer).Bytes())
|
||||
assert.Nil(t, encoder.Close())
|
||||
}
|
||||
}
|
||||
|
||||
func TestNewEncoderError(t *testing.T) {
|
||||
encoder := NewEncoder(&strings.Builder{}, Format("foo"))
|
||||
assert.Nil(t, encoder)
|
||||
}
|
||||
|
||||
func TestNewProtobufError(t *testing.T) {
|
||||
encoder := NewProtoBufEncoder(nil)
|
||||
assert.Nil(t, encoder)
|
||||
}
|
||||
|
||||
/*
|
||||
func TestProtobufEncodeError(t *testing.T) {
|
||||
buf := &bytes.Buffer{}
|
||||
buf.Write([]byte("broken input"))
|
||||
|
||||
encoder := NewProtoBufEncoder(buf)
|
||||
assert.NotNil(t, encoder)
|
||||
assert.NotNil(t, encoder.Encode(&TestPBUser{}))
|
||||
}
|
||||
*/
|
179
internal/codec/testuser.pb.go
Normal file
@ -0,0 +1,179 @@
|
||||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// versions:
|
||||
// protoc-gen-go v1.34.2
|
||||
// protoc v3.12.4
|
||||
// source: testuser.proto
|
||||
|
||||
package codec
|
||||
|
||||
import (
|
||||
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
|
||||
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
|
||||
reflect "reflect"
|
||||
sync "sync"
|
||||
)
|
||||
|
||||
const (
|
||||
// Verify that this generated code is sufficiently up-to-date.
|
||||
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
|
||||
// Verify that runtime/protoimpl is sufficiently up-to-date.
|
||||
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
|
||||
)
|
||||
|
||||
type TestPBUser struct {
|
||||
state protoimpl.MessageState
|
||||
sizeCache protoimpl.SizeCache
|
||||
unknownFields protoimpl.UnknownFields
|
||||
|
||||
Name string `protobuf:"bytes,1,opt,name=Name,proto3" json:"Name,omitempty"`
|
||||
Uid string `protobuf:"bytes,2,opt,name=Uid,proto3" json:"Uid,omitempty"`
|
||||
Group string `protobuf:"bytes,3,opt,name=Group,proto3" json:"Group,omitempty"`
|
||||
Home string `protobuf:"bytes,4,opt,name=Home,proto3" json:"Home,omitempty"`
|
||||
State string `protobuf:"bytes,5,opt,name=State,proto3" json:"State,omitempty"`
|
||||
}
|
||||
|
||||
func (x *TestPBUser) Reset() {
|
||||
*x = TestPBUser{}
|
||||
if protoimpl.UnsafeEnabled {
|
||||
mi := &file_testuser_proto_msgTypes[0]
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
}
|
||||
|
||||
func (x *TestPBUser) String() string {
|
||||
return protoimpl.X.MessageStringOf(x)
|
||||
}
|
||||
|
||||
func (*TestPBUser) ProtoMessage() {}
|
||||
|
||||
func (x *TestPBUser) ProtoReflect() protoreflect.Message {
|
||||
mi := &file_testuser_proto_msgTypes[0]
|
||||
if protoimpl.UnsafeEnabled && x != nil {
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
if ms.LoadMessageInfo() == nil {
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
return ms
|
||||
}
|
||||
return mi.MessageOf(x)
|
||||
}
|
||||
|
||||
// Deprecated: Use TestPBUser.ProtoReflect.Descriptor instead.
|
||||
func (*TestPBUser) Descriptor() ([]byte, []int) {
|
||||
return file_testuser_proto_rawDescGZIP(), []int{0}
|
||||
}
|
||||
|
||||
func (x *TestPBUser) GetName() string {
|
||||
if x != nil {
|
||||
return x.Name
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (x *TestPBUser) GetUid() string {
|
||||
if x != nil {
|
||||
return x.Uid
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (x *TestPBUser) GetGroup() string {
|
||||
if x != nil {
|
||||
return x.Group
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (x *TestPBUser) GetHome() string {
|
||||
if x != nil {
|
||||
return x.Home
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (x *TestPBUser) GetState() string {
|
||||
if x != nil {
|
||||
return x.State
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
var File_testuser_proto protoreflect.FileDescriptor
|
||||
|
||||
var file_testuser_proto_rawDesc = []byte{
|
||||
0x0a, 0x0e, 0x74, 0x65, 0x73, 0x74, 0x75, 0x73, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
|
||||
0x12, 0x05, 0x63, 0x6f, 0x64, 0x65, 0x63, 0x22, 0x72, 0x0a, 0x0a, 0x54, 0x65, 0x73, 0x74, 0x50,
|
||||
0x42, 0x55, 0x73, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20,
|
||||
0x01, 0x28, 0x09, 0x52, 0x04, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x55, 0x69, 0x64,
|
||||
0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x55, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x47,
|
||||
0x72, 0x6f, 0x75, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x47, 0x72, 0x6f, 0x75,
|
||||
0x70, 0x12, 0x12, 0x0a, 0x04, 0x48, 0x6f, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52,
|
||||
0x04, 0x48, 0x6f, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x53, 0x74, 0x61, 0x74, 0x65, 0x18, 0x05,
|
||||
0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x53, 0x74, 0x61, 0x74, 0x65, 0x42, 0x15, 0x5a, 0x13, 0x64,
|
||||
0x65, 0x63, 0x6c, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x63, 0x6f, 0x64,
|
||||
0x65, 0x63, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
|
||||
}
|
||||
|
||||
var (
|
||||
file_testuser_proto_rawDescOnce sync.Once
|
||||
file_testuser_proto_rawDescData = file_testuser_proto_rawDesc
|
||||
)
|
||||
|
||||
func file_testuser_proto_rawDescGZIP() []byte {
|
||||
file_testuser_proto_rawDescOnce.Do(func() {
|
||||
file_testuser_proto_rawDescData = protoimpl.X.CompressGZIP(file_testuser_proto_rawDescData)
|
||||
})
|
||||
return file_testuser_proto_rawDescData
|
||||
}
|
||||
|
||||
var file_testuser_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
|
||||
var file_testuser_proto_goTypes = []any{
|
||||
(*TestPBUser)(nil), // 0: codec.TestPBUser
|
||||
}
|
||||
var file_testuser_proto_depIdxs = []int32{
|
||||
0, // [0:0] is the sub-list for method output_type
|
||||
0, // [0:0] is the sub-list for method input_type
|
||||
0, // [0:0] is the sub-list for extension type_name
|
||||
0, // [0:0] is the sub-list for extension extendee
|
||||
0, // [0:0] is the sub-list for field type_name
|
||||
}
|
||||
|
||||
func init() { file_testuser_proto_init() }
|
||||
func file_testuser_proto_init() {
|
||||
if File_testuser_proto != nil {
|
||||
return
|
||||
}
|
||||
if !protoimpl.UnsafeEnabled {
|
||||
file_testuser_proto_msgTypes[0].Exporter = func(v any, i int) any {
|
||||
switch v := v.(*TestPBUser); i {
|
||||
case 0:
|
||||
return &v.state
|
||||
case 1:
|
||||
return &v.sizeCache
|
||||
case 2:
|
||||
return &v.unknownFields
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
type x struct{}
|
||||
out := protoimpl.TypeBuilder{
|
||||
File: protoimpl.DescBuilder{
|
||||
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
|
||||
RawDescriptor: file_testuser_proto_rawDesc,
|
||||
NumEnums: 0,
|
||||
NumMessages: 1,
|
||||
NumExtensions: 0,
|
||||
NumServices: 0,
|
||||
},
|
||||
GoTypes: file_testuser_proto_goTypes,
|
||||
DependencyIndexes: file_testuser_proto_depIdxs,
|
||||
MessageInfos: file_testuser_proto_msgTypes,
|
||||
}.Build()
|
||||
File_testuser_proto = out.File
|
||||
file_testuser_proto_rawDesc = nil
|
||||
file_testuser_proto_goTypes = nil
|
||||
file_testuser_proto_depIdxs = nil
|
||||
}
|
13
internal/codec/testuser.proto
Normal file
@ -0,0 +1,13 @@
syntax = "proto3";
package codec;
option go_package = "decl/internal/codec";

message TestPBUser {
    string Name = 1;
    string Uid = 2;
    string Group = 3;
    string Home = 4;
    string State = 5;
}
87
internal/codec/types.go
Normal file
@ -0,0 +1,87 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package codec

import (
    "io"
    "fmt"
    "errors"
    "encoding/json"
    "gopkg.in/yaml.v3"
)

const (
    FormatYml Format = "yml"
    FormatYaml Format = "yaml"
    FormatJson Format = "json"
    FormatProtoBuf Format = "protobuf"
)

var ErrInvalidFormat error = errors.New("invalid Format value")

type Format string

func (f *Format) Validate() error {
    switch *f {
    case FormatYml, FormatYaml, FormatJson, FormatProtoBuf:
        return nil
    default:
        return fmt.Errorf("%w: %s", ErrInvalidFormat, *f)
    }
}

func (f *Format) Set(value string) (err error) {
    if err = (*Format)(&value).Validate(); err == nil {
        err = f.UnmarshalValue(value)
    }
    return
}

func (f *Format) UnmarshalValue(value string) error {
    switch value {
    case string(FormatYml):
        *f = FormatYaml
    case string(FormatYaml), string(FormatJson), string(FormatProtoBuf):
        *f = Format(value)
    default:
        return ErrInvalidFormat
    }
    return nil
}

func (f *Format) UnmarshalJSON(data []byte) error {
    var s string
    if unmarshalFormatTypeErr := json.Unmarshal(data, &s); unmarshalFormatTypeErr != nil {
        return unmarshalFormatTypeErr
    }
    return f.UnmarshalValue(s)
}

func (f *Format) UnmarshalYAML(value *yaml.Node) error {
    var s string
    if err := value.Decode(&s); err != nil {
        return err
    }
    return f.UnmarshalValue(s)
}

func (f Format) Encoder(w io.Writer) Encoder {
    return NewEncoder(w, f)
}

func (f Format) Decoder(r io.Reader) Decoder {
    return NewDecoder(r, f)
}

func (f Format) StringDecoder(s string) Decoder {
    return NewStringDecoder(s, f)
}

func (f Format) Serialize(object any, w io.Writer) error {
    return f.Encoder(w).Encode(object)
}

func (f Format) Deserialize(r io.Reader, object any) error {
    return f.Decoder(r).Decode(object)
}
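The Format helpers reduce the encoder/decoder pairing to a single call each way. A minimal round-trip sketch, with the map payload and literal strings chosen only for illustration:

```go
package main

import (
    "bytes"
    "fmt"

    "decl/internal/codec"
)

func main() {
    var buf bytes.Buffer
    payload := map[string]string{"prefix": "/usr/local"}

    // Serialize picks the matching Encoder for the format (JSON here).
    if err := codec.FormatJson.Serialize(payload, &buf); err != nil {
        panic(err)
    }

    // StringDecoder goes the other way through the matching Decoder.
    decoded := map[string]string{}
    if err := codec.FormatYaml.StringDecoder("prefix: /usr/local\n").Decode(&decoded); err != nil {
        panic(err)
    }
    fmt.Print(buf.String(), decoded["prefix"], "\n")
}
```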
77
internal/codec/types_test.go
Normal file
@ -0,0 +1,77 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package codec

import (
    _ "fmt"
    "github.com/stretchr/testify/assert"
    _ "log"
    "testing"
    "strings"
    "encoding/json"
)

type TestDec struct {
    FormatType Format `yaml:"formattype" json:"formattype"`
}

func TestFormatType(t *testing.T) {
    yamlData := `
formattype: json
`
    v := &TestDec{}

    dec := NewYAMLStringDecoder(yamlData)
    e := dec.Decode(v)

    assert.Nil(t, e)

    assert.Equal(t, FormatJson, v.FormatType)
}

func TestFormatTypeErr(t *testing.T) {
    yamlData := `
formattype: foo
`

    v := &TestDec{}

    dec := NewYAMLStringDecoder(yamlData)
    e := dec.Decode(v)

    assert.ErrorIs(t, ErrInvalidFormat, e)
}

func TestFormatValidate(t *testing.T) {
    f := FormatYaml
    assert.Nil(t, f.Validate())

    var fail Format = Format("foo")
    assert.ErrorIs(t, fail.Validate(), ErrInvalidFormat)

    var testFormatSet Format
    assert.Nil(t, testFormatSet.Set("yaml"))

    assert.ErrorIs(t, testFormatSet.Set("yamlv3"), ErrInvalidFormat)
}

func TestFormatCodec(t *testing.T) {
    var output map[string]Format = make(map[string]Format)
    var writer strings.Builder
    encoder := FormatYaml.Encoder(&writer)
    assert.NotNil(t, encoder)

    decoder := FormatYaml.Decoder(strings.NewReader("formattype: json"))
    assert.Nil(t, decoder.Decode(output))
    assert.Equal(t, FormatJson, output["formattype"])
}

func TestFormatUnmarshal(t *testing.T) {
    var f Format
    assert.Nil(t, json.Unmarshal([]byte("\"yaml\""), &f))
    assert.Equal(t, FormatYaml, f)
    assert.NotNil(t, json.Unmarshal([]byte("\"yaml"), &f))

    assert.Nil(t, json.Unmarshal([]byte("\"yml\""), &f))
    assert.Equal(t, FormatYaml, f)
}
223
internal/command/command.go
Normal file
@ -0,0 +1,223 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package command
|
||||
|
||||
import (
|
||||
_ "context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"errors"
|
||||
"gopkg.in/yaml.v3"
|
||||
"io"
|
||||
"log/slog"
|
||||
_ "net/url"
|
||||
"os"
|
||||
"os/exec"
|
||||
"strings"
|
||||
"text/template"
|
||||
"decl/internal/codec"
|
||||
"syscall"
|
||||
)
|
||||
|
||||
var ErrUnknownCommand error = errors.New("Unable to find command in path")
|
||||
|
||||
type CommandExecutor func(value any) ([]byte, error)
|
||||
type CommandExtractAttributes func(output []byte, target any) error
|
||||
type CommandExists func() error
|
||||
|
||||
type CommandArg string
|
||||
|
||||
type CommandInput string
|
||||
|
||||
type Command struct {
|
||||
Path string `json:"path" yaml:"path"`
|
||||
Args []CommandArg `json:"args" yaml:"args"`
|
||||
Env []string `json:"env" yaml:"env"`
|
||||
Split bool `json:"split" yaml:"split"`
|
||||
FailOnError bool `json:"failonerror" yaml:"failonerror"`
|
||||
StdinAvailable bool `json:"stdinavailable,omitempty" yaml:"stdinavailable,omitempty"`
|
||||
ExitCode int `json:"exitcode,omitempty" yaml:"exitcode,omitempty"`
|
||||
Stdout string `json:"stdout,omitempty" yaml:"stdout,omitempty"`
|
||||
Stderr string `json:"stderr,omitempty" yaml:"stderr,omitempty"`
|
||||
Executor CommandExecutor `json:"-" yaml:"-"`
|
||||
Extractor CommandExtractAttributes `json:"-" yaml:"-"`
|
||||
CommandExists CommandExists `json:"-" yaml:"-"`
|
||||
Input CommandInput `json:"-" yaml:"-"`
|
||||
stdin io.Reader `json:"-" yaml:"-"`
|
||||
}
|
||||
|
||||
func NewCommand() *Command {
|
||||
c := &Command{ Split: true, FailOnError: true }
|
||||
c.Defaults()
|
||||
return c
|
||||
}
|
||||
|
||||
func (c *Command) ClearOutput() {
|
||||
c.Stdout = ""
|
||||
c.Stderr = ""
|
||||
c.ExitCode = 0
|
||||
}
|
||||
|
||||
func (c *Command) Defaults() {
|
||||
c.ClearOutput()
|
||||
c.Split = true
|
||||
c.FailOnError = true
|
||||
c.CommandExists = func() error {
|
||||
if _, err := exec.LookPath(c.Path); err != nil {
|
||||
return fmt.Errorf("%w - %w", ErrUnknownCommand, err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
c.Executor = func(value any) ([]byte, error) {
|
||||
c.ClearOutput()
|
||||
args, err := c.Template(value)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if inputErr := c.SetInput(value); inputErr != nil {
|
||||
return nil, inputErr
|
||||
}
|
||||
cmd := exec.Command(c.Path, args...)
|
||||
c.SetCmdEnv(cmd)
|
||||
|
||||
if c.stdin != nil {
|
||||
cmd.Stdin = c.stdin
|
||||
}
|
||||
|
||||
slog.Info("execute() - cmd", "path", c.Path, "args", args)
|
||||
output, stdoutPipeErr := cmd.StdoutPipe()
|
||||
if stdoutPipeErr != nil {
|
||||
return nil, stdoutPipeErr
|
||||
}
|
||||
|
||||
stderr, pipeErr := cmd.StderrPipe()
|
||||
if pipeErr != nil {
|
||||
return nil, pipeErr
|
||||
}
|
||||
|
||||
if startErr := cmd.Start(); startErr != nil {
|
||||
return nil, startErr
|
||||
}
|
||||
|
||||
slog.Info("execute() - start", "cmd", cmd)
|
||||
stdOutOutput, _ := io.ReadAll(output)
|
||||
stdErrOutput, _ := io.ReadAll(stderr)
|
||||
if len(stdOutOutput) > 100 {
|
||||
slog.Info("execute() - io", "stdout", string(stdOutOutput[:100]), "stderr", string(stdErrOutput))
|
||||
} else {
|
||||
slog.Info("execute() - io", "stdout", string(stdOutOutput), "stderr", string(stdErrOutput))
|
||||
}
|
||||
waitErr := cmd.Wait()
|
||||
|
||||
c.Stdout = string(stdOutOutput)
|
||||
c.Stderr = string(stdErrOutput)
|
||||
c.ExitCode = c.GetExitCodeFromError(waitErr)
|
||||
|
||||
if len(stdOutOutput) > 100 {
|
||||
slog.Info("execute()", "path", c.Path, "args", args, "output", string(stdOutOutput[:100]), "error", string(stdErrOutput))
|
||||
} else {
|
||||
slog.Info("execute()", "path", c.Path, "args", args, "output", string(stdOutOutput), "error", string(stdErrOutput))
|
||||
}
|
||||
|
||||
if len(stdErrOutput) > 0 && c.FailOnError {
|
||||
return stdOutOutput, fmt.Errorf("%w %s", waitErr, string(stdErrOutput))
|
||||
}
|
||||
return stdOutOutput, waitErr
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Command) Load(r io.Reader) error {
|
||||
return codec.NewYAMLDecoder(r).Decode(c)
|
||||
}
|
||||
|
||||
func (c *Command) LoadDecl(yamlResourceDeclaration string) error {
|
||||
return codec.NewYAMLStringDecoder(yamlResourceDeclaration).Decode(c)
|
||||
}
|
||||
|
||||
func (c *Command) SetCmdEnv(cmd *exec.Cmd) {
|
||||
cmd.Env = append(os.Environ(), c.Env...)
|
||||
}
|
||||
|
||||
func (c *Command) SetStdinReader(r io.Reader) {
|
||||
if c.StdinAvailable {
|
||||
c.stdin = r
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Command) Exists() bool {
|
||||
return c.CommandExists() == nil
|
||||
}
|
||||
|
||||
func (c *Command) GetExitCodeFromError(err error) (ec int) {
|
||||
if exitErr, ok := err.(*exec.ExitError); ok {
|
||||
if status, ok := exitErr.Sys().(syscall.WaitStatus); ok {
|
||||
return status.ExitStatus()
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (c *Command) Template(value any) ([]string, error) {
|
||||
var args []string = make([]string, 0, len(c.Args) * 2)
|
||||
for i, arg := range c.Args {
|
||||
var commandLineArg strings.Builder
|
||||
err := template.Must(template.New(fmt.Sprintf("arg%d", i)).Parse(string(arg))).Execute(&commandLineArg, value)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if commandLineArg.Len() > 0 {
|
||||
var splitArg []string
|
||||
if c.Split {
|
||||
splitArg = strings.Split(commandLineArg.String(), " ")
|
||||
} else {
|
||||
splitArg = []string{commandLineArg.String()}
|
||||
}
|
||||
slog.Info("Template()", "split", splitArg, "len", len(splitArg))
|
||||
args = append(args, splitArg...)
|
||||
}
|
||||
}
|
||||
|
||||
slog.Info("Template()", "Args", c.Args, "lencargs", len(c.Args), "args", args, "lenargs", len(args), "value", value)
|
||||
return args, nil
|
||||
}
|
||||
|
||||
func (c *Command) Execute(value any) ([]byte, error) {
|
||||
return c.Executor(value)
|
||||
}
|
||||
|
||||
func (c *Command) SetInput(value any) error {
|
||||
if len(c.Input) > 0 {
|
||||
if r, err := c.Input.Template(value); err != nil {
|
||||
return err
|
||||
} else {
|
||||
c.SetStdinReader(strings.NewReader(r.String()))
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *CommandInput) Template(value any) (result strings.Builder, err error) {
|
||||
err = template.Must(template.New("commandInput").Parse(string(*c))).Execute(&result, value)
|
||||
return
|
||||
}
|
||||
|
||||
func (c *CommandArg) UnmarshalValue(value string) error {
|
||||
*c = CommandArg(value)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *CommandArg) UnmarshalJSON(data []byte) error {
|
||||
var s string
|
||||
if unmarshalRouteTypeErr := json.Unmarshal(data, &s); unmarshalRouteTypeErr != nil {
|
||||
return unmarshalRouteTypeErr
|
||||
}
|
||||
return c.UnmarshalValue(s)
|
||||
}
|
||||
|
||||
func (c *CommandArg) UnmarshalYAML(value *yaml.Node) error {
|
||||
var s string
|
||||
if err := value.Decode(&s); err != nil {
|
||||
return err
|
||||
}
|
||||
return c.UnmarshalValue(s)
|
||||
}
|
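Note on the Input field above: it is excluded from the JSON/YAML declaration and is set programmatically; the string is rendered with text/template against the same value handed to Execute() and then wired to the child process's stdin. A minimal sketch, assuming the module path is decl and that cat is on PATH:

package main

import (
	"fmt"

	"decl/internal/command"
)

func main() {
	c := command.NewCommand()
	c.Path = "cat"
	c.StdinAvailable = true

	// Input is templated against the Execute() argument and becomes stdin.
	c.Input = command.CommandInput("hello {{ .Name }}\n")

	out, err := c.Execute(struct{ Name string }{Name: "world"})
	fmt.Println(string(out), err) // "hello world" and a nil error expected
}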
102
internal/command/command_test.go
Normal file
@ -0,0 +1,102 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
|
||||
package command
|
||||
|
||||
import (
|
||||
_ "fmt"
|
||||
"github.com/stretchr/testify/assert"
|
||||
_ "os"
|
||||
_ "strings"
|
||||
"testing"
|
||||
"bytes"
|
||||
)
|
||||
|
||||
func TestNewCommand(t *testing.T) {
|
||||
c := NewCommand()
|
||||
assert.NotNil(t, c)
|
||||
}
|
||||
|
||||
func TestCommandLoad(t *testing.T) {
|
||||
c := NewCommand()
|
||||
assert.NotNil(t, c)
|
||||
|
||||
decl := `
|
||||
path: find
|
||||
args:
|
||||
- "{{ .Path }}"
|
||||
`
|
||||
|
||||
assert.Nil(t, c.LoadDecl(decl))
|
||||
assert.Equal(t, "find", c.Path)
|
||||
}
|
||||
|
||||
func TestCommandTemplate(t *testing.T) {
|
||||
c := NewCommand()
|
||||
assert.NotNil(t, c)
|
||||
|
||||
decl := `
|
||||
path: find
|
||||
args:
|
||||
- "{{ .Path }}"
|
||||
`
|
||||
|
||||
assert.Nil(t, c.LoadDecl(decl))
|
||||
assert.Equal(t, "find", c.Path)
|
||||
assert.Equal(t, 1, len(c.Args))
|
||||
|
||||
f := struct { Path string } {
|
||||
Path: "./",
|
||||
}
|
||||
|
||||
args, templateErr := c.Template(f)
|
||||
assert.Nil(t, templateErr)
|
||||
assert.Equal(t, 1, len(args))
|
||||
|
||||
assert.Equal(t, "./", string(args[0]))
|
||||
|
||||
out, err := c.Execute(f)
|
||||
assert.Nil(t, err)
|
||||
assert.Greater(t, len(out), 0)
|
||||
}
|
||||
|
||||
func TestCommandStdin(t *testing.T) {
|
||||
var expected string = "stdin test data"
|
||||
var stdinBuffer bytes.Buffer
|
||||
stdinBuffer.WriteString(expected)
|
||||
|
||||
c := NewCommand()
|
||||
assert.NotNil(t, c)
|
||||
|
||||
decl := `
|
||||
path: cat
|
||||
stdinavailable: true
|
||||
`
|
||||
|
||||
assert.Nil(t, c.LoadDecl(decl))
|
||||
assert.Equal(t, "cat", c.Path)
|
||||
|
||||
c.SetStdinReader(&stdinBuffer)
|
||||
out, err := c.Execute(nil)
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, expected, string(out))
|
||||
}
|
||||
|
||||
func TestCommandExitCode(t *testing.T) {
|
||||
c := NewCommand()
|
||||
assert.NotNil(t, c)
|
||||
decl := `
|
||||
path: ls
|
||||
args:
|
||||
- "amissingfile"
|
||||
`
|
||||
|
||||
assert.Nil(t, c.LoadDecl(decl))
|
||||
assert.Equal(t, "ls", c.Path)
|
||||
|
||||
out, err := c.Execute(nil)
|
||||
assert.NotNil(t, err)
|
||||
assert.Greater(t, c.ExitCode, 0)
|
||||
assert.Equal(t, string(out), c.Stdout)
|
||||
assert.Equal(t, string("ls: amissingfile: No such file or directory\n"), c.Stderr)
|
||||
}
|
103
internal/config/certificate.go
Normal file
@ -0,0 +1,103 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"decl/internal/codec"
|
||||
"decl/internal/data"
|
||||
"decl/internal/folio"
|
||||
"encoding/json"
|
||||
"gopkg.in/yaml.v3"
|
||||
"crypto/x509"
|
||||
)
|
||||
|
||||
func init() {
|
||||
folio.DocumentRegistry.ConfigurationTypes.Register([]string{"certificate"}, func(u *url.URL) data.Configuration {
|
||||
c := NewCertificate()
|
||||
return c
|
||||
})
|
||||
}
|
||||
|
||||
type Certificate map[string]*x509.Certificate
|
||||
|
||||
func NewCertificate() *Certificate {
|
||||
c := make(Certificate)
|
||||
return &c
|
||||
}
|
||||
|
||||
func (c *Certificate) URI() string {
|
||||
return fmt.Sprintf("%s://%s", c.Type(), "")
|
||||
}
|
||||
|
||||
func (c *Certificate) SetURI(uri string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *Certificate) SetParsedURI(uri data.URIParser) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *Certificate) Read(ctx context.Context) ([]byte, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (c *Certificate) Load(r io.Reader) (err error) {
|
||||
err = codec.NewYAMLDecoder(r).Decode(c)
|
||||
if err == nil {
|
||||
_, err = c.Read(context.Background())
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (c *Certificate) LoadYAML(yamlData string) (err error) {
|
||||
err = codec.NewYAMLStringDecoder(yamlData).Decode(c)
|
||||
if err == nil {
|
||||
_, err = c.Read(context.Background())
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (c *Certificate) UnmarshalJSON(data []byte) error {
|
||||
if unmarshalErr := json.Unmarshal(data, c); unmarshalErr != nil {
|
||||
return unmarshalErr
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *Certificate) UnmarshalYAML(value *yaml.Node) error {
|
||||
type decodeCertificate Certificate
|
||||
if unmarshalErr := value.Decode((*decodeCertificate)(c)); unmarshalErr != nil {
|
||||
return unmarshalErr
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *Certificate) Clone() data.Configuration {
|
||||
jsonGeneric, _ := json.Marshal(c)
|
||||
clone := NewCertificate()
|
||||
if unmarshalErr := json.Unmarshal(jsonGeneric, &clone); unmarshalErr != nil {
|
||||
panic(unmarshalErr)
|
||||
}
|
||||
return clone
|
||||
}
|
||||
|
||||
func (c *Certificate) Type() string {
|
||||
return "certificate"
|
||||
}
|
||||
|
||||
func (c *Certificate) GetValue(name string) (result any, err error) {
|
||||
var ok bool
|
||||
if result, ok = (*c)[name]; !ok {
|
||||
err = data.ErrUnknownConfigurationKey
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (c *Certificate) Has(key string) (ok bool) {
|
||||
_, ok = (*c)[key]
|
||||
return
|
||||
}
|
33
internal/config/certificate_test.go
Normal file
@ -0,0 +1,33 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package config
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"testing"
|
||||
"crypto/x509"
|
||||
)
|
||||
|
||||
func TestNewCertificateConfig(t *testing.T) {
|
||||
c := NewCertificate()
|
||||
assert.NotNil(t, c)
|
||||
}
|
||||
|
||||
func TestNewCertificateConfigYAML(t *testing.T) {
|
||||
c := NewCertificate()
|
||||
assert.NotNil(t, c)
|
||||
|
||||
config := `
|
||||
catemplate:
|
||||
subject:
|
||||
organization:
|
||||
- RKH
|
||||
notbefore: 2024-07-10
|
||||
`
|
||||
|
||||
yamlErr := c.LoadYAML(config)
|
||||
assert.Nil(t, yamlErr)
|
||||
crt, err := c.GetValue("catemplate")
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, []string{"RKH"}, crt.(*x509.Certificate).Subject.Organization)
|
||||
}
|
9
internal/config/configs/facter.yaml
Normal file
@ -0,0 +1,9 @@
configurations:
  - name: facts
    type: exec
    values:
      path: /usr/bin/facter
      args:
        - "-j"
      format: "json"
140
internal/config/exec.go
Normal file
@ -0,0 +1,140 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"decl/internal/codec"
|
||||
"decl/internal/command"
|
||||
"decl/internal/data"
|
||||
"decl/internal/folio"
|
||||
"encoding/json"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
func init() {
|
||||
folio.DocumentRegistry.ConfigurationTypes.Register([]string{"exec"}, func(u *url.URL) data.Configuration {
|
||||
x := NewExec()
|
||||
return x
|
||||
})
|
||||
}
|
||||
|
||||
type Exec struct {
|
||||
Path string `yaml:"path" json:"path"`
|
||||
Args []command.CommandArg `yaml:"args" json:"args"`
|
||||
ValuesFormat codec.Format `yaml:"format" json:"format"`
|
||||
Values map[string]any `yaml:"values" json:"values"`
|
||||
ReadCommand *command.Command `yaml:"-" json:"-"`
|
||||
}
|
||||
|
||||
func NewExec() *Exec {
|
||||
x := &Exec{}
|
||||
return x
|
||||
}
|
||||
|
||||
func (x *Exec) SetURI(uri string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (x *Exec) SetParsedURI(uri data.URIParser) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (x *Exec) URI() string {
|
||||
return fmt.Sprintf("%s://%s", x.Type(), x.Path)
|
||||
}
|
||||
|
||||
func (x *Exec) Read(ctx context.Context) ([]byte, error) {
|
||||
out, err := x.ReadCommand.Execute(x)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
exErr := x.ReadCommand.Extractor(out, x)
|
||||
if exErr != nil {
|
||||
return nil, exErr
|
||||
}
|
||||
return nil, exErr
|
||||
}
|
||||
|
||||
func (x *Exec) Load(r io.Reader) (err error) {
|
||||
err = codec.NewYAMLDecoder(r).Decode(x)
|
||||
if err == nil {
|
||||
_, err = x.Read(context.Background())
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (x *Exec) LoadYAML(yamlData string) (err error) {
|
||||
err = codec.NewYAMLStringDecoder(yamlData).Decode(x)
|
||||
if err == nil {
|
||||
_, err = x.Read(context.Background())
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (x *Exec) UnmarshalJSON(data []byte) error {
|
||||
if unmarshalErr := json.Unmarshal(data, x); unmarshalErr != nil {
|
||||
return unmarshalErr
|
||||
}
|
||||
x.NewReadConfigCommand()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (x *Exec) UnmarshalYAML(value *yaml.Node) error {
|
||||
type decodeExec Exec
|
||||
if unmarshalErr := value.Decode((*decodeExec)(x)); unmarshalErr != nil {
|
||||
return unmarshalErr
|
||||
}
|
||||
x.NewReadConfigCommand()
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
func (x *Exec) Clone() data.Configuration {
|
||||
clone := NewExec()
|
||||
clone.Path = x.Path
|
||||
clone.Args = x.Args
|
||||
clone.ValuesFormat = x.ValuesFormat
|
||||
clone.Values = x.Values
|
||||
clone.ReadCommand = x.ReadCommand
|
||||
return clone
|
||||
}
|
||||
|
||||
func (x *Exec) Type() string {
|
||||
return "exec"
|
||||
}
|
||||
|
||||
func (x *Exec) GetValue(name string) (result any, err error) {
|
||||
var ok bool
|
||||
if result, ok = x.Values[name]; !ok {
|
||||
err = data.ErrUnknownConfigurationKey
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (x *Exec) Has(key string) (ok bool) {
|
||||
_, ok = x.Values[key]
|
||||
return
|
||||
}
|
||||
|
||||
func (ex *Exec) NewReadConfigCommand() {
|
||||
ex.ReadCommand = command.NewCommand()
|
||||
ex.ReadCommand.Path = ex.Path
|
||||
ex.ReadCommand.Args = ex.Args
|
||||
|
||||
ex.ReadCommand.Extractor = func(out []byte, target any) error {
|
||||
x := target.(*Exec)
|
||||
switch x.ValuesFormat {
|
||||
case codec.FormatYaml:
|
||||
return codec.NewYAMLStringDecoder(string(out)).Decode(&x.Values)
|
||||
case codec.FormatJson:
|
||||
return codec.NewJSONStringDecoder(string(out)).Decode(&x.Values)
|
||||
case codec.FormatProtoBuf:
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
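The exec configuration type shells out through internal/command and decodes the command's output into Values, which is exactly what the configs/facter.yaml block above declares. A hedged sketch, assuming the module path decl, that /usr/bin/facter is installed, and using "os" purely as an illustrative fact name:

package main

import (
	"fmt"

	"decl/internal/config"
)

func main() {
	x := config.NewExec()

	// LoadYAML decodes the declaration and immediately Read()s it, which
	// runs the command and extracts its JSON output into x.Values.
	if err := x.LoadYAML(`
path: /usr/bin/facter
args:
  - "-j"
format: "json"
`); err != nil {
		panic(err)
	}

	v, err := x.GetValue("os") // illustrative facter key
	fmt.Println(v, err)
}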
86
internal/config/generic.go
Normal file
@ -0,0 +1,86 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"net/url"
|
||||
"fmt"
|
||||
"decl/internal/data"
|
||||
"decl/internal/folio"
|
||||
"decl/internal/codec"
|
||||
"io"
|
||||
)
|
||||
|
||||
func init() {
|
||||
folio.DocumentRegistry.ConfigurationTypes.Register([]string{"generic"}, func(u *url.URL) data.Configuration {
|
||||
g := NewGeneric[any]()
|
||||
return g
|
||||
})
|
||||
}
|
||||
|
||||
type Generic[Value any] map[string]Value
|
||||
|
||||
func NewGeneric[Value any]() *Generic[Value] {
|
||||
g := make(Generic[Value])
|
||||
return &g
|
||||
}
|
||||
|
||||
func (g *Generic[Value]) URI() string {
|
||||
return fmt.Sprintf("%s://%s", g.Type(), "")
|
||||
}
|
||||
|
||||
func (g *Generic[Value]) SetURI(uri string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Generic[Value]) SetParsedURI(uri data.URIParser) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Generic[Value]) Load(r io.Reader) (err error) {
|
||||
err = codec.NewYAMLDecoder(r).Decode(g)
|
||||
if err == nil {
|
||||
_, err = g.Read(context.Background())
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (g *Generic[Value]) LoadYAML(yamlData string) (err error) {
|
||||
err = codec.NewYAMLStringDecoder(yamlData).Decode(g)
|
||||
if err == nil {
|
||||
_, err = g.Read(context.Background())
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (g *Generic[Value]) Clone() data.Configuration {
|
||||
jsonGeneric, _ := json.Marshal(g)
|
||||
clone := NewGeneric[Value]()
|
||||
if unmarshalErr := json.Unmarshal(jsonGeneric, clone); unmarshalErr != nil {
|
||||
panic(unmarshalErr)
|
||||
}
|
||||
return clone
|
||||
}
|
||||
|
||||
func (g *Generic[Value]) Type() string {
|
||||
return "generic"
|
||||
}
|
||||
|
||||
func (g *Generic[Value]) Read(context.Context) ([]byte, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (g *Generic[Value]) GetValue(name string) (result any, err error) {
|
||||
var ok bool
|
||||
if result, ok = (*g)[name]; !ok {
|
||||
err = data.ErrUnknownConfigurationKey
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (g *Generic[Value]) Has(key string) (ok bool) {
|
||||
_, ok = (*g)[key]
|
||||
return
|
||||
}
|
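A short usage sketch of the generic key/value configuration (module path decl assumed); arbitrary keys decode straight into the map and are retrieved with GetValue or checked with Has:

package main

import (
	"fmt"

	"decl/internal/config"
)

func main() {
	g := config.NewGeneric[any]()

	if err := g.LoadYAML("bar: quuz\ncount: 3\n"); err != nil {
		panic(err)
	}

	v, err := g.GetValue("bar")
	fmt.Println(v, err)         // quuz <nil>
	fmt.Println(g.Has("count")) // true
}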
@ -1,14 +1,13 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package target
|
||||
package config
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestNewTarSource(t *testing.T) {
|
||||
s := NewTar()
|
||||
assert.NotNil(t, s)
|
||||
func TestNewGenericConfig(t *testing.T) {
|
||||
g := NewGeneric[any]()
|
||||
assert.NotNil(t, g)
|
||||
}
|
||||
|
63
internal/config/schema.go
Normal file
@ -0,0 +1,63 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package config
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/xeipuuv/gojsonschema"
|
||||
"strings"
|
||||
"embed"
|
||||
"net/http"
|
||||
"log/slog"
|
||||
"decl/internal/folio"
|
||||
)
|
||||
|
||||
//go:embed schemas/*.schema.json
|
||||
var schemaFiles embed.FS
|
||||
|
||||
var schemaFilesUri folio.URI = "file://config/schemas/*.schema.json"
|
||||
|
||||
func init() {
|
||||
folio.DocumentRegistry.Schemas[schemaFilesUri] = schemaFiles
|
||||
folio.DocumentRegistry.DefaultSchema = schemaFilesUri
|
||||
}
|
||||
|
||||
type Schema struct {
|
||||
schema gojsonschema.JSONLoader
|
||||
}
|
||||
|
||||
func NewSchema(name string) *Schema {
|
||||
path := fmt.Sprintf("file://schemas/%s.schema.json", name)
|
||||
|
||||
return &Schema{schema: gojsonschema.NewReferenceLoaderFileSystem(path, http.FS(schemaFiles))}
|
||||
}
|
||||
|
||||
func (s *Schema) Validate(source string) error {
|
||||
loader := gojsonschema.NewStringLoader(source)
|
||||
result, err := gojsonschema.Validate(s.schema, loader)
|
||||
|
||||
if err != nil {
|
||||
slog.Info("schema error", "source", source, "schema", s.schema, "result", result, "err", err)
|
||||
return err
|
||||
}
|
||||
slog.Info("schema", "source", source, "schema", s.schema, "result", result, "err", err)
|
||||
|
||||
if !result.Valid() {
|
||||
schemaErrors := strings.Builder{}
|
||||
for _, err := range result.Errors() {
|
||||
schemaErrors.WriteString(err.String() + "\n")
|
||||
}
|
||||
schemaErrors.WriteString(source)
|
||||
return errors.New(schemaErrors.String())
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Schema) ValidateSchema() error {
|
||||
sl := gojsonschema.NewSchemaLoader()
|
||||
sl.Validate = true
|
||||
schemaErr := sl.AddSchemas(s.schema)
|
||||
slog.Info("validate schema definition", "schemaloader", sl, "err", schemaErr)
|
||||
return schemaErr
|
||||
}
|
47
internal/config/schema_test.go
Normal file
@ -0,0 +1,47 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package config
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestNewSchema(t *testing.T) {
|
||||
s := NewSchema("document")
|
||||
assert.NotEqual(t, nil, s)
|
||||
}
|
||||
|
||||
func TestSchemaValidateJSON(t *testing.T) {
|
||||
// ctx := context.Background()
|
||||
s := NewSchema("block")
|
||||
assert.NotNil(t, s)
|
||||
|
||||
assert.Nil(t, s.ValidateSchema())
|
||||
|
||||
configBlockYaml := `
|
||||
type: "generic"
|
||||
name: "foo"
|
||||
values:
|
||||
bar: quuz
|
||||
`
|
||||
|
||||
testConfig := NewGeneric[any]()
|
||||
|
||||
e := testConfig.LoadYAML(configBlockYaml)
|
||||
assert.Nil(t, e)
|
||||
|
||||
jsonDoc, jsonErr := json.Marshal(testConfig)
|
||||
assert.Nil(t, jsonErr)
|
||||
|
||||
schemaErr := s.Validate(string(jsonDoc))
|
||||
assert.Nil(t, schemaErr)
|
||||
}
|
||||
|
||||
func TestSchemaValidateSchema(t *testing.T) {
|
||||
s := NewSchema("document")
|
||||
assert.NotNil(t, s)
|
||||
|
||||
assert.Nil(t, s.ValidateSchema())
|
||||
}
|
25
internal/config/schemas/block.schema.json
Normal file
@ -0,0 +1,25 @@
|
||||
{
|
||||
"$id": "block.schema.json",
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "block",
|
||||
"type": "object",
|
||||
"required": [ "name", "values" ],
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "Config block name",
|
||||
"minLength": 2
|
||||
},
|
||||
"type": {
|
||||
"type": "string",
|
||||
"description": "Config type name.",
|
||||
"enum": [ "system", "generic", "exec", "certificate" ]
|
||||
},
|
||||
"values": {
|
||||
"oneOf": [
|
||||
{ "type": "object" },
|
||||
{ "$ref": "certificate.schema.json" }
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
62
internal/config/schemas/certificate.schema.json
Normal file
@ -0,0 +1,62 @@
|
||||
{
|
||||
"$id": "certificate.schema.json",
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "certificate",
|
||||
"type": "object",
|
||||
"required": [ "path", "filetype" ],
|
||||
"properties": {
|
||||
"SerialNumber": {
|
||||
"type": "integer",
|
||||
"description": "Serial number",
|
||||
"minLength": 1
|
||||
},
|
||||
"Issuer": {
|
||||
"$ref": "pkixname.schema.json"
|
||||
},
|
||||
"Subject": {
|
||||
"$ref": "pkixname.schema.json"
|
||||
},
|
||||
"NotBefore": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"description": "Cert is not valid before time in YYYY-MM-DDTHH:MM:SS.sssssssssZ format."
|
||||
},
|
||||
"NotAfter": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"description": "Cert is not valid after time in YYYY-MM-DDTHH:MM:SS.sssssssssZ format."
|
||||
},
|
||||
"KeyUsage": {
|
||||
"type": "integer",
|
||||
"enum": [
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
4,
|
||||
5,
|
||||
6,
|
||||
7,
|
||||
8,
|
||||
9
|
||||
],
|
||||
"description": "Actions valid for a key. E.g. 1 = KeyUsageDigitalSignature"
|
||||
},
|
||||
"ExtKeyUsage": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "integer",
|
||||
"minimum": 0,
|
||||
"maximum": 13
|
||||
},
|
||||
"description": "Extended set of actions valid for a key"
|
||||
},
|
||||
"BasicConstraintsValid": {
|
||||
"type": "boolean",
|
||||
"description": "BasicConstraintsValid indicates whether IsCA, MaxPathLen, and MaxPathLenZero are valid"
|
||||
},
|
||||
"IsCA": {
|
||||
"type": "boolean",
|
||||
"description": ""
|
||||
}
|
||||
}
|
||||
}
|
18
internal/config/schemas/config.schema.json
Normal file
@ -0,0 +1,18 @@
|
||||
{
|
||||
"$id": "config.schema.json",
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "config",
|
||||
"type": "object",
|
||||
"required": [ "configurations" ],
|
||||
"properties": {
|
||||
"configurations": {
|
||||
"type": "array",
|
||||
"description": "Configurations list",
|
||||
"items": {
|
||||
"oneOf": [
|
||||
{ "$ref": "block.schema.json" }
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
19
internal/config/schemas/document.schema.json
Normal file
@ -0,0 +1,19 @@
|
||||
{
|
||||
"$id": "document.schema.json",
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "document",
|
||||
"type": "object",
|
||||
"required": [ "configurations" ],
|
||||
"properties": {
|
||||
"configurations": {
|
||||
"type": "array",
|
||||
"description": "Configurations list",
|
||||
"items": {
|
||||
"oneOf": [
|
||||
{ "$ref": "block.schema.json" }
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
65
internal/config/schemas/pkixname.schema.json
Normal file
@ -0,0 +1,65 @@
|
||||
{
|
||||
"$id": "pkixname.schema.json",
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "pkixname",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"Country": {
|
||||
"type": "array",
|
||||
"description": "Country name",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"Organization": {
|
||||
"type": "array",
|
||||
"description": "Organization name",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"OrganizationalUnit": {
|
||||
"type": "array",
|
||||
"description": "Organizational Unit name",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"Locality": {
|
||||
"type": "array",
|
||||
"description": "Locality name",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"Province": {
|
||||
"type": "array",
|
||||
"description": "Province name",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"StreetAddress": {
|
||||
"type": "array",
|
||||
"description": "Street address",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"PostalCode": {
|
||||
"type": "array",
|
||||
"description": "Postal Code",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"SerialNumber": {
|
||||
"type": "string",
|
||||
"description": ""
|
||||
},
|
||||
"CommonName": {
|
||||
"type": "string",
|
||||
"description": "Name"
|
||||
}
|
||||
}
|
||||
}
|
97
internal/config/system.go
Normal file
@ -0,0 +1,97 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"net/url"
|
||||
"fmt"
|
||||
"decl/internal/data"
|
||||
"decl/internal/folio"
|
||||
"runtime"
|
||||
"decl/internal/system"
|
||||
)
|
||||
|
||||
// Collects facts about the system
|
||||
|
||||
var (
|
||||
buildValues = map[string]any{
|
||||
"GOOS": runtime.GOOS,
|
||||
"GOARCH": runtime.GOARCH,
|
||||
}
|
||||
)
|
||||
|
||||
func init() {
|
||||
folio.DocumentRegistry.ConfigurationTypes.Register([]string{"system"}, func(u *url.URL) data.Configuration {
|
||||
s := NewSystem()
|
||||
return s
|
||||
})
|
||||
}
|
||||
|
||||
type System Generic[any]
|
||||
|
||||
func NewSystem() *System {
|
||||
s := make(System)
|
||||
for k, v := range buildValues {
|
||||
s[k] = v
|
||||
}
|
||||
s.CurrentUser()
|
||||
s["importpath"] = []string {
|
||||
"/etc/jx/lib",
|
||||
}
|
||||
return &s
|
||||
}
|
||||
|
||||
func (s *System) CurrentUser() {
|
||||
processUser := system.ProcessUser()
|
||||
processGroup := system.ProcessGroup(processUser)
|
||||
(*s)["user"] = processUser.Username
|
||||
(*s)["gecos"] = processUser.Name
|
||||
(*s)["home"] = processUser.HomeDir
|
||||
(*s)["uid"] = processUser.Uid
|
||||
(*s)["group"] = processGroup.Name
|
||||
(*s)["gid"] = processUser.Gid
|
||||
}
|
||||
|
||||
func (s *System) URI() string {
|
||||
return fmt.Sprintf("%s://%s", s.Type(), "")
|
||||
}
|
||||
|
||||
func (s *System) SetURI(uri string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *System) SetParsedURI(uri data.URIParser) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *System) Clone() data.Configuration {
|
||||
jsonSystem, _ := json.Marshal(s)
|
||||
clone := NewSystem()
|
||||
if unmarshalErr := json.Unmarshal(jsonSystem, clone); unmarshalErr != nil {
|
||||
panic(unmarshalErr)
|
||||
}
|
||||
return clone
|
||||
}
|
||||
|
||||
func (s *System) Has(key string) (ok bool) {
|
||||
_, ok = (*s)[key]
|
||||
return
|
||||
}
|
||||
|
||||
func (s *System) Type() string {
|
||||
return "system"
|
||||
}
|
||||
|
||||
func (s *System) Read(context.Context) ([]byte, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (s *System) GetValue(name string) (result any, err error) {
|
||||
var ok bool
|
||||
if result, ok = (*s)[name]; !ok {
|
||||
err = data.ErrUnknownConfigurationKey
|
||||
}
|
||||
return
|
||||
}
|
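The system configuration is filled in entirely at construction time (build facts plus the current process user), so reading it is just a map lookup. A small sketch, assuming the module path decl:

package main

import (
	"fmt"

	"decl/internal/config"
)

func main() {
	s := config.NewSystem()

	arch, _ := s.GetValue("GOARCH") // build-time fact from runtime.GOARCH
	user, _ := s.GetValue("user")   // current process user
	fmt.Println(arch, user)
}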
20
internal/config/system_test.go
Normal file
@ -0,0 +1,20 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package config
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestNewSystemConfig(t *testing.T) {
|
||||
s := NewSystem()
|
||||
assert.NotNil(t, s)
|
||||
}
|
||||
|
||||
func TestSystemConfig(t *testing.T) {
|
||||
s := NewSystem()
|
||||
assert.NotNil(t, s)
|
||||
|
||||
assert.True(t, s.Has("GOARCH"))
|
||||
}
|
24
internal/data/block.go
Normal file
@ -0,0 +1,24 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package data
|
||||
|
||||
import (
|
||||
"errors"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrConfigUndefinedName = errors.New("Config block is missing a defined name")
|
||||
)
|
||||
|
||||
|
||||
type Block interface {
|
||||
Identifier
|
||||
ConfigurationType() TypeName
|
||||
Loader
|
||||
Validator
|
||||
NewConfiguration(uri *string) error
|
||||
ConfigurationValueGetter
|
||||
Configuration() Configuration
|
||||
Clone() Block
|
||||
}
|
||||
|
29
internal/data/config.go
Normal file
@ -0,0 +1,29 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package data
|
||||
|
||||
import (
|
||||
"errors"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrUnknownConfigurationType = errors.New("Unknown configuration type")
|
||||
ErrUnknownConfigurationKey = errors.New("Unknown configuration key")
|
||||
)
|
||||
|
||||
type ConfigurationValueGetter interface {
|
||||
GetValue(key string) (any, error)
|
||||
}
|
||||
|
||||
type ConfigurationValueChecker interface {
|
||||
Has(key string) bool
|
||||
}
|
||||
|
||||
type Configuration interface {
|
||||
Identifier
|
||||
Type() string
|
||||
Reader
|
||||
ConfigurationValueGetter
|
||||
ConfigurationValueChecker
|
||||
Clone() Configuration
|
||||
}
|
40
internal/data/converter.go
Normal file
@ -0,0 +1,40 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package data
|
||||
|
||||
import (
|
||||
"errors"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrUnsupportedConversion = errors.New("Unsupported conversion")
|
||||
)
|
||||
|
||||
// Convert a resource to a document and a document to a resource
|
||||
|
||||
type Emitter interface {
|
||||
Emit(document Document, filter ElementSelector) (Resource, error)
|
||||
}
|
||||
|
||||
type Extractor interface {
|
||||
Extract(resource Resource, filter ElementSelector) (Document, error)
|
||||
}
|
||||
|
||||
type Converter interface {
|
||||
Typer
|
||||
Emitter
|
||||
Extractor
|
||||
Close() error
|
||||
}
|
||||
|
||||
type ManyExtractor interface {
|
||||
ExtractMany(resource Resource, filter ElementSelector) ([]Document, error)
|
||||
}
|
||||
|
||||
type ManyEmitter interface {
|
||||
EmitMany(documents []Document, filter ElementSelector) (Resource, error)
|
||||
}
|
||||
|
||||
type DirectoryConverter interface {
|
||||
SetRelative(flag bool)
|
||||
}
|
38
internal/data/data.go
Normal file
@ -0,0 +1,38 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package data
|
||||
|
||||
import (
|
||||
"context"
|
||||
)
|
||||
|
||||
type Validator interface {
|
||||
Validate() error
|
||||
}
|
||||
|
||||
type Creator interface {
|
||||
Create(context.Context) error
|
||||
}
|
||||
|
||||
type Reader interface {
|
||||
Read(context.Context) ([]byte, error)
|
||||
}
|
||||
|
||||
type Updater interface {
|
||||
Update(context.Context) error
|
||||
}
|
||||
|
||||
type Deleter interface {
|
||||
Delete(context.Context) error
|
||||
}
|
||||
|
||||
type Info interface {
|
||||
ReadStat() error
|
||||
}
|
||||
|
||||
type Crudder interface {
|
||||
Creator
|
||||
Reader
|
||||
Updater
|
||||
Deleter
|
||||
}
|
76
internal/data/document.go
Normal file
@ -0,0 +1,76 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package data
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"decl/internal/codec"
|
||||
"io"
|
||||
"decl/internal/mapper"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrEmptyDocument error = errors.New("Document contains no resources")
|
||||
)
|
||||
|
||||
type Serializer interface {
|
||||
JSON() ([]byte, error)
|
||||
YAML() ([]byte, error)
|
||||
PB() ([]byte, error)
|
||||
Generate(w io.Writer) (error)
|
||||
}
|
||||
|
||||
type Loader interface {
|
||||
LoadString(string, codec.Format) (error)
|
||||
Load([]byte, codec.Format) (error)
|
||||
LoadReader(io.ReadCloser, codec.Format) (error)
|
||||
}
|
||||
|
||||
type DocumentGetter interface {
|
||||
GetDocument() Document
|
||||
}
|
||||
|
||||
type DocumentStateTransformer interface {
|
||||
Apply(overrideState string) error
|
||||
}
|
||||
|
||||
type Document interface {
|
||||
GetURI() string
|
||||
Serializer
|
||||
Loader
|
||||
Validator
|
||||
mapper.Mapper
|
||||
|
||||
NewResource(uri string) (Resource, error)
|
||||
NewResourceFromParsedURI(uri URIParser) (Resource, error)
|
||||
AddDeclaration(Declaration)
|
||||
AddResourceDeclaration(resourceType string, resourceDeclaration Resource)
|
||||
|
||||
Types() (TypesRegistry[Resource])
|
||||
// Resources() []Declaration
|
||||
|
||||
SetConfig(config Document)
|
||||
ConfigDoc() Document
|
||||
|
||||
HasConfig(string) bool
|
||||
GetConfig(string) Block
|
||||
|
||||
Apply(state string) error
|
||||
Len() int
|
||||
ResolveIds(ctx context.Context)
|
||||
Filter(filter DeclarationSelector) []Declaration
|
||||
Declarations() []Declaration
|
||||
|
||||
CheckConstraints() bool
|
||||
Failures() int
|
||||
|
||||
ImportedDocuments() []Document
|
||||
|
||||
ConfigFilter(filter BlockSelector) []Block
|
||||
AppendConfigurations([]Document)
|
||||
Diff(with Document, output io.Writer) (returnOutput string, diffErr error)
|
||||
DiffState(output io.Writer) (returnOutput string, diffErr error)
|
||||
Clone() Document
|
||||
AddError(error)
|
||||
}
|
50
internal/data/identifier.go
Normal file
@ -0,0 +1,50 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package data
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"net/url"
|
||||
"decl/internal/transport"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrInvalidURI error = errors.New("Invalid URI")
|
||||
)
|
||||
|
||||
type URIParser interface {
|
||||
URL() *url.URL
|
||||
NewResource(document Document) (newResource Resource, err error)
|
||||
ConstructResource(res Resource) (err error)
|
||||
Converter() (converter Converter, err error)
|
||||
Exists() bool
|
||||
|
||||
ContentReaderStream() (*transport.Reader, error)
|
||||
ContentWriterStream() (*transport.Writer, error)
|
||||
|
||||
String() string
|
||||
SetURL(url *url.URL)
|
||||
Extension() (string, string)
|
||||
|
||||
ContentType() string
|
||||
IsEmpty() bool
|
||||
}
|
||||
|
||||
type Identifier interface {
|
||||
URI() string
|
||||
SetParsedURI(URIParser) error
|
||||
}
|
||||
|
||||
type DocumentElement interface {
|
||||
Identifier
|
||||
}
|
||||
|
||||
type Selector[Item comparable] func(r Item) bool
|
||||
|
||||
type ResourceSelector Selector[Resource]
|
||||
|
||||
type DeclarationSelector Selector[Declaration]
|
||||
|
||||
type BlockSelector Selector[Block]
|
||||
|
||||
type ElementSelector Selector[DocumentElement]
|
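The selector aliases are plain predicate functions, so a document filter is just a closure; a minimal sketch of selecting only the file declarations, the same pattern fan/dir.go uses further down:

package example

import "decl/internal/data"

// fileDeclarations keeps only the "file" declarations from a document.
// data.DeclarationSelector is simply func(data.Declaration) bool.
func fileDeclarations(doc data.Document) []data.Declaration {
	return doc.Filter(func(d data.Declaration) bool {
		return d.ResourceType() == "file"
	})
}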
115
internal/data/resource.go
Normal file
@ -0,0 +1,115 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package data
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"decl/internal/mapper"
|
||||
"decl/internal/transport"
|
||||
"gitea.rosskeen.house/rosskeen.house/machine"
|
||||
"io"
|
||||
"io/fs"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrInvalidResource error = errors.New("Invalid resource")
|
||||
)
|
||||
|
||||
type ResourceMapper mapper.Map[string, Declaration]
|
||||
|
||||
type StateTransformer interface {
|
||||
Apply() error
|
||||
}
|
||||
|
||||
// Used by the resource factory to initialize new resources.
|
||||
type ResourceInitializer interface {
|
||||
Init(uri URIParser) error
|
||||
}
|
||||
|
||||
type Resource interface {
|
||||
Identifier
|
||||
Type() string
|
||||
StateMachine() machine.Stater
|
||||
UseConfig(config ConfigurationValueGetter)
|
||||
ResolveId(context.Context) string
|
||||
Loader
|
||||
StateTransformer
|
||||
Crudder
|
||||
Validator
|
||||
Clone() Resource
|
||||
SetResourceMapper(ResourceMapper)
|
||||
}
|
||||
|
||||
type Declaration interface {
|
||||
Identifier
|
||||
ResourceType() TypeName
|
||||
ResolveId(context.Context) string
|
||||
Loader
|
||||
Validator
|
||||
DocumentStateTransformer
|
||||
Resource() Resource
|
||||
Clone() Declaration
|
||||
}
|
||||
|
||||
func NewResourceMapper() ResourceMapper {
|
||||
return mapper.New[string, Declaration]()
|
||||
}
|
||||
|
||||
type ContentHasher interface {
|
||||
Hash() []byte
|
||||
HashHexString() string
|
||||
}
|
||||
|
||||
type ContentIdentifier interface {
|
||||
ContentType() string
|
||||
}
|
||||
|
||||
type ContentReader interface {
|
||||
ContentReaderStream() (*transport.Reader, error)
|
||||
}
|
||||
|
||||
type ContentWriter interface {
|
||||
ContentWriterStream() (*transport.Writer, error)
|
||||
}
|
||||
|
||||
type ContentReadWriter interface {
|
||||
ContentReader
|
||||
ContentWriter
|
||||
}
|
||||
|
||||
type ContentGetter interface {
|
||||
GetContent(w io.Writer) (contentReader io.ReadCloser, err error)
|
||||
}
|
||||
|
||||
type ContentSetter interface {
|
||||
SetContent(r io.Reader) error
|
||||
}
|
||||
|
||||
type ContentGetSetter interface {
|
||||
ContentGetter
|
||||
ContentSetter
|
||||
}
|
||||
|
||||
type FileResource interface {
|
||||
SetBasePath(int)
|
||||
FilePath() string
|
||||
SetFileInfo(fs.FileInfo) error
|
||||
FileInfo() fs.FileInfo
|
||||
ContentGetSetter
|
||||
GetContentSourceRef() string
|
||||
SetContentSourceRef(uri string)
|
||||
SetFS(fs.FS)
|
||||
PathNormalization(bool)
|
||||
NormalizePath() error
|
||||
GetTarget() string
|
||||
SetGzipContent(bool)
|
||||
}
|
||||
|
||||
type Signed interface {
|
||||
Signature() Signature
|
||||
}
|
||||
|
||||
type FileInfoGetter interface {
|
||||
Stat() (fs.FileInfo, error)
|
||||
}
|
12
internal/data/signature.go
Normal file
@ -0,0 +1,12 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package data

import (
)

type Signature interface {
	Verify(ContentHasher) error
	SetHexString(string) error
	String() string
}
50
internal/data/stater.go
Normal file
@ -0,0 +1,50 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package data
|
||||
|
||||
import (
|
||||
"gitea.rosskeen.house/rosskeen.house/machine"
|
||||
)
|
||||
|
||||
func StorageMachine(sub machine.Subscriber) machine.Stater {
|
||||
// start_destroy -> absent -> start_create -> present -> start_destroy
|
||||
stater := machine.New("unknown")
|
||||
stater.AddStates("initialized", "unkonwn", "absent", "start_create", "present", "start_delete", "start_read", "start_update")
|
||||
stater.AddTransition("construct", machine.States("unknown"), "initialized")
|
||||
stater.AddTransition("create", machine.States("unknown", "initialized", "absent"), "start_create")
|
||||
if e := stater.AddSubscription("create", sub); e != nil {
|
||||
return nil
|
||||
}
|
||||
stater.AddTransition("created", machine.States("start_create"), "present")
|
||||
if e := stater.AddSubscription("created", sub); e != nil {
|
||||
return nil
|
||||
}
|
||||
stater.AddTransition("exists", machine.States("unknown", "initialized", "absent"), "present")
|
||||
if e := stater.AddSubscription("exists", sub); e != nil {
|
||||
return nil
|
||||
}
|
||||
stater.AddTransition("notexists", machine.States("*"), "absent")
|
||||
if e := stater.AddSubscription("notexists", sub); e != nil {
|
||||
return nil
|
||||
}
|
||||
stater.AddTransition("read", machine.States("*"), "start_read")
|
||||
if e := stater.AddSubscription("read", sub); e != nil {
|
||||
return nil
|
||||
}
|
||||
stater.AddTransition("state_read", machine.States("start_read"), "present")
|
||||
stater.AddTransition("update", machine.States("*"), "start_update")
|
||||
if e := stater.AddSubscription("update", sub); e != nil {
|
||||
return nil
|
||||
}
|
||||
stater.AddTransition("updated", machine.States("start_update"), "present")
|
||||
stater.AddTransition("delete", machine.States("*"), "start_delete")
|
||||
if e := stater.AddSubscription("delete", sub); e != nil {
|
||||
return nil
|
||||
}
|
||||
stater.AddTransition("deleted", machine.States("start_delete"), "absent")
|
||||
if e := stater.AddSubscription("deleted", sub); e != nil {
|
||||
return nil
|
||||
}
|
||||
return stater
|
||||
}
|
||||
|
25
internal/data/types.go
Normal file
@ -0,0 +1,25 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package data
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
)
|
||||
|
||||
type Factory[Product comparable] func(*url.URL) Product
|
||||
|
||||
type TypesRegistry[Product comparable] interface {
|
||||
New(uri string) (result Product, err error)
|
||||
NewFromParsedURI(uri *url.URL) (result Product, err error)
|
||||
NewFromType(typename string) (result Product, err error)
|
||||
Has(typename string) bool
|
||||
//Get(string) Factory[Product]
|
||||
}
|
||||
|
||||
type TypeName string //`json:"type"`
|
||||
|
||||
func (t TypeName) String() string { return string(t) }
|
||||
|
||||
type Typer interface {
|
||||
Type() TypeName
|
||||
}
|
63
internal/ds/orderedset.go
Normal file
@ -0,0 +1,63 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package ds
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
)
|
||||
|
||||
type OrderedSet[Value comparable] struct {
|
||||
Values []*Value
|
||||
elements map[Value]int
|
||||
}
|
||||
|
||||
func NewOrderedSet[Value comparable]() *OrderedSet[Value] {
|
||||
return &OrderedSet[Value]{ elements: make(map[Value]int), Values: make([]*Value, 0, 10) }
|
||||
}
|
||||
|
||||
func (s *OrderedSet[Value]) Add(value Value) {
|
||||
slog.Info("OrderedSet.Add", "key", value, "s", s)
|
||||
s.Values = append(s.Values, &value)
|
||||
s.elements[value] = len(s.Values)
|
||||
slog.Info("OrderedSet.Add", "key", value, "s", s, "v", &s.Values)
|
||||
}
|
||||
|
||||
func (s *OrderedSet[Value]) Delete(key Value) {
|
||||
slog.Info("OrderedSet.Delete", "key", key, "s", s, "size", len(s.Values))
|
||||
if i, ok := s.elements[key]; ok {
|
||||
i--
|
||||
s.Values[i] = nil
|
||||
delete(s.elements, key)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *OrderedSet[Value]) Contains(value Value) (result bool) {
|
||||
slog.Info("OrderedSet.Contains", "key", value, "s", s, "size", len(s.Values), "v", &s.Values)
|
||||
_, result = s.elements[value]
|
||||
return
|
||||
}
|
||||
|
||||
func (s *OrderedSet[Value]) Len() int {
|
||||
return len(s.elements)
|
||||
}
|
||||
|
||||
func (s *OrderedSet[Value]) AddItems(value []Value) {
|
||||
for _, v := range value {
|
||||
s.Add(v)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *OrderedSet[Value]) Items() []*Value {
|
||||
slog.Info("OrderedSet.Items - start", "s", s)
|
||||
result := make([]*Value, 0, len(s.elements) - 1)
|
||||
for _, v := range s.Values {
|
||||
slog.Info("OrderedSet.Items", "value", v)
|
||||
if v != nil {
|
||||
result = append(result, v)
|
||||
s.elements[*v] = len(result)
|
||||
}
|
||||
}
|
||||
slog.Info("OrderedSet.Items", "s", s, "result", result)
|
||||
s.Values = result
|
||||
return result
|
||||
}
|
51
internal/ds/orderedset_test.go
Normal file
@ -0,0 +1,51 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package ds
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"testing"
|
||||
"log/slog"
|
||||
)
|
||||
|
||||
var (
|
||||
)
|
||||
|
||||
func TestNewOrderedSet(t *testing.T) {
|
||||
s := NewOrderedSet[string]()
|
||||
assert.NotNil(t, s)
|
||||
|
||||
testValues := []string{
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
"quuz",
|
||||
}
|
||||
|
||||
for _,value := range testValues {
|
||||
|
||||
s.Add(value)
|
||||
|
||||
|
||||
slog.Info("TestNewOrderedSet - ADD", "item", value, "s", s)
|
||||
|
||||
assert.True(t, s.Contains(value))
|
||||
slog.Info("TestNewOrderedSet - CONTAINS", "s", s)
|
||||
|
||||
for x, item := range s.Items() {
|
||||
slog.Info("TestNewOrderedSet", "item", item, "s", s)
|
||||
assert.Equal(t, testValues[x], *item)
|
||||
}
|
||||
|
||||
}
|
||||
s.Delete("bar")
|
||||
|
||||
expectedValues := []string {
|
||||
"foo",
|
||||
"baz",
|
||||
"quuz",
|
||||
}
|
||||
for x, item := range s.Items() {
|
||||
assert.Equal(t, expectedValues[x], *item)
|
||||
}
|
||||
}
|
34
internal/ds/set.go
Normal file
@ -0,0 +1,34 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package ds
|
||||
|
||||
import (
|
||||
)
|
||||
|
||||
type Set[Value comparable] map[Value]bool
|
||||
|
||||
func NewSet[Value comparable]() Set[Value] {
|
||||
return make(map[Value]bool)
|
||||
}
|
||||
|
||||
func (s Set[Value]) Add(value Value) {
|
||||
s[value] = true
|
||||
}
|
||||
|
||||
func (s Set[Value]) Delete(value Value) {
|
||||
delete(s, value)
|
||||
}
|
||||
|
||||
func (s Set[Value]) Contains(value Value) bool {
|
||||
return s[value]
|
||||
}
|
||||
|
||||
func (s Set[Value]) Len() int {
|
||||
return len(s)
|
||||
}
|
||||
|
||||
func (s Set[Value]) AddSlice(value []Value) {
|
||||
for _, v := range value {
|
||||
s.Add(v)
|
||||
}
|
||||
}
|
24
internal/ds/set_test.go
Normal file
@ -0,0 +1,24 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package ds
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var (
|
||||
)
|
||||
|
||||
func TestNewSet(t *testing.T) {
|
||||
s := NewSet[string]()
|
||||
assert.NotNil(t, s)
|
||||
s["foo"] = true
|
||||
assert.True(t, s.Contains("foo"))
|
||||
|
||||
s.Add("bar")
|
||||
|
||||
assert.True(t, s.Contains("bar"))
|
||||
|
||||
|
||||
}
|
31
internal/ext/addcloser.go
Normal file
@ -0,0 +1,31 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package ext
|
||||
|
||||
import (
|
||||
"io"
|
||||
)
|
||||
|
||||
type Closer func() error
|
||||
|
||||
func WriteAddCloser(w io.WriteCloser, c Closer) io.WriteCloser {
|
||||
a := writeAddCloser{ WriteCloser: w, AddClose: func() (err error) {
|
||||
if err = w.Close(); err != nil {
|
||||
return
|
||||
}
|
||||
if c != nil {
|
||||
return c()
|
||||
}
|
||||
return
|
||||
} }
|
||||
return a
|
||||
}
|
||||
|
||||
type writeAddCloser struct {
|
||||
io.WriteCloser
|
||||
AddClose Closer
|
||||
}
|
||||
|
||||
func (w writeAddCloser) Close() error {
|
||||
return w.AddClose()
|
||||
}
|
19
internal/ext/addcloser_test.go
Normal file
@ -0,0 +1,19 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package ext
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"strings"
|
||||
_ "fmt"
|
||||
_ "log"
|
||||
)
|
||||
|
||||
func TestNewWriteAddCloser(t *testing.T) {
|
||||
var testWriter strings.Builder
|
||||
|
||||
closer := WriteAddCloser(WriteNopCloser(&testWriter), func() error { testWriter.Write([]byte("foo")); return nil })
|
||||
closer.Close()
|
||||
assert.Equal(t, "foo", testWriter.String())
|
||||
}
|
25
internal/ext/basicreader.go
Normal file
@ -0,0 +1,25 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package ext
|
||||
|
||||
import (
|
||||
"io"
|
||||
)
|
||||
|
||||
// Restrict the underlying io.Reader to only exposed the io.Reader interface.
|
||||
// Removes the io.WriterTo interface.
|
||||
func NewReadCloser(r io.ReadCloser) io.ReadCloser {
|
||||
return basicReadCloser{r}
|
||||
}
|
||||
|
||||
type basicReadCloser struct {
|
||||
io.ReadCloser
|
||||
}
|
||||
|
||||
func NewReader(r io.Reader) io.Reader {
|
||||
return basicReader{r}
|
||||
}
|
||||
|
||||
type basicReader struct {
|
||||
io.Reader
|
||||
}
|
30
internal/ext/basicreader_test.go
Normal file
@ -0,0 +1,30 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package ext
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"strings"
|
||||
_ "fmt"
|
||||
_ "log"
|
||||
"io"
|
||||
)
|
||||
|
||||
func TestNewBasicReader(t *testing.T) {
|
||||
testReader := strings.NewReader("some test data")
|
||||
basicReader := NewReader(testReader)
|
||||
assert.NotNil(t, basicReader)
|
||||
_, ok := basicReader.(io.WriterTo)
|
||||
assert.False(t, ok)
|
||||
}
|
||||
|
||||
func TestNewBasicReadCloser(t *testing.T) {
|
||||
testReader := strings.NewReader("some test data")
|
||||
basicReader := NewReadCloser(io.NopCloser(testReader))
|
||||
assert.NotNil(t, basicReader)
|
||||
_, ok := basicReader.(io.WriterTo)
|
||||
assert.False(t, ok)
|
||||
_, hasCloser := basicReader.(io.Closer)
|
||||
assert.True(t, hasCloser)
|
||||
}
|
26
internal/ext/file.go
Normal file
@ -0,0 +1,26 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package ext
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
type FilePath string
|
||||
|
||||
func (f *FilePath) Exists() bool {
|
||||
_, err := os.Stat(string(*f))
|
||||
return !os.IsNotExist(err)
|
||||
}
|
||||
|
||||
func (f *FilePath) Add(relative string) {
|
||||
newPath := filepath.Join(string(*f), relative)
|
||||
*f = FilePath(newPath)
|
||||
}
|
||||
|
||||
|
||||
func (f FilePath) Abs() FilePath {
|
||||
result, _ := filepath.Abs(string(f))
|
||||
return FilePath(result)
|
||||
}
|
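A quick sketch of the FilePath helper (module path decl assumed); Add joins a relative segment in place and Abs returns the absolute form:

package main

import (
	"fmt"

	"decl/internal/ext"
)

func main() {
	p := ext.FilePath("/etc")
	fmt.Println(p.Exists()) // true on most Unix-like systems

	p.Add("hosts") // p is now /etc/hosts
	fmt.Println(p.Abs())
}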
17
internal/ext/nopwritecloser.go
Normal file
@ -0,0 +1,17 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package ext

import (
	"io"
)

func WriteNopCloser(w io.Writer) io.WriteCloser {
	return writeNopCloser{w}
}

type writeNopCloser struct {
	io.Writer
}

func (writeNopCloser) Close() error { return nil }
21
internal/ext/nopwritecloser_test.go
Normal file
@ -0,0 +1,21 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package ext
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"strings"
|
||||
_ "fmt"
|
||||
_ "log"
|
||||
)
|
||||
|
||||
func TestNewWriteNopCloser(t *testing.T) {
|
||||
var testWriter strings.Builder
|
||||
closer := WriteNopCloser(&testWriter)
|
||||
assert.NotNil(t, closer)
|
||||
_, err := closer.Write([]byte("test data"))
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, "test data", testWriter.String())
|
||||
assert.Nil(t, closer.Close())
|
||||
}
|
16
internal/ext/stringreader.go
Normal file
@ -0,0 +1,16 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package ext

import (
	"io"
	"strings"
)

func NewStringReader(value string) io.Reader {
	return stringReader{strings.NewReader(value)}
}

type stringReader struct {
	io.Reader
}
18
internal/ext/stringreader_test.go
Normal file
@ -0,0 +1,18 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package ext
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"github.com/stretchr/testify/assert"
|
||||
_ "fmt"
|
||||
_ "log"
|
||||
"io"
|
||||
)
|
||||
|
||||
func TestNewStringReader(t *testing.T) {
|
||||
basicReader := NewStringReader("some test data")
|
||||
assert.NotNil(t, basicReader)
|
||||
_, ok := basicReader.(io.WriterTo)
|
||||
assert.False(t, ok)
|
||||
}
|
77
internal/fan/container.go
Normal file
@ -0,0 +1,77 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package fan
|
||||
|
||||
import (
|
||||
"context"
|
||||
_ "encoding/json"
|
||||
"fmt"
|
||||
_ "gopkg.in/yaml.v3"
|
||||
"net/url"
|
||||
_ "path/filepath"
|
||||
"decl/internal/resource"
|
||||
"decl/internal/folio"
|
||||
"decl/internal/data"
|
||||
_ "os"
|
||||
_ "io"
|
||||
"github.com/docker/docker/api/types/container"
|
||||
"github.com/docker/docker/client"
|
||||
"log/slog"
|
||||
)
|
||||
|
||||
type Container struct {
|
||||
apiClient resource.ContainerClient
|
||||
}
|
||||
|
||||
func NewContainer(containerClientApi resource.ContainerClient) *Container {
|
||||
var apiClient resource.ContainerClient = containerClientApi
|
||||
if apiClient == nil {
|
||||
var err error
|
||||
apiClient, err = client.NewClientWithOpts(client.FromEnv, client.WithAPIVersionNegotiation())
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
return &Container{
|
||||
apiClient: apiClient,
|
||||
}
|
||||
}
|
||||
|
||||
func init() {
|
||||
folio.DocumentRegistry.ConverterTypes.Register([]string{"container"}, func(u *url.URL) data.Converter {
|
||||
c := NewContainer(nil)
|
||||
return c
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
func (c *Container) Type() data.TypeName { return "container" }
|
||||
|
||||
func (c *Container) Extract(sourceResource data.Resource, filter data.ElementSelector) (document data.Document, err error) {
|
||||
var extractErr error
|
||||
ctx := context.Background()
|
||||
slog.Info("container source Extract()", "container", c)
|
||||
containers, err := c.apiClient.ContainerList(ctx, container.ListOptions{All: true})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
document = folio.DocumentRegistry.NewDocument(folio.URI(sourceResource.URI()))
|
||||
for _, container := range containers {
|
||||
runningContainer := resource.NewContainer(nil)
|
||||
if inspectErr := runningContainer.Inspect(ctx, container.ID); inspectErr != nil {
|
||||
extractErr = fmt.Errorf("%w: %w", extractErr, inspectErr)
|
||||
}
|
||||
document.(*folio.Document).AddResourceDeclaration("container", runningContainer)
|
||||
}
|
||||
|
||||
return document, extractErr
|
||||
}
|
||||
|
||||
func (c *Container) Emit(document data.Document, filter data.ElementSelector) (resource data.Resource, err error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (c *Container) Close() error {
|
||||
return nil
|
||||
}
|
174
internal/fan/dir.go
Normal file
@ -0,0 +1,174 @@
|
||||
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
|
||||
|
||||
package fan
|
||||
|
||||
import (
|
||||
"context"
|
||||
_ "encoding/json"
|
||||
"fmt"
|
||||
_ "gopkg.in/yaml.v3"
|
||||
"net/url"
|
||||
"path/filepath"
|
||||
"decl/internal/data"
|
||||
"decl/internal/folio"
|
||||
"os"
|
||||
_ "io"
|
||||
"log/slog"
|
||||
"decl/internal/fs"
|
||||
)
|
||||
|
||||
type Dir struct {
|
||||
Path string `yaml:"path" json:"path"`
|
||||
Relative bool `yaml:"relative" json:"relative"`
|
||||
subDirsStack []string `yaml:"-" json:"-"`
|
||||
fs *fs.WalkDir `yaml:"-" json:"-"`
|
||||
}
|
||||
|
||||
func NewDir() *Dir {
|
||||
return &Dir{
|
||||
subDirsStack: make([]string, 0, 100),
|
||||
}
|
||||
}
|
||||
|
||||
func init() {
|
||||
folio.DocumentRegistry.ConverterTypes.Register([]string{"file"}, func(u *url.URL) data.Converter {
|
||||
t := NewDir()
|
||||
t.Path,_ = filepath.Abs(filepath.Join(u.Hostname(), u.Path))
|
||||
t.Relative = false
|
||||
return t
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
func (d *Dir) SetRelative(flag bool) { d.Relative = flag }
|
||||
|
||||
func (d *Dir) Type() data.TypeName { return "dir" }
|
||||
|
||||
func (d *Dir) ExtractDirectory(path string, document data.Document) (err error) {
|
||||
ctx := context.Background()
|
||||
files, readDirErr := os.ReadDir(path)
|
||||
slog.Info("fan.Dir.ExtractDirectory()", "path", path, "error", readDirErr)
|
||||
if readDirErr != nil {
|
||||
return readDirErr
|
||||
}
|
||||
|
||||
for _,file := range files {
|
||||
filePath := filepath.Join(path, file.Name())
|
||||
u := fmt.Sprintf("file://%s", filePath)
|
||||
var f data.Resource
|
||||
if f, err = document.NewResource(u); err != nil {
|
||||
return
|
||||
}
|
||||
if _, err = f.Read(ctx); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
if file.IsDir() {
|
||||
d.subDirsStack = append(d.subDirsStack, filePath)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Dir) isParent(m *map[string]int, path string, containingDirectoryPath string) (newCDP string, cdpCount int) {
|
||||
newCDP = containingDirectoryPath
|
||||
cdpCount = (*m)[containingDirectoryPath]
|
||||
pathLen := len(path)
|
||||
for i, p := range path {
|
||||
if p == '/' || i == pathLen {
|
||||
sPath := path[:i]
|
||||
if len(sPath) > 0 {
|
||||
(*m)[sPath]++
|
||||
superDirCount := (*m)[sPath]
|
||||
if superDirCount >= cdpCount {
|
||||
newCDP = sPath
|
||||
cdpCount = superDirCount
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (d *Dir) LCPath(files []string) (lcPath string) {
|
||||
parentPaths := make(map[string]int)
|
||||
var containingDirectoryPath string
|
||||
for _,filePath := range files {
|
||||
containingDirectoryPath, _ = d.isParent(&parentPaths, filePath, containingDirectoryPath)
|
||||
}
|
||||
lcPath = containingDirectoryPath
|
||||
return
|
||||
}
|
||||
|
||||
func (d *Dir) Emit(document data.Document, filter data.ElementSelector) (resourceTarget data.Resource, err error) {
|
||||
if document == nil || document.Len() <= 0 {
|
||||
return nil, ErrEmptyDocument
|
||||
}
|
||||
|
||||
parentPaths := make(map[string]int)
|
||||
var containingDirectoryPath string
|
||||
for _,res := range document.Filter(func(d data.Declaration) bool {
|
||||
return d.ResourceType() == "file"
|
||||
}) {
|
||||
var f data.FileResource = res.(*folio.Declaration).Attributes.(data.FileResource)
|
||||
var parent string
|
||||
|
||||
if f.FileInfo().IsDir() {
|
||||
parent, err = filepath.Abs(f.FilePath())
|
||||
} else {
|
||||
parent, err = filepath.Abs(filepath.Dir(f.FilePath()))
|
||||
}
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
containingDirectoryPath, _ = d.isParent(&parentPaths, parent, containingDirectoryPath)
|
||||
}
|
||||
|
||||
uri := fmt.Sprintf("file://%s", containingDirectoryPath)
|
||||
|
||||
dirFileDeclaration := folio.NewDeclaration()
|
||||
dirFileDeclaration.Type = "file"
|
||||
if err = dirFileDeclaration.NewResource(&uri); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
resourceTarget = dirFileDeclaration.Attributes
|
||||
return
|
||||
}
|
||||
|
||||
func (d *Dir) Extract(resourceSource data.Resource, filter data.ElementSelector) (document data.Document, err error) {
|
||||
ctx := context.Background()
|
||||
if resourceSource.Type() != "file" {
|
||||
return nil, fmt.Errorf("%w", ErrInvalidResource)
|
||||
}
|
||||
slog.Info("fan.Dir.Extract()", "path", d.Path, "resource", resourceSource)
|
||||
d.Path = resourceSource.(data.FileResource).FilePath()
|
||||
document = folio.DocumentRegistry.NewDocument("")
|
||||
|
||||
d.fs = fs.NewWalkDir(os.DirFS(d.Path), d.Path, func(fsys fs.FS, path string, file fs.DirEntry) (err error) {
|
||||
u := fmt.Sprintf("file://%s", path)
|
||||
slog.Info("Fan.Dir.Extract() WalkDir", "file", u, "root", d.Path)
|
||||
if path != "" {
|
||||
var f data.Resource
|
||||
if f, err = document.NewResource(u); err != nil {
|
||||
return
|
||||
}
|
||||
if d.Relative {
|
||||
f.(data.FileResource).SetBasePath(len(d.Path) + 1)
|
||||
slog.Info("Fan.Dir.Extract() WalkDir Relative", "file", f, "path", path)
|
||||
}
|
||||
slog.Info("Fan.Dir.Extract() WalkDir Resource.Read", "file", f)
|
||||
_, err = f.Read(ctx)
|
||||
}
|
||||
return
|
||||
})
|
||||
|
||||
slog.Info("Fan.Dir.Extract()", "fs", d.fs)
|
||||
err = d.fs.Walk(nil)
|
||||
return
|
||||
}
|
||||
|
||||
func (d *Dir) Close() error {
|
||||
return nil
|
||||
}
|
130 internal/fan/dir_test.go Normal file
@ -0,0 +1,130 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package fan

import (
    "github.com/stretchr/testify/assert"
    "testing"
    "decl/internal/folio"
    "decl/internal/data"
    "log/slog"
    "path/filepath"
    "os"
    "fmt"
)

func TestNewDirSource(t *testing.T) {
    s := NewDir()
    assert.NotNil(t, s)
}

func TestExtractDirectory(t *testing.T) {
    s := NewDir()
    assert.NotNil(t, s)

    document := folio.DocumentRegistry.NewDocument("")
    assert.NotNil(t, document)

    assert.Nil(t, s.ExtractDirectory(TempDir, document))
    assert.Greater(t, 2, document.Len())
}

func TestIsParent(t *testing.T) {
    s := NewDir()
    assert.NotNil(t, s)

    m := map[string]int{
        "/foo/bar": 3,
        "/foo": 1,
    }
    res, count := s.isParent(&m, "/foo/bar/baz/quuz", "/foo/bar")

    assert.Equal(t, "/foo/bar", res)
    assert.Equal(t, 4, count)
    assert.Equal(t, 2, m["/foo"])
}

func TestLCPath(t *testing.T) {
    s := NewDir()
    assert.NotNil(t, s)
    result := s.LCPath([]string{
        "/foo/bar/baz/quuz",
        "/foo/bar/baz/quuz/abc.txt",
        "/foo/bar/baz/quuz/def.txt",
        "/foo/bar/baz/quz/ghi.txt",
        "/foo/bar/kiw",
        "/tmp",
    })
    assert.Equal(t, "/foo/bar", result)

    result = s.LCPath([]string{
        "/foo/bar/baz/quuz",
        "/foo/eer/voo",
        "/foo/bar/baz/quuz/abc.txt",
        "/foo/bar/baz/quuz/def.txt",
        "/foo/bar/baz/quz/ghi.txt",
        "/foo/bar/kiw",
        "/tmp",
        "/usr",
        "/usr/lib",
    })
    assert.Equal(t, "/foo", result)
}

func BenchmarkLCPath(b *testing.B) {
    s := NewDir()
    assert.NotNil(b, s)
    for i := 0; i < b.N; i++ {
        s.LCPath([]string{
            "/foo/bar/baz/quuz",
            "/foo/eer/voo",
            "/foo/bar/baz/quuz/abc.txt",
            "/foo/bar/baz/quuz/def.txt",
            "/foo/bar/baz/quz/ghi.txt",
            "/foo/bar/kiw",
            "/tmp",
            "/usr",
            "/usr/lib",
        })
    }
}

func TestEmit(t *testing.T) {
    s := NewDir()
    assert.NotNil(t, s)

    contextDir, _ := filepath.Abs(filepath.Join(TempDir, "context"))
    etcDir := filepath.Join(contextDir, "etc")
    binDir := filepath.Join(contextDir, "bin")
    usrDir := filepath.Join(contextDir, "usr")
    usrLibDir := filepath.Join(contextDir, "usr/lib")
    usrBinDir := filepath.Join(contextDir, "usr/bin")

    assert.Nil(t, os.Mkdir(contextDir, os.ModePerm))
    assert.Nil(t, os.Mkdir(etcDir, os.ModePerm))
    assert.Nil(t, os.Mkdir(binDir, os.ModePerm))
    assert.Nil(t, os.Mkdir(usrDir, os.ModePerm))
    assert.Nil(t, os.Mkdir(usrLibDir, os.ModePerm))
    assert.Nil(t, os.Mkdir(usrBinDir, os.ModePerm))

    decl := folio.NewDeclaration()
    srcFile := fmt.Sprintf("file://%s", contextDir)
    resErr := decl.NewResource(&srcFile)
    assert.Nil(t, resErr)

    slog.Info("TestEmit()", "file", decl, "res", decl.Attributes)

    document, extractErr := s.Extract(decl.Resource(), nil)
    slog.Info("TestEmit() - Extract", "document", document, "error", extractErr)
    assert.Nil(t, extractErr)
    assert.Greater(t, document.Len(), 4)

    res, emitErr := s.Emit(document, nil)
    slog.Info("TestEmit()", "res", res, "error", emitErr)

    assert.Nil(t, emitErr)
    assert.Equal(t, contextDir, res.(data.FileResource).FilePath())
}
30 internal/fan/fan.go Normal file
@ -0,0 +1,30 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package fan

import (
    "errors"
)

// Convert a resource to a document and a document to a resource
/*
type Emitter interface {
    Emit(document *resource.Document) (resource.Resource, error)
}

type Extracter interface {
    Extract(resource resource.Resource, filter resource.ResourceSelector) (*resource.Document, error)
}

type Converter interface {
    Emitter
    Extracter
}
*/

var (
    ErrInvalidSource error = errors.New("Invalid source")
    ErrInvalidResource error = errors.New("Invalid resource")
    ErrEmptyDocument error = errors.New("Document contains no resources")
)
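The commented-out Emitter/Extracter/Converter interfaces above describe the contract every type in this package implements. Below is a minimal usage sketch, not part of this diff: it assumes a caller inside the decl module (these are internal packages) and that Close() is part of data.Converter, which every converter in this package provides. The registry lookup mirrors TestJxFactory in jx_test.go.

package main

import (
    "fmt"
    "log"

    _ "decl/internal/fan" // register the fan converters via their init() functions
    "decl/internal/folio"
)

func main() {
    // The URI scheme selects the converter factory registered in init();
    // "json://-" resolves to the JxFile converter (see TestJxFactory).
    converter, err := folio.DocumentRegistry.ConverterTypes.New("json://-")
    if err != nil {
        log.Fatal(err)
    }
    defer converter.Close()

    fmt.Println(converter.Type()) // prints "jx"
}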
23 internal/fan/fan_test.go Normal file
@ -0,0 +1,23 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package fan

import (
    "testing"
    "os"
    "log"
)

var TempDir string

func TestMain(m *testing.M) {
    var err error
    TempDir, err = os.MkdirTemp("", "testfan")
    if err != nil || TempDir == "" {
        log.Fatal(err)
    }
    //folio.DocumentRegistry.ResourceTypes = resource.ResourceTypes
    rc := m.Run()
    os.RemoveAll(TempDir)
    os.Exit(rc)
}
76 internal/fan/group.go Normal file
@ -0,0 +1,76 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package fan

import (
    _ "context"
    _ "encoding/json"
    _ "fmt"
    _ "gopkg.in/yaml.v3"
    "net/url"
    _ "path/filepath"
    "decl/internal/resource"
    "decl/internal/folio"
    "decl/internal/data"
    _ "os"
    _ "io"
    "log/slog"
)

type Group struct {
    GroupType resource.GroupType `yaml:"type" json:"type"`
}

func NewGroup() *Group {
    return &Group{ GroupType: resource.SystemGroupType }
}

func init() {
    folio.DocumentRegistry.ConverterTypes.Register([]string{"group"}, func(u *url.URL) data.Converter {
        groupSource := NewGroup()
        groupType := u.Query().Get("type")
        if len(groupType) > 0 {
            groupSource.GroupType = resource.GroupType(groupType)
        }
        return groupSource
    })
}

func (g *Group) Type() data.TypeName { return "group" }

func (g *Group) Extract(sourceResource data.Resource, filter data.ElementSelector) (document data.Document, err error) {
    slog.Info("group source ExtractResources()", "group", g)
    Groups := make([]*resource.Group, 0, 100)
    cmd := g.GroupType.NewReadGroupsCommand()
    if cmd == nil {
        return document, resource.ErrUnsupportedGroupType
    }
    if out, err := cmd.Execute(g); err == nil {
        slog.Info("group source Extract()", "output", out)
        if exErr := cmd.Extractor(out, &Groups); exErr != nil {
            return document, exErr
        }
        document = folio.DocumentRegistry.NewDocument("group://-")
        for _, grp := range Groups {
            if grp == nil {
                grp = resource.NewGroup()
            }
            grp.GroupType = g.GroupType
            document.AddResourceDeclaration("group", grp)
        }
    } else {
        slog.Info("group source ExtractResources()", "output", out, "error", err)
        return document, err
    }
    return document, nil
}

func (g *Group) Emit(document data.Document, filter data.ElementSelector) (resourceTarget data.Resource, err error) {
    return nil, data.ErrUnsupportedConversion
}

func (g *Group) Close() error {
    return nil
}
197 internal/fan/http.go Normal file
@ -0,0 +1,197 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package fan

import (
    _ "context"
    _ "encoding/json"
    "fmt"
    _ "gopkg.in/yaml.v3"
    "net/url"
    _ "net/http"
    _ "path/filepath"
    _ "decl/internal/resource"
    "decl/internal/codec"
    "decl/internal/data"
    "decl/internal/folio"
    _ "os"
    "io"
    "errors"
    "log/slog"
)

type HTTP struct {
    Endpoint folio.URI `yaml:"endpoint,omitempty" json:"endpoint,omitempty"`
    url *url.URL `yaml:"-" json:"-"`
    Format codec.Format `yaml:"format,omitempty" json:"format,omitempty"`

    reader io.ReadCloser `yaml:"-" json:"-"`
    writer io.WriteCloser `yaml:"-" json:"-"`
    decoder codec.Decoder `yaml:"-" json:"-"`
    encoder codec.Encoder `yaml:"-" json:"-"`
    closer func() error `yaml:"-" json:"-"`
    index int `yaml:"-" json:"-"`
    signature data.Signature `yaml:"-" json:"-"`
}

func NewHTTP() *HTTP {
    return &HTTP{ Format: codec.FormatYaml, index: 0, closer: func() error { return nil } }
}

func init() {
    folio.DocumentRegistry.ConverterTypes.Register([]string{"http","https"}, func(u *url.URL) data.Converter {
        t := NewHTTP()
        t.Endpoint = folio.URI(u.String())
        t.url = u
        return t
    })
}

func (h *HTTP) Type() data.TypeName { return "http" }

/*
func (h *HTTP) setencoder(target data.ContentIdentifier) {
    if formatErr := h.Format.Set(target.ContentType()); formatErr != nil {
        h.Format = codec.FormatYaml
        if format, ok := h.url.Query()["format"]; ok {
            if queryFormatErr := h.Format.Set(format[0]); queryFormatErr != nil {
                h.Format = codec.FormatYaml
            }
        }
    }
    if h.encoder == nil {
        h.encoder = codec.NewEncoder(h.writer, h.Format)
    }
}
*/

func (h *HTTP) setdecoder(source data.ContentIdentifier) {
    if h.decoder == nil {
        _ = h.Format.Set(source.ContentType())
        h.decoder = codec.NewDecoder(h.reader, h.Format)
    }
}

func (h *HTTP) Extract(sourceResource data.Resource, filter data.ElementSelector) (document data.Document, err error) {
    if h.index == 0 {
        if sourceResource == nil {
            if len(h.Endpoint) > 0 {
                sourceResource, err = h.Endpoint.NewResource(nil)
            } else {
                return nil, ErrInvalidSource
            }
        }
        slog.Info("HTTP.Extract()", "source", sourceResource, "error", err)
        var jxSourceFile data.FileResource = sourceResource.(data.FileResource)
        h.reader, err = jxSourceFile.(data.ContentGetter).GetContent(nil)
        slog.Info("HTTP.Extract()", "file", h, "error", err)
        if err != nil {
            return
        }
        h.signature = sourceResource.(data.Signed).Signature()
        h.setdecoder(jxSourceFile.(data.ContentIdentifier))
        slog.Info("HTTP.Extract()", "jx", h)
    }

    u := fmt.Sprintf("%s?index=%d", sourceResource.URI(), h.index)
    document = folio.DocumentRegistry.NewDocument(folio.URI(u))
    err = h.decoder.Decode(document)
    slog.Info("HTTP.Extract()", "doc", document, "http", h, "error", err)
    h.index++
    if err != nil {
        return
    }
    if err = document.Validate(); err != nil {
        return
    }

    if h.signature.String() != "" {
        if v, ok := sourceResource.(data.ContentHasher); ok {
            err = h.signature.Verify(v)
        }
    }
    return

    /*
    defer h.Close()
    documentSignature := h.transport.Signature()

    hash := sha256.New()
    sumReadData := iofilter.NewReader(h.transport, func(p []byte, readn int, readerr error) (n int, err error) {
        hash.Write(p)
        return
    })

    decoder := codec.NewYAMLDecoder(sumReadData)
    index := 0
    for {
        doc = folio.DocumentRegistry.NewDocument(folio.URI(u))

        doc := resource.NewDocument()
        e := decoder.Decode(doc)
        if errors.Is(e, io.EOF) {
            break
        }
        if e != nil {
            return documents, e
        }
        if validationErr := doc.Validate(); validationErr != nil {
            return documents, validationErr
        }
        documents = append(documents, doc)
        index++
    }

    if documentSignature != "" {
        sig := &signature.Ident{}
        sigErr := sig.VerifySum(hash.Sum(nil), []byte(documentSignature))
        if sigErr != nil {
            return documents, sigErr
        }
    }
    */
}

func (h *HTTP) ExtractMany(resourceSource data.Resource, filter data.ElementSelector) (documents []data.Document, err error) {
    documents = make([]data.Document, 0, 100)
    defer h.Close()

    h.index = 0
    for {
        var doc data.Document
        if doc, err = h.Extract(resourceSource, filter); err == nil {
            documents = append(documents, doc)
        } else {
            if errors.Is(err, io.EOF) {
                err = nil
                //documents = append(documents, doc)
            }
            break
        }
    }
    slog.Info("HTTP.ExtractMany()", "file", h, "error", err)
    return
}

func (h *HTTP) Emit(document data.Document, filter data.ElementSelector) (resource data.Resource, err error) {
    return nil, nil
}

func (h *HTTP) Close() (err error) {
    /*
    if h.decoder != nil {
        h.decoder.Close()
    }
    */
    if h.encoder != nil {
        h.encoder.Close()
    }
    if h.reader != nil {
        h.reader.Close()
    }
    if h.writer != nil {
        h.writer.Close()
    }
    return
}
@ -1,6 +1,6 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package source
package fan

import (
    "github.com/stretchr/testify/assert"
83 internal/fan/iptable.go Normal file
@ -0,0 +1,83 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package fan

import (
    _ "context"
    _ "encoding/json"
    "fmt"
    _ "gopkg.in/yaml.v3"
    "net/url"
    _ "path/filepath"
    "decl/internal/data"
    "decl/internal/resource"
    "decl/internal/folio"
    _ "os"
    _ "io"
    "strings"
    "log/slog"
)

type Iptable struct {
    Table string `yaml:"table" json:"table"`
    Chain string `yaml:"chain" json:"chain"`
}

func NewIptable() *Iptable {
    return &Iptable{}
}

func init() {
    folio.DocumentRegistry.ConverterTypes.Register([]string{"iptable"}, func(u *url.URL) data.Converter {
        t := NewIptable()
        t.Table = u.Hostname()
        elements := strings.FieldsFunc(u.Path, func(c rune) bool { return c == '/' })
        if len(elements) >= 1 {
            t.Chain = elements[0]
        }
        slog.Info("iptable chain source factory", "table", t, "uri", u, "table", u.Hostname())
        return t
    })
}

func (i *Iptable) Type() data.TypeName { return "iptable" }

func (i *Iptable) Extract(sourceResource data.Resource, filter data.ElementSelector) (document data.Document, err error) {
    slog.Info("fan.Iptable.Extract()", "table", i)

    iptRules := make([]*resource.Iptable, 0, 100)
    cmd := resource.NewIptableReadChainCommand()
    if cmd == nil {
        return document, fmt.Errorf("Iptable read chain: invalid command")
    }

    var out []byte
    if out, err = cmd.Execute(i); err == nil {
        if err = cmd.Extractor(out, &iptRules); err == nil {
            document = folio.DocumentRegistry.NewDocument(folio.URI(sourceResource.URI()))
            for _, rule := range iptRules {
                if rule == nil {
                    rule = resource.NewIptable()
                }
                rule.Table = resource.IptableName(i.Table)
                rule.Chain = resource.IptableChain(i.Chain)
                slog.Info("iptable chain source Extract()", "rule", rule)
                document.(*folio.Document).AddResourceDeclaration("iptable", rule)
            }
        }
    }
    slog.Info("fan.Iptable.Extract()", "output", out, "error", err)
    return document, err
}

func (i *Iptable) Emit(document data.Document, filter data.ElementSelector) (resourceTarget data.Resource, err error) {
    return nil, nil
}

func (i *Iptable) Close() error {
    return nil
}
274 internal/fan/jx.go Normal file
@ -0,0 +1,274 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package fan

import (
    "context"
    _ "encoding/json"
    "fmt"
    _ "gopkg.in/yaml.v3"
    "net/url"
    "path/filepath"
    "decl/internal/codec"
    "decl/internal/folio"
    "decl/internal/data"
    _ "os"
    "io"
    "errors"
    "log/slog"
    "strings"
)

/*
Converts a file containing an encoded (yaml, json, etc.) JX document into a Document by using `Extract` or
`ExtractMany`.
Converts a JX Document structure into a yaml, json, etc. encoded resource.
*/
type JxFile struct {
    Uri folio.URI `yaml:"uri,omitempty" json:"uri,omitempty"`
    url *url.URL `yaml:"-" json:"-"`

    emitResource data.Resource `yaml:"-" json:"-"`

    Path string `yaml:"path" json:"path"`
    Format codec.Format `yaml:"format,omitempty" json:"format,omitempty"`
    reader io.ReadCloser `yaml:"-" json:"-"`
    writer io.WriteCloser `yaml:"-" json:"-"`
    decoder codec.Decoder `yaml:"-" json:"-"`
    encoder codec.Encoder `yaml:"-" json:"-"`
    closer func() error `yaml:"-" json:"-"`
    index int `yaml:"-" json:"-"`
}

func NewJxFile() *JxFile {
    return &JxFile{ Format: codec.FormatYaml, index: 0, closer: func() error { return nil } }
}

func init() {
    folio.DocumentRegistry.ConverterTypes.Register([]string{"decl", "jx", "yaml", "yml", "json"}, func(u *url.URL) data.Converter {
        j := NewJxFile()
        j.SetURI(u)
        return j
    })

    folio.DocumentRegistry.ConverterTypes.RegisterContentType([]string{"jx.yaml","jx.yml","jx.yaml.gz","jx.yml.gz", "jx.json", "jx.json.gz"}, func(u *url.URL) data.Converter {
        j := NewJxFile()
        slog.Info("JxFile.Factory", "jx", j)
        j.SetURI(u)
        slog.Info("JxFile.Factory", "jx", j)
        return j
    })
}

/*
Schemes: file, json, yaml, yml, decl, jx, http, https, other transport schemes?
Format: URL scheme name, `format` query param, file extension

If the input url is a file
Detect Format
*/
func (j *JxFile) SetURI(u *url.URL) {
    slog.Info("JxFile.SetURI()", "jx", j)
    if ! errors.Is(j.Format.Set(u.Scheme), codec.ErrInvalidFormat) {
        u.Scheme = "file"
        q := u.Query()
        q.Set("format", string(j.Format))
        u.RawQuery = q.Encode()
    } else {
        if format, ok := u.Query()["format"]; ok {
            _ = j.Format.Set(format[0])
        }
    }
    if u.Scheme == "file" {
        if u.Path == "" || u.Path == "-" {
            j.Path = "-"
        } else {
            fileAbsolutePath, _ := filepath.Abs(filepath.Join(u.Hostname(), u.Path))
            j.Path = fileAbsolutePath
            if _, err := u.Parse(j.Path); err != nil {
                panic(err)
            }
        }
    } else {
        j.Path = filepath.Join(u.Hostname(), u.RequestURI())
    }
    j.Uri.SetURL(u)
    if j.Format == codec.FormatYaml {
        exttype, ext := j.Uri.Extension()
        if j.Format.Set(exttype) != nil {
            _ = j.Format.Set(ext)
        }
    }
}

func (j *JxFile) setencoder(target data.ContentIdentifier) {
    if formatErr := j.Format.Set(target.ContentType()); formatErr != nil {
        j.Format = codec.FormatYaml
        if format, ok := j.url.Query()["format"]; ok {
            if queryFormatErr := j.Format.Set(format[0]); queryFormatErr != nil {
                j.Format = codec.FormatYaml
            }
        }
    }
    if j.encoder == nil {
        j.encoder = codec.NewEncoder(j.writer, j.Format)
    }
}

func (j *JxFile) setdecoder(source data.ContentIdentifier) {
    if j.decoder == nil {
        for _, v := range strings.Split(source.ContentType(), ".") {
            _ = j.Format.Set(v)
        }
        slog.Info("JxFile.setdecoder()", "type", source.ContentType(), "format", j.Format)
        j.decoder = codec.NewDecoder(j.reader, j.Format)
    }
    slog.Info("JxFile.setdecoder()", "decoder", j.decoder)
}

func (j *JxFile) Type() data.TypeName { return "jx" }

func (j *JxFile) Extract(resourceSource data.Resource, filter data.ElementSelector) (doc data.Document, err error) {
    if j.index == 0 {
        if resourceSource == nil {
            if len(j.Uri) > 0 {
                resourceSource, err = j.Uri.NewResource(nil)
            } else {
                return nil, ErrInvalidSource
            }
        }
        slog.Info("JxFile.Extract()", "source", resourceSource, "error", err)
        var jxSourceFile data.FileResource = resourceSource.(data.FileResource)
        j.reader, err = jxSourceFile.(data.ContentGetter).GetContent(nil)
        slog.Info("JxFile.Extract()", "jxfile", j, "error", err)
        if err != nil {
            return
        }
        j.setdecoder(jxSourceFile.(data.ContentIdentifier))
        slog.Info("JxFile.Extract()", "jxfile", j)
    }

    uri := resourceSource.URI()
    if folio.DocumentRegistry.HasDocument(folio.URI(uri)) {
        uri = fmt.Sprintf("%s?index=%d", uri, j.index)
    }
    doc = folio.DocumentRegistry.NewDocument(folio.URI(uri))
    err = j.decoder.Decode(doc)
    slog.Info("JxFile.Extract()", "doc", doc, "jxfile", j, "error", err)
    j.index++
    if err != nil {
        return
    }
    if err = doc.Validate(); err != nil {
        return
    }
    return
}

func (j *JxFile) ExtractMany(resourceSource data.Resource, filter data.ElementSelector) (documents []data.Document, err error) {
    documents = make([]data.Document, 0, 100)
    defer j.Close()

    j.index = 0
    for {
        var doc data.Document
        if doc, err = j.Extract(resourceSource, filter); err == nil {
            documents = append(documents, doc)
        } else {
            if errors.Is(err, io.EOF) {
                err = nil
                //documents = append(documents, doc)
            }
            break
        }
        slog.Info("JxFile.ExtractMany() loading", "document", j.index)
    }
    slog.Info("JxFile.ExtractMany()", "jxfile", j, "error", err)
    return
}

func (j *JxFile) targetResource() (target data.Resource, err error) {
    if j.emitResource == nil {
        targetUrl := j.Uri.Parse().URL()
        targetUrl.Scheme = "file"
        q := targetUrl.Query()
        q.Set("format", string(j.Format))
        targetUrl.RawQuery = q.Encode()
        j.Uri.SetURL(targetUrl)
        slog.Info("JxFile.targetResource() SetURI", "uri", j.Uri, "targetUrl", targetUrl)
        j.url = targetUrl
        slog.Info("JxFile.targetResource()", "target", targetUrl, "jxfile", j)

        if j.emitResource, err = j.Uri.NewResource(nil); err != nil {
            return nil, err
        }

        var jxTargetFile data.FileResource = j.emitResource.(data.FileResource)
        jxTargetFile.SetContentSourceRef(j.Uri.String())

        slog.Info("JxFile.targetResource() SetContentSourceRef", "target", jxTargetFile, "uri", j.Uri.String())
        j.writer, err = jxTargetFile.(data.ContentReadWriter).ContentWriterStream()
        j.setencoder(j.emitResource.(data.ContentIdentifier))
    }
    target = j.emitResource
    return
}

func (j *JxFile) Emit(document data.Document, filter data.ElementSelector) (resourceTarget data.Resource, err error) {
    ctx := context.Background()

    resourceTarget, err = j.targetResource()

    if err != nil {
        return
    }

    emitDoc := folio.DocumentRegistry.NewDocument("")
    if err = document.Validate(); err != nil {
        return
    }

    slog.Info("JxFile.Emit()", "document", document, "context", ctx)
    for _, declaration := range document.Filter(func(d data.Declaration) bool {
        if filter != nil {
            return filter(d.(*folio.Declaration).Attributes)
        }
        return true
    }) {
        //declaration.(*folio.Declaration).Resource().Read(ctx) // XXX added read here since it was removed from SetURI
        emitDoc.ResourceDeclarations = append(emitDoc.ResourceDeclarations, declaration.(*folio.Declaration))
    }

    document.(*folio.Document).Format = j.Format
    slog.Info("Emit", "target", j, "encoder", j.encoder, "emit", emitDoc)
    if err = j.encoder.Encode(document); err != nil {
        slog.Info("Emit", "err", err)
        return
    }
    return
}

func (j *JxFile) EmitMany(documents []data.Document, filter data.ElementSelector) (resourceTarget data.Resource, err error) {
    for _, doc := range documents {
        if resourceTarget, err = j.Emit(doc, filter); err != nil {
            return
        }
    }
    return
}

func (j *JxFile) Close() (err error) {
    if j.closer != nil {
        err = j.closer()
    }
    if j.reader != nil {
        j.reader.Close()
    }
    if j.encoder != nil {
        j.encoder.Close()
    }
    if j.writer != nil {
        j.writer.Close()
    }
    return
}
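A usage sketch for JxFile.Extract, not part of this diff: the input path is hypothetical, the caller is assumed to live inside the decl module (internal packages), and the nil-source call relies on the index == 0 fallback to the configured Uri shown in Extract above.

package main

import (
    "log"
    "net/url"

    "decl/internal/fan"
)

func main() {
    // Hypothetical input file containing one JX document.
    u, err := url.Parse("yaml:///tmp/resources.jx.yaml")
    if err != nil {
        log.Fatal(err)
    }

    j := fan.NewJxFile()
    j.SetURI(u) // rewrites the scheme to file:// and records format=yaml
    defer j.Close()

    // With a nil source resource, Extract builds one from j.Uri and decodes
    // the first document in the file.
    document, err := j.Extract(nil, nil)
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("extracted document: %v", document)
}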
46 internal/fan/jx_test.go Normal file
@ -0,0 +1,46 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package fan

import (
    "github.com/stretchr/testify/assert"
    "testing"
    "decl/internal/codec"
    "decl/internal/folio"
    "decl/internal/data"
    "net/url"
)

func TestNewJxSource(t *testing.T) {
    s := NewJxFile()
    assert.NotNil(t, s)
}

func TestJxSetURI(t *testing.T) {
    for _, v := range []struct{ url string; expectedformat codec.Format; expecteduri string }{
        { url: "file://foo", expectedformat: codec.FormatYaml, expecteduri: "file://foo" },
        { url: "json://foo", expectedformat: codec.FormatJson, expecteduri: "file://foo?format=json" },
        { url: "yaml://foo", expectedformat: codec.FormatYaml, expecteduri: "file://foo?format=yaml" },
        { url: "file://foo?format=json", expectedformat: codec.FormatJson, expecteduri: "file://foo?format=json" },
        { url: "file://foo.jx.json", expectedformat: codec.FormatJson, expecteduri: "file://foo.jx.json" },
        { url: "file://foo.jx.json.gz", expectedformat: codec.FormatJson, expecteduri: "file://foo.jx.json.gz" },
        { url: "https://foo.jx.json.gz", expectedformat: codec.FormatJson, expecteduri: "https://foo.jx.json.gz" },
    } {
        j := NewJxFile()
        assert.NotNil(t, j)
        u, _ := url.Parse(v.url)
        j.SetURI(u)
        assert.Equal(t, v.expectedformat, j.Format)
        assert.Equal(t, v.expecteduri, string(j.Uri))
    }
}

func TestJxFactory(t *testing.T) {
    converter, err := folio.DocumentRegistry.ConverterTypes.New("json://-")
    assert.Nil(t, err)
    assert.NotNil(t, converter)
    assert.Equal(t, data.TypeName("jx"), converter.Type())
    jxfile := converter.(*JxFile)
    assert.Equal(t, "-", jxfile.Path)
    assert.Equal(t, codec.FormatJson, jxfile.Format)
}
76 internal/fan/package.go Normal file
@ -0,0 +1,76 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package fan

import (
    _ "context"
    _ "encoding/json"
    "fmt"
    _ "gopkg.in/yaml.v3"
    "net/url"
    _ "path/filepath"
    "decl/internal/data"
    "decl/internal/resource"
    "decl/internal/folio"
    _ "os"
    _ "io"
    "log/slog"
)

type Package struct {
    PackageType resource.PackageType `yaml:"type" json:"type"`
}

func NewPackage() *Package {
    return &Package{ PackageType: resource.SystemPackageType }
}

func init() {
    folio.DocumentRegistry.ConverterTypes.Register([]string{"package"}, func(u *url.URL) data.Converter {
        p := NewPackage()
        packageType := u.Query().Get("type")
        if len(packageType) > 0 {
            p.PackageType = resource.PackageType(packageType)
        }
        return p
    })
}

func (p *Package) Type() data.TypeName { return "package" }

func (p *Package) Extract(sourceResource data.Resource, filter data.ElementSelector) (document data.Document, err error) {
    slog.Info("fan.Package.Extract()", "package", p)

    installedPackages := make([]*resource.Package, 0, 100)
    cmd := p.PackageType.NewReadPackagesCommand()
    if cmd == nil {
        return document, fmt.Errorf("%w: %s", resource.ErrUnsupportedPackageType, p.PackageType)
    }

    var out []byte
    if out, err = cmd.Execute(p); err == nil {
        slog.Info("fan.Package.Extract()", "output", out)
        if err = cmd.Extractor(out, &installedPackages); err == nil {
            document = folio.DocumentRegistry.NewDocument("file://-")
            for _, pkg := range installedPackages {
                if pkg == nil {
                    pkg = resource.NewPackage()
                }
                if _, err = document.NewResource(pkg.URI()); err != nil {
                    return
                }
            }
        }
    }
    slog.Info("fan.Package.Extract()", "output", out, "error", err)
    return
}

func (p *Package) Emit(document data.Document, filter data.ElementSelector) (resourceTarget data.Resource, err error) {
    return nil, data.ErrUnsupportedConversion
}

func (p *Package) Close() error {
    return nil
}
23 internal/fan/package_test.go Normal file
@ -0,0 +1,23 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package fan

import (
    "github.com/stretchr/testify/assert"
    "testing"
)

func TestNewPackageSource(t *testing.T) {
    s := NewPackage()
    assert.NotNil(t, s)
}

func TestExtractPackages(t *testing.T) {
    p := NewPackage()
    assert.NotNil(t, p)

    document, err := p.Extract(nil, nil)
    assert.Nil(t, err)
    assert.NotNil(t, document)
    assert.Greater(t, document.Len(), 0)
}
212 internal/fan/tar.go Normal file
@ -0,0 +1,212 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package fan

import (
    _ "context"
    _ "encoding/json"
    "fmt"
    _ "gopkg.in/yaml.v3"
    "net/url"
    "decl/internal/transport"
    "decl/internal/data"
    "decl/internal/folio"
    "archive/tar"
    _ "regexp"
    "io"
    "io/fs"
    "log"
    "log/slog"
    "path/filepath"
)

type Tar struct {
    Uri folio.URI `yaml:"uri" json:"uri"`
    parsedURI *url.URL `yaml:"-" json:"-"`
    emitResource data.Resource `yaml:"-" json:"-"`
    reader io.ReadCloser `yaml:"-" json:"-"`
    writer io.WriteCloser `yaml:"-" json:"-"`
    targetArchive *tar.Writer `yaml:"-" json:"-"`
}

func NewTar() *Tar {
    return &Tar{}
}

func init() {
    folio.DocumentRegistry.ConverterTypes.Register([]string{"tar"}, func(u *url.URL) data.Converter {
        t := NewTar()
        t.SetURI(u)
        return t
    })

    folio.DocumentRegistry.ConverterTypes.RegisterContentType([]string{"tar", "tar.gz", "tgz"}, func(u *url.URL) data.Converter {
        t := NewTar()
        t.SetURI(u)
        return t
    })
}

func (t *Tar) Type() data.TypeName { return "tar" }

func (t *Tar) SetURI(u *url.URL) {
    slog.Info("Tar.SetURI()", "tar", t)
    u.Scheme = "file"
    if u.Path == "" || u.Path == "-" {
    } else {
        fileAbsolutePath, _ := filepath.Abs(filepath.Join(u.Hostname(), u.Path))
        u.Path = fileAbsolutePath
    }
    t.Uri.SetURL(u)
    t.parsedURI = u
    /*
    exttype, fileext := t.Uri.Extension()
    if exttype == "tgz" || fileext == "tgz" {
        q := u.Query()
        q.Set("gzip", string("true"))
        u.RawQuery = q.Encode()
    }
    */
}

func (t *Tar) targetResource() (target data.Resource, err error) {
    if t.emitResource == nil {

        if t.emitResource, err = t.Uri.NewResource(nil); err != nil {
            return nil, err
        }

        var tarTargetFile data.FileResource = t.emitResource.(data.FileResource)
        tarTargetFile.SetContentSourceRef(t.Uri.String())
        tarTargetFile.SetGzipContent(true)

        t.writer, err = tarTargetFile.(data.ContentReadWriter).ContentWriterStream()
        if err == io.EOF {
            slog.Info("Tar.targetResource() ContentWriterStream", "target", tarTargetFile, "tar", t.writer.(*transport.Writer), "error", err)
            panic(err)
        }
        t.targetArchive = tar.NewWriter(t.writer)
        slog.Info("Tar.targetResource() SetContentSourceRef", "target", tarTargetFile, "uri", t.Uri.String(), "tar", t.targetArchive, "error", err)
    }
    target = t.emitResource
    return
}

// Convert a document of file resources to a tar file resource
func (t *Tar) Emit(document data.Document, filter data.ElementSelector) (resourceTarget data.Resource, err error) {

    resourceTarget, err = t.targetResource()

    slog.Info("Tar.Emit()", "writer", t.writer.(*transport.Writer), "error", err)

    for _, res := range document.Filter(func(d data.Declaration) bool {
        return d.ResourceType() == "file"
    }) {

        var f data.FileResource = res.(*folio.Declaration).Attributes.(data.FileResource)

        //f.PathNormalization(true)
        //err = f.NormalizePath()

        fileInfo := f.FileInfo()
        slog.Info("Tar.Emit() FileInfo", "fileinfo", fileInfo, "size", fileInfo.Size(), "file", f)
        if fileInfo.Size() < 1 {
            if len(f.GetContentSourceRef()) > 0 {
                rs, _ := f.(data.ContentReader).ContentReaderStream()
                info, _ := rs.Stat()
                err = f.SetFileInfo(info)
                slog.Info("Tar.Emit() Set FileInfo from ContentSourceRef", "fileinfo", f.FileInfo(), "file", f)
                rs.Close()
            } else {
                if err = f.(data.Info).ReadStat(); err != nil {
                    return
                }
            }
        }

        slog.Info("Tar.Emit", "file", f, "size", fileInfo.Size(), "error", err)
        hdr, fiErr := tar.FileInfoHeader(fileInfo, "")

        if fileInfo.Mode() & fs.ModeSymlink != 0 {
            hdr.Linkname = f.GetTarget()
        }

        slog.Info("Tar.Emit", "header", hdr, "size", fileInfo.Size(), "err", fiErr)
        if err := t.targetArchive.WriteHeader(hdr); err != nil {
            slog.Error("Tar.Emit() WriteHeader", "target", t.targetArchive, "header", hdr, "resource", f, "fileinfo", fileInfo, "error", err)
            log.Fatal(err)
        }

        if fileInfo.IsDir() {
            continue
        }

        slog.Info("Tar.Emit - writing resource to target archive", "target", t.targetArchive, "resource", f, "err", err)
        if _, err := f.GetContent(t.targetArchive); err != nil {
            slog.Error("Tar.Emit() Content", "target", t.targetArchive, "resource", f, "fileinfo", fileInfo, "error", err)
            log.Fatal(err)
        }
        slog.Info("Tar.Emit - wrote", "resource", f, "err", err)
    }

    return
}

// Convert a tar file resource to a document of file resources
func (t *Tar) Extract(resourceSource data.Resource, filter data.ElementSelector) (document data.Document, err error) {
    document = folio.DocumentRegistry.NewDocument("")
    var tarSourceFile data.FileResource = resourceSource.(data.FileResource)
    //tarSourceFile := resourceSource.(*resource.File)

    tarSourceFile.SetGzipContent(true)
    t.reader, err = tarSourceFile.GetContent(nil)
    sourceArchive := tar.NewReader(t.reader)

    defer t.reader.Close()

    for {
        var hdr *tar.Header
        hdr, err = sourceArchive.Next()
        if err == io.EOF {
            slog.Info("Tar.Extract() EOF", "source", sourceArchive)
            err = nil
            break
        }
        if err != nil {
            slog.Info("Tar.Extract() ERROR", "source", sourceArchive, "error", err)
            return
        }

        var fileResource data.Resource
        uri := fmt.Sprintf("file://%s", hdr.Name)
        if fileResource, err = document.(*folio.Document).NewResource(uri); err != nil {
            return
        }
        var f data.FileResource = fileResource.(data.FileResource)

        if err = f.SetFileInfo(hdr.FileInfo()); err != nil {
            return
        }
        err = f.SetContent(sourceArchive)
        if err != nil {
            return
        }
    }
    return
}

func (t *Tar) Close() (err error) {
    if t.reader != nil {
        if err = t.reader.Close(); err != nil {
            return
        }
    }
    if err = t.targetArchive.Close(); err == nil {
        if t.writer != nil {
            err = t.writer.Close()
        }
    }
    return
}
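A sketch of the archive round-trip described by the comments above (not part of this diff): it assumes a caller inside the decl module, the paths are hypothetical, and it mirrors the calls exercised by TestEmitFiles — point Uri at the archive to create, add file resources to a document, then Emit. The Read call follows what fan.Dir does after creating file resources so the archive header has real file metadata.

package main

import (
    "context"
    "log"

    "decl/internal/fan"
    "decl/internal/folio"
)

func main() {
    a := fan.NewTar()
    // Hypothetical output archive; Emit writes the document's file resources here.
    a.Uri = folio.URI("file:///tmp/example.tar")

    doc := folio.DocumentRegistry.NewDocument("")
    // Hypothetical input file added to the document as a "file" resource.
    res, err := doc.NewResource("file:///etc/hostname")
    if err != nil {
        log.Fatal(err)
    }
    // Populate the resource's attributes (stat, content) before archiving.
    if _, err := res.Read(context.Background()); err != nil {
        log.Fatal(err)
    }

    if _, err := a.Emit(doc, nil); err != nil {
        log.Fatal(err)
    }
    if err := a.Close(); err != nil {
        log.Fatal(err)
    }
}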
105 internal/fan/tar_test.go Normal file
@ -0,0 +1,105 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package fan

import (
    "github.com/stretchr/testify/assert"
    "testing"
    "bytes"
    "archive/tar"
    "decl/internal/data"
    "decl/internal/folio"
    "decl/internal/resource"
    "path/filepath"
    "strings"
    "io"
    "fmt"
    "log/slog"
)

var tarArchiveBuffer bytes.Buffer

func TarArchive() (err error) {
    tw := tar.NewWriter(&tarArchiveBuffer)
    defer tw.Close()

    fileContent := "test file content"

    if err = tw.WriteHeader(&tar.Header{
        Name: "testfile",
        Mode: 0600,
        Size: int64(len(fileContent)),
    }); err == nil {
        _, err = tw.Write([]byte(fileContent))
    }
    return
}

func TestNewTar(t *testing.T) {
    a := NewTar()
    assert.NotNil(t, a)
}

func TestExtractFiles(t *testing.T) {
    a := NewTar()
    assert.NotNil(t, a)
    e := TarArchive()
    assert.Nil(t, e)
    assert.Greater(t, tarArchiveBuffer.Len(), 0)

    d := folio.NewDeclaration()
    d.ResourceTypes = folio.DocumentRegistry.ResourceTypes
    slog.Info("TestExtractFiles", "resourcetypes", folio.DocumentRegistry.ResourceTypes, "declarationtypes", d.ResourceTypes, "resource.ResourceTypes", resource.ResourceTypes)
    d.Type = "file"
    assert.Nil(t, d.NewResource(nil))

    var sourceResource data.FileResource = d.Attributes.(data.FileResource)
    assert.Nil(t, sourceResource.SetContent(&tarArchiveBuffer))

    exDoc, err := a.Extract(d.Attributes, nil)
    assert.Nil(t, err)
    assert.NotNil(t, exDoc)
    document := exDoc.(*folio.Document)
    assert.Greater(t, document.Len(), 0)

    assert.Equal(t, folio.TypeName("file"), document.ResourceDeclarations[0].Type)
    f := document.ResourceDeclarations[0].Resource().(data.FileResource)
    assert.Equal(t, "testfile", f.FilePath())
}

func TestEmitFiles(t *testing.T) {
    expected := "some test data"

    a := NewTar()
    assert.NotNil(t, a)

    a.Uri = folio.URI(fmt.Sprintf("file://%s", filepath.Join(TempDir, "testemitfiles.tar")))

    doc := folio.DocumentRegistry.NewDocument("")

    uri := fmt.Sprintf("file://%s", filepath.Join(TempDir, "foo.txt"))
    res, resErr := doc.NewResource(uri)
    assert.Nil(t, resErr)
    assert.NotNil(t, res)

    assert.Equal(t, res, doc.GetResource(uri).Resource())
    f := doc.GetResource(uri).Attributes.(data.FileResource)

    assert.Nil(t, f.SetContent(strings.NewReader(expected)))

    target, emitErr := a.Emit(doc, nil)
    assert.Nil(t, emitErr)
    assert.Equal(t, folio.URI(fmt.Sprintf("file://%s", target.(data.FileResource).FilePath())), a.Uri)

    tarArchiveBuffer.Reset()
    _, contentErr := target.(data.FileResource).GetContent(&tarArchiveBuffer)
    assert.Nil(t, contentErr)
    tr := tar.NewReader(&tarArchiveBuffer)
    hdr, err := tr.Next()
    assert.NotEqual(t, io.EOF, err)
    assert.NotNil(t, hdr)
    assert.Equal(t, f.FilePath(), hdr.Name)
    data, err := io.ReadAll(tr)
    assert.Nil(t, err)
    assert.Equal(t, expected, string(data))
}
75 internal/fan/user.go Normal file
@ -0,0 +1,75 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package fan

import (
    _ "context"
    _ "encoding/json"
    _ "fmt"
    _ "gopkg.in/yaml.v3"
    "net/url"
    _ "path/filepath"
    "decl/internal/resource"
    "decl/internal/data"
    "decl/internal/folio"
    _ "os"
    _ "io"
    "log/slog"
)

type User struct {
    UserType resource.UserType `yaml:"type" json:"type"`
}

func NewUser() *User {
    return &User{ UserType: resource.SystemUserType }
}

func init() {
    folio.DocumentRegistry.ConverterTypes.Register([]string{"user"}, func(u *url.URL) data.Converter {
        userSource := NewUser()
        userType := u.Query().Get("type")
        if len(userType) > 0 {
            userSource.UserType = resource.UserType(userType)
        }
        return userSource
    })
}

func (u *User) Type() data.TypeName { return "user" }

func (u *User) Extract(sourceResource data.Resource, filter data.ElementSelector) (document data.Document, err error) {
    slog.Info("user source Extract()", "user", u)
    Users := make([]*resource.User, 0, 100)
    cmd := u.UserType.NewReadUsersCommand()
    if cmd == nil {
        return document, resource.ErrUnsupportedUserType
    }
    if out, err := cmd.Execute(u); err == nil {
        slog.Info("user source ExtractResources()", "output", out)
        if exErr := cmd.Extractor(out, &Users); exErr != nil {
            return document, exErr
        }
        document = folio.DocumentRegistry.NewDocument("user://-")
        for _, usr := range Users {
            if usr == nil {
                usr = resource.NewUser()
            }
            usr.UserType = u.UserType
            document.AddResourceDeclaration("user", usr)
        }
    } else {
        slog.Info("user source Extract()", "output", out, "error", err)
        return document, err
    }
    return document, nil
}

func (u *User) Emit(document data.Document, filter data.ElementSelector) (resourceTarget data.Resource, err error) {
    return nil, data.ErrUnsupportedConversion
}

func (u *User) Close() error {
    return nil
}
@ -1,23 +1,23 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.

package source
package fan

import (
    "github.com/stretchr/testify/assert"
    "testing"
)

func TestNewDirSource(t *testing.T) {
    s := NewDir()
func TestNewUserSource(t *testing.T) {
    s := NewUser()
    assert.NotNil(t, s)
}

func TestExtractDirectory(t *testing.T) {
    s := NewDir()
    assert.NotNil(t, s)
func TestExtractUsers(t *testing.T) {
    u := NewUser()
    assert.NotNil(t, u)

    document, err := s.ExtractDirectory(TempDir)
    document, err := u.Extract(nil, nil)
    assert.Nil(t, err)
    assert.NotNil(t, document)

    assert.Greater(t, document.Len(), 0)
}
Some files were not shown because too many files have changed in this diff.