move source/target converters to fan pkg

Matthew Rich 2024-09-19 08:03:23 +00:00
parent ba9b37f512
commit c34a76981e
44 changed files with 1545 additions and 1286 deletions

24
internal/data/block.go Normal file

@@ -0,0 +1,24 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package data
import (
"errors"
)
var (
ErrConfigUndefinedName = errors.New("Config block is missing a defined name")
)
type Block interface {
Identifier
ConfigurationType() TypeName
Loader
Validator
NewConfiguration(uri *string) error
ConfigurationValueGetter
Configuration() Configuration
Clone() Block
}


@@ -3,8 +3,27 @@
 package data
 import (
+	"errors"
+)
+var (
+	ErrUnknownConfigurationType = errors.New("Unknown configuration type")
+	ErrUnknownConfigurationKey = errors.New("Unknown configuration key")
 )
 type ConfigurationValueGetter interface {
 	GetValue(key string) (any, error)
 }
+type ConfigurationValueChecker interface {
+	Has(key string) bool
+}
+type Configuration interface {
+	Identifier
+	Type() string
+	Reader
+	ConfigurationValueGetter
+	ConfigurationValueChecker
+	Clone() Configuration
+}


@@ -3,24 +3,38 @@
 package data
 import (
+	"errors"
+)
+var (
+	ErrUnsupportedConversion = errors.New("Unsupported conversion")
 )
 // Convert a resource to a document and a document to a resource
 type Emitter interface {
-	Emit(document Document, filter ResourceSelector) (Resource, error)
+	Emit(document Document, filter ElementSelector) (Resource, error)
 }
-type Extracter interface {
-	Extract(resource Resource, filter ResourceSelector) (Document, error)
+type Extractor interface {
+	Extract(resource Resource, filter ElementSelector) (Document, error)
 }
 type Converter interface {
 	Typer
 	Emitter
-	Extracter
+	Extractor
+	Close() error
 }
 type ManyExtractor interface {
-	ExtractMany(resource Resource, filter ResourceSelector) ([]Document, error)
+	ExtractMany(resource Resource, filter ElementSelector) ([]Document, error)
+}
+type ManyEmitter interface {
+	EmitMany(documents []Document, filter ElementSelector) (Resource, error)
+}
+type DirectoryConverter interface {
+	SetRelative(flag bool)
 }
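Note (not part of the commit): a minimal sketch of a type satisfying the reshaped Converter interface, assuming Typer contributes the Type() data.TypeName method used elsewhere in this change; the nullConverter name is illustrative.

package fan

import "decl/internal/data"

// nullConverter is an illustrative Converter implementation that refuses
// every conversion; a real converter decodes/encodes documents here.
type nullConverter struct{}

func (n *nullConverter) Type() data.TypeName { return "null" }

// Extract would normally decode the source resource into a Document.
func (n *nullConverter) Extract(source data.Resource, filter data.ElementSelector) (data.Document, error) {
	return nil, data.ErrUnsupportedConversion
}

// Emit would normally encode a Document back into a target resource.
func (n *nullConverter) Emit(document data.Document, filter data.ElementSelector) (data.Resource, error) {
	return nil, data.ErrUnsupportedConversion
}

func (n *nullConverter) Close() error { return nil }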


@@ -26,6 +26,10 @@ type Deleter interface {
 	Delete(context.Context) error
 }
+type Info interface {
+	ReadStat() error
+}
 type Crudder interface {
 	Creator
 	Reader


@@ -8,6 +8,7 @@ import (
 	"decl/internal/codec"
 	"io"
 	"decl/internal/mapper"
+	"net/url"
 )
 var (
@@ -38,14 +39,29 @@ type Document interface {
 	mapper.Mapper
 	NewResource(uri string) (Resource, error)
+	NewResourceFromParsedURI(uri *url.URL) (Resource, error)
+	AddDeclaration(Declaration)
+	AddResourceDeclaration(resourceType string, resourceDeclaration Resource)
 	Types() (TypesRegistry[Resource])
 	// Resources() []Declaration
 	SetConfig(config Document)
 	ConfigDoc() Document
+	HasConfig(string) bool
+	GetConfig(string) Block
 	Len() int
 	ResolveIds(ctx context.Context)
 	Filter(filter DeclarationSelector) []Declaration
-	//Diff(with *Document, output io.Writer) (returnOutput string, diffErr error)
+	Declarations() []Declaration
+	CheckConstraints() bool
+	Failures() int
+	ConfigFilter(filter BlockSelector) []Block
+	AppendConfigurations([]Document)
+	Diff(with Document, output io.Writer) (returnOutput string, diffErr error)
+	Clone() Document
 }


@@ -4,6 +4,7 @@ package data
 import (
 	"errors"
+	"net/url"
 )
 var (
@@ -13,6 +14,11 @@ var (
 type Identifier interface {
 	URI() string
 	SetURI(string) error
+	SetParsedURI(*url.URL) error
+}
+type DocumentElement interface {
+	Identifier
 }
 type Selector[Item comparable] func(r Item) bool
@@ -20,3 +26,7 @@ type Selector[Item comparable] func(r Item) bool
 type ResourceSelector Selector[Resource]
 type DeclarationSelector Selector[Declaration]
+type BlockSelector Selector[Block]
+type ElementSelector Selector[DocumentElement]


@@ -51,6 +51,11 @@ func NewResourceMapper() ResourceMapper {
 	return mapper.New[string, Declaration]()
 }
+type ContentHasher interface {
+	Hash() []byte
+	HashHexString() string
+}
 type ContentIdentifier interface {
 	ContentType() string
 }
@@ -82,10 +87,23 @@ type ContentGetSetter interface {
 }
 type FileResource interface {
+	SetBasePath(int)
 	FilePath() string
 	SetFileInfo(fs.FileInfo) error
 	FileInfo() fs.FileInfo
 	ContentGetSetter
+	GetContentSourceRef() string
 	SetContentSourceRef(uri string)
+	SetFS(fs.FS)
+	PathNormalization(bool)
+	NormalizePath() error
+	GetTarget() string
 }
+type Signed interface {
+	Signature() Signature
+}
+type FileInfoGetter interface {
+	Stat() (fs.FileInfo, error)
+}


@@ -0,0 +1,12 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package data
import (
)
type Signature interface {
Verify(ContentHasher) error
SetHexString(string) error
String() string
}


@@ -10,6 +10,7 @@ type Factory[Product comparable] func(*url.URL) Product
 type TypesRegistry[Product comparable] interface {
 	New(uri string) (result Product, err error)
+	NewFromParsedURI(uri *url.URL) (result Product, err error)
 	Has(typename string) bool
 	//Get(string) Factory[Product]
 }


@@ -1,6 +1,6 @@
 // Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
-package source
+package fan
 import (
 	"context"
@@ -10,6 +10,8 @@ _ "gopkg.in/yaml.v3"
 	"net/url"
 	_ "path/filepath"
 	"decl/internal/resource"
+	"decl/internal/folio"
+	"decl/internal/data"
 	_ "os"
 	_ "io"
 	"github.com/docker/docker/api/types/container"
@@ -36,32 +38,40 @@ func NewContainer(containerClientApi resource.ContainerClient) *Container {
 }
 func init() {
-	SourceTypes.Register([]string{"container"}, func(u *url.URL) DocSource {
+	folio.DocumentRegistry.ConverterTypes.Register([]string{"container"}, func(u *url.URL) data.Converter {
 		c := NewContainer(nil)
 		return c
 	})
 }
-func (c *Container) Type() string { return "container" }
-func (c *Container) ExtractResources(filter ResourceSelector) ([]*resource.Document, error) {
+func (c *Container) Type() data.TypeName { return "container" }
+func (c *Container) Extract(sourceResource data.Resource, filter data.ElementSelector) (document data.Document, err error) {
 	var extractErr error
 	ctx := context.Background()
-	slog.Info("container source ExtractResources()", "container", c)
+	slog.Info("container source Extract()", "container", c)
 	containers, err := c.apiClient.ContainerList(ctx, container.ListOptions{All: true})
 	if err != nil {
 		return nil, err
 	}
-	document := resource.NewDocument()
+	document = folio.DocumentRegistry.NewDocument(folio.URI(sourceResource.URI()))
 	for _, container := range containers {
 		runningContainer := resource.NewContainer(nil)
 		if inspectErr := runningContainer.Inspect(ctx, container.ID); inspectErr != nil {
 			extractErr = fmt.Errorf("%w: %w", extractErr, inspectErr)
 		}
-		document.AddResourceDeclaration("container", runningContainer)
+		document.(*folio.Document).AddResourceDeclaration("container", runningContainer)
	}
-	return []*resource.Document{document}, extractErr
+	return document, extractErr
+}
+func (c *Container) Emit(document data.Document, filter data.ElementSelector) (resource data.Resource, err error) {
+	return nil, nil
+}
+func (c *Container) Close() error {
+	return nil
 }

176
internal/fan/dir.go Normal file

@@ -0,0 +1,176 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package fan
import (
"context"
_ "encoding/json"
"fmt"
_ "gopkg.in/yaml.v3"
"net/url"
"path/filepath"
"decl/internal/data"
"decl/internal/folio"
"os"
_ "io"
"log/slog"
"decl/internal/fs"
)
type Dir struct {
Path string `yaml:"path" json:"path"`
Relative bool `yaml:"relative" json:"relative"`
subDirsStack []string `yaml:"-" json:"-"`
fs *fs.WalkDir `yaml:"-" json:"-"`
}
func NewDir() *Dir {
return &Dir{
subDirsStack: make([]string, 0, 100),
}
}
func init() {
folio.DocumentRegistry.ConverterTypes.Register([]string{"file"}, func(u *url.URL) data.Converter {
t := NewDir()
t.Path,_ = filepath.Abs(filepath.Join(u.Hostname(), u.Path))
t.Relative = false
return t
})
}
func (d *Dir) SetRelative(flag bool) { d.Relative = flag }
func (d *Dir) Type() data.TypeName { return "dir" }
func (d *Dir) ExtractDirectory(path string, document data.Document) (err error) {
ctx := context.Background()
files, readDirErr := os.ReadDir(path)
slog.Info("fan.Dir.ExtractDirectory()", "path", path, "error", readDirErr)
if readDirErr != nil {
return readDirErr
}
for _,file := range files {
filePath := filepath.Join(path, file.Name())
u := fmt.Sprintf("file://%s", filePath)
var f data.Resource
if f, err = document.NewResource(u); err != nil {
return
}
if _, err = f.Read(ctx); err != nil {
return
}
if file.IsDir() {
d.subDirsStack = append(d.subDirsStack, filePath)
}
}
return nil
}
func (d *Dir) isParent(m *map[string]int, path string, containingDirectoryPath string) (newCDP string, cdpCount int) {
newCDP = containingDirectoryPath
cdpCount = (*m)[containingDirectoryPath]
pathLen := len(path)
for i, p := range path {
if p == '/' || i == pathLen {
sPath := path[:i]
if len(sPath) > 0 {
(*m)[sPath]++
superDirCount := (*m)[sPath]
if superDirCount >= cdpCount {
newCDP = sPath
cdpCount = superDirCount
}
}
}
}
return
}
func (d *Dir) LCPath(files []string) (lcPath string) {
parentPaths := make(map[string]int)
var containingDirectoryPath string
for _,filePath := range files {
containingDirectoryPath, _ = d.isParent(&parentPaths, filePath, containingDirectoryPath)
}
lcPath = containingDirectoryPath
return
}
func (d *Dir) Emit(document data.Document, filter data.ElementSelector) (resourceTarget data.Resource, err error) {
if document == nil || document.Len() <= 0 {
return nil, ErrEmptyDocument
}
dirFileDeclaration := folio.NewDeclaration()
dirFileDeclaration.Type = "file"
if err = dirFileDeclaration.NewResource(nil); err != nil {
return
}
parentPaths := make(map[string]int)
var containingDirectoryPath string
for _,res := range document.Filter(func(d data.Declaration) bool {
return d.ResourceType() == "file"
}) {
var f data.FileResource = res.(*folio.Declaration).Attributes.(data.FileResource)
var parent string
if f.FileInfo().IsDir() {
parent, err = filepath.Abs(f.FilePath())
} else {
parent, err = filepath.Abs(filepath.Dir(f.FilePath()))
}
if err != nil {
return
}
containingDirectoryPath, _ = d.isParent(&parentPaths, parent, containingDirectoryPath)
}
uri := fmt.Sprintf("file://%s", containingDirectoryPath)
if err = dirFileDeclaration.SetURI(uri); err != nil {
return
}
resourceTarget = dirFileDeclaration.Attributes
return
}
func (d *Dir) Extract(resourceSource data.Resource, filter data.ElementSelector) (document data.Document, err error) {
ctx := context.Background()
if resourceSource.Type() != "file" {
return nil, fmt.Errorf("%w", ErrInvalidResource)
}
slog.Info("fan.Dir.Extract()", "path", d.Path, "resource", resourceSource)
d.Path = resourceSource.(data.FileResource).FilePath()
document = folio.DocumentRegistry.NewDocument("")
d.fs = fs.NewWalkDir(os.DirFS(d.Path), d.Path, func(fsys fs.FS, path string, file fs.DirEntry) (err error) {
u := fmt.Sprintf("file://%s", path)
slog.Info("Fan.Dir.Extract() WalkDir", "file", u, "root", d.Path)
if path != "" {
var f data.Resource
if f, err = document.NewResource(u); err != nil {
return
}
if d.Relative {
f.(data.FileResource).SetBasePath(len(d.Path) + 1)
slog.Info("Fan.Dir.Extract() WalkDir Relative", "file", f, "path", path)
}
slog.Info("Fan.Dir.Extract() WalkDir Resource.Read", "file", f)
_, err = f.Read(ctx)
}
return
})
slog.Info("Fan.Dir.Extract()", "fs", d.fs)
err = d.fs.Walk(nil)
return
}
func (d *Dir) Close() error {
return nil
}

130
internal/fan/dir_test.go Normal file

@@ -0,0 +1,130 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package fan
import (
"github.com/stretchr/testify/assert"
"testing"
"decl/internal/folio"
"decl/internal/data"
"log/slog"
"path/filepath"
"os"
"fmt"
)
func TestNewDirSource(t *testing.T) {
s := NewDir()
assert.NotNil(t, s)
}
func TestExtractDirectory(t *testing.T) {
s := NewDir()
assert.NotNil(t, s)
document := folio.DocumentRegistry.NewDocument("")
assert.NotNil(t, document)
assert.Nil(t, s.ExtractDirectory(TempDir, document))
assert.Greater(t, 2, document.Len())
}
func TestIsParent(t *testing.T) {
s := NewDir()
assert.NotNil(t, s)
m := map[string]int{
"/foo/bar": 3,
"/foo": 1,
}
res, count := s.isParent(&m, "/foo/bar/baz/quuz", "/foo/bar")
assert.Equal(t, "/foo/bar", res)
assert.Equal(t, 4, count)
assert.Equal(t, 2, m["/foo"])
}
func TestLCPath(t *testing.T) {
s := NewDir()
assert.NotNil(t, s)
result := s.LCPath([]string{
"/foo/bar/baz/quuz",
"/foo/bar/baz/quuz/abc.txt",
"/foo/bar/baz/quuz/def.txt",
"/foo/bar/baz/quz/ghi.txt",
"/foo/bar/kiw",
"/tmp",
})
assert.Equal(t, "/foo/bar", result)
result = s.LCPath([]string{
"/foo/bar/baz/quuz",
"/foo/eer/voo",
"/foo/bar/baz/quuz/abc.txt",
"/foo/bar/baz/quuz/def.txt",
"/foo/bar/baz/quz/ghi.txt",
"/foo/bar/kiw",
"/tmp",
"/usr",
"/usr/lib",
})
assert.Equal(t, "/foo", result)
}
func BenchmarkLCPath(b *testing.B) {
s := NewDir()
assert.NotNil(b, s)
for i := 0; i < b.N; i++ {
s.LCPath([]string{
"/foo/bar/baz/quuz",
"/foo/eer/voo",
"/foo/bar/baz/quuz/abc.txt",
"/foo/bar/baz/quuz/def.txt",
"/foo/bar/baz/quz/ghi.txt",
"/foo/bar/kiw",
"/tmp",
"/usr",
"/usr/lib",
})
}
}
func TestEmit(t *testing.T) {
s := NewDir()
assert.NotNil(t, s)
contextDir, _ := filepath.Abs(filepath.Join(TempDir, "context"))
etcDir := filepath.Join(contextDir, "etc")
binDir := filepath.Join(contextDir, "bin")
usrDir := filepath.Join(contextDir, "usr")
usrLibDir := filepath.Join(contextDir, "usr/lib")
usrBinDir := filepath.Join(contextDir, "usr/bin")
assert.Nil(t, os.Mkdir(contextDir, os.ModePerm))
assert.Nil(t, os.Mkdir(etcDir, os.ModePerm))
assert.Nil(t, os.Mkdir(binDir, os.ModePerm))
assert.Nil(t, os.Mkdir(usrDir, os.ModePerm))
assert.Nil(t, os.Mkdir(usrLibDir, os.ModePerm))
assert.Nil(t, os.Mkdir(usrBinDir, os.ModePerm))
decl := folio.NewDeclaration()
srcFile := fmt.Sprintf("file://%s", contextDir)
resErr := decl.NewResource(&srcFile)
assert.Nil(t, resErr)
slog.Info("TestEmit()", "file", decl, "res", decl.Attributes)
document, extractErr := s.Extract(decl.Resource(), nil)
slog.Info("TestEmit() - Extract", "document", document, "error", extractErr)
assert.Nil(t, extractErr)
assert.Greater(t, document.Len(), 4)
res, emitErr := s.Emit(document, nil)
slog.Info("TestEmit()", "res", res, "error", emitErr)
assert.Nil(t, emitErr)
assert.Equal(t, contextDir, res.(data.FileResource).FilePath())
}

30
internal/fan/fan.go Normal file

@@ -0,0 +1,30 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package fan
import (
"errors"
)
// Convert a resource to a document and a document to a resource
/*
type Emitter interface {
Emit(document *resource.Document) (resource.Resource, error)
}
type Extracter interface {
Extract(resource resource.Resource, filter resource.ResourceSelector) (*resource.Document, error)
}
type Converter interface {
Emitter
Extracter
}
*/
var (
ErrInvalidSource error = errors.New("Invalid source")
ErrInvalidResource error = errors.New("Invalid resource")
ErrEmptyDocument error = errors.New("Document contains no resources")
)
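Usage sketch (illustrative, not part of the commit): once the fan package's init functions have registered their factories, a caller can obtain a converter from folio.DocumentRegistry.ConverterTypes by URI, as the jx tests below do with "json://-", and run an extraction.

package main

import (
	"log"

	_ "decl/internal/fan" // blank import assumed to trigger the init() registrations
	"decl/internal/folio"
)

func main() {
	// Look up a converter by URI; the JxFile factory registers the "json" scheme.
	converter, err := folio.DocumentRegistry.ConverterTypes.New("json://-")
	if err != nil {
		log.Fatal(err)
	}
	defer converter.Close()

	// A nil source resource makes the converter fall back to its own URI
	// ("-" reads the JX document from stdin in this sketch).
	document, err := converter.Extract(nil, nil)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("extracted document with %d declarations", document.Len())
}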

23
internal/fan/fan_test.go Normal file

@@ -0,0 +1,23 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package fan
import (
"testing"
"os"
"log"
)
var TempDir string
func TestMain(m *testing.M) {
var err error
TempDir, err = os.MkdirTemp("", "testfan")
if err != nil || TempDir == "" {
log.Fatal(err)
}
//folio.DocumentRegistry.ResourceTypes = resource.ResourceTypes
rc := m.Run()
os.RemoveAll(TempDir)
os.Exit(rc)
}


@@ -1,6 +1,6 @@
 // Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
-package source
+package fan
 import (
 	_ "context"
@@ -10,6 +10,8 @@ _ "gopkg.in/yaml.v3"
 	"net/url"
 	_ "path/filepath"
 	"decl/internal/resource"
+	"decl/internal/folio"
+	"decl/internal/data"
 	_ "os"
 	_ "io"
 	"log/slog"
@@ -24,7 +26,7 @@ func NewGroup() *Group {
 }
 func init() {
-	SourceTypes.Register([]string{"group"}, func(u *url.URL) DocSource {
+	folio.DocumentRegistry.ConverterTypes.Register([]string{"group"}, func(u *url.URL) data.Converter {
 		groupSource := NewGroup()
 		groupType := u.Query().Get("type")
 		if len(groupType) > 0 {
@@ -35,23 +37,21 @@ func init() {
 }
-func (g *Group) Type() string { return "group" }
-func (g *Group) ExtractResources(filter ResourceSelector) ([]*resource.Document, error) {
-	documents := make([]*resource.Document, 0, 100)
+func (g *Group) Type() data.TypeName { return "group" }
+func (g *Group) Extract(sourceResource data.Resource, filter data.ElementSelector) (document data.Document, err error) {
 	slog.Info("group source ExtractResources()", "group", g)
 	Groups := make([]*resource.Group, 0, 100)
 	cmd := g.GroupType.NewReadGroupsCommand()
 	if cmd == nil {
-		return documents, resource.ErrUnsupportedGroupType
+		return document, resource.ErrUnsupportedGroupType
 	}
 	if out, err := cmd.Execute(g); err == nil {
-		slog.Info("group source ExtractResources()", "output", out)
+		slog.Info("group source Extract()", "output", out)
 		if exErr := cmd.Extractor(out, &Groups); exErr != nil {
-			return documents, exErr
+			return document, exErr
 		}
-		document := resource.NewDocument()
+		document = folio.DocumentRegistry.NewDocument("group://-")
 		for _, grp := range Groups {
 			if grp == nil {
 				grp = resource.NewGroup()
@@ -59,10 +59,18 @@ func (g *Group) ExtractResources(filter ResourceSelector) ([]*resource.Document,
 			grp.GroupType = g.GroupType
 			document.AddResourceDeclaration("group", grp)
 		}
-		documents = append(documents, document)
 	} else {
 		slog.Info("group source ExtractResources()", "output", out, "error", err)
-		return documents, err
+		return document, err
 	}
-	return documents, nil
+	return document, nil
 }
+func (g *Group) Emit(document data.Document, filter data.ElementSelector) (resourceTarget data.Resource, err error) {
+	return nil, data.ErrUnsupportedConversion
+}
+func (g *Group) Close() error {
+	return nil
+}

197
internal/fan/http.go Normal file

@@ -0,0 +1,197 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package fan
import (
_ "context"
_ "encoding/json"
"fmt"
_ "gopkg.in/yaml.v3"
"net/url"
_ "net/http"
_ "path/filepath"
_ "decl/internal/resource"
"decl/internal/codec"
"decl/internal/data"
"decl/internal/folio"
_ "os"
"io"
"errors"
"log/slog"
)
type HTTP struct {
Endpoint folio.URI `yaml:"endpoint,omitempty" json:"endpoint,omitempty"`
url *url.URL `yaml:"-" json:"-"`
Format codec.Format `yaml:"format,omitempty" json:"format,omitempty"`
reader io.ReadCloser `yaml:"-" json:"-"`
writer io.WriteCloser `yaml:"-" json:"-"`
decoder codec.Decoder `yaml:"-" json:"-"`
encoder codec.Encoder `yaml:"-" json:"-"`
closer func() error `yaml:"-" json:"-"`
index int `yaml:"-" json:"-"`
signature data.Signature `yaml:"-" json:"-"`
}
func NewHTTP() *HTTP {
return &HTTP{ Format: codec.FormatYaml, index: 0, closer: func() error { return nil } }
}
func init() {
folio.DocumentRegistry.ConverterTypes.Register([]string{"http","https"}, func(u *url.URL) data.Converter {
t := NewHTTP()
t.Endpoint = folio.URI(u.String())
t.url = u
return t
})
}
func (h *HTTP) Type() data.TypeName { return "http" }
/*
func (h *HTTP) setencoder(target data.ContentIdentifier) {
if formatErr := h.Format.Set(target.ContentType()); formatErr != nil {
h.Format = codec.FormatYaml
if format,ok := h.url.Query()["format"]; ok {
if queryFormatErr := h.Format.Set(format[0]); queryFormatErr != nil {
h.Format = codec.FormatYaml
}
}
}
if h.encoder == nil {
h.encoder = codec.NewEncoder(h.writer, h.Format)
}
}
*/
func (h *HTTP) setdecoder(source data.ContentIdentifier) {
if h.decoder == nil {
_ = h.Format.Set(source.ContentType())
h.decoder = codec.NewDecoder(h.reader, h.Format)
}
}
func (h *HTTP) Extract(sourceResource data.Resource, filter data.ElementSelector) (document data.Document, err error) {
if h.index == 0 {
if sourceResource == nil {
if len(h.Endpoint) > 0 {
sourceResource, err = h.Endpoint.NewResource(nil)
} else {
return nil, ErrInvalidSource
}
}
slog.Info("HTTP.Extract()", "source", sourceResource, "error", err)
var jxSourceFile data.FileResource = sourceResource.(data.FileResource)
h.reader, err = jxSourceFile.(data.ContentGetter).GetContent(nil)
slog.Info("HTTP.Extract()", "file", h, "error", err)
if err != nil {
return
}
h.signature = sourceResource.(data.Signed).Signature()
h.setdecoder(jxSourceFile.(data.ContentIdentifier))
slog.Info("HTTP.Extract()", "jx", h)
}
u := fmt.Sprintf("%s?index=%d", sourceResource.URI(), h.index)
document = folio.DocumentRegistry.NewDocument(folio.URI(u))
err = h.decoder.Decode(document)
slog.Info("HTTP.Extract()", "doc", document, "http", h, "error", err)
h.index++
if err != nil {
return
}
if err = document.Validate(); err != nil {
return
}
if h.signature.String() != "" {
if v, ok := sourceResource.(data.ContentHasher); ok {
err = h.signature.Verify(v)
}
}
return
/*
defer h.Close()
documentSignature := h.transport.Signature()
hash := sha256.New()
sumReadData := iofilter.NewReader(h.transport, func(p []byte, readn int, readerr error) (n int, err error) {
hash.Write(p)
return
})
decoder := codec.NewYAMLDecoder(sumReadData)
index := 0
for {
doc = folio.DocumentRegistry.NewDocument(folio.URI(u))
doc := resource.NewDocument()
e := decoder.Decode(doc)
if errors.Is(e, io.EOF) {
break
}
if e != nil {
return documents, e
}
if validationErr := doc.Validate(); validationErr != nil {
return documents, validationErr
}
documents = append(documents, doc)
index++
}
if documentSignature != "" {
sig := &signature.Ident{}
sigErr := sig.VerifySum(hash.Sum(nil), []byte(documentSignature))
if sigErr != nil {
return documents, sigErr
}
}
*/
}
func (h *HTTP) ExtractMany(resourceSource data.Resource, filter data.ElementSelector) (documents []data.Document, err error) {
documents = make([]data.Document, 0, 100)
defer h.Close()
h.index = 0
for {
var doc data.Document
if doc, err = h.Extract(resourceSource, filter); err == nil {
documents = append(documents, doc)
} else {
if errors.Is(err, io.EOF) {
err = nil
//documents = append(documents, doc)
}
break
}
}
slog.Info("HTTP.ExtractMany()", "file", h, "error", err)
return
}
func (h *HTTP) Emit(document data.Document, filter data.ElementSelector) (resource data.Resource, err error) {
return nil, nil
}
func (h *HTTP) Close() (err error) {
/*
if h.decoder != nil {
h.decoder.Close()
}
*/
if h.encoder != nil {
h.encoder.Close()
}
if h.reader != nil {
h.reader.Close()
}
if h.writer != nil {
h.writer.Close()
}
return
}


@@ -1,6 +1,6 @@
 // Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
-package source
+package fan
 import (
 	"github.com/stretchr/testify/assert"

83
internal/fan/iptable.go Normal file

@@ -0,0 +1,83 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package fan
import (
_ "context"
_ "encoding/json"
"fmt"
_ "gopkg.in/yaml.v3"
"net/url"
_ "path/filepath"
"decl/internal/data"
"decl/internal/resource"
"decl/internal/folio"
_ "os"
_ "io"
"strings"
"log/slog"
)
type Iptable struct {
Table string `yaml:"table" json:"table"`
Chain string `yaml:"chain" json:"chain"`
}
func NewIptable() *Iptable {
return &Iptable{}
}
func init() {
folio.DocumentRegistry.ConverterTypes.Register([]string{"iptable"}, func(u *url.URL) data.Converter {
t := NewIptable()
t.Table = u.Hostname()
elements := strings.FieldsFunc(u.Path, func(c rune) bool { return c == '/' })
if len(elements) >= 1 {
t.Chain = elements[0]
}
slog.Info("iptable chain source factory", "table", t, "uri", u, "table", u.Hostname())
return t
})
}
func (i *Iptable) Type() data.TypeName { return "iptable" }
func (i *Iptable) Extract(sourceResource data.Resource, filter data.ElementSelector) (document data.Document, err error) {
slog.Info("fan.Iptable.Extract()", "table", i)
iptRules := make([]*resource.Iptable, 0, 100)
cmd := resource.NewIptableReadChainCommand()
if cmd == nil {
return document, fmt.Errorf("Iptable read chain: invalid command")
}
var out []byte
if out, err = cmd.Execute(i); err == nil {
if err = cmd.Extractor(out, &iptRules); err == nil {
document = folio.DocumentRegistry.NewDocument(folio.URI(sourceResource.URI()))
for _, rule := range iptRules {
if rule == nil {
rule = resource.NewIptable()
}
rule.Table = resource.IptableName(i.Table)
rule.Chain = resource.IptableChain(i.Chain)
slog.Info("iptable chain source Extract()", "rule", rule)
document.(*folio.Document).AddResourceDeclaration("iptable", rule)
}
}
}
slog.Info("fan.Iptable.Extract()", "output", out, "error", err)
return document, err
}
func (i *Iptable) Emit(document data.Document, filter data.ElementSelector) (resourceTarget data.Resource, err error) {
return nil, nil
}
func (i *Iptable) Close() error {
return nil
}

271
internal/fan/jx.go Normal file

@@ -0,0 +1,271 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package fan
import (
"context"
_ "encoding/json"
"fmt"
_ "gopkg.in/yaml.v3"
"net/url"
"path/filepath"
"decl/internal/codec"
"decl/internal/folio"
"decl/internal/data"
_ "os"
"io"
"errors"
"log/slog"
"strings"
)
/*
Converts a file containing an encoded (yaml, json, etc.) JX document into a Document by using `Extract` or
`ExtractMany`.
Converts a JX Document structure into a yaml, json, etc encoded resource.
*/
type JxFile struct {
Uri folio.URI `yaml:"uri,omitempty" json:"uri,omitempty"`
url *url.URL `yaml:"-" json:"-"`
emitResource data.Resource `yaml:"-" json:"-"`
Path string `yaml:"path" json:"path"`
Format codec.Format `yaml:"format,omitempty" json:"format,omitempty"`
reader io.ReadCloser `yaml:"-" json:"-"`
writer io.WriteCloser `yaml:"-" json:"-"`
decoder codec.Decoder `yaml:"-" json:"-"`
encoder codec.Encoder `yaml:"-" json:"-"`
closer func() error `yaml:"-" json:"-"`
index int `yaml:"-" json:"-"`
}
func NewJxFile() *JxFile {
return &JxFile{ Format: codec.FormatYaml, index: 0, closer: func() error { return nil } }
}
func init() {
folio.DocumentRegistry.ConverterTypes.Register([]string{"decl", "jx", "yaml", "yml", "json"}, func(u *url.URL) data.Converter {
j := NewJxFile()
j.SetURI(u)
return j
})
folio.DocumentRegistry.ConverterTypes.RegisterContentType([]string{"jx.yaml","jx.yml","jx.yaml.gz","jx.yml.gz", "jx.json", "jx.json.gz"}, func(u *url.URL) data.Converter {
j := NewJxFile()
slog.Info("JxFile.Factory", "jx", j)
j.SetURI(u)
slog.Info("JxFile.Factory", "jx", j)
return j
})
}
/*
Schemes: file, json, yaml, yml, decl, jx, http, https, other transport schemes?
Format: URL scheme name, `format` query param, file extension
If the input url is a file
Detect Format
*/
func (j *JxFile) SetURI(u *url.URL) {
slog.Info("JxFile.SetURI()", "jx", j)
if ! errors.Is(j.Format.Set(u.Scheme), codec.ErrInvalidFormat) {
u.Scheme = "file"
q := u.Query()
q.Set("format", string(j.Format))
u.RawQuery = q.Encode()
} else {
if format,ok := u.Query()["format"]; ok {
_ = j.Format.Set(format[0])
}
}
if u.Scheme == "file" {
if u.Path == "" || u.Path == "-" {
j.Path = "-"
} else {
fileAbsolutePath, _ := filepath.Abs(filepath.Join(u.Hostname(), u.Path))
j.Path = fileAbsolutePath
if _, err := u.Parse(j.Path); err != nil {
panic(err)
}
}
} else {
j.Path = filepath.Join(u.Hostname(), u.RequestURI())
}
j.Uri.SetURL(u)
if j.Format == codec.FormatYaml {
exttype, ext := j.Uri.Extension()
if j.Format.Set(exttype) != nil {
_ = j.Format.Set(ext)
}
}
}
func (j *JxFile) setencoder(target data.ContentIdentifier) {
if formatErr := j.Format.Set(target.ContentType()); formatErr != nil {
j.Format = codec.FormatYaml
if format,ok := j.url.Query()["format"]; ok {
if queryFormatErr := j.Format.Set(format[0]); queryFormatErr != nil {
j.Format = codec.FormatYaml
}
}
}
if j.encoder == nil {
j.encoder = codec.NewEncoder(j.writer, j.Format)
}
}
func (j *JxFile) setdecoder(source data.ContentIdentifier) {
if j.decoder == nil {
for _,v := range strings.Split(source.ContentType(), ".") {
_ = j.Format.Set(v)
}
j.decoder = codec.NewDecoder(j.reader, j.Format)
}
}
func (j *JxFile) Type() data.TypeName { return "jx" }
func (j *JxFile) Extract(resourceSource data.Resource, filter data.ElementSelector) (doc data.Document, err error) {
if j.index == 0 { // XXX
if resourceSource == nil {
if len(j.Uri) > 0 {
resourceSource, err = j.Uri.NewResource(nil)
} else {
return nil, ErrInvalidSource
}
}
slog.Info("JxFile.Extract()", "source", resourceSource, "error", err)
var jxSourceFile data.FileResource = resourceSource.(data.FileResource)
j.reader, err = jxSourceFile.(data.ContentGetter).GetContent(nil)
slog.Info("JxFile.Extract()", "jxfile", j, "error", err)
if err != nil {
return
}
j.setdecoder(jxSourceFile.(data.ContentIdentifier))
slog.Info("JxFile.Extract()", "jxfile", j)
}
uri := resourceSource.URI()
if folio.DocumentRegistry.HasDocument(folio.URI(uri)) {
uri = fmt.Sprintf("%s?index=%d", uri, j.index)
}
doc = folio.DocumentRegistry.NewDocument(folio.URI(uri))
err = j.decoder.Decode(doc)
slog.Info("JxFile.Extract()", "doc", doc, "jxfile", j, "error", err)
j.index++
if err != nil {
return
}
if err = doc.Validate(); err != nil {
return
}
return
}
func (j *JxFile) ExtractMany(resourceSource data.Resource, filter data.ElementSelector) (documents []data.Document, err error) {
documents = make([]data.Document, 0, 100)
defer j.Close()
j.index = 0
for {
var doc data.Document
if doc, err = j.Extract(resourceSource, filter); err == nil {
documents = append(documents, doc)
} else {
if errors.Is(err, io.EOF) {
err = nil
//documents = append(documents, doc)
}
break
}
}
slog.Info("JxFile.ExtractMany()", "jxfile", j, "error", err)
return
}
func (j *JxFile) targetResource() (target data.Resource, err error) {
if j.emitResource == nil {
targetUrl := j.Uri.Parse()
targetUrl.Scheme = "file"
q := targetUrl.Query()
q.Set("format", string(j.Format))
targetUrl.RawQuery = q.Encode()
j.Uri.SetURL(targetUrl)
slog.Info("JxFile.targetResource() SetURI", "uri", j.Uri, "targetUrl", targetUrl)
j.url = targetUrl
slog.Info("JxFile.targetResource()", "target", targetUrl, "jxfile", j)
if j.emitResource, err = j.Uri.NewResource(nil); err != nil {
return nil, err
}
var jxTargetFile data.FileResource = j.emitResource.(data.FileResource)
jxTargetFile.SetContentSourceRef(j.Uri.String())
slog.Info("JxFile.targetResource() SetContentSourceRef", "target", jxTargetFile, "uri", j.Uri.String())
j.writer, err = jxTargetFile.(data.ContentReadWriter).ContentWriterStream()
j.setencoder(j.emitResource.(data.ContentIdentifier))
}
target = j.emitResource
return
}
func (j *JxFile) Emit(document data.Document, filter data.ElementSelector) (resourceTarget data.Resource, err error) {
ctx := context.Background()
resourceTarget, err = j.targetResource()
if err != nil {
return
}
emitDoc := folio.DocumentRegistry.NewDocument("")
if err = document.Validate(); err != nil {
return
}
slog.Info("JxFile.Emit()", "document", document, "context", ctx)
for _, declaration := range document.Filter(func (d data.Declaration) bool {
if filter != nil {
return filter(d.(*folio.Declaration).Attributes)
}
return true
}) {
//declaration.(*folio.Declaration).Resource().Read(ctx) // XXX added read here since it was removed from SetURI
emitDoc.ResourceDeclarations = append(emitDoc.ResourceDeclarations, declaration.(*folio.Declaration))
}
document.(*folio.Document).Format = j.Format
slog.Info("Emit", "target", j, "encoder", j.encoder, "emit", emitDoc)
if err = j.encoder.Encode(document); err != nil {
slog.Info("Emit", "err", err)
return
}
return
}
func (j *JxFile) EmitMany(documents []data.Document, filter data.ElementSelector) (resourceTarget data.Resource, err error) {
for _, doc := range documents {
if resourceTarget, err = j.Emit(doc, filter); err != nil {
return
}
}
return
}
func (j *JxFile) Close() (err error) {
if j.closer != nil {
err = j.closer()
}
if j.reader != nil {
j.reader.Close()
}
if j.encoder != nil {
j.encoder.Close()
}
if j.writer != nil {
j.writer.Close()
}
return
}

46
internal/fan/jx_test.go Normal file

@@ -0,0 +1,46 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package fan
import (
"github.com/stretchr/testify/assert"
"testing"
"decl/internal/codec"
"decl/internal/folio"
"decl/internal/data"
"net/url"
)
func TestNewJxSource(t *testing.T) {
s := NewJxFile()
assert.NotNil(t, s)
}
func TestJxSetURI(t *testing.T) {
for _,v := range []struct{ url string; expectedformat codec.Format; expecteduri string }{
{ url: "file://foo", expectedformat: codec.FormatYaml, expecteduri: "file://foo" },
{ url: "json://foo", expectedformat: codec.FormatJson, expecteduri: "file://foo?format=json" },
{ url: "yaml://foo", expectedformat: codec.FormatYaml, expecteduri: "file://foo?format=yaml" },
{ url: "file://foo?format=json", expectedformat: codec.FormatJson, expecteduri: "file://foo?format=json" },
{ url: "file://foo.jx.json", expectedformat: codec.FormatJson, expecteduri: "file://foo.jx.json" },
{ url: "file://foo.jx.json.gz", expectedformat: codec.FormatJson, expecteduri: "file://foo.jx.json.gz" },
{ url: "https://foo.jx.json.gz", expectedformat: codec.FormatJson, expecteduri: "https://foo.jx.json.gz" },
} {
j := NewJxFile()
assert.NotNil(t, j)
u,_ := url.Parse(v.url)
j.SetURI(u)
assert.Equal(t, v.expectedformat, j.Format)
assert.Equal(t, v.expecteduri, string(j.Uri))
}
}
func TestJxFactory(t *testing.T) {
converter, err := folio.DocumentRegistry.ConverterTypes.New("json://-")
assert.Nil(t, err)
assert.NotNil(t, converter)
assert.Equal(t, data.TypeName("jx"), converter.Type())
jxfile := converter.(*JxFile)
assert.Equal(t, "-", jxfile.Path)
assert.Equal(t, codec.FormatJson, jxfile.Format)
}

76
internal/fan/package.go Normal file

@@ -0,0 +1,76 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package fan
import (
_ "context"
_ "encoding/json"
"fmt"
_ "gopkg.in/yaml.v3"
"net/url"
_ "path/filepath"
"decl/internal/data"
"decl/internal/resource"
"decl/internal/folio"
_ "os"
_ "io"
"log/slog"
)
type Package struct {
PackageType resource.PackageType `yaml:"type" json:"type"`
}
func NewPackage() *Package {
return &Package{ PackageType: resource.SystemPackageType }
}
func init() {
folio.DocumentRegistry.ConverterTypes.Register([]string{"package"}, func(u *url.URL) data.Converter {
p := NewPackage()
packageType := u.Query().Get("type")
if len(packageType) > 0 {
p.PackageType = resource.PackageType(packageType)
}
return p
})
}
func (p *Package) Type() data.TypeName { return "package" }
func (p *Package) Extract(sourceResource data.Resource, filter data.ElementSelector) (document data.Document, err error) {
slog.Info("fan.Package.Extract()", "package", p)
installedPackages := make([]*resource.Package, 0, 100)
cmd := p.PackageType.NewReadPackagesCommand()
if cmd == nil {
return document, fmt.Errorf("%w: %s", resource.ErrUnsupportedPackageType, p.PackageType)
}
var out []byte
if out, err = cmd.Execute(p); err == nil {
slog.Info("fan.Package.Extract()", "output", out)
if err = cmd.Extractor(out, &installedPackages); err == nil {
document = folio.DocumentRegistry.NewDocument("file://-")
for _, pkg := range installedPackages {
if pkg == nil {
pkg = resource.NewPackage()
}
if _, err = document.NewResource(pkg.URI()); err != nil {
return
}
}
}
}
slog.Info("fan.Package.Extract()", "output", out, "error", err)
return
}
func (p *Package) Emit(document data.Document, filter data.ElementSelector) (resourceTarget data.Resource, err error) {
return nil, data.ErrUnsupportedConversion
}
func (p *Package) Close() error {
return nil
}

View File

@@ -1,6 +1,6 @@
 // Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
-package source
+package fan
 import (
 	"github.com/stretchr/testify/assert"
@@ -16,8 +16,8 @@ func TestExtractPackages(t *testing.T) {
 	p := NewPackage()
 	assert.NotNil(t, p)
-	document, err := p.ExtractResources(nil)
+	document, err := p.Extract(nil, nil)
 	assert.Nil(t, err)
 	assert.NotNil(t, document)
-	assert.Greater(t, len(document), 0)
+	assert.Greater(t, document.Len(), 0)
 }

217
internal/fan/tar.go Normal file

@@ -0,0 +1,217 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package fan
import (
_ "context"
_ "encoding/json"
"fmt"
_ "gopkg.in/yaml.v3"
"net/url"
"decl/internal/transport"
"decl/internal/data"
"decl/internal/folio"
"archive/tar"
_ "regexp"
"io"
"io/fs"
"log"
"log/slog"
"path/filepath"
)
type Tar struct {
Uri folio.URI `yaml:"uri" json:"uri"`
parsedURI *url.URL `yaml:"-" json:"-"`
emitResource data.Resource `yaml:"-" json:"-"`
reader io.ReadCloser `yaml:"-" json:"-"`
writer io.WriteCloser `yaml:"-" json:"-"`
targetArchive *tar.Writer `yaml:"-" json:"-"`
}
func NewTar() *Tar {
return &Tar{}
}
func init() {
folio.DocumentRegistry.ConverterTypes.Register([]string{"tar"}, func(u *url.URL) data.Converter {
t := NewTar()
t.SetURI(u)
return t
})
folio.DocumentRegistry.ConverterTypes.RegisterContentType([]string{"tar", "tar.gz", "tgz"}, func(u *url.URL) data.Converter {
t := NewTar()
t.SetURI(u)
return t
})
}
func (t *Tar) Type() data.TypeName { return "tar" }
func (t *Tar) SetURI(u *url.URL) {
slog.Info("Tar.SetURI()", "tar", t)
u.Scheme = "file"
if u.Path == "" || u.Path == "-" {
} else {
fileAbsolutePath, _ := filepath.Abs(filepath.Join(u.Hostname(), u.Path))
u.Path = fileAbsolutePath
}
t.Uri.SetURL(u)
t.parsedURI = u
exttype, _ := t.Uri.Extension()
if exttype == "tgz" {
q := u.Query()
q.Set("gzip", string("true"))
u.RawQuery = q.Encode()
}
}
func (t *Tar) targetResource() (target data.Resource, err error) {
if t.emitResource == nil {
if t.emitResource, err = t.Uri.NewResource(nil); err != nil {
return nil, err
}
var tarTargetFile data.FileResource = t.emitResource.(data.FileResource)
tarTargetFile.SetContentSourceRef(t.Uri.String())
t.writer, err = tarTargetFile.(data.ContentReadWriter).ContentWriterStream()
if err == io.EOF {
slog.Info("Tar.targetResource() ContentWriterStream", "target", tarTargetFile, "tar", t.writer.(*transport.Writer), "error", err)
panic(err)
}
t.targetArchive = tar.NewWriter(t.writer)
slog.Info("Tar.targetResource() SetContentSourceRef", "target", tarTargetFile, "uri", t.Uri.String(), "tar", t.targetArchive, "error", err)
}
target = t.emitResource
return
}
// Convert a document of file resources to a tar file resource
func (t *Tar) Emit(document data.Document, filter data.ElementSelector) (resourceTarget data.Resource, err error) {
resourceTarget, err = t.targetResource()
slog.Info("Tar.Emit()", "writer", t.writer.(*transport.Writer), "error", err)
/*
tarFile := resource.NewFile()
resourceTarget = tarFile
tarFile.Path = t.Path
tarFile.ContentSourceRef = folio.ResourceReference(t.Path)
t.writer, err = tarFile.ContentSourceRef.ContentWriterStream()
targetArchive := tar.NewWriter(t.writer)
defer t.writer.Close()
*/
for _,res := range document.Filter(func(d data.Declaration) bool {
return d.ResourceType() == "file"
}) {
var f data.FileResource = res.(*folio.Declaration).Attributes.(data.FileResource)
//f.PathNormalization(true)
//err = f.NormalizePath()
fileInfo := f.FileInfo()
slog.Info("Tar.Emit() FileInfo", "fileinfo", fileInfo, "size", fileInfo.Size(), "file", f)
if fileInfo.Size() < 1 {
if len(f.GetContentSourceRef()) > 0 {
rs, _ := f.(data.ContentReader).ContentReaderStream()
info, _ := rs.Stat()
err = f.SetFileInfo(info)
slog.Info("Tar.Emit() Set FileInfo from ContentSourceRef", "fileinfo", f.FileInfo(), "file", f)
rs.Close()
} else {
if err = f.(data.Info).ReadStat(); err != nil {
return
}
}
}
slog.Info("Tar.Emit", "file", f, "size", fileInfo.Size(), "error", err)
hdr, fiErr := tar.FileInfoHeader(fileInfo, "")
if fileInfo.Mode() & fs.ModeSymlink != 0 {
hdr.Linkname = f.GetTarget()
}
slog.Info("Tar.Emit", "header", hdr, "size", fileInfo.Size(), "err", fiErr)
if err := t.targetArchive.WriteHeader(hdr); err != nil {
slog.Error("Tar.Emit() WriteHeader", "target", t.targetArchive, "header", hdr, "resource", f, "fileinfo", fileInfo, "error", err)
log.Fatal(err)
}
if fileInfo.IsDir() {
continue
}
slog.Info("Tar.Emit - writing resource to target archive", "target", t.targetArchive, "resource", f, "err", err)
if _, err := f.GetContent(t.targetArchive); err != nil {
slog.Error("Tar.Emit() Content", "target", t.targetArchive, "resource", f, "fileinfo", fileInfo, "error", err)
log.Fatal(err)
}
slog.Info("Tar.Emit - wrote", "resource", f, "err", err)
}
return
}
// Convert a tar file resource to a document of file resources
func (t *Tar) Extract(resourceSource data.Resource, filter data.ElementSelector) (document data.Document, err error) {
document = folio.DocumentRegistry.NewDocument("")
var tarSourceFile data.FileResource = resourceSource.(data.FileResource)
//tarSourceFile := resourceSource.(*resource.File)
t.reader, err = tarSourceFile.GetContent(nil)
sourceArchive := tar.NewReader(t.reader)
defer t.reader.Close()
for {
var hdr *tar.Header
hdr, err = sourceArchive.Next()
if err == io.EOF {
err = nil
break
}
if err != nil {
return
}
var fileResource data.Resource
uri := fmt.Sprintf("file://%s", hdr.Name)
if fileResource, err = document.(*folio.Document).NewResource(uri); err != nil {
return
}
var f data.FileResource = fileResource.(data.FileResource)
if err = f.SetFileInfo(hdr.FileInfo()); err != nil {
return
}
err = f.SetContent(sourceArchive)
if err != nil {
return
}
}
return
}
func (t *Tar) Close() (err error) {
if t.reader != nil {
if err = t.reader.Close(); err != nil {
return
}
}
if err = t.targetArchive.Close(); err == nil {
if t.writer != nil {
err = t.writer.Close()
}
}
return
}
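Usage sketch mirroring tar_test.go below (paths are illustrative, not part of the commit): packing a document of file resources into a tar archive with the Tar converter.

package main

import (
	"log"
	"strings"

	"decl/internal/data"
	"decl/internal/fan"
	"decl/internal/folio"
)

func main() {
	t := fan.NewTar()
	t.Uri = folio.URI("file:///tmp/example.tar") // illustrative target path

	// Build a document containing one in-memory file resource.
	doc := folio.DocumentRegistry.NewDocument("")
	res, err := doc.NewResource("file:///tmp/foo.txt")
	if err != nil {
		log.Fatal(err)
	}
	if err := res.(data.FileResource).SetContent(strings.NewReader("some test data")); err != nil {
		log.Fatal(err)
	}

	// Emit writes each file declaration into the archive; Close flushes it.
	target, err := t.Emit(doc, nil)
	if err != nil {
		log.Fatal(err)
	}
	if err := t.Close(); err != nil {
		log.Fatal(err)
	}
	log.Printf("wrote archive %s", target.(data.FileResource).FilePath())
}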

105
internal/fan/tar_test.go Normal file

@@ -0,0 +1,105 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package fan
import (
"github.com/stretchr/testify/assert"
"testing"
"bytes"
"archive/tar"
"decl/internal/data"
"decl/internal/folio"
"decl/internal/resource"
"path/filepath"
"strings"
"io"
"fmt"
"log/slog"
)
var tarArchiveBuffer bytes.Buffer
func TarArchive() (err error) {
tw := tar.NewWriter(&tarArchiveBuffer)
defer tw.Close()
fileContent := "test file content"
if err = tw.WriteHeader(&tar.Header{
Name: "testfile",
Mode: 0600,
Size: int64(len(fileContent)),
}); err == nil {
_, err = tw.Write([]byte(fileContent))
}
return
}
func TestNewTar(t *testing.T) {
a := NewTar()
assert.NotNil(t, a)
}
func TestExtractFiles(t *testing.T) {
a := NewTar()
assert.NotNil(t, a)
e := TarArchive()
assert.Nil(t, e)
assert.Greater(t, tarArchiveBuffer.Len(), 0)
d := folio.NewDeclaration()
d.ResourceTypes = folio.DocumentRegistry.ResourceTypes
slog.Info("TestExtractFiles", "resourcetypes", folio.DocumentRegistry.ResourceTypes, "declarationtypes", d.ResourceTypes, "resource.ResourceTypes", resource.ResourceTypes)
d.Type = "file"
assert.Nil(t, d.NewResource(nil))
var sourceResource data.FileResource = d.Attributes.(data.FileResource)
assert.Nil(t, sourceResource.SetContent(&tarArchiveBuffer))
exDoc, err := a.Extract(d.Attributes, nil)
assert.Nil(t, err)
assert.NotNil(t, exDoc)
document := exDoc.(*folio.Document)
assert.Greater(t, document.Len(), 0)
assert.Equal(t, folio.TypeName("file"), document.ResourceDeclarations[0].Type)
f := document.ResourceDeclarations[0].Resource().(data.FileResource)
assert.Equal(t, "testfile", f.FilePath())
}
func TestEmitFiles(t *testing.T) {
expected := "some test data"
a := NewTar()
assert.NotNil(t, a)
a.Uri = folio.URI(fmt.Sprintf("file://%s", filepath.Join(TempDir, "testemitfiles.tar")))
doc := folio.DocumentRegistry.NewDocument("")
uri := fmt.Sprintf("file://%s", filepath.Join(TempDir, "foo.txt"))
res, resErr := doc.NewResource(uri)
assert.Nil(t, resErr)
assert.NotNil(t, res)
assert.Equal(t, res, doc.GetResource(uri).Resource())
f := doc.GetResource(uri).Attributes.(data.FileResource)
assert.Nil(t, f.SetContent(strings.NewReader(expected)))
target, emitErr := a.Emit(doc, nil)
assert.Nil(t, emitErr)
assert.Equal(t, folio.URI(fmt.Sprintf("file://%s", target.(data.FileResource).FilePath())), a.Uri)
tarArchiveBuffer.Reset()
_, contentErr := target.(data.FileResource).GetContent(&tarArchiveBuffer)
assert.Nil(t, contentErr)
tr := tar.NewReader(&tarArchiveBuffer)
hdr, err := tr.Next()
assert.NotEqual(t, io.EOF, err)
assert.NotNil(t, hdr)
assert.Equal(t, f.FilePath(), hdr.Name)
data, err := io.ReadAll(tr)
assert.Nil(t, err)
assert.Equal(t, expected, string(data))
}


@@ -1,6 +1,6 @@
 // Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
-package source
+package fan
 import (
 	_ "context"
@@ -10,6 +10,8 @@ _ "gopkg.in/yaml.v3"
 	"net/url"
 	_ "path/filepath"
 	"decl/internal/resource"
+	"decl/internal/data"
+	"decl/internal/folio"
 	_ "os"
 	_ "io"
 	"log/slog"
@@ -24,7 +26,7 @@ func NewUser() *User {
 }
 func init() {
-	SourceTypes.Register([]string{"user"}, func(u *url.URL) DocSource {
+	folio.DocumentRegistry.ConverterTypes.Register([]string{"user"}, func(u *url.URL) data.Converter {
 		userSource := NewUser()
 		userType := u.Query().Get("type")
 		if len(userType) > 0 {
@@ -35,23 +37,21 @@ func init() {
 }
-func (u *User) Type() string { return "user" }
-func (u *User) ExtractResources(filter ResourceSelector) ([]*resource.Document, error) {
-	documents := make([]*resource.Document, 0, 100)
-	slog.Info("user source ExtractResources()", "user", u)
+func (u *User) Type() data.TypeName { return "user" }
+func (u *User) Extract(sourceResource data.Resource, filter data.ElementSelector) (document data.Document, err error) {
+	slog.Info("user source Extract()", "user", u)
 	Users := make([]*resource.User, 0, 100)
 	cmd := u.UserType.NewReadUsersCommand()
 	if cmd == nil {
-		return documents, resource.ErrUnsupportedUserType
+		return document, resource.ErrUnsupportedUserType
 	}
 	if out, err := cmd.Execute(u); err == nil {
 		slog.Info("user source ExtractResources()", "output", out)
 		if exErr := cmd.Extractor(out, &Users); exErr != nil {
-			return documents, exErr
+			return document, exErr
 		}
-		document := resource.NewDocument()
+		document = folio.DocumentRegistry.NewDocument("user://-")
 		for _, usr := range Users {
 			if usr == nil {
 				usr = resource.NewUser()
@@ -59,10 +59,17 @@ func (u *User) ExtractResources(filter ResourceSelector) ([]*resource.Document,
 			usr.UserType = u.UserType
 			document.AddResourceDeclaration("user", usr)
 		}
-		documents = append(documents, document)
 	} else {
-		slog.Info("user source ExtractResources()", "output", out, "error", err)
-		return documents, err
+		slog.Info("user source Extract()", "output", out, "error", err)
+		return document, err
 	}
-	return documents, nil
+	return document, nil
 }
+func (u *User) Emit(document data.Document, filter data.ElementSelector) (resourceTarget data.Resource, err error) {
+	return nil, data.ErrUnsupportedConversion
+}
+func (u *User) Close() error {
+	return nil
+}


@@ -1,6 +1,6 @@
 // Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
-package source
+package fan
 import (
 	"github.com/stretchr/testify/assert"
@@ -16,8 +16,8 @@ func TestExtractUsers(t *testing.T) {
 	u := NewUser()
 	assert.NotNil(t, u)
-	document, err := u.ExtractResources(nil)
+	document, err := u.Extract(nil, nil)
 	assert.Nil(t, err)
 	assert.NotNil(t, document)
-	assert.Greater(t, len(document), 0)
+	assert.Greater(t, document.Len(), 0)
 }


@@ -1,98 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package source
import (
_ "context"
_ "encoding/json"
_ "fmt"
_ "gopkg.in/yaml.v3"
"net/url"
"path/filepath"
"decl/internal/resource"
"decl/internal/transport"
"decl/internal/codec"
"regexp"
_ "os"
"io"
"compress/gzip"
"errors"
"log/slog"
)
type DeclFile struct {
Path string `yaml:"path" json:"path"`
transport *transport.Reader `yaml:"-" json:"-"`
}
func NewDeclFile() *DeclFile {
return &DeclFile{}
}
func init() {
SourceTypes.Register([]string{"decl"}, func(u *url.URL) DocSource {
t := NewDeclFile()
t.Path,_ = filepath.Abs(filepath.Join(u.Hostname(), u.Path))
t.transport,_ = transport.NewReader(u)
return t
})
SourceTypes.Register([]string{"yaml","yml","yaml.gz","yml.gz"}, func(u *url.URL) DocSource {
t := NewDeclFile()
if u.Scheme == "file" {
fileAbsolutePath, _ := filepath.Abs(filepath.Join(u.Hostname(), u.Path))
t.Path = fileAbsolutePath
} else {
t.Path = filepath.Join(u.Hostname(), u.Path)
}
t.transport,_ = transport.NewReader(u)
return t
})
}
func (d *DeclFile) Type() string { return "decl" }
func (d *DeclFile) ExtractResources(filter ResourceSelector) ([]*resource.Document, error) {
documents := make([]*resource.Document, 0, 100)
GzipFileName := regexp.MustCompile(`^.*\.gz$`)
defer d.transport.Close()
var fileReader io.Reader
if GzipFileName.FindString(d.Path) == d.Path {
slog.Info("decompressing gzip", "path", d.Path)
zr, err := gzip.NewReader(d.transport)
if err != nil {
return documents, err
}
fileReader = zr
} else {
fileReader = d.transport
}
decoder := codec.NewYAMLDecoder(fileReader)
slog.Info("ExtractResources()", "documents", documents)
index := 0
for {
doc := resource.NewDocument()
e := decoder.Decode(doc)
slog.Info("ExtractResources().Decode()", "document", doc, "error", e)
if errors.Is(e, io.EOF) {
break
}
if e != nil {
return documents, e
}
slog.Info("ExtractResources()", "res", doc.ResourceDecls[0].Attributes)
if validationErr := doc.Validate(); validationErr != nil {
return documents, validationErr
}
documents = append(documents, doc)
index++
}
return documents, nil
}


@@ -1,101 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package source
import (
_ "context"
_ "encoding/json"
_ "fmt"
_ "gopkg.in/yaml.v3"
"net/url"
"path/filepath"
"decl/internal/resource"
"os"
"io"
)
type Dir struct {
Path string `yaml:"path" json:"path"`
subDirsStack []string `yaml:"-" json:"-"`
}
func NewDir() *Dir {
return &Dir{
subDirsStack: make([]string, 0, 100),
}
}
func init() {
SourceTypes.Register([]string{"file"}, func(u *url.URL) DocSource {
t := NewDir()
t.Path,_ = filepath.Abs(filepath.Join(u.Hostname(), u.Path))
return t
})
}
func (d *Dir) Type() string { return "dir" }
func (d *Dir) ExtractDirectory(path string) (*resource.Document, error) {
document := resource.NewDocument()
files, err := os.ReadDir(path)
if err != nil {
return nil, err
}
for _,file := range files {
f := resource.NewFile()
f.Path = filepath.Join(path, file.Name())
info, infoErr := file.Info()
if infoErr != nil {
return document, infoErr
}
if fiErr := f.UpdateAttributesFromFileInfo(info); fiErr != nil {
return document, fiErr
}
f.FileType.SetMode(file.Type())
if file.IsDir() {
d.subDirsStack = append(d.subDirsStack, f.Path)
} else {
fileReader, fileReaderErr := os.Open(f.Path)
if fileReaderErr != nil {
return document, fileReaderErr
}
readFileData, readErr := io.ReadAll(fileReader)
if readErr != nil {
return document, readErr
}
f.Content = string(readFileData)
f.UpdateContentAttributes()
}
document.AddResourceDeclaration("file", f)
}
return document, nil
}
func (d *Dir) ExtractResources(filter ResourceSelector) ([]*resource.Document, error) {
documents := make([]*resource.Document, 0, 100)
d.subDirsStack = append(d.subDirsStack, d.Path)
for {
if len(d.subDirsStack) == 0 {
break
}
var dirPath string
dirPath, d.subDirsStack = d.subDirsStack[len(d.subDirsStack) - 1], d.subDirsStack[:len(d.subDirsStack) - 1]
document, dirErr := d.ExtractDirectory(dirPath)
if dirErr != nil {
return documents, dirErr
}
documents = append(documents, document)
}
return documents, nil
}


@@ -1,23 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package source
import (
"github.com/stretchr/testify/assert"
"testing"
)
func TestNewDirSource(t *testing.T) {
s := NewDir()
assert.NotNil(t, s)
}
func TestExtractDirectory(t *testing.T) {
s := NewDir()
assert.NotNil(t, s)
document, err := s.ExtractDirectory(TempDir)
assert.Nil(t, err)
assert.NotNil(t, document)
}


@@ -1,37 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package source
import (
_ "context"
_ "encoding/json"
_ "fmt"
_ "gopkg.in/yaml.v3"
_ "net/url"
_ "regexp"
_ "strings"
_ "os"
_ "io"
_ "compress/gzip"
_ "archive/tar"
_ "errors"
_ "path/filepath"
"decl/internal/resource"
_ "decl/internal/codec"
)
type ResourceSelector func(r resource.Resource) bool
type DocSource interface {
Type() string
ExtractResources(filter ResourceSelector) ([]*resource.Document, error)
}
func NewDocSource(uri string) DocSource {
s, e := SourceTypes.New(uri)
if e == nil {
return s
}
return nil
}


@@ -1,48 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package source
import (
_ "context"
_ "fmt"
"github.com/stretchr/testify/assert"
_ "log"
"testing"
"os"
"log"
)
var TempDir string
func TestMain(m *testing.M) {
var err error
TempDir, err = os.MkdirTemp("", "testdocsourcefile")
if err != nil || TempDir == "" {
log.Fatal(err)
}
rc := m.Run()
os.RemoveAll(TempDir)
os.Exit(rc)
}
func TestNewDocSource(t *testing.T) {
resourceUri := "tar://foo"
testFile := NewDocSource(resourceUri)
assert.NotNil(t, testFile)
}
/*
func TestResolveId(t *testing.T) {
testFile := NewResource("file://../../README.md")
assert.NotNil(t, testFile)
absolutePath, e := filepath.Abs("../../README.md")
assert.Nil(t, e)
testFile.ResolveId(context.Background())
assert.Equal(t, absolutePath, testFile.(*File).Path)
}
*/

View File

@ -1,83 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package source
import (
_ "context"
_ "encoding/json"
_ "fmt"
_ "gopkg.in/yaml.v3"
"net/url"
_ "net/http"
_ "path/filepath"
"decl/internal/resource"
"decl/internal/iofilter"
"decl/internal/signature"
"decl/internal/transport"
"decl/internal/codec"
_ "os"
"io"
"errors"
"crypto/sha256"
)
type HTTP struct {
Endpoint string `yaml:"endpoint" json:"endpoint"`
transport *transport.Reader `yaml:"-" json:"-"`
}
func NewHTTP() *HTTP {
return &HTTP{}
}
func init() {
SourceTypes.Register([]string{"http","https"}, func(u *url.URL) DocSource {
t := NewHTTP()
t.Endpoint = u.String()
t.transport,_ = transport.NewReader(u)
return t
})
}
func (d *HTTP) Type() string { return "http" }
func (h *HTTP) ExtractResources(filter ResourceSelector) ([]*resource.Document, error) {
documents := make([]*resource.Document, 0, 100)
defer h.transport.Close()
documentSignature := h.transport.Signature()
hash := sha256.New()
sumReadData := iofilter.NewReader(h.transport, func(p []byte, readn int, readerr error) (n int, err error) {
hash.Write(p)
return
})
decoder := codec.NewYAMLDecoder(sumReadData)
index := 0
for {
doc := resource.NewDocument()
e := decoder.Decode(doc)
if errors.Is(e, io.EOF) {
break
}
if e != nil {
return documents, e
}
if validationErr := doc.Validate(); validationErr != nil {
return documents, validationErr
}
documents = append(documents, doc)
index++
}
if documentSignature != "" {
sig := &signature.Ident{}
sigErr := sig.VerifySum(hash.Sum(nil), []byte(documentSignature))
if sigErr != nil {
return documents, sigErr
}
}
return documents, nil
}
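
A sketch of constructing the HTTP source directly rather than through the registry; the endpoint is hypothetical and the errors from url.Parse and transport.NewReader are elided. The response body is hashed by the iofilter reader as it is decoded, so any signature reported by the transport is verified against exactly the bytes that were parsed:

func exampleHTTPSource() ([]*resource.Document, error) {
    u, _ := url.Parse("https://config.example.com/site.yaml") // hypothetical endpoint
    h := NewHTTP()
    h.Endpoint = u.String()
    h.transport, _ = transport.NewReader(u)
    return h.ExtractResources(func(r resource.Resource) bool { return true })
}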

View File

@ -1,69 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package source
import (
_ "context"
_ "encoding/json"
_ "fmt"
_ "gopkg.in/yaml.v3"
"net/url"
_ "path/filepath"
"decl/internal/resource"
_ "os"
_ "io"
"strings"
"log/slog"
)
type Iptable struct {
Table string `yaml:"table" json:"table"`
Chain string `yaml:"chain" json:"chain"`
}
func NewIptable() *Iptable {
return &Iptable{}
}
func init() {
SourceTypes.Register([]string{"iptable"}, func(u *url.URL) DocSource {
t := NewIptable()
t.Table = u.Hostname()
t.Chain = strings.Split(u.RequestURI(), "/")[1]
slog.Info("iptable chain source factory", "table", t, "uri", u, "table", u.Hostname())
return t
})
}
func (i *Iptable) Type() string { return "iptable" }
func (i *Iptable) ExtractResources(filter ResourceSelector) ([]*resource.Document, error) {
documents := make([]*resource.Document, 0, 100)
slog.Info("iptable chain source ExtractResources()", "table", i)
iptRules := make([]*resource.Iptable, 0, 100)
cmd := resource.NewIptableReadChainCommand()
if out, err := cmd.Execute(i); err == nil {
slog.Info("iptable chain source ExtractResources()", "output", out)
if exErr := cmd.Extractor(out, &iptRules); exErr != nil {
return documents, exErr
}
document := resource.NewDocument()
for _, rule := range iptRules {
if rule == nil {
rule = resource.NewIptable()
}
rule.Table = resource.IptableName(i.Table)
rule.Chain = resource.IptableChain(i.Chain)
document.AddResourceDeclaration("iptable", rule)
}
documents = append(documents, document)
} else {
slog.Info("iptable chain source ExtractResources()", "output", out, "error", err)
return documents, err
}
return documents, nil
}
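
The URI packs the table into the host and the chain into the first path element, so a URI such as iptable://filter/INPUT (illustrative) reads the INPUT chain of the filter table:

func exampleIptableSource() (DocSource, error) {
    // Resolves to &Iptable{Table: "filter", Chain: "INPUT"}.
    return SourceTypes.New("iptable://filter/INPUT")
}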

View File

@ -1,68 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package source
import (
_ "context"
_ "encoding/json"
_ "fmt"
_ "gopkg.in/yaml.v3"
"net/url"
_ "path/filepath"
"decl/internal/resource"
_ "os"
_ "io"
"log/slog"
)
type Package struct {
PackageType resource.PackageType `yaml:"type" json:"type"`
}
func NewPackage() *Package {
return &Package{ PackageType: resource.SystemPackageType }
}
func init() {
SourceTypes.Register([]string{"package"}, func(u *url.URL) DocSource {
p := NewPackage()
packageType := u.Query().Get("type")
if len(packageType) > 0 {
p.PackageType = resource.PackageType(packageType)
}
return p
})
}
func (p *Package) Type() string { return "package" }
func (p *Package) ExtractResources(filter ResourceSelector) ([]*resource.Document, error) {
documents := make([]*resource.Document, 0, 100)
slog.Info("package source ExtractResources()", "package", p)
installedPackages := make([]*resource.Package, 0, 100)
cmd := p.PackageType.NewReadPackagesCommand()
if cmd == nil {
return documents, resource.ErrUnsupportedPackageType
}
if out, err := cmd.Execute(p); err == nil {
slog.Info("package source ExtractResources()", "output", out)
if exErr := cmd.Extractor(out, &installedPackages); exErr != nil {
return documents, exErr
}
document := resource.NewDocument()
for _, pkg := range installedPackages {
if pkg == nil {
pkg = resource.NewPackage()
}
pkg.PackageType = p.PackageType
document.AddResourceDeclaration("package", pkg)
}
documents = append(documents, document)
} else {
slog.Info("package source ExtractResources()", "output", out, "error", err)
return documents, err
}
return documents, nil
}
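
Without a type query the source defaults to the system package manager. A sketch; the ?type= override is noted but left to whatever values resource.PackageType accepts:

func examplePackageSource() ([]*resource.Document, error) {
    // "package://" uses resource.SystemPackageType; append "?type=<name>" to
    // select a specific package manager.
    src, err := SourceTypes.New("package://")
    if err != nil {
        return nil, err
    }
    return src.ExtractResources(func(r resource.Resource) bool { return true })
}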

View File

@ -1,103 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package source
import (
_ "context"
_ "encoding/json"
_ "fmt"
_ "gopkg.in/yaml.v3"
"net/url"
"path/filepath"
"decl/internal/resource"
"decl/internal/transport"
"compress/gzip"
"archive/tar"
"regexp"
_ "os"
"io"
)
type Tar struct {
Path string `yaml:"path" json:"path"`
transport *transport.Reader `yaml:"-" json:"-"`
}
func NewTar() *Tar {
return &Tar{}
}
func init() {
SourceTypes.Register([]string{"tar"}, func(u *url.URL) DocSource {
t := NewTar()
t.Path,_ = filepath.Abs(filepath.Join(u.Hostname(), u.Path))
t.transport,_ = transport.NewReader(u)
return t
})
SourceTypes.Register([]string{"tar.gz", "tgz"}, func(u *url.URL) DocSource {
t := NewTar()
if u.Scheme == "file" {
fileAbsolutePath, _ := filepath.Abs(filepath.Join(u.Hostname(), u.Path))
t.Path = fileAbsolutePath
} else {
t.Path = filepath.Join(u.Hostname(), u.Path)
}
t.transport,_ = transport.NewReader(u)
return t
})
}
func (t *Tar) Type() string { return "tar" }
func (t *Tar) ExtractResources(filter ResourceSelector) ([]*resource.Document, error) {
documents := make([]*resource.Document, 0, 100)
d := resource.NewDocument()
documents = append(documents, d)
TarGzipFileName := regexp.MustCompile(`^.*\.(tar\.gz|tgz)$`)
TarFileName := regexp.MustCompile(`^.*\.tar$`)
defer t.transport.Close()
var gzipReader io.Reader
switch t.Path {
case TarGzipFileName.FindString(t.Path):
zr, err := gzip.NewReader(t.transport)
if err != nil {
return documents, err
}
gzipReader = zr
fallthrough
case TarFileName.FindString(t.Path):
var fileReader io.Reader
if gzipReader == nil {
fileReader = t.transport
} else {
fileReader = gzipReader
}
tarReader := tar.NewReader(fileReader)
for {
hdr, err := tarReader.Next()
if err == io.EOF {
break
}
if err != nil {
return documents, err
}
f := resource.NewFile()
f.Path = hdr.Name
if fiErr := f.UpdateAttributesFromFileInfo(hdr.FileInfo()); fiErr != nil {
return documents, fiErr
}
readErr := f.SetContent(tarReader)
if readErr != nil {
return documents, readErr
}
d.AddResourceDeclaration("file", f)
}
}
return documents, nil
}
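
A sketch of reading file resources out of a gzip-compressed tarball, assuming the shared registry in decl/internal/types resolves the ".tar.gz" extension the way the explicit "tar.gz"/"tgz" registration here suggests; the archive path is illustrative:

func exampleTarSource() ([]*resource.Document, error) {
    src, err := SourceTypes.New("file:///backups/etc-config.tar.gz")
    if err != nil {
        return nil, err
    }
    // Every tar entry becomes a "file" declaration in a single document.
    return src.ExtractResources(func(r resource.Resource) bool { return true })
}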

View File

@ -1,14 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package source
import (
"github.com/stretchr/testify/assert"
"testing"
)
func TestNewTarSource(t *testing.T) {
s := NewTar()
assert.NotNil(t, s)
}

View File

@ -1,28 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package source
import (
"errors"
"fmt"
_ "net/url"
"strings"
_ "path/filepath"
"decl/internal/types"
)
var (
ErrUnknownSourceType = errors.New("Unknown source type")
SourceTypes *types.Types[DocSource] = types.New[DocSource]()
)
type TypeName string //`json:"type"`
func (n *TypeName) UnmarshalJSON(b []byte) error {
SourceTypeName := strings.Trim(string(b), "\"")
if SourceTypes.Has(SourceTypeName) {
*n = TypeName(SourceTypeName)
return nil
}
return fmt.Errorf("%w: %s", ErrUnknownSourceType, SourceTypeName)
}

View File

@ -1,47 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package source
import (
_ "context"
"encoding/json"
"github.com/stretchr/testify/assert"
"net/url"
"testing"
"decl/internal/resource"
)
type MockDocSource struct {
InjectType func() string
InjectExtractResources func(filter ResourceSelector) ([]*resource.Document, error)
}
func (m *MockDocSource) Type() string { return m.InjectType() }
func (m *MockDocSource) ExtractResources(filter ResourceSelector) ([]*resource.Document, error) { return m.InjectExtractResources(filter) }
func NewFooDocSource() DocSource {
return &MockDocSource{
InjectType: func() string { return "foo" },
InjectExtractResources: func(filter ResourceSelector) ([]*resource.Document, error) { return nil,nil },
}
}
func NewFileDocSource() DocSource {
return &MockDocSource{
InjectType: func() string { return "file" },
InjectExtractResources: func(filter ResourceSelector) ([]*resource.Document, error) { return nil,nil },
}
}
func TestDocSourceTypeName(t *testing.T) {
SourceTypes.Register([]string{"file"}, func(*url.URL) DocSource { return NewFileDocSource() })
type fDocSourceName struct {
Name TypeName `json:"type"`
}
fTypeName := &fDocSourceName{}
jsonType := `{ "type": "file" }`
e := json.Unmarshal([]byte(jsonType), &fTypeName)
assert.Nil(t, e)
assert.Equal(t, "file", string(fTypeName.Name))
}

View File

@ -1,174 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package target
import (
_ "context"
_ "encoding/json"
_ "fmt"
_ "gopkg.in/yaml.v3"
"net/url"
"path/filepath"
"decl/internal/resource"
"decl/internal/codec"
"os"
"compress/gzip"
"io"
_ "errors"
"log/slog"
)
const (
FormatYaml = "yaml"
FormatJson = "json"
)
type DeclFile struct {
Path string `yaml:"path" json:"path"`
Gzip bool `yaml:"gzip,omitempty" json:"gzip,omitempty"`
Format string `yaml:"format,omitempty" json:"format,omitempty"`
encoder codec.Encoder `yaml:"-" json:"-"`
closer func() error `yaml:"-" json:"-"`
}
func NewDeclFile() *DeclFile {
return &DeclFile{ Gzip: false, closer: func() error { return nil } }
}
func NewFileDocTarget(u *url.URL, format string, gzip bool, fileUri bool) DocTarget {
t := NewDeclFile()
t.Format = format
t.Gzip = gzip
if fileUri {
fileAbsolutePath, _ := filepath.Abs(filepath.Join(u.Hostname(), u.Path))
t.Path = fileAbsolutePath
} else {
t.Path = filepath.Join(u.Hostname(), u.Path)
}
if e := t.Open(); e != nil {
return nil
}
return t
}
func init() {
TargetTypes.Register([]string{"decl", "file"}, func(u *url.URL) DocTarget {
t := NewDeclFile()
if u.Path != "-" {
t.Path,_ = filepath.Abs(filepath.Join(u.Hostname(), u.Path))
} else {
t.Path = "-"
}
if _,ok := u.Query()["gzip"]; ok {
t.Gzip = true
}
if format,ok := u.Query()["format"]; ok {
switch format[0] {
case string(FormatYaml):
t.Format = FormatYaml
case string(FormatJson):
t.Format = FormatJson
}
}
if e := t.Open(); e != nil {
return nil
}
return t
})
TargetTypes.Register([]string{"yaml.gz","yml.gz"}, func(u *url.URL) DocTarget {
switch u.Scheme {
case "yaml", "yml", "file":
return NewFileDocTarget(u, FormatYaml, true, false)
}
return NewFileDocTarget(u, FormatYaml, true, false)
})
TargetTypes.Register([]string{"json.gz"}, func(u *url.URL) DocTarget {
switch u.Scheme {
case "json", "file":
return NewFileDocTarget(u, FormatJson, true, false)
}
return NewFileDocTarget(u, FormatJson, true, false)
})
TargetTypes.Register([]string{"yaml","yml"}, func(u *url.URL) DocTarget {
switch u.Scheme {
case "yaml", "yml", "file":
return NewFileDocTarget(u, FormatYaml, false, false)
}
return NewFileDocTarget(u, FormatYaml, false, false)
})
TargetTypes.Register([]string{"json"}, func(u *url.URL) DocTarget {
switch u.Scheme {
case "json", "file":
return NewFileDocTarget(u, FormatJson, false, false)
}
return NewFileDocTarget(u, FormatJson, false, false)
})
}
func (d *DeclFile) Open() error {
var file *os.File
var fileErr error
var fileWriter io.WriteCloser
if d.Path == "" || d.Path == "-" {
file = os.Stdout
} else {
// This target writes the encoded documents, so create/truncate the file
// rather than opening it read-only.
file, fileErr = os.Create(d.Path)
if fileErr != nil {
return fileErr
}
d.closer = func() error {
d.encoder.Close()
fileWriter.Close()
if file != fileWriter {
file.Close()
}
return nil
}
}
if d.Gzip {
fileWriter = gzip.NewWriter(file)
} else {
fileWriter = file
}
switch d.Format {
case FormatJson:
d.encoder = codec.NewJSONEncoder(fileWriter)
case FormatYaml:
fallthrough
default:
d.encoder = codec.NewYAMLEncoder(fileWriter)
}
return nil
}
func (d *DeclFile) Close() error {
return d.closer()
}
func (d *DeclFile) Type() string { return "decl" }
func (d *DeclFile) EmitResources(documents []*resource.Document, filter resource.ResourceSelector) (error) {
for _, doc := range documents {
emitDoc := resource.NewDocument()
if validationErr := doc.Validate(); validationErr != nil {
return validationErr
}
for _, declaration := range doc.Filter(filter) {
emitDoc.ResourceDecls = append(emitDoc.ResourceDecls, *declaration)
}
slog.Info("EmitResources", "doctarget", d, "encoder", d.encoder, "emit", emitDoc)
if documentErr := d.encoder.Encode(emitDoc); documentErr != nil {
slog.Info("EmitResources", "err", documentErr)
return documentErr
}
}
return nil
}
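
A sketch of emitting documents through the registry, as this target was used before the move; the output path is illustrative. The ".yaml" extension selects the YAML encoder, while a "-" path (the default for an empty URI) writes YAML to stdout:

func exampleDeclFileTarget(docs []*resource.Document) error {
    t := NewDocTarget("file:///tmp/site.yaml")
    if t == nil {
        return ErrUnknownTargetType
    }
    defer t.Close()
    return t.EmitResources(docs, func(d *resource.Declaration) bool { return true })
}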

View File

@ -1,36 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package target
import (
_ "context"
_ "encoding/json"
_ "fmt"
_ "gopkg.in/yaml.v3"
_ "net/url"
_ "regexp"
_ "strings"
_ "os"
_ "io"
"decl/internal/resource"
)
// A DocTarget converts documents into some other container type (file, archive, stream, etc.).
// TODO: move the selector type into the resource package.
// type ResourceSelector func(r resource.Resource) bool
type DocTarget interface {
Type() string
EmitResources(documents []*resource.Document, filter resource.ResourceSelector) error
Close() error
}
func NewDocTarget(uri string) DocTarget {
s, e := TargetTypes.New(uri)
if e == nil {
return s
}
return nil
}
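
Sources and targets compose into a simple copy pipeline. A sketch from a hypothetical caller that imports decl/internal/resource together with these two packages (aliased here as source and target); the URIs are whatever the registries accept:

func exampleCopy(fromURI, toURI string) error {
    src := source.NewDocSource(fromURI)
    if src == nil {
        return source.ErrUnknownSourceType
    }
    dst := target.NewDocTarget(toURI)
    if dst == nil {
        return target.ErrUnknownTargetType
    }
    defer dst.Close()
    docs, err := src.ExtractResources(func(r resource.Resource) bool { return true })
    if err != nil {
        return err
    }
    return dst.EmitResources(docs, func(d *resource.Declaration) bool { return true })
}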

View File

@ -1,105 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package target
import (
_ "context"
_ "encoding/json"
_ "fmt"
_ "gopkg.in/yaml.v3"
"net/url"
"path/filepath"
"decl/internal/resource"
"compress/gzip"
"archive/tar"
_ "regexp"
"os"
"io"
"log"
"log/slog"
)
type Tar struct {
Path string `yaml:"path" json:"path"`
Gzip bool `yaml:"gzip" json:"gzip"`
writer *tar.Writer `yaml:"-" json:"-"`
closer func() error `yaml:"-" json:"-"`
}
func NewTar() *Tar {
return &Tar{ Gzip: false, closer: func() error { return nil } }
}
func init() {
TargetTypes.Register([]string{"tar"}, func(u *url.URL) DocTarget {
t := NewTar()
t.Path,_ = filepath.Abs(filepath.Join(u.Hostname(), u.Path))
if e := t.Open(); e != nil {
return nil
}
return t
})
TargetTypes.Register([]string{"tar.gz", "tgz"}, func(u *url.URL) DocTarget {
t := NewTar()
if u.Scheme == "file" {
fileAbsolutePath, _ := filepath.Abs(filepath.Join(u.Hostname(), u.Path))
t.Path = fileAbsolutePath
} else {
t.Path = filepath.Join(u.Hostname(), u.Path)
}
t.Gzip = true
if e := t.Open(); e != nil {
return nil
}
return t
})
}
func (t *Tar) Open() error {
file, fileErr := os.Create(t.Path)
if fileErr != nil {
return fileErr
}
var fileWriter io.WriteCloser
if t.Gzip {
fileWriter = gzip.NewWriter(file)
} else {
fileWriter = file
}
t.writer = tar.NewWriter(fileWriter)
t.closer = func() error {
t.writer.Close()
fileWriter.Close()
return file.Close()
}
return nil
}
func (t *Tar) Close() error {
return t.closer()
}
func (t *Tar) Type() string { return "tar" }
func (t *Tar) EmitResources(documents []*resource.Document, filter resource.ResourceSelector) error {
for _,document := range documents {
for _,res := range document.Filter(func(d *resource.Declaration) bool {
return d.Type == "file"
}) {
var f *resource.File = res.Attributes.(*resource.File)
slog.Info("Tar.EmitResources", "file", f)
hdr, fiErr := tar.FileInfoHeader(f.FileInfo(), "")
slog.Info("Tar.EmitResources", "header", hdr, "err", fiErr)
if err := t.writer.WriteHeader(hdr); err != nil {
log.Fatal(err)
}
if _, err := t.writer.Write([]byte(f.Content)); err != nil {
log.Fatal(err)
}
}
}
return nil
}
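
A sketch of writing documents out as a gzip-compressed tarball; the path is illustrative. Note that this target applies its own internal filter and only writes "file" declarations, regardless of the selector passed in:

func exampleTarTarget(docs []*resource.Document) error {
    t := NewDocTarget("file:///tmp/etc-config.tgz") // the ".tgz" extension enables gzip
    if t == nil {
        return ErrUnknownTargetType
    }
    defer t.Close()
    return t.EmitResources(docs, func(d *resource.Declaration) bool { return true })
}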

View File

@ -1,14 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package target
import (
"github.com/stretchr/testify/assert"
"testing"
)
func TestNewTarTarget(t *testing.T) {
s := NewTar()
assert.NotNil(t, s)
}

View File

@ -1,100 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package target
import (
"errors"
"fmt"
"net/url"
"strings"
"path/filepath"
"log/slog"
)
var (
ErrUnknownTargetType = errors.New("Unknown target type")
TargetTypes *Types = NewTypes()
)
type TypeName string //`json:"type"`
type TypeFactory func(*url.URL) DocTarget
type Types struct {
registry map[string]TypeFactory
}
func NewTypes() *Types {
return &Types{registry: make(map[string]TypeFactory)}
}
func (t *Types) Register(names []string, factory TypeFactory) {
for _,name := range names {
t.registry[name] = factory
}
}
func (t *Types) FromExtension(path string) (TypeFactory, error) {
elements := strings.Split(path, ".")
numberOfElements := len(elements)
if numberOfElements > 2 {
// Try the compound extension first, e.g. "tar.gz" or "yaml.gz".
if src := t.Get(strings.Join(elements[numberOfElements - 2:], ".")); src != nil {
return src, nil
}
}
if src := t.Get(elements[numberOfElements - 1]); src != nil {
return src, nil
}
return nil, fmt.Errorf("%w: %s", ErrUnknownTargetType, path)
}
func (t *Types) New(uri string) (DocTarget, error) {
if uri == "" {
uri = "file://-"
}
u, e := url.Parse(uri)
if u == nil || e != nil {
return nil, fmt.Errorf("%w: %s", ErrUnknownTargetType, e)
}
if u.Scheme == "" {
u.Scheme = "file"
}
path := filepath.Join(u.Hostname(), u.Path)
if d, lookupErr := t.FromExtension(path); d != nil {
slog.Info("Target.New", "target", t, "err", lookupErr)
return d(u), lookupErr
} else {
slog.Info("Target.New", "target", t, "err", lookupErr)
}
if r, ok := t.registry[u.Scheme]; ok {
return r(u), nil
}
return nil, fmt.Errorf("%w: %s", ErrUnknownTargetType, u.Scheme)
}
func (t *Types) Has(typename string) bool {
if _, ok := t.registry[typename]; ok {
return true
}
return false
}
func (t *Types) Get(typename string) TypeFactory {
if d, ok := t.registry[typename]; ok {
return d
}
return nil
}
func (n *TypeName) UnmarshalJSON(b []byte) error {
TargetTypeName := strings.Trim(string(b), "\"")
if TargetTypes.Has(TargetTypeName) {
*n = TypeName(TargetTypeName)
return nil
}
return fmt.Errorf("%w: %s", ErrUnknownTargetType, TargetTypeName)
}
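
The intended resolution order in Types.New is: the two-part extension (e.g. "yaml.gz"), then the single extension, then the URI scheme. A sketch with illustrative paths:

func exampleTargetLookup() {
    t1, _ := TargetTypes.New("file:///tmp/out.yaml.gz") // compound extension -> gzip-compressed YAML file target
    t2, _ := TargetTypes.New("file:///tmp/out.json")    // single extension -> JSON file target
    t3, _ := TargetTypes.New("tar:///tmp/out")          // no extension match -> "tar" scheme factory
    _, _, _ = t1, t2, t3
}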

View File

@ -1,90 +0,0 @@
// Copyright 2024 Matthew Rich <matthewrich.conf@gmail.com>. All rights reserved.
package target
import (
_ "context"
"encoding/json"
"github.com/stretchr/testify/assert"
"net/url"
"testing"
"decl/internal/resource"
)
type MockDocTarget struct {
InjectType func() string
InjectEmitResources func(documents []*resource.Document, filter resource.ResourceSelector) error
}
func (m *MockDocTarget) Type() string { return m.InjectType() }
func (m *MockDocTarget) Close() error { return nil }
func (m *MockDocTarget) EmitResources(documents []*resource.Document, filter resource.ResourceSelector) error { return m.InjectEmitResources(documents, filter) }
func NewFooDocTarget() DocTarget {
return &MockDocTarget{
InjectType: func() string { return "foo" },
InjectEmitResources: func(documents []*resource.Document, filter resource.ResourceSelector) error { return nil },
}
}
func NewMockFileDocTarget() DocTarget {
return &MockDocTarget{
InjectType: func() string { return "file" },
InjectEmitResources: func(documents []*resource.Document, filter resource.ResourceSelector) error { return nil },
}
}
func TestNewTargetTypes(t *testing.T) {
targetTypes := NewTypes()
assert.NotNil(t, targetTypes)
}
func TestNewTargetTypesRegister(t *testing.T) {
m := NewFooDocTarget()
targetTypes := NewTypes()
assert.NotNil(t, targetTypes)
targetTypes.Register([]string{"foo"}, func(*url.URL) DocTarget { return m })
r, e := targetTypes.New("foo://")
assert.Nil(t, e)
assert.Equal(t, m, r)
}
func TestTargetTypesFromURI(t *testing.T) {
m := NewFooDocTarget()
targetTypes := NewTypes()
assert.NotNil(t, targetTypes)
targetTypes.Register([]string{"foo"}, func(*url.URL) DocTarget { return m })
r, e := targetTypes.New("foo://bar")
assert.Nil(t, e)
assert.Equal(t, m, r)
}
func TestTargetTypesHasType(t *testing.T) {
m := NewFooDocTarget()
targetTypes := NewTypes()
assert.NotNil(t, targetTypes)
targetTypes.Register([]string{"foo"}, func(*url.URL) DocTarget { return m })
assert.True(t, targetTypes.Has("foo"))
}
func TestDocTargetTypeName(t *testing.T) {
TargetTypes.Register([]string{"file"}, func(*url.URL) DocTarget { return NewMockFileDocTarget() })
type fDocTargetName struct {
Name TypeName `json:"type"`
}
fTypeName := &fDocTargetName{}
jsonType := `{ "type": "file" }`
e := json.Unmarshal([]byte(jsonType), &fTypeName)
assert.Nil(t, e)
assert.Equal(t, "file", string(fTypeName.Name))
}