fix closing the output writer; test tar output
parent c2ae42bd7a
commit cb923b96c9
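
In short, the Close() fix guards against an emitter that was never opened. A minimal standalone sketch of the guarded pattern (the App and emitter names come from the diff below; the Emitter interface and main function here are illustrative only):

package main

import (
    "fmt"
    "io"
    "log/slog"
    "os"
)

// Emitter is an illustrative stand-in for the output converter the App writes to.
type Emitter interface {
    io.Closer
}

type App struct {
    emitter Emitter
}

// Close releases the output writer only when one was actually opened,
// mirroring the guarded Close() introduced by this commit; calling
// Close() on a nil interface would panic.
func (a *App) Close() (err error) {
    if a.emitter != nil {
        slog.Info("Client.Close() emitter", "emitter", a.emitter)
        return a.emitter.Close()
    }
    return
}

func main() {
    a := &App{} // no output configured: Close() must not panic
    if err := a.Close(); err != nil {
        fmt.Fprintln(os.Stderr, err)
    }
}
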
@@ -94,6 +94,7 @@ func (a *App) SetOutput(uri string) (err error) {
    if a.emitter, err = folio.DocumentRegistry.ConverterTypes.New(uri); err != nil {
        return fmt.Errorf("Failed opening target: %s, %w", uri, err)
    }
    slog.Info("Client.SetOutput()", "uri", uri, "emitter", a.emitter)
    return
}

@@ -148,9 +149,8 @@ func (a *App) ImportResource(ctx context.Context, uri string) (err error) {
func (a *App) ImportSource(uri string) (loadedDocuments []data.Document, err error) {
    if loadedDocuments, err = folio.DocumentRegistry.Load(folio.URI(uri)); err == nil && loadedDocuments != nil {
        a.Documents = append(a.Documents, loadedDocuments...)
    } else {
        return
    }
    slog.Info("Client.ImportSource()", "uri", uri, "error", err)
    return
}

@@ -201,6 +201,7 @@ func (a *App) Apply(ctx context.Context, deleteResources bool) (err error) {
}

func (a *App) ImportCmd(ctx context.Context, docs []string, resourceURI string, quiet bool, merge bool) (err error) {
    defer a.Close()
    if err = a.Import(docs); err != nil {
        return
    }
@@ -230,6 +231,7 @@ func (a *App) ImportCmd(ctx context.Context, docs []string, resourceURI string,
}

func (a *App) ApplyCmd(ctx context.Context, docs []string, quiet bool, deleteResources bool) (err error) {
    defer a.Close()
    var failedResources error
    if err = a.Import(docs); err != nil {
        return
@@ -319,6 +321,7 @@ func (a *App) DiffCmd(docs []string) (err error) {
}

func (a *App) ConfigCmd(docs []string, includeSystemConfig bool) (err error) {
    defer a.Close()
    if err = a.BuiltInConfiguration(); err != nil {
        slog.Warn("BuiltInConfiguration()", "error", err)
    }
@@ -355,6 +358,7 @@ func (a *App) Quiet() (err error) {
func (a *App) Emit() (err error) {
    if a.merged == nil {
        for _, d := range a.Documents {
            slog.Info("Client.Emit() document", "document", d)
            if _, err = a.emitter.Emit(d, nil); err != nil {
                return
            }
@@ -368,5 +372,9 @@ func (a *App) Emit() (err error) {
}

func (a *App) Close() (err error) {
    return a.emitter.Close()
    if a.emitter != nil {
        slog.Info("Client.Close() emitter", "emitter", a.emitter)
        return a.emitter.Close()
    }
    return
}

@@ -14,9 +14,14 @@ import (
    _ "decl/internal/fan"
    "decl/internal/codec"
    "decl/internal/data"
    "decl/internal/ext"
    "context"
    "fmt"
    "log/slog"
    "archive/tar"
    "compress/gzip"
    "bytes"
    "io"
)

var programLevel = new(slog.LevelVar)
@@ -243,3 +248,85 @@ resources:

    assert.NoFileExists(t, TestFile)
}

var tarArchiveBuffer bytes.Buffer

func TarArchive(compress bool) (err error) {
    var fileWriter io.WriteCloser

    if compress {
        gz := gzip.NewWriter(&tarArchiveBuffer)
        defer gz.Close()
        fileWriter = gz
    } else {
        fileWriter = ext.WriteNopCloser(&tarArchiveBuffer)
    }

    tw := tar.NewWriter(fileWriter)

    fileContent := "test file content"

    if err = tw.WriteHeader(&tar.Header{
        Name: "testfile",
        Mode: 0600,
        Size: int64(len(fileContent)),
    }); err == nil {
        _, err = tw.Write([]byte(fileContent))
    }
    tw.Close()
    return
}

func TestClientConverters(t *testing.T) {
    for _, v := range []struct { Expected data.TypeName; URI string } {
        { Expected: data.TypeName("dir"), URI: "file:///tmp" },
        { Expected: data.TypeName("http"), URI: "https://localhost/test" },
        { Expected: data.TypeName("iptable"), URI: "iptable://filter/INPUT" },
        { Expected: data.TypeName("jx"), URI: "file:///tmp/test.jx.yaml" },
        { Expected: data.TypeName("package"), URI: "package://" },
        { Expected: data.TypeName("container"), URI: "container://" },
        { Expected: data.TypeName("user"), URI: "user://" },
        { Expected: data.TypeName("group"), URI: "group://" },
        { Expected: data.TypeName("tar"), URI: "tar://" },
        { Expected: data.TypeName("tar"), URI: "file:///tmp/foo.tar" },
        { Expected: data.TypeName("tar"), URI: "file:///tmp/foo.tar.gz" },
        { Expected: data.TypeName("tar"), URI: "file:///tmp/foo.tgz" },
    } {
        c, e := folio.DocumentRegistry.ConverterTypes.New(v.URI)
        assert.Nil(t, e)
        assert.NotNil(t, c)
        assert.Equal(t, v.Expected, c.Type())
    }
}

func TestClientImportTar(t *testing.T) {
    c := NewClient()
    assert.NotNil(t, c)

    /*
    e := TarArchive(true)
    assert.Nil(t, e)
    assert.Greater(t, tarArchiveBuffer.Len(), 0)

    path, err := TempDir.CreateFileFromReader("test.tar.gz", &tarArchiveBuffer)
    assert.Nil(t, err)
    uri := fmt.Sprintf("file://%s", path)
    */

    uri := "file:///tmp/jx-v0.2.3.tar.gz"

    d := folio.NewDeclaration()
    assert.Nil(t, d.NewResource(&uri))

    //assert.Equal(t, "11f08defe14cf8710583158c86e175bab40bfdbcd5ffb491479aacb8492ebf67", )
    //var sourceResource data.FileResource = d.Attributes.(data.FileResource)
    //assert.Nil(t, sourceResource.SetContent(&tarArchiveBuffer))
    //sourceResource.SetDecodeContent(true)

    docs, importErr := c.ImportSource(uri)

    assert.Nil(t, importErr)
    assert.Greater(t, len(docs), 0)
}
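
For reference, a minimal sketch (not part of the diff) of reading back the archive produced by TarArchive(true) to verify its contents, assuming it sits in the same test file where archive/tar, compress/gzip, bytes, and io are already imported:

func ReadTarArchive() (names []string, err error) {
    // tarArchiveBuffer holds the gzip-compressed tar stream written by TarArchive(true).
    gz, err := gzip.NewReader(bytes.NewReader(tarArchiveBuffer.Bytes()))
    if err != nil {
        return nil, err
    }
    defer gz.Close()

    tr := tar.NewReader(gz)
    for {
        hdr, readErr := tr.Next()
        if readErr == io.EOF {
            break
        }
        if readErr != nil {
            return names, readErr
        }
        names = append(names, hdr.Name) // expect "testfile" carrying "test file content"
    }
    return names, nil
}
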
@@ -98,6 +98,7 @@ type FileResource interface {
    PathNormalization(bool)
    NormalizePath() error
    GetTarget() string
    SetGzipContent(bool)
}

type Signed interface {
@@ -127,7 +127,7 @@ func (j *JxFile) setdecoder(source data.ContentIdentifier) {
func (j *JxFile) Type() data.TypeName { return "jx" }

func (j *JxFile) Extract(resourceSource data.Resource, filter data.ElementSelector) (doc data.Document, err error) {
    if j.index == 0 { // XXX
    if j.index == 0 {
        if resourceSource == nil {
            if len(j.Uri) > 0 {
                resourceSource, err = j.Uri.NewResource(nil)
@@ -60,12 +60,14 @@ func (t *Tar) SetURI(u *url.URL) {
    }
    t.Uri.SetURL(u)
    t.parsedURI = u
    exttype, _ := t.Uri.Extension()
    if exttype == "tgz" {
    /*
    exttype, fileext := t.Uri.Extension()
    if exttype == "tgz" || fileext == "tgz" {
        q := u.Query()
        q.Set("gzip", string("true"))
        u.RawQuery = q.Encode()
    }
    */
}

func (t *Tar) targetResource() (target data.Resource, err error) {
@@ -77,6 +79,7 @@ func (t *Tar) targetResource() (target data.Resource, err error) {

    var tarTargetFile data.FileResource = t.emitResource.(data.FileResource)
    tarTargetFile.SetContentSourceRef(t.Uri.String())
    tarTargetFile.SetGzipContent(true)

    t.writer, err = tarTargetFile.(data.ContentReadWriter).ContentWriterStream()
    if err == io.EOF {
@@ -85,6 +88,7 @@ func (t *Tar) targetResource() (target data.Resource, err error) {
    }
    t.targetArchive = tar.NewWriter(t.writer)
    slog.Info("Tar.targetResource() SetContentSourceRef", "target", tarTargetFile, "uri", t.Uri.String(), "tar", t.targetArchive, "error", err)

    }
    target = t.emitResource
    return
@@ -97,18 +101,6 @@ func (t *Tar) Emit(document data.Document, filter data.ElementSelector) (resourc

    slog.Info("Tar.Emit()", "writer", t.writer.(*transport.Writer), "error", err)

    /*
    tarFile := resource.NewFile()
    resourceTarget = tarFile

    tarFile.Path = t.Path
    tarFile.ContentSourceRef = folio.ResourceReference(t.Path)
    t.writer, err = tarFile.ContentSourceRef.ContentWriterStream()
    targetArchive := tar.NewWriter(t.writer)

    defer t.writer.Close()
    */

    for _,res := range document.Filter(func(d data.Declaration) bool {
        return d.ResourceType() == "file"
    }) {
@@ -168,6 +160,7 @@ func (t *Tar) Extract(resourceSource data.Resource, filter data.ElementSelector)
    var tarSourceFile data.FileResource = resourceSource.(data.FileResource)
    //tarSourceFile := resourceSource.(*resource.File)

    tarSourceFile.SetGzipContent(true)
    t.reader, err = tarSourceFile.GetContent(nil)
    sourceArchive := tar.NewReader(t.reader)

@@ -177,10 +170,12 @@ func (t *Tar) Extract(resourceSource data.Resource, filter data.ElementSelector)
        var hdr *tar.Header
        hdr, err = sourceArchive.Next()
        if err == io.EOF {
            slog.Info("Tar.Extract() EOF", "source", sourceArchive)
            err = nil
            break
        }
        if err != nil {
            slog.Info("Tar.Extract() ERROR", "source", sourceArchive, "error", err)
            return
        }

@@ -349,7 +349,6 @@ func (d *Document) AddResourceDeclaration(resourceType string, resourceDeclarati
    }
}

// XXX NewResource is not commonly used by the underlying resource Read() is no longer called so it needs more testing
func (d *Document) NewResource(uri string) (newResource data.Resource, err error) {

    decl := NewDeclarationFromDocument(d)
@@ -107,8 +107,16 @@ func (r *Registry) Append(uri URI, documents []data.Document) (addedDocuments []
    var convertUri data.Converter
    var sourceResource data.Resource

    if fileResource, ok := sourceResource.(data.FileResource); ok {
        fileResource.SetGzipContent(true)
    }

    slog.Info("folio.Registry.Append()", "uri", uri, "converter", r.ConverterTypes)
    if convertUri, err = r.ConverterTypes.New(string(uri)); err == nil {

    convertUri, err = uri.Converter()
    if err == nil {

        slog.Info("folio.Registry.Append() Converter", "uri", uri, "converter", convertUri, "type", convertUri.Type(), "error", err)
        if sourceResource, err = uri.NewResource(nil); err == nil {
            switch extractor := convertUri.(type) {
            case data.ManyExtractor:
@@ -35,6 +35,10 @@ func (u URI) NewResource(document data.Document) (newResource data.Resource, err
    return
}

func (u URI) Converter() (converter data.Converter, err error) {
    return DocumentRegistry.ConverterTypes.New(string(u))
}

func (u URI) Parse() *url.URL {
    url, e := url.Parse(string(u))
    if e == nil {
@@ -27,6 +27,7 @@ import (
    "decl/internal/transport"
    "strings"
    "embed"
    "compress/gzip"
)

const (
@@ -102,16 +103,17 @@ type File struct {
    Ctime time.Time `json:"ctime,omitempty" yaml:"ctime,omitempty"`
    Mtime time.Time `json:"mtime,omitempty" yaml:"mtime,omitempty"`

    Content string `json:"content,omitempty" yaml:"content,omitempty"`
    ContentSourceRef folio.ResourceReference `json:"sourceref,omitempty" yaml:"sourceref,omitempty"`
    Sha256 string `json:"sha256,omitempty" yaml:"sha256,omitempty"`
    Size int64 `json:"size,omitempty" yaml:"size,omitempty"`
    Target string `json:"target,omitempty" yaml:"target,omitempty"`
    FileType FileType `json:"filetype" yaml:"filetype"`
    State string `json:"state,omitempty" yaml:"state,omitempty"`
    SerializeContent bool `json:"serializecontent,omitempty" yaml:"serializecontent,omitempty"`
    config data.ConfigurationValueGetter
    Resources data.ResourceMapper `json:"-" yaml:"-"`
    Content string `json:"content,omitempty" yaml:"content,omitempty"`
    ContentSourceRef folio.ResourceReference `json:"sourceref,omitempty" yaml:"sourceref,omitempty"`
    Sha256 string `json:"sha256,omitempty" yaml:"sha256,omitempty"`
    Size int64 `json:"size,omitempty" yaml:"size,omitempty"`
    Target string `json:"target,omitempty" yaml:"target,omitempty"`
    FileType FileType `json:"filetype" yaml:"filetype"`
    State string `json:"state,omitempty" yaml:"state,omitempty"`
    SerializeContent bool `json:"serializecontent,omitempty" yaml:"serializecontent,omitempty"`
    GzipContent bool `json:"gzipcontent,omitempty" yaml:"gzipcontent,omitempty"`
    config data.ConfigurationValueGetter
    Resources data.ResourceMapper `json:"-" yaml:"-"`
}

type ResourceFileInfo struct {
@@ -234,6 +236,10 @@ func (f *File) Notify(m *machine.EventMessage) {
    }
}

func (f *File) SetGzipContent(flag bool) {
    f.GzipContent = flag
}

func (f *File) PathNormalization(flag bool) {
    f.normalizePath = flag
}
@@ -267,6 +273,10 @@ func (f *File) SetURI(uri string) (err error) {
    return
}

func (f *File) DetectGzip() bool {
    return (f.parsedURI.Query().Get("gzip") == "true" || f.fileext == "gz" || f.exttype == "tgz" || f.exttype == "gz" || f.fileext == "tgz" )
}

func (f *File) SetURIFromString(uri string) {
    f.Uri = folio.URI(uri)
    f.exttype, f.fileext = f.Uri.Extension()
@@ -475,17 +485,28 @@ func (f *File) Create(ctx context.Context) error {
        return readn, readerr
    })

    createdFile, e := os.Create(f.Path)
    if e != nil {
        return e
    var createdFileWriter io.WriteCloser
    createdFile, fileErr := os.Create(f.Path)
    if fileErr != nil {
        return fileErr
    }

    if f.GzipContent && f.DetectGzip() {
        createdFileWriter = gzip.NewWriter(createdFile)
        defer createdFileWriter.Close()
    } else {
        createdFileWriter = createdFile
    }

    defer createdFile.Close()

    if chmodErr := createdFile.Chmod(mode); chmodErr != nil {
        return chmodErr
    }
    _, writeErr := io.CopyBuffer(createdFile, sumReadData, copyBuffer)

    _, writeErr := io.CopyBuffer(createdFileWriter, sumReadData, copyBuffer)
    if writeErr != nil {
        return fmt.Errorf("File.Create(): CopyBuffer failed %v %v: %w", createdFile, contentReader, writeErr)
        return fmt.Errorf("File.Create(): CopyBuffer failed %v %v: %w", createdFileWriter, contentReader, writeErr)
    }

    f.Sha256 = fmt.Sprintf("%x", hash.Sum(nil))
@@ -591,13 +612,16 @@ func (f *File) ReadStat() (err error) {
    return
}

func (f *File) open() (file fs.File, err error) {
func (f *File) open() (file io.ReadCloser, err error) {
    slog.Info("open()", "file", f.Path, "fs", f.Filesystem)
    if _, ok := f.Filesystem.(embed.FS); ok {
        file, err = f.Filesystem.Open(f.Path)
    } else {
        file, err = os.Open(f.Path)
    }
    if f.GzipContent && f.DetectGzip() {
        file, err = gzip.NewReader(file)
    }
    slog.Info("open()", "file", f.Path, "error", err)
    return
}
@@ -643,7 +667,11 @@ func (f *File) Read(ctx context.Context) ([]byte, error) {
func (f *File) readThru() (contentReader io.ReadCloser, err error) {
    if len(f.ContentSourceRef) != 0 {
        contentReader, err = f.ContentSourceRef.Lookup(nil).ContentReaderStream()
        contentReader.(*transport.Reader).SetGzip(false)
        if f.GzipContent {
            contentReader.(*transport.Reader).DetectGzip()
        } else {
            contentReader.(*transport.Reader).SetGzip(false)
        }
        slog.Info("File.readThru()", "reader", contentReader)
    } else {
        if len(f.Content) != 0 {
@@ -42,6 +42,12 @@ func (t *Path) Remove() {
    }
}

func (t *Path) Open(name string) (r io.ReadCloser, err error) {
    path := t.FilePath(name)
    r, err = os.Open(path)
    return
}

func (t *Path) CreateFileFromReader(name string, r io.Reader) (path string, err error) {
    path = filepath.Join(string(*t), name)
    _, statErr := os.Stat(path)
@@ -71,6 +77,12 @@ func (t *Path) CreateFile(name string, content string) (err error) {
    return
}

func (t *Path) FileExists(name string) bool {
    path := t.FilePath(name)
    _, statErr := os.Stat(path)
    return ! os.IsNotExist(statErr)
}

func (t *Path) Exists() (bool) {
    _, statErr := os.Stat(string(*t))
    return ! os.IsNotExist(statErr)
@@ -80,6 +92,10 @@ func (t *Path) FilePath(name string) string {
    return filepath.Join(string(*t), name)
}

func (t *Path) URIPath(name string) string {
    return fmt.Sprintf("file://%s", t.FilePath(name))
}

func (t *Path) Mkdir(name string, mode os.FileMode) (err error) {
    var path string
    if path, err = filepath.Abs(filepath.Join(string(*t), name)); err == nil {
@@ -9,7 +9,7 @@ import (
    "strings"
    "path/filepath"
    "log/slog"
    "runtime/debug"
    _ "runtime/debug"
)

/*
@@ -52,17 +52,22 @@ func (t *Types[Product]) RegisterContentType(contenttypes []string, factory Fact
func (t *Types[Product]) FromExtension(path string) (Factory[Product], error) {
    elements := strings.Split(path, ".")
    numberOfElements := len(elements)
    slog.Info("Types[Product].FromExtension()", "path", path, "elements", elements, "types", t.contentTypes, "stacktrace", string(debug.Stack()))
    if numberOfElements > 2 {
        ext := strings.Join(elements[numberOfElements - 2: numberOfElements], ".")
        slog.Info("Types[Product].FromExtension() - Lookup", "ext", ext, "stacktrace", string(debug.Stack()))
        if src := t.GetContentType(ext); src != nil {
    slog.Info("Types[Product].FromExtension()", "path", path, "elements", elements, "types", t.contentTypes, "numberOfElements", numberOfElements)
    if numberOfElements >= 2 {
        // slog.Info("Types[Product].FromExtension()", "path", path, "elements", elements, "types", t.contentTypes, "stacktrace", string(debug.Stack()))
        if numberOfElements > 2 {
            ext := strings.Join(elements[numberOfElements - 2: numberOfElements], ".")
            // slog.Info("Types[Product].FromExtension() - Lookup", "ext", ext, "stacktrace", string(debug.Stack()))
            slog.Info("Types[Product].FromExtension() - Lookup", "ext", ext, "types", t)
            if src := t.GetContentType(ext); src != nil {
                return src, nil
            }
        }
        slog.Info("Types[Product].FromExtension() - Lookup", "ext", elements[numberOfElements - 1], "types", t.contentTypes)
        if src := t.GetContentType(elements[numberOfElements - 1]); src != nil {
            return src, nil
        }
    }
    if src := t.GetContentType(elements[numberOfElements - 1]); src != nil {
        return src, nil
    }
    return nil, fmt.Errorf("%w: %s", ErrUnknownType, path)
}

@@ -81,10 +86,12 @@ func (t *Types[Product]) NewFromParsedURI(u *url.URL) (result Product, err error
    }

    path := filepath.Join(u.Hostname(), u.Path)
    if d, lookupErr := t.FromExtension(path); d != nil {
        return d(u), lookupErr
    } else {
        slog.Info("Types[Product].NewFromParsedURI() - FromExtension()", "uri", u, "path", path, "error", lookupErr, "stacktrace", string(debug.Stack()))
    if len(path) > 0 {
        if d, lookupErr := t.FromExtension(filepath.Base(path)); d != nil {
            return d(u), lookupErr
        } else {
            slog.Info("Types[Product].NewFromParsedURI() - FromExtension()", "uri", u, "path", path, "error", lookupErr) //, "stacktrace", string(debug.Stack()))
        }
    }

    if r, ok := t.registry[u.Scheme]; ok {