data.go - hugo - [fork] hugo port for 9front
git clone https://git.drkhsh.at/hugo.git
---
data.go (5705B)
---
// Copyright 2017 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package data provides template functions for working with external data
// sources.
package data

import (
	"bytes"
	"encoding/csv"
	"encoding/json"
	"errors"
	"fmt"
	"net/http"
	"strings"

	"github.com/gohugoio/hugo/cache/filecache"
	"github.com/gohugoio/hugo/common/hugo"
	"github.com/gohugoio/hugo/common/maps"
	"github.com/gohugoio/hugo/config/security"

	"github.com/gohugoio/hugo/common/types"

	"github.com/gohugoio/hugo/common/constants"

	"github.com/spf13/cast"

	"github.com/gohugoio/hugo/deps"
	"slices"
)

// New returns a new instance of the data-namespaced template functions.
func New(deps *deps.Deps) *Namespace {
	return &Namespace{
		deps:         deps,
		cacheGetCSV:  deps.ResourceSpec.FileCaches.GetCSVCache(),
		cacheGetJSON: deps.ResourceSpec.FileCaches.GetJSONCache(),
		client:       http.DefaultClient,
	}
}

// Namespace provides template functions for the "data" namespace.
type Namespace struct {
	deps *deps.Deps

	cacheGetJSON *filecache.Cache
	cacheGetCSV  *filecache.Cache

	client *http.Client
}

// GetCSV expects the separator sep and one or more parts of a URL to a
// resource, which can be either local or remote.
// The data separator can be a comma, semicolon, pipe, etc., but must be a
// single character.
// If multiple URL parts are provided, they are joined into the final URL.
// GetCSV returns nil or a slice of string slices for use in a shortcode.
func (ns *Namespace) GetCSV(sep string, args ...any) (d [][]string, err error) {
	hugo.Deprecate("data.GetCSV", "use resources.Get or resources.GetRemote with transform.Unmarshal.", "v0.123.0")

	url, headers := toURLAndHeaders(args)
	cache := ns.cacheGetCSV

	unmarshal := func(b []byte) (bool, error) {
		if d, err = parseCSV(b, sep); err != nil {
			err = fmt.Errorf("failed to parse CSV file %s: %w", url, err)

			return true, err
		}

		return false, nil
	}

	var req *http.Request
	req, err = http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, fmt.Errorf("failed to create request for getCSV for resource %s: %w", url, err)
	}

	// Add custom user headers.
	addUserProvidedHeaders(headers, req)
	addDefaultHeaders(req, "text/csv", "text/plain")

	err = ns.getResource(cache, unmarshal, req)
	if err != nil {
		if security.IsAccessDenied(err) {
			return nil, err
		}
		ns.deps.Log.Erroridf(constants.ErrRemoteGetCSV, "Failed to get CSV resource %q: %s", url, err)
		return nil, nil
	}

	return
}
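
// Illustrative usage (not part of the original file): from a template this is
// typically reached through the deprecated getCSV alias, with the separator
// first and any number of URL parts that are concatenated, e.g.
//
//	{{ $rows := getCSV ";" "https://example.org/" "data.csv" }}
//
// The URL here is a placeholder. On a non-fatal fetch error the call logs the
// error and the template receives nil.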

// GetJSON expects one or more parts of a URL in args to a resource, which can
// be either local or remote.
// If multiple parts are provided, they are joined into the final URL.
// GetJSON returns nil or parsed JSON for use in a shortcode.
func (ns *Namespace) GetJSON(args ...any) (any, error) {
	hugo.Deprecate("data.GetJSON", "use resources.Get or resources.GetRemote with transform.Unmarshal.", "v0.123.0")

	var v any
	url, headers := toURLAndHeaders(args)
	cache := ns.cacheGetJSON

	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, fmt.Errorf("failed to create request for getJSON resource %s: %w", url, err)
	}

	unmarshal := func(b []byte) (bool, error) {
		err := json.Unmarshal(b, &v)
		if err != nil {
			return true, err
		}
		return false, nil
	}

	addUserProvidedHeaders(headers, req)
	addDefaultHeaders(req, "application/json")

	err = ns.getResource(cache, unmarshal, req)
	if err != nil {
		if security.IsAccessDenied(err) {
			return nil, err
		}
		ns.deps.Log.Erroridf(constants.ErrRemoteGetJSON, "Failed to get JSON resource %q: %s", url, err)
		return nil, nil
	}

	return v, nil
}
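
// Illustrative usage (not part of the original file): URL parts are joined and
// an optional header map may be passed as the last argument, e.g.
//
//	{{ $data := getJSON "https://example.org/api/" "items.json" (dict "X-Token" "TOKEN") }}
//
// The URL and token are placeholders; the result is whatever json.Unmarshal
// produces (maps, slices, strings, numbers), or nil on a non-fatal remote
// error.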

func addDefaultHeaders(req *http.Request, accepts ...string) {
	for _, accept := range accepts {
		if !hasHeaderValue(req.Header, "Accept", accept) {
			req.Header.Add("Accept", accept)
		}
	}
	if !hasHeaderKey(req.Header, "User-Agent") {
		req.Header.Add("User-Agent", "Hugo Static Site Generator")
	}
}

func addUserProvidedHeaders(headers map[string]any, req *http.Request) {
	if headers == nil {
		return
	}
	for key, val := range headers {
		vals := types.ToStringSlicePreserveString(val)
		for _, s := range vals {
			req.Header.Add(key, s)
		}
	}
}
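
// For illustration (an assumed input, not upstream code): a header map such as
//
//	map[string]any{"Accept": "application/json", "X-Tag": []string{"a", "b"}}
//
// adds one Accept value and two X-Tag values, since
// types.ToStringSlicePreserveString normalizes both single strings and slices
// to a []string before each element is passed to req.Header.Add.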

func hasHeaderValue(m http.Header, key, value string) bool {
	var s []string
	var ok bool

	if s, ok = m[key]; !ok {
		return false
	}

	return slices.Contains(s, value)
}

func hasHeaderKey(m http.Header, key string) bool {
	_, ok := m[key]
	return ok
}

func toURLAndHeaders(urlParts []any) (string, map[string]any) {
	if len(urlParts) == 0 {
		return "", nil
	}

	// The last argument may be a map.
	headers, err := maps.ToStringMapE(urlParts[len(urlParts)-1])
	if err == nil {
		urlParts = urlParts[:len(urlParts)-1]
	} else {
		headers = nil
	}

	return strings.Join(cast.ToStringSlice(urlParts), ""), headers
}
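
// Worked example (illustrative only): a call such as
//
//	toURLAndHeaders([]any{"https://example.org/", "data.json", map[string]any{"Accept": "application/json"}})
//
// returns "https://example.org/data.json" and the header map: the trailing
// argument converts cleanly to a string map, so it is split off before the
// remaining parts are joined.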

// parseCSV parses bytes of CSV data into a slice of string slices or returns
// an error.
func parseCSV(c []byte, sep string) ([][]string, error) {
	if len(sep) != 1 {
		return nil, errors.New("Incorrect length of CSV separator: " + sep)
	}
	b := bytes.NewReader(c)
	r := csv.NewReader(b)
	rSep := []rune(sep)
	r.Comma = rSep[0]
	// FieldsPerRecord = 0 makes the reader require every record to have the
	// same number of fields as the first one.
	r.FieldsPerRecord = 0
	return r.ReadAll()
}
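
// Example (illustrative only): with a semicolon separator,
//
//	parseCSV([]byte("a;b;c\n1;2;3\n"), ";")
//
// yields [][]string{{"a", "b", "c"}, {"1", "2", "3"}}, while a separator whose
// length is not exactly one returns an error instead.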
209 }