mirror of
https://github.com/aykhans/sarin.git
synced 2026-02-28 14:59:14 +00:00
v1.0.0: here we go again
This commit is contained in:
310
internal/sarin/client.go
Normal file
310
internal/sarin/client.go
Normal file
@@ -0,0 +1,310 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"crypto/tls"
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"math"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"time"
|
||||
|
||||
"github.com/valyala/fasthttp"
|
||||
"github.com/valyala/fasthttp/fasthttpproxy"
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
utilsSlice "go.aykhans.me/utils/slice"
|
||||
"golang.org/x/net/proxy"
|
||||
)
|
||||
|
||||
type HostClientGenerator func() *fasthttp.HostClient
|
||||
|
||||
func safeUintToInt(u uint) int {
|
||||
if u > math.MaxInt {
|
||||
return math.MaxInt
|
||||
}
|
||||
return int(u)
|
||||
}
|
||||
|
||||
// NewHostClients creates a list of fasthttp.HostClient instances for the given proxies.
|
||||
// If no proxies are provided, a single client without a proxy is returned.
|
||||
// It can return the following errors:
|
||||
// - types.ProxyDialError
|
||||
func NewHostClients(
|
||||
ctx context.Context,
|
||||
timeout time.Duration,
|
||||
proxies []url.URL,
|
||||
maxConns uint,
|
||||
requestURL *url.URL,
|
||||
skipVerify bool,
|
||||
) ([]*fasthttp.HostClient, error) {
|
||||
isTLS := requestURL.Scheme == "https"
|
||||
|
||||
if proxiesLen := len(proxies); proxiesLen > 0 {
|
||||
clients := make([]*fasthttp.HostClient, 0, proxiesLen)
|
||||
addr := requestURL.Host
|
||||
if isTLS && requestURL.Port() == "" {
|
||||
addr += ":443"
|
||||
}
|
||||
|
||||
for _, proxy := range proxies {
|
||||
dialFunc, err := NewProxyDialFunc(ctx, &proxy, timeout)
|
||||
if err != nil {
|
||||
return nil, types.NewProxyDialError(proxy.String(), err)
|
||||
}
|
||||
|
||||
clients = append(clients, &fasthttp.HostClient{
|
||||
MaxConns: safeUintToInt(maxConns),
|
||||
IsTLS: isTLS,
|
||||
TLSConfig: &tls.Config{
|
||||
InsecureSkipVerify: skipVerify, //nolint:gosec
|
||||
},
|
||||
Addr: addr,
|
||||
Dial: dialFunc,
|
||||
MaxIdleConnDuration: timeout,
|
||||
MaxConnDuration: timeout,
|
||||
WriteTimeout: timeout,
|
||||
ReadTimeout: timeout,
|
||||
DisableHeaderNamesNormalizing: true,
|
||||
DisablePathNormalizing: true,
|
||||
NoDefaultUserAgentHeader: true,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
return clients, nil
|
||||
}
|
||||
|
||||
client := &fasthttp.HostClient{
|
||||
MaxConns: safeUintToInt(maxConns),
|
||||
IsTLS: isTLS,
|
||||
TLSConfig: &tls.Config{
|
||||
InsecureSkipVerify: skipVerify, //nolint:gosec
|
||||
},
|
||||
Addr: requestURL.Host,
|
||||
MaxIdleConnDuration: timeout,
|
||||
MaxConnDuration: timeout,
|
||||
WriteTimeout: timeout,
|
||||
ReadTimeout: timeout,
|
||||
DisableHeaderNamesNormalizing: true,
|
||||
DisablePathNormalizing: true,
|
||||
NoDefaultUserAgentHeader: true,
|
||||
}
|
||||
return []*fasthttp.HostClient{client}, nil
|
||||
}
|
||||
|
||||
func NewProxyDialFunc(ctx context.Context, proxyURL *url.URL, timeout time.Duration) (fasthttp.DialFunc, error) {
|
||||
var (
|
||||
dialer fasthttp.DialFunc
|
||||
err error
|
||||
)
|
||||
|
||||
switch proxyURL.Scheme {
|
||||
case "socks5":
|
||||
dialer, err = fasthttpSocksDialerDualStackTimeout(ctx, proxyURL, timeout, true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case "socks5h":
|
||||
dialer, err = fasthttpSocksDialerDualStackTimeout(ctx, proxyURL, timeout, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case "http":
|
||||
dialer = fasthttpproxy.FasthttpHTTPDialerDualStackTimeout(proxyURL.String(), timeout)
|
||||
case "https":
|
||||
dialer = fasthttpHTTPSDialerDualStackTimeout(proxyURL, timeout)
|
||||
default:
|
||||
return nil, errors.New("unsupported proxy scheme")
|
||||
}
|
||||
|
||||
if dialer == nil {
|
||||
return nil, errors.New("internal error: proxy dialer is nil")
|
||||
}
|
||||
|
||||
return dialer, nil
|
||||
}
|
||||
|
||||
// fasthttpSocksDialerDualStackTimeout builds a fasthttp.DialFunc that dials
// through the SOCKS5 proxy at proxyURL. When resolveLocally is true the
// target hostname is resolved with the local resolver before contacting the
// proxy (socks5 scheme); otherwise the hostname is sent to the proxy as-is
// (socks5h scheme). DNS lookup and dial share one timeout budget per call.
func fasthttpSocksDialerDualStackTimeout(ctx context.Context, proxyURL *url.URL, timeout time.Duration, resolveLocally bool) (fasthttp.DialFunc, error) {
	netDialer := &net.Dialer{}

	// Parse auth from proxy URL if present
	var auth *proxy.Auth
	if proxyURL.User != nil {
		auth = &proxy.Auth{
			User: proxyURL.User.Username(),
		}
		if password, ok := proxyURL.User.Password(); ok {
			auth.Password = password
		}
	}

	// Create SOCKS5 dialer with net.Dialer as forward dialer
	socksDialer, err := proxy.SOCKS5("tcp", proxyURL.Host, auth, netDialer)
	if err != nil {
		return nil, err
	}

	// Assert to ContextDialer for timeout support
	contextDialer, ok := socksDialer.(proxy.ContextDialer)
	if !ok {
		// Fallback without timeout (should not happen with net.Dialer)
		return func(addr string) (net.Conn, error) {
			return socksDialer.Dial("tcp", addr)
		}, nil
	}

	// Return dial function that uses context with timeout
	return func(addr string) (net.Conn, error) {
		// One shared deadline covers the optional DNS lookup and the dial.
		deadline := time.Now().Add(timeout)

		if resolveLocally {
			host, port, err := net.SplitHostPort(addr)
			if err != nil {
				return nil, err
			}

			// The DNS lookup may use up to the full timeout; the dial below
			// then runs on whatever remains of the shared deadline.
			dnsCtx, dnsCancel := context.WithTimeout(ctx, timeout)
			ips, err := net.DefaultResolver.LookupIP(dnsCtx, "ip", host)
			dnsCancel()
			if err != nil {
				return nil, err
			}
			if len(ips) == 0 {
				return nil, errors.New("no IP addresses found for host: " + host)
			}

			// Use the first resolved IP
			addr = net.JoinHostPort(ips[0].String(), port)
		}

		// Use remaining time for dial
		remaining := time.Until(deadline)
		if remaining <= 0 {
			return nil, context.DeadlineExceeded
		}

		dialCtx, dialCancel := context.WithTimeout(ctx, remaining)
		defer dialCancel()

		return contextDialer.DialContext(dialCtx, "tcp", addr)
	}, nil
}
|
||||
|
||||
// fasthttpHTTPSDialerDualStackTimeout returns a fasthttp.DialFunc that
// tunnels through an HTTPS proxy: it dials the proxy over TCP, performs a
// TLS handshake with the proxy itself, issues an HTTP CONNECT for the
// target address, and hands back the tunneled connection. The whole
// sequence shares one timeout budget per call.
func fasthttpHTTPSDialerDualStackTimeout(proxyURL *url.URL, timeout time.Duration) fasthttp.DialFunc {
	proxyAddr := proxyURL.Host
	// Default the proxy port to 443 when the URL omits one.
	if proxyURL.Port() == "" {
		proxyAddr = net.JoinHostPort(proxyURL.Hostname(), "443")
	}

	// Build Proxy-Authorization header if auth is present
	var proxyAuth string
	if proxyURL.User != nil {
		username := proxyURL.User.Username()
		password, _ := proxyURL.User.Password()
		credentials := username + ":" + password
		proxyAuth = "Basic " + base64.StdEncoding.EncodeToString([]byte(credentials))
	}

	return func(addr string) (net.Conn, error) {
		// Establish TCP connection to proxy with timeout
		start := time.Now()
		conn, err := fasthttp.DialDualStackTimeout(proxyAddr, timeout)
		if err != nil {
			return nil, err
		}

		// The TCP dial consumed part of the budget; the rest covers the
		// handshake and CONNECT exchange.
		remaining := timeout - time.Since(start)
		if remaining <= 0 {
			conn.Close() //nolint:errcheck,gosec
			return nil, context.DeadlineExceeded
		}

		// Set deadline for the TLS handshake and CONNECT request
		if err := conn.SetDeadline(time.Now().Add(remaining)); err != nil {
			conn.Close() //nolint:errcheck,gosec
			return nil, err
		}

		// Upgrade to TLS; the proxy's certificate is verified against the
		// proxy URL's hostname (no InsecureSkipVerify here).
		tlsConn := tls.Client(conn, &tls.Config{ //nolint:gosec
			ServerName: proxyURL.Hostname(),
		})
		if err := tlsConn.Handshake(); err != nil {
			tlsConn.Close() //nolint:errcheck,gosec
			return nil, err
		}

		// Build and send CONNECT request
		connectReq := &http.Request{
			Method: http.MethodConnect,
			URL:    &url.URL{Opaque: addr},
			Host:   addr,
			Header: make(http.Header),
		}
		if proxyAuth != "" {
			connectReq.Header.Set("Proxy-Authorization", proxyAuth)
		}

		if err := connectReq.Write(tlsConn); err != nil {
			tlsConn.Close() //nolint:errcheck,gosec
			return nil, err
		}

		// Read response using buffered reader, but return wrapped connection
		// to preserve any buffered data
		bufReader := bufio.NewReader(tlsConn)
		resp, err := http.ReadResponse(bufReader, connectReq)
		if err != nil {
			tlsConn.Close() //nolint:errcheck,gosec
			return nil, err
		}
		resp.Body.Close() //nolint:errcheck,gosec

		if resp.StatusCode != http.StatusOK {
			tlsConn.Close() //nolint:errcheck,gosec
			return nil, errors.New("proxy CONNECT failed: " + resp.Status)
		}

		// Clear deadline for the tunneled connection
		if err := tlsConn.SetDeadline(time.Time{}); err != nil {
			tlsConn.Close() //nolint:errcheck,gosec
			return nil, err
		}

		// Return wrapped connection that uses the buffered reader
		// to avoid losing any data that was read ahead
		return &bufferedConn{Conn: tlsConn, reader: bufReader}, nil
	}
}
|
||||
|
||||
// bufferedConn wraps a net.Conn with a buffered reader to preserve
|
||||
// any data that was read during HTTP response parsing.
|
||||
type bufferedConn struct {
|
||||
net.Conn
|
||||
|
||||
reader *bufio.Reader
|
||||
}
|
||||
|
||||
func (c *bufferedConn) Read(b []byte) (int, error) {
|
||||
return c.reader.Read(b)
|
||||
}
|
||||
|
||||
func NewHostClientGenerator(clients ...*fasthttp.HostClient) HostClientGenerator {
|
||||
switch len(clients) {
|
||||
case 0:
|
||||
hostClient := &fasthttp.HostClient{}
|
||||
return func() *fasthttp.HostClient {
|
||||
return hostClient
|
||||
}
|
||||
case 1:
|
||||
return func() *fasthttp.HostClient {
|
||||
return clients[0]
|
||||
}
|
||||
default:
|
||||
return utilsSlice.RandomCycle(nil, clients...)
|
||||
}
|
||||
}
|
||||
14
internal/sarin/helpers.go
Normal file
14
internal/sarin/helpers.go
Normal file
@@ -0,0 +1,14 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"math/rand/v2"
|
||||
"time"
|
||||
)
|
||||
|
||||
func NewDefaultRandSource() rand.Source {
|
||||
now := time.Now().UnixNano()
|
||||
return rand.NewPCG(
|
||||
uint64(now), //nolint:gosec // G115: Safe conversion; UnixNano timestamp used as random seed, bit pattern is intentional
|
||||
uint64(now>>32), //nolint:gosec // G115: Safe conversion; right-shifted timestamp for seed entropy, overflow is acceptable
|
||||
)
|
||||
}
|
||||
336
internal/sarin/request.go
Normal file
336
internal/sarin/request.go
Normal file
@@ -0,0 +1,336 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"maps"
|
||||
"math/rand/v2"
|
||||
"net/url"
|
||||
"strings"
|
||||
"text/template"
|
||||
|
||||
"github.com/joho/godotenv"
|
||||
"github.com/valyala/fasthttp"
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
utilsSlice "go.aykhans.me/utils/slice"
|
||||
)
|
||||
|
||||
type RequestGenerator func(*fasthttp.Request) error
|
||||
|
||||
type RequestGeneratorWithData func(*fasthttp.Request, any) error
|
||||
|
||||
type valuesData struct {
|
||||
Values map[string]string
|
||||
}
|
||||
|
||||
// NewRequestGenerator creates a new RequestGenerator function that generates HTTP requests
// with the specified configuration. The returned RequestGenerator is NOT safe for concurrent
// use by multiple goroutines.
//
// The second return value reports whether any request part (method, params,
// headers, cookies, body) can vary between calls.
func NewRequestGenerator(
	methods []string,
	requestURL *url.URL,
	params types.Params,
	headers types.Headers,
	cookies types.Cookies,
	bodies []string,
	values []string,
) (RequestGenerator, bool) {
	randSource := NewDefaultRandSource()
	//nolint:gosec // G404: Using non-cryptographic rand for load testing, not security
	localRand := rand.New(randSource)
	templateFuncMap := NewDefaultTemplateFuncMap(randSource)

	// One generator per request part; each *Dynamic flag records whether
	// that part can change between invocations.
	methodGenerator, isMethodGeneratorDynamic := NewMethodGeneratorFunc(localRand, methods, templateFuncMap)
	paramsGenerator, isParamsGeneratorDynamic := NewParamsGeneratorFunc(localRand, params, templateFuncMap)
	headersGenerator, isHeadersGeneratorDynamic := NewHeadersGeneratorFunc(localRand, headers, templateFuncMap)
	cookiesGenerator, isCookiesGeneratorDynamic := NewCookiesGeneratorFunc(localRand, cookies, templateFuncMap)

	// Body templates can record a form-data content type as a side effect
	// via bodyTemplateFuncMapData; it is read back after rendering.
	bodyTemplateFuncMapData := &BodyTemplateFuncMapData{}
	bodyTemplateFuncMap := NewDefaultBodyTemplateFuncMap(randSource, bodyTemplateFuncMapData)
	bodyGenerator, isBodyGeneratorDynamic := NewBodyGeneratorFunc(localRand, bodies, bodyTemplateFuncMap)

	valuesGenerator := NewValuesGeneratorFunc(values, templateFuncMap)

	return func(req *fasthttp.Request) error {
		req.SetRequestURI(requestURL.Path)
		req.Header.SetHost(requestURL.Host)

		// Render the user-supplied values first: they are passed as the
		// template data to every other generator below.
		data, err := valuesGenerator()
		if err != nil {
			return err
		}

		if err := methodGenerator(req, data); err != nil {
			return err
		}

		// Body runs before headers so a content type produced while
		// rendering the body (reset here each call) can be added below.
		bodyTemplateFuncMapData.ClearFormDataContenType()
		if err := bodyGenerator(req, data); err != nil {
			return err
		}

		if err := headersGenerator(req, data); err != nil {
			return err
		}
		if bodyTemplateFuncMapData.GetFormDataContenType() != "" {
			req.Header.Add("Content-Type", bodyTemplateFuncMapData.GetFormDataContenType())
		}

		if err := paramsGenerator(req, data); err != nil {
			return err
		}
		if err := cookiesGenerator(req, data); err != nil {
			return err
		}

		if requestURL.Scheme == "https" {
			req.URI().SetScheme("https")
		}
		return nil
	}, isMethodGeneratorDynamic ||
		isParamsGeneratorDynamic ||
		isHeadersGeneratorDynamic ||
		isCookiesGeneratorDynamic ||
		isBodyGeneratorDynamic
}
|
||||
|
||||
func NewMethodGeneratorFunc(localRand *rand.Rand, methods []string, templateFunctions template.FuncMap) (RequestGeneratorWithData, bool) {
|
||||
methodGenerator, isDynamic := buildStringSliceGenerator(localRand, methods, templateFunctions)
|
||||
|
||||
var (
|
||||
method string
|
||||
err error
|
||||
)
|
||||
return func(req *fasthttp.Request, data any) error {
|
||||
method, err = methodGenerator()(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
req.Header.SetMethod(method)
|
||||
return nil
|
||||
}, isDynamic
|
||||
}
|
||||
|
||||
func NewBodyGeneratorFunc(localRand *rand.Rand, bodies []string, templateFunctions template.FuncMap) (RequestGeneratorWithData, bool) {
|
||||
bodyGenerator, isDynamic := buildStringSliceGenerator(localRand, bodies, templateFunctions)
|
||||
|
||||
var (
|
||||
body string
|
||||
err error
|
||||
)
|
||||
return func(req *fasthttp.Request, data any) error {
|
||||
body, err = bodyGenerator()(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
req.SetBody([]byte(body))
|
||||
return nil
|
||||
}, isDynamic
|
||||
}
|
||||
|
||||
func NewParamsGeneratorFunc(localRand *rand.Rand, params types.Params, templateFunctions template.FuncMap) (RequestGeneratorWithData, bool) {
|
||||
generators, isDynamic := buildKeyValueGenerators(localRand, params, templateFunctions)
|
||||
|
||||
var (
|
||||
key, value string
|
||||
err error
|
||||
)
|
||||
return func(req *fasthttp.Request, data any) error {
|
||||
for _, gen := range generators {
|
||||
key, err = gen.Key(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
value, err = gen.Value()(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
req.URI().QueryArgs().Add(key, value)
|
||||
}
|
||||
return nil
|
||||
}, isDynamic
|
||||
}
|
||||
|
||||
func NewHeadersGeneratorFunc(localRand *rand.Rand, headers types.Headers, templateFunctions template.FuncMap) (RequestGeneratorWithData, bool) {
|
||||
generators, isDynamic := buildKeyValueGenerators(localRand, headers, templateFunctions)
|
||||
|
||||
var (
|
||||
key, value string
|
||||
err error
|
||||
)
|
||||
return func(req *fasthttp.Request, data any) error {
|
||||
for _, gen := range generators {
|
||||
key, err = gen.Key(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
value, err = gen.Value()(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
req.Header.Add(key, value)
|
||||
}
|
||||
return nil
|
||||
}, isDynamic
|
||||
}
|
||||
|
||||
func NewCookiesGeneratorFunc(localRand *rand.Rand, cookies types.Cookies, templateFunctions template.FuncMap) (RequestGeneratorWithData, bool) {
|
||||
generators, isDynamic := buildKeyValueGenerators(localRand, cookies, templateFunctions)
|
||||
|
||||
var (
|
||||
key, value string
|
||||
err error
|
||||
)
|
||||
if len(generators) > 0 {
|
||||
return func(req *fasthttp.Request, data any) error {
|
||||
cookieStrings := make([]string, 0, len(generators))
|
||||
for _, gen := range generators {
|
||||
key, err = gen.Key(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
value, err = gen.Value()(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
cookieStrings = append(cookieStrings, key+"="+value)
|
||||
}
|
||||
req.Header.Add("Cookie", strings.Join(cookieStrings, "; "))
|
||||
return nil
|
||||
}, isDynamic
|
||||
}
|
||||
|
||||
return func(req *fasthttp.Request, data any) error {
|
||||
return nil
|
||||
}, isDynamic
|
||||
}
|
||||
|
||||
func NewValuesGeneratorFunc(values []string, templateFunctions template.FuncMap) func() (valuesData, error) {
|
||||
generators := make([]func(_ any) (string, error), len(values))
|
||||
|
||||
for i, v := range values {
|
||||
generators[i], _ = createTemplateFunc(v, templateFunctions)
|
||||
}
|
||||
|
||||
var (
|
||||
rendered string
|
||||
data map[string]string
|
||||
err error
|
||||
)
|
||||
return func() (valuesData, error) {
|
||||
result := make(map[string]string)
|
||||
for _, generator := range generators {
|
||||
rendered, err = generator(nil)
|
||||
if err != nil {
|
||||
return valuesData{}, fmt.Errorf("values rendering: %w", err)
|
||||
}
|
||||
|
||||
data, err = godotenv.Unmarshal(rendered)
|
||||
if err != nil {
|
||||
return valuesData{}, fmt.Errorf("values rendering: %w", err)
|
||||
}
|
||||
|
||||
maps.Copy(result, data)
|
||||
}
|
||||
|
||||
return valuesData{Values: result}, nil
|
||||
}
|
||||
}
|
||||
|
||||
func createTemplateFunc(value string, templateFunctions template.FuncMap) (func(data any) (string, error), bool) {
|
||||
tmpl, err := template.New("").Funcs(templateFunctions).Parse(value)
|
||||
if err == nil && hasTemplateActions(tmpl) {
|
||||
var err error
|
||||
return func(data any) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
if err = tmpl.Execute(&buf, data); err != nil {
|
||||
return "", fmt.Errorf("template rendering: %w", err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}, true
|
||||
}
|
||||
return func(_ any) (string, error) { return value, nil }, false
|
||||
}
|
||||
|
||||
// keyValueGenerator pairs a key render function with a factory that yields
// value render functions (cycling when a key has several candidate values).
type keyValueGenerator struct {
	Key   func(data any) (string, error)
	Value func() func(data any) (string, error)
}

// keyValueItem constrains the key/value kinds accepted by
// buildKeyValueGenerators.
type keyValueItem interface {
	types.Param | types.Header | types.Cookie
}
|
||||
|
||||
// buildKeyValueGenerators compiles each item's key and candidate values into
// template render functions. The boolean reports whether any output can vary
// between calls: a key or value containing template actions, or a key with
// more than one candidate value.
func buildKeyValueGenerators[T keyValueItem](
	localRand *rand.Rand,
	items []T,
	templateFunctions template.FuncMap,
) ([]keyValueGenerator, bool) {
	isDynamic := false
	generators := make([]keyValueGenerator, len(items))

	for generatorIndex, item := range items {
		// Convert to KeyValue to access fields
		keyValue := types.KeyValue[string, []string](item)

		// Generate key function
		keyFunc, keyIsDynamic := createTemplateFunc(keyValue.Key, templateFunctions)
		if keyIsDynamic {
			isDynamic = true
		}

		// Generate value functions
		valueFuncs := make([]func(data any) (string, error), len(keyValue.Value))
		for j, v := range keyValue.Value {
			valueFunc, valueIsDynamic := createTemplateFunc(v, templateFunctions)
			if valueIsDynamic {
				isDynamic = true
			}
			valueFuncs[j] = valueFunc
		}

		generators[generatorIndex] = keyValueGenerator{
			Key:   keyFunc,
			Value: utilsSlice.RandomCycle(localRand, valueFuncs...),
		}

		// Several candidate values means the output varies even when no
		// individual value contains template actions.
		if len(keyValue.Value) > 1 {
			isDynamic = true
		}
	}

	return generators, isDynamic
}
|
||||
|
||||
func buildStringSliceGenerator(
|
||||
localRand *rand.Rand,
|
||||
values []string,
|
||||
templateFunctions template.FuncMap,
|
||||
) (func() func(data any) (string, error), bool) {
|
||||
// Return a function that returns an empty string generator if values is empty
|
||||
if len(values) == 0 {
|
||||
emptyFunc := func(_ any) (string, error) { return "", nil }
|
||||
return func() func(_ any) (string, error) { return emptyFunc }, false
|
||||
}
|
||||
|
||||
isDynamic := len(values) > 1
|
||||
valueFuncs := make([]func(data any) (string, error), len(values))
|
||||
|
||||
for i, value := range values {
|
||||
valueFunc, valueIsDynamic := createTemplateFunc(value, templateFunctions)
|
||||
if valueIsDynamic {
|
||||
isDynamic = true
|
||||
}
|
||||
valueFuncs[i] = valueFunc
|
||||
}
|
||||
|
||||
return utilsSlice.RandomCycle(localRand, valueFuncs...), isDynamic
|
||||
}
|
||||
348
internal/sarin/response.go
Normal file
348
internal/sarin/response.go
Normal file
@@ -0,0 +1,348 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/big"
|
||||
"os"
|
||||
"slices"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/charmbracelet/lipgloss"
|
||||
"github.com/charmbracelet/lipgloss/table"
|
||||
"go.yaml.in/yaml/v4"
|
||||
)
|
||||
|
||||
// DefaultResponseDurationAccuracy is the default duration bucket size (in
// nanoseconds) used when NewSarinResponseData is given an accuracy of 0.
const DefaultResponseDurationAccuracy uint32 = 1

// DefaultResponseColumnMaxWidth caps the "Response" column width in the
// table output; longer keys are wrapped by wrapText.
const DefaultResponseColumnMaxWidth = 50
|
||||
|
||||
// Duration wraps time.Duration to provide consistent JSON/YAML marshaling as human-readable strings.
|
||||
type Duration time.Duration
|
||||
|
||||
func (d Duration) MarshalJSON() ([]byte, error) {
|
||||
//nolint:wrapcheck
|
||||
return json.Marshal(time.Duration(d).String())
|
||||
}
|
||||
|
||||
func (d Duration) MarshalYAML() (any, error) {
|
||||
return time.Duration(d).String(), nil
|
||||
}
|
||||
|
||||
func (d Duration) String() string {
|
||||
dur := time.Duration(d)
|
||||
switch {
|
||||
case dur >= time.Second:
|
||||
return dur.Round(time.Millisecond).String()
|
||||
case dur >= time.Millisecond:
|
||||
return dur.Round(time.Microsecond).String()
|
||||
default:
|
||||
return dur.String()
|
||||
}
|
||||
}
|
||||
|
||||
// BigInt wraps big.Int to provide consistent JSON/YAML marshaling as numbers.
type BigInt struct {
	*big.Int
}

// MarshalJSON emits the integer as a bare (unquoted) JSON number.
func (b BigInt) MarshalJSON() ([]byte, error) {
	return []byte(b.Int.String()), nil
}

// MarshalYAML emits the integer as an explicit !!int scalar so large values
// keep their integer tag instead of being re-parsed.
func (b BigInt) MarshalYAML() (any, error) {
	return &yaml.Node{
		Kind:  yaml.ScalarNode,
		Tag:   "!!int",
		Value: b.Int.String(),
	}, nil
}

// String returns the decimal representation of the wrapped integer.
func (b BigInt) String() string {
	return b.Int.String()
}
|
||||
|
||||
// Response holds the duration histogram for a single response key.
type Response struct {
	// durations maps a bucketed duration (responseTime / accuracy) to the
	// number of responses observed in that bucket.
	durations map[time.Duration]uint64
}

// SarinResponseData aggregates response-duration histograms keyed by a
// response identifier string (presumably status/outcome — confirm against
// callers of Add). The embedded mutex guards Responses.
type SarinResponseData struct {
	sync.Mutex

	Responses map[string]*Response

	// accuracy is the time bucket size in nanoseconds for storing response durations.
	// Larger values (e.g., 1000) save memory but reduce accuracy by grouping more durations together.
	// Smaller values (e.g., 10) improve accuracy but increase memory usage.
	// Minimum value is 1 (most accurate, highest memory usage).
	// Default value is 1.
	accuracy time.Duration
}
|
||||
|
||||
func NewSarinResponseData(accuracy uint32) *SarinResponseData {
|
||||
if accuracy == 0 {
|
||||
accuracy = DefaultResponseDurationAccuracy
|
||||
}
|
||||
|
||||
return &SarinResponseData{
|
||||
Responses: make(map[string]*Response),
|
||||
accuracy: time.Duration(accuracy),
|
||||
}
|
||||
}
|
||||
|
||||
func (data *SarinResponseData) Add(responseKey string, responseTime time.Duration) {
|
||||
data.Lock()
|
||||
defer data.Unlock()
|
||||
|
||||
response, ok := data.Responses[responseKey]
|
||||
if !ok {
|
||||
data.Responses[responseKey] = &Response{
|
||||
durations: map[time.Duration]uint64{
|
||||
responseTime / data.accuracy: 1,
|
||||
},
|
||||
}
|
||||
} else {
|
||||
response.durations[responseTime/data.accuracy]++
|
||||
}
|
||||
}
|
||||
|
||||
// PrintTable renders the aggregated statistics to stdout as a bordered
// table with one row per response key plus a final "Total" row.
// NOTE(review): rows come from map iteration, so per-key row order is
// nondeterministic between runs.
func (data *SarinResponseData) PrintTable() {
	data.Lock()
	defer data.Unlock()

	output := data.prepareOutputData()

	headerStyle := lipgloss.NewStyle().
		Bold(true).
		Foreground(lipgloss.Color("246")).
		Padding(0, 1)

	cellStyle := lipgloss.NewStyle().
		Padding(0, 1)

	// One row per response key, plus one for the total appended below.
	rows := make([][]string, 0, len(output.Responses)+1)
	for key, stats := range output.Responses {
		rows = append(rows, []string{
			wrapText(key, DefaultResponseColumnMaxWidth),
			stats.Count.String(),
			stats.Min.String(),
			stats.Max.String(),
			stats.Average.String(),
			stats.P90.String(),
			stats.P95.String(),
			stats.P99.String(),
		})
	}

	rows = append(rows, []string{
		"Total",
		output.Total.Count.String(),
		output.Total.Min.String(),
		output.Total.Max.String(),
		output.Total.Average.String(),
		output.Total.P90.String(),
		output.Total.P95.String(),
		output.Total.P99.String(),
	})

	tbl := table.New().
		Border(lipgloss.NormalBorder()).
		BorderStyle(lipgloss.NewStyle().Foreground(lipgloss.Color("240"))).
		BorderRow(true).
		Headers("Response", "Count", "Min", "Max", "Average", "P90", "P95", "P99").
		Rows(rows...).
		StyleFunc(func(row, col int) lipgloss.Style {
			if row == table.HeaderRow {
				return headerStyle
			}
			return cellStyle
		})

	fmt.Println(tbl)
}
|
||||
|
||||
// PrintJSON writes the aggregated statistics to stdout as indented JSON.
// Panics if encoding fails.
func (data *SarinResponseData) PrintJSON() {
	data.Lock()
	defer data.Unlock()

	output := data.prepareOutputData()
	encoder := json.NewEncoder(os.Stdout)
	encoder.SetIndent("", " ")
	if err := encoder.Encode(output); err != nil {
		panic(err)
	}
}
|
||||
|
||||
func (data *SarinResponseData) PrintYAML() {
|
||||
data.Lock()
|
||||
defer data.Unlock()
|
||||
|
||||
output := data.prepareOutputData()
|
||||
encoder := yaml.NewEncoder(os.Stdout)
|
||||
encoder.SetIndent(2)
|
||||
if err := encoder.Encode(output); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// responseStat holds the aggregate statistics reported for one response key.
type responseStat struct {
	Count   BigInt   `json:"count" yaml:"count"`
	Min     Duration `json:"min" yaml:"min"`
	Max     Duration `json:"max" yaml:"max"`
	Average Duration `json:"average" yaml:"average"`
	P90     Duration `json:"p90" yaml:"p90"`
	P95     Duration `json:"p95" yaml:"p95"`
	P99     Duration `json:"p99" yaml:"p99"`
}

// responseStats maps a response key to its aggregate statistics.
type responseStats map[string]responseStat

// outputData is the serializable report: per-key stats plus a combined total.
type outputData struct {
	Responses map[string]responseStat `json:"responses" yaml:"responses"`
	Total     responseStat            `json:"total" yaml:"total"`
}
|
||||
|
||||
// prepareOutputData converts the raw duration histograms into per-key
// statistics plus a combined total. Callers must already hold the mutex.
func (data *SarinResponseData) prepareOutputData() outputData {
	switch len(data.Responses) {
	case 0:
		return outputData{
			Responses: make(map[string]responseStat),
			Total:     responseStat{},
		}
	case 1:
		// With a single key the total equals that key's stats, so avoid
		// building and aggregating a second histogram.
		var (
			responseKey string
			stats       responseStat
		)
		for key, response := range data.Responses {
			stats = calculateStats(response.durations, data.accuracy)
			responseKey = key
		}
		return outputData{
			Responses: responseStats{
				responseKey: stats,
			},
			Total: stats,
		}
	default:
		// Calculate stats for each response
		allStats := make(responseStats)
		var totalDurations = make(map[time.Duration]uint64)

		for key, response := range data.Responses {
			stats := calculateStats(response.durations, data.accuracy)
			allStats[key] = stats

			// Aggregate for total row
			for duration, count := range response.durations {
				totalDurations[duration] += count
			}
		}

		return outputData{
			Responses: allStats,
			Total:     calculateStats(totalDurations, data.accuracy),
		}
	}
}
|
||||
|
||||
// calculateStats computes count, min, max, average, and P90/P95/P99 from a
// bucketed duration histogram. Histogram keys are responseTime/accuracy, so
// each bucket is multiplied back by accuracy to recover real durations.
// Counts and the sum are accumulated in big.Int to avoid uint64 overflow.
func calculateStats(durations map[time.Duration]uint64, accuracy time.Duration) responseStat {
	if len(durations) == 0 {
		return responseStat{}
	}

	// Extract and sort unique durations
	sortedDurations := make([]time.Duration, 0, len(durations))
	for duration := range durations {
		sortedDurations = append(sortedDurations, duration)
	}
	slices.Sort(sortedDurations)

	sum := new(big.Int)
	totalCount := new(big.Int)
	// Min/max fall out of the sorted bucket list directly.
	minDuration := sortedDurations[0] * accuracy
	maxDuration := sortedDurations[len(sortedDurations)-1] * accuracy

	for _, duration := range sortedDurations {
		actualDuration := duration * accuracy
		count := durations[duration]

		totalCount.Add(
			totalCount,
			new(big.Int).SetUint64(count),
		)

		// sum += actualDuration * count
		sum.Add(
			sum,
			new(big.Int).Mul(
				new(big.Int).SetInt64(int64(actualDuration)),
				new(big.Int).SetUint64(count),
			),
		)
	}

	// Calculate percentiles
	p90 := calculatePercentile(sortedDurations, durations, totalCount, 90, accuracy)
	p95 := calculatePercentile(sortedDurations, durations, totalCount, 95, accuracy)
	p99 := calculatePercentile(sortedDurations, durations, totalCount, 99, accuracy)

	return responseStat{
		Count:   BigInt{totalCount},
		Min:     Duration(minDuration),
		Max:     Duration(maxDuration),
		Average: Duration(div(sum, totalCount).Int64()),
		P90:     p90,
		P95:     p95,
		P99:     p99,
	}
}
|
||||
|
||||
func calculatePercentile(sortedDurations []time.Duration, durations map[time.Duration]uint64, totalCount *big.Int, percentile int, accuracy time.Duration) Duration {
|
||||
// Calculate the target position for the percentile
|
||||
// Using ceiling method: position = ceil(totalCount * percentile / 100)
|
||||
target := new(big.Int).Mul(totalCount, big.NewInt(int64(percentile)))
|
||||
target.Add(target, big.NewInt(99)) // Add 99 to achieve ceiling division by 100
|
||||
target.Div(target, big.NewInt(100))
|
||||
|
||||
// Accumulate counts until we reach the target position
|
||||
cumulative := new(big.Int)
|
||||
for _, duration := range sortedDurations {
|
||||
count := durations[duration]
|
||||
cumulative.Add(cumulative, new(big.Int).SetUint64(count))
|
||||
|
||||
if cumulative.Cmp(target) >= 0 {
|
||||
return Duration(duration * accuracy)
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to the last duration (shouldn't happen with valid data)
|
||||
return Duration(sortedDurations[len(sortedDurations)-1] * accuracy)
|
||||
}
|
||||
|
||||
// div performs division with rounding to the nearest integer.
|
||||
func div(x, y *big.Int) *big.Int {
|
||||
quotient, remainder := new(big.Int).DivMod(x, y, new(big.Int))
|
||||
if remainder.Mul(remainder, big.NewInt(2)).Cmp(y) >= 0 {
|
||||
quotient.Add(quotient, big.NewInt(1))
|
||||
}
|
||||
return quotient
|
||||
}
|
||||
|
||||
// wrapText wraps s onto multiple lines of at most maxWidth characters,
// joining the chunks with newlines.
//
// Fixes over the previous version: counting is rune-based so multi-byte
// UTF-8 characters are never split mid-sequence, and maxWidth <= 0 returns
// s unchanged instead of panicking (negative) or looping forever (zero).
func wrapText(s string, maxWidth int) string {
	if maxWidth <= 0 || len(s) <= maxWidth {
		return s
	}

	var b strings.Builder
	b.Grow(len(s) + len(s)/maxWidth)
	lineLen := 0
	for _, r := range s {
		if lineLen == maxWidth {
			b.WriteByte('\n')
			lineLen = 0
		}
		b.WriteRune(r)
		lineLen++
	}
	return b.String()
}
|
||||
776
internal/sarin/sarin.go
Normal file
776
internal/sarin/sarin.go
Normal file
@@ -0,0 +1,776 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/charmbracelet/bubbles/progress"
|
||||
"github.com/charmbracelet/bubbles/spinner"
|
||||
tea "github.com/charmbracelet/bubbletea"
|
||||
"github.com/charmbracelet/lipgloss"
|
||||
"github.com/valyala/fasthttp"
|
||||
"go.aykhans.me/sarin/internal/types"
|
||||
)
|
||||
|
||||
// runtimeMessageLevel classifies runtime messages shown in the progress UI.
type runtimeMessageLevel uint8

const (
	runtimeMessageLevelWarning runtimeMessageLevel = iota
	runtimeMessageLevelError
)

// runtimeMessage is a timestamped, leveled text message emitted by workers
// and rendered by the terminal progress models.
type runtimeMessage struct {
	timestamp time.Time
	level     runtimeMessageLevel
	text      string
}

// messageSender delivers a runtime message to the UI; in quiet mode the
// installed implementation simply discards it.
type messageSender func(level runtimeMessageLevel, text string)
|
||||
|
||||
// sarin holds the configuration and runtime state of a single load test run.
type sarin struct {
	workers        uint       // number of concurrent workers (normalized to >= 1 by NewSarin)
	requestURL     *url.URL   // target URL every request is sent to
	methods        []string   // HTTP methods to pick from when generating requests
	params         types.Params
	headers        types.Headers
	cookies        types.Cookies
	bodies         []string   // request body candidates
	totalRequests  *uint64    // request budget; nil (or 0) means unbounded (see sendJobs)
	totalDuration  *time.Duration // time budget; nil means no time limit (see setupDurationTimeout)
	timeout        time.Duration  // per-request timeout passed to DoTimeout
	quiet          bool       // suppress the terminal progress UI and runtime messages
	skipCertVerify bool       // skip TLS certificate verification
	values         []string   // template values fed to the request generator
	collectStats   bool       // record per-response stats into responses
	dryRun         bool       // generate requests but never send them

	hostClients []*fasthttp.HostClient // one client per proxy (or a single direct client)
	responses   *SarinResponseData     // stats sink; nil unless collectStats is set
}
|
||||
|
||||
// NewSarin creates a new sarin instance for load testing.
|
||||
// It can return the following errors:
|
||||
// - types.ProxyDialError
|
||||
func NewSarin(
|
||||
ctx context.Context,
|
||||
methods []string,
|
||||
requestURL *url.URL,
|
||||
timeout time.Duration,
|
||||
workers uint,
|
||||
totalRequests *uint64,
|
||||
totalDuration *time.Duration,
|
||||
quiet bool,
|
||||
skipCertVerify bool,
|
||||
params types.Params,
|
||||
headers types.Headers,
|
||||
cookies types.Cookies,
|
||||
bodies []string,
|
||||
proxies types.Proxies,
|
||||
values []string,
|
||||
collectStats bool,
|
||||
dryRun bool,
|
||||
) (*sarin, error) {
|
||||
if workers == 0 {
|
||||
workers = 1
|
||||
}
|
||||
|
||||
hostClients, err := newHostClients(ctx, timeout, proxies, workers, requestURL, skipCertVerify)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
srn := &sarin{
|
||||
workers: workers,
|
||||
requestURL: requestURL,
|
||||
methods: methods,
|
||||
params: params,
|
||||
headers: headers,
|
||||
cookies: cookies,
|
||||
bodies: bodies,
|
||||
totalRequests: totalRequests,
|
||||
totalDuration: totalDuration,
|
||||
timeout: timeout,
|
||||
quiet: quiet,
|
||||
skipCertVerify: skipCertVerify,
|
||||
values: values,
|
||||
collectStats: collectStats,
|
||||
dryRun: dryRun,
|
||||
hostClients: hostClients,
|
||||
}
|
||||
|
||||
if collectStats {
|
||||
srn.responses = NewSarinResponseData(uint32(100))
|
||||
}
|
||||
|
||||
return srn, nil
|
||||
}
|
||||
|
||||
// GetResponses returns the collected response statistics; it is nil when
// stats collection was not enabled in NewSarin.
func (q sarin) GetResponses() *SarinResponseData {
	return q.responses
}
|
||||
|
||||
func (q sarin) Start(ctx context.Context) {
|
||||
jobsCtx, jobsCancel := context.WithCancel(ctx)
|
||||
|
||||
var workersWG sync.WaitGroup
|
||||
jobsCh := make(chan struct{}, max(q.workers, 1))
|
||||
|
||||
var counter atomic.Uint64
|
||||
|
||||
totalRequests := uint64(0)
|
||||
if q.totalRequests != nil {
|
||||
totalRequests = *q.totalRequests
|
||||
}
|
||||
|
||||
var streamCtx context.Context
|
||||
var streamCancel context.CancelFunc
|
||||
var streamCh chan struct{}
|
||||
var messageChannel chan runtimeMessage
|
||||
var sendMessage messageSender
|
||||
|
||||
if q.quiet {
|
||||
sendMessage = func(level runtimeMessageLevel, text string) {}
|
||||
} else {
|
||||
streamCtx, streamCancel = context.WithCancel(context.Background())
|
||||
defer streamCancel()
|
||||
streamCh = make(chan struct{})
|
||||
messageChannel = make(chan runtimeMessage, max(q.workers, 1))
|
||||
sendMessage = func(level runtimeMessageLevel, text string) {
|
||||
messageChannel <- runtimeMessage{
|
||||
timestamp: time.Now(),
|
||||
level: level,
|
||||
text: text,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Start workers
|
||||
q.startWorkers(&workersWG, jobsCh, q.hostClients, &counter, sendMessage)
|
||||
|
||||
if !q.quiet {
|
||||
// Start streaming to terminal
|
||||
//nolint:contextcheck // streamCtx must remain active until all workers complete to ensure all collected data is streamed
|
||||
go q.streamProgress(streamCtx, jobsCancel, streamCh, totalRequests, &counter, messageChannel)
|
||||
}
|
||||
|
||||
// Setup duration-based cancellation
|
||||
q.setupDurationTimeout(ctx, jobsCancel)
|
||||
// Distribute jobs to workers.
|
||||
// This blocks until all jobs are sent or the context is canceled.
|
||||
q.sendJobs(jobsCtx, jobsCh)
|
||||
|
||||
// Close the jobs channel so workers stop after completing their current job
|
||||
close(jobsCh)
|
||||
// Wait until all workers stopped
|
||||
workersWG.Wait()
|
||||
if messageChannel != nil {
|
||||
close(messageChannel)
|
||||
}
|
||||
|
||||
if !q.quiet {
|
||||
// Stop the progress streaming
|
||||
streamCancel()
|
||||
// Wait until progress streaming has completely stopped
|
||||
<-streamCh
|
||||
}
|
||||
}
|
||||
|
||||
// Worker consumes jobs from the channel until it is closed. The (dryRun,
// collectStats, isDynamic) combination is resolved once up front into one of
// eight specialized loops, so the hot per-job path carries no configuration
// branching.
func (q sarin) Worker(
	jobs <-chan struct{},
	hostClientGenerator HostClientGenerator,
	counter *atomic.Uint64,
	sendMessage messageSender,
) {
	// Acquire one request/response pair per worker and reuse it for every
	// job; released when the worker exits.
	req := fasthttp.AcquireRequest()
	resp := fasthttp.AcquireResponse()
	defer fasthttp.ReleaseRequest(req)
	defer fasthttp.ReleaseResponse(resp)

	// isDynamic selects the per-job-regeneration loops below; the static
	// variants invoke requestGenerator only once before their loop.
	requestGenerator, isDynamic := NewRequestGenerator(q.methods, q.requestURL, q.params, q.headers, q.cookies, q.bodies, q.values)

	if q.dryRun {
		// Dry-run loops never send anything; resp is unused there.
		switch {
		case q.collectStats && isDynamic:
			q.workerDryRunStatsWithDynamic(jobs, req, requestGenerator, counter, sendMessage)
		case q.collectStats && !isDynamic:
			q.workerDryRunStatsWithStatic(jobs, req, requestGenerator, counter, sendMessage)
		case !q.collectStats && isDynamic:
			q.workerDryRunNoStatsWithDynamic(jobs, req, requestGenerator, counter, sendMessage)
		default:
			q.workerDryRunNoStatsWithStatic(jobs, req, requestGenerator, counter, sendMessage)
		}
	} else {
		switch {
		case q.collectStats && isDynamic:
			q.workerStatsWithDynamic(jobs, req, resp, requestGenerator, hostClientGenerator, counter, sendMessage)
		case q.collectStats && !isDynamic:
			q.workerStatsWithStatic(jobs, req, resp, requestGenerator, hostClientGenerator, counter, sendMessage)
		case !q.collectStats && isDynamic:
			q.workerNoStatsWithDynamic(jobs, req, resp, requestGenerator, hostClientGenerator, counter, sendMessage)
		default:
			q.workerNoStatsWithStatic(jobs, req, resp, requestGenerator, hostClientGenerator, counter, sendMessage)
		}
	}
}
|
||||
|
||||
// workerStatsWithDynamic is the collectStats + dynamic-request loop: every
// job regenerates the request, sends it, and records the outcome (status
// code string or error text) together with the round-trip duration.
func (q sarin) workerStatsWithDynamic(
	jobs <-chan struct{},
	req *fasthttp.Request,
	resp *fasthttp.Response,
	requestGenerator RequestGenerator,
	hostClientGenerator HostClientGenerator,
	counter *atomic.Uint64,
	sendMessage messageSender,
) {
	for range jobs {
		req.Reset()
		resp.Reset()

		if err := requestGenerator(req); err != nil {
			// Generation failures are recorded with zero duration and
			// surfaced to the UI, but still count toward progress.
			q.responses.Add(err.Error(), 0)
			sendMessage(runtimeMessageLevelError, err.Error())
			counter.Add(1)
			continue
		}

		startTime := time.Now()
		err := hostClientGenerator().DoTimeout(req, resp, q.timeout)
		if err != nil {
			q.responses.Add(err.Error(), time.Since(startTime))
		} else {
			q.responses.Add(statusCodeToString(resp.StatusCode()), time.Since(startTime))
		}
		counter.Add(1)
	}
}
|
||||
|
||||
// workerStatsWithStatic is the collectStats + static-request loop: the
// request is generated exactly once and re-sent for every job, with each
// outcome (status code string or error text) recorded with its duration.
func (q sarin) workerStatsWithStatic(
	jobs <-chan struct{},
	req *fasthttp.Request,
	resp *fasthttp.Response,
	requestGenerator RequestGenerator,
	hostClientGenerator HostClientGenerator,
	counter *atomic.Uint64,
	sendMessage messageSender,
) {
	if err := requestGenerator(req); err != nil {
		// Static request generation failed - record all jobs as errors
		for range jobs {
			q.responses.Add(err.Error(), 0)
			sendMessage(runtimeMessageLevelError, err.Error())
			counter.Add(1)
		}
		return
	}

	for range jobs {
		// Only the response is reset; req is reused unchanged on purpose.
		resp.Reset()

		startTime := time.Now()
		err := hostClientGenerator().DoTimeout(req, resp, q.timeout)
		if err != nil {
			q.responses.Add(err.Error(), time.Since(startTime))
		} else {
			q.responses.Add(statusCodeToString(resp.StatusCode()), time.Since(startTime))
		}
		counter.Add(1)
	}
}
|
||||
|
||||
func (q sarin) workerNoStatsWithDynamic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
resp *fasthttp.Response,
|
||||
requestGenerator RequestGenerator,
|
||||
hostClientGenerator HostClientGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
for range jobs {
|
||||
req.Reset()
|
||||
resp.Reset()
|
||||
if err := requestGenerator(req); err != nil {
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
counter.Add(1)
|
||||
continue
|
||||
}
|
||||
_ = hostClientGenerator().DoTimeout(req, resp, q.timeout)
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
// workerNoStatsWithStatic is the fire-and-forget static-request loop: the
// request is generated once and re-sent for every job; responses and send
// errors are discarded, only the progress counter advances.
func (q sarin) workerNoStatsWithStatic(
	jobs <-chan struct{},
	req *fasthttp.Request,
	resp *fasthttp.Response,
	requestGenerator RequestGenerator,
	hostClientGenerator HostClientGenerator,
	counter *atomic.Uint64,
	sendMessage messageSender,
) {
	if err := requestGenerator(req); err != nil {
		// The error is reported once, not once per job.
		sendMessage(runtimeMessageLevelError, err.Error())

		// Static request generation failed - just count the jobs without sending
		for range jobs {
			counter.Add(1)
		}
		return
	}

	for range jobs {
		resp.Reset()
		// Send errors are intentionally ignored in no-stats mode.
		_ = hostClientGenerator().DoTimeout(req, resp, q.timeout)
		counter.Add(1)
	}
}
|
||||
|
||||
// dryRunResponseKey is the stats key used for successful dry-run "requests".
const dryRunResponseKey = "dry-run"

// statusCodeStrings holds pre-computed decimal strings for HTTP status codes
// 100-599, indexed by code-100. A dense slice beats the previous map for
// this fixed integer range: direct indexing, no hashing, better locality.
var statusCodeStrings = func() []string {
	s := make([]string, 500)
	for i := range s {
		s[i] = strconv.Itoa(i + 100)
	}
	return s
}()

// statusCodeToString returns a string representation of the HTTP status code.
// Codes 100-599 hit the pre-computed table (no allocation); anything else
// falls back to strconv.Itoa.
func statusCodeToString(code int) string {
	if code >= 100 && code < 600 {
		return statusCodeStrings[code-100]
	}
	return strconv.Itoa(code)
}
|
||||
|
||||
// workerDryRunStatsWithDynamic is the dry-run + stats + dynamic loop: every
// job only generates a request (nothing is sent) and records either the
// generation error or a "dry-run" entry, timed over the generation itself.
func (q sarin) workerDryRunStatsWithDynamic(
	jobs <-chan struct{},
	req *fasthttp.Request,
	requestGenerator RequestGenerator,
	counter *atomic.Uint64,
	sendMessage messageSender,
) {
	for range jobs {
		req.Reset()
		startTime := time.Now()
		if err := requestGenerator(req); err != nil {
			q.responses.Add(err.Error(), time.Since(startTime))
			sendMessage(runtimeMessageLevelError, err.Error())
			counter.Add(1)
			continue
		}
		q.responses.Add(dryRunResponseKey, time.Since(startTime))
		counter.Add(1)
	}
}
|
||||
|
||||
// workerDryRunStatsWithStatic is the dry-run + stats + static loop: the
// request is generated once; every job is then recorded as a "dry-run"
// entry with zero duration since no per-job work is performed.
func (q sarin) workerDryRunStatsWithStatic(
	jobs <-chan struct{},
	req *fasthttp.Request,
	requestGenerator RequestGenerator,
	counter *atomic.Uint64,
	sendMessage messageSender,
) {
	if err := requestGenerator(req); err != nil {
		// Static request generation failed - record all jobs as errors
		for range jobs {
			q.responses.Add(err.Error(), 0)
			sendMessage(runtimeMessageLevelError, err.Error())
			counter.Add(1)
		}
		return
	}

	for range jobs {
		q.responses.Add(dryRunResponseKey, 0)
		counter.Add(1)
	}
}
|
||||
|
||||
func (q sarin) workerDryRunNoStatsWithDynamic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
requestGenerator RequestGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
for range jobs {
|
||||
req.Reset()
|
||||
if err := requestGenerator(req); err != nil {
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
}
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) workerDryRunNoStatsWithStatic(
|
||||
jobs <-chan struct{},
|
||||
req *fasthttp.Request,
|
||||
requestGenerator RequestGenerator,
|
||||
counter *atomic.Uint64,
|
||||
sendMessage messageSender,
|
||||
) {
|
||||
if err := requestGenerator(req); err != nil {
|
||||
sendMessage(runtimeMessageLevelError, err.Error())
|
||||
}
|
||||
|
||||
for range jobs {
|
||||
counter.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
// newHostClients initializes HTTP clients for the given configuration.
|
||||
// It can return the following errors:
|
||||
// - types.ProxyDialError
|
||||
func newHostClients(
|
||||
ctx context.Context,
|
||||
timeout time.Duration,
|
||||
proxies types.Proxies,
|
||||
workers uint,
|
||||
requestURL *url.URL,
|
||||
skipCertVerify bool,
|
||||
) ([]*fasthttp.HostClient, error) {
|
||||
proxiesRaw := make([]url.URL, len(proxies))
|
||||
for i, proxy := range proxies {
|
||||
proxiesRaw[i] = url.URL(proxy)
|
||||
}
|
||||
|
||||
maxConns := max(fasthttp.DefaultMaxConnsPerHost, workers)
|
||||
maxConns = ((maxConns * 50 / 100) + maxConns)
|
||||
return NewHostClients(
|
||||
ctx,
|
||||
timeout,
|
||||
proxiesRaw,
|
||||
maxConns,
|
||||
requestURL,
|
||||
skipCertVerify,
|
||||
)
|
||||
}
|
||||
|
||||
func (q sarin) startWorkers(wg *sync.WaitGroup, jobs <-chan struct{}, hostClients []*fasthttp.HostClient, counter *atomic.Uint64, sendMessage messageSender) {
|
||||
for range max(q.workers, 1) {
|
||||
wg.Go(func() {
|
||||
q.Worker(jobs, NewHostClientGenerator(hostClients...), counter, sendMessage)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) setupDurationTimeout(ctx context.Context, cancel context.CancelFunc) {
|
||||
if q.totalDuration != nil {
|
||||
go func() {
|
||||
timer := time.NewTimer(*q.totalDuration)
|
||||
defer timer.Stop()
|
||||
select {
|
||||
case <-timer.C:
|
||||
cancel()
|
||||
case <-ctx.Done():
|
||||
// Context cancelled, cleanup
|
||||
}
|
||||
}()
|
||||
}
|
||||
}
|
||||
|
||||
func (q sarin) sendJobs(ctx context.Context, jobs chan<- struct{}) {
|
||||
if q.totalRequests != nil && *q.totalRequests > 0 {
|
||||
for range *q.totalRequests {
|
||||
if ctx.Err() != nil {
|
||||
break
|
||||
}
|
||||
jobs <- struct{}{}
|
||||
}
|
||||
} else {
|
||||
for ctx.Err() == nil {
|
||||
jobs <- struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// tickMsg is delivered by progressTickCmd to drive periodic UI redraws.
type tickMsg time.Time

// Shared lipgloss styles for the terminal progress UIs.
var (
	helpStyle    = lipgloss.NewStyle().Foreground(lipgloss.Color("#d1d1d1"))
	errorStyle   = lipgloss.NewStyle().Foreground(lipgloss.Color("#FC5B5B")).Bold(true)
	warningStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("#FFD93D")).Bold(true)
	// messageChannelStyle frames the runtime message log with a thick left
	// border and muted foreground.
	messageChannelStyle = lipgloss.NewStyle().
				Border(lipgloss.ThickBorder(), false, false, false, true).
				BorderForeground(lipgloss.Color("#757575")).
				PaddingLeft(1).
				Margin(1, 0, 0, 0).
				Foreground(lipgloss.Color("#888888"))
)
|
||||
|
||||
// progressModel is the bubbletea model used when the total request count is
// known; it renders a bounded progress bar plus a rolling message log.
type progressModel struct {
	progress   progress.Model
	startTime  time.Time
	messages   []string        // fixed-size rolling log of formatted runtime messages
	counter    *atomic.Uint64  // shared request counter written by workers
	current    uint64          // snapshot of counter taken during View
	maxValue   uint64          // total request count (the 100% mark)
	ctx        context.Context //nolint:containedctx
	cancel     context.CancelFunc // cancels job dispatch on Ctrl+C
	cancelling bool               // set once Ctrl+C has been pressed
}
|
||||
|
||||
// Init implements tea.Model and kicks off the periodic redraw tick.
func (m progressModel) Init() tea.Cmd {
	return tea.Batch(progressTickCmd())
}
|
||||
|
||||
// Update implements tea.Model. It handles Ctrl+C (cooperative cancellation),
// terminal resizes, incoming runtime messages, and the periodic tick; every
// branch also quits the program once the stream context has been cancelled.
func (m progressModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
	switch msg := msg.(type) {
	case tea.KeyMsg:
		if msg.Type == tea.KeyCtrlC {
			// Request cancellation of job dispatch but keep the UI alive so
			// the final state can still be rendered while workers drain.
			m.cancelling = true
			m.cancel()
		}
		return m, nil

	case tea.WindowSizeMsg:
		// Keep the bar within the terminal, with a sane minimum width.
		m.progress.Width = max(10, msg.Width-1)
		if m.ctx.Err() != nil {
			return m, tea.Quit
		}
		return m, nil

	case runtimeMessage:
		// Format as "[HH:MM:SS] LEVEL: text".
		var msgBuilder strings.Builder
		msgBuilder.WriteString("[")
		msgBuilder.WriteString(msg.timestamp.Format("15:04:05"))
		msgBuilder.WriteString("] ")
		switch msg.level {
		case runtimeMessageLevelError:
			msgBuilder.WriteString(errorStyle.Render("ERROR: "))
		case runtimeMessageLevelWarning:
			msgBuilder.WriteString(warningStyle.Render("WARNING: "))
		}
		msgBuilder.WriteString(msg.text)
		// Fixed-size rolling log: drop the oldest entry, append the newest.
		m.messages = append(m.messages[1:], msgBuilder.String())
		if m.ctx.Err() != nil {
			return m, tea.Quit
		}
		return m, nil

	case tickMsg:
		if m.ctx.Err() != nil {
			return m, tea.Quit
		}
		// Schedule the next redraw tick.
		return m, progressTickCmd()

	default:
		if m.ctx.Err() != nil {
			return m, tea.Quit
		}
		return m, nil
	}
}
|
||||
|
||||
// View implements tea.Model. It renders the message log (if any entries are
// non-empty), a "current/total - elapsed" line, the progress bar, and a
// status hint ("Stopping..." once Ctrl+C was pressed).
func (m progressModel) View() string {
	// Collect non-empty log entries; the messages slice is pre-sized, so
	// slots that were never filled hold empty strings and are skipped.
	var messagesBuilder strings.Builder
	for i, msg := range m.messages {
		if len(msg) > 0 {
			messagesBuilder.WriteString(msg)
			if i < len(m.messages)-1 {
				messagesBuilder.WriteString("\n")
			}
		}
	}

	var finalBuilder strings.Builder
	if messagesBuilder.Len() > 0 {
		finalBuilder.WriteString(messageChannelStyle.Render(messagesBuilder.String()))
		finalBuilder.WriteString("\n")
	}

	// Snapshot the shared counter once per render.
	m.current = m.counter.Load()
	finalBuilder.WriteString("\n ")
	finalBuilder.WriteString(strconv.FormatUint(m.current, 10))
	finalBuilder.WriteString("/")
	finalBuilder.WriteString(strconv.FormatUint(m.maxValue, 10))
	finalBuilder.WriteString(" - ")
	// Elapsed time, rounded to a tenth of a second.
	finalBuilder.WriteString(time.Since(m.startTime).Round(time.Second / 10).String())
	finalBuilder.WriteString("\n ")
	finalBuilder.WriteString(m.progress.ViewAs(float64(m.current) / float64(m.maxValue)))
	finalBuilder.WriteString("\n\n ")
	if m.cancelling {
		finalBuilder.WriteString(helpStyle.Render("Stopping..."))
	} else {
		finalBuilder.WriteString(helpStyle.Render("Press Ctrl+C to quit"))
	}
	return finalBuilder.String()
}
|
||||
|
||||
func progressTickCmd() tea.Cmd {
|
||||
return tea.Tick(time.Millisecond*250, func(t time.Time) tea.Msg {
|
||||
return tickMsg(t)
|
||||
})
|
||||
}
|
||||
|
||||
// infiniteProgressStyle colors the spinner and its static quit-state dots.
var infiniteProgressStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("#00D4FF"))

// infiniteProgressModel is the bubbletea model used when no total request
// count is known; it renders a spinner, a running counter, and the log.
type infiniteProgressModel struct {
	spinner    spinner.Model
	startTime  time.Time
	counter    *atomic.Uint64  // shared request counter written by workers
	messages   []string        // fixed-size rolling log of formatted runtime messages
	ctx        context.Context //nolint:containedctx
	quit       bool // set once the model decides to quit; switches View to the final frame
	cancel     context.CancelFunc // cancels job dispatch on Ctrl+C
	cancelling bool               // set once Ctrl+C has been pressed
}
|
||||
|
||||
// Init implements tea.Model and starts the spinner animation.
func (m infiniteProgressModel) Init() tea.Cmd {
	return m.spinner.Tick
}
|
||||
|
||||
// Update implements tea.Model. It handles Ctrl+C, incoming runtime messages,
// and (by default) spinner animation frames; once the stream context is
// cancelled, the model marks itself as quit and stops the program.
func (m infiniteProgressModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
	switch msg := msg.(type) {
	case tea.KeyMsg:
		if msg.Type == tea.KeyCtrlC {
			// Request cancellation of job dispatch but keep rendering until
			// the workers drain.
			m.cancelling = true
			m.cancel()
		}
		return m, nil

	case runtimeMessage:
		// Format as "[HH:MM:SS] LEVEL: text".
		var msgBuilder strings.Builder
		msgBuilder.WriteString("[")
		msgBuilder.WriteString(msg.timestamp.Format("15:04:05"))
		msgBuilder.WriteString("] ")
		switch msg.level {
		case runtimeMessageLevelError:
			msgBuilder.WriteString(errorStyle.Render("ERROR: "))
		case runtimeMessageLevelWarning:
			msgBuilder.WriteString(warningStyle.Render("WARNING: "))
		}
		msgBuilder.WriteString(msg.text)
		// Fixed-size rolling log: drop the oldest entry, append the newest.
		m.messages = append(m.messages[1:], msgBuilder.String())
		if m.ctx.Err() != nil {
			m.quit = true
			return m, tea.Quit
		}
		return m, nil

	default:
		if m.ctx.Err() != nil {
			m.quit = true
			return m, tea.Quit
		}
		// Any other message (spinner ticks included) advances the spinner.
		var cmd tea.Cmd
		m.spinner, cmd = m.spinner.Update(msg)
		return m, cmd
	}
}
|
||||
|
||||
// View implements tea.Model. It renders the message log (if any entries are
// non-empty) and a "count spinner elapsed" status line; once quit is set,
// the spinner is replaced by static dots and the help hint is dropped.
func (m infiniteProgressModel) View() string {
	// Collect non-empty log entries; the messages slice is pre-sized, so
	// slots that were never filled hold empty strings and are skipped.
	var messagesBuilder strings.Builder
	for i, msg := range m.messages {
		if len(msg) > 0 {
			messagesBuilder.WriteString(msg)
			if i < len(m.messages)-1 {
				messagesBuilder.WriteString("\n")
			}
		}
	}

	var finalBuilder strings.Builder
	if messagesBuilder.Len() > 0 {
		finalBuilder.WriteString(messageChannelStyle.Render(messagesBuilder.String()))
		finalBuilder.WriteString("\n")
	}

	if m.quit {
		// Final frame: static dots instead of the animated spinner.
		finalBuilder.WriteString("\n ")
		finalBuilder.WriteString(strconv.FormatUint(m.counter.Load(), 10))
		finalBuilder.WriteString(" ")
		finalBuilder.WriteString(infiniteProgressStyle.Render("∙∙∙∙∙"))
		finalBuilder.WriteString(" ")
		finalBuilder.WriteString(time.Since(m.startTime).Round(time.Second / 10).String())
		finalBuilder.WriteString("\n\n")
	} else {
		finalBuilder.WriteString("\n ")
		finalBuilder.WriteString(strconv.FormatUint(m.counter.Load(), 10))
		finalBuilder.WriteString(" ")
		finalBuilder.WriteString(m.spinner.View())
		finalBuilder.WriteString(" ")
		finalBuilder.WriteString(time.Since(m.startTime).Round(time.Second / 10).String())
		finalBuilder.WriteString("\n\n ")
		if m.cancelling {
			finalBuilder.WriteString(helpStyle.Render("Stopping..."))
		} else {
			finalBuilder.WriteString(helpStyle.Render("Press Ctrl+C to quit"))
		}
	}
	return finalBuilder.String()
}
|
||||
|
||||
// streamProgress runs the terminal UI on the calling goroutine until the
// stream context ends, then signals completion on done. When total > 0 a
// bounded progress-bar model is used, otherwise an infinite spinner model.
// cancel is handed to the model so Ctrl+C can stop job dispatch.
func (q sarin) streamProgress(
	ctx context.Context,
	cancel context.CancelFunc,
	done chan<- struct{},
	total uint64,
	counter *atomic.Uint64,
	messageChannel <-chan runtimeMessage,
) {
	var program *tea.Program
	if total > 0 {
		model := progressModel{
			progress:  progress.New(progress.WithGradient("#151594", "#00D4FF")),
			startTime: time.Now(),
			messages:  make([]string, 8), // rolling log of the 8 most recent messages
			counter:   counter,
			current:   0,
			maxValue:  total,
			ctx:       ctx,
			cancel:    cancel,
		}

		program = tea.NewProgram(model)
	} else {
		model := infiniteProgressModel{
			spinner: spinner.New(
				spinner.WithSpinner(
					spinner.Spinner{
						// A dot bouncing left-to-right and back.
						Frames: []string{
							"●∙∙∙∙",
							"∙●∙∙∙",
							"∙∙●∙∙",
							"∙∙∙●∙",
							"∙∙∙∙●",
							"∙∙∙●∙",
							"∙∙●∙∙",
							"∙●∙∙∙",
						},
						FPS: time.Second / 8, //nolint:mnd
					},
				),
				spinner.WithStyle(infiniteProgressStyle),
			),
			startTime: time.Now(),
			counter:   counter,
			messages:  make([]string, 8), // rolling log of the 8 most recent messages
			ctx:       ctx,
			cancel:    cancel,
			quit:      false,
		}

		program = tea.NewProgram(model)
	}

	// Forward worker messages into the UI; this goroutine exits when
	// Start closes messageChannel after all workers finish.
	go func() {
		for msg := range messageChannel {
			program.Send(msg)
		}
	}()

	if _, err := program.Run(); err != nil {
		panic(err)
	}

	// Unblock Start, which waits for the UI to fully stop before returning.
	done <- struct{}{}
}
|
||||
579
internal/sarin/template.go
Normal file
579
internal/sarin/template.go
Normal file
@@ -0,0 +1,579 @@
|
||||
package sarin
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"math/rand/v2"
|
||||
"mime/multipart"
|
||||
"strings"
|
||||
"text/template"
|
||||
"text/template/parse"
|
||||
"time"
|
||||
|
||||
"github.com/brianvoe/gofakeit/v7"
|
||||
)
|
||||
|
||||
func NewDefaultTemplateFuncMap(randSource rand.Source) template.FuncMap {
|
||||
fakeit := gofakeit.NewFaker(randSource, false)
|
||||
|
||||
return template.FuncMap{
|
||||
// Strings
|
||||
"strings_ToUpper": strings.ToUpper,
|
||||
"strings_ToLower": strings.ToLower,
|
||||
"strings_RemoveSpaces": func(s string) string { return strings.ReplaceAll(s, " ", "") },
|
||||
"strings_Replace": strings.Replace,
|
||||
"strings_ToDate": func(dateString string) time.Time {
|
||||
date, err := time.Parse("2006-01-02", dateString)
|
||||
if err != nil {
|
||||
return time.Now()
|
||||
}
|
||||
return date
|
||||
},
|
||||
"strings_First": func(s string, n int) string {
|
||||
runes := []rune(s)
|
||||
if n <= 0 {
|
||||
return ""
|
||||
}
|
||||
if n >= len(runes) {
|
||||
return s
|
||||
}
|
||||
return string(runes[:n])
|
||||
},
|
||||
"strings_Last": func(s string, n int) string {
|
||||
runes := []rune(s)
|
||||
if n <= 0 {
|
||||
return ""
|
||||
}
|
||||
if n >= len(runes) {
|
||||
return s
|
||||
}
|
||||
return string(runes[len(runes)-n:])
|
||||
},
|
||||
"strings_Truncate": func(s string, n int) string {
|
||||
runes := []rune(s)
|
||||
if n <= 0 {
|
||||
return "..."
|
||||
}
|
||||
if n >= len(runes) {
|
||||
return s
|
||||
}
|
||||
return string(runes[:n]) + "..."
|
||||
},
|
||||
"strings_TrimPrefix": strings.TrimPrefix,
|
||||
"strings_TrimSuffix": strings.TrimSuffix,
|
||||
"strings_Join": func(sep string, values ...string) string {
|
||||
return strings.Join(values, sep)
|
||||
},
|
||||
|
||||
// Dict
|
||||
"dict_Str": func(values ...string) map[string]string {
|
||||
dict := make(map[string]string)
|
||||
for i := 0; i < len(values); i += 2 {
|
||||
if i+1 < len(values) {
|
||||
key := values[i]
|
||||
value := values[i+1]
|
||||
dict[key] = value
|
||||
}
|
||||
}
|
||||
return dict
|
||||
},
|
||||
|
||||
// Slice
|
||||
"slice_Str": func(values ...string) []string { return values },
|
||||
"slice_Int": func(values ...int) []int { return values },
|
||||
"slice_Uint": func(values ...uint) []uint { return values },
|
||||
|
||||
// Fakeit / File
|
||||
// "fakeit_CSV": fakeit.CSV(nil),
|
||||
// "fakeit_JSON": fakeit.JSON(nil),
|
||||
// "fakeit_XML": fakeit.XML(nil),
|
||||
"fakeit_FileExtension": fakeit.FileExtension,
|
||||
"fakeit_FileMimeType": fakeit.FileMimeType,
|
||||
|
||||
// Fakeit / ID
|
||||
"fakeit_ID": fakeit.ID,
|
||||
"fakeit_UUID": fakeit.UUID,
|
||||
|
||||
// Fakeit / Template
|
||||
// "fakeit_Template": fakeit.Template(nil) (string, error),
|
||||
// "fakeit_Markdown": fakeit.Markdown(nil) (string, error),
|
||||
// "fakeit_EmailText": fakeit.EmailText(nil) (string, error),
|
||||
// "fakeit_FixedWidth": fakeit.FixedWidth(nil) (string, error),
|
||||
|
||||
// Fakeit / Product
|
||||
// "fakeit_Product": fakeit.Product() *ProductInfo,
|
||||
"fakeit_ProductName": fakeit.ProductName,
|
||||
"fakeit_ProductDescription": fakeit.ProductDescription,
|
||||
"fakeit_ProductCategory": fakeit.ProductCategory,
|
||||
"fakeit_ProductFeature": fakeit.ProductFeature,
|
||||
"fakeit_ProductMaterial": fakeit.ProductMaterial,
|
||||
"fakeit_ProductUPC": fakeit.ProductUPC,
|
||||
"fakeit_ProductAudience": fakeit.ProductAudience,
|
||||
"fakeit_ProductDimension": fakeit.ProductDimension,
|
||||
"fakeit_ProductUseCase": fakeit.ProductUseCase,
|
||||
"fakeit_ProductBenefit": fakeit.ProductBenefit,
|
||||
"fakeit_ProductSuffix": fakeit.ProductSuffix,
|
||||
"fakeit_ProductISBN": func() string { return fakeit.ProductISBN(nil) },
|
||||
|
||||
// Fakeit / Person
|
||||
// "fakeit_Person": fakeit.Person() *PersonInfo,
|
||||
"fakeit_Name": fakeit.Name,
|
||||
"fakeit_NamePrefix": fakeit.NamePrefix,
|
||||
"fakeit_NameSuffix": fakeit.NameSuffix,
|
||||
"fakeit_FirstName": fakeit.FirstName,
|
||||
"fakeit_MiddleName": fakeit.MiddleName,
|
||||
"fakeit_LastName": fakeit.LastName,
|
||||
"fakeit_Gender": fakeit.Gender,
|
||||
"fakeit_Age": fakeit.Age,
|
||||
"fakeit_Ethnicity": fakeit.Ethnicity,
|
||||
"fakeit_SSN": fakeit.SSN,
|
||||
"fakeit_EIN": fakeit.EIN,
|
||||
"fakeit_Hobby": fakeit.Hobby,
|
||||
// "fakeit_Contact": fakeit.Contact() *ContactInfo,
|
||||
"fakeit_Email": fakeit.Email,
|
||||
"fakeit_Phone": fakeit.Phone,
|
||||
"fakeit_PhoneFormatted": fakeit.PhoneFormatted,
|
||||
// "fakeit_Teams": fakeit.Teams(peopleArray []string, teamsArray []string) map[string][]string,
|
||||
|
||||
// Fakeit / Generate
|
||||
// "fakeit_Struct": fakeit.Struct(v any),
|
||||
// "fakeit_Slice": fakeit.Slice(v any),
|
||||
// "fakeit_Map": fakeit.Map() map[string]any,
|
||||
// "fakeit_Generate": fakeit.Generate(value string) string,
|
||||
"fakeit_Regex": fakeit.Regex,
|
||||
|
||||
// Fakeit / Auth
|
||||
"fakeit_Username": fakeit.Username,
|
||||
"fakeit_Password": fakeit.Password,
|
||||
|
||||
// Fakeit / Address
|
||||
// "fakeit_Address": fakeit.Address() *AddressInfo,
|
||||
"fakeit_City": fakeit.City,
|
||||
"fakeit_Country": fakeit.Country,
|
||||
"fakeit_CountryAbr": fakeit.CountryAbr,
|
||||
"fakeit_State": fakeit.State,
|
||||
"fakeit_StateAbr": fakeit.StateAbr,
|
||||
"fakeit_Street": fakeit.Street,
|
||||
"fakeit_StreetName": fakeit.StreetName,
|
||||
"fakeit_StreetNumber": fakeit.StreetNumber,
|
||||
"fakeit_StreetPrefix": fakeit.StreetPrefix,
|
||||
"fakeit_StreetSuffix": fakeit.StreetSuffix,
|
||||
"fakeit_Unit": fakeit.Unit,
|
||||
"fakeit_Zip": fakeit.Zip,
|
||||
"fakeit_Latitude": fakeit.Latitude,
|
||||
"fakeit_LatitudeInRange": func(minLatitude, maxLatitude float64) float64 {
|
||||
value, err := fakeit.LatitudeInRange(minLatitude, maxLatitude)
|
||||
if err != nil {
|
||||
var zero float64
|
||||
return zero
|
||||
}
|
||||
return value
|
||||
},
|
||||
"fakeit_Longitude": fakeit.Longitude,
|
||||
"fakeit_LongitudeInRange": func(minLongitude, maxLongitude float64) float64 {
|
||||
value, err := fakeit.LongitudeInRange(minLongitude, maxLongitude)
|
||||
if err != nil {
|
||||
var zero float64
|
||||
return zero
|
||||
}
|
||||
return value
|
||||
},
|
||||
|
||||
// Fakeit / Game
|
||||
"fakeit_Gamertag": fakeit.Gamertag,
|
||||
// "fakeit_Dice": fakeit.Dice(numDice uint, sides []uint) []uint,
|
||||
|
||||
// Fakeit / Beer
|
||||
"fakeit_BeerAlcohol": fakeit.BeerAlcohol,
|
||||
"fakeit_BeerBlg": fakeit.BeerBlg,
|
||||
"fakeit_BeerHop": fakeit.BeerHop,
|
||||
"fakeit_BeerIbu": fakeit.BeerIbu,
|
||||
"fakeit_BeerMalt": fakeit.BeerMalt,
|
||||
"fakeit_BeerName": fakeit.BeerName,
|
||||
"fakeit_BeerStyle": fakeit.BeerStyle,
|
||||
"fakeit_BeerYeast": fakeit.BeerYeast,
|
||||
|
||||
// Fakeit / Car
|
||||
// "fakeit_Car": fakeit.Car() *CarInfo,
|
||||
"fakeit_CarMaker": fakeit.CarMaker,
|
||||
"fakeit_CarModel": fakeit.CarModel,
|
||||
"fakeit_CarType": fakeit.CarType,
|
||||
"fakeit_CarFuelType": fakeit.CarFuelType,
|
||||
"fakeit_CarTransmissionType": fakeit.CarTransmissionType,
|
||||
|
||||
// Fakeit / Words
|
||||
// Nouns
|
||||
"fakeit_Noun": fakeit.Noun,
|
||||
"fakeit_NounCommon": fakeit.NounCommon,
|
||||
"fakeit_NounConcrete": fakeit.NounConcrete,
|
||||
"fakeit_NounAbstract": fakeit.NounAbstract,
|
||||
"fakeit_NounCollectivePeople": fakeit.NounCollectivePeople,
|
||||
"fakeit_NounCollectiveAnimal": fakeit.NounCollectiveAnimal,
|
||||
"fakeit_NounCollectiveThing": fakeit.NounCollectiveThing,
|
||||
"fakeit_NounCountable": fakeit.NounCountable,
|
||||
"fakeit_NounUncountable": fakeit.NounUncountable,
|
||||
|
||||
// Verbs
|
||||
"fakeit_Verb": fakeit.Verb,
|
||||
"fakeit_VerbAction": fakeit.VerbAction,
|
||||
"fakeit_VerbLinking": fakeit.VerbLinking,
|
||||
"fakeit_VerbHelping": fakeit.VerbHelping,
|
||||
|
||||
// Adverbs
|
||||
"fakeit_Adverb": fakeit.Adverb,
|
||||
"fakeit_AdverbManner": fakeit.AdverbManner,
|
||||
"fakeit_AdverbDegree": fakeit.AdverbDegree,
|
||||
"fakeit_AdverbPlace": fakeit.AdverbPlace,
|
||||
"fakeit_AdverbTimeDefinite": fakeit.AdverbTimeDefinite,
|
||||
"fakeit_AdverbTimeIndefinite": fakeit.AdverbTimeIndefinite,
|
||||
"fakeit_AdverbFrequencyDefinite": fakeit.AdverbFrequencyDefinite,
|
||||
"fakeit_AdverbFrequencyIndefinite": fakeit.AdverbFrequencyIndefinite,
|
||||
|
||||
// Propositions
|
||||
"fakeit_Preposition": fakeit.Preposition,
|
||||
"fakeit_PrepositionSimple": fakeit.PrepositionSimple,
|
||||
"fakeit_PrepositionDouble": fakeit.PrepositionDouble,
|
||||
"fakeit_PrepositionCompound": fakeit.PrepositionCompound,
|
||||
|
||||
// Adjectives
|
||||
"fakeit_Adjective": fakeit.Adjective,
|
||||
"fakeit_AdjectiveDescriptive": fakeit.AdjectiveDescriptive,
|
||||
"fakeit_AdjectiveQuantitative": fakeit.AdjectiveQuantitative,
|
||||
"fakeit_AdjectiveProper": fakeit.AdjectiveProper,
|
||||
"fakeit_AdjectiveDemonstrative": fakeit.AdjectiveDemonstrative,
|
||||
"fakeit_AdjectivePossessive": fakeit.AdjectivePossessive,
|
||||
"fakeit_AdjectiveInterrogative": fakeit.AdjectiveInterrogative,
|
||||
"fakeit_AdjectiveIndefinite": fakeit.AdjectiveIndefinite,
|
||||
|
||||
// Pronouns
|
||||
"fakeit_Pronoun": fakeit.Pronoun,
|
||||
"fakeit_PronounPersonal": fakeit.PronounPersonal,
|
||||
"fakeit_PronounObject": fakeit.PronounObject,
|
||||
"fakeit_PronounPossessive": fakeit.PronounPossessive,
|
||||
"fakeit_PronounReflective": fakeit.PronounReflective,
|
||||
"fakeit_PronounDemonstrative": fakeit.PronounDemonstrative,
|
||||
"fakeit_PronounInterrogative": fakeit.PronounInterrogative,
|
||||
"fakeit_PronounRelative": fakeit.PronounRelative,
|
||||
|
||||
// Connectives
|
||||
"fakeit_Connective": fakeit.Connective,
|
||||
"fakeit_ConnectiveTime": fakeit.ConnectiveTime,
|
||||
"fakeit_ConnectiveComparative": fakeit.ConnectiveComparative,
|
||||
"fakeit_ConnectiveComplaint": fakeit.ConnectiveComplaint,
|
||||
"fakeit_ConnectiveListing": fakeit.ConnectiveListing,
|
||||
"fakeit_ConnectiveCasual": fakeit.ConnectiveCasual,
|
||||
"fakeit_ConnectiveExamplify": fakeit.ConnectiveExamplify,
|
||||
|
||||
// Words
|
||||
"fakeit_Word": fakeit.Word,
|
||||
|
||||
// Text
|
||||
"fakeit_Sentence": fakeit.Sentence,
|
||||
"fakeit_Paragraph": fakeit.Paragraph,
|
||||
"fakeit_LoremIpsumWord": fakeit.LoremIpsumWord,
|
||||
"fakeit_LoremIpsumSentence": fakeit.LoremIpsumSentence,
|
||||
"fakeit_LoremIpsumParagraph": fakeit.LoremIpsumParagraph,
|
||||
"fakeit_Question": fakeit.Question,
|
||||
"fakeit_Quote": fakeit.Quote,
|
||||
"fakeit_Phrase": fakeit.Phrase,
|
||||
|
||||
// Fakeit / Foods
|
||||
"fakeit_Fruit": fakeit.Fruit,
|
||||
"fakeit_Vegetable": fakeit.Vegetable,
|
||||
"fakeit_Breakfast": fakeit.Breakfast,
|
||||
"fakeit_Lunch": fakeit.Lunch,
|
||||
"fakeit_Dinner": fakeit.Dinner,
|
||||
"fakeit_Snack": fakeit.Snack,
|
||||
"fakeit_Dessert": fakeit.Dessert,
|
||||
|
||||
// Fakeit / Misc
|
||||
"fakeit_Bool": fakeit.Bool,
|
||||
// "fakeit_Weighted": fakeit.Weighted(options []any, weights []float32) (any, error),
|
||||
"fakeit_FlipACoin": fakeit.FlipACoin,
|
||||
// "fakeit_RandomMapKey": fakeit.RandomMapKey(mapI any) any,
|
||||
// "fakeit_ShuffleAnySlice": fakeit.ShuffleAnySlice(v any),
|
||||
|
||||
// Fakeit / Colors
|
||||
"fakeit_Color": fakeit.Color,
|
||||
"fakeit_HexColor": fakeit.HexColor,
|
||||
"fakeit_RGBColor": fakeit.RGBColor,
|
||||
"fakeit_SafeColor": fakeit.SafeColor,
|
||||
"fakeit_NiceColors": fakeit.NiceColors,
|
||||
|
||||
// Fakeit / Images
|
||||
// "fakeit_Image": fakeit.Image(width int, height int) *img.RGBA,
|
||||
"fakeit_ImageJpeg": fakeit.ImageJpeg,
|
||||
"fakeit_ImagePng": fakeit.ImagePng,
|
||||
|
||||
// Fakeit / Internet
|
||||
"fakeit_URL": fakeit.URL,
|
||||
"fakeit_UrlSlug": fakeit.UrlSlug,
|
||||
"fakeit_DomainName": fakeit.DomainName,
|
||||
"fakeit_DomainSuffix": fakeit.DomainSuffix,
|
||||
"fakeit_IPv4Address": fakeit.IPv4Address,
|
||||
"fakeit_IPv6Address": fakeit.IPv6Address,
|
||||
"fakeit_MacAddress": fakeit.MacAddress,
|
||||
"fakeit_HTTPStatusCode": fakeit.HTTPStatusCode,
|
||||
"fakeit_HTTPStatusCodeSimple": fakeit.HTTPStatusCodeSimple,
|
||||
"fakeit_LogLevel": fakeit.LogLevel,
|
||||
"fakeit_HTTPMethod": fakeit.HTTPMethod,
|
||||
"fakeit_HTTPVersion": fakeit.HTTPVersion,
|
||||
"fakeit_UserAgent": fakeit.UserAgent,
|
||||
"fakeit_ChromeUserAgent": fakeit.ChromeUserAgent,
|
||||
"fakeit_FirefoxUserAgent": fakeit.FirefoxUserAgent,
|
||||
"fakeit_OperaUserAgent": fakeit.OperaUserAgent,
|
||||
"fakeit_SafariUserAgent": fakeit.SafariUserAgent,
|
||||
"fakeit_APIUserAgent": fakeit.APIUserAgent,
|
||||
|
||||
// Fakeit / HTML
|
||||
"fakeit_InputName": fakeit.InputName,
|
||||
"fakeit_Svg": func() string { return fakeit.Svg(nil) },
|
||||
|
||||
// Fakeit / Date/Time
|
||||
"fakeit_Date": fakeit.Date,
|
||||
"fakeit_PastDate": fakeit.PastDate,
|
||||
"fakeit_FutureDate": fakeit.FutureDate,
|
||||
"fakeit_DateRange": fakeit.DateRange,
|
||||
"fakeit_NanoSecond": fakeit.NanoSecond,
|
||||
"fakeit_Second": fakeit.Second,
|
||||
"fakeit_Minute": fakeit.Minute,
|
||||
"fakeit_Hour": fakeit.Hour,
|
||||
"fakeit_Month": fakeit.Month,
|
||||
"fakeit_MonthString": fakeit.MonthString,
|
||||
"fakeit_Day": fakeit.Day,
|
||||
"fakeit_WeekDay": fakeit.WeekDay,
|
||||
"fakeit_Year": fakeit.Year,
|
||||
"fakeit_TimeZone": fakeit.TimeZone,
|
||||
"fakeit_TimeZoneAbv": fakeit.TimeZoneAbv,
|
||||
"fakeit_TimeZoneFull": fakeit.TimeZoneFull,
|
||||
"fakeit_TimeZoneOffset": fakeit.TimeZoneOffset,
|
||||
"fakeit_TimeZoneRegion": fakeit.TimeZoneRegion,
|
||||
|
||||
// Fakeit / Payment
|
||||
"fakeit_Price": fakeit.Price,
|
||||
// "fakeit_CreditCard": fakeit.CreditCard() *CreditCardInfo,
|
||||
"fakeit_CreditCardCvv": fakeit.CreditCardCvv,
|
||||
"fakeit_CreditCardExp": fakeit.CreditCardExp,
|
||||
"fakeit_CreditCardNumber": func(gaps bool) string {
|
||||
return fakeit.CreditCardNumber(&gofakeit.CreditCardOptions{Gaps: gaps})
|
||||
},
|
||||
"fakeit_CreditCardType": fakeit.CreditCardType,
|
||||
// "fakeit_Currency": fakeit.Currency() *CurrencyInfo,
|
||||
"fakeit_CurrencyLong": fakeit.CurrencyLong,
|
||||
"fakeit_CurrencyShort": fakeit.CurrencyShort,
|
||||
"fakeit_AchRouting": fakeit.AchRouting,
|
||||
"fakeit_AchAccount": fakeit.AchAccount,
|
||||
"fakeit_BitcoinAddress": fakeit.BitcoinAddress,
|
||||
"fakeit_BitcoinPrivateKey": fakeit.BitcoinPrivateKey,
|
||||
"fakeit_BankName": fakeit.BankName,
|
||||
"fakeit_BankType": fakeit.BankType,
|
||||
|
||||
// Fakeit / Finance
|
||||
"fakeit_Cusip": fakeit.Cusip,
|
||||
"fakeit_Isin": fakeit.Isin,
|
||||
|
||||
// Fakeit / Company
|
||||
"fakeit_BS": fakeit.BS,
|
||||
"fakeit_Blurb": fakeit.Blurb,
|
||||
"fakeit_BuzzWord": fakeit.BuzzWord,
|
||||
"fakeit_Company": fakeit.Company,
|
||||
"fakeit_CompanySuffix": fakeit.CompanySuffix,
|
||||
// "fakeit_Job": fakeit.Job() *JobInfo,
|
||||
"fakeit_JobDescriptor": fakeit.JobDescriptor,
|
||||
"fakeit_JobLevel": fakeit.JobLevel,
|
||||
"fakeit_JobTitle": fakeit.JobTitle,
|
||||
"fakeit_Slogan": fakeit.Slogan,
|
||||
|
||||
// Fakeit / Hacker
|
||||
"fakeit_HackerAbbreviation": fakeit.HackerAbbreviation,
|
||||
"fakeit_HackerAdjective": fakeit.HackerAdjective,
|
||||
"fakeit_HackeringVerb": fakeit.HackeringVerb,
|
||||
"fakeit_HackerNoun": fakeit.HackerNoun,
|
||||
"fakeit_HackerPhrase": fakeit.HackerPhrase,
|
||||
"fakeit_HackerVerb": fakeit.HackerVerb,
|
||||
|
||||
// Fakeit / Hipster
|
||||
"fakeit_HipsterWord": fakeit.HipsterWord,
|
||||
"fakeit_HipsterSentence": fakeit.HipsterSentence,
|
||||
"fakeit_HipsterParagraph": fakeit.HipsterParagraph,
|
||||
|
||||
// Fakeit / App
|
||||
"fakeit_AppName": fakeit.AppName,
|
||||
"fakeit_AppVersion": fakeit.AppVersion,
|
||||
"fakeit_AppAuthor": fakeit.AppAuthor,
|
||||
|
||||
// Fakeit / Animal
|
||||
"fakeit_PetName": fakeit.PetName,
|
||||
"fakeit_Animal": fakeit.Animal,
|
||||
"fakeit_AnimalType": fakeit.AnimalType,
|
||||
"fakeit_FarmAnimal": fakeit.FarmAnimal,
|
||||
"fakeit_Cat": fakeit.Cat,
|
||||
"fakeit_Dog": fakeit.Dog,
|
||||
"fakeit_Bird": fakeit.Bird,
|
||||
|
||||
// Fakeit / Emoji
|
||||
"fakeit_Emoji": fakeit.Emoji,
|
||||
"fakeit_EmojiCategory": fakeit.EmojiCategory,
|
||||
"fakeit_EmojiAlias": fakeit.EmojiAlias,
|
||||
"fakeit_EmojiTag": fakeit.EmojiTag,
|
||||
"fakeit_EmojiFlag": fakeit.EmojiFlag,
|
||||
"fakeit_EmojiAnimal": fakeit.EmojiAnimal,
|
||||
"fakeit_EmojiFood": fakeit.EmojiFood,
|
||||
"fakeit_EmojiPlant": fakeit.EmojiPlant,
|
||||
"fakeit_EmojiMusic": fakeit.EmojiMusic,
|
||||
"fakeit_EmojiVehicle": fakeit.EmojiVehicle,
|
||||
"fakeit_EmojiSport": fakeit.EmojiSport,
|
||||
"fakeit_EmojiFace": fakeit.EmojiFace,
|
||||
"fakeit_EmojiHand": fakeit.EmojiHand,
|
||||
"fakeit_EmojiClothing": fakeit.EmojiClothing,
|
||||
"fakeit_EmojiLandmark": fakeit.EmojiLandmark,
|
||||
"fakeit_EmojiElectronics": fakeit.EmojiElectronics,
|
||||
"fakeit_EmojiGame": fakeit.EmojiGame,
|
||||
"fakeit_EmojiTools": fakeit.EmojiTools,
|
||||
"fakeit_EmojiWeather": fakeit.EmojiWeather,
|
||||
"fakeit_EmojiJob": fakeit.EmojiJob,
|
||||
"fakeit_EmojiPerson": fakeit.EmojiPerson,
|
||||
"fakeit_EmojiGesture": fakeit.EmojiGesture,
|
||||
"fakeit_EmojiCostume": fakeit.EmojiCostume,
|
||||
"fakeit_EmojiSentence": fakeit.EmojiSentence,
|
||||
|
||||
// Fakeit / Language
|
||||
"fakeit_Language": fakeit.Language,
|
||||
"fakeit_LanguageAbbreviation": fakeit.LanguageAbbreviation,
|
||||
"fakeit_ProgrammingLanguage": fakeit.ProgrammingLanguage,
|
||||
|
||||
// Fakeit / Number
|
||||
"fakeit_Number": fakeit.Number,
|
||||
"fakeit_Int": fakeit.Int,
|
||||
"fakeit_IntN": fakeit.IntN,
|
||||
"fakeit_Int8": fakeit.Int8,
|
||||
"fakeit_Int16": fakeit.Int16,
|
||||
"fakeit_Int32": fakeit.Int32,
|
||||
"fakeit_Int64": fakeit.Int64,
|
||||
"fakeit_Uint": fakeit.Uint,
|
||||
"fakeit_UintN": fakeit.UintN,
|
||||
"fakeit_Uint8": fakeit.Uint8,
|
||||
"fakeit_Uint16": fakeit.Uint16,
|
||||
"fakeit_Uint32": fakeit.Uint32,
|
||||
"fakeit_Uint64": fakeit.Uint64,
|
||||
"fakeit_Float32": fakeit.Float32,
|
||||
"fakeit_Float32Range": fakeit.Float32Range,
|
||||
"fakeit_Float64": fakeit.Float64,
|
||||
"fakeit_Float64Range": fakeit.Float64Range,
|
||||
// "fakeit_ShuffleInts": fakeit.ShuffleInts,
|
||||
"fakeit_RandomInt": fakeit.RandomInt,
|
||||
"fakeit_HexUint": fakeit.HexUint,
|
||||
|
||||
// Fakeit / String
|
||||
"fakeit_Digit": fakeit.Digit,
|
||||
"fakeit_DigitN": fakeit.DigitN,
|
||||
"fakeit_Letter": fakeit.Letter,
|
||||
"fakeit_LetterN": fakeit.LetterN,
|
||||
"fakeit_Lexify": fakeit.Lexify,
|
||||
"fakeit_Numerify": fakeit.Numerify,
|
||||
// "fakeit_ShuffleStrings": fakeit.ShuffleStrings,
|
||||
"fakeit_RandomString": fakeit.RandomString,
|
||||
|
||||
// Fakeit / Celebrity
|
||||
"fakeit_CelebrityActor": fakeit.CelebrityActor,
|
||||
"fakeit_CelebrityBusiness": fakeit.CelebrityBusiness,
|
||||
"fakeit_CelebritySport": fakeit.CelebritySport,
|
||||
|
||||
// Fakeit / Minecraft
|
||||
"fakeit_MinecraftOre": fakeit.MinecraftOre,
|
||||
"fakeit_MinecraftWood": fakeit.MinecraftWood,
|
||||
"fakeit_MinecraftArmorTier": fakeit.MinecraftArmorTier,
|
||||
"fakeit_MinecraftArmorPart": fakeit.MinecraftArmorPart,
|
||||
"fakeit_MinecraftWeapon": fakeit.MinecraftWeapon,
|
||||
"fakeit_MinecraftTool": fakeit.MinecraftTool,
|
||||
"fakeit_MinecraftDye": fakeit.MinecraftDye,
|
||||
"fakeit_MinecraftFood": fakeit.MinecraftFood,
|
||||
"fakeit_MinecraftAnimal": fakeit.MinecraftAnimal,
|
||||
"fakeit_MinecraftVillagerJob": fakeit.MinecraftVillagerJob,
|
||||
"fakeit_MinecraftVillagerStation": fakeit.MinecraftVillagerStation,
|
||||
"fakeit_MinecraftVillagerLevel": fakeit.MinecraftVillagerLevel,
|
||||
"fakeit_MinecraftMobPassive": fakeit.MinecraftMobPassive,
|
||||
"fakeit_MinecraftMobNeutral": fakeit.MinecraftMobNeutral,
|
||||
"fakeit_MinecraftMobHostile": fakeit.MinecraftMobHostile,
|
||||
"fakeit_MinecraftMobBoss": fakeit.MinecraftMobBoss,
|
||||
"fakeit_MinecraftBiome": fakeit.MinecraftBiome,
|
||||
"fakeit_MinecraftWeather": fakeit.MinecraftWeather,
|
||||
|
||||
// Fakeit / Book
|
||||
// "fakeit_Book": fakeit.Book() *BookInfo,
|
||||
"fakeit_BookTitle": fakeit.BookTitle,
|
||||
"fakeit_BookAuthor": fakeit.BookAuthor,
|
||||
"fakeit_BookGenre": fakeit.BookGenre,
|
||||
|
||||
// Fakeit / Movie
|
||||
// "fakeit_Movie": fakeit.Movie() *MovieInfo,
|
||||
"fakeit_MovieName": fakeit.MovieName,
|
||||
"fakeit_MovieGenre": fakeit.MovieGenre,
|
||||
|
||||
// Fakeit / Error
|
||||
"fakeit_Error": func() string { return fakeit.Error().Error() },
|
||||
"fakeit_ErrorDatabase": func() string { return fakeit.ErrorDatabase().Error() },
|
||||
"fakeit_ErrorGRPC": func() string { return fakeit.ErrorGRPC().Error() },
|
||||
"fakeit_ErrorHTTP": func() string { return fakeit.ErrorHTTP().Error() },
|
||||
"fakeit_ErrorHTTPClient": func() string { return fakeit.ErrorHTTPClient().Error() },
|
||||
"fakeit_ErrorHTTPServer": func() string { return fakeit.ErrorHTTPServer().Error() },
|
||||
// "fakeit_ErrorInput": func() string { return fakeit.ErrorInput().Error() },
|
||||
"fakeit_ErrorRuntime": func() string { return fakeit.ErrorRuntime().Error() },
|
||||
|
||||
// Fakeit / School
|
||||
"fakeit_School": fakeit.School,
|
||||
|
||||
// Fakeit / Song
|
||||
// "fakeit_Song": fakeit.Song() *SongInfo,
|
||||
"fakeit_SongName": fakeit.SongName,
|
||||
"fakeit_SongArtist": fakeit.SongArtist,
|
||||
"fakeit_SongGenre": fakeit.SongGenre,
|
||||
}
|
||||
}
|
||||
|
||||
// BodyTemplateFuncMapData carries state shared between body template
// functions and the request builder: the multipart/form-data Content-Type
// produced by the "body_FormData" template function is recorded here so the
// caller can set the matching request header after rendering the body.
// NOTE(review): the field name misspells "ContentType" as "ContenType";
// kept as-is because the exported accessor names are derived from it.
type BodyTemplateFuncMapData struct {
	// formDataContenType holds the "multipart/form-data; boundary=..."
	// value written by body_FormData; empty until that function runs.
	formDataContenType string
}
|
||||
|
||||
// GetFormDataContenType returns the multipart Content-Type (including the
// boundary) recorded by the body_FormData template function, or the empty
// string if that function has not run.
// NOTE(review): Go convention would name this FormDataContentType (no Get
// prefix, corrected spelling); kept unchanged for caller compatibility.
func (data BodyTemplateFuncMapData) GetFormDataContenType() string {
	return data.formDataContenType
}
|
||||
|
||||
// ClearFormDataContenType resets the recorded multipart Content-Type so a
// stale boundary value is not reused by a subsequent request.
func (data *BodyTemplateFuncMapData) ClearFormDataContenType() {
	data.formDataContenType = ""
}
|
||||
|
||||
func NewDefaultBodyTemplateFuncMap(randSource rand.Source, data *BodyTemplateFuncMapData) template.FuncMap {
|
||||
funcMap := NewDefaultTemplateFuncMap(randSource)
|
||||
|
||||
if data != nil {
|
||||
funcMap["body_FormData"] = func(kv map[string]string) string {
|
||||
var multipartData bytes.Buffer
|
||||
writer := multipart.NewWriter(&multipartData)
|
||||
data.formDataContenType = writer.FormDataContentType()
|
||||
|
||||
for k, v := range kv {
|
||||
_ = writer.WriteField(k, v)
|
||||
}
|
||||
|
||||
_ = writer.Close()
|
||||
return multipartData.String()
|
||||
}
|
||||
}
|
||||
|
||||
return funcMap
|
||||
}
|
||||
|
||||
func hasTemplateActions(tmpl *template.Template) bool {
|
||||
if tmpl.Tree == nil || tmpl.Root == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, node := range tmpl.Root.Nodes {
|
||||
switch node.Type() {
|
||||
case parse.NodeAction, parse.NodeIf, parse.NodeRange,
|
||||
parse.NodeWith, parse.NodeTemplate:
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
Reference in New Issue
Block a user