mirror of
https://github.com/aykhans/sarin.git
synced 2026-02-28 06:49:13 +00:00
Compare commits
102 Commits
v0.6.2
...
665be5d98a
| Author | SHA1 | Date | |
|---|---|---|---|
| 665be5d98a | |||
| d346067e8a | |||
| a3e20cd3d3 | |||
| 6d921cf8e3 | |||
|
|
d8b0a1e6a3 | ||
| b21d97192c | |||
| f0606a0f82 | |||
| 3be8ff218c | |||
| 7cb49195f8 | |||
|
|
a154215495 | ||
| c1584eb47b | |||
| 6a713ef241 | |||
| 6dafc082ed | |||
| e83eacf380 | |||
| c2ba1844ab | |||
|
|
054e5fd253 | ||
| 533ced4b54 | |||
| c3ea3a34ad | |||
|
|
c02a079d2a | ||
|
|
f78942bfb6 | ||
| 1369cb9f09 | |||
| 18662e6a64 | |||
| 81f08edc8d | |||
| a9738c0a11 | |||
| 76225884e6 | |||
| a512f3605d | |||
|
|
635c33008b | ||
| 3f2147ec6c | |||
| 92d0c5e003 | |||
| 27bc8f2e96 | |||
| 46c6fa9912 | |||
| a3d311009f | |||
| 710f4c6cb5 | |||
| 2d7ba34cb8 | |||
| 25d4762a3c | |||
| 361d423651 | |||
| ffa724fae7 | |||
| 7930be490d | |||
| e6c54e9cb2 | |||
| b32f567de7 | |||
| b6e85d9443 | |||
| 827e3535cd | |||
| 7ecf534d87 | |||
|
|
17ad5fadb9 | ||
| 7fb59a7989 | |||
| 527909c882 | |||
| 4459675efa | |||
|
|
604af355e6 | ||
| 7d4267c4c2 | |||
|
|
845ab7296c | ||
| 49d004ff06 | |||
| 045deb6120 | |||
| 075ef26203 | |||
|
|
946afbb2c3 | ||
| aacb33cfa5 | |||
| 4a7db48351 | |||
| b73087dce5 | |||
|
|
20a46feab8 | ||
| 0adde6e04e | |||
| ca50de4e2f | |||
| c99e7c66d9 | |||
| 280e5f5c4e | |||
| 47dfad6046 | |||
| 5bb644d55f | |||
| 9152eefdc5 | |||
| a8cd253c63 | |||
| 9aaf2db74d | |||
| 5c3e254e1e | |||
| e5c681a22b | |||
| 79668e4ece | |||
| f248c2af96 | |||
| 924bd819ee | |||
| e567155eb1 | |||
| 23c74bdbb1 | |||
| addf92df91 | |||
| 6aeda3706b | |||
| dc1cd05714 | |||
| 2b9d0520b0 | |||
| bea2e7c040 | |||
|
|
b52b336a52 | ||
| c927e31c49 | |||
| d8e6f532a8 | |||
| cf5cd23d97 | |||
|
|
350ff4d66d | ||
| cb8898d20e | |||
| a552d1c9f9 | |||
| 35263f1dd6 | |||
| 930e173a6a | |||
| bea2a81afa | |||
| 53ed486b23 | |||
| 0b9c32a09d | |||
| 42d5617e3f | |||
| e80ae9ab24 | |||
|
|
86a6f7814b | ||
| 09034b5f9e | |||
| f1ca2041c3 | |||
| f5a29a2657 | |||
| 439f66eb87 | |||
| 415d0130ce | |||
| abaa8e90b2 | |||
| 046ce74cd9 | |||
| 681cafc213 |
@@ -1,11 +0,0 @@
|
|||||||
.github
|
|
||||||
assets
|
|
||||||
binaries
|
|
||||||
dodo
|
|
||||||
.git
|
|
||||||
.gitignore
|
|
||||||
.golangci.yml
|
|
||||||
README.md
|
|
||||||
LICENSE
|
|
||||||
config.json
|
|
||||||
build.sh
|
|
||||||
2
.github/FUNDING.yml
vendored
Normal file
2
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
buy_me_a_coffee: aykhan
|
||||||
|
custom: https://commerce.coinbase.com/checkout/0f33d2fb-54a6-44f5-8783-006ebf70d1a0
|
||||||
25
.github/workflows/golangci-lint.yml
vendored
25
.github/workflows/golangci-lint.yml
vendored
@@ -1,25 +0,0 @@
|
|||||||
name: golangci-lint
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- main
|
|
||||||
pull_request:
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
golangci:
|
|
||||||
name: lint
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- uses: actions/setup-go@v5
|
|
||||||
with:
|
|
||||||
go-version: stable
|
|
||||||
- name: golangci-lint
|
|
||||||
uses: golangci/golangci-lint-action@v6
|
|
||||||
with:
|
|
||||||
version: v1.64
|
|
||||||
args: --timeout=10m --config=.golangci.yml
|
|
||||||
27
.github/workflows/lint.yaml
vendored
Normal file
27
.github/workflows/lint.yaml
vendored
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
name: golangci-lint
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
pull_request:
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
golangci:
|
||||||
|
name: lint
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v5
|
||||||
|
- uses: actions/setup-go@v6
|
||||||
|
with:
|
||||||
|
go-version: 1.26.0
|
||||||
|
- name: go fix
|
||||||
|
run: |
|
||||||
|
go fix ./...
|
||||||
|
git diff --exit-code
|
||||||
|
- name: golangci-lint
|
||||||
|
uses: golangci/golangci-lint-action@v9
|
||||||
|
with:
|
||||||
|
version: v2.9.0
|
||||||
86
.github/workflows/publish-docker-image.yml
vendored
86
.github/workflows/publish-docker-image.yml
vendored
@@ -1,86 +0,0 @@
|
|||||||
name: publish-docker-image
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
tags:
|
|
||||||
# Match stable and pre versions, such as 'v1.0.0', 'v0.23.0-a', 'v0.23.0-a.2', 'v0.23.0-b', 'v0.23.0-b.3'
|
|
||||||
- "v*.*.*"
|
|
||||||
- "v*.*.*-a"
|
|
||||||
- "v*.*.*-a.*"
|
|
||||||
- "v*.*.*-b"
|
|
||||||
- "v*.*.*-b.*"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build-and-push-stable-image:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
packages: write
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Set up QEMU
|
|
||||||
uses: docker/setup-qemu-action@v3
|
|
||||||
|
|
||||||
- name: Extract build args
|
|
||||||
# Extract version number and check if it's an pre version
|
|
||||||
run: |
|
|
||||||
if [[ "${GITHUB_REF_NAME}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
|
||||||
echo "PRE_RELEASE=false" >> $GITHUB_ENV
|
|
||||||
else
|
|
||||||
echo "PRE_RELEASE=true" >> $GITHUB_ENV
|
|
||||||
fi
|
|
||||||
echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
username: aykhans
|
|
||||||
password: ${{ secrets.DOCKER_TOKEN }}
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
id: buildx
|
|
||||||
uses: docker/setup-buildx-action@v3
|
|
||||||
with:
|
|
||||||
install: true
|
|
||||||
version: v0.9.1
|
|
||||||
|
|
||||||
# Metadata for stable versions
|
|
||||||
- name: Docker meta for stable
|
|
||||||
id: meta-stable
|
|
||||||
if: env.PRE_RELEASE == 'false'
|
|
||||||
uses: docker/metadata-action@v5
|
|
||||||
with:
|
|
||||||
images: |
|
|
||||||
aykhans/dodo
|
|
||||||
tags: |
|
|
||||||
type=semver,pattern={{version}},value=${{ env.VERSION }}
|
|
||||||
type=raw,value=stable
|
|
||||||
flavor: |
|
|
||||||
latest=true
|
|
||||||
labels: |
|
|
||||||
org.opencontainers.image.version=${{ env.VERSION }}
|
|
||||||
|
|
||||||
# Metadata for pre versions
|
|
||||||
- name: Docker meta for pre
|
|
||||||
id: meta-pre
|
|
||||||
if: env.PRE_RELEASE == 'true'
|
|
||||||
uses: docker/metadata-action@v5
|
|
||||||
with:
|
|
||||||
images: |
|
|
||||||
aykhans/dodo
|
|
||||||
tags: |
|
|
||||||
type=raw,value=${{ env.VERSION }}
|
|
||||||
labels: |
|
|
||||||
org.opencontainers.image.version=${{ env.VERSION }}
|
|
||||||
|
|
||||||
- name: Build and Push
|
|
||||||
id: docker_build
|
|
||||||
uses: docker/build-push-action@v6
|
|
||||||
with:
|
|
||||||
context: ./
|
|
||||||
file: ./Dockerfile
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
push: true
|
|
||||||
tags: ${{ steps.meta-stable.outputs.tags || steps.meta-pre.outputs.tags }}
|
|
||||||
labels: ${{ steps.meta-stable.outputs.labels || steps.meta-pre.outputs.labels }}
|
|
||||||
98
.github/workflows/release.yaml
vendored
Normal file
98
.github/workflows/release.yaml
vendored
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
name: Build and Release
|
||||||
|
|
||||||
|
on:
|
||||||
|
release:
|
||||||
|
types: [created]
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
tag:
|
||||||
|
description: "Release tag (e.g., v1.0.0)"
|
||||||
|
required: true
|
||||||
|
build_binaries:
|
||||||
|
description: "Build and upload binaries"
|
||||||
|
type: boolean
|
||||||
|
default: true
|
||||||
|
build_docker:
|
||||||
|
description: "Build and push Docker image"
|
||||||
|
type: boolean
|
||||||
|
default: true
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
name: Build binaries
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
ref: ${{ inputs.tag || github.ref }}
|
||||||
|
|
||||||
|
- name: Set build metadata
|
||||||
|
run: |
|
||||||
|
echo "VERSION=$(git describe --tags --always)" >> $GITHUB_ENV
|
||||||
|
echo "GIT_COMMIT=$(git rev-parse HEAD)" >> $GITHUB_ENV
|
||||||
|
echo "GO_VERSION=1.26.0" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Set up Go
|
||||||
|
if: github.event_name == 'release' || inputs.build_binaries
|
||||||
|
uses: actions/setup-go@v6
|
||||||
|
with:
|
||||||
|
go-version: ${{ env.GO_VERSION }}
|
||||||
|
cache: true
|
||||||
|
|
||||||
|
- name: Build binaries
|
||||||
|
if: github.event_name == 'release' || inputs.build_binaries
|
||||||
|
run: |
|
||||||
|
LDFLAGS="-X 'go.aykhans.me/sarin/internal/version.Version=${{ env.VERSION }}' \
|
||||||
|
-X 'go.aykhans.me/sarin/internal/version.GitCommit=${{ env.GIT_COMMIT }}' \
|
||||||
|
-X 'go.aykhans.me/sarin/internal/version.BuildDate=$(date -u +%Y-%m-%dT%H:%M:%SZ)' \
|
||||||
|
-X 'go.aykhans.me/sarin/internal/version.GoVersion=$(go version)' \
|
||||||
|
-s -w"
|
||||||
|
|
||||||
|
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags "$LDFLAGS" -o ./sarin-linux-amd64 ./cmd/cli/main.go
|
||||||
|
CGO_ENABLED=0 GOOS=linux GOARCH=arm64 go build -ldflags "$LDFLAGS" -o ./sarin-linux-arm64 ./cmd/cli/main.go
|
||||||
|
CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 go build -ldflags "$LDFLAGS" -o ./sarin-darwin-amd64 ./cmd/cli/main.go
|
||||||
|
CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 go build -ldflags "$LDFLAGS" -o ./sarin-darwin-arm64 ./cmd/cli/main.go
|
||||||
|
CGO_ENABLED=0 GOOS=windows GOARCH=amd64 go build -ldflags "$LDFLAGS" -o ./sarin-windows-amd64.exe ./cmd/cli/main.go
|
||||||
|
CGO_ENABLED=0 GOOS=windows GOARCH=arm64 go build -ldflags "$LDFLAGS" -o ./sarin-windows-arm64.exe ./cmd/cli/main.go
|
||||||
|
|
||||||
|
- name: Upload Release Assets
|
||||||
|
if: github.event_name == 'release' || inputs.build_binaries
|
||||||
|
uses: softprops/action-gh-release@v2
|
||||||
|
with:
|
||||||
|
tag_name: ${{ inputs.tag || github.ref_name }}
|
||||||
|
files: ./sarin-*
|
||||||
|
|
||||||
|
- name: Set up QEMU
|
||||||
|
if: github.event_name == 'release' || inputs.build_docker
|
||||||
|
uses: docker/setup-qemu-action@v3
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
if: github.event_name == 'release' || inputs.build_docker
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: Login to Docker Hub
|
||||||
|
if: github.event_name == 'release' || inputs.build_docker
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Build and push Docker image
|
||||||
|
if: github.event_name == 'release' || inputs.build_docker
|
||||||
|
uses: docker/build-push-action@v6
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
platforms: linux/amd64,linux/arm64
|
||||||
|
push: true
|
||||||
|
build-args: |
|
||||||
|
VERSION=${{ env.VERSION }}
|
||||||
|
GIT_COMMIT=${{ env.GIT_COMMIT }}
|
||||||
|
GO_VERSION=${{ env.GO_VERSION }}
|
||||||
|
tags: |
|
||||||
|
${{ secrets.DOCKERHUB_USERNAME }}/sarin:${{ env.VERSION }}
|
||||||
|
${{ secrets.DOCKERHUB_USERNAME }}/sarin:latest
|
||||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -1,2 +1 @@
|
|||||||
dodo
|
bin/*
|
||||||
binaries/
|
|
||||||
|
|||||||
100
.golangci.yaml
Normal file
100
.golangci.yaml
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
version: "2"
|
||||||
|
|
||||||
|
run:
|
||||||
|
go: "1.26"
|
||||||
|
concurrency: 12
|
||||||
|
|
||||||
|
linters:
|
||||||
|
default: none
|
||||||
|
enable:
|
||||||
|
- asciicheck
|
||||||
|
- errcheck
|
||||||
|
- govet
|
||||||
|
- ineffassign
|
||||||
|
- misspell
|
||||||
|
- nakedret
|
||||||
|
- nolintlint
|
||||||
|
- prealloc
|
||||||
|
- reassign
|
||||||
|
- staticcheck
|
||||||
|
- unconvert
|
||||||
|
- unused
|
||||||
|
- whitespace
|
||||||
|
- bidichk
|
||||||
|
- bodyclose
|
||||||
|
- containedctx
|
||||||
|
- contextcheck
|
||||||
|
- copyloopvar
|
||||||
|
- embeddedstructfieldcheck
|
||||||
|
- errorlint
|
||||||
|
- exptostd
|
||||||
|
- fatcontext
|
||||||
|
- funcorder
|
||||||
|
- gocheckcompilerdirectives
|
||||||
|
- gocritic
|
||||||
|
- gomoddirectives
|
||||||
|
- gosec
|
||||||
|
- gosmopolitan
|
||||||
|
- grouper
|
||||||
|
- importas
|
||||||
|
- inamedparam
|
||||||
|
- intrange
|
||||||
|
- loggercheck
|
||||||
|
- mirror
|
||||||
|
- musttag
|
||||||
|
- perfsprint
|
||||||
|
- predeclared
|
||||||
|
- tagalign
|
||||||
|
- tagliatelle
|
||||||
|
- testifylint
|
||||||
|
- thelper
|
||||||
|
- tparallel
|
||||||
|
- unparam
|
||||||
|
- usestdlibvars
|
||||||
|
- usetesting
|
||||||
|
- wastedassign
|
||||||
|
|
||||||
|
settings:
|
||||||
|
staticcheck:
|
||||||
|
checks:
|
||||||
|
- "all"
|
||||||
|
- "-S1002"
|
||||||
|
- "-ST1000"
|
||||||
|
varnamelen:
|
||||||
|
ignore-decls:
|
||||||
|
- w http.ResponseWriter
|
||||||
|
- wg sync.WaitGroup
|
||||||
|
- wg *sync.WaitGroup
|
||||||
|
|
||||||
|
exclusions:
|
||||||
|
rules:
|
||||||
|
- path: _test\.go$
|
||||||
|
linters:
|
||||||
|
- errorlint
|
||||||
|
- forcetypeassert
|
||||||
|
- perfsprint
|
||||||
|
- errcheck
|
||||||
|
- gosec
|
||||||
|
|
||||||
|
- path: _test\.go$
|
||||||
|
linters:
|
||||||
|
- staticcheck
|
||||||
|
text: "SA5011"
|
||||||
|
|
||||||
|
formatters:
|
||||||
|
enable:
|
||||||
|
- gofmt
|
||||||
|
|
||||||
|
settings:
|
||||||
|
gofmt:
|
||||||
|
# Simplify code: gofmt with `-s` option.
|
||||||
|
# Default: true
|
||||||
|
simplify: false
|
||||||
|
# Apply the rewrite rules to the source before reformatting.
|
||||||
|
# https://pkg.go.dev/cmd/gofmt
|
||||||
|
# Default: []
|
||||||
|
rewrite-rules:
|
||||||
|
- pattern: "interface{}"
|
||||||
|
replacement: "any"
|
||||||
|
- pattern: "a[b:len(a)]"
|
||||||
|
replacement: "a[b:]"
|
||||||
@@ -1,27 +0,0 @@
|
|||||||
run:
|
|
||||||
go: "1.24"
|
|
||||||
concurrency: 8
|
|
||||||
timeout: 10m
|
|
||||||
|
|
||||||
linters:
|
|
||||||
disable-all: true
|
|
||||||
enable:
|
|
||||||
- asasalint
|
|
||||||
- asciicheck
|
|
||||||
- errcheck
|
|
||||||
- gofmt
|
|
||||||
- goimports
|
|
||||||
- gomodguard
|
|
||||||
- goprintffuncname
|
|
||||||
- govet
|
|
||||||
- ineffassign
|
|
||||||
- misspell
|
|
||||||
- nakedret
|
|
||||||
- nolintlint
|
|
||||||
- prealloc
|
|
||||||
- reassign
|
|
||||||
- staticcheck
|
|
||||||
- typecheck
|
|
||||||
- unconvert
|
|
||||||
- unused
|
|
||||||
- whitespace
|
|
||||||
29
Dockerfile
29
Dockerfile
@@ -1,17 +1,32 @@
|
|||||||
FROM golang:1.24-alpine AS builder
|
ARG GO_VERSION=1.26.0
|
||||||
|
|
||||||
|
FROM docker.io/library/golang:${GO_VERSION}-alpine AS builder
|
||||||
|
|
||||||
|
ARG VERSION=unknown
|
||||||
|
ARG GIT_COMMIT=unknown
|
||||||
|
|
||||||
WORKDIR /src
|
WORKDIR /src
|
||||||
|
|
||||||
COPY go.mod go.sum ./
|
RUN --mount=type=bind,source=./go.mod,target=./go.mod \
|
||||||
RUN go mod download
|
--mount=type=bind,source=./go.sum,target=./go.sum \
|
||||||
COPY . .
|
go mod download
|
||||||
|
|
||||||
RUN go build -ldflags "-s -w" -o dodo
|
RUN --mount=type=bind,source=./,target=./ \
|
||||||
|
CGO_ENABLED=0 go build \
|
||||||
|
-ldflags "-X 'go.aykhans.me/sarin/internal/version.Version=${VERSION}' \
|
||||||
|
-X 'go.aykhans.me/sarin/internal/version.GitCommit=${GIT_COMMIT}' \
|
||||||
|
-X 'go.aykhans.me/sarin/internal/version.BuildDate=$(date -u +%Y-%m-%dT%H:%M:%SZ)' \
|
||||||
|
-X 'go.aykhans.me/sarin/internal/version.GoVersion=$(go version)' \
|
||||||
|
-s -w" \
|
||||||
|
-o /sarin ./cmd/cli/main.go
|
||||||
|
|
||||||
FROM gcr.io/distroless/static-debian12:latest
|
FROM gcr.io/distroless/static-debian12:latest
|
||||||
|
|
||||||
|
ENV TERM=xterm-256color
|
||||||
|
ENV COLORTERM=truecolor
|
||||||
|
|
||||||
WORKDIR /
|
WORKDIR /
|
||||||
|
|
||||||
COPY --from=builder /src/dodo /dodo
|
COPY --from=builder /sarin /sarin
|
||||||
|
|
||||||
ENTRYPOINT ["./dodo"]
|
ENTRYPOINT ["./sarin"]
|
||||||
|
|||||||
9
Makefile
9
Makefile
@@ -1,9 +0,0 @@
|
|||||||
lint:
|
|
||||||
golangci-lint run
|
|
||||||
|
|
||||||
build:
|
|
||||||
go build -ldflags "-s -w" -o "./dodo"
|
|
||||||
|
|
||||||
build-all:
|
|
||||||
rm -rf ./binaries
|
|
||||||
./build.sh
|
|
||||||
294
README.md
294
README.md
@@ -1,263 +1,123 @@
|
|||||||
<h1 align="center">Dodo - A Fast and Easy-to-Use HTTP Benchmarking Tool</h1>
|
<div align="center">
|
||||||
|
|
||||||
|
## Sarin is a high-performance HTTP load testing tool built with Go and fasthttp.
|
||||||
|
|
||||||
|
[](https://pkg.go.dev/go.aykhans.me/sarin)
|
||||||
|
[](https://goreportcard.com/report/go.aykhans.me/sarin)
|
||||||
|
[](https://opensource.org/licenses/MIT)
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<img width="30%" height="30%" src="https://ftp.aykhans.me/web/client/pubshares/VzPtSHS7yPQT7ngoZzZSNU/browse?path=%2Fdodo.png">
|
<a href="#installation">Install</a> •
|
||||||
|
<a href="#quick-start">Quick Start</a> •
|
||||||
|
<a href="docs/examples.md">Examples</a> •
|
||||||
|
<a href="docs/configuration.md">Configuration</a> •
|
||||||
|
<a href="docs/templating.md">Templating</a>
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
## Table of Contents
|
## Overview
|
||||||
|
|
||||||
- [Installation](#installation)
|
Sarin is designed for efficient HTTP load testing with minimal resource consumption. It prioritizes simplicity—features like templating add zero overhead when unused.
|
||||||
- [Using Docker (Recommended)](#using-docker-recommended)
|
|
||||||
- [Using Pre-built Binaries](#using-pre-built-binaries)
|
| ✅ Supported | ❌ Not Supported |
|
||||||
- [Building from Source](#building-from-source)
|
| ---------------------------------------------------------- | ------------------------------- |
|
||||||
- [Usage](#usage)
|
| High-performance with low memory footprint | Detailed response body analysis |
|
||||||
- [1. CLI Usage](#1-cli-usage)
|
| Long-running duration/count based tests | Extensive response statistics |
|
||||||
- [2. Config File Usage](#2-config-file-usage)
|
| Dynamic requests via 320+ template functions | Web UI or complex TUI |
|
||||||
- [2.1 JSON Example](#21-json-example)
|
| Request scripting with Lua and JavaScript | Distributed load testing |
|
||||||
- [2.2 YAML/YML Example](#22-yamlyml-example)
|
| Multiple proxy protocols<br>(HTTP, HTTPS, SOCKS5, SOCKS5H) | HTTP/2, HTTP/3, WebSocket, gRPC |
|
||||||
- [3. CLI & Config File Combination](#3-cli--config-file-combination)
|
| Flexible config (CLI, ENV, YAML) | Plugins / extensions ecosystem |
|
||||||
- [Config Parameters Reference](#config-parameters-reference)
|
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
### Using Docker (Recommended)
|
### Docker (Recommended)
|
||||||
|
|
||||||
Pull the latest Dodo image from Docker Hub:
|
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
docker pull aykhans/dodo:latest
|
docker pull aykhans/sarin:latest
|
||||||
```
|
```
|
||||||
|
|
||||||
To use Dodo with Docker and a local config file, mount the config file as a volume and pass it as an argument:
|
With a local config file:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
docker run -v /path/to/config.json:/config.json aykhans/dodo -f /config.json
|
docker run --rm -it -v /path/to/config.yaml:/config.yaml aykhans/sarin -f /config.yaml
|
||||||
```
|
```
|
||||||
|
|
||||||
If you're using a remote config file via URL, you don't need to mount a volume:
|
With a remote config file:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
docker run aykhans/dodo -f https://raw.githubusercontent.com/aykhans/dodo/main/config.yaml
|
docker run --rm -it aykhans/sarin -f https://example.com/config.yaml
|
||||||
```
|
```
|
||||||
|
|
||||||
### Using Pre-built Binaries
|
### Pre-built Binaries
|
||||||
|
|
||||||
Download the latest binaries from the [releases](https://github.com/aykhans/dodo/releases) section.
|
Download the latest binaries from the [releases](https://github.com/aykhans/sarin/releases) page.
|
||||||
|
|
||||||
### Building from Source
|
### Building from Source
|
||||||
|
|
||||||
To build Dodo from source, ensure you have [Go 1.24+](https://golang.org/dl/) installed.
|
Requires [Go 1.26+](https://golang.org/dl/).
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
go install -ldflags "-s -w" github.com/aykhans/dodo@latest
|
git clone https://github.com/aykhans/sarin.git && cd sarin
|
||||||
|
|
||||||
|
CGO_ENABLED=0 go build \
|
||||||
|
-ldflags "-X 'go.aykhans.me/sarin/internal/version.Version=dev' \
|
||||||
|
-X 'go.aykhans.me/sarin/internal/version.GitCommit=$(git rev-parse HEAD)' \
|
||||||
|
-X 'go.aykhans.me/sarin/internal/version.BuildDate=$(date -u +%Y-%m-%dT%H:%M:%SZ)' \
|
||||||
|
-X 'go.aykhans.me/sarin/internal/version.GoVersion=$(go version)' \
|
||||||
|
-s -w" \
|
||||||
|
-o sarin ./cmd/cli/main.go
|
||||||
```
|
```
|
||||||
|
|
||||||
## Usage
|
## Quick Start
|
||||||
|
|
||||||
Dodo supports CLI arguments, configuration files (JSON/YAML), or a combination of both. If both are used, CLI arguments take precedence.
|
Send 10,000 GET requests with 50 concurrent connections and a random User-Agent for each request:
|
||||||
|
|
||||||
### 1. CLI Usage
|
|
||||||
|
|
||||||
Send 1000 GET requests to https://example.com with 10 parallel dodos (threads), each with a timeout of 2 seconds, within a maximum duration of 1 minute:
|
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
dodo -u https://example.com -m GET -d 10 -r 1000 -o 1m -t 2s
|
sarin -U http://example.com -r 10_000 -c 50 -H "User-Agent: {{ fakeit_UserAgent }}"
|
||||||
```
|
```
|
||||||
|
|
||||||
With Docker:
|
Run a 5-minute duration-based test:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
docker run --rm -i aykhans/dodo -u https://example.com -m GET -d 10 -r 1000 -o 1m -t 2s
|
sarin -U http://example.com -d 5m -c 100
|
||||||
```
|
```
|
||||||
|
|
||||||
### 2. Config File Usage
|
Use a YAML config file:
|
||||||
|
|
||||||
Send 1000 GET requests to https://example.com with 10 parallel dodos (threads), each with a timeout of 800 milliseconds, within a maximum duration of 250 seconds:
|
|
||||||
|
|
||||||
#### 2.1 JSON Example
|
|
||||||
|
|
||||||
```jsonc
|
|
||||||
{
|
|
||||||
"method": "GET",
|
|
||||||
"url": "https://example.com",
|
|
||||||
"yes": false,
|
|
||||||
"timeout": "800ms",
|
|
||||||
"dodos": 10,
|
|
||||||
"requests": 1000,
|
|
||||||
"duration": "250s",
|
|
||||||
|
|
||||||
"params": [
|
|
||||||
// A random value will be selected from the list for first "key1" param on each request
|
|
||||||
// And always "value" for second "key1" param on each request
|
|
||||||
// e.g. "?key1=value2&key1=value"
|
|
||||||
{ "key1": ["value1", "value2", "value3", "value4"] },
|
|
||||||
{ "key1": "value" },
|
|
||||||
|
|
||||||
// A random value will be selected from the list for param "key2" on each request
|
|
||||||
// e.g. "?key2=value2"
|
|
||||||
{ "key2": ["value1", "value2"] },
|
|
||||||
],
|
|
||||||
|
|
||||||
"headers": [
|
|
||||||
// A random value will be selected from the list for first "key1" header on each request
|
|
||||||
// And always "value" for second "key1" header on each request
|
|
||||||
// e.g. "key1: value3", "key1: value"
|
|
||||||
{ "key1": ["value1", "value2", "value3", "value4"] },
|
|
||||||
{ "key1": "value" },
|
|
||||||
|
|
||||||
// A random value will be selected from the list for header "key2" on each request
|
|
||||||
// e.g. "key2: value2"
|
|
||||||
{ "key2": ["value1", "value2"] },
|
|
||||||
],
|
|
||||||
|
|
||||||
"cookies": [
|
|
||||||
// A random value will be selected from the list for first "key1" cookie on each request
|
|
||||||
// And always "value" for second "key1" cookie on each request
|
|
||||||
// e.g. "key1=value4; key1=value"
|
|
||||||
{ "key1": ["value1", "value2", "value3", "value4"] },
|
|
||||||
{ "key1": "value" },
|
|
||||||
|
|
||||||
// A random value will be selected from the list for cookie "key2" on each request
|
|
||||||
// e.g. "key2=value1"
|
|
||||||
{ "key2": ["value1", "value2"] },
|
|
||||||
],
|
|
||||||
|
|
||||||
"body": "body-text",
|
|
||||||
// OR
|
|
||||||
// A random body value will be selected from the list for each request
|
|
||||||
"body": ["body-text1", "body-text2", "body-text3"],
|
|
||||||
|
|
||||||
"proxy": "http://example.com:8080",
|
|
||||||
// OR
|
|
||||||
// A random proxy will be selected from the list for each request
|
|
||||||
"proxy": [
|
|
||||||
"http://example.com:8080",
|
|
||||||
"http://username:password@example.com:8080",
|
|
||||||
"socks5://example.com:8080",
|
|
||||||
"socks5h://example.com:8080",
|
|
||||||
],
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
dodo -f /path/config.json
|
sarin -f config.yaml
|
||||||
# OR
|
|
||||||
dodo -f https://example.com/config.json
|
|
||||||
```
|
```
|
||||||
|
|
||||||
With Docker:
|
For more usage examples, see the **[Examples Guide](docs/examples.md)**.
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
Sarin supports environment variables, CLI flags, and YAML files. When the same option is specified in multiple sources, the following priority order applies:
|
||||||
|
|
||||||
|
```
|
||||||
|
CLI Flags (Highest) > YAML > Environment Variables (Lowest)
|
||||||
|
```
|
||||||
|
|
||||||
|
For detailed documentation on all configuration options (URL, method, timeout, concurrency, headers, cookies, proxy, etc.), see the **[Configuration Guide](docs/configuration.md)**.
|
||||||
|
|
||||||
|
## Templating
|
||||||
|
|
||||||
|
Sarin supports Go templates in URL paths, methods, bodies, headers, params, cookies, and values. Use the 320+ built-in functions to generate dynamic data for each request.
|
||||||
|
|
||||||
|
**Example:**
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
docker run --rm -i -v /path/to/config.json:/config.json aykhans/dodo
|
sarin -U "http://example.com/users/{{ fakeit_UUID }}" -r 1000 -c 10 \
|
||||||
# OR
|
-V "REQUEST_ID={{ fakeit_UUID }}" \
|
||||||
docker run --rm -i aykhans/dodo -f https://example.com/config.json
|
-H "X-Request-ID: {{ .Values.REQUEST_ID }}" \
|
||||||
|
-B '{"request_id": "{{ .Values.REQUEST_ID }}"}'
|
||||||
```
|
```
|
||||||
|
|
||||||
#### 2.2 YAML/YML Example
|
For the complete templating guide and functions reference, see the **[Templating Guide](docs/templating.md)**.
|
||||||
|
|
||||||
```yaml
|
## License
|
||||||
method: "GET"
|
|
||||||
url: "https://example.com"
|
|
||||||
yes: false
|
|
||||||
timeout: "800ms"
|
|
||||||
dodos: 10
|
|
||||||
requests: 1000
|
|
||||||
duration: "250s"
|
|
||||||
|
|
||||||
params:
|
This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details.
|
||||||
# A random value will be selected from the list for first "key1" param on each request
|
|
||||||
# And always "value" for second "key1" param on each request
|
|
||||||
# e.g. "?key1=value2&key1=value"
|
|
||||||
- key1: ["value1", "value2", "value3", "value4"]
|
|
||||||
- key1: "value"
|
|
||||||
|
|
||||||
# A random value will be selected from the list for param "key2" on each request
|
|
||||||
# e.g. "?key2=value2"
|
|
||||||
- key2: ["value1", "value2"]
|
|
||||||
|
|
||||||
headers:
|
|
||||||
# A random value will be selected from the list for first "key1" header on each request
|
|
||||||
# And always "value" for second "key1" header on each request
|
|
||||||
# e.g. "key1: value3", "key1: value"
|
|
||||||
- key1: ["value1", "value2", "value3", "value4"]
|
|
||||||
- key1: "value"
|
|
||||||
|
|
||||||
# A random value will be selected from the list for header "key2" on each request
|
|
||||||
# e.g. "key2: value2"
|
|
||||||
- key2: ["value1", "value2"]
|
|
||||||
|
|
||||||
cookies:
|
|
||||||
# A random value will be selected from the list for first "key1" cookie on each request
|
|
||||||
# And always "value" for second "key1" cookie on each request
|
|
||||||
# e.g. "key1=value4; key1=value"
|
|
||||||
- key1: ["value1", "value2", "value3", "value4"]
|
|
||||||
- key1: "value"
|
|
||||||
|
|
||||||
# A random value will be selected from the list for cookie "key2" on each request
|
|
||||||
# e.g. "key2=value1"
|
|
||||||
- key2: ["value1", "value2"]
|
|
||||||
|
|
||||||
body: "body-text"
|
|
||||||
# OR
|
|
||||||
# A random body value will be selected from the list for each request
|
|
||||||
body:
|
|
||||||
- "body-text1"
|
|
||||||
- "body-text2"
|
|
||||||
- "body-text3"
|
|
||||||
|
|
||||||
proxy: "http://example.com:8080"
|
|
||||||
# OR
|
|
||||||
# A random proxy will be selected from the list for each request
|
|
||||||
proxy:
|
|
||||||
- "http://example.com:8080"
|
|
||||||
- "http://username:password@example.com:8080"
|
|
||||||
- "socks5://example.com:8080"
|
|
||||||
- "socks5h://example.com:8080"
|
|
||||||
```
|
|
||||||
|
|
||||||
```sh
|
|
||||||
dodo -f /path/config.yaml
|
|
||||||
# OR
|
|
||||||
dodo -f https://example.com/config.yaml
|
|
||||||
```
|
|
||||||
|
|
||||||
With Docker:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
docker run --rm -i -v /path/to/config.yaml:/config.yaml aykhans/dodo -f /config.yaml
|
|
||||||
# OR
|
|
||||||
docker run --rm -i aykhans/dodo -f https://example.com/config.yaml
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3. CLI & Config File Combination
|
|
||||||
|
|
||||||
CLI arguments override config file values:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
dodo -f /path/to/config.yaml -u https://example.com -m GET -d 10 -r 1000 -o 1m -t 5s
|
|
||||||
```
|
|
||||||
|
|
||||||
With Docker:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
docker run --rm -i -v /path/to/config.json:/config.json aykhans/dodo -f /config.json -u https://example.com -m GET -d 10 -r 1000 -o 1m -t 5s
|
|
||||||
```
|
|
||||||
|
|
||||||
## Config Parameters Reference
|
|
||||||
|
|
||||||
If `Headers`, `Params`, `Cookies`, `Body`, or `Proxy` fields have multiple values, each request will choose a random value from the list.
|
|
||||||
|
|
||||||
| Parameter | config file | CLI Flag | CLI Short Flag | Type | Description | Default |
|
|
||||||
| --------------- | ----------- | ------------ | -------------- | ------------------------------ | ----------------------------------------------------------- | ------- |
|
|
||||||
| Config file | - | -config-file | -f | String | Path to local config file or http(s) URL of the config file | - |
|
|
||||||
| Yes | yes | -yes | -y | Boolean | Answer yes to all questions | false |
|
|
||||||
| URL | url | -url | -u | String | URL to send the request to | - |
|
|
||||||
| Method | method | -method | -m | String | HTTP method | GET |
|
|
||||||
| Dodos (Threads) | dodos | -dodos | -d | UnsignedInteger | Number of dodos (threads) to send requests in parallel | 1 |
|
|
||||||
| Requests | requests | -requests | -r | UnsignedInteger | Total number of requests to send | - |
|
|
||||||
| Duration | duration | -duration | -o | Time | Maximum duration for the test | - |
|
|
||||||
| Timeout | timeout | -timeout | -t | Time | Timeout for canceling each request | 10s |
|
|
||||||
| Params | params | -param | -p | [{String: String OR [String]}] | Request parameters | - |
|
|
||||||
| Headers | headers | -header | -H | [{String: String OR [String]}] | Request headers | - |
|
|
||||||
| Cookies | cookies | -cookie | -c | [{String: String OR [String]}] | Request cookies | - |
|
|
||||||
| Body | body | -body | -b | String OR [String] | Request body or list of request bodies | - |
|
|
||||||
| Proxy | proxies | -proxy | -x | String OR [String] | Proxy URL or list of proxy URLs | - |
|
|
||||||
|
|||||||
97
Taskfile.yaml
Normal file
97
Taskfile.yaml
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
# https://taskfile.dev
|
||||||
|
version: "3"
|
||||||
|
|
||||||
|
vars:
|
||||||
|
BIN_DIR: ./bin
|
||||||
|
GOLANGCI_LINT_VERSION: v2.9.0
|
||||||
|
GOLANGCI: "{{.BIN_DIR}}/golangci-lint-{{.GOLANGCI_LINT_VERSION}}"
|
||||||
|
|
||||||
|
tasks:
|
||||||
|
ftl:
|
||||||
|
desc: Run fmt, tidy, and lint.
|
||||||
|
cmds:
|
||||||
|
- task: fmt
|
||||||
|
- task: fix
|
||||||
|
- task: tidy
|
||||||
|
- task: lint
|
||||||
|
|
||||||
|
fmt:
|
||||||
|
desc: Run format
|
||||||
|
deps:
|
||||||
|
- install-golangci-lint
|
||||||
|
cmds:
|
||||||
|
- "{{.GOLANGCI}} fmt"
|
||||||
|
|
||||||
|
fix:
|
||||||
|
desc: Run go fix
|
||||||
|
cmds:
|
||||||
|
- go fix ./...
|
||||||
|
|
||||||
|
tidy:
|
||||||
|
desc: Run go mod tidy.
|
||||||
|
cmds:
|
||||||
|
- go mod tidy {{.CLI_ARGS}}
|
||||||
|
|
||||||
|
lint:
|
||||||
|
desc: Run linters
|
||||||
|
deps:
|
||||||
|
- install-golangci-lint
|
||||||
|
cmds:
|
||||||
|
- "{{.GOLANGCI}} run"
|
||||||
|
|
||||||
|
test:
|
||||||
|
desc: Run Go tests.
|
||||||
|
cmds:
|
||||||
|
- go test ./... {{.CLI_ARGS}}
|
||||||
|
|
||||||
|
create-bin-dir:
|
||||||
|
desc: Create bin directory.
|
||||||
|
cmds:
|
||||||
|
- mkdir -p {{.BIN_DIR}}
|
||||||
|
|
||||||
|
build:
|
||||||
|
desc: Build the application.
|
||||||
|
deps:
|
||||||
|
- create-bin-dir
|
||||||
|
vars:
|
||||||
|
OUTPUT: '{{.OUTPUT | default (printf "%s/sarin" .BIN_DIR)}}'
|
||||||
|
cmds:
|
||||||
|
- rm -f {{.OUTPUT}}
|
||||||
|
- >-
|
||||||
|
CGO_ENABLED=0 go build
|
||||||
|
-ldflags "-X 'go.aykhans.me/sarin/internal/version.Version=$(git describe --tags --always)'
|
||||||
|
-X 'go.aykhans.me/sarin/internal/version.GitCommit=$(git rev-parse HEAD)'
|
||||||
|
-X 'go.aykhans.me/sarin/internal/version.BuildDate=$(date -u +%Y-%m-%dT%H:%M:%SZ)'
|
||||||
|
-X 'go.aykhans.me/sarin/internal/version.GoVersion=$(go version)'
|
||||||
|
-s -w"
|
||||||
|
-o {{.OUTPUT}} ./cmd/cli/main.go
|
||||||
|
|
||||||
|
install-golangci-lint:
|
||||||
|
desc: Install golangci-lint
|
||||||
|
deps:
|
||||||
|
- create-bin-dir
|
||||||
|
status:
|
||||||
|
- test -f {{.GOLANGCI}}
|
||||||
|
cmds:
|
||||||
|
- rm -f {{.GOLANGCI}}
|
||||||
|
- curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b {{.BIN_DIR}} {{.GOLANGCI_LINT_VERSION}}
|
||||||
|
- mv {{.BIN_DIR}}/golangci-lint {{.GOLANGCI}}
|
||||||
|
|
||||||
|
docker-build:
|
||||||
|
desc: Build the Docker image.
|
||||||
|
vars:
|
||||||
|
IMAGE_NAME: '{{.IMAGE_NAME | default "sarin"}}'
|
||||||
|
TAG: '{{.TAG | default "latest"}}'
|
||||||
|
GO_VERSION: '{{.GO_VERSION | default ""}}'
|
||||||
|
VERSION:
|
||||||
|
sh: git describe --tags --always
|
||||||
|
GIT_COMMIT:
|
||||||
|
sh: git rev-parse HEAD
|
||||||
|
cmds:
|
||||||
|
- >-
|
||||||
|
docker build
|
||||||
|
{{if .GO_VERSION}}--build-arg GO_VERSION={{.GO_VERSION}}{{end}}
|
||||||
|
--build-arg VERSION={{.VERSION}}
|
||||||
|
--build-arg GIT_COMMIT={{.GIT_COMMIT}}
|
||||||
|
-t {{.IMAGE_NAME}}:{{.TAG}}
|
||||||
|
.
|
||||||
32
build.sh
32
build.sh
@@ -1,32 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
platforms=(
|
|
||||||
"darwin,amd64"
|
|
||||||
"darwin,arm64"
|
|
||||||
"freebsd,386"
|
|
||||||
"freebsd,amd64"
|
|
||||||
"freebsd,arm"
|
|
||||||
"linux,386"
|
|
||||||
"linux,amd64"
|
|
||||||
"linux,arm"
|
|
||||||
"linux,arm64"
|
|
||||||
"netbsd,386"
|
|
||||||
"netbsd,amd64"
|
|
||||||
"netbsd,arm"
|
|
||||||
"openbsd,386"
|
|
||||||
"openbsd,amd64"
|
|
||||||
"openbsd,arm"
|
|
||||||
"openbsd,arm64"
|
|
||||||
"windows,386"
|
|
||||||
"windows,amd64"
|
|
||||||
"windows,arm64"
|
|
||||||
)
|
|
||||||
|
|
||||||
for platform in "${platforms[@]}"; do
|
|
||||||
IFS=',' read -r build_os build_arch <<< "$platform"
|
|
||||||
ext=""
|
|
||||||
if [ "$build_os" == "windows" ]; then
|
|
||||||
ext=".exe"
|
|
||||||
fi
|
|
||||||
GOOS="$build_os" GOARCH="$build_arch" go build -ldflags "-s -w" -o "./binaries/dodo-$build_os-$build_arch$ext"
|
|
||||||
done
|
|
||||||
95
cmd/cli/main.go
Normal file
95
cmd/cli/main.go
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"os/signal"
|
||||||
|
"syscall"
|
||||||
|
|
||||||
|
"go.aykhans.me/sarin/internal/config"
|
||||||
|
"go.aykhans.me/sarin/internal/sarin"
|
||||||
|
"go.aykhans.me/sarin/internal/types"
|
||||||
|
utilsErr "go.aykhans.me/utils/errors"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
ctx, cancel := context.WithCancel(context.Background())
|
||||||
|
go listenForTermination(func() { cancel() })
|
||||||
|
|
||||||
|
combinedConfig := config.ReadAllConfigs()
|
||||||
|
|
||||||
|
combinedConfig.SetDefaults()
|
||||||
|
|
||||||
|
if *combinedConfig.ShowConfig {
|
||||||
|
if !combinedConfig.Print() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
_ = utilsErr.MustHandle(combinedConfig.Validate(),
|
||||||
|
utilsErr.OnType(func(err types.FieldValidationErrors) error {
|
||||||
|
for _, fieldErr := range err.Errors {
|
||||||
|
if fieldErr.Value == "" {
|
||||||
|
fmt.Fprintln(os.Stderr,
|
||||||
|
config.StyleYellow.Render(fmt.Sprintf("[VALIDATION] Field '%s': ", fieldErr.Field))+fieldErr.Err.Error(),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
fmt.Fprintln(os.Stderr,
|
||||||
|
config.StyleYellow.Render(fmt.Sprintf("[VALIDATION] Field '%s' (%s): ", fieldErr.Field, fieldErr.Value))+fieldErr.Err.Error(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
os.Exit(1)
|
||||||
|
return nil
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
|
||||||
|
srn, err := sarin.NewSarin(
|
||||||
|
ctx,
|
||||||
|
combinedConfig.Methods, combinedConfig.URL, *combinedConfig.Timeout,
|
||||||
|
*combinedConfig.Concurrency, combinedConfig.Requests, combinedConfig.Duration,
|
||||||
|
*combinedConfig.Quiet, *combinedConfig.Insecure, combinedConfig.Params, combinedConfig.Headers,
|
||||||
|
combinedConfig.Cookies, combinedConfig.Bodies, combinedConfig.Proxies, combinedConfig.Values,
|
||||||
|
*combinedConfig.Output != config.ConfigOutputTypeNone,
|
||||||
|
*combinedConfig.DryRun,
|
||||||
|
combinedConfig.Lua, combinedConfig.Js,
|
||||||
|
)
|
||||||
|
_ = utilsErr.MustHandle(err,
|
||||||
|
utilsErr.OnType(func(err types.ProxyDialError) error {
|
||||||
|
fmt.Fprintln(os.Stderr, config.StyleRed.Render("[PROXY] ")+err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
return nil
|
||||||
|
}),
|
||||||
|
utilsErr.OnSentinel(types.ErrScriptEmpty, func(err error) error {
|
||||||
|
fmt.Fprintln(os.Stderr, config.StyleRed.Render("[SCRIPT] ")+err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
return nil
|
||||||
|
}),
|
||||||
|
utilsErr.OnType(func(err types.ScriptLoadError) error {
|
||||||
|
fmt.Fprintln(os.Stderr, config.StyleRed.Render("[SCRIPT] ")+err.Error())
|
||||||
|
os.Exit(1)
|
||||||
|
return nil
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
|
||||||
|
srn.Start(ctx)
|
||||||
|
|
||||||
|
switch *combinedConfig.Output {
|
||||||
|
case config.ConfigOutputTypeNone:
|
||||||
|
return
|
||||||
|
case config.ConfigOutputTypeJSON:
|
||||||
|
srn.GetResponses().PrintJSON()
|
||||||
|
case config.ConfigOutputTypeYAML:
|
||||||
|
srn.GetResponses().PrintYAML()
|
||||||
|
default:
|
||||||
|
srn.GetResponses().PrintTable()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func listenForTermination(do func()) {
|
||||||
|
sigChan := make(chan os.Signal, 1)
|
||||||
|
signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM)
|
||||||
|
<-sigChan
|
||||||
|
do()
|
||||||
|
}
|
||||||
36
config.json
36
config.json
@@ -1,36 +0,0 @@
|
|||||||
{
|
|
||||||
"method": "GET",
|
|
||||||
"url": "https://example.com",
|
|
||||||
"yes": false,
|
|
||||||
"timeout": "5s",
|
|
||||||
"dodos": 8,
|
|
||||||
"requests": 1000,
|
|
||||||
"duration": "10s",
|
|
||||||
|
|
||||||
"params": [
|
|
||||||
{ "key1": ["value1", "value2", "value3", "value4"] },
|
|
||||||
{ "key1": "value" },
|
|
||||||
{ "key2": ["value1", "value2"] }
|
|
||||||
],
|
|
||||||
|
|
||||||
"headers": [
|
|
||||||
{ "key1": ["value1", "value2", "value3", "value4"] },
|
|
||||||
{ "key1": "value" },
|
|
||||||
{ "key2": ["value1", "value2"] }
|
|
||||||
],
|
|
||||||
|
|
||||||
"cookies": [
|
|
||||||
{ "key1": ["value1", "value2", "value3", "value4"] },
|
|
||||||
{ "key1": "value" },
|
|
||||||
{ "key2": ["value1", "value2"] }
|
|
||||||
],
|
|
||||||
|
|
||||||
"body": ["body-text1", "body-text2", "body-text3"],
|
|
||||||
|
|
||||||
"proxy": [
|
|
||||||
"http://example.com:8080",
|
|
||||||
"http://username:password@example.com:8080",
|
|
||||||
"socks5://example.com:8080",
|
|
||||||
"socks5h://example.com:8080"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
39
config.yaml
39
config.yaml
@@ -1,39 +0,0 @@
|
|||||||
method: "GET"
|
|
||||||
url: "https://example.com"
|
|
||||||
yes: false
|
|
||||||
timeout: "5s"
|
|
||||||
dodos: 8
|
|
||||||
requests: 1000
|
|
||||||
duration: "10s"
|
|
||||||
|
|
||||||
params:
|
|
||||||
- key1: ["value1", "value2", "value3", "value4"]
|
|
||||||
- key1: "value"
|
|
||||||
- key2: ["value1", "value2"]
|
|
||||||
|
|
||||||
headers:
|
|
||||||
- key1: ["value1", "value2", "value3", "value4"]
|
|
||||||
- key1: "value"
|
|
||||||
- key2: ["value1", "value2"]
|
|
||||||
|
|
||||||
cookies:
|
|
||||||
- key1: ["value1", "value2", "value3", "value4"]
|
|
||||||
- key1: "value"
|
|
||||||
- key2: ["value1", "value2"]
|
|
||||||
|
|
||||||
# body: "body-text"
|
|
||||||
# OR
|
|
||||||
# A random body value will be selected from the list for each request
|
|
||||||
body:
|
|
||||||
- "body-text1"
|
|
||||||
- "body-text2"
|
|
||||||
- "body-text3"
|
|
||||||
|
|
||||||
# proxy: "http://example.com:8080"
|
|
||||||
# OR
|
|
||||||
# A random proxy will be selected from the list for each request
|
|
||||||
proxy:
|
|
||||||
- "http://example.com:8080"
|
|
||||||
- "http://username:password@example.com:8080"
|
|
||||||
- "socks5://example.com:8080"
|
|
||||||
- "socks5h://example.com:8080"
|
|
||||||
181
config/cli.go
181
config/cli.go
@@ -1,181 +0,0 @@
|
|||||||
package config
|
|
||||||
|
|
||||||
import (
|
|
||||||
"flag"
|
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/aykhans/dodo/types"
|
|
||||||
"github.com/aykhans/dodo/utils"
|
|
||||||
)
|
|
||||||
|
|
||||||
const cliUsageText = `Usage:
|
|
||||||
dodo [flags]
|
|
||||||
|
|
||||||
Examples:
|
|
||||||
|
|
||||||
Simple usage:
|
|
||||||
dodo -u https://example.com -o 1m
|
|
||||||
|
|
||||||
Usage with config file:
|
|
||||||
dodo -f /path/to/config/file/config.json
|
|
||||||
|
|
||||||
Usage with all flags:
|
|
||||||
dodo -f /path/to/config/file/config.json \
|
|
||||||
-u https://example.com -m POST \
|
|
||||||
-d 10 -r 1000 -o 3m -t 3s \
|
|
||||||
-b "body1" -body "body2" \
|
|
||||||
-H "header1:value1" -header "header2:value2" \
|
|
||||||
-p "param1=value1" -param "param2=value2" \
|
|
||||||
-c "cookie1=value1" -cookie "cookie2=value2" \
|
|
||||||
-x "http://proxy.example.com:8080" -proxy "socks5://proxy2.example.com:8080" \
|
|
||||||
-y
|
|
||||||
|
|
||||||
Flags:
|
|
||||||
-h, -help help for dodo
|
|
||||||
-v, -version version for dodo
|
|
||||||
-y, -yes bool Answer yes to all questions (default %v)
|
|
||||||
-f, -config-file string Path to the local config file or http(s) URL of the config file
|
|
||||||
-d, -dodos uint Number of dodos(threads) (default %d)
|
|
||||||
-r, -requests uint Number of total requests
|
|
||||||
-o, -duration Time Maximum duration for the test (e.g. 30s, 1m, 5h)
|
|
||||||
-t, -timeout Time Timeout for each request (e.g. 400ms, 15s, 1m10s) (default %v)
|
|
||||||
-u, -url string URL for stress testing
|
|
||||||
-m, -method string HTTP Method for the request (default %s)
|
|
||||||
-b, -body [string] Body for the request (e.g. "body text")
|
|
||||||
-p, -param [string] Parameter for the request (e.g. "key1=value1")
|
|
||||||
-H, -header [string] Header for the request (e.g. "key1:value1")
|
|
||||||
-c, -cookie [string] Cookie for the request (e.g. "key1=value1")
|
|
||||||
-x, -proxy [string] Proxy for the request (e.g. "http://proxy.example.com:8080")`
|
|
||||||
|
|
||||||
func (config *Config) ReadCLI() (types.ConfigFile, error) {
|
|
||||||
flag.Usage = func() {
|
|
||||||
fmt.Printf(
|
|
||||||
cliUsageText+"\n",
|
|
||||||
DefaultYes,
|
|
||||||
DefaultDodosCount,
|
|
||||||
DefaultTimeout,
|
|
||||||
DefaultMethod,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
var (
|
|
||||||
version = false
|
|
||||||
configFile = ""
|
|
||||||
yes = false
|
|
||||||
method = ""
|
|
||||||
url types.RequestURL
|
|
||||||
dodosCount = uint(0)
|
|
||||||
requestCount = uint(0)
|
|
||||||
timeout time.Duration
|
|
||||||
duration time.Duration
|
|
||||||
)
|
|
||||||
|
|
||||||
{
|
|
||||||
flag.BoolVar(&version, "version", false, "Prints the version of the program")
|
|
||||||
flag.BoolVar(&version, "v", false, "Prints the version of the program")
|
|
||||||
|
|
||||||
flag.StringVar(&configFile, "config-file", "", "Path to the configuration file")
|
|
||||||
flag.StringVar(&configFile, "f", "", "Path to the configuration file")
|
|
||||||
|
|
||||||
flag.BoolVar(&yes, "yes", false, "Answer yes to all questions")
|
|
||||||
flag.BoolVar(&yes, "y", false, "Answer yes to all questions")
|
|
||||||
|
|
||||||
flag.StringVar(&method, "method", "", "HTTP Method")
|
|
||||||
flag.StringVar(&method, "m", "", "HTTP Method")
|
|
||||||
|
|
||||||
flag.Var(&url, "url", "URL to send the request")
|
|
||||||
flag.Var(&url, "u", "URL to send the request")
|
|
||||||
|
|
||||||
flag.UintVar(&dodosCount, "dodos", 0, "Number of dodos(threads)")
|
|
||||||
flag.UintVar(&dodosCount, "d", 0, "Number of dodos(threads)")
|
|
||||||
|
|
||||||
flag.UintVar(&requestCount, "requests", 0, "Number of total requests")
|
|
||||||
flag.UintVar(&requestCount, "r", 0, "Number of total requests")
|
|
||||||
|
|
||||||
flag.DurationVar(&duration, "duration", 0, "Maximum duration of the test")
|
|
||||||
flag.DurationVar(&duration, "o", 0, "Maximum duration of the test")
|
|
||||||
|
|
||||||
flag.DurationVar(&timeout, "timeout", 0, "Timeout for each request (e.g. 400ms, 15s, 1m10s)")
|
|
||||||
flag.DurationVar(&timeout, "t", 0, "Timeout for each request (e.g. 400ms, 15s, 1m10s)")
|
|
||||||
|
|
||||||
flag.Var(&config.Params, "param", "URL parameter to send with the request")
|
|
||||||
flag.Var(&config.Params, "p", "URL parameter to send with the request")
|
|
||||||
|
|
||||||
flag.Var(&config.Headers, "header", "Header to send with the request")
|
|
||||||
flag.Var(&config.Headers, "H", "Header to send with the request")
|
|
||||||
|
|
||||||
flag.Var(&config.Cookies, "cookie", "Cookie to send with the request")
|
|
||||||
flag.Var(&config.Cookies, "c", "Cookie to send with the request")
|
|
||||||
|
|
||||||
flag.Var(&config.Body, "body", "Body to send with the request")
|
|
||||||
flag.Var(&config.Body, "b", "Body to send with the request")
|
|
||||||
|
|
||||||
flag.Var(&config.Proxies, "proxy", "Proxy to use for the request")
|
|
||||||
flag.Var(&config.Proxies, "x", "Proxy to use for the request")
|
|
||||||
}
|
|
||||||
|
|
||||||
flag.Parse()
|
|
||||||
|
|
||||||
if len(os.Args) <= 1 {
|
|
||||||
flag.CommandLine.Usage()
|
|
||||||
os.Exit(0)
|
|
||||||
}
|
|
||||||
|
|
||||||
if args := flag.Args(); len(args) > 0 {
|
|
||||||
return types.ConfigFile(configFile), fmt.Errorf("unexpected arguments: %v", strings.Join(args, ", "))
|
|
||||||
}
|
|
||||||
|
|
||||||
if version {
|
|
||||||
fmt.Printf("dodo version %s\n", VERSION)
|
|
||||||
os.Exit(0)
|
|
||||||
}
|
|
||||||
|
|
||||||
flag.Visit(func(f *flag.Flag) {
|
|
||||||
switch f.Name {
|
|
||||||
case "method", "m":
|
|
||||||
config.Method = utils.ToPtr(method)
|
|
||||||
case "url", "u":
|
|
||||||
config.URL = utils.ToPtr(url)
|
|
||||||
case "dodos", "d":
|
|
||||||
config.DodosCount = utils.ToPtr(dodosCount)
|
|
||||||
case "requests", "r":
|
|
||||||
config.RequestCount = utils.ToPtr(requestCount)
|
|
||||||
case "duration", "o":
|
|
||||||
config.Duration = &types.Duration{Duration: duration}
|
|
||||||
case "timeout", "t":
|
|
||||||
config.Timeout = &types.Timeout{Duration: timeout}
|
|
||||||
case "yes", "y":
|
|
||||||
config.Yes = utils.ToPtr(yes)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
return types.ConfigFile(configFile), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// CLIYesOrNoReader reads a yes or no answer from the command line.
|
|
||||||
// It prompts the user with the given message and default value,
|
|
||||||
// and returns true if the user answers "y" or "Y", and false otherwise.
|
|
||||||
// If there is an error while reading the input, it returns false.
|
|
||||||
// If the user simply presses enter without providing any input,
|
|
||||||
// it returns the default value specified by the `dft` parameter.
|
|
||||||
func CLIYesOrNoReader(message string, dft bool) bool {
|
|
||||||
var answer string
|
|
||||||
defaultMessage := "Y/n"
|
|
||||||
if !dft {
|
|
||||||
defaultMessage = "y/N"
|
|
||||||
}
|
|
||||||
fmt.Printf("%s [%s]: ", message, defaultMessage)
|
|
||||||
if _, err := fmt.Scanln(&answer); err != nil {
|
|
||||||
if err.Error() == "unexpected newline" {
|
|
||||||
return dft
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
if answer == "" {
|
|
||||||
return dft
|
|
||||||
}
|
|
||||||
return answer == "y" || answer == "Y"
|
|
||||||
}
|
|
||||||
260
config/config.go
260
config/config.go
@@ -1,260 +0,0 @@
|
|||||||
package config
|
|
||||||
|
|
||||||
import (
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"net/url"
|
|
||||||
"os"
|
|
||||||
"slices"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/aykhans/dodo/types"
|
|
||||||
"github.com/aykhans/dodo/utils"
|
|
||||||
"github.com/jedib0t/go-pretty/v6/table"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
VERSION string = "0.6.2"
|
|
||||||
DefaultUserAgent string = "Dodo/" + VERSION
|
|
||||||
DefaultMethod string = "GET"
|
|
||||||
DefaultTimeout time.Duration = time.Second * 10
|
|
||||||
DefaultDodosCount uint = 1
|
|
||||||
DefaultRequestCount uint = 0
|
|
||||||
DefaultDuration time.Duration = 0
|
|
||||||
DefaultYes bool = false
|
|
||||||
)
|
|
||||||
|
|
||||||
var SupportedProxySchemes []string = []string{"http", "socks5", "socks5h"}
|
|
||||||
|
|
||||||
type RequestConfig struct {
|
|
||||||
Method string
|
|
||||||
URL url.URL
|
|
||||||
Timeout time.Duration
|
|
||||||
DodosCount uint
|
|
||||||
RequestCount uint
|
|
||||||
Duration time.Duration
|
|
||||||
Yes bool
|
|
||||||
Params types.Params
|
|
||||||
Headers types.Headers
|
|
||||||
Cookies types.Cookies
|
|
||||||
Body types.Body
|
|
||||||
Proxies types.Proxies
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewRequestConfig(conf *Config) *RequestConfig {
|
|
||||||
return &RequestConfig{
|
|
||||||
Method: *conf.Method,
|
|
||||||
URL: conf.URL.URL,
|
|
||||||
Timeout: conf.Timeout.Duration,
|
|
||||||
DodosCount: *conf.DodosCount,
|
|
||||||
RequestCount: *conf.RequestCount,
|
|
||||||
Duration: conf.Duration.Duration,
|
|
||||||
Yes: *conf.Yes,
|
|
||||||
Params: conf.Params,
|
|
||||||
Headers: conf.Headers,
|
|
||||||
Cookies: conf.Cookies,
|
|
||||||
Body: conf.Body,
|
|
||||||
Proxies: conf.Proxies,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (rc *RequestConfig) GetValidDodosCountForRequests() uint {
|
|
||||||
if rc.RequestCount == 0 {
|
|
||||||
return rc.DodosCount
|
|
||||||
}
|
|
||||||
return min(rc.DodosCount, rc.RequestCount)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (rc *RequestConfig) GetMaxConns(minConns uint) uint {
|
|
||||||
maxConns := max(
|
|
||||||
minConns, rc.GetValidDodosCountForRequests(),
|
|
||||||
)
|
|
||||||
return ((maxConns * 50 / 100) + maxConns)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (rc *RequestConfig) Print() {
|
|
||||||
t := table.NewWriter()
|
|
||||||
t.SetOutputMirror(os.Stdout)
|
|
||||||
t.SetStyle(table.StyleLight)
|
|
||||||
t.SetColumnConfigs([]table.ColumnConfig{
|
|
||||||
{
|
|
||||||
Number: 2,
|
|
||||||
WidthMaxEnforcer: func(col string, maxLen int) string {
|
|
||||||
lines := strings.Split(col, "\n")
|
|
||||||
for i, line := range lines {
|
|
||||||
if len(line) > maxLen {
|
|
||||||
lines[i] = line[:maxLen-3] + "..."
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return strings.Join(lines, "\n")
|
|
||||||
},
|
|
||||||
WidthMax: 50},
|
|
||||||
})
|
|
||||||
|
|
||||||
t.AppendHeader(table.Row{"Request Configuration"})
|
|
||||||
t.AppendRow(table.Row{"URL", rc.URL.String()})
|
|
||||||
t.AppendSeparator()
|
|
||||||
t.AppendRow(table.Row{"Method", rc.Method})
|
|
||||||
t.AppendSeparator()
|
|
||||||
t.AppendRow(table.Row{"Timeout", rc.Timeout})
|
|
||||||
t.AppendSeparator()
|
|
||||||
t.AppendRow(table.Row{"Dodos", rc.DodosCount})
|
|
||||||
t.AppendSeparator()
|
|
||||||
if rc.RequestCount > 0 {
|
|
||||||
t.AppendRow(table.Row{"Requests", rc.RequestCount})
|
|
||||||
} else {
|
|
||||||
t.AppendRow(table.Row{"Requests"})
|
|
||||||
}
|
|
||||||
t.AppendSeparator()
|
|
||||||
if rc.Duration > 0 {
|
|
||||||
t.AppendRow(table.Row{"Duration", rc.Duration})
|
|
||||||
} else {
|
|
||||||
t.AppendRow(table.Row{"Duration"})
|
|
||||||
}
|
|
||||||
t.AppendSeparator()
|
|
||||||
t.AppendRow(table.Row{"Params", rc.Params.String()})
|
|
||||||
t.AppendSeparator()
|
|
||||||
t.AppendRow(table.Row{"Headers", rc.Headers.String()})
|
|
||||||
t.AppendSeparator()
|
|
||||||
t.AppendRow(table.Row{"Cookies", rc.Cookies.String()})
|
|
||||||
t.AppendSeparator()
|
|
||||||
t.AppendRow(table.Row{"Proxy", rc.Proxies.String()})
|
|
||||||
t.AppendSeparator()
|
|
||||||
t.AppendRow(table.Row{"Body", rc.Body.String()})
|
|
||||||
|
|
||||||
t.Render()
|
|
||||||
}
|
|
||||||
|
|
||||||
type Config struct {
|
|
||||||
Method *string `json:"method" yaml:"method"`
|
|
||||||
URL *types.RequestURL `json:"url" yaml:"url"`
|
|
||||||
Timeout *types.Timeout `json:"timeout" yaml:"timeout"`
|
|
||||||
DodosCount *uint `json:"dodos" yaml:"dodos"`
|
|
||||||
RequestCount *uint `json:"requests" yaml:"requests"`
|
|
||||||
Duration *types.Duration `json:"duration" yaml:"duration"`
|
|
||||||
Yes *bool `json:"yes" yaml:"yes"`
|
|
||||||
Params types.Params `json:"params" yaml:"params"`
|
|
||||||
Headers types.Headers `json:"headers" yaml:"headers"`
|
|
||||||
Cookies types.Cookies `json:"cookies" yaml:"cookies"`
|
|
||||||
Body types.Body `json:"body" yaml:"body"`
|
|
||||||
Proxies types.Proxies `json:"proxy" yaml:"proxy"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewConfig() *Config {
|
|
||||||
return &Config{}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *Config) Validate() []error {
|
|
||||||
var errs []error
|
|
||||||
if utils.IsNilOrZero(c.URL) {
|
|
||||||
errs = append(errs, errors.New("request URL is required"))
|
|
||||||
} else {
|
|
||||||
if c.URL.Scheme == "" {
|
|
||||||
c.URL.Scheme = "http"
|
|
||||||
}
|
|
||||||
if c.URL.Scheme != "http" && c.URL.Scheme != "https" {
|
|
||||||
errs = append(errs, errors.New("request URL scheme must be http or https"))
|
|
||||||
}
|
|
||||||
|
|
||||||
urlParams := types.Params{}
|
|
||||||
for key, values := range c.URL.Query() {
|
|
||||||
for _, value := range values {
|
|
||||||
urlParams = append(urlParams, types.KeyValue[string, []string]{
|
|
||||||
Key: key,
|
|
||||||
Value: []string{value},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
c.Params = append(urlParams, c.Params...)
|
|
||||||
c.URL.RawQuery = ""
|
|
||||||
}
|
|
||||||
|
|
||||||
if utils.IsNilOrZero(c.Method) {
|
|
||||||
errs = append(errs, errors.New("request method is required"))
|
|
||||||
}
|
|
||||||
if utils.IsNilOrZero(c.Timeout) {
|
|
||||||
errs = append(errs, errors.New("request timeout must be greater than 0"))
|
|
||||||
}
|
|
||||||
if utils.IsNilOrZero(c.DodosCount) {
|
|
||||||
errs = append(errs, errors.New("dodos count must be greater than 0"))
|
|
||||||
}
|
|
||||||
if utils.IsNilOrZero(c.Duration) && utils.IsNilOrZero(c.RequestCount) {
|
|
||||||
errs = append(errs, errors.New("you should provide at least one of duration or request count"))
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, proxy := range c.Proxies {
|
|
||||||
if proxy.String() == "" {
|
|
||||||
errs = append(errs, fmt.Errorf("proxies[%d]: proxy cannot be empty", i))
|
|
||||||
} else if schema := proxy.Scheme; !slices.Contains(SupportedProxySchemes, schema) {
|
|
||||||
errs = append(errs,
|
|
||||||
fmt.Errorf("proxies[%d]: proxy has unsupported scheme \"%s\" (supported schemes: %s)",
|
|
||||||
i, proxy.String(), strings.Join(SupportedProxySchemes, ", "),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return errs
|
|
||||||
}
|
|
||||||
|
|
||||||
func (config *Config) MergeConfig(newConfig *Config) {
|
|
||||||
if newConfig.Method != nil {
|
|
||||||
config.Method = newConfig.Method
|
|
||||||
}
|
|
||||||
if newConfig.URL != nil {
|
|
||||||
config.URL = newConfig.URL
|
|
||||||
}
|
|
||||||
if newConfig.Timeout != nil {
|
|
||||||
config.Timeout = newConfig.Timeout
|
|
||||||
}
|
|
||||||
if newConfig.DodosCount != nil {
|
|
||||||
config.DodosCount = newConfig.DodosCount
|
|
||||||
}
|
|
||||||
if newConfig.RequestCount != nil {
|
|
||||||
config.RequestCount = newConfig.RequestCount
|
|
||||||
}
|
|
||||||
if newConfig.Duration != nil {
|
|
||||||
config.Duration = newConfig.Duration
|
|
||||||
}
|
|
||||||
if newConfig.Yes != nil {
|
|
||||||
config.Yes = newConfig.Yes
|
|
||||||
}
|
|
||||||
if len(newConfig.Params) != 0 {
|
|
||||||
config.Params = newConfig.Params
|
|
||||||
}
|
|
||||||
if len(newConfig.Headers) != 0 {
|
|
||||||
config.Headers = newConfig.Headers
|
|
||||||
}
|
|
||||||
if len(newConfig.Cookies) != 0 {
|
|
||||||
config.Cookies = newConfig.Cookies
|
|
||||||
}
|
|
||||||
if len(newConfig.Body) != 0 {
|
|
||||||
config.Body = newConfig.Body
|
|
||||||
}
|
|
||||||
if len(newConfig.Proxies) != 0 {
|
|
||||||
config.Proxies = newConfig.Proxies
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (config *Config) SetDefaults() {
|
|
||||||
if config.Method == nil {
|
|
||||||
config.Method = utils.ToPtr(DefaultMethod)
|
|
||||||
}
|
|
||||||
if config.Timeout == nil {
|
|
||||||
config.Timeout = &types.Timeout{Duration: DefaultTimeout}
|
|
||||||
}
|
|
||||||
if config.DodosCount == nil {
|
|
||||||
config.DodosCount = utils.ToPtr(DefaultDodosCount)
|
|
||||||
}
|
|
||||||
if config.RequestCount == nil {
|
|
||||||
config.RequestCount = utils.ToPtr(DefaultRequestCount)
|
|
||||||
}
|
|
||||||
if config.Duration == nil {
|
|
||||||
config.Duration = &types.Duration{Duration: DefaultDuration}
|
|
||||||
}
|
|
||||||
if config.Yes == nil {
|
|
||||||
config.Yes = utils.ToPtr(DefaultYes)
|
|
||||||
}
|
|
||||||
config.Headers.SetIfNotExists("User-Agent", DefaultUserAgent)
|
|
||||||
}
|
|
||||||
@@ -1,83 +0,0 @@
|
|||||||
package config
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"net/http"
|
|
||||||
"os"
|
|
||||||
"slices"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/aykhans/dodo/types"
|
|
||||||
"gopkg.in/yaml.v3"
|
|
||||||
)
|
|
||||||
|
|
||||||
var supportedFileTypes = []string{"json", "yaml", "yml"}
|
|
||||||
|
|
||||||
func (config *Config) ReadFile(filePath types.ConfigFile) error {
|
|
||||||
var (
|
|
||||||
data []byte
|
|
||||||
err error
|
|
||||||
)
|
|
||||||
|
|
||||||
fileExt := filePath.Extension()
|
|
||||||
if slices.Contains(supportedFileTypes, fileExt) {
|
|
||||||
if filePath.LocationType() == types.FileLocationTypeRemoteHTTP {
|
|
||||||
client := &http.Client{
|
|
||||||
Timeout: 10 * time.Second,
|
|
||||||
}
|
|
||||||
|
|
||||||
resp, err := client.Get(filePath.String())
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed to fetch config file from %s", filePath)
|
|
||||||
}
|
|
||||||
defer resp.Body.Close()
|
|
||||||
|
|
||||||
data, err = io.ReadAll(io.Reader(resp.Body))
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed to read config file from %s", filePath)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
data, err = os.ReadFile(filePath.String())
|
|
||||||
if err != nil {
|
|
||||||
return errors.New("failed to read config file from " + filePath.String())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if fileExt == "json" {
|
|
||||||
return parseJSONConfig(data, config)
|
|
||||||
} else if fileExt == "yml" || fileExt == "yaml" {
|
|
||||||
return parseYAMLConfig(data, config)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return fmt.Errorf("unsupported config file type (supported types: %v)", strings.Join(supportedFileTypes, ", "))
|
|
||||||
}
|
|
||||||
|
|
||||||
func parseJSONConfig(data []byte, config *Config) error {
|
|
||||||
err := json.Unmarshal(data, &config)
|
|
||||||
if err != nil {
|
|
||||||
switch parsedErr := err.(type) {
|
|
||||||
case *json.SyntaxError:
|
|
||||||
return fmt.Errorf("JSON Config file: invalid syntax at byte offset %d", parsedErr.Offset)
|
|
||||||
case *json.UnmarshalTypeError:
|
|
||||||
return fmt.Errorf("JSON Config file: invalid type %v for field %s, expected %v", parsedErr.Value, parsedErr.Field, parsedErr.Type)
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("JSON Config file: %s", err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func parseYAMLConfig(data []byte, config *Config) error {
|
|
||||||
err := yaml.Unmarshal(data, &config)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("YAML Config file: %s", err.Error())
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
529
docs/configuration.md
Normal file
529
docs/configuration.md
Normal file
@@ -0,0 +1,529 @@
|
|||||||
|
# Configuration
|
||||||
|
|
||||||
|
Sarin supports environment variables, CLI flags, and YAML files. However, they are not exactly equivalent—YAML files have the most configuration options, followed by CLI flags, and then environment variables.
|
||||||
|
|
||||||
|
When the same option is specified in multiple sources, the following priority order applies:
|
||||||
|
|
||||||
|
```
|
||||||
|
CLI Flags (Highest) > YAML > Environment Variables (Lowest)
|
||||||
|
```
|
||||||
|
|
||||||
|
Use `-s` or `--show-config` to see the final merged configuration before sending requests.
|
||||||
|
|
||||||
|
## Properties
|
||||||
|
|
||||||
|
> **Note:** For CLI flags with `string / []string` type, the flag can be used once with a single value or multiple times to provide multiple values.
|
||||||
|
|
||||||
|
| Name | YAML | CLI | ENV | Default | Description |
|
||||||
|
| --------------------------- | ----------------------------------- | -------------------------------------------- | -------------------------------- | ------- | ---------------------------- |
|
||||||
|
| [Help](#help) | - | `-help` / `-h` | - | - | Show help message |
|
||||||
|
| [Version](#version) | - | `-version` / `-v` | - | - | Show version and build info |
|
||||||
|
| [Show Config](#show-config) | `showConfig`<br>(boolean) | `-show-config` / `-s`<br>(boolean) | `SARIN_SHOW_CONFIG`<br>(boolean) | `false` | Show merged configuration |
|
||||||
|
| [Config File](#config-file) | `configFile`<br>(string / []string) | `-config-file` / `-f`<br>(string / []string) | `SARIN_CONFIG_FILE`<br>(string) | - | Path to config file(s) |
|
||||||
|
| [URL](#url) | `url`<br>(string) | `-url` / `-U`<br>(string) | `SARIN_URL`<br>(string) | - | Target URL (HTTP/HTTPS) |
|
||||||
|
| [Method](#method) | `method`<br>(string / []string) | `-method` / `-M`<br>(string / []string) | `SARIN_METHOD`<br>(string) | `GET` | HTTP method(s) |
|
||||||
|
| [Timeout](#timeout) | `timeout`<br>(duration) | `-timeout` / `-T`<br>(duration) | `SARIN_TIMEOUT`<br>(duration) | `10s` | Request timeout |
|
||||||
|
| [Concurrency](#concurrency) | `concurrency`<br>(number) | `-concurrency` / `-c`<br>(number) | `SARIN_CONCURRENCY`<br>(number) | `1` | Number of concurrent workers |
|
||||||
|
| [Requests](#requests) | `requests`<br>(number) | `-requests` / `-r`<br>(number) | `SARIN_REQUESTS`<br>(number) | - | Total requests to send |
|
||||||
|
| [Duration](#duration) | `duration`<br>(duration) | `-duration` / `-d`<br>(duration) | `SARIN_DURATION`<br>(duration) | - | Test duration |
|
||||||
|
| [Quiet](#quiet) | `quiet`<br>(boolean) | `-quiet` / `-q`<br>(boolean) | `SARIN_QUIET`<br>(boolean) | `false` | Hide progress bar and logs |
|
||||||
|
| [Output](#output) | `output`<br>(string) | `-output` / `-o`<br>(string) | `SARIN_OUTPUT`<br>(string) | `table` | Output format for stats |
|
||||||
|
| [Dry Run](#dry-run) | `dryRun`<br>(boolean) | `-dry-run` / `-z`<br>(boolean) | `SARIN_DRY_RUN`<br>(boolean) | `false` | Generate without sending |
|
||||||
|
| [Insecure](#insecure) | `insecure`<br>(boolean) | `-insecure` / `-I`<br>(boolean) | `SARIN_INSECURE`<br>(boolean) | `false` | Skip TLS verification |
|
||||||
|
| [Body](#body) | `body`<br>(string / []string) | `-body` / `-B`<br>(string / []string) | `SARIN_BODY`<br>(string) | - | Request body |
|
||||||
|
| [Params](#params) | `params`<br>(object) | `-param` / `-P`<br>(string / []string) | `SARIN_PARAM`<br>(string) | - | URL query parameters |
|
||||||
|
| [Headers](#headers) | `headers`<br>(object) | `-header` / `-H`<br>(string / []string) | `SARIN_HEADER`<br>(string) | - | HTTP headers |
|
||||||
|
| [Cookies](#cookies) | `cookies`<br>(object) | `-cookie` / `-C`<br>(string / []string) | `SARIN_COOKIE`<br>(string) | - | HTTP cookies |
|
||||||
|
| [Proxy](#proxy) | `proxy`<br>(string / []string) | `-proxy` / `-X`<br>(string / []string) | `SARIN_PROXY`<br>(string) | - | Proxy URL(s) |
|
||||||
|
| [Values](#values) | `values`<br>(string / []string) | `-values` / `-V`<br>(string / []string) | `SARIN_VALUES`<br>(string) | - | Template values (key=value) |
|
||||||
|
| [Lua](#lua) | `lua`<br>(string / []string) | `-lua`<br>(string / []string) | `SARIN_LUA`<br>(string) | - | Lua script(s) |
|
||||||
|
| [Js](#js) | `js`<br>(string / []string) | `-js`<br>(string / []string) | `SARIN_JS`<br>(string) | - | JavaScript script(s) |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Help
|
||||||
|
|
||||||
|
Show help message.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sarin -help
|
||||||
|
```
|
||||||
|
|
||||||
|
## Version
|
||||||
|
|
||||||
|
Show version and build information.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sarin -version
|
||||||
|
```
|
||||||
|
|
||||||
|
## Show Config
|
||||||
|
|
||||||
|
Show the final merged configuration before sending requests.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sarin -show-config
|
||||||
|
```
|
||||||
|
|
||||||
|
## Config File
|
||||||
|
|
||||||
|
Path to configuration file(s). Supports local paths and remote URLs.
|
||||||
|
|
||||||
|
**Priority Rules:**
|
||||||
|
|
||||||
|
1. **CLI flags** (`-f`) have highest priority, processed left to right (rightmost wins)
|
||||||
|
2. **Included files** (via `configFile` property) are processed with lower priority than their parent
|
||||||
|
3. **Environment variable** (`SARIN_CONFIG_FILE`) has lowest priority
|
||||||
|
|
||||||
|
**Example:**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# config2.yaml
|
||||||
|
configFile: /config4.yaml
|
||||||
|
url: http://from-config2.com
|
||||||
|
```
|
||||||
|
|
||||||
|
```sh
|
||||||
|
SARIN_CONFIG_FILE=/config1.yaml sarin -f /config2.yaml -f https://example.com/config3.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
**Resolution order (lowest to highest priority):**
|
||||||
|
|
||||||
|
| Source | File | Priority |
|
||||||
|
| ------------------------ | ------------ | -------- |
|
||||||
|
| ENV (SARIN_CONFIG_FILE) | config1.yaml | Lowest |
|
||||||
|
| Included by config2.yaml | config4.yaml | ↑ |
|
||||||
|
| CLI -f (first) | config2.yaml | ↑ |
|
||||||
|
| CLI -f (second) | config3.yaml | Highest |
|
||||||
|
|
||||||
|
**Why this order?**
|
||||||
|
|
||||||
|
- `config1.yaml` comes from ENV → lowest priority
|
||||||
|
- `config2.yaml` comes from CLI → higher than ENV
|
||||||
|
- `config4.yaml` is included BY `config2.yaml` → inherits position below its parent
|
||||||
|
- `config3.yaml` comes from CLI after `config2.yaml` → highest priority
|
||||||
|
|
||||||
|
If all four files define `url`, the value from `config3.yaml` wins.
|
||||||
|
|
||||||
|
## URL
|
||||||
|
|
||||||
|
Target URL. Must be HTTP or HTTPS. The URL path supports [templating](templating.md), allowing dynamic path generation per request.
|
||||||
|
|
||||||
|
> **Note:** Templating is only supported in the URL path. Host and scheme must be static.
|
||||||
|
|
||||||
|
**Example with dynamic path:**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
url: http://example.com/users/{{ fakeit_UUID }}/profile
|
||||||
|
```
|
||||||
|
|
||||||
|
**CLI example with dynamic path:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sarin -U "http://example.com/users/{{ fakeit_UUID }}" -r 1000 -c 10
|
||||||
|
```
|
||||||
|
|
||||||
|
## Method
|
||||||
|
|
||||||
|
HTTP method(s). If multiple values are provided, Sarin cycles through them in order, starting from a random index for each request. Supports [templating](templating.md).
|
||||||
|
|
||||||
|
**YAML example:**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
method: GET
|
||||||
|
|
||||||
|
# OR
|
||||||
|
|
||||||
|
method:
|
||||||
|
- GET
|
||||||
|
- POST
|
||||||
|
- PUT
|
||||||
|
```
|
||||||
|
|
||||||
|
**CLI example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
-method GET -method POST -method PUT
|
||||||
|
```
|
||||||
|
|
||||||
|
**ENV example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
SARIN_METHOD=GET
|
||||||
|
```
|
||||||
|
|
||||||
|
## Timeout
|
||||||
|
|
||||||
|
Request timeout. Must be greater than 0.
|
||||||
|
|
||||||
|
Valid time units: `ns`, `us` (or `µs`), `ms`, `s`, `m`, `h`
|
||||||
|
|
||||||
|
**Examples:** `5s`, `300ms`, `1m20s`
|
||||||
|
|
||||||
|
## Concurrency
|
||||||
|
|
||||||
|
Number of concurrent workers. Must be between 1 and 100,000,000.
|
||||||
|
|
||||||
|
## Requests
|
||||||
|
|
||||||
|
Total number of requests to send. At least one of `requests` or `duration` must be specified. If both are provided, the test stops when either limit is reached first.
|
||||||
|
|
||||||
|
## Duration
|
||||||
|
|
||||||
|
Test duration. At least one of `requests` or `duration` must be specified. If both are provided, the test stops when either limit is reached first.
|
||||||
|
|
||||||
|
Valid time units: `ns`, `us` (or `µs`), `ms`, `s`, `m`, `h`
|
||||||
|
|
||||||
|
**Examples:** `1m30s`, `25s`, `1h`
|
||||||
|
|
||||||
|
## Quiet
|
||||||
|
|
||||||
|
Hide the progress bar and runtime logs.
|
||||||
|
|
||||||
|
## Output
|
||||||
|
|
||||||
|
Output format for response statistics.
|
||||||
|
|
||||||
|
Valid formats: `table`, `json`, `yaml`, `none`
|
||||||
|
|
||||||
|
Using `none` disables output and reduces memory usage since response statistics are not stored.
|
||||||
|
|
||||||
|
## Dry Run
|
||||||
|
|
||||||
|
Generate requests without sending them. Useful for testing templates.
|
||||||
|
|
||||||
|
## Insecure
|
||||||
|
|
||||||
|
Skip TLS certificate verification.
|
||||||
|
|
||||||
|
## Body
|
||||||
|
|
||||||
|
Request body. If multiple values are provided, Sarin cycles through them in order, starting from a random index for each request. Supports [templating](templating.md).
|
||||||
|
|
||||||
|
**YAML example:**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
body: '{"product": "car"}'
|
||||||
|
|
||||||
|
# OR
|
||||||
|
|
||||||
|
body:
|
||||||
|
- '{"product": "car"}'
|
||||||
|
- '{"product": "phone"}'
|
||||||
|
- '{"product": "watch"}'
|
||||||
|
```
|
||||||
|
|
||||||
|
**CLI example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
-body '{"product": "car"}' -body '{"product": "phone"}' -body '{"product": "watch"}'
|
||||||
|
```
|
||||||
|
|
||||||
|
**ENV example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
SARIN_BODY='{"product": "car"}'
|
||||||
|
```
|
||||||
|
|
||||||
|
## Params
|
||||||
|
|
||||||
|
URL query parameters. Supports [templating](templating.md).
|
||||||
|
|
||||||
|
When the same key appears as **separate entries** (in CLI or config file), all values are sent in every request. When multiple values are specified as an **array on a single key** (config file only), Sarin cycles through them.
|
||||||
|
|
||||||
|
**YAML example:**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
params:
|
||||||
|
key1: value1
|
||||||
|
key2: [value2, value3] # cycles between value2 and value3
|
||||||
|
|
||||||
|
# OR
|
||||||
|
|
||||||
|
params:
|
||||||
|
- key1: value1
|
||||||
|
- key2: [value2, value3] # cycles between value2 and value3
|
||||||
|
|
||||||
|
# To send both values in every request, use separate entries:
|
||||||
|
params:
|
||||||
|
- key2: value2
|
||||||
|
- key2: value3 # both sent in every request
|
||||||
|
```
|
||||||
|
|
||||||
|
**CLI example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
-param "key1=value1" -param "key2=value2" -param "key2=value3" # sends both value2 and value3
|
||||||
|
```
|
||||||
|
|
||||||
|
**ENV example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
SARIN_PARAM="key1=value1"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Headers
|
||||||
|
|
||||||
|
HTTP headers. Supports [templating](templating.md).
|
||||||
|
|
||||||
|
When the same key appears as **separate entries** (in CLI or config file), all values are sent in every request. When multiple values are specified as an **array on a single key** (config file only), Sarin cycles through them.
|
||||||
|
|
||||||
|
**YAML example:**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
headers:
|
||||||
|
key1: value1
|
||||||
|
key2: [value2, value3] # cycles between value2 and value3
|
||||||
|
|
||||||
|
# OR
|
||||||
|
|
||||||
|
headers:
|
||||||
|
- key1: value1
|
||||||
|
- key2: [value2, value3] # cycles between value2 and value3
|
||||||
|
|
||||||
|
# To send both values in every request, use separate entries:
|
||||||
|
headers:
|
||||||
|
- key2: value2
|
||||||
|
- key2: value3 # both sent in every request
|
||||||
|
```
|
||||||
|
|
||||||
|
**CLI example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
-header "key1: value1" -header "key2: value2" -header "key2: value3" # sends both value2 and value3
|
||||||
|
```
|
||||||
|
|
||||||
|
**ENV example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
SARIN_HEADER="key1: value1"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Cookies
|
||||||
|
|
||||||
|
HTTP cookies. Supports [templating](templating.md).
|
||||||
|
|
||||||
|
When the same key appears as **separate entries** (in CLI or config file), all values are sent in every request. When multiple values are specified as an **array on a single key** (config file only), Sarin cycles through them.
|
||||||
|
|
||||||
|
**YAML example:**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
cookies:
|
||||||
|
key1: value1
|
||||||
|
key2: [value2, value3] # cycles between value2 and value3
|
||||||
|
|
||||||
|
# OR
|
||||||
|
|
||||||
|
cookies:
|
||||||
|
- key1: value1
|
||||||
|
- key2: [value2, value3] # cycles between value2 and value3
|
||||||
|
|
||||||
|
# To send both values in every request, use separate entries:
|
||||||
|
cookies:
|
||||||
|
- key2: value2
|
||||||
|
- key2: value3 # both sent in every request
|
||||||
|
```
|
||||||
|
|
||||||
|
**CLI example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
-cookie "key1=value1" -cookie "key2=value2" -cookie "key2=value3" # sends both value2 and value3
|
||||||
|
```
|
||||||
|
|
||||||
|
**ENV example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
SARIN_COOKIE="key1=value1"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Proxy
|
||||||
|
|
||||||
|
Proxy URL(s). If multiple values are provided, Sarin cycles through them in order, starting from a random index for each request.
|
||||||
|
|
||||||
|
Supported protocols: `http`, `https`, `socks5`, `socks5h`
|
||||||
|
|
||||||
|
**YAML example:**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
proxy: http://proxy1.com
|
||||||
|
|
||||||
|
# OR
|
||||||
|
|
||||||
|
proxy:
|
||||||
|
- http://proxy1.com
|
||||||
|
- socks5://proxy2.com
|
||||||
|
- socks5h://proxy3.com
|
||||||
|
```
|
||||||
|
|
||||||
|
**CLI example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
-proxy http://proxy1.com -proxy socks5://proxy2.com -proxy socks5h://proxy3.com
|
||||||
|
```
|
||||||
|
|
||||||
|
**ENV example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
SARIN_PROXY="http://proxy1.com"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Values
|
||||||
|
|
||||||
|
Template values in key=value format. Supports [templating](templating.md). Multiple values can be specified and all are rendered for each request.
|
||||||
|
|
||||||
|
See the [Templating Guide](templating.md) for more details on using values and available template functions.
|
||||||
|
|
||||||
|
**YAML example:**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
values: "key=value"
|
||||||
|
|
||||||
|
# OR
|
||||||
|
|
||||||
|
values: |
|
||||||
|
key1=value1
|
||||||
|
key2=value2
|
||||||
|
key3=value3
|
||||||
|
```
|
||||||
|
|
||||||
|
**CLI example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
-values "key1=value1" -values "key2=value2" -values "key3=value3"
|
||||||
|
```
|
||||||
|
|
||||||
|
**ENV example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
SARIN_VALUES="key1=value1"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Lua
|
||||||
|
|
||||||
|
Lua script(s) for request transformation. Each script must define a global `transform` function that receives a request object and returns the modified request object. Scripts run after template rendering, before the request is sent.
|
||||||
|
|
||||||
|
If multiple Lua scripts are provided, they are chained in order—the output of one becomes the input to the next. When both Lua and JavaScript scripts are specified, all Lua scripts run first, then all JavaScript scripts.
|
||||||
|
|
||||||
|
**Script sources:**
|
||||||
|
|
||||||
|
Scripts can be provided as:
|
||||||
|
|
||||||
|
- **Inline script:** Direct script code
|
||||||
|
- **File reference:** `@/path/to/script.lua` or `@./relative/path.lua`
|
||||||
|
- **URL reference:** `@http://...` or `@https://...`
|
||||||
|
- **Escaped `@`:** `@@...` for inline scripts that start with a literal `@`
|
||||||
|
|
||||||
|
**The `transform` function:**
|
||||||
|
|
||||||
|
```lua
|
||||||
|
function transform(req)
|
||||||
|
-- req.method (string) - HTTP method (e.g. "GET", "POST")
|
||||||
|
-- req.path (string) - URL path (e.g. "/api/users")
|
||||||
|
-- req.body (string) - Request body
|
||||||
|
-- req.headers (table of string/arrays) - HTTP headers (e.g. {["X-Key"] = "value"})
|
||||||
|
-- req.params (table of string/arrays) - Query parameters (e.g. {["id"] = "123"})
|
||||||
|
-- req.cookies (table of string/arrays) - Cookies (e.g. {["session"] = "abc"})
|
||||||
|
|
||||||
|
req.headers["X-Custom"] = "my-value"
|
||||||
|
return req
|
||||||
|
end
|
||||||
|
```
|
||||||
|
|
||||||
|
> **Note:** Header, parameter, and cookie values can be a single string or a table (array) for multiple values per key (e.g. `{"val1", "val2"}`).
|
||||||
|
|
||||||
|
**YAML example:**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
lua: |
|
||||||
|
function transform(req)
|
||||||
|
req.headers["X-Custom"] = "my-value"
|
||||||
|
return req
|
||||||
|
end
|
||||||
|
|
||||||
|
# OR
|
||||||
|
|
||||||
|
lua:
|
||||||
|
- "@/path/to/script1.lua"
|
||||||
|
- "@/path/to/script2.lua"
|
||||||
|
```
|
||||||
|
|
||||||
|
**CLI example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
-lua 'function transform(req) req.headers["X-Custom"] = "my-value" return req end'
|
||||||
|
|
||||||
|
# OR
|
||||||
|
|
||||||
|
-lua @/path/to/script1.lua -lua @/path/to/script2.lua
|
||||||
|
```
|
||||||
|
|
||||||
|
**ENV example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
SARIN_LUA='function transform(req) req.headers["X-Custom"] = "my-value" return req end'
|
||||||
|
```
|
||||||
|
|
||||||
|
## Js
|
||||||
|
|
||||||
|
JavaScript script(s) for request transformation. Each script must define a global `transform` function that receives a request object and returns the modified request object. Scripts run after template rendering, before the request is sent.
|
||||||
|
|
||||||
|
If multiple JavaScript scripts are provided, they are chained in order—the output of one becomes the input to the next. When both Lua and JavaScript scripts are specified, all Lua scripts run first, then all JavaScript scripts.
|
||||||
|
|
||||||
|
**Script sources:**
|
||||||
|
|
||||||
|
Scripts can be provided as:
|
||||||
|
|
||||||
|
- **Inline script:** Direct script code
|
||||||
|
- **File reference:** `@/path/to/script.js` or `@./relative/path.js`
|
||||||
|
- **URL reference:** `@http://...` or `@https://...`
|
||||||
|
- **Escaped `@`:** `@@...` for inline scripts that start with a literal `@`
|
||||||
|
|
||||||
|
**The `transform` function:**
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
function transform(req) {
|
||||||
|
// req.method (string) - HTTP method (e.g. "GET", "POST")
|
||||||
|
// req.path (string) - URL path (e.g. "/api/users")
|
||||||
|
// req.body (string) - Request body
|
||||||
|
// req.headers (object of string/arrays) - HTTP headers (e.g. {"X-Key": "value"})
|
||||||
|
// req.params (object of string/arrays) - Query parameters (e.g. {"id": "123"})
|
||||||
|
// req.cookies (object of string/arrays) - Cookies (e.g. {"session": "abc"})
|
||||||
|
|
||||||
|
req.headers["X-Custom"] = "my-value";
|
||||||
|
return req;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
> **Note:** Header, parameter, and cookie values can be a single string or an array for multiple values per key (e.g. `["val1", "val2"]`).
|
||||||
|
|
||||||
|
**YAML example:**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
js: |
|
||||||
|
function transform(req) {
|
||||||
|
req.headers["X-Custom"] = "my-value";
|
||||||
|
return req;
|
||||||
|
}
|
||||||
|
|
||||||
|
# OR
|
||||||
|
|
||||||
|
js:
|
||||||
|
- "@/path/to/script1.js"
|
||||||
|
- "@/path/to/script2.js"
|
||||||
|
```
|
||||||
|
|
||||||
|
**CLI example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
-js 'function transform(req) { req.headers["X-Custom"] = "my-value"; return req; }'
|
||||||
|
|
||||||
|
# OR
|
||||||
|
|
||||||
|
-js @/path/to/script1.js -js @/path/to/script2.js
|
||||||
|
```
|
||||||
|
|
||||||
|
**ENV example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
SARIN_JS='function transform(req) { req.headers["X-Custom"] = "my-value"; return req; }'
|
||||||
|
```
|
||||||
1030
docs/examples.md
Normal file
1030
docs/examples.md
Normal file
File diff suppressed because it is too large
Load Diff
BIN
docs/static/demo.gif
vendored
Normal file
BIN
docs/static/demo.gif
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 96 KiB |
667
docs/templating.md
Normal file
667
docs/templating.md
Normal file
@@ -0,0 +1,667 @@
|
|||||||
|
# Templating
|
||||||
|
|
||||||
|
Sarin supports Go templates in URL paths, methods, bodies, headers, params, cookies, and values.
|
||||||
|
|
||||||
|
> **Note:** Templating in URL host and scheme is not supported. Only the path portion of the URL can contain templates.
|
||||||
|
|
||||||
|
## Table of Contents
|
||||||
|
|
||||||
|
- [Using Values](#using-values)
|
||||||
|
- [General Functions](#general-functions)
|
||||||
|
- [String Functions](#string-functions)
|
||||||
|
- [Collection Functions](#collection-functions)
|
||||||
|
- [Body Functions](#body-functions)
|
||||||
|
- [File Functions](#file-functions)
|
||||||
|
- [Fake Data Functions](#fake-data-functions)
|
||||||
|
- [File](#file)
|
||||||
|
- [ID](#id)
|
||||||
|
- [Product](#product)
|
||||||
|
- [Person](#person)
|
||||||
|
- [Generate](#generate)
|
||||||
|
- [Auth](#auth)
|
||||||
|
- [Address](#address)
|
||||||
|
- [Game](#game)
|
||||||
|
- [Beer](#beer)
|
||||||
|
- [Car](#car)
|
||||||
|
- [Words](#words)
|
||||||
|
- [Text](#text)
|
||||||
|
- [Foods](#foods)
|
||||||
|
- [Misc](#misc)
|
||||||
|
- [Color](#color)
|
||||||
|
- [Image](#image)
|
||||||
|
- [Internet](#internet)
|
||||||
|
- [HTML](#html)
|
||||||
|
- [Date/Time](#datetime)
|
||||||
|
- [Payment](#payment)
|
||||||
|
- [Finance](#finance)
|
||||||
|
- [Company](#company)
|
||||||
|
- [Hacker](#hacker)
|
||||||
|
- [Hipster](#hipster)
|
||||||
|
- [App](#app)
|
||||||
|
- [Animal](#animal)
|
||||||
|
- [Emoji](#emoji)
|
||||||
|
- [Language](#language)
|
||||||
|
- [Number](#number)
|
||||||
|
- [String](#string)
|
||||||
|
- [Celebrity](#celebrity)
|
||||||
|
- [Minecraft](#minecraft)
|
||||||
|
- [Book](#book)
|
||||||
|
- [Movie](#movie)
|
||||||
|
- [Error](#error)
|
||||||
|
- [School](#school)
|
||||||
|
- [Song](#song)
|
||||||
|
|
||||||
|
## Using Values
|
||||||
|
|
||||||
|
Values are generated once per request and can be referenced in multiple fields using `{{ .Values.KEY }}` syntax. This is useful when you need to use the same generated value (e.g., a UUID) in both headers and body within the same request.
|
||||||
|
|
||||||
|
**Example:**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
values: |
|
||||||
|
REQUEST_ID={{ fakeit_UUID }}
|
||||||
|
USER_ID={{ fakeit_UUID }}
|
||||||
|
|
||||||
|
headers:
|
||||||
|
X-Request-ID: "{{ .Values.REQUEST_ID }}"
|
||||||
|
body: |
|
||||||
|
{
|
||||||
|
"requestId": "{{ .Values.REQUEST_ID }}",
|
||||||
|
"userId": "{{ .Values.USER_ID }}"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
In this example, `REQUEST_ID` is generated once and the same value is used in both the header and body. Each new request generates a new `REQUEST_ID`.
|
||||||
|
|
||||||
|
**CLI example:**
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sarin -U http://example.com/users \
|
||||||
|
-V "ID={{ fakeit_UUID }}" \
|
||||||
|
-H "X-Request-ID: {{ .Values.ID }}" \
|
||||||
|
-B '{"id": "{{ .Values.ID }}"}'
|
||||||
|
```
|
||||||
|
|
||||||
|
## General Functions
|
||||||
|
|
||||||
|
### String Functions
|
||||||
|
|
||||||
|
| Function | Description | Example |
|
||||||
|
| ---------------------------------------------------------- | ------------------------------------------------------------------- | --------------------------------------------------------- |
|
||||||
|
| `strings_ToUpper` | Convert string to uppercase | `{{ strings_ToUpper "hello" }}` → `HELLO` |
|
||||||
|
| `strings_ToLower` | Convert string to lowercase | `{{ strings_ToLower "HELLO" }}` → `hello` |
|
||||||
|
| `strings_RemoveSpaces` | Remove all spaces from string | `{{ strings_RemoveSpaces "hello world" }}` → `helloworld` |
|
||||||
|
| `strings_Replace(s string, old string, new string, n int)` | Replace first `n` occurrences of `old` with `new`. Use `-1` for all | `{{ strings_Replace "hello" "l" "L" -1 }}` → `heLLo` |
|
||||||
|
| `strings_ToDate(date string)` | Parse date string (YYYY-MM-DD format) | `{{ strings_ToDate "2024-01-15" }}` |
|
||||||
|
| `strings_First(s string, n int)` | Get first `n` characters | `{{ strings_First "hello" 2 }}` → `he` |
|
||||||
|
| `strings_Last(s string, n int)` | Get last `n` characters | `{{ strings_Last "hello" 2 }}` → `lo` |
|
||||||
|
| `strings_Truncate(s string, n int)` | Truncate to `n` characters with ellipsis | `{{ strings_Truncate "hello world" 5 }}` → `hello...` |
|
||||||
|
| `strings_TrimPrefix(s string, prefix string)` | Remove prefix from string | `{{ strings_TrimPrefix "hello" "he" }}` → `llo` |
|
||||||
|
| `strings_TrimSuffix(s string, suffix string)` | Remove suffix from string | `{{ strings_TrimSuffix "hello" "lo" }}` → `hel` |
|
||||||
|
|
||||||
|
### Collection Functions
|
||||||
|
|
||||||
|
| Function | Description | Example |
|
||||||
|
| ---------------------------------------- | --------------------------------------------- | -------------------------------------------------------- |
|
||||||
|
| `dict_Str(pairs ...string)` | Create string dictionary from key-value pairs | `{{ dict_Str "key1" "val1" "key2" "val2" }}` |
|
||||||
|
| `slice_Str(values ...string)` | Create string slice | `{{ slice_Str "a" "b" "c" }}` |
|
||||||
|
| `slice_Join(slice []string, sep string)` | Join string slice with separator | `{{ slice_Join (slice_Str "a" "b" "c") "-" }}` → `a-b-c` |
|
||||||
|
| `slice_Int(values ...int)` | Create int slice | `{{ slice_Int 1 2 3 }}` |
|
||||||
|
| `slice_Uint(values ...uint)` | Create uint slice | `{{ slice_Uint 1 2 3 }}` |
|
||||||
|
|
||||||
|
### Body Functions
|
||||||
|
|
||||||
|
| Function | Description | Example |
|
||||||
|
| -------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------- |
|
||||||
|
| `body_FormData(pairs ...string)` | Create multipart form data from key-value pairs. Automatically sets the `Content-Type` header. Values starting with `@` are treated as file references (local path or URL). Use `@@` to escape literal `@`. | `{{ body_FormData "field1" "value1" "file" "@/path/to/file.pdf" }}` |
|
||||||
|
|
||||||
|
**`body_FormData` Details:**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# Text fields only
|
||||||
|
body: '{{ body_FormData "username" "john" "email" "john@example.com" }}'
|
||||||
|
|
||||||
|
# Single file upload
|
||||||
|
body: '{{ body_FormData "document" "@/path/to/file.pdf" }}'
|
||||||
|
|
||||||
|
# File from URL
|
||||||
|
body: '{{ body_FormData "image" "@https://example.com/photo.jpg" }}'
|
||||||
|
|
||||||
|
# Mixed text fields and files
|
||||||
|
body: |
|
||||||
|
{{ body_FormData
|
||||||
|
"title" "My Report"
|
||||||
|
"author" "John Doe"
|
||||||
|
"cover" "@/path/to/cover.jpg"
|
||||||
|
"document" "@/path/to/report.pdf"
|
||||||
|
}}
|
||||||
|
|
||||||
|
# Multiple files with same field name
|
||||||
|
body: |
|
||||||
|
{{ body_FormData
|
||||||
|
"files" "@/path/to/file1.pdf"
|
||||||
|
"files" "@/path/to/file2.pdf"
|
||||||
|
}}
|
||||||
|
|
||||||
|
# Escape @ for literal value (sends "@username")
|
||||||
|
body: '{{ body_FormData "twitter" "@@username" }}'
|
||||||
|
```
|
||||||
|
|
||||||
|
> **Note:** Files are cached in memory after the first read. Subsequent requests reuse the cached content, avoiding repeated disk/network I/O.
|
||||||
|
|
||||||
|
### File Functions
|
||||||
|
|
||||||
|
| Function | Description | Example |
|
||||||
|
| ---------------------------- | --------------------------------------------------------------------------------------------------------- | --------------------------------------- |
|
||||||
|
| `file_Base64(source string)` | Read a file (local path or URL) and return its Base64 encoded content. Files are cached after first read. | `{{ file_Base64 "/path/to/file.pdf" }}` |
|
||||||
|
|
||||||
|
**`file_Base64` Details:**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# Local file as Base64 in JSON body
|
||||||
|
body: '{"file": "{{ file_Base64 "/path/to/document.pdf" }}", "filename": "document.pdf"}'
|
||||||
|
|
||||||
|
# Remote file as Base64
|
||||||
|
body: '{"image": "{{ file_Base64 "https://example.com/photo.jpg" }}"}'
|
||||||
|
|
||||||
|
# Combined with values for reuse
|
||||||
|
values: "FILE_DATA={{ file_Base64 \"/path/to/file.bin\" }}"
|
||||||
|
body: '{"data": "{{ .Values.FILE_DATA }}"}'
|
||||||
|
```
|
||||||
|
|
||||||
|
## Fake Data Functions
|
||||||
|
|
||||||
|
These functions are powered by [gofakeit](https://github.com/brianvoe/gofakeit) library.
|
||||||
|
|
||||||
|
### File
|
||||||
|
|
||||||
|
| Function | Description | Example Output |
|
||||||
|
| ---------------------- | -------------- | -------------------- |
|
||||||
|
| `fakeit_FileExtension` | File extension | `"nes"` |
|
||||||
|
| `fakeit_FileMimeType` | MIME type | `"application/json"` |
|
||||||
|
|
||||||
|
### ID

| Function      | Description                       | Example Output                           |
| ------------- | --------------------------------- | ---------------------------------------- |
| `fakeit_ID`   | Generate random unique identifier | `"pfsfktb87rcmj6bqha2fz9"`               |
| `fakeit_UUID` | Generate UUID v4                  | `"b4ddf623-4ea6-48e5-9292-541f028d1fdb"` |

### Product

| Function                    | Description         | Example Output                    |
| --------------------------- | ------------------- | --------------------------------- |
| `fakeit_ProductName`        | Product name        | `"olive copper monitor"`          |
| `fakeit_ProductDescription` | Product description | `"Backwards caused quarterly..."` |
| `fakeit_ProductCategory`    | Product category    | `"clothing"`                      |
| `fakeit_ProductFeature`     | Product feature     | `"ultra-lightweight"`             |
| `fakeit_ProductMaterial`    | Product material    | `"brass"`                         |
| `fakeit_ProductUPC`         | UPC code            | `"012780949980"`                  |
| `fakeit_ProductAudience`    | Target audience     | `["adults"]`                      |
| `fakeit_ProductDimension`   | Product dimension   | `"medium"`                        |
| `fakeit_ProductUseCase`     | Use case            | `"home"`                          |
| `fakeit_ProductBenefit`     | Product benefit     | `"comfort"`                       |
| `fakeit_ProductSuffix`      | Product suffix      | `"pro"`                           |
| `fakeit_ProductISBN`        | ISBN number         | `"978-1-4028-9462-6"`             |

### Person

| Function                | Description            | Example Output           |
| ----------------------- | ---------------------- | ------------------------ |
| `fakeit_Name`           | Full name              | `"Markus Moen"`          |
| `fakeit_NamePrefix`     | Name prefix            | `"Mr."`                  |
| `fakeit_NameSuffix`     | Name suffix            | `"Jr."`                  |
| `fakeit_FirstName`      | First name             | `"Markus"`               |
| `fakeit_MiddleName`     | Middle name            | `"Belinda"`              |
| `fakeit_LastName`       | Last name              | `"Daniel"`               |
| `fakeit_Gender`         | Gender                 | `"male"`                 |
| `fakeit_Age`            | Age                    | `40`                     |
| `fakeit_Ethnicity`      | Ethnicity              | `"German"`               |
| `fakeit_SSN`            | Social Security Number | `"296446360"`            |
| `fakeit_EIN`            | Employer ID Number     | `"12-3456789"`           |
| `fakeit_Hobby`          | Hobby                  | `"Swimming"`             |
| `fakeit_Email`          | Email address          | `"markusmoen@pagac.net"` |
| `fakeit_Phone`          | Phone number           | `"6136459948"`           |
| `fakeit_PhoneFormatted` | Formatted phone        | `"136-459-9489"`         |

### Generate

| Function                       | Description                            | Example                                                |
| ------------------------------ | -------------------------------------- | ------------------------------------------------------ |
| `fakeit_Regex(pattern string)` | Generate string matching regex pattern | `{{ fakeit_Regex "[a-z]{5}[0-9]{3}" }}` → `"abcde123"` |

### Auth

| Function                                                                                      | Description                                                 | Example                                               |
| --------------------------------------------------------------------------------------------- | ----------------------------------------------------------- | ----------------------------------------------------- |
| `fakeit_Username`                                                                             | Username                                                    | `"Daniel1364"`                                        |
| `fakeit_Password(upper bool, lower bool, numeric bool, special bool, space bool, length int)` | Generate password with specified character types and length | `{{ fakeit_Password true true true false false 16 }}` |

### Address

| Function                                            | Description                  | Example Output                                      |
| --------------------------------------------------- | ---------------------------- | --------------------------------------------------- |
| `fakeit_City`                                       | City name                    | `"Marcelside"`                                      |
| `fakeit_Country`                                    | Country name                 | `"United States of America"`                        |
| `fakeit_CountryAbr`                                 | Country abbreviation         | `"US"`                                              |
| `fakeit_State`                                      | State name                   | `"Illinois"`                                        |
| `fakeit_StateAbr`                                   | State abbreviation           | `"IL"`                                              |
| `fakeit_Street`                                     | Full street                  | `"364 East Rapidsborough"`                          |
| `fakeit_StreetName`                                 | Street name                  | `"View"`                                            |
| `fakeit_StreetNumber`                               | Street number                | `"13645"`                                           |
| `fakeit_StreetPrefix`                               | Street prefix                | `"East"`                                            |
| `fakeit_StreetSuffix`                               | Street suffix                | `"Ave"`                                             |
| `fakeit_Unit`                                       | Unit                         | `"Apt 123"`                                         |
| `fakeit_Zip`                                        | ZIP code                     | `"13645"`                                           |
| `fakeit_Latitude`                                   | Random latitude              | `-73.534056`                                        |
| `fakeit_Longitude`                                  | Random longitude             | `-147.068112`                                       |
| `fakeit_LatitudeInRange(min float64, max float64)`  | Latitude in specified range  | `{{ fakeit_LatitudeInRange 0 90 }}` → `22.921026`   |
| `fakeit_LongitudeInRange(min float64, max float64)` | Longitude in specified range | `{{ fakeit_LongitudeInRange 0 180 }}` → `-8.170450` |

### Game

| Function          | Description | Example Output      |
| ----------------- | ----------- | ------------------- |
| `fakeit_Gamertag` | Gamer tag   | `"footinterpret63"` |

### Beer

| Function             | Description     | Example Output                |
| -------------------- | --------------- | ----------------------------- |
| `fakeit_BeerAlcohol` | Alcohol content | `"2.7%"`                      |
| `fakeit_BeerBlg`     | Blg             | `"6.4°Blg"`                   |
| `fakeit_BeerHop`     | Hop             | `"Glacier"`                   |
| `fakeit_BeerIbu`     | IBU             | `"29 IBU"`                    |
| `fakeit_BeerMalt`    | Malt            | `"Munich"`                    |
| `fakeit_BeerName`    | Beer name       | `"Duvel"`                     |
| `fakeit_BeerStyle`   | Beer style      | `"European Amber Lager"`      |
| `fakeit_BeerYeast`   | Yeast           | `"1388 - Belgian Strong Ale"` |

### Car

| Function                     | Description  | Example Output         |
| ---------------------------- | ------------ | ---------------------- |
| `fakeit_CarMaker`            | Car maker    | `"Nissan"`             |
| `fakeit_CarModel`            | Car model    | `"Aveo"`               |
| `fakeit_CarType`             | Car type     | `"Passenger car mini"` |
| `fakeit_CarFuelType`         | Fuel type    | `"CNG"`                |
| `fakeit_CarTransmissionType` | Transmission | `"Manual"`             |

### Words

| Function                           | Description                 | Example Output   |
| ---------------------------------- | --------------------------- | ---------------- |
| `fakeit_Word`                      | Random word                 | `"example"`      |
| `fakeit_Noun`                      | Random noun                 | `"computer"`     |
| `fakeit_NounCommon`                | Common noun                 | `"table"`        |
| `fakeit_NounConcrete`              | Concrete noun               | `"chair"`        |
| `fakeit_NounAbstract`              | Abstract noun               | `"freedom"`      |
| `fakeit_NounCollectivePeople`      | Collective noun (people)    | `"team"`         |
| `fakeit_NounCollectiveAnimal`      | Collective noun (animal)    | `"herd"`         |
| `fakeit_NounCollectiveThing`       | Collective noun (thing)     | `"bunch"`        |
| `fakeit_NounCountable`             | Countable noun              | `"book"`         |
| `fakeit_NounUncountable`           | Uncountable noun            | `"water"`        |
| `fakeit_Verb`                      | Random verb                 | `"run"`          |
| `fakeit_VerbAction`                | Action verb                 | `"jump"`         |
| `fakeit_VerbLinking`               | Linking verb                | `"is"`           |
| `fakeit_VerbHelping`               | Helping verb                | `"can"`          |
| `fakeit_Adverb`                    | Random adverb               | `"quickly"`      |
| `fakeit_AdverbManner`              | Manner adverb               | `"carefully"`    |
| `fakeit_AdverbDegree`              | Degree adverb               | `"very"`         |
| `fakeit_AdverbPlace`               | Place adverb                | `"here"`         |
| `fakeit_AdverbTimeDefinite`        | Definite time adverb        | `"yesterday"`    |
| `fakeit_AdverbTimeIndefinite`      | Indefinite time adverb      | `"soon"`         |
| `fakeit_AdverbFrequencyDefinite`   | Definite frequency adverb   | `"daily"`        |
| `fakeit_AdverbFrequencyIndefinite` | Indefinite frequency adverb | `"often"`        |
| `fakeit_Preposition`               | Random preposition          | `"on"`           |
| `fakeit_PrepositionSimple`         | Simple preposition          | `"in"`           |
| `fakeit_PrepositionDouble`         | Double preposition          | `"out of"`       |
| `fakeit_PrepositionCompound`       | Compound preposition        | `"according to"` |
| `fakeit_Adjective`                 | Random adjective            | `"beautiful"`    |
| `fakeit_AdjectiveDescriptive`      | Descriptive adjective       | `"large"`        |
| `fakeit_AdjectiveQuantitative`     | Quantitative adjective      | `"many"`         |
| `fakeit_AdjectiveProper`           | Proper adjective            | `"American"`     |
| `fakeit_AdjectiveDemonstrative`    | Demonstrative adjective     | `"this"`         |
| `fakeit_AdjectivePossessive`       | Possessive adjective        | `"my"`           |
| `fakeit_AdjectiveInterrogative`    | Interrogative adjective     | `"which"`        |
| `fakeit_AdjectiveIndefinite`       | Indefinite adjective        | `"some"`         |
| `fakeit_Pronoun`                   | Random pronoun              | `"he"`           |
| `fakeit_PronounPersonal`           | Personal pronoun            | `"I"`            |
| `fakeit_PronounObject`             | Object pronoun              | `"him"`          |
| `fakeit_PronounPossessive`         | Possessive pronoun          | `"mine"`         |
| `fakeit_PronounReflective`         | Reflective pronoun          | `"myself"`       |
| `fakeit_PronounDemonstrative`      | Demonstrative pronoun       | `"that"`         |
| `fakeit_PronounInterrogative`      | Interrogative pronoun       | `"who"`          |
| `fakeit_PronounRelative`           | Relative pronoun            | `"which"`        |
| `fakeit_Connective`                | Random connective           | `"however"`      |
| `fakeit_ConnectiveTime`            | Time connective             | `"then"`         |
| `fakeit_ConnectiveComparative`     | Comparative connective      | `"similarly"`    |
| `fakeit_ConnectiveComplaint`       | Complaint connective        | `"although"`     |
| `fakeit_ConnectiveListing`         | Listing connective          | `"firstly"`      |
| `fakeit_ConnectiveCasual`          | Casual connective           | `"because"`      |
| `fakeit_ConnectiveExamplify`       | Examplify connective        | `"for example"`  |

### Text

| Function                                                                                 | Description                                     | Example                                       |
| ---------------------------------------------------------------------------------------- | ----------------------------------------------- | --------------------------------------------- |
| `fakeit_Sentence`                                                                        | Random sentence                                 | `{{ fakeit_Sentence }}`                       |
| `fakeit_Paragraph`                                                                       | Random paragraph                                | `{{ fakeit_Paragraph }}`                      |
| `fakeit_LoremIpsumWord`                                                                  | Lorem ipsum word                                | `"lorem"`                                     |
| `fakeit_LoremIpsumSentence(wordCount int)`                                               | Lorem ipsum sentence with specified word count  | `{{ fakeit_LoremIpsumSentence 5 }}`           |
| `fakeit_LoremIpsumParagraph(paragraphs int, sentences int, words int, separator string)` | Lorem ipsum paragraphs with specified structure | `{{ fakeit_LoremIpsumParagraph 1 3 5 "\n" }}` |
| `fakeit_Question`                                                                        | Random question                                 | `"What is your name?"`                        |
| `fakeit_Quote`                                                                           | Random quote                                    | `"Life is what happens..."`                   |
| `fakeit_Phrase`                                                                          | Random phrase                                   | `"a piece of cake"`                           |

### Foods

| Function           | Description    | Example Output                           |
| ------------------ | -------------- | ---------------------------------------- |
| `fakeit_Fruit`     | Fruit          | `"Peach"`                                |
| `fakeit_Vegetable` | Vegetable      | `"Amaranth Leaves"`                      |
| `fakeit_Breakfast` | Breakfast food | `"Blueberry banana happy face pancakes"` |
| `fakeit_Lunch`     | Lunch food     | `"No bake hersheys bar pie"`             |
| `fakeit_Dinner`    | Dinner food    | `"Wild addicting dip"`                   |
| `fakeit_Snack`     | Snack          | `"Trail mix"`                            |
| `fakeit_Dessert`   | Dessert        | `"French napoleons"`                     |

### Misc

| Function           | Description    | Example Output |
| ------------------ | -------------- | -------------- |
| `fakeit_Bool`      | Random boolean | `true`         |
| `fakeit_FlipACoin` | Flip a coin    | `"Heads"`      |

### Color

| Function            | Description        | Example Output                                            |
| ------------------- | ------------------ | --------------------------------------------------------- |
| `fakeit_Color`      | Color name         | `"MediumOrchid"`                                          |
| `fakeit_HexColor`   | Hex color          | `"#a99fb4"`                                               |
| `fakeit_RGBColor`   | RGB color          | `[85, 224, 195]`                                          |
| `fakeit_SafeColor`  | Safe color         | `"black"`                                                 |
| `fakeit_NiceColors` | Nice color palette | `["#cfffdd", "#b4dec1", "#5c5863", "#a85163", "#ff1f4c"]` |

### Image

| Function                                  | Description               | Example                          |
| ----------------------------------------- | ------------------------- | -------------------------------- |
| `fakeit_ImageJpeg(width int, height int)` | Generate JPEG image bytes | `{{ fakeit_ImageJpeg 100 100 }}` |
| `fakeit_ImagePng(width int, height int)`  | Generate PNG image bytes  | `{{ fakeit_ImagePng 100 100 }}`  |

### Internet

| Function                          | Description                                | Example Output                                        |
| --------------------------------- | ------------------------------------------ | ----------------------------------------------------- |
| `fakeit_URL`                      | Random URL                                 | `"http://www.principalproductize.biz/target"`         |
| `fakeit_UrlSlug(words int)`       | URL slug with specified word count         | `{{ fakeit_UrlSlug 3 }}` → `"bathe-regularly-quiver"` |
| `fakeit_DomainName`               | Domain name                                | `"centraltarget.biz"`                                 |
| `fakeit_DomainSuffix`             | Domain suffix                              | `"org"`                                               |
| `fakeit_IPv4Address`              | IPv4 address                               | `"222.83.191.222"`                                    |
| `fakeit_IPv6Address`              | IPv6 address                               | `"2001:cafe:8898:ee17:bc35:9064:5866:d019"`           |
| `fakeit_MacAddress`               | MAC address                                | `"cb:ce:06:94:22:e9"`                                 |
| `fakeit_HTTPStatusCode`           | HTTP status code                           | `200`                                                 |
| `fakeit_HTTPStatusCodeSimple`     | Simple status code                         | `404`                                                 |
| `fakeit_LogLevel(logType string)` | Log level (types: general, syslog, apache) | `{{ fakeit_LogLevel "general" }}` → `"error"`         |
| `fakeit_HTTPMethod`               | HTTP method                                | `"HEAD"`                                              |
| `fakeit_HTTPVersion`              | HTTP version                               | `"HTTP/1.1"`                                          |
| `fakeit_UserAgent`                | Random User-Agent                          | `"Mozilla/5.0..."`                                    |
| `fakeit_ChromeUserAgent`          | Chrome User-Agent                          | `"Mozilla/5.0 (X11; Linux i686)..."`                  |
| `fakeit_FirefoxUserAgent`         | Firefox User-Agent                         | `"Mozilla/5.0 (Macintosh; U;..."`                     |
| `fakeit_OperaUserAgent`           | Opera User-Agent                           | `"Opera/8.39..."`                                     |
| `fakeit_SafariUserAgent`          | Safari User-Agent                          | `"Mozilla/5.0 (iPad;..."`                             |
| `fakeit_APIUserAgent`             | API User-Agent                             | `"curl/8.2.5"`                                        |

### HTML

| Function           | Description     | Example Output     |
| ------------------ | --------------- | ------------------ |
| `fakeit_InputName` | HTML input name | `"email"`          |
| `fakeit_Svg`       | SVG image       | `"<svg>...</svg>"` |

### Date/Time

| Function                                           | Description                       | Example                                                                              |
| -------------------------------------------------- | --------------------------------- | ------------------------------------------------------------------------------------ |
| `fakeit_Date`                                      | Random date                       | `2023-06-15 14:30:00`                                                                |
| `fakeit_PastDate`                                  | Past date                         | `2022-03-10 09:15:00`                                                                |
| `fakeit_FutureDate`                                | Future date                       | `2025-12-20 18:45:00`                                                                |
| `fakeit_DateRange(start time.Time, end time.Time)` | Random date between start and end | `{{ fakeit_DateRange (strings_ToDate "2020-01-01") (strings_ToDate "2025-12-31") }}` |
| `fakeit_NanoSecond`                                | Nanosecond                        | `123456789`                                                                          |
| `fakeit_Second`                                    | Second (0-59)                     | `45`                                                                                 |
| `fakeit_Minute`                                    | Minute (0-59)                     | `30`                                                                                 |
| `fakeit_Hour`                                      | Hour (0-23)                       | `14`                                                                                 |
| `fakeit_Month`                                     | Month (1-12)                      | `6`                                                                                  |
| `fakeit_MonthString`                               | Month name                        | `"June"`                                                                             |
| `fakeit_Day`                                       | Day (1-31)                        | `15`                                                                                 |
| `fakeit_WeekDay`                                   | Weekday                           | `"Monday"`                                                                           |
| `fakeit_Year`                                      | Year                              | `2024`                                                                               |
| `fakeit_TimeZone`                                  | Timezone                          | `"America/New_York"`                                                                 |
| `fakeit_TimeZoneAbv`                               | Timezone abbreviation             | `"EST"`                                                                              |
| `fakeit_TimeZoneFull`                              | Full timezone                     | `"Eastern Standard Time"`                                                            |
| `fakeit_TimeZoneOffset`                            | Timezone offset                   | `-5`                                                                                 |
| `fakeit_TimeZoneRegion`                            | Timezone region                   | `"America"`                                                                          |

### Payment

| Function                                 | Description                                           | Example                                                        |
| ---------------------------------------- | ----------------------------------------------------- | -------------------------------------------------------------- |
| `fakeit_Price(min float64, max float64)` | Random price in range                                 | `{{ fakeit_Price 1 100 }}` → `92.26`                           |
| `fakeit_CreditCardCvv`                   | CVV                                                   | `"513"`                                                        |
| `fakeit_CreditCardExp`                   | Expiration date                                       | `"01/27"`                                                      |
| `fakeit_CreditCardNumber(gaps bool)`     | Credit card number. `gaps`: add spaces between groups | `{{ fakeit_CreditCardNumber true }}` → `"4111 1111 1111 1111"` |
| `fakeit_CreditCardType`                  | Card type                                             | `"Visa"`                                                       |
| `fakeit_CurrencyLong`                    | Currency name                                         | `"United States Dollar"`                                       |
| `fakeit_CurrencyShort`                   | Currency code                                         | `"USD"`                                                        |
| `fakeit_AchRouting`                      | ACH routing number                                    | `"513715684"`                                                  |
| `fakeit_AchAccount`                      | ACH account number                                    | `"491527954328"`                                               |
| `fakeit_BitcoinAddress`                  | Bitcoin address                                       | `"1BoatSLRHtKNngkdXEeobR76b53LETtpyT"`                         |
| `fakeit_BitcoinPrivateKey`               | Bitcoin private key                                   | `"5HueCGU8rMjxEXxiPuD5BDuG6o5xjA7QkbPp"`                       |
| `fakeit_BankName`                        | Bank name                                             | `"Wells Fargo"`                                                |
| `fakeit_BankType`                        | Bank type                                             | `"Investment Bank"`                                            |

### Finance

| Function       | Description      | Example Output   |
| -------------- | ---------------- | ---------------- |
| `fakeit_Cusip` | CUSIP identifier | `"38259P508"`    |
| `fakeit_Isin`  | ISIN identifier  | `"US38259P5089"` |

### Company

| Function               | Description    | Example Output                             |
| ---------------------- | -------------- | ------------------------------------------ |
| `fakeit_BS`            | Business speak | `"front-end"`                              |
| `fakeit_Blurb`         | Company blurb  | `"word"`                                   |
| `fakeit_BuzzWord`      | Buzzword       | `"disintermediate"`                        |
| `fakeit_Company`       | Company name   | `"Moen, Pagac and Wuckert"`                |
| `fakeit_CompanySuffix` | Company suffix | `"Inc"`                                    |
| `fakeit_JobDescriptor` | Job descriptor | `"Central"`                                |
| `fakeit_JobLevel`      | Job level      | `"Assurance"`                              |
| `fakeit_JobTitle`      | Job title      | `"Director"`                               |
| `fakeit_Slogan`        | Company slogan | `"Universal seamless Focus, interactive."` |

### Hacker

| Function                    | Description         | Example Output                                                                                |
| --------------------------- | ------------------- | --------------------------------------------------------------------------------------------- |
| `fakeit_HackerAbbreviation` | Hacker abbreviation | `"ADP"`                                                                                       |
| `fakeit_HackerAdjective`    | Hacker adjective    | `"wireless"`                                                                                  |
| `fakeit_HackeringVerb`      | Hackering verb      | `"connecting"`                                                                                |
| `fakeit_HackerNoun`         | Hacker noun         | `"driver"`                                                                                    |
| `fakeit_HackerPhrase`       | Hacker phrase       | `"If we calculate the program, we can get to the AI pixel through the redundant XSS matrix!"` |
| `fakeit_HackerVerb`         | Hacker verb         | `"synthesize"`                                                                                |

### Hipster

| Function                  | Description       | Example                                                             |
| ------------------------- | ----------------- | ------------------------------------------------------------------- |
| `fakeit_HipsterWord`      | Hipster word      | `"microdosing"`                                                     |
| `fakeit_HipsterSentence`  | Hipster sentence  | `"Soul loops with you probably haven't heard of them undertones."`  |
| `fakeit_HipsterParagraph` | Hipster paragraph | `"Single-origin austin, double why. Tag it Yuccie, keep it any..."` |

### App

| Function            | Description | Example Output        |
| ------------------- | ----------- | --------------------- |
| `fakeit_AppName`    | App name    | `"Parkrespond"`       |
| `fakeit_AppVersion` | App version | `"1.12.14"`           |
| `fakeit_AppAuthor`  | App author  | `"Qado Energy, Inc."` |

### Animal

| Function            | Description | Example Output      |
| ------------------- | ----------- | ------------------- |
| `fakeit_PetName`    | Pet name    | `"Ozzy Pawsborne"`  |
| `fakeit_Animal`     | Animal      | `"elk"`             |
| `fakeit_AnimalType` | Animal type | `"amphibians"`      |
| `fakeit_FarmAnimal` | Farm animal | `"Chicken"`         |
| `fakeit_Cat`        | Cat breed   | `"Chausie"`         |
| `fakeit_Dog`        | Dog breed   | `"Norwich Terrier"` |
| `fakeit_Bird`       | Bird        | `"goose"`           |

### Emoji

| Function                  | Description                                    | Example Output                                          |
| ------------------------- | ---------------------------------------------- | ------------------------------------------------------- |
| `fakeit_Emoji`            | Random emoji                                   | `"🤣"`                                                  |
| `fakeit_EmojiCategory`    | Emoji category                                 | `"Smileys & Emotion"`                                   |
| `fakeit_EmojiAlias`       | Emoji alias                                    | `"smile"`                                               |
| `fakeit_EmojiTag`         | Emoji tag                                      | `"happy"`                                               |
| `fakeit_EmojiFlag`        | Flag emoji                                     | `"🇺🇸"`                                                  |
| `fakeit_EmojiAnimal`      | Animal emoji                                   | `"🐱"`                                                  |
| `fakeit_EmojiFood`        | Food emoji                                     | `"🍕"`                                                  |
| `fakeit_EmojiPlant`       | Plant emoji                                    | `"🌸"`                                                  |
| `fakeit_EmojiMusic`       | Music emoji                                    | `"🎵"`                                                  |
| `fakeit_EmojiVehicle`     | Vehicle emoji                                  | `"🚗"`                                                  |
| `fakeit_EmojiSport`       | Sport emoji                                    | `"⚽"`                                                  |
| `fakeit_EmojiFace`        | Face emoji                                     | `"😊"`                                                  |
| `fakeit_EmojiHand`        | Hand emoji                                     | `"👋"`                                                  |
| `fakeit_EmojiClothing`    | Clothing emoji                                 | `"👕"`                                                  |
| `fakeit_EmojiLandmark`    | Landmark emoji                                 | `"🗽"`                                                  |
| `fakeit_EmojiElectronics` | Electronics emoji                              | `"📱"`                                                  |
| `fakeit_EmojiGame`        | Game emoji                                     | `"🎮"`                                                  |
| `fakeit_EmojiTools`       | Tools emoji                                    | `"🔧"`                                                  |
| `fakeit_EmojiWeather`     | Weather emoji                                  | `"☀️"`                                                  |
| `fakeit_EmojiJob`         | Job emoji                                      | `"👨‍💻"`                                                  |
| `fakeit_EmojiPerson`      | Person emoji                                   | `"👤"`                                                  |
| `fakeit_EmojiGesture`     | Gesture emoji                                  | `"🙌"`                                                  |
| `fakeit_EmojiCostume`     | Costume emoji                                  | `"🎃"`                                                  |
| `fakeit_EmojiSentence`    | Emoji sentence with random emojis interspersed | `"Weekends reserve time for 🖼️ Disc 🏨 golf and day."` |

### Language

| Function                      | Description           | Example Output |
| ----------------------------- | --------------------- | -------------- |
| `fakeit_Language`             | Language              | `"English"`    |
| `fakeit_LanguageAbbreviation` | Language abbreviation | `"en"`         |
| `fakeit_ProgrammingLanguage`  | Programming language  | `"Go"`         |

### Number

| Function                                        | Description                         | Example                                    |
| ----------------------------------------------- | ----------------------------------- | ------------------------------------------ |
| `fakeit_Number(min int, max int)`               | Random number in range              | `{{ fakeit_Number 1 100 }}` → `42`         |
| `fakeit_Int`                                    | Random int                          | `{{ fakeit_Int }}`                         |
| `fakeit_IntN(n int)`                            | Random int from 0 to n              | `{{ fakeit_IntN 100 }}`                    |
| `fakeit_Int8`                                   | Random int8                         | `{{ fakeit_Int8 }}`                        |
| `fakeit_Int16`                                  | Random int16                        | `{{ fakeit_Int16 }}`                       |
| `fakeit_Int32`                                  | Random int32                        | `{{ fakeit_Int32 }}`                       |
| `fakeit_Int64`                                  | Random int64                        | `{{ fakeit_Int64 }}`                       |
| `fakeit_Uint`                                   | Random uint                         | `{{ fakeit_Uint }}`                        |
| `fakeit_UintN(n uint)`                          | Random uint from 0 to n             | `{{ fakeit_UintN 100 }}`                   |
| `fakeit_Uint8`                                  | Random uint8                        | `{{ fakeit_Uint8 }}`                       |
| `fakeit_Uint16`                                 | Random uint16                       | `{{ fakeit_Uint16 }}`                      |
| `fakeit_Uint32`                                 | Random uint32                       | `{{ fakeit_Uint32 }}`                      |
| `fakeit_Uint64`                                 | Random uint64                       | `{{ fakeit_Uint64 }}`                      |
| `fakeit_Float32`                                | Random float32                      | `{{ fakeit_Float32 }}`                     |
| `fakeit_Float32Range(min float32, max float32)` | Random float32 in range             | `{{ fakeit_Float32Range 0 100 }}`          |
| `fakeit_Float64`                                | Random float64                      | `{{ fakeit_Float64 }}`                     |
| `fakeit_Float64Range(min float64, max float64)` | Random float64 in range             | `{{ fakeit_Float64Range 0 100 }}`          |
| `fakeit_RandomInt(slice []int)`                 | Random int from slice               | `{{ fakeit_RandomInt (slice_Int 1 2 3) }}` |
| `fakeit_HexUint(bits int)`                      | Random hex uint with specified bits | `{{ fakeit_HexUint 8 }}` → `"0xff"`        |

### String

| Function                              | Description                     | Example                                                         |
| ------------------------------------- | ------------------------------- | --------------------------------------------------------------- |
| `fakeit_Digit`                        | Single random digit             | `"0"`                                                           |
| `fakeit_DigitN(n uint)`               | Generate `n` random digits      | `{{ fakeit_DigitN 5 }}` → `"71364"`                             |
| `fakeit_Letter`                       | Single random letter            | `"g"`                                                           |
| `fakeit_LetterN(n uint)`              | Generate `n` random letters     | `{{ fakeit_LetterN 10 }}` → `"gbRMaRxHki"`                      |
| `fakeit_Lexify(pattern string)`       | Replace `?` with random letters | `{{ fakeit_Lexify "?????@??????.com" }}` → `"billy@mister.com"` |
| `fakeit_Numerify(pattern string)`     | Replace `#` with random digits  | `{{ fakeit_Numerify "(###)###-####" }}` → `"(555)867-5309"`     |
| `fakeit_RandomString(slice []string)` | Random string from slice        | `{{ fakeit_RandomString (slice_Str "a" "b" "c") }}`             |

### Celebrity

| Function                   | Description        | Example Output     |
| -------------------------- | ------------------ | ------------------ |
| `fakeit_CelebrityActor`    | Celebrity actor    | `"Brad Pitt"`      |
| `fakeit_CelebrityBusiness` | Celebrity business | `"Elon Musk"`      |
| `fakeit_CelebritySport`    | Celebrity sport    | `"Michael Phelps"` |

### Minecraft

| Function                          | Description       | Example Output   |
| --------------------------------- | ----------------- | ---------------- |
| `fakeit_MinecraftOre`             | Minecraft ore     | `"coal"`         |
| `fakeit_MinecraftWood`            | Minecraft wood    | `"oak"`          |
| `fakeit_MinecraftArmorTier`       | Armor tier        | `"iron"`         |
| `fakeit_MinecraftArmorPart`       | Armor part        | `"helmet"`       |
| `fakeit_MinecraftWeapon`          | Minecraft weapon  | `"bow"`          |
| `fakeit_MinecraftTool`            | Minecraft tool    | `"shovel"`       |
| `fakeit_MinecraftDye`             | Minecraft dye     | `"white"`        |
| `fakeit_MinecraftFood`            | Minecraft food    | `"apple"`        |
| `fakeit_MinecraftAnimal`          | Minecraft animal  | `"chicken"`      |
| `fakeit_MinecraftVillagerJob`     | Villager job      | `"farmer"`       |
| `fakeit_MinecraftVillagerStation` | Villager station  | `"furnace"`      |
| `fakeit_MinecraftVillagerLevel`   | Villager level    | `"master"`       |
| `fakeit_MinecraftMobPassive`      | Passive mob       | `"cow"`          |
| `fakeit_MinecraftMobNeutral`      | Neutral mob       | `"bee"`          |
| `fakeit_MinecraftMobHostile`      | Hostile mob       | `"spider"`       |
| `fakeit_MinecraftMobBoss`         | Boss mob          | `"ender dragon"` |
| `fakeit_MinecraftBiome`           | Minecraft biome   | `"forest"`       |
| `fakeit_MinecraftWeather`         | Minecraft weather | `"rain"`         |

### Book

| Function            | Description | Example Output |
| ------------------- | ----------- | -------------- |
| `fakeit_BookTitle`  | Book title  | `"Hamlet"`     |
| `fakeit_BookAuthor` | Book author | `"Mark Twain"` |
| `fakeit_BookGenre`  | Book genre  | `"Adventure"`  |

### Movie

| Function            | Description | Example Output |
| ------------------- | ----------- | -------------- |
| `fakeit_MovieName`  | Movie name  | `"Inception"`  |
| `fakeit_MovieGenre` | Movie genre | `"Sci-Fi"`     |

### Error

| Function                 | Description       | Example Output                     |
| ------------------------ | ----------------- | ---------------------------------- |
| `fakeit_Error`           | Random error      | `"connection refused"`             |
| `fakeit_ErrorDatabase`   | Database error    | `"database connection failed"`     |
| `fakeit_ErrorGRPC`       | gRPC error        | `"rpc error: code = Unavailable"`  |
| `fakeit_ErrorHTTP`       | HTTP error        | `"HTTP 500 Internal Server Error"` |
| `fakeit_ErrorHTTPClient` | HTTP client error | `"HTTP 404 Not Found"`             |
| `fakeit_ErrorHTTPServer` | HTTP server error | `"HTTP 503 Service Unavailable"`   |
| `fakeit_ErrorRuntime`    | Runtime error     | `"panic: runtime error"`           |

### School

| Function        | Description | Example Output         |
| --------------- | ----------- | ---------------------- |
| `fakeit_School` | School name | `"Harvard University"` |

### Song

| Function            | Description | Example Output        |
| ------------------- | ----------- | --------------------- |
| `fakeit_SongName`   | Song name   | `"Bohemian Rhapsody"` |
| `fakeit_SongArtist` | Song artist | `"Queen"`             |
| `fakeit_SongGenre`  | Song genre  | `"Rock"`              |
#### go.mod (62 lines changed)

```diff
--- a/go.mod
+++ b/go.mod
@@ -1,21 +1,59 @@
-module github.com/aykhans/dodo
+module go.aykhans.me/sarin
 
-go 1.24.0
+go 1.26.0
 
 require (
-	github.com/jedib0t/go-pretty/v6 v6.6.7
-	github.com/valyala/fasthttp v1.59.0
-	gopkg.in/yaml.v3 v3.0.1
+	github.com/brianvoe/gofakeit/v7 v7.14.0
+	github.com/charmbracelet/bubbles v1.0.0
+	github.com/charmbracelet/bubbletea v1.3.10
+	github.com/charmbracelet/glamour v0.10.0
+	github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834
+	github.com/charmbracelet/x/term v0.2.2
+	github.com/dop251/goja v0.0.0-20260106131823-651366fbe6e3
+	github.com/joho/godotenv v1.5.1
+	github.com/valyala/fasthttp v1.69.0
+	github.com/yuin/gopher-lua v1.1.1
+	go.aykhans.me/utils v1.0.7
+	go.yaml.in/yaml/v4 v4.0.0-rc.3
+	golang.org/x/net v0.50.0
 )
 
 require (
-	github.com/andybalholm/brotli v1.1.1 // indirect
-	github.com/klauspost/compress v1.17.11 // indirect
-	github.com/mattn/go-runewidth v0.0.16 // indirect
+	github.com/alecthomas/chroma/v2 v2.21.1 // indirect
+	github.com/andybalholm/brotli v1.2.0 // indirect
+	github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
+	github.com/aymerick/douceur v0.2.0 // indirect
+	github.com/charmbracelet/colorprofile v0.4.1 // indirect
+	github.com/charmbracelet/harmonica v0.2.0 // indirect
+	github.com/charmbracelet/x/ansi v0.11.6 // indirect
+	github.com/charmbracelet/x/cellbuf v0.0.15 // indirect
+	github.com/charmbracelet/x/exp/slice v0.0.0-20260109001716-2fbdffcb221f // indirect
+	github.com/clipperhouse/displaywidth v0.9.0 // indirect
+	github.com/clipperhouse/stringish v0.1.1 // indirect
+	github.com/clipperhouse/uax29/v2 v2.5.0 // indirect
+	github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
+	github.com/dlclark/regexp2 v1.11.5 // indirect
+	github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
+	github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect
+	github.com/google/pprof v0.0.0-20230207041349-798e818bf904 // indirect
+	github.com/gorilla/css v1.0.1 // indirect
+	github.com/klauspost/compress v1.18.2 // indirect
+	github.com/lucasb-eyer/go-colorful v1.3.0 // indirect
+	github.com/mattn/go-isatty v0.0.20 // indirect
+	github.com/mattn/go-localereader v0.0.1 // indirect
+	github.com/mattn/go-runewidth v0.0.19 // indirect
+	github.com/microcosm-cc/bluemonday v1.0.27 // indirect
+	github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect
+	github.com/muesli/cancelreader v0.2.2 // indirect
+	github.com/muesli/reflow v0.3.0 // indirect
+	github.com/muesli/termenv v0.16.0 // indirect
+	github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
 	github.com/rivo/uniseg v0.4.7 // indirect
 	github.com/valyala/bytebufferpool v1.0.0 // indirect
-	golang.org/x/net v0.36.0 // indirect
-	golang.org/x/sys v0.30.0 // indirect
-	golang.org/x/term v0.29.0 // indirect
-	golang.org/x/text v0.22.0 // indirect
+	github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
+	github.com/yuin/goldmark v1.7.16 // indirect
+	github.com/yuin/goldmark-emoji v1.0.6 // indirect
+	golang.org/x/sys v0.41.0 // indirect
+	golang.org/x/term v0.40.0 // indirect
+	golang.org/x/text v0.34.0 // indirect
 )
```
144
go.sum
144
go.sum
@@ -1,35 +1,127 @@
|
|||||||
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
|
github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0=
|
||||||
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
|
github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
|
||||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0=
|
||||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
|
||||||
github.com/jedib0t/go-pretty/v6 v6.6.7 h1:m+LbHpm0aIAPLzLbMfn8dc3Ht8MW7lsSO4MPItz/Uuo=
|
github.com/alecthomas/chroma/v2 v2.21.1 h1:FaSDrp6N+3pphkNKU6HPCiYLgm8dbe5UXIXcoBhZSWA=
|
||||||
github.com/jedib0t/go-pretty/v6 v6.6.7/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU=
|
github.com/alecthomas/chroma/v2 v2.21.1/go.mod h1:NqVhfBR0lte5Ouh3DcthuUCTUpDC9cxBOfyMbMQPs3o=
|
||||||
github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
|
github.com/alecthomas/repr v0.5.2 h1:SU73FTI9D1P5UNtvseffFSGmdNci/O6RsqzeXJtP0Qs=
|
||||||
github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
|
github.com/alecthomas/repr v0.5.2/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
|
||||||
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
|
github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
|
||||||
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
|
||||||
|
github.com/aymanbagabas/go-udiff v0.3.1 h1:LV+qyBQ2pqe0u42ZsUEtPiCaUoqgA9gYRDs3vj1nolY=
|
||||||
|
github.com/aymanbagabas/go-udiff v0.3.1/go.mod h1:G0fsKmG+P6ylD0r6N/KgQD/nWzgfnl8ZBcNLgcbrw8E=
|
||||||
|
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
|
||||||
|
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
|
||||||
|
github.com/brianvoe/gofakeit/v7 v7.14.0 h1:R8tmT/rTDJmD2ngpqBL9rAKydiL7Qr2u3CXPqRt59pk=
|
||||||
|
github.com/brianvoe/gofakeit/v7 v7.14.0/go.mod h1:QXuPeBw164PJCzCUZVmgpgHJ3Llj49jSLVkKPMtxtxA=
|
||||||
|
github.com/charmbracelet/bubbles v1.0.0 h1:12J8/ak/uCZEMQ6KU7pcfwceyjLlWsDLAxB5fXonfvc=
|
||||||
|
github.com/charmbracelet/bubbles v1.0.0/go.mod h1:9d/Zd5GdnauMI5ivUIVisuEm3ave1XwXtD1ckyV6r3E=
|
||||||
|
github.com/charmbracelet/bubbletea v1.3.10 h1:otUDHWMMzQSB0Pkc87rm691KZ3SWa4KUlvF9nRvCICw=
|
||||||
|
github.com/charmbracelet/bubbletea v1.3.10/go.mod h1:ORQfo0fk8U+po9VaNvnV95UPWA1BitP1E0N6xJPlHr4=
|
||||||
|
github.com/charmbracelet/colorprofile v0.4.1 h1:a1lO03qTrSIRaK8c3JRxJDZOvhvIeSco3ej+ngLk1kk=
|
||||||
|
github.com/charmbracelet/colorprofile v0.4.1/go.mod h1:U1d9Dljmdf9DLegaJ0nGZNJvoXAhayhmidOdcBwAvKk=
|
||||||
|
github.com/charmbracelet/glamour v0.10.0 h1:MtZvfwsYCx8jEPFJm3rIBFIMZUfUJ765oX8V6kXldcY=
|
||||||
|
github.com/charmbracelet/glamour v0.10.0/go.mod h1:f+uf+I/ChNmqo087elLnVdCiVgjSKWuXa/l6NU2ndYk=
|
||||||
|
github.com/charmbracelet/harmonica v0.2.0 h1:8NxJWRWg/bzKqqEaaeFNipOu77YR5t8aSwG4pgaUBiQ=
|
||||||
|
github.com/charmbracelet/harmonica v0.2.0/go.mod h1:KSri/1RMQOZLbw7AHqgcBycp8pgJnQMYYT8QZRqZ1Ao=
|
||||||
|
github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 h1:ZR7e0ro+SZZiIZD7msJyA+NjkCNNavuiPBLgerbOziE=
|
||||||
|
github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834/go.mod h1:aKC/t2arECF6rNOnaKaVU6y4t4ZeHQzqfxedE/VkVhA=
|
||||||
|
github.com/charmbracelet/x/ansi v0.11.6 h1:GhV21SiDz/45W9AnV2R61xZMRri5NlLnl6CVF7ihZW8=
|
||||||
|
github.com/charmbracelet/x/ansi v0.11.6/go.mod h1:2JNYLgQUsyqaiLovhU2Rv/pb8r6ydXKS3NIttu3VGZQ=
|
||||||
|
github.com/charmbracelet/x/cellbuf v0.0.15 h1:ur3pZy0o6z/R7EylET877CBxaiE1Sp1GMxoFPAIztPI=
|
||||||
|
github.com/charmbracelet/x/cellbuf v0.0.15/go.mod h1:J1YVbR7MUuEGIFPCaaZ96KDl5NoS0DAWkskup+mOY+Q=
|
||||||
|
github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91 h1:payRxjMjKgx2PaCWLZ4p3ro9y97+TVLZNaRZgJwSVDQ=
|
||||||
|
github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
|
||||||
|
github.com/charmbracelet/x/exp/slice v0.0.0-20260109001716-2fbdffcb221f h1:kvAY8ffwhFuxWqtVI6+9E5vmgTApG96hswFLXJfsxHI=
|
||||||
|
github.com/charmbracelet/x/exp/slice v0.0.0-20260109001716-2fbdffcb221f/go.mod h1:vqEfX6xzqW1pKKZUUiFOKg0OQ7bCh54Q2vR/tserrRA=
|
||||||
|
github.com/charmbracelet/x/term v0.2.2 h1:xVRT/S2ZcKdhhOuSP4t5cLi5o+JxklsoEObBSgfgZRk=
|
||||||
|
github.com/charmbracelet/x/term v0.2.2/go.mod h1:kF8CY5RddLWrsgVwpw4kAa6TESp6EB5y3uxGLeCqzAI=
|
||||||
|
github.com/clipperhouse/displaywidth v0.9.0 h1:Qb4KOhYwRiN3viMv1v/3cTBlz3AcAZX3+y9OLhMtAtA=
|
||||||
|
github.com/clipperhouse/displaywidth v0.9.0/go.mod h1:aCAAqTlh4GIVkhQnJpbL0T/WfcrJXHcj8C0yjYcjOZA=
|
||||||
|
github.com/clipperhouse/stringish v0.1.1 h1:+NSqMOr3GR6k1FdRhhnXrLfztGzuG+VuFDfatpWHKCs=
|
||||||
|
github.com/clipperhouse/stringish v0.1.1/go.mod h1:v/WhFtE1q0ovMta2+m+UbpZ+2/HEXNWYXQgCt4hdOzA=
|
||||||
|
github.com/clipperhouse/uax29/v2 v2.5.0 h1:x7T0T4eTHDONxFJsL94uKNKPHrclyFI0lm7+w94cO8U=
|
||||||
|
github.com/clipperhouse/uax29/v2 v2.5.0/go.mod h1:Wn1g7MK6OoeDT0vL+Q0SQLDz/KpfsVRgg6W7ihQeh4g=
|
||||||
|
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
|
||||||
|
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ=
|
||||||
|
github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
|
||||||
|
github.com/dop251/goja v0.0.0-20260106131823-651366fbe6e3 h1:bVp3yUzvSAJzu9GqID+Z96P+eu5TKnIMJSV4QaZMauM=
|
||||||
|
github.com/dop251/goja v0.0.0-20260106131823-651366fbe6e3/go.mod h1:MxLav0peU43GgvwVgNbLAj1s/bSGboKkhuULvq/7hx4=
|
||||||
|
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4=
|
||||||
|
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM=
|
||||||
|
github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU=
|
||||||
|
github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
|
||||||
|
github.com/google/pprof v0.0.0-20230207041349-798e818bf904 h1:4/hN5RUoecvl+RmJRE2YxKWtnnQls6rQjjW5oV7qg2U=
|
||||||
|
github.com/google/pprof v0.0.0-20230207041349-798e818bf904/go.mod h1:uglQLonpP8qtYCYyzA+8c/9qtqgA3qsXGYqCPKARAFg=
|
||||||
|
github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
|
||||||
|
github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
|
||||||
|
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
|
||||||
|
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
|
||||||
|
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
|
||||||
|
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
|
||||||
|
github.com/klauspost/compress v1.18.2 h1:iiPHWW0YrcFgpBYhsA6D1+fqHssJscY/Tm/y2Uqnapk=
|
||||||
|
github.com/klauspost/compress v1.18.2/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4=
|
||||||
|
github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQwVHXptag=
|
||||||
|
github.com/lucasb-eyer/go-colorful v1.3.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
|
||||||
|
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||||
|
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||||
|
github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4=
|
||||||
|
github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
|
||||||
|
github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
|
||||||
|
github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw=
|
||||||
|
github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs=
|
||||||
|
github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk=
|
||||||
|
github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA=
|
||||||
|
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI=
|
||||||
|
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo=
|
||||||
|
github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
|
||||||
|
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
|
||||||
|
github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s=
|
||||||
|
github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8=
|
||||||
|
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
|
||||||
|
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
|
||||||
|
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
|
||||||
|
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
|
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||||
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
||||||
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
||||||
github.com/valyala/fasthttp v1.59.0 h1:Qu0qYHfXvPk1mSLNqcFtEk6DpxgA26hy6bmydotDpRI=
|
github.com/valyala/fasthttp v1.69.0 h1:fNLLESD2SooWeh2cidsuFtOcrEi4uB4m1mPrkJMZyVI=
|
||||||
github.com/valyala/fasthttp v1.59.0/go.mod h1:GTxNb9Bc6r2a9D0TWNSPwDz78UxnTGBViY3xZNEqyYU=
|
github.com/valyala/fasthttp v1.69.0/go.mod h1:4wA4PfAraPlAsJ5jMSqCE2ug5tqUPwKXxVj8oNECGcw=
|
||||||
|
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
|
||||||
|
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
|
||||||
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
|
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
|
||||||
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
|
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
|
||||||
golang.org/x/net v0.36.0 h1:vWF2fRbw4qslQsQzgFqZff+BItCvGFQqKzKIzx1rmoA=
|
github.com/yuin/goldmark v1.7.16 h1:n+CJdUxaFMiDUNnWC3dMWCIQJSkxH4uz3ZwQBkAlVNE=
|
||||||
golang.org/x/net v0.36.0/go.mod h1:bFmbeoIPfrw4sMHNhb4J9f6+tPziuGjq7Jk/38fxi1I=
|
github.com/yuin/goldmark v1.7.16/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg=
|
||||||
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
github.com/yuin/goldmark-emoji v1.0.6 h1:QWfF2FYaXwL74tfGOW5izeiZepUDroDJfWubQI9HTHs=
|
||||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
github.com/yuin/goldmark-emoji v1.0.6/go.mod h1:ukxJDKFpdFb5x0a5HqbdlcKtebh086iJpI31LTKmWuA=
|
||||||
golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU=
|
github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M=
|
||||||
golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s=
|
github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw=
|
||||||
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
|
go.aykhans.me/utils v1.0.7 h1:ClHXHlWmkjfFlD7+w5BQY29lKCEztxY/yCf543x4hZw=
|
||||||
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
|
go.aykhans.me/utils v1.0.7/go.mod h1:0Jz8GlZLN35cCHLOLx39sazWwEe33bF6SYlSeqzEXoI=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
go.yaml.in/yaml/v4 v4.0.0-rc.3 h1:3h1fjsh1CTAPjW7q/EMe+C8shx5d8ctzZTrLcs/j8Go=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
go.yaml.in/yaml/v4 v4.0.0-rc.3/go.mod h1:aZqd9kCMsGL7AuUv/m/PvWLdg5sjJsZ4oHDEnfPPfY0=
|
||||||
|
golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI=
|
||||||
|
golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo=
|
||||||
|
golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60=
|
||||||
|
golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM=
|
||||||
|
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
|
||||||
|
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||||
|
golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg=
|
||||||
|
golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM=
|
||||||
|
golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=
|
||||||
|
golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA=
|
||||||
|
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||||
|
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
|
|||||||
276
internal/config/cli.go
Normal file
276
internal/config/cli.go
Normal file
@@ -0,0 +1,276 @@
|
|||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"net/url"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"go.aykhans.me/sarin/internal/types"
|
||||||
|
versionpkg "go.aykhans.me/sarin/internal/version"
|
||||||
|
)
|
||||||
|
|
||||||
|
const cliUsageText = `Usage:
|
||||||
|
sarin [flags]
|
||||||
|
|
||||||
|
Simple usage:
|
||||||
|
sarin -U https://example.com -r 1
|
||||||
|
|
||||||
|
Flags:
|
||||||
|
General Config:
|
||||||
|
-h, -help Help for sarin
|
||||||
|
-v, -version Version for sarin
|
||||||
|
-s, -show-config bool Show the final config after parsing all sources (default %v)
|
||||||
|
-f, -config-file string Path to the config file (local file / http URL)
|
||||||
|
-c, -concurrency uint Number of concurrent requests (default %d)
|
||||||
|
-r, -requests uint Number of total requests
|
||||||
|
-d, -duration time Maximum duration for the test (e.g. 30s, 1m, 5h)
|
||||||
|
-q, -quiet bool Hide the progress bar and runtime logs (default %v)
|
||||||
|
-o, -output string Output format (possible values: table, json, yaml, none) (default '%v')
|
||||||
|
-z, -dry-run bool Run without sending requests (default %v)
|
||||||
|
|
||||||
|
Request Config:
|
||||||
|
-U, -url string Target URL for the request
|
||||||
|
-M, -method []string HTTP method for the request (default %s)
|
||||||
|
-B, -body []string Body for the request (e.g. "body text")
|
||||||
|
-P, -param []string URL parameter for the request (e.g. "key1=value1")
|
||||||
|
-H, -header []string Header for the request (e.g. "key1: value1")
|
||||||
|
-C, -cookie []string Cookie for the request (e.g. "key1=value1")
|
||||||
|
-X, -proxy []string Proxy for the request (e.g. "http://proxy.example.com:8080")
|
||||||
|
-V, -values []string List of values for templating (e.g. "key1=value1")
|
||||||
|
-T, -timeout time Timeout for the request (e.g. 400ms, 3s, 1m10s) (default %v)
|
||||||
|
-I, -insecure bool Skip SSL/TLS certificate verification (default %v)
|
||||||
|
-lua []string Lua script for request transformation (inline or @file/@url)
|
||||||
|
-js []string JavaScript script for request transformation (inline or @file/@url)`
|
||||||
|
|
||||||
|
var _ IParser = ConfigCLIParser{}
|
||||||
|
|
||||||
|
type ConfigCLIParser struct {
|
||||||
|
args []string
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewConfigCLIParser(args []string) *ConfigCLIParser {
|
||||||
|
if args == nil {
|
||||||
|
args = []string{}
|
||||||
|
}
|
||||||
|
return &ConfigCLIParser{args: args}
|
||||||
|
}
|
||||||
|
|
||||||
|
type stringSliceArg []string
|
||||||
|
|
||||||
|
func (arg *stringSliceArg) String() string {
|
||||||
|
return strings.Join(*arg, ",")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (arg *stringSliceArg) Set(value string) error {
|
||||||
|
*arg = append(*arg, value)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse parses command-line arguments into a Config object.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.CLIUnexpectedArgsError
|
||||||
|
// - types.FieldParseErrors
|
||||||
|
func (parser ConfigCLIParser) Parse() (*Config, error) {
|
||||||
|
flagSet := flag.NewFlagSet("sarin", flag.ExitOnError)
|
||||||
|
|
||||||
|
flagSet.Usage = func() { parser.PrintHelp() }
|
||||||
|
|
||||||
|
var (
|
||||||
|
config = &Config{}
|
||||||
|
|
||||||
|
// General config
|
||||||
|
version bool
|
||||||
|
showConfig bool
|
||||||
|
configFiles = stringSliceArg{}
|
||||||
|
concurrency uint
|
||||||
|
requestCount uint64
|
||||||
|
duration time.Duration
|
||||||
|
quiet bool
|
||||||
|
output string
|
||||||
|
dryRun bool
|
||||||
|
|
||||||
|
// Request config
|
||||||
|
urlInput string
|
||||||
|
methods = stringSliceArg{}
|
||||||
|
bodies = stringSliceArg{}
|
||||||
|
params = stringSliceArg{}
|
||||||
|
headers = stringSliceArg{}
|
||||||
|
cookies = stringSliceArg{}
|
||||||
|
proxies = stringSliceArg{}
|
||||||
|
values = stringSliceArg{}
|
||||||
|
timeout time.Duration
|
||||||
|
insecure bool
|
||||||
|
luaScripts = stringSliceArg{}
|
||||||
|
jsScripts = stringSliceArg{}
|
||||||
|
)
|
||||||
|
|
||||||
|
{
|
||||||
|
// General config
|
||||||
|
flagSet.BoolVar(&version, "version", false, "Version for sarin")
|
||||||
|
flagSet.BoolVar(&version, "v", false, "Version for sarin")
|
||||||
|
|
||||||
|
flagSet.BoolVar(&showConfig, "show-config", false, "Show the final config after parsing all sources")
|
||||||
|
flagSet.BoolVar(&showConfig, "s", false, "Show the final config after parsing all sources")
|
||||||
|
|
||||||
|
flagSet.Var(&configFiles, "config-file", "Path to the config file")
|
||||||
|
flagSet.Var(&configFiles, "f", "Path to the config file")
|
||||||
|
|
||||||
|
flagSet.UintVar(&concurrency, "concurrency", 0, "Number of concurrent requests")
|
||||||
|
flagSet.UintVar(&concurrency, "c", 0, "Number of concurrent requests")
|
||||||
|
|
||||||
|
flagSet.Uint64Var(&requestCount, "requests", 0, "Number of total requests")
|
||||||
|
flagSet.Uint64Var(&requestCount, "r", 0, "Number of total requests")
|
||||||
|
|
||||||
|
flagSet.DurationVar(&duration, "duration", 0, "Maximum duration for the test")
|
||||||
|
flagSet.DurationVar(&duration, "d", 0, "Maximum duration for the test")
|
||||||
|
|
||||||
|
flagSet.BoolVar(&quiet, "quiet", false, "Hide the progress bar and runtime logs")
|
||||||
|
flagSet.BoolVar(&quiet, "q", false, "Hide the progress bar and runtime logs")
|
||||||
|
|
||||||
|
flagSet.StringVar(&output, "output", "", "Output format (possible values: table, json, yaml, none)")
|
||||||
|
flagSet.StringVar(&output, "o", "", "Output format (possible values: table, json, yaml, none)")
|
||||||
|
|
||||||
|
flagSet.BoolVar(&dryRun, "dry-run", false, "Run without sending requests")
|
||||||
|
flagSet.BoolVar(&dryRun, "z", false, "Run without sending requests")
|
||||||
|
|
||||||
|
// Request config
|
||||||
|
flagSet.StringVar(&urlInput, "url", "", "Target URL for the request")
|
||||||
|
flagSet.StringVar(&urlInput, "U", "", "Target URL for the request")
|
||||||
|
|
||||||
|
flagSet.Var(&methods, "method", "HTTP method for the request")
|
||||||
|
flagSet.Var(&methods, "M", "HTTP method for the request")
|
||||||
|
|
||||||
|
flagSet.Var(&bodies, "body", "Body for the request")
|
||||||
|
flagSet.Var(&bodies, "B", "Body for the request")
|
||||||
|
|
||||||
|
flagSet.Var(¶ms, "param", "URL parameter for the request")
|
||||||
|
flagSet.Var(¶ms, "P", "URL parameter for the request")
|
||||||
|
|
||||||
|
flagSet.Var(&headers, "header", "Header for the request")
|
||||||
|
flagSet.Var(&headers, "H", "Header for the request")
|
||||||
|
|
||||||
|
flagSet.Var(&cookies, "cookie", "Cookie for the request")
|
||||||
|
flagSet.Var(&cookies, "C", "Cookie for the request")
|
||||||
|
|
||||||
|
flagSet.Var(&proxies, "proxy", "Proxy for the request")
|
||||||
|
flagSet.Var(&proxies, "X", "Proxy for the request")
|
||||||
|
|
||||||
|
flagSet.Var(&values, "values", "List of values for templating")
|
||||||
|
flagSet.Var(&values, "V", "List of values for templating")
|
||||||
|
|
||||||
|
flagSet.DurationVar(&timeout, "timeout", 0, "Timeout for the request (e.g. 400ms, 15s, 1m10s)")
|
||||||
|
flagSet.DurationVar(&timeout, "T", 0, "Timeout for the request (e.g. 400ms, 15s, 1m10s)")
|
||||||
|
|
||||||
|
flagSet.BoolVar(&insecure, "insecure", false, "Skip SSL/TLS certificate verification")
|
||||||
|
flagSet.BoolVar(&insecure, "I", false, "Skip SSL/TLS certificate verification")
|
||||||
|
|
||||||
|
flagSet.Var(&luaScripts, "lua", "Lua script for request transformation (inline or @file/@url)")
|
||||||
|
|
||||||
|
flagSet.Var(&jsScripts, "js", "JavaScript script for request transformation (inline or @file/@url)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse the specific arguments provided to the parser, skipping the program name.
|
||||||
|
if err := flagSet.Parse(parser.args[1:]); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for any unexpected non-flag arguments remaining after parsing.
|
||||||
|
if args := flagSet.Args(); len(args) > 0 {
|
||||||
|
return nil, types.NewCLIUnexpectedArgsError(args)
|
||||||
|
}
|
||||||
|
|
||||||
|
if version {
|
||||||
|
fmt.Printf("Version: %s\nGit Commit: %s\nBuild Date: %s\nGo Version: %s\n",
|
||||||
|
versionpkg.Version, versionpkg.GitCommit, versionpkg.BuildDate, versionpkg.GoVersion)
|
||||||
|
os.Exit(0)
|
||||||
|
}
|
||||||
|
|
||||||
|
var fieldParseErrors []types.FieldParseError
|
||||||
|
// Iterate over flags that were explicitly set on the command line.
|
||||||
|
flagSet.Visit(func(flagVar *flag.Flag) {
|
||||||
|
switch flagVar.Name {
|
||||||
|
// General config
|
||||||
|
case "show-config", "s":
|
||||||
|
config.ShowConfig = new(showConfig)
|
||||||
|
case "config-file", "f":
|
||||||
|
for _, configFile := range configFiles {
|
||||||
|
config.Files = append(config.Files, *types.ParseConfigFile(configFile))
|
||||||
|
}
|
||||||
|
case "concurrency", "c":
|
||||||
|
config.Concurrency = new(concurrency)
|
||||||
|
case "requests", "r":
|
||||||
|
config.Requests = new(requestCount)
|
||||||
|
case "duration", "d":
|
||||||
|
config.Duration = new(duration)
|
||||||
|
case "quiet", "q":
|
||||||
|
config.Quiet = new(quiet)
|
||||||
|
case "output", "o":
|
||||||
|
config.Output = new(ConfigOutputType(output))
|
||||||
|
case "dry-run", "z":
|
||||||
|
config.DryRun = new(dryRun)
|
||||||
|
|
||||||
|
// Request config
|
||||||
|
case "url", "U":
|
||||||
|
urlParsed, err := url.Parse(urlInput)
|
||||||
|
if err != nil {
|
||||||
|
fieldParseErrors = append(fieldParseErrors, types.NewFieldParseError("url", urlInput, err))
|
||||||
|
} else {
|
||||||
|
config.URL = urlParsed
|
||||||
|
}
|
||||||
|
case "method", "M":
|
||||||
|
config.Methods = append(config.Methods, methods...)
|
||||||
|
case "body", "B":
|
||||||
|
config.Bodies = append(config.Bodies, bodies...)
|
||||||
|
case "param", "P":
|
||||||
|
config.Params.Parse(params...)
|
||||||
|
case "header", "H":
|
||||||
|
config.Headers.Parse(headers...)
|
||||||
|
case "cookie", "C":
|
||||||
|
config.Cookies.Parse(cookies...)
|
||||||
|
case "proxy", "X":
|
||||||
|
for i, proxy := range proxies {
|
||||||
|
err := config.Proxies.Parse(proxy)
|
||||||
|
if err != nil {
|
||||||
|
fieldParseErrors = append(
|
||||||
|
fieldParseErrors,
|
||||||
|
types.NewFieldParseError(fmt.Sprintf("proxy[%d]", i), proxy, err),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case "values", "V":
|
||||||
|
config.Values = append(config.Values, values...)
|
||||||
|
case "timeout", "T":
|
||||||
|
config.Timeout = new(timeout)
|
||||||
|
case "insecure", "I":
|
||||||
|
config.Insecure = new(insecure)
|
||||||
|
case "lua":
|
||||||
|
config.Lua = append(config.Lua, luaScripts...)
|
||||||
|
case "js":
|
||||||
|
config.Js = append(config.Js, jsScripts...)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
if len(fieldParseErrors) > 0 {
|
||||||
|
return nil, types.NewFieldParseErrors(fieldParseErrors)
|
||||||
|
}
|
||||||
|
|
||||||
|
return config, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (parser ConfigCLIParser) PrintHelp() {
|
||||||
|
fmt.Printf(
|
||||||
|
cliUsageText+"\n",
|
||||||
|
Defaults.ShowConfig,
|
||||||
|
Defaults.Concurrency,
|
||||||
|
Defaults.Quiet,
|
||||||
|
Defaults.Output,
|
||||||
|
Defaults.DryRun,
|
||||||
|
|
||||||
|
Defaults.Method,
|
||||||
|
Defaults.RequestTimeout,
|
||||||
|
Defaults.Insecure,
|
||||||
|
)
|
||||||
|
}
|
||||||
848
internal/config/config.go
Normal file
848
internal/config/config.go
Normal file
@@ -0,0 +1,848 @@
|
|||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"net/url"
|
||||||
|
"os"
|
||||||
|
"slices"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/charmbracelet/bubbles/viewport"
|
||||||
|
tea "github.com/charmbracelet/bubbletea"
|
||||||
|
"github.com/charmbracelet/glamour"
|
||||||
|
"github.com/charmbracelet/glamour/styles"
|
||||||
|
"github.com/charmbracelet/lipgloss"
|
||||||
|
"github.com/charmbracelet/x/term"
|
||||||
|
"go.aykhans.me/sarin/internal/script"
|
||||||
|
"go.aykhans.me/sarin/internal/types"
|
||||||
|
"go.aykhans.me/sarin/internal/version"
|
||||||
|
"go.aykhans.me/utils/common"
|
||||||
|
utilsErr "go.aykhans.me/utils/errors"
|
||||||
|
"go.yaml.in/yaml/v4"
|
||||||
|
)
|
||||||
|
|
||||||
|
var Defaults = struct {
|
||||||
|
UserAgent string
|
||||||
|
Method string
|
||||||
|
RequestTimeout time.Duration
|
||||||
|
Concurrency uint
|
||||||
|
ShowConfig bool
|
||||||
|
Quiet bool
|
||||||
|
Insecure bool
|
||||||
|
Output ConfigOutputType
|
||||||
|
DryRun bool
|
||||||
|
}{
|
||||||
|
UserAgent: "Sarin/" + version.Version,
|
||||||
|
Method: "GET",
|
||||||
|
RequestTimeout: time.Second * 10,
|
||||||
|
Concurrency: 1,
|
||||||
|
ShowConfig: false,
|
||||||
|
Quiet: false,
|
||||||
|
Insecure: false,
|
||||||
|
Output: ConfigOutputTypeTable,
|
||||||
|
DryRun: false,
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
ValidProxySchemes = []string{"http", "https", "socks5", "socks5h"}
|
||||||
|
ValidRequestURLSchemes = []string{"http", "https"}
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
StyleYellow = lipgloss.NewStyle().Foreground(lipgloss.Color("220"))
|
||||||
|
StyleRed = lipgloss.NewStyle().Foreground(lipgloss.Color("196"))
|
||||||
|
)
|
||||||
|
|
||||||
|
type IParser interface {
|
||||||
|
Parse() (*Config, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
type ConfigOutputType string
|
||||||
|
|
||||||
|
var (
|
||||||
|
ConfigOutputTypeTable ConfigOutputType = "table"
|
||||||
|
ConfigOutputTypeJSON ConfigOutputType = "json"
|
||||||
|
ConfigOutputTypeYAML ConfigOutputType = "yaml"
|
||||||
|
ConfigOutputTypeNone ConfigOutputType = "none"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Config struct {
|
||||||
|
ShowConfig *bool `yaml:"showConfig,omitempty"`
|
||||||
|
Files []types.ConfigFile `yaml:"files,omitempty"`
|
||||||
|
Methods []string `yaml:"methods,omitempty"`
|
||||||
|
URL *url.URL `yaml:"url,omitempty"`
|
||||||
|
Timeout *time.Duration `yaml:"timeout,omitempty"`
|
||||||
|
Concurrency *uint `yaml:"concurrency,omitempty"`
|
||||||
|
Requests *uint64 `yaml:"requests,omitempty"`
|
||||||
|
Duration *time.Duration `yaml:"duration,omitempty"`
|
||||||
|
Quiet *bool `yaml:"quiet,omitempty"`
|
||||||
|
Output *ConfigOutputType `yaml:"output,omitempty"`
|
||||||
|
Insecure *bool `yaml:"insecure,omitempty"`
|
||||||
|
DryRun *bool `yaml:"dryRun,omitempty"`
|
||||||
|
Params types.Params `yaml:"params,omitempty"`
|
||||||
|
Headers types.Headers `yaml:"headers,omitempty"`
|
||||||
|
Cookies types.Cookies `yaml:"cookies,omitempty"`
|
||||||
|
Bodies []string `yaml:"bodies,omitempty"`
|
||||||
|
Proxies types.Proxies `yaml:"proxies,omitempty"`
|
||||||
|
Values []string `yaml:"values,omitempty"`
|
||||||
|
Lua []string `yaml:"lua,omitempty"`
|
||||||
|
Js []string `yaml:"js,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// MarshalYAML implements yaml.Marshaler. It builds a yaml.Node tree by
// hand (instead of relying on struct tags) so that unset/empty fields are
// omitted, single-element slices render as scalars, and multi-value
// fields get an explanatory line comment. Always returns a nil error.
func (config Config) MarshalYAML() (any, error) {
	// Comment attached to keys whose value is a multi-element sequence.
	const randomValueComment = "Cycles through all values, with a new random start each round"

	// toNode encodes an arbitrary value into a fresh yaml.Node.
	toNode := func(v any) *yaml.Node {
		node := &yaml.Node{}
		_ = node.Encode(v) // encoding the basic types used here cannot fail
		return node
	}

	// addField appends key/value to the mapping content, skipping values
	// that are effectively empty (unencoded, empty scalar, empty sequence).
	addField := func(content *[]*yaml.Node, key string, value *yaml.Node, comment string) {
		if value.Kind == 0 || (value.Kind == yaml.ScalarNode && value.Value == "") ||
			(value.Kind == yaml.SequenceNode && len(value.Content) == 0) {
			return
		}
		keyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: key, LineComment: comment}
		*content = append(*content, keyNode, value)
	}

	// addStringSlice renders a []string: a single element becomes a scalar,
	// multiple elements become a sequence (optionally with the cycling comment).
	addStringSlice := func(content *[]*yaml.Node, key string, items []string, withComment bool) {
		comment := ""
		if withComment && len(items) > 1 {
			comment = randomValueComment
		}
		switch len(items) {
		case 1:
			addField(content, key, toNode(items[0]), "")
		default:
			addField(content, key, toNode(items), comment)
		}
	}

	// marshalKeyValues renders params/headers/cookies as a sequence of
	// single-key mappings; multi-valued entries get the cycling comment.
	marshalKeyValues := func(items []types.KeyValue[string, []string]) *yaml.Node {
		seqNode := &yaml.Node{Kind: yaml.SequenceNode}
		for _, item := range items {
			keyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: item.Key}
			var valueNode *yaml.Node

			switch len(item.Value) {
			case 1:
				valueNode = &yaml.Node{Kind: yaml.ScalarNode, Value: item.Value[0]}
			default:
				valueNode = &yaml.Node{Kind: yaml.SequenceNode}
				for _, v := range item.Value {
					valueNode.Content = append(valueNode.Content, &yaml.Node{Kind: yaml.ScalarNode, Value: v})
				}
				if len(item.Value) > 1 {
					keyNode.LineComment = randomValueComment
				}
			}

			mapNode := &yaml.Node{Kind: yaml.MappingNode, Content: []*yaml.Node{keyNode, valueNode}}
			seqNode.Content = append(seqNode.Content, mapNode)
		}
		return seqNode
	}

	root := &yaml.Node{Kind: yaml.MappingNode}
	content := &root.Content

	if config.ShowConfig != nil {
		addField(content, "showConfig", toNode(*config.ShowConfig), "")
	}

	addStringSlice(content, "method", config.Methods, true)

	if config.URL != nil {
		addField(content, "url", toNode(config.URL.String()), "")
	}
	if config.Timeout != nil {
		addField(content, "timeout", toNode(*config.Timeout), "")
	}
	if config.Concurrency != nil {
		addField(content, "concurrency", toNode(*config.Concurrency), "")
	}
	if config.Requests != nil {
		addField(content, "requests", toNode(*config.Requests), "")
	}
	if config.Duration != nil {
		addField(content, "duration", toNode(*config.Duration), "")
	}
	if config.Quiet != nil {
		addField(content, "quiet", toNode(*config.Quiet), "")
	}
	if config.Output != nil {
		addField(content, "output", toNode(string(*config.Output)), "")
	}
	if config.Insecure != nil {
		addField(content, "insecure", toNode(*config.Insecure), "")
	}
	if config.DryRun != nil {
		addField(content, "dryRun", toNode(*config.DryRun), "")
	}

	// Params/Headers/Cookies share the same KeyValue shape; convert each
	// element so a single marshalKeyValues helper can render all three.
	if len(config.Params) > 0 {
		items := make([]types.KeyValue[string, []string], len(config.Params))
		for i, p := range config.Params {
			items[i] = types.KeyValue[string, []string](p)
		}
		addField(content, "params", marshalKeyValues(items), "")
	}
	if len(config.Headers) > 0 {
		items := make([]types.KeyValue[string, []string], len(config.Headers))
		for i, h := range config.Headers {
			items[i] = types.KeyValue[string, []string](h)
		}
		addField(content, "headers", marshalKeyValues(items), "")
	}
	if len(config.Cookies) > 0 {
		items := make([]types.KeyValue[string, []string], len(config.Cookies))
		for i, c := range config.Cookies {
			items[i] = types.KeyValue[string, []string](c)
		}
		addField(content, "cookies", marshalKeyValues(items), "")
	}

	addStringSlice(content, "body", config.Bodies, true)

	if len(config.Proxies) > 0 {
		proxyStrings := make([]string, len(config.Proxies))
		for i, p := range config.Proxies {
			proxyStrings[i] = p.String()
		}
		addStringSlice(content, "proxy", proxyStrings, true)
	}

	addStringSlice(content, "values", config.Values, false)
	addStringSlice(content, "lua", config.Lua, false)
	addStringSlice(content, "js", config.Js, false)

	return root, nil
}
|
||||||
|
|
||||||
|
// Print shows the marshaled config. When stdout is not a terminal the raw
// YAML is printed and the process exits; otherwise an interactive
// bubbletea viewer is launched. The return value reports whether the user
// pressed ENTER to start the run. Exits the process on any render error.
func (config Config) Print() bool {
	configYAML, err := yaml.Marshal(config)
	if err != nil {
		fmt.Fprintln(os.Stderr, StyleRed.Render("Error marshaling config to yaml: "+err.Error()))
		os.Exit(1)
	}

	// Pipe mode: output raw content directly and exit (no TUI possible).
	if !term.IsTerminal(os.Stdout.Fd()) {
		fmt.Println(string(configYAML))
		os.Exit(0)
	}

	// Drop glamour's default margins so the YAML lines up with the viewer.
	style := styles.TokyoNightStyleConfig
	style.Document.Margin = common.ToPtr[uint](0)
	style.CodeBlock.Margin = common.ToPtr[uint](0)

	renderer, err := glamour.NewTermRenderer(
		glamour.WithStyles(style),
		glamour.WithWordWrap(0),
	)
	if err != nil {
		fmt.Fprintln(os.Stderr, StyleRed.Render(err.Error()))
		os.Exit(1)
	}

	// Wrap the YAML in a fenced code block to get syntax highlighting.
	content, err := renderer.Render("```yaml\n" + string(configYAML) + "```")
	if err != nil {
		fmt.Fprintln(os.Stderr, StyleRed.Render(err.Error()))
		os.Exit(1)
	}

	p := tea.NewProgram(
		printConfigModel{content: strings.Trim(content, "\n"), rawContent: configYAML},
		tea.WithAltScreen(),
		tea.WithMouseCellMotion(),
	)

	m, err := p.Run()
	if err != nil {
		fmt.Fprintln(os.Stderr, StyleRed.Render(err.Error()))
		os.Exit(1)
	}

	return m.(printConfigModel).start //nolint:forcetypeassert // m is guaranteed to be of type printConfigModel as it was the only model passed to tea.NewProgram
}
|
||||||
|
|
||||||
|
func (config *Config) Merge(newConfig *Config) {
|
||||||
|
config.Files = append(config.Files, newConfig.Files...)
|
||||||
|
if len(newConfig.Methods) > 0 {
|
||||||
|
config.Methods = append(config.Methods, newConfig.Methods...)
|
||||||
|
}
|
||||||
|
if newConfig.URL != nil {
|
||||||
|
config.URL = newConfig.URL
|
||||||
|
}
|
||||||
|
if newConfig.Timeout != nil {
|
||||||
|
config.Timeout = newConfig.Timeout
|
||||||
|
}
|
||||||
|
if newConfig.Concurrency != nil {
|
||||||
|
config.Concurrency = newConfig.Concurrency
|
||||||
|
}
|
||||||
|
if newConfig.Requests != nil {
|
||||||
|
config.Requests = newConfig.Requests
|
||||||
|
}
|
||||||
|
if newConfig.Duration != nil {
|
||||||
|
config.Duration = newConfig.Duration
|
||||||
|
}
|
||||||
|
if newConfig.ShowConfig != nil {
|
||||||
|
config.ShowConfig = newConfig.ShowConfig
|
||||||
|
}
|
||||||
|
if newConfig.Quiet != nil {
|
||||||
|
config.Quiet = newConfig.Quiet
|
||||||
|
}
|
||||||
|
if newConfig.Output != nil {
|
||||||
|
config.Output = newConfig.Output
|
||||||
|
}
|
||||||
|
if newConfig.Insecure != nil {
|
||||||
|
config.Insecure = newConfig.Insecure
|
||||||
|
}
|
||||||
|
if newConfig.DryRun != nil {
|
||||||
|
config.DryRun = newConfig.DryRun
|
||||||
|
}
|
||||||
|
if len(newConfig.Params) != 0 {
|
||||||
|
config.Params = append(config.Params, newConfig.Params...)
|
||||||
|
}
|
||||||
|
if len(newConfig.Headers) != 0 {
|
||||||
|
config.Headers = append(config.Headers, newConfig.Headers...)
|
||||||
|
}
|
||||||
|
if len(newConfig.Cookies) != 0 {
|
||||||
|
config.Cookies = append(config.Cookies, newConfig.Cookies...)
|
||||||
|
}
|
||||||
|
if len(newConfig.Bodies) != 0 {
|
||||||
|
config.Bodies = append(config.Bodies, newConfig.Bodies...)
|
||||||
|
}
|
||||||
|
if len(newConfig.Proxies) != 0 {
|
||||||
|
config.Proxies.Append(newConfig.Proxies...)
|
||||||
|
}
|
||||||
|
if len(newConfig.Values) != 0 {
|
||||||
|
config.Values = append(config.Values, newConfig.Values...)
|
||||||
|
}
|
||||||
|
if len(newConfig.Lua) != 0 {
|
||||||
|
config.Lua = append(config.Lua, newConfig.Lua...)
|
||||||
|
}
|
||||||
|
if len(newConfig.Js) != 0 {
|
||||||
|
config.Js = append(config.Js, newConfig.Js...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (config *Config) SetDefaults() {
|
||||||
|
if config.URL != nil && len(config.URL.Query()) > 0 {
|
||||||
|
urlParams := types.Params{}
|
||||||
|
for key, values := range config.URL.Query() {
|
||||||
|
for _, value := range values {
|
||||||
|
urlParams = append(urlParams, types.Param{
|
||||||
|
Key: key,
|
||||||
|
Value: []string{value},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
config.Params = append(urlParams, config.Params...)
|
||||||
|
config.URL.RawQuery = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(config.Methods) == 0 {
|
||||||
|
config.Methods = []string{Defaults.Method}
|
||||||
|
}
|
||||||
|
if config.Timeout == nil {
|
||||||
|
config.Timeout = &Defaults.RequestTimeout
|
||||||
|
}
|
||||||
|
if config.Concurrency == nil {
|
||||||
|
config.Concurrency = new(Defaults.Concurrency)
|
||||||
|
}
|
||||||
|
if config.ShowConfig == nil {
|
||||||
|
config.ShowConfig = new(Defaults.ShowConfig)
|
||||||
|
}
|
||||||
|
if config.Quiet == nil {
|
||||||
|
config.Quiet = new(Defaults.Quiet)
|
||||||
|
}
|
||||||
|
if config.Insecure == nil {
|
||||||
|
config.Insecure = new(Defaults.Insecure)
|
||||||
|
}
|
||||||
|
if config.DryRun == nil {
|
||||||
|
config.DryRun = new(Defaults.DryRun)
|
||||||
|
}
|
||||||
|
if !config.Headers.Has("User-Agent") {
|
||||||
|
config.Headers = append(config.Headers, types.Header{Key: "User-Agent", Value: []string{Defaults.UserAgent}})
|
||||||
|
}
|
||||||
|
|
||||||
|
if config.Output == nil {
|
||||||
|
config.Output = new(Defaults.Output)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate validates the config fields.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.FieldValidationErrors
|
||||||
|
func (config Config) Validate() error {
|
||||||
|
validationErrors := make([]types.FieldValidationError, 0)
|
||||||
|
|
||||||
|
if len(config.Methods) == 0 {
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("Method", "", errors.New("method is required")))
|
||||||
|
}
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case config.URL == nil:
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("URL", "", errors.New("URL is required")))
|
||||||
|
case !slices.Contains(ValidRequestURLSchemes, config.URL.Scheme):
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("URL", config.URL.String(), fmt.Errorf("URL scheme must be one of: %s", strings.Join(ValidRequestURLSchemes, ", "))))
|
||||||
|
case config.URL.Host == "":
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("URL", config.URL.String(), errors.New("URL must have a host")))
|
||||||
|
}
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case config.Concurrency == nil:
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("Concurrency", "", errors.New("concurrency count is required")))
|
||||||
|
case *config.Concurrency == 0:
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("Concurrency", "0", errors.New("concurrency must be greater than 0")))
|
||||||
|
case *config.Concurrency > 100_000_000:
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("Concurrency", strconv.FormatUint(uint64(*config.Concurrency), 10), errors.New("concurrency must not exceed 100,000,000")))
|
||||||
|
}
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case config.Requests == nil && config.Duration == nil:
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("Requests / Duration", "", errors.New("either request count or duration must be specified")))
|
||||||
|
case (config.Requests != nil && config.Duration != nil) && (*config.Requests == 0 && *config.Duration == 0):
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("Requests / Duration", "0", errors.New("both request count and duration cannot be zero")))
|
||||||
|
case config.Requests != nil && config.Duration == nil && *config.Requests == 0:
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("Requests", "0", errors.New("request count must be greater than 0")))
|
||||||
|
case config.Requests == nil && config.Duration != nil && *config.Duration == 0:
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("Duration", "0", errors.New("duration must be greater than 0")))
|
||||||
|
}
|
||||||
|
|
||||||
|
if *config.Timeout < 1 {
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("Timeout", "0", errors.New("timeout must be greater than 0")))
|
||||||
|
}
|
||||||
|
|
||||||
|
if config.ShowConfig == nil {
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("ShowConfig", "", errors.New("showConfig field is required")))
|
||||||
|
}
|
||||||
|
|
||||||
|
if config.Quiet == nil {
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("Quiet", "", errors.New("quiet field is required")))
|
||||||
|
}
|
||||||
|
|
||||||
|
if config.Output == nil {
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("Output", "", errors.New("output field is required")))
|
||||||
|
} else {
|
||||||
|
switch *config.Output {
|
||||||
|
case "":
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("Output", "", errors.New("output field is required")))
|
||||||
|
case ConfigOutputTypeTable, ConfigOutputTypeJSON, ConfigOutputTypeYAML, ConfigOutputTypeNone:
|
||||||
|
default:
|
||||||
|
validOutputs := []string{string(ConfigOutputTypeTable), string(ConfigOutputTypeJSON), string(ConfigOutputTypeYAML), string(ConfigOutputTypeNone)}
|
||||||
|
validationErrors = append(validationErrors,
|
||||||
|
types.NewFieldValidationError(
|
||||||
|
"Output",
|
||||||
|
string(*config.Output),
|
||||||
|
fmt.Errorf(
|
||||||
|
"output type must be one of: %s",
|
||||||
|
strings.Join(validOutputs, ", "),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if config.Insecure == nil {
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("Insecure", "", errors.New("insecure field is required")))
|
||||||
|
}
|
||||||
|
|
||||||
|
if config.DryRun == nil {
|
||||||
|
validationErrors = append(validationErrors, types.NewFieldValidationError("DryRun", "", errors.New("dryRun field is required")))
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, proxy := range config.Proxies {
|
||||||
|
if !slices.Contains(ValidProxySchemes, proxy.Scheme) {
|
||||||
|
validationErrors = append(
|
||||||
|
validationErrors,
|
||||||
|
types.NewFieldValidationError(
|
||||||
|
fmt.Sprintf("Proxy[%d]", i),
|
||||||
|
proxy.String(),
|
||||||
|
fmt.Errorf("proxy scheme must be one of: %v", ValidProxySchemes),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a context with timeout for script validation (loading from URLs)
|
||||||
|
scriptCtx, scriptCancel := context.WithTimeout(context.Background(), 30*time.Second)
|
||||||
|
defer scriptCancel()
|
||||||
|
|
||||||
|
for i, scriptSrc := range config.Lua {
|
||||||
|
if err := validateScriptSource(scriptSrc); err != nil {
|
||||||
|
validationErrors = append(
|
||||||
|
validationErrors,
|
||||||
|
types.NewFieldValidationError(fmt.Sprintf("Lua[%d]", i), scriptSrc, err),
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Validate script syntax
|
||||||
|
if err := script.ValidateScript(scriptCtx, scriptSrc, script.EngineTypeLua); err != nil {
|
||||||
|
validationErrors = append(
|
||||||
|
validationErrors,
|
||||||
|
types.NewFieldValidationError(fmt.Sprintf("Lua[%d]", i), scriptSrc, err),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, scriptSrc := range config.Js {
|
||||||
|
if err := validateScriptSource(scriptSrc); err != nil {
|
||||||
|
validationErrors = append(
|
||||||
|
validationErrors,
|
||||||
|
types.NewFieldValidationError(fmt.Sprintf("Js[%d]", i), scriptSrc, err),
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Validate script syntax
|
||||||
|
if err := script.ValidateScript(scriptCtx, scriptSrc, script.EngineTypeJavaScript); err != nil {
|
||||||
|
validationErrors = append(
|
||||||
|
validationErrors,
|
||||||
|
types.NewFieldValidationError(fmt.Sprintf("Js[%d]", i), scriptSrc, err),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
templateErrors := ValidateTemplates(&config)
|
||||||
|
validationErrors = append(validationErrors, templateErrors...)
|
||||||
|
|
||||||
|
if len(validationErrors) > 0 {
|
||||||
|
return types.NewFieldValidationErrors(validationErrors)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadAllConfigs builds the effective Config by layering sources in
// ascending precedence: ENV, then every config file referenced by ENV/CLI
// (recursively, via parseConfigFile), then the CLI flags themselves.
// Any parse error prints a message and exits the process.
func ReadAllConfigs() *Config {
	envParser := NewConfigENVParser("SARIN")
	envConfig, err := envParser.Parse()
	_ = utilsErr.MustHandle(err,
		utilsErr.OnType(func(err types.FieldParseErrors) error {
			printParseErrors("ENV", err.Errors...)
			fmt.Println()
			os.Exit(1)
			return nil // unreachable; satisfies the handler signature
		}),
	)

	cliParser := NewConfigCLIParser(os.Args)
	cliConf, err := cliParser.Parse()
	_ = utilsErr.MustHandle(err,
		utilsErr.OnType(func(err types.CLIUnexpectedArgsError) error {
			cliParser.PrintHelp()
			fmt.Fprintln(os.Stderr,
				StyleYellow.Render(
					"\nUnexpected CLI arguments provided: ",
				)+strings.Join(err.Args, ", "),
			)
			os.Exit(1)
			return nil // unreachable
		}),
		utilsErr.OnType(func(err types.FieldParseErrors) error {
			cliParser.PrintHelp()
			fmt.Println()
			printParseErrors("CLI", err.Errors...)
			os.Exit(1)
			return nil // unreachable
		}),
	)

	// Load each referenced config file (nested files up to depth 10) and
	// merge it on top of the ENV config.
	for _, configFile := range append(envConfig.Files, cliConf.Files...) {
		fileConfig, err := parseConfigFile(configFile, 10)
		_ = utilsErr.MustHandle(err,
			utilsErr.OnType(func(err types.ConfigFileReadError) error {
				cliParser.PrintHelp()
				fmt.Fprintln(os.Stderr,
					StyleYellow.Render(
						fmt.Sprintf("\nFailed to read config file (%s): ", configFile.Path())+err.Error(),
					),
				)
				os.Exit(1)
				return nil // unreachable
			}),
			utilsErr.OnType(func(err types.UnmarshalError) error {
				fmt.Fprintln(os.Stderr,
					StyleYellow.Render(
						fmt.Sprintf("\nFailed to parse config file (%s): ", configFile.Path())+err.Error(),
					),
				)
				os.Exit(1)
				return nil // unreachable
			}),
			utilsErr.OnType(func(err types.FieldParseErrors) error {
				printParseErrors(fmt.Sprintf("CONFIG FILE '%s'", configFile.Path()), err.Errors...)
				os.Exit(1)
				return nil // unreachable
			}),
		)

		envConfig.Merge(fileConfig)
	}

	// CLI has the highest precedence, so it is merged last.
	envConfig.Merge(cliConf)

	return envConfig
}
|
||||||
|
|
||||||
|
// parseConfigFile recursively parses a config file and its nested files up to maxDepth levels.
// Returns the merged configuration or an error if parsing fails.
// It can return the following errors:
//   - types.ConfigFileReadError
//   - types.UnmarshalError
//   - types.FieldParseErrors
func parseConfigFile(configFile types.ConfigFile, maxDepth int) (*Config, error) {
	configFileParser := NewConfigFileParser(configFile)
	fileConfig, err := configFileParser.Parse()
	if err != nil {
		return nil, err
	}

	// Depth limit reached: return this file as-is, ignoring nested refs.
	if maxDepth <= 0 {
		return fileConfig, nil
	}

	// For each nested file, merge the current (outer) config on top of it
	// so the outer file's explicit settings override the nested file's.
	for _, c := range fileConfig.Files {
		innerFileConfig, err := parseConfigFile(c, maxDepth-1)
		if err != nil {
			return nil, err
		}

		innerFileConfig.Merge(fileConfig)
		fileConfig = innerFileConfig
	}

	return fileConfig, nil
}
|
||||||
|
|
||||||
|
// validateScriptSource validates a script source string.
|
||||||
|
// Scripts can be:
|
||||||
|
// - Inline script: any string not starting with "@"
|
||||||
|
// - Escaped "@": strings starting with "@@" (literal "@" at start)
|
||||||
|
// - File reference: "@/path/to/file" or "@./relative/path"
|
||||||
|
// - URL reference: "@http://..." or "@https://..."
|
||||||
|
//
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.ErrScriptEmpty
|
||||||
|
// - types.ErrScriptSourceEmpty
|
||||||
|
// - types.ErrScriptURLNoHost
|
||||||
|
// - types.URLParseError
|
||||||
|
func validateScriptSource(script string) error {
|
||||||
|
// Empty script is invalid
|
||||||
|
if script == "" {
|
||||||
|
return types.ErrScriptEmpty
|
||||||
|
}
|
||||||
|
|
||||||
|
// Not a file/URL reference - it's an inline script
|
||||||
|
if !strings.HasPrefix(script, "@") {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Escaped @ - it's an inline script starting with literal @
|
||||||
|
if strings.HasPrefix(script, "@@") {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// It's a file or URL reference - validate the source
|
||||||
|
source := script[1:] // Remove the @ prefix
|
||||||
|
|
||||||
|
if source == "" {
|
||||||
|
return types.ErrScriptSourceEmpty
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if it's a http(s) URL
|
||||||
|
if strings.HasPrefix(source, "http://") || strings.HasPrefix(source, "https://") {
|
||||||
|
parsedURL, err := url.Parse(source)
|
||||||
|
if err != nil {
|
||||||
|
return types.NewURLParseError(source, err)
|
||||||
|
}
|
||||||
|
if parsedURL.Host == "" {
|
||||||
|
return types.ErrScriptURLNoHost
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// It's a file path - basic validation (not empty, checked above)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func printParseErrors(parserName string, errors ...types.FieldParseError) {
|
||||||
|
for _, fieldErr := range errors {
|
||||||
|
if fieldErr.Value == "" {
|
||||||
|
fmt.Fprintln(os.Stderr,
|
||||||
|
StyleYellow.Render(fmt.Sprintf("[%s] Field '%s': ", parserName, fieldErr.Field))+fieldErr.Err.Error(),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
fmt.Fprintln(os.Stderr,
|
||||||
|
StyleYellow.Render(fmt.Sprintf("[%s] Field '%s' (%s): ", parserName, fieldErr.Field, fieldErr.Value))+fieldErr.Err.Error(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const (
	scrollbarWidth       = 1               // columns reserved for the viewer's scrollbar
	scrollbarBottomSpace = 1               // rows left blank under the scrollbar track
	statusDisplayTime    = 3 * time.Second // how long save status messages stay visible
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
printConfigBorderStyle = func() lipgloss.Border {
|
||||||
|
b := lipgloss.RoundedBorder()
|
||||||
|
return b
|
||||||
|
}()
|
||||||
|
|
||||||
|
printConfigHelpStyle = lipgloss.NewStyle().BorderStyle(printConfigBorderStyle).Padding(0, 1)
|
||||||
|
printConfigSuccessStatusStyle = lipgloss.NewStyle().BorderStyle(printConfigBorderStyle).Padding(0, 1).Foreground(lipgloss.Color("10"))
|
||||||
|
printConfigErrorStatusStyle = lipgloss.NewStyle().BorderStyle(printConfigBorderStyle).Padding(0, 1).Foreground(lipgloss.Color("9"))
|
||||||
|
printConfigKeyStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("12")).Bold(true)
|
||||||
|
printConfigDescStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("252"))
|
||||||
|
)
|
||||||
|
|
||||||
|
// printConfigClearStatusMsg is sent after statusDisplayTime to clear the
// transient status message in the header.
type printConfigClearStatusMsg struct{}

// printConfigModel is the bubbletea model for the interactive config
// viewer launched by Config.Print.
type printConfigModel struct {
	viewport   viewport.Model
	content    string // rendered (highlighted) YAML shown in the viewport
	rawContent []byte // raw YAML bytes, written to disk on ctrl+s
	statusMsg  string // transient header message (save success/failure)
	ready      bool   // set once the first WindowSizeMsg sized the viewport
	start      bool   // set when the user presses ENTER to start the run
}
|
||||||
|
|
||||||
|
// Init implements tea.Model; no initial command is needed.
func (m printConfigModel) Init() tea.Cmd { return nil }
||||||
|
// Update implements tea.Model. ESC/ctrl+c quit, ctrl+s saves the raw YAML
// to disk, ENTER quits with start=true; all other messages (including
// scrolling and resizes) are forwarded to the viewport.
func (m printConfigModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
	var cmd tea.Cmd

	switch msg := msg.(type) {
	case tea.KeyMsg:
		switch msg.String() {
		case "ctrl+c", "esc":
			return m, tea.Quit
		case "ctrl+s":
			return m.saveContent()
		case "enter":
			// Signal the caller (Config.Print) to start the run.
			m.start = true
			return m, tea.Quit
		}

	case printConfigClearStatusMsg:
		m.statusMsg = ""
		return m, nil

	case tea.WindowSizeMsg:
		m.handleResize(msg)
	}

	// Let the viewport handle scrolling keys, mouse events, etc.
	m.viewport, cmd = m.viewport.Update(msg)
	return m, cmd
}
|
||||||
|
|
||||||
|
func (m printConfigModel) View() string {
|
||||||
|
if !m.ready {
|
||||||
|
return "\n Initializing..."
|
||||||
|
}
|
||||||
|
|
||||||
|
content := lipgloss.JoinHorizontal(lipgloss.Top, m.viewport.View(), m.scrollbar())
|
||||||
|
return fmt.Sprintf("%s\n%s\n%s", m.headerView(), content, m.footerView())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *printConfigModel) saveContent() (printConfigModel, tea.Cmd) {
|
||||||
|
filename := fmt.Sprintf("sarin_config_%s.yaml", time.Now().Format("2006-01-02_15-04-05"))
|
||||||
|
if err := os.WriteFile(filename, m.rawContent, 0600); err != nil {
|
||||||
|
m.statusMsg = printConfigErrorStatusStyle.Render("✗ Error saving file: " + err.Error())
|
||||||
|
} else {
|
||||||
|
m.statusMsg = printConfigSuccessStatusStyle.Render("✓ Saved to " + filename)
|
||||||
|
}
|
||||||
|
return *m, tea.Tick(statusDisplayTime, func(time.Time) tea.Msg { return printConfigClearStatusMsg{} })
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleResize (re)sizes the viewport to the terminal, reserving room for
// the header, footer and scrollbar. On the first resize it also creates
// the viewport and loads the content.
func (m *printConfigModel) handleResize(msg tea.WindowSizeMsg) {
	headerHeight := lipgloss.Height(m.headerView())
	footerHeight := lipgloss.Height(m.footerView())
	height := msg.Height - headerHeight - footerHeight
	width := msg.Width - scrollbarWidth

	if !m.ready {
		// First resize: construct the viewport and fill it once.
		m.viewport = viewport.New(width, height)
		m.viewport.SetContent(m.contentWithLineNumbers())
		m.ready = true
	} else {
		m.viewport.Width = width
		m.viewport.Height = height
	}
}
|
||||||
|
|
||||||
|
func (m printConfigModel) headerView() string {
|
||||||
|
var title string
|
||||||
|
if m.statusMsg != "" {
|
||||||
|
title = ("" + m.statusMsg)
|
||||||
|
} else {
|
||||||
|
sep := printConfigDescStyle.Render(" / ")
|
||||||
|
help := printConfigKeyStyle.Render("ENTER") + printConfigDescStyle.Render(" start") + sep +
|
||||||
|
printConfigKeyStyle.Render("CTRL+S") + printConfigDescStyle.Render(" save") + sep +
|
||||||
|
printConfigKeyStyle.Render("ESC") + printConfigDescStyle.Render(" exit")
|
||||||
|
title = printConfigHelpStyle.Render(help)
|
||||||
|
}
|
||||||
|
line := strings.Repeat("─", max(0, m.viewport.Width+scrollbarWidth-lipgloss.Width(title)))
|
||||||
|
return lipgloss.JoinHorizontal(lipgloss.Center, title, line)
|
||||||
|
}
|
||||||
|
|
||||||
|
// footerView draws a horizontal rule spanning the full terminal width.
func (m printConfigModel) footerView() string {
	return strings.Repeat("─", m.viewport.Width+scrollbarWidth)
}
|
||||||
|
|
||||||
|
func (m printConfigModel) contentWithLineNumbers() string {
|
||||||
|
lines := strings.Split(m.content, "\n")
|
||||||
|
width := len(strconv.Itoa(len(lines)))
|
||||||
|
lineNumStyle := lipgloss.NewStyle().Foreground(lipgloss.Color("246"))
|
||||||
|
|
||||||
|
var sb strings.Builder
|
||||||
|
for i, line := range lines {
|
||||||
|
lineNum := lineNumStyle.Render(fmt.Sprintf("%*d", width, i+1))
|
||||||
|
sb.WriteString(lineNum)
|
||||||
|
sb.WriteString(" ")
|
||||||
|
sb.WriteString(line)
|
||||||
|
if i < len(lines)-1 {
|
||||||
|
sb.WriteByte('\n')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m printConfigModel) scrollbar() string {
|
||||||
|
height := m.viewport.Height
|
||||||
|
trackHeight := height - scrollbarBottomSpace
|
||||||
|
totalLines := m.viewport.TotalLineCount()
|
||||||
|
|
||||||
|
if totalLines <= height {
|
||||||
|
return strings.Repeat(" \n", trackHeight) + " "
|
||||||
|
}
|
||||||
|
|
||||||
|
thumbSize := max(1, (height*trackHeight)/totalLines)
|
||||||
|
thumbPos := int(m.viewport.ScrollPercent() * float64(trackHeight-thumbSize))
|
||||||
|
|
||||||
|
var sb strings.Builder
|
||||||
|
for i := range trackHeight {
|
||||||
|
if i >= thumbPos && i < thumbPos+thumbSize {
|
||||||
|
sb.WriteByte('\xe2') // █ (U+2588)
|
||||||
|
sb.WriteByte('\x96')
|
||||||
|
sb.WriteByte('\x88')
|
||||||
|
} else {
|
||||||
|
sb.WriteByte('\xe2') // ░ (U+2591)
|
||||||
|
sb.WriteByte('\x96')
|
||||||
|
sb.WriteByte('\x91')
|
||||||
|
}
|
||||||
|
sb.WriteByte('\n')
|
||||||
|
}
|
||||||
|
sb.WriteByte(' ')
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
242
internal/config/env.go
Normal file
242
internal/config/env.go
Normal file
@@ -0,0 +1,242 @@
|
|||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"net/url"
|
||||||
|
"os"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"go.aykhans.me/sarin/internal/types"
|
||||||
|
utilsParse "go.aykhans.me/utils/parser"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Compile-time check that ConfigENVParser satisfies IParser.
var _ IParser = ConfigENVParser{}
||||||
|
// ConfigENVParser reads configuration from environment variables sharing
// a common prefix (e.g. "SARIN").
type ConfigENVParser struct {
	envPrefix string // prefix prepended to every variable name looked up
}
|
||||||
|
|
||||||
|
func NewConfigENVParser(envPrefix string) *ConfigENVParser {
|
||||||
|
return &ConfigENVParser{envPrefix}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse parses env arguments into a Config object.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.FieldParseErrors
|
||||||
|
func (parser ConfigENVParser) Parse() (*Config, error) {
|
||||||
|
var (
|
||||||
|
config = &Config{}
|
||||||
|
fieldParseErrors []types.FieldParseError
|
||||||
|
)
|
||||||
|
|
||||||
|
if showConfig := parser.getEnv("SHOW_CONFIG"); showConfig != "" {
|
||||||
|
showConfigParsed, err := utilsParse.ParseString[bool](showConfig)
|
||||||
|
if err != nil {
|
||||||
|
fieldParseErrors = append(
|
||||||
|
fieldParseErrors,
|
||||||
|
types.NewFieldParseError(
|
||||||
|
parser.getFullEnvName("SHOW_CONFIG"),
|
||||||
|
showConfig,
|
||||||
|
errors.New("invalid value for boolean, expected 'true' or 'false'"),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
config.ShowConfig = &showConfigParsed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if configFile := parser.getEnv("CONFIG_FILE"); configFile != "" {
|
||||||
|
config.Files = append(config.Files, *types.ParseConfigFile(configFile))
|
||||||
|
}
|
||||||
|
|
||||||
|
if quiet := parser.getEnv("QUIET"); quiet != "" {
|
||||||
|
quietParsed, err := utilsParse.ParseString[bool](quiet)
|
||||||
|
if err != nil {
|
||||||
|
fieldParseErrors = append(
|
||||||
|
fieldParseErrors,
|
||||||
|
types.NewFieldParseError(
|
||||||
|
parser.getFullEnvName("QUIET"),
|
||||||
|
quiet,
|
||||||
|
errors.New("invalid value for boolean, expected 'true' or 'false'"),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
config.Quiet = &quietParsed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if output := parser.getEnv("OUTPUT"); output != "" {
|
||||||
|
config.Output = new(ConfigOutputType(output))
|
||||||
|
}
|
||||||
|
|
||||||
|
if insecure := parser.getEnv("INSECURE"); insecure != "" {
|
||||||
|
insecureParsed, err := utilsParse.ParseString[bool](insecure)
|
||||||
|
if err != nil {
|
||||||
|
fieldParseErrors = append(
|
||||||
|
fieldParseErrors,
|
||||||
|
types.NewFieldParseError(
|
||||||
|
parser.getFullEnvName("INSECURE"),
|
||||||
|
insecure,
|
||||||
|
errors.New("invalid value for boolean, expected 'true' or 'false'"),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
config.Insecure = &insecureParsed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if dryRun := parser.getEnv("DRY_RUN"); dryRun != "" {
|
||||||
|
dryRunParsed, err := utilsParse.ParseString[bool](dryRun)
|
||||||
|
if err != nil {
|
||||||
|
fieldParseErrors = append(
|
||||||
|
fieldParseErrors,
|
||||||
|
types.NewFieldParseError(
|
||||||
|
parser.getFullEnvName("DRY_RUN"),
|
||||||
|
dryRun,
|
||||||
|
errors.New("invalid value for boolean, expected 'true' or 'false'"),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
config.DryRun = &dryRunParsed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if method := parser.getEnv("METHOD"); method != "" {
|
||||||
|
config.Methods = []string{method}
|
||||||
|
}
|
||||||
|
|
||||||
|
if urlEnv := parser.getEnv("URL"); urlEnv != "" {
|
||||||
|
urlEnvParsed, err := url.Parse(urlEnv)
|
||||||
|
if err != nil {
|
||||||
|
fieldParseErrors = append(
|
||||||
|
fieldParseErrors,
|
||||||
|
types.NewFieldParseError(parser.getFullEnvName("URL"), urlEnv, err),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
config.URL = urlEnvParsed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if concurrency := parser.getEnv("CONCURRENCY"); concurrency != "" {
|
||||||
|
concurrencyParsed, err := utilsParse.ParseString[uint](concurrency)
|
||||||
|
if err != nil {
|
||||||
|
fieldParseErrors = append(
|
||||||
|
fieldParseErrors,
|
||||||
|
types.NewFieldParseError(
|
||||||
|
parser.getFullEnvName("CONCURRENCY"),
|
||||||
|
concurrency,
|
||||||
|
errors.New("invalid value for unsigned integer"),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
config.Concurrency = &concurrencyParsed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if requests := parser.getEnv("REQUESTS"); requests != "" {
|
||||||
|
requestsParsed, err := utilsParse.ParseString[uint64](requests)
|
||||||
|
if err != nil {
|
||||||
|
fieldParseErrors = append(
|
||||||
|
fieldParseErrors,
|
||||||
|
types.NewFieldParseError(
|
||||||
|
parser.getFullEnvName("REQUESTS"),
|
||||||
|
requests,
|
||||||
|
errors.New("invalid value for unsigned integer"),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
config.Requests = &requestsParsed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if duration := parser.getEnv("DURATION"); duration != "" {
|
||||||
|
durationParsed, err := utilsParse.ParseString[time.Duration](duration)
|
||||||
|
if err != nil {
|
||||||
|
fieldParseErrors = append(
|
||||||
|
fieldParseErrors,
|
||||||
|
types.NewFieldParseError(
|
||||||
|
parser.getFullEnvName("DURATION"),
|
||||||
|
duration,
|
||||||
|
errors.New("invalid value duration, expected a duration string (e.g., '10s', '1h30m')"),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
config.Duration = &durationParsed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if timeout := parser.getEnv("TIMEOUT"); timeout != "" {
|
||||||
|
timeoutParsed, err := utilsParse.ParseString[time.Duration](timeout)
|
||||||
|
if err != nil {
|
||||||
|
fieldParseErrors = append(
|
||||||
|
fieldParseErrors,
|
||||||
|
types.NewFieldParseError(
|
||||||
|
parser.getFullEnvName("TIMEOUT"),
|
||||||
|
timeout,
|
||||||
|
errors.New("invalid value duration, expected a duration string (e.g., '10s', '1h30m')"),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
config.Timeout = &timeoutParsed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if param := parser.getEnv("PARAM"); param != "" {
|
||||||
|
config.Params.Parse(param)
|
||||||
|
}
|
||||||
|
|
||||||
|
if header := parser.getEnv("HEADER"); header != "" {
|
||||||
|
config.Headers.Parse(header)
|
||||||
|
}
|
||||||
|
|
||||||
|
if cookie := parser.getEnv("COOKIE"); cookie != "" {
|
||||||
|
config.Cookies.Parse(cookie)
|
||||||
|
}
|
||||||
|
|
||||||
|
if body := parser.getEnv("BODY"); body != "" {
|
||||||
|
config.Bodies = []string{body}
|
||||||
|
}
|
||||||
|
|
||||||
|
if proxy := parser.getEnv("PROXY"); proxy != "" {
|
||||||
|
err := config.Proxies.Parse(proxy)
|
||||||
|
if err != nil {
|
||||||
|
fieldParseErrors = append(
|
||||||
|
fieldParseErrors,
|
||||||
|
types.NewFieldParseError(
|
||||||
|
parser.getFullEnvName("PROXY"),
|
||||||
|
proxy,
|
||||||
|
err,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if values := parser.getEnv("VALUES"); values != "" {
|
||||||
|
config.Values = []string{values}
|
||||||
|
}
|
||||||
|
|
||||||
|
if lua := parser.getEnv("LUA"); lua != "" {
|
||||||
|
config.Lua = []string{lua}
|
||||||
|
}
|
||||||
|
|
||||||
|
if js := parser.getEnv("JS"); js != "" {
|
||||||
|
config.Js = []string{js}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(fieldParseErrors) > 0 {
|
||||||
|
return nil, types.NewFieldParseErrors(fieldParseErrors)
|
||||||
|
}
|
||||||
|
|
||||||
|
return config, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (parser ConfigENVParser) getFullEnvName(envName string) string {
|
||||||
|
if parser.envPrefix == "" {
|
||||||
|
return envName
|
||||||
|
}
|
||||||
|
return parser.envPrefix + "_" + envName
|
||||||
|
}
|
||||||
|
|
||||||
|
func (parser ConfigENVParser) getEnv(envName string) string {
|
||||||
|
return os.Getenv(parser.getFullEnvName(envName))
|
||||||
|
}
|
||||||
292
internal/config/file.go
Normal file
292
internal/config/file.go
Normal file
@@ -0,0 +1,292 @@
|
|||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"go.aykhans.me/sarin/internal/types"
|
||||||
|
"go.yaml.in/yaml/v4"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ IParser = ConfigFileParser{}
|
||||||
|
|
||||||
|
// ConfigFileParser loads a Config from a single config file, which may be
// a local path or an HTTP/HTTPS URL.
type ConfigFileParser struct {
	configFile types.ConfigFile // source descriptor: path/URL plus detected file type
}
|
||||||
|
|
||||||
|
func NewConfigFileParser(configFile types.ConfigFile) *ConfigFileParser {
|
||||||
|
return &ConfigFileParser{configFile}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse parses config file arguments into a Config object.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.ConfigFileReadError
|
||||||
|
// - types.UnmarshalError
|
||||||
|
// - types.FieldParseErrors
|
||||||
|
func (parser ConfigFileParser) Parse() (*Config, error) {
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), time.Second*30)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
configFileData, err := fetchFile(ctx, parser.configFile.Path())
|
||||||
|
if err != nil {
|
||||||
|
return nil, types.NewConfigFileReadError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
switch parser.configFile.Type() {
|
||||||
|
case types.ConfigFileTypeYAML, types.ConfigFileTypeUnknown:
|
||||||
|
return parser.ParseYAML(configFileData)
|
||||||
|
default:
|
||||||
|
panic("unhandled config file type")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetchFile retrieves file contents from a local path or HTTP/HTTPS URL.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.FileReadError
|
||||||
|
// - types.HTTPFetchError
|
||||||
|
// - types.HTTPStatusError
|
||||||
|
func fetchFile(ctx context.Context, src string) ([]byte, error) {
|
||||||
|
if strings.HasPrefix(src, "http://") || strings.HasPrefix(src, "https://") {
|
||||||
|
return fetchHTTP(ctx, src)
|
||||||
|
}
|
||||||
|
return fetchLocal(src)
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetchHTTP downloads file contents from an HTTP/HTTPS URL.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.HTTPFetchError
|
||||||
|
// - types.HTTPStatusError
|
||||||
|
func fetchHTTP(ctx context.Context, url string) ([]byte, error) {
|
||||||
|
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, types.NewHTTPFetchError(url, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
resp, err := http.DefaultClient.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
return nil, types.NewHTTPFetchError(url, err)
|
||||||
|
}
|
||||||
|
defer resp.Body.Close() //nolint:errcheck
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
return nil, types.NewHTTPStatusError(url, resp.StatusCode, resp.Status)
|
||||||
|
}
|
||||||
|
|
||||||
|
data, err := io.ReadAll(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return nil, types.NewHTTPFetchError(url, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetchLocal reads file contents from the local filesystem.
|
||||||
|
// It resolves relative paths from the current working directory.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.FileReadError
|
||||||
|
func fetchLocal(src string) ([]byte, error) {
|
||||||
|
path := src
|
||||||
|
if !filepath.IsAbs(src) {
|
||||||
|
pwd, err := os.Getwd()
|
||||||
|
if err != nil {
|
||||||
|
return nil, types.NewFileReadError(src, err)
|
||||||
|
}
|
||||||
|
path = filepath.Join(pwd, src)
|
||||||
|
}
|
||||||
|
|
||||||
|
data, err := os.ReadFile(path) //nolint:gosec
|
||||||
|
if err != nil {
|
||||||
|
return nil, types.NewFileReadError(path, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type stringOrSliceField []string
|
||||||
|
|
||||||
|
func (ss *stringOrSliceField) UnmarshalYAML(node *yaml.Node) error {
|
||||||
|
switch node.Kind {
|
||||||
|
case yaml.ScalarNode:
|
||||||
|
// Handle single string value
|
||||||
|
*ss = []string{node.Value}
|
||||||
|
return nil
|
||||||
|
case yaml.SequenceNode:
|
||||||
|
// Handle array of strings
|
||||||
|
var slice []string
|
||||||
|
if err := node.Decode(&slice); err != nil {
|
||||||
|
return err //nolint:wrapcheck
|
||||||
|
}
|
||||||
|
*ss = slice
|
||||||
|
return nil
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("expected a string or a sequence of strings, but got %v", node.Kind)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// keyValuesField handles flexible YAML formats for key-value pairs.
|
||||||
|
// Supported formats:
|
||||||
|
// - Sequence of maps: [{key1: value1}, {key2: [value2, value3]}]
|
||||||
|
// - Single map: {key1: value1, key2: [value2, value3]}
|
||||||
|
//
|
||||||
|
// Values can be either a single string or an array of strings.
|
||||||
|
type keyValuesField []types.KeyValue[string, []string]
|
||||||
|
|
||||||
|
func (kv *keyValuesField) UnmarshalYAML(node *yaml.Node) error {
|
||||||
|
switch node.Kind {
|
||||||
|
case yaml.MappingNode:
|
||||||
|
// Handle single map: {key1: value1, key2: [value2]}
|
||||||
|
return kv.unmarshalMapping(node)
|
||||||
|
case yaml.SequenceNode:
|
||||||
|
// Handle sequence of maps: [{key1: value1}, {key2: value2}]
|
||||||
|
for _, item := range node.Content {
|
||||||
|
if item.Kind != yaml.MappingNode {
|
||||||
|
return fmt.Errorf("expected a mapping in sequence, but got %v", item.Kind)
|
||||||
|
}
|
||||||
|
if err := kv.unmarshalMapping(item); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("expected a mapping or sequence of mappings, but got %v", node.Kind)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (kv *keyValuesField) unmarshalMapping(node *yaml.Node) error {
|
||||||
|
// MappingNode content is [key1, value1, key2, value2, ...]
|
||||||
|
for i := 0; i < len(node.Content); i += 2 {
|
||||||
|
keyNode := node.Content[i]
|
||||||
|
valueNode := node.Content[i+1]
|
||||||
|
|
||||||
|
if keyNode.Kind != yaml.ScalarNode {
|
||||||
|
return fmt.Errorf("expected a string key, but got %v", keyNode.Kind)
|
||||||
|
}
|
||||||
|
|
||||||
|
key := keyNode.Value
|
||||||
|
var values []string
|
||||||
|
|
||||||
|
switch valueNode.Kind {
|
||||||
|
case yaml.ScalarNode:
|
||||||
|
values = []string{valueNode.Value}
|
||||||
|
case yaml.SequenceNode:
|
||||||
|
for _, v := range valueNode.Content {
|
||||||
|
if v.Kind != yaml.ScalarNode {
|
||||||
|
return fmt.Errorf("expected string values in array for key %q, but got %v", key, v.Kind)
|
||||||
|
}
|
||||||
|
values = append(values, v.Value)
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("expected a string or array of strings for key %q, but got %v", key, valueNode.Kind)
|
||||||
|
}
|
||||||
|
|
||||||
|
*kv = append(*kv, types.KeyValue[string, []string]{Key: key, Value: values})
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// configYAML mirrors the YAML layout of a sarin config file. Pointer
// fields distinguish "absent" (nil) from an explicit zero value; the
// stringOrSliceField and keyValuesField types accept both scalar and
// sequence forms for convenience.
type configYAML struct {
	ConfigFiles  stringOrSliceField `yaml:"configFile"` // nested config files to load in addition
	Method       stringOrSliceField `yaml:"method"`
	URL          *string            `yaml:"url"`
	Timeout      *time.Duration     `yaml:"timeout"`
	Concurrency  *uint              `yaml:"concurrency"`
	RequestCount *uint64            `yaml:"requests"`
	Duration     *time.Duration     `yaml:"duration"`
	Quiet        *bool              `yaml:"quiet"`
	Output       *string            `yaml:"output"`
	Insecure     *bool              `yaml:"insecure"`
	ShowConfig   *bool              `yaml:"showConfig"`
	DryRun       *bool              `yaml:"dryRun"`
	Params       keyValuesField     `yaml:"params"`
	Headers      keyValuesField     `yaml:"headers"`
	Cookies      keyValuesField     `yaml:"cookies"`
	Bodies       stringOrSliceField `yaml:"body"`
	Proxies      stringOrSliceField `yaml:"proxy"`
	Values       stringOrSliceField `yaml:"values"`
	Lua          stringOrSliceField `yaml:"lua"`
	Js           stringOrSliceField `yaml:"js"`
}
|
||||||
|
|
||||||
|
// ParseYAML parses YAML config file arguments into a Config object.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.UnmarshalError
|
||||||
|
// - types.FieldParseErrors
|
||||||
|
func (parser ConfigFileParser) ParseYAML(data []byte) (*Config, error) {
|
||||||
|
var (
|
||||||
|
config = &Config{}
|
||||||
|
parsedData = &configYAML{}
|
||||||
|
)
|
||||||
|
|
||||||
|
err := yaml.Unmarshal(data, &parsedData)
|
||||||
|
if err != nil {
|
||||||
|
return nil, types.NewUnmarshalError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var fieldParseErrors []types.FieldParseError
|
||||||
|
|
||||||
|
config.Methods = append(config.Methods, parsedData.Method...)
|
||||||
|
config.Timeout = parsedData.Timeout
|
||||||
|
config.Concurrency = parsedData.Concurrency
|
||||||
|
config.Requests = parsedData.RequestCount
|
||||||
|
config.Duration = parsedData.Duration
|
||||||
|
config.ShowConfig = parsedData.ShowConfig
|
||||||
|
config.Quiet = parsedData.Quiet
|
||||||
|
|
||||||
|
if parsedData.Output != nil {
|
||||||
|
config.Output = new(ConfigOutputType(*parsedData.Output))
|
||||||
|
}
|
||||||
|
|
||||||
|
config.Insecure = parsedData.Insecure
|
||||||
|
config.DryRun = parsedData.DryRun
|
||||||
|
for _, kv := range parsedData.Params {
|
||||||
|
config.Params = append(config.Params, types.Param(kv))
|
||||||
|
}
|
||||||
|
for _, kv := range parsedData.Headers {
|
||||||
|
config.Headers = append(config.Headers, types.Header(kv))
|
||||||
|
}
|
||||||
|
for _, kv := range parsedData.Cookies {
|
||||||
|
config.Cookies = append(config.Cookies, types.Cookie(kv))
|
||||||
|
}
|
||||||
|
config.Bodies = append(config.Bodies, parsedData.Bodies...)
|
||||||
|
config.Values = append(config.Values, parsedData.Values...)
|
||||||
|
config.Lua = append(config.Lua, parsedData.Lua...)
|
||||||
|
config.Js = append(config.Js, parsedData.Js...)
|
||||||
|
|
||||||
|
if len(parsedData.ConfigFiles) > 0 {
|
||||||
|
for _, configFile := range parsedData.ConfigFiles {
|
||||||
|
config.Files = append(config.Files, *types.ParseConfigFile(configFile))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if parsedData.URL != nil {
|
||||||
|
urlParsed, err := url.Parse(*parsedData.URL)
|
||||||
|
if err != nil {
|
||||||
|
fieldParseErrors = append(fieldParseErrors, types.NewFieldParseError("url", *parsedData.URL, err))
|
||||||
|
} else {
|
||||||
|
config.URL = urlParsed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, proxy := range parsedData.Proxies {
|
||||||
|
err := config.Proxies.Parse(proxy)
|
||||||
|
if err != nil {
|
||||||
|
fieldParseErrors = append(
|
||||||
|
fieldParseErrors,
|
||||||
|
types.NewFieldParseError(fmt.Sprintf("proxy[%d]", i), proxy, err),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(fieldParseErrors) > 0 {
|
||||||
|
return nil, types.NewFieldParseErrors(fieldParseErrors)
|
||||||
|
}
|
||||||
|
|
||||||
|
return config, nil
|
||||||
|
}
|
||||||
229
internal/config/template_validator.go
Normal file
229
internal/config/template_validator.go
Normal file
@@ -0,0 +1,229 @@
|
|||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"text/template"
|
||||||
|
|
||||||
|
"go.aykhans.me/sarin/internal/sarin"
|
||||||
|
"go.aykhans.me/sarin/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.TemplateParseError
|
||||||
|
func validateTemplateString(value string, funcMap template.FuncMap) error {
|
||||||
|
if value == "" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := template.New("").Funcs(funcMap).Parse(value)
|
||||||
|
if err != nil {
|
||||||
|
return types.NewTemplateParseError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateTemplateMethods(methods []string, funcMap template.FuncMap) []types.FieldValidationError {
|
||||||
|
var validationErrors []types.FieldValidationError
|
||||||
|
|
||||||
|
for i, method := range methods {
|
||||||
|
if err := validateTemplateString(method, funcMap); err != nil {
|
||||||
|
validationErrors = append(
|
||||||
|
validationErrors,
|
||||||
|
types.NewFieldValidationError(
|
||||||
|
fmt.Sprintf("Method[%d]", i),
|
||||||
|
method,
|
||||||
|
err,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return validationErrors
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateTemplateParams(params types.Params, funcMap template.FuncMap) []types.FieldValidationError {
|
||||||
|
var validationErrors []types.FieldValidationError
|
||||||
|
|
||||||
|
for paramIndex, param := range params {
|
||||||
|
// Validate param key
|
||||||
|
if err := validateTemplateString(param.Key, funcMap); err != nil {
|
||||||
|
validationErrors = append(
|
||||||
|
validationErrors,
|
||||||
|
types.NewFieldValidationError(
|
||||||
|
fmt.Sprintf("Param[%d].Key", paramIndex),
|
||||||
|
param.Key,
|
||||||
|
err,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate param values
|
||||||
|
for valueIndex, value := range param.Value {
|
||||||
|
if err := validateTemplateString(value, funcMap); err != nil {
|
||||||
|
validationErrors = append(
|
||||||
|
validationErrors,
|
||||||
|
types.NewFieldValidationError(
|
||||||
|
fmt.Sprintf("Param[%d].Value[%d]", paramIndex, valueIndex),
|
||||||
|
value,
|
||||||
|
err,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return validationErrors
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateTemplateHeaders(headers types.Headers, funcMap template.FuncMap) []types.FieldValidationError {
|
||||||
|
var validationErrors []types.FieldValidationError
|
||||||
|
|
||||||
|
for headerIndex, header := range headers {
|
||||||
|
// Validate header key
|
||||||
|
if err := validateTemplateString(header.Key, funcMap); err != nil {
|
||||||
|
validationErrors = append(
|
||||||
|
validationErrors,
|
||||||
|
types.NewFieldValidationError(
|
||||||
|
fmt.Sprintf("Header[%d].Key", headerIndex),
|
||||||
|
header.Key,
|
||||||
|
err,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate header values
|
||||||
|
for valueIndex, value := range header.Value {
|
||||||
|
if err := validateTemplateString(value, funcMap); err != nil {
|
||||||
|
validationErrors = append(
|
||||||
|
validationErrors,
|
||||||
|
types.NewFieldValidationError(
|
||||||
|
fmt.Sprintf("Header[%d].Value[%d]", headerIndex, valueIndex),
|
||||||
|
value,
|
||||||
|
err,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return validationErrors
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateTemplateCookies(cookies types.Cookies, funcMap template.FuncMap) []types.FieldValidationError {
|
||||||
|
var validationErrors []types.FieldValidationError
|
||||||
|
|
||||||
|
for cookieIndex, cookie := range cookies {
|
||||||
|
// Validate cookie key
|
||||||
|
if err := validateTemplateString(cookie.Key, funcMap); err != nil {
|
||||||
|
validationErrors = append(
|
||||||
|
validationErrors,
|
||||||
|
types.NewFieldValidationError(
|
||||||
|
fmt.Sprintf("Cookie[%d].Key", cookieIndex),
|
||||||
|
cookie.Key,
|
||||||
|
err,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate cookie values
|
||||||
|
for valueIndex, value := range cookie.Value {
|
||||||
|
if err := validateTemplateString(value, funcMap); err != nil {
|
||||||
|
validationErrors = append(
|
||||||
|
validationErrors,
|
||||||
|
types.NewFieldValidationError(
|
||||||
|
fmt.Sprintf("Cookie[%d].Value[%d]", cookieIndex, valueIndex),
|
||||||
|
value,
|
||||||
|
err,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return validationErrors
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateTemplateBodies(bodies []string, funcMap template.FuncMap) []types.FieldValidationError {
|
||||||
|
var validationErrors []types.FieldValidationError
|
||||||
|
|
||||||
|
for i, body := range bodies {
|
||||||
|
if err := validateTemplateString(body, funcMap); err != nil {
|
||||||
|
validationErrors = append(
|
||||||
|
validationErrors,
|
||||||
|
types.NewFieldValidationError(
|
||||||
|
fmt.Sprintf("Body[%d]", i),
|
||||||
|
body,
|
||||||
|
err,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return validationErrors
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateTemplateValues(values []string, funcMap template.FuncMap) []types.FieldValidationError {
|
||||||
|
var validationErrors []types.FieldValidationError
|
||||||
|
|
||||||
|
for i, value := range values {
|
||||||
|
if err := validateTemplateString(value, funcMap); err != nil {
|
||||||
|
validationErrors = append(
|
||||||
|
validationErrors,
|
||||||
|
types.NewFieldValidationError(
|
||||||
|
fmt.Sprintf("Values[%d]", i),
|
||||||
|
value,
|
||||||
|
err,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return validationErrors
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateTemplateURLPath(urlPath string, funcMap template.FuncMap) []types.FieldValidationError {
|
||||||
|
if err := validateTemplateString(urlPath, funcMap); err != nil {
|
||||||
|
return []types.FieldValidationError{
|
||||||
|
types.NewFieldValidationError("URL.Path", urlPath, err),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func ValidateTemplates(config *Config) []types.FieldValidationError {
|
||||||
|
// Create template function map using the same functions as sarin package
|
||||||
|
// Use nil for fileCache during validation - templates are only parsed, not executed
|
||||||
|
randSource := sarin.NewDefaultRandSource()
|
||||||
|
funcMap := sarin.NewDefaultTemplateFuncMap(randSource, nil)
|
||||||
|
|
||||||
|
bodyFuncMapData := &sarin.BodyTemplateFuncMapData{}
|
||||||
|
bodyFuncMap := sarin.NewDefaultBodyTemplateFuncMap(randSource, bodyFuncMapData, nil)
|
||||||
|
|
||||||
|
var allErrors []types.FieldValidationError
|
||||||
|
|
||||||
|
// Validate URL path
|
||||||
|
if config.URL != nil {
|
||||||
|
allErrors = append(allErrors, validateTemplateURLPath(config.URL.Path, funcMap)...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate methods
|
||||||
|
allErrors = append(allErrors, validateTemplateMethods(config.Methods, funcMap)...)
|
||||||
|
|
||||||
|
// Validate params
|
||||||
|
allErrors = append(allErrors, validateTemplateParams(config.Params, funcMap)...)
|
||||||
|
|
||||||
|
// Validate headers
|
||||||
|
allErrors = append(allErrors, validateTemplateHeaders(config.Headers, funcMap)...)
|
||||||
|
|
||||||
|
// Validate cookies
|
||||||
|
allErrors = append(allErrors, validateTemplateCookies(config.Cookies, funcMap)...)
|
||||||
|
|
||||||
|
// Validate bodies
|
||||||
|
allErrors = append(allErrors, validateTemplateBodies(config.Bodies, bodyFuncMap)...)
|
||||||
|
|
||||||
|
// Validate values
|
||||||
|
allErrors = append(allErrors, validateTemplateValues(config.Values, funcMap)...)
|
||||||
|
|
||||||
|
return allErrors
|
||||||
|
}
|
||||||
324
internal/sarin/client.go
Normal file
324
internal/sarin/client.go
Normal file
@@ -0,0 +1,324 @@
|
|||||||
|
package sarin
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"context"
|
||||||
|
"crypto/tls"
|
||||||
|
"encoding/base64"
|
||||||
|
"math"
|
||||||
|
"net"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/valyala/fasthttp"
|
||||||
|
"github.com/valyala/fasthttp/fasthttpproxy"
|
||||||
|
"go.aykhans.me/sarin/internal/types"
|
||||||
|
utilsSlice "go.aykhans.me/utils/slice"
|
||||||
|
"golang.org/x/net/proxy"
|
||||||
|
)
|
||||||
|
|
||||||
|
type HostClientGenerator func() *fasthttp.HostClient
|
||||||
|
|
||||||
|
// safeUintToInt converts u to int, clamping values above math.MaxInt to
// math.MaxInt instead of letting the conversion wrap to a negative number.
func safeUintToInt(u uint) int {
	if u <= math.MaxInt {
		return int(u)
	}
	return math.MaxInt
}
|
||||||
|
|
||||||
|
// NewHostClients creates a list of fasthttp.HostClient instances for the given proxies.
|
||||||
|
// If no proxies are provided, a single client without a proxy is returned.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.ProxyDialError
|
||||||
|
func NewHostClients(
|
||||||
|
ctx context.Context,
|
||||||
|
timeout time.Duration,
|
||||||
|
proxies []url.URL,
|
||||||
|
maxConns uint,
|
||||||
|
requestURL *url.URL,
|
||||||
|
skipVerify bool,
|
||||||
|
) ([]*fasthttp.HostClient, error) {
|
||||||
|
isTLS := requestURL.Scheme == "https"
|
||||||
|
|
||||||
|
if proxiesLen := len(proxies); proxiesLen > 0 {
|
||||||
|
clients := make([]*fasthttp.HostClient, 0, proxiesLen)
|
||||||
|
addr := requestURL.Host
|
||||||
|
if isTLS && requestURL.Port() == "" {
|
||||||
|
addr += ":443"
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, proxy := range proxies {
|
||||||
|
dialFunc, err := NewProxyDialFunc(ctx, &proxy, timeout)
|
||||||
|
if err != nil {
|
||||||
|
return nil, types.NewProxyDialError(proxy.String(), err)
|
||||||
|
}
|
||||||
|
|
||||||
|
clients = append(clients, &fasthttp.HostClient{
|
||||||
|
MaxConns: safeUintToInt(maxConns),
|
||||||
|
IsTLS: isTLS,
|
||||||
|
TLSConfig: &tls.Config{
|
||||||
|
InsecureSkipVerify: skipVerify, //nolint:gosec
|
||||||
|
},
|
||||||
|
Addr: addr,
|
||||||
|
Dial: dialFunc,
|
||||||
|
MaxIdleConnDuration: timeout,
|
||||||
|
MaxConnDuration: timeout,
|
||||||
|
WriteTimeout: timeout,
|
||||||
|
ReadTimeout: timeout,
|
||||||
|
DisableHeaderNamesNormalizing: true,
|
||||||
|
DisablePathNormalizing: true,
|
||||||
|
NoDefaultUserAgentHeader: true,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return clients, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
client := &fasthttp.HostClient{
|
||||||
|
MaxConns: safeUintToInt(maxConns),
|
||||||
|
IsTLS: isTLS,
|
||||||
|
TLSConfig: &tls.Config{
|
||||||
|
InsecureSkipVerify: skipVerify, //nolint:gosec
|
||||||
|
},
|
||||||
|
Addr: requestURL.Host,
|
||||||
|
MaxIdleConnDuration: timeout,
|
||||||
|
MaxConnDuration: timeout,
|
||||||
|
WriteTimeout: timeout,
|
||||||
|
ReadTimeout: timeout,
|
||||||
|
DisableHeaderNamesNormalizing: true,
|
||||||
|
DisablePathNormalizing: true,
|
||||||
|
NoDefaultUserAgentHeader: true,
|
||||||
|
}
|
||||||
|
return []*fasthttp.HostClient{client}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewProxyDialFunc creates a dial function for the given proxy URL.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.ProxyUnsupportedSchemeError
|
||||||
|
func NewProxyDialFunc(ctx context.Context, proxyURL *url.URL, timeout time.Duration) (fasthttp.DialFunc, error) {
|
||||||
|
var (
|
||||||
|
dialer fasthttp.DialFunc
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
|
||||||
|
switch proxyURL.Scheme {
|
||||||
|
case "socks5":
|
||||||
|
dialer, err = fasthttpSocksDialerDualStackTimeout(ctx, proxyURL, timeout, true)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
case "socks5h":
|
||||||
|
dialer, err = fasthttpSocksDialerDualStackTimeout(ctx, proxyURL, timeout, false)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
case "http":
|
||||||
|
dialer = fasthttpproxy.FasthttpHTTPDialerDualStackTimeout(proxyURL.String(), timeout)
|
||||||
|
case "https":
|
||||||
|
dialer = fasthttpHTTPSDialerDualStackTimeout(proxyURL, timeout)
|
||||||
|
default:
|
||||||
|
return nil, types.NewProxyUnsupportedSchemeError(proxyURL.Scheme)
|
||||||
|
}
|
||||||
|
|
||||||
|
return dialer, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// fasthttpSocksDialerDualStackTimeout builds a fasthttp.DialFunc that
// tunnels through the SOCKS5 proxy at proxyURL, enforcing timeout across
// the whole dial (and, when resolveLocally is true, the local DNS lookup
// that precedes it).
// The returned dial function can return the following errors:
//   - types.ProxyDialError
func fasthttpSocksDialerDualStackTimeout(ctx context.Context, proxyURL *url.URL, timeout time.Duration, resolveLocally bool) (fasthttp.DialFunc, error) {
	netDialer := &net.Dialer{}

	// Parse auth from proxy URL if present
	var auth *proxy.Auth
	if proxyURL.User != nil {
		auth = &proxy.Auth{
			User: proxyURL.User.Username(),
		}
		if password, ok := proxyURL.User.Password(); ok {
			auth.Password = password
		}
	}

	// Create SOCKS5 dialer with net.Dialer as forward dialer
	socksDialer, err := proxy.SOCKS5("tcp", proxyURL.Host, auth, netDialer)
	if err != nil {
		return nil, err
	}

	proxyStr := proxyURL.String()

	// Assert to ContextDialer for timeout support
	contextDialer, ok := socksDialer.(proxy.ContextDialer)
	if !ok {
		// Fallback without timeout (should not happen with net.Dialer)
		return func(addr string) (net.Conn, error) {
			conn, err := socksDialer.Dial("tcp", addr)
			if err != nil {
				return nil, types.NewProxyDialError(proxyStr, err)
			}
			return conn, nil
		}, nil
	}

	// Return dial function that enforces the overall timeout as a deadline
	// shared between DNS resolution (when done locally) and the dial itself.
	return func(addr string) (net.Conn, error) {
		deadline := time.Now().Add(timeout)

		if resolveLocally {
			host, port, err := net.SplitHostPort(addr)
			if err != nil {
				return nil, types.NewProxyDialError(proxyStr, err)
			}

			// Bound DNS resolution by the full timeout; whatever it consumes
			// is subtracted from the dial budget via the shared deadline below.
			dnsCtx, dnsCancel := context.WithTimeout(ctx, timeout)
			ips, err := net.DefaultResolver.LookupIP(dnsCtx, "ip", host)
			dnsCancel()
			if err != nil {
				return nil, types.NewProxyDialError(proxyStr, err)
			}
			if len(ips) == 0 {
				return nil, types.NewProxyDialError(proxyStr, types.NewProxyResolveError(host))
			}

			// Use the first resolved IP
			addr = net.JoinHostPort(ips[0].String(), port)
		}

		// Use remaining time for dial
		remaining := time.Until(deadline)
		if remaining <= 0 {
			return nil, types.NewProxyDialError(proxyStr, context.DeadlineExceeded)
		}

		dialCtx, dialCancel := context.WithTimeout(ctx, remaining)
		defer dialCancel()

		conn, err := contextDialer.DialContext(dialCtx, "tcp", addr)
		if err != nil {
			return nil, types.NewProxyDialError(proxyStr, err)
		}
		return conn, nil
	}, nil
}
|
||||||
|
|
||||||
|
// fasthttpHTTPSDialerDualStackTimeout builds a fasthttp.DialFunc that reaches
// the target through an HTTPS proxy: it opens a TCP connection to the proxy,
// performs a TLS handshake with the proxy itself, then issues an HTTP CONNECT
// to open a tunnel to addr. The timeout budget covers dial + handshake + CONNECT.
//
// The returned dial function can return the following errors:
// - types.ProxyDialError
func fasthttpHTTPSDialerDualStackTimeout(proxyURL *url.URL, timeout time.Duration) fasthttp.DialFunc {
	proxyAddr := proxyURL.Host
	if proxyURL.Port() == "" {
		// Default to 443 when no explicit proxy port was supplied.
		proxyAddr = net.JoinHostPort(proxyURL.Hostname(), "443")
	}

	// Build Proxy-Authorization header if auth is present
	var proxyAuth string
	if proxyURL.User != nil {
		username := proxyURL.User.Username()
		password, _ := proxyURL.User.Password()
		credentials := username + ":" + password
		proxyAuth = "Basic " + base64.StdEncoding.EncodeToString([]byte(credentials))
	}

	proxyStr := proxyURL.String()

	return func(addr string) (net.Conn, error) {
		// Establish TCP connection to proxy with timeout
		start := time.Now()
		conn, err := fasthttp.DialDualStackTimeout(proxyAddr, timeout)
		if err != nil {
			return nil, types.NewProxyDialError(proxyStr, err)
		}

		// The TCP dial may have consumed part of the budget.
		remaining := timeout - time.Since(start)
		if remaining <= 0 {
			conn.Close() //nolint:errcheck,gosec
			return nil, types.NewProxyDialError(proxyStr, context.DeadlineExceeded)
		}

		// Set deadline for the TLS handshake and CONNECT request
		if err := conn.SetDeadline(time.Now().Add(remaining)); err != nil {
			conn.Close() //nolint:errcheck,gosec
			return nil, types.NewProxyDialError(proxyStr, err)
		}

		// Upgrade to TLS; ServerName enables SNI and certificate checks
		// against the proxy's hostname.
		tlsConn := tls.Client(conn, &tls.Config{ //nolint:gosec
			ServerName: proxyURL.Hostname(),
		})
		if err := tlsConn.Handshake(); err != nil {
			tlsConn.Close() //nolint:errcheck,gosec
			return nil, types.NewProxyDialError(proxyStr, err)
		}

		// Build and send CONNECT request
		connectReq := &http.Request{
			Method: http.MethodConnect,
			URL:    &url.URL{Opaque: addr},
			Host:   addr,
			Header: make(http.Header),
		}
		if proxyAuth != "" {
			connectReq.Header.Set("Proxy-Authorization", proxyAuth)
		}

		if err := connectReq.Write(tlsConn); err != nil {
			tlsConn.Close() //nolint:errcheck,gosec
			return nil, types.NewProxyDialError(proxyStr, err)
		}

		// Read response using buffered reader, but return wrapped connection
		// to preserve any buffered data
		bufReader := bufio.NewReader(tlsConn)
		resp, err := http.ReadResponse(bufReader, connectReq)
		if err != nil {
			tlsConn.Close() //nolint:errcheck,gosec
			return nil, types.NewProxyDialError(proxyStr, err)
		}
		resp.Body.Close() //nolint:errcheck,gosec

		if resp.StatusCode != http.StatusOK {
			tlsConn.Close() //nolint:errcheck,gosec
			return nil, types.NewProxyDialError(proxyStr, types.NewProxyConnectError(resp.Status))
		}

		// Clear deadline for the tunneled connection
		if err := tlsConn.SetDeadline(time.Time{}); err != nil {
			tlsConn.Close() //nolint:errcheck,gosec
			return nil, types.NewProxyDialError(proxyStr, err)
		}

		// Return wrapped connection that uses the buffered reader
		// to avoid losing any data that was read ahead
		return &bufferedConn{Conn: tlsConn, reader: bufReader}, nil
	}
}
|
||||||
|
|
||||||
|
// bufferedConn wraps a net.Conn with a buffered reader to preserve
// any data that was read during HTTP response parsing.
type bufferedConn struct {
	net.Conn

	// reader layers over Conn; it may still hold bytes that http.ReadResponse
	// consumed from the socket but that belong to the tunneled stream.
	reader *bufio.Reader
}

// Read satisfies net.Conn by draining the buffered reader first, then the
// underlying connection.
func (c *bufferedConn) Read(b []byte) (int, error) {
	return c.reader.Read(b)
}
|
||||||
|
|
||||||
|
func NewHostClientGenerator(clients ...*fasthttp.HostClient) HostClientGenerator {
|
||||||
|
switch len(clients) {
|
||||||
|
case 0:
|
||||||
|
hostClient := &fasthttp.HostClient{}
|
||||||
|
return func() *fasthttp.HostClient {
|
||||||
|
return hostClient
|
||||||
|
}
|
||||||
|
case 1:
|
||||||
|
return func() *fasthttp.HostClient {
|
||||||
|
return clients[0]
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return utilsSlice.RandomCycle(nil, clients...)
|
||||||
|
}
|
||||||
|
}
|
||||||
114
internal/sarin/filecache.go
Normal file
114
internal/sarin/filecache.go
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
package sarin
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"go.aykhans.me/sarin/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
// CachedFile holds the cached content and metadata of a file.
type CachedFile struct {
	// Content is the raw file bytes.
	Content []byte
	// Filename is the base name derived from the source path or URL.
	Filename string
}
|
||||||
|
|
||||||
|
// FileCache memoizes file contents keyed by their source (local path or URL).
// Safe for concurrent use: lookups and inserts go through sync.Map.
type FileCache struct {
	cache          sync.Map      // map[string]*CachedFile
	requestTimeout time.Duration // timeout applied to each HTTP download
}
|
||||||
|
|
||||||
|
func NewFileCache(requestTimeout time.Duration) *FileCache {
|
||||||
|
return &FileCache{
|
||||||
|
requestTimeout: requestTimeout,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetOrLoad retrieves a file from cache or loads it using the provided source.
|
||||||
|
// The source can be a local file path or an HTTP/HTTPS URL.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.FileReadError
|
||||||
|
// - types.HTTPFetchError
|
||||||
|
// - types.HTTPStatusError
|
||||||
|
func (fc *FileCache) GetOrLoad(source string) (*CachedFile, error) {
|
||||||
|
if val, ok := fc.cache.Load(source); ok {
|
||||||
|
return val.(*CachedFile), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
content []byte
|
||||||
|
filename string
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
if strings.HasPrefix(source, "http://") || strings.HasPrefix(source, "https://") {
|
||||||
|
content, filename, err = fc.fetchURL(source)
|
||||||
|
} else {
|
||||||
|
content, filename, err = fc.readLocalFile(source)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
file := &CachedFile{Content: content, Filename: filename}
|
||||||
|
|
||||||
|
// LoadOrStore handles race condition - if another goroutine
|
||||||
|
// cached it first, we get theirs (no duplicate storage)
|
||||||
|
actual, _ := fc.cache.LoadOrStore(source, file)
|
||||||
|
return actual.(*CachedFile), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// readLocalFile reads a file from the local filesystem and returns its content and filename.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.FileReadError
|
||||||
|
func (fc *FileCache) readLocalFile(filePath string) ([]byte, string, error) {
|
||||||
|
content, err := os.ReadFile(filePath) //nolint:gosec
|
||||||
|
if err != nil {
|
||||||
|
return nil, "", types.NewFileReadError(filePath, err)
|
||||||
|
}
|
||||||
|
return content, filepath.Base(filePath), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetchURL downloads file contents from an HTTP/HTTPS URL.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.HTTPFetchError
|
||||||
|
// - types.HTTPStatusError
|
||||||
|
func (fc *FileCache) fetchURL(url string) ([]byte, string, error) {
|
||||||
|
client := &http.Client{
|
||||||
|
Timeout: fc.requestTimeout,
|
||||||
|
}
|
||||||
|
|
||||||
|
resp, err := client.Get(url)
|
||||||
|
if err != nil {
|
||||||
|
return nil, "", types.NewHTTPFetchError(url, err)
|
||||||
|
}
|
||||||
|
defer resp.Body.Close() //nolint:errcheck
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
return nil, "", types.NewHTTPStatusError(url, resp.StatusCode, resp.Status)
|
||||||
|
}
|
||||||
|
|
||||||
|
content, err := io.ReadAll(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return nil, "", types.NewHTTPFetchError(url, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract filename from URL path
|
||||||
|
filename := path.Base(url)
|
||||||
|
if filename == "" || filename == "/" || filename == "." {
|
||||||
|
filename = "downloaded_file"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove query string from filename if present
|
||||||
|
if idx := strings.Index(filename, "?"); idx != -1 {
|
||||||
|
filename = filename[:idx]
|
||||||
|
}
|
||||||
|
|
||||||
|
return content, filename, nil
|
||||||
|
}
|
||||||
14
internal/sarin/helpers.go
Normal file
14
internal/sarin/helpers.go
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
package sarin
|
||||||
|
|
||||||
|
import (
|
||||||
|
"math/rand/v2"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
func NewDefaultRandSource() rand.Source {
|
||||||
|
now := time.Now().UnixNano()
|
||||||
|
return rand.NewPCG(
|
||||||
|
uint64(now), //nolint:gosec // G115: Safe conversion; UnixNano timestamp used as random seed, bit pattern is intentional
|
||||||
|
uint64(now>>32), //nolint:gosec // G115: Safe conversion; right-shifted timestamp for seed entropy, overflow is acceptable
|
||||||
|
)
|
||||||
|
}
|
||||||
404
internal/sarin/request.go
Normal file
404
internal/sarin/request.go
Normal file
@@ -0,0 +1,404 @@
|
|||||||
|
package sarin
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"maps"
|
||||||
|
"math/rand/v2"
|
||||||
|
"net/url"
|
||||||
|
"strings"
|
||||||
|
"text/template"
|
||||||
|
|
||||||
|
"github.com/joho/godotenv"
|
||||||
|
"github.com/valyala/fasthttp"
|
||||||
|
"go.aykhans.me/sarin/internal/script"
|
||||||
|
"go.aykhans.me/sarin/internal/types"
|
||||||
|
utilsSlice "go.aykhans.me/utils/slice"
|
||||||
|
)
|
||||||
|
|
||||||
|
// RequestGenerator fills a fasthttp.Request in place for the next attempt.
type RequestGenerator func(*fasthttp.Request) error

// requestDataGenerator writes one aspect (method, body, headers, ...) of the
// intermediate RequestData using the rendered template context.
type requestDataGenerator func(*script.RequestData, any) error

// valuesData is the template context exposing user-defined key/value pairs.
type valuesData struct {
	Values map[string]string
}
|
||||||
|
|
||||||
|
// NewRequestGenerator creates a new RequestGenerator function that generates HTTP requests
// with the specified configuration. The returned RequestGenerator is NOT safe for concurrent
// use by multiple goroutines: it reuses one RequestData instance and shared locals
// across calls.
//
// The boolean result reports whether generated requests can differ between
// calls (any dynamic template, multiple candidate values, or scripts).
//
// Note: Scripts must be validated before calling this function (e.g., in NewSarin).
// The caller is responsible for managing the scriptTransformer lifecycle.
func NewRequestGenerator(
	methods []string,
	requestURL *url.URL,
	params types.Params,
	headers types.Headers,
	cookies types.Cookies,
	bodies []string,
	values []string,
	fileCache *FileCache,
	scriptTransformer *script.Transformer,
) (RequestGenerator, bool) {
	randSource := NewDefaultRandSource()
	//nolint:gosec // G404: Using non-cryptographic rand for load testing, not security
	localRand := rand.New(randSource)
	templateFuncMap := NewDefaultTemplateFuncMap(randSource, fileCache)

	// One generator per request aspect; each also reports whether it is dynamic.
	pathGenerator, isPathGeneratorDynamic := createTemplateFunc(requestURL.Path, templateFuncMap)
	methodGenerator, isMethodGeneratorDynamic := NewMethodGeneratorFunc(localRand, methods, templateFuncMap)
	paramsGenerator, isParamsGeneratorDynamic := NewParamsGeneratorFunc(localRand, params, templateFuncMap)
	headersGenerator, isHeadersGeneratorDynamic := NewHeadersGeneratorFunc(localRand, headers, templateFuncMap)
	cookiesGenerator, isCookiesGeneratorDynamic := NewCookiesGeneratorFunc(localRand, cookies, templateFuncMap)

	// The body gets its own func map so body templates can record a generated
	// multipart Content-Type (picked up after header generation below).
	// (Method names spell "ContenType" — typo lives in BodyTemplateFuncMapData.)
	bodyTemplateFuncMapData := &BodyTemplateFuncMapData{}
	bodyTemplateFuncMap := NewDefaultBodyTemplateFuncMap(randSource, bodyTemplateFuncMapData, fileCache)
	bodyGenerator, isBodyGeneratorDynamic := NewBodyGeneratorFunc(localRand, bodies, bodyTemplateFuncMap)

	valuesGenerator := NewValuesGeneratorFunc(values, templateFuncMap)

	hasScripts := scriptTransformer != nil && !scriptTransformer.IsEmpty()

	host := requestURL.Host
	scheme := requestURL.Scheme

	// Reused across calls (cleared in resetRequestData) — this is why the
	// generator is not safe for concurrent use.
	reqData := &script.RequestData{
		Headers: make(map[string][]string),
		Params:  make(map[string][]string),
		Cookies: make(map[string][]string),
	}

	var (
		data valuesData
		path string
		err  error
	)
	return func(req *fasthttp.Request) error {
		resetRequestData(reqData)

		// Values are rendered first so every other template can reference them.
		data, err = valuesGenerator()
		if err != nil {
			return err
		}

		path, err = pathGenerator(data)
		if err != nil {
			return err
		}
		reqData.Path = path

		if err = methodGenerator(reqData, data); err != nil {
			return err
		}

		// Clear any Content-Type recorded by a previous call's body template.
		bodyTemplateFuncMapData.ClearFormDataContenType()
		if err = bodyGenerator(reqData, data); err != nil {
			return err
		}

		if err = headersGenerator(reqData, data); err != nil {
			return err
		}
		// A multipart body template may have produced a boundary-bearing
		// Content-Type; append it after user headers.
		if bodyTemplateFuncMapData.GetFormDataContenType() != "" {
			reqData.Headers["Content-Type"] = append(reqData.Headers["Content-Type"], bodyTemplateFuncMapData.GetFormDataContenType())
		}

		if err = paramsGenerator(reqData, data); err != nil {
			return err
		}
		if err = cookiesGenerator(reqData, data); err != nil {
			return err
		}

		// Scripts see (and may rewrite) the fully assembled RequestData.
		if hasScripts {
			if err = scriptTransformer.Transform(reqData); err != nil {
				return err
			}
		}

		applyRequestDataToFastHTTP(reqData, req, host, scheme)

		return nil
	}, isPathGeneratorDynamic ||
		isMethodGeneratorDynamic ||
		isParamsGeneratorDynamic ||
		isHeadersGeneratorDynamic ||
		isCookiesGeneratorDynamic ||
		isBodyGeneratorDynamic ||
		hasScripts
}
|
||||||
|
|
||||||
|
func resetRequestData(reqData *script.RequestData) {
|
||||||
|
reqData.Method = ""
|
||||||
|
reqData.Path = ""
|
||||||
|
reqData.Body = ""
|
||||||
|
clear(reqData.Headers)
|
||||||
|
clear(reqData.Params)
|
||||||
|
clear(reqData.Cookies)
|
||||||
|
}
|
||||||
|
|
||||||
|
func applyRequestDataToFastHTTP(reqData *script.RequestData, req *fasthttp.Request, host, scheme string) {
|
||||||
|
req.Header.SetHost(host)
|
||||||
|
req.SetRequestURI(reqData.Path)
|
||||||
|
req.Header.SetMethod(reqData.Method)
|
||||||
|
req.SetBody([]byte(reqData.Body))
|
||||||
|
|
||||||
|
for k, values := range reqData.Headers {
|
||||||
|
for _, v := range values {
|
||||||
|
req.Header.Add(k, v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for k, values := range reqData.Params {
|
||||||
|
for _, v := range values {
|
||||||
|
req.URI().QueryArgs().Add(k, v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(reqData.Cookies) > 0 {
|
||||||
|
cookieStrings := make([]string, 0, len(reqData.Cookies))
|
||||||
|
for k, values := range reqData.Cookies {
|
||||||
|
for _, v := range values {
|
||||||
|
cookieStrings = append(cookieStrings, k+"="+v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
req.Header.Add("Cookie", strings.Join(cookieStrings, "; "))
|
||||||
|
}
|
||||||
|
|
||||||
|
if scheme == "https" {
|
||||||
|
req.URI().SetScheme("https")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewMethodGeneratorFunc(localRand *rand.Rand, methods []string, templateFunctions template.FuncMap) (requestDataGenerator, bool) {
|
||||||
|
methodGenerator, isDynamic := buildStringSliceGenerator(localRand, methods, templateFunctions)
|
||||||
|
|
||||||
|
var (
|
||||||
|
method string
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
return func(reqData *script.RequestData, data any) error {
|
||||||
|
method, err = methodGenerator()(data)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
reqData.Method = method
|
||||||
|
return nil
|
||||||
|
}, isDynamic
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewBodyGeneratorFunc(localRand *rand.Rand, bodies []string, templateFunctions template.FuncMap) (requestDataGenerator, bool) {
|
||||||
|
bodyGenerator, isDynamic := buildStringSliceGenerator(localRand, bodies, templateFunctions)
|
||||||
|
|
||||||
|
var (
|
||||||
|
body string
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
return func(reqData *script.RequestData, data any) error {
|
||||||
|
body, err = bodyGenerator()(data)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
reqData.Body = body
|
||||||
|
return nil
|
||||||
|
}, isDynamic
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewParamsGeneratorFunc(localRand *rand.Rand, params types.Params, templateFunctions template.FuncMap) (requestDataGenerator, bool) {
|
||||||
|
generators, isDynamic := buildKeyValueGenerators(localRand, params, templateFunctions)
|
||||||
|
|
||||||
|
var (
|
||||||
|
key, value string
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
return func(reqData *script.RequestData, data any) error {
|
||||||
|
for _, gen := range generators {
|
||||||
|
key, err = gen.Key(data)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
value, err = gen.Value()(data)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
reqData.Params[key] = append(reqData.Params[key], value)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}, isDynamic
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewHeadersGeneratorFunc(localRand *rand.Rand, headers types.Headers, templateFunctions template.FuncMap) (requestDataGenerator, bool) {
|
||||||
|
generators, isDynamic := buildKeyValueGenerators(localRand, headers, templateFunctions)
|
||||||
|
|
||||||
|
var (
|
||||||
|
key, value string
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
return func(reqData *script.RequestData, data any) error {
|
||||||
|
for _, gen := range generators {
|
||||||
|
key, err = gen.Key(data)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
value, err = gen.Value()(data)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
reqData.Headers[key] = append(reqData.Headers[key], value)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}, isDynamic
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewCookiesGeneratorFunc(localRand *rand.Rand, cookies types.Cookies, templateFunctions template.FuncMap) (requestDataGenerator, bool) {
|
||||||
|
generators, isDynamic := buildKeyValueGenerators(localRand, cookies, templateFunctions)
|
||||||
|
|
||||||
|
var (
|
||||||
|
key, value string
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
return func(reqData *script.RequestData, data any) error {
|
||||||
|
for _, gen := range generators {
|
||||||
|
key, err = gen.Key(data)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
value, err = gen.Value()(data)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
reqData.Cookies[key] = append(reqData.Cookies[key], value)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}, isDynamic
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewValuesGeneratorFunc(values []string, templateFunctions template.FuncMap) func() (valuesData, error) {
|
||||||
|
generators := make([]func(_ any) (string, error), len(values))
|
||||||
|
|
||||||
|
for i, v := range values {
|
||||||
|
generators[i], _ = createTemplateFunc(v, templateFunctions)
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
rendered string
|
||||||
|
data map[string]string
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
return func() (valuesData, error) {
|
||||||
|
result := make(map[string]string)
|
||||||
|
for _, generator := range generators {
|
||||||
|
rendered, err = generator(nil)
|
||||||
|
if err != nil {
|
||||||
|
return valuesData{}, types.NewTemplateRenderError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
data, err = godotenv.Unmarshal(rendered)
|
||||||
|
if err != nil {
|
||||||
|
return valuesData{}, types.NewTemplateRenderError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
maps.Copy(result, data)
|
||||||
|
}
|
||||||
|
|
||||||
|
return valuesData{Values: result}, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func createTemplateFunc(value string, templateFunctions template.FuncMap) (func(data any) (string, error), bool) {
|
||||||
|
tmpl, err := template.New("").Funcs(templateFunctions).Parse(value)
|
||||||
|
if err == nil && hasTemplateActions(tmpl) {
|
||||||
|
var err error
|
||||||
|
return func(data any) (string, error) {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
if err = tmpl.Execute(&buf, data); err != nil {
|
||||||
|
return "", types.NewTemplateRenderError(err)
|
||||||
|
}
|
||||||
|
return buf.String(), nil
|
||||||
|
}, true
|
||||||
|
}
|
||||||
|
return func(_ any) (string, error) { return value, nil }, false
|
||||||
|
}
|
||||||
|
|
||||||
|
// keyValueGenerator produces one key and, via a cycling selector, one of the
// key's candidate values per invocation.
type keyValueGenerator struct {
	// Key renders the (possibly templated) key.
	Key func(data any) (string, error)
	// Value returns the next value-render function from the cycle.
	Value func() func(data any) (string, error)
}

// keyValueItem constrains the key/value kinds accepted by
// buildKeyValueGenerators.
type keyValueItem interface {
	types.Param | types.Header | types.Cookie
}
|
||||||
|
|
||||||
|
func buildKeyValueGenerators[T keyValueItem](
|
||||||
|
localRand *rand.Rand,
|
||||||
|
items []T,
|
||||||
|
templateFunctions template.FuncMap,
|
||||||
|
) ([]keyValueGenerator, bool) {
|
||||||
|
isDynamic := false
|
||||||
|
generators := make([]keyValueGenerator, len(items))
|
||||||
|
|
||||||
|
for generatorIndex, item := range items {
|
||||||
|
// Convert to KeyValue to access fields
|
||||||
|
keyValue := types.KeyValue[string, []string](item)
|
||||||
|
|
||||||
|
// Generate key function
|
||||||
|
keyFunc, keyIsDynamic := createTemplateFunc(keyValue.Key, templateFunctions)
|
||||||
|
if keyIsDynamic {
|
||||||
|
isDynamic = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate value functions
|
||||||
|
valueFuncs := make([]func(data any) (string, error), len(keyValue.Value))
|
||||||
|
for j, v := range keyValue.Value {
|
||||||
|
valueFunc, valueIsDynamic := createTemplateFunc(v, templateFunctions)
|
||||||
|
if valueIsDynamic {
|
||||||
|
isDynamic = true
|
||||||
|
}
|
||||||
|
valueFuncs[j] = valueFunc
|
||||||
|
}
|
||||||
|
|
||||||
|
generators[generatorIndex] = keyValueGenerator{
|
||||||
|
Key: keyFunc,
|
||||||
|
Value: utilsSlice.RandomCycle(localRand, valueFuncs...),
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(keyValue.Value) > 1 {
|
||||||
|
isDynamic = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return generators, isDynamic
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildStringSliceGenerator(
|
||||||
|
localRand *rand.Rand,
|
||||||
|
values []string,
|
||||||
|
templateFunctions template.FuncMap,
|
||||||
|
) (func() func(data any) (string, error), bool) {
|
||||||
|
// Return a function that returns an empty string generator if values is empty
|
||||||
|
if len(values) == 0 {
|
||||||
|
emptyFunc := func(_ any) (string, error) { return "", nil }
|
||||||
|
return func() func(_ any) (string, error) { return emptyFunc }, false
|
||||||
|
}
|
||||||
|
|
||||||
|
isDynamic := len(values) > 1
|
||||||
|
valueFuncs := make([]func(data any) (string, error), len(values))
|
||||||
|
|
||||||
|
for i, value := range values {
|
||||||
|
valueFunc, valueIsDynamic := createTemplateFunc(value, templateFunctions)
|
||||||
|
if valueIsDynamic {
|
||||||
|
isDynamic = true
|
||||||
|
}
|
||||||
|
valueFuncs[i] = valueFunc
|
||||||
|
}
|
||||||
|
|
||||||
|
return utilsSlice.RandomCycle(localRand, valueFuncs...), isDynamic
|
||||||
|
}
|
||||||
348
internal/sarin/response.go
Normal file
348
internal/sarin/response.go
Normal file
@@ -0,0 +1,348 @@
|
|||||||
|
package sarin
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"math/big"
|
||||||
|
"os"
|
||||||
|
"slices"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/charmbracelet/lipgloss"
|
||||||
|
"github.com/charmbracelet/lipgloss/table"
|
||||||
|
"go.yaml.in/yaml/v4"
|
||||||
|
)
|
||||||
|
|
||||||
|
// DefaultResponseDurationAccuracy is the default duration bucket size
// (in nanoseconds) used when recording response times; see SarinResponseData.
const DefaultResponseDurationAccuracy uint32 = 1

// DefaultResponseColumnMaxWidth caps the width of the "Response" column when
// rendering the results table.
const DefaultResponseColumnMaxWidth = 50
|
||||||
|
|
||||||
|
// Duration wraps time.Duration to provide consistent JSON/YAML marshaling as
// human-readable strings (e.g. "1.5s") instead of raw nanosecond counts.
type Duration time.Duration

// MarshalJSON encodes the duration as a quoted, full-precision string.
func (d Duration) MarshalJSON() ([]byte, error) {
	//nolint:wrapcheck
	return json.Marshal(time.Duration(d).String())
}

// MarshalYAML encodes the duration as a full-precision string scalar.
func (d Duration) MarshalYAML() (any, error) {
	return time.Duration(d).String(), nil
}

// String renders the duration rounded for display: millisecond precision at
// or above one second, microsecond precision at or above one millisecond,
// full precision below that.
func (d Duration) String() string {
	dur := time.Duration(d)
	if dur >= time.Second {
		return dur.Round(time.Millisecond).String()
	}
	if dur >= time.Millisecond {
		return dur.Round(time.Microsecond).String()
	}
	return dur.String()
}
|
||||||
|
|
||||||
|
// BigInt wraps big.Int to provide consistent JSON/YAML marshaling as numbers.
|
||||||
|
type BigInt struct {
|
||||||
|
*big.Int
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b BigInt) MarshalJSON() ([]byte, error) {
|
||||||
|
return []byte(b.Int.String()), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b BigInt) MarshalYAML() (any, error) {
|
||||||
|
return &yaml.Node{
|
||||||
|
Kind: yaml.ScalarNode,
|
||||||
|
Tag: "!!int",
|
||||||
|
Value: b.Int.String(),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b BigInt) String() string {
|
||||||
|
return b.Int.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Response holds the recorded duration histogram for a single response key.
type Response struct {
	// durations maps a bucketed duration (responseTime / accuracy) to the
	// number of responses that fell into that bucket.
	durations map[time.Duration]uint64
}
|
||||||
|
|
||||||
|
// SarinResponseData aggregates response durations per response key.
// The embedded mutex guards Responses; all exported methods lock it.
type SarinResponseData struct {
	sync.Mutex

	// Responses maps a caller-supplied response key to its duration histogram.
	Responses map[string]*Response

	// accuracy is the time bucket size in nanoseconds for storing response durations.
	// Larger values (e.g., 1000) save memory but reduce accuracy by grouping more durations together.
	// Smaller values (e.g., 10) improve accuracy but increase memory usage.
	// Minimum value is 1 (most accurate, highest memory usage).
	// Default value is 1.
	accuracy time.Duration
}
|
||||||
|
|
||||||
|
func NewSarinResponseData(accuracy uint32) *SarinResponseData {
|
||||||
|
if accuracy == 0 {
|
||||||
|
accuracy = DefaultResponseDurationAccuracy
|
||||||
|
}
|
||||||
|
|
||||||
|
return &SarinResponseData{
|
||||||
|
Responses: make(map[string]*Response),
|
||||||
|
accuracy: time.Duration(accuracy),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (data *SarinResponseData) Add(responseKey string, responseTime time.Duration) {
|
||||||
|
data.Lock()
|
||||||
|
defer data.Unlock()
|
||||||
|
|
||||||
|
response, ok := data.Responses[responseKey]
|
||||||
|
if !ok {
|
||||||
|
data.Responses[responseKey] = &Response{
|
||||||
|
durations: map[time.Duration]uint64{
|
||||||
|
responseTime / data.accuracy: 1,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
response.durations[responseTime/data.accuracy]++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrintTable renders the aggregated statistics to stdout as a bordered table:
// one row per response key plus a trailing "Total" row.
// Note: rows come from map iteration, so per-key row order varies between runs.
func (data *SarinResponseData) PrintTable() {
	data.Lock()
	defer data.Unlock()

	output := data.prepareOutputData()

	headerStyle := lipgloss.NewStyle().
		Bold(true).
		Foreground(lipgloss.Color("246")).
		Padding(0, 1)

	cellStyle := lipgloss.NewStyle().
		Padding(0, 1)

	// One row per response key, plus one for the total.
	rows := make([][]string, 0, len(output.Responses)+1)
	for key, stats := range output.Responses {
		rows = append(rows, []string{
			wrapText(key, DefaultResponseColumnMaxWidth),
			stats.Count.String(),
			stats.Min.String(),
			stats.Max.String(),
			stats.Average.String(),
			stats.P90.String(),
			stats.P95.String(),
			stats.P99.String(),
		})
	}

	rows = append(rows, []string{
		"Total",
		output.Total.Count.String(),
		output.Total.Min.String(),
		output.Total.Max.String(),
		output.Total.Average.String(),
		output.Total.P90.String(),
		output.Total.P95.String(),
		output.Total.P99.String(),
	})

	tbl := table.New().
		Border(lipgloss.NormalBorder()).
		BorderStyle(lipgloss.NewStyle().Foreground(lipgloss.Color("240"))).
		BorderRow(true).
		Headers("Response", "Count", "Min", "Max", "Average", "P90", "P95", "P99").
		Rows(rows...).
		StyleFunc(func(row, col int) lipgloss.Style {
			if row == table.HeaderRow {
				return headerStyle
			}
			return cellStyle
		})

	fmt.Println(tbl)
}
|
||||||
|
|
||||||
|
func (data *SarinResponseData) PrintJSON() {
|
||||||
|
data.Lock()
|
||||||
|
defer data.Unlock()
|
||||||
|
|
||||||
|
output := data.prepareOutputData()
|
||||||
|
encoder := json.NewEncoder(os.Stdout)
|
||||||
|
encoder.SetIndent("", " ")
|
||||||
|
if err := encoder.Encode(output); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (data *SarinResponseData) PrintYAML() {
|
||||||
|
data.Lock()
|
||||||
|
defer data.Unlock()
|
||||||
|
|
||||||
|
output := data.prepareOutputData()
|
||||||
|
encoder := yaml.NewEncoder(os.Stdout)
|
||||||
|
encoder.SetIndent(2)
|
||||||
|
if err := encoder.Encode(output); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// responseStat holds the aggregated latency statistics for one response
// group (an HTTP status code string or an error message).
type responseStat struct {
	Count   BigInt   `json:"count" yaml:"count"`     // number of responses in the group
	Min     Duration `json:"min" yaml:"min"`         // fastest observed response time
	Max     Duration `json:"max" yaml:"max"`         // slowest observed response time
	Average Duration `json:"average" yaml:"average"` // mean response time, rounded to nearest
	P90     Duration `json:"p90" yaml:"p90"`         // 90th percentile response time
	P95     Duration `json:"p95" yaml:"p95"`         // 95th percentile response time
	P99     Duration `json:"p99" yaml:"p99"`         // 99th percentile response time
}
|
||||||
|
|
||||||
|
// responseStats maps a response key (status code string or error text) to its stats.
type responseStats map[string]responseStat
|
||||||
|
|
||||||
|
// outputData is the shape shared by all printers (table, JSON, YAML):
// per-group statistics plus an aggregate "total" row.
type outputData struct {
	Responses map[string]responseStat `json:"responses" yaml:"responses"` // stats keyed by response group
	Total     responseStat            `json:"total" yaml:"total"`         // stats over all groups combined
}
|
||||||
|
|
||||||
|
func (data *SarinResponseData) prepareOutputData() outputData {
|
||||||
|
switch len(data.Responses) {
|
||||||
|
case 0:
|
||||||
|
return outputData{
|
||||||
|
Responses: make(map[string]responseStat),
|
||||||
|
Total: responseStat{},
|
||||||
|
}
|
||||||
|
case 1:
|
||||||
|
var (
|
||||||
|
responseKey string
|
||||||
|
stats responseStat
|
||||||
|
)
|
||||||
|
for key, response := range data.Responses {
|
||||||
|
stats = calculateStats(response.durations, data.accuracy)
|
||||||
|
responseKey = key
|
||||||
|
}
|
||||||
|
return outputData{
|
||||||
|
Responses: responseStats{
|
||||||
|
responseKey: stats,
|
||||||
|
},
|
||||||
|
Total: stats,
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
// Calculate stats for each response
|
||||||
|
allStats := make(responseStats)
|
||||||
|
var totalDurations = make(map[time.Duration]uint64)
|
||||||
|
|
||||||
|
for key, response := range data.Responses {
|
||||||
|
stats := calculateStats(response.durations, data.accuracy)
|
||||||
|
allStats[key] = stats
|
||||||
|
|
||||||
|
// Aggregate for total row
|
||||||
|
for duration, count := range response.durations {
|
||||||
|
totalDurations[duration] += count
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return outputData{
|
||||||
|
Responses: allStats,
|
||||||
|
Total: calculateStats(totalDurations, data.accuracy),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func calculateStats(durations map[time.Duration]uint64, accuracy time.Duration) responseStat {
|
||||||
|
if len(durations) == 0 {
|
||||||
|
return responseStat{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract and sort unique durations
|
||||||
|
sortedDurations := make([]time.Duration, 0, len(durations))
|
||||||
|
for duration := range durations {
|
||||||
|
sortedDurations = append(sortedDurations, duration)
|
||||||
|
}
|
||||||
|
slices.Sort(sortedDurations)
|
||||||
|
|
||||||
|
sum := new(big.Int)
|
||||||
|
totalCount := new(big.Int)
|
||||||
|
minDuration := sortedDurations[0] * accuracy
|
||||||
|
maxDuration := sortedDurations[len(sortedDurations)-1] * accuracy
|
||||||
|
|
||||||
|
for _, duration := range sortedDurations {
|
||||||
|
actualDuration := duration * accuracy
|
||||||
|
count := durations[duration]
|
||||||
|
|
||||||
|
totalCount.Add(
|
||||||
|
totalCount,
|
||||||
|
new(big.Int).SetUint64(count),
|
||||||
|
)
|
||||||
|
|
||||||
|
sum.Add(
|
||||||
|
sum,
|
||||||
|
new(big.Int).Mul(
|
||||||
|
new(big.Int).SetInt64(int64(actualDuration)),
|
||||||
|
new(big.Int).SetUint64(count),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate percentiles
|
||||||
|
p90 := calculatePercentile(sortedDurations, durations, totalCount, 90, accuracy)
|
||||||
|
p95 := calculatePercentile(sortedDurations, durations, totalCount, 95, accuracy)
|
||||||
|
p99 := calculatePercentile(sortedDurations, durations, totalCount, 99, accuracy)
|
||||||
|
|
||||||
|
return responseStat{
|
||||||
|
Count: BigInt{totalCount},
|
||||||
|
Min: Duration(minDuration),
|
||||||
|
Max: Duration(maxDuration),
|
||||||
|
Average: Duration(div(sum, totalCount).Int64()),
|
||||||
|
P90: p90,
|
||||||
|
P95: p95,
|
||||||
|
P99: p99,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func calculatePercentile(sortedDurations []time.Duration, durations map[time.Duration]uint64, totalCount *big.Int, percentile int, accuracy time.Duration) Duration {
|
||||||
|
// Calculate the target position for the percentile
|
||||||
|
// Using ceiling method: position = ceil(totalCount * percentile / 100)
|
||||||
|
target := new(big.Int).Mul(totalCount, big.NewInt(int64(percentile)))
|
||||||
|
target.Add(target, big.NewInt(99)) // Add 99 to achieve ceiling division by 100
|
||||||
|
target.Div(target, big.NewInt(100))
|
||||||
|
|
||||||
|
// Accumulate counts until we reach the target position
|
||||||
|
cumulative := new(big.Int)
|
||||||
|
for _, duration := range sortedDurations {
|
||||||
|
count := durations[duration]
|
||||||
|
cumulative.Add(cumulative, new(big.Int).SetUint64(count))
|
||||||
|
|
||||||
|
if cumulative.Cmp(target) >= 0 {
|
||||||
|
return Duration(duration * accuracy)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback to the last duration (shouldn't happen with valid data)
|
||||||
|
return Duration(sortedDurations[len(sortedDurations)-1] * accuracy)
|
||||||
|
}
|
||||||
|
|
||||||
|
// div performs division with rounding to the nearest integer.
|
||||||
|
func div(x, y *big.Int) *big.Int {
|
||||||
|
quotient, remainder := new(big.Int).DivMod(x, y, new(big.Int))
|
||||||
|
if remainder.Mul(remainder, big.NewInt(2)).Cmp(y) >= 0 {
|
||||||
|
quotient.Add(quotient, big.NewInt(1))
|
||||||
|
}
|
||||||
|
return quotient
|
||||||
|
}
|
||||||
|
|
||||||
|
// wrapText wraps a string to multiple lines if it exceeds maxWidth.
|
||||||
|
func wrapText(s string, maxWidth int) string {
|
||||||
|
if len(s) <= maxWidth {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
var lines []string
|
||||||
|
for len(s) > maxWidth {
|
||||||
|
lines = append(lines, s[:maxWidth])
|
||||||
|
s = s[maxWidth:]
|
||||||
|
}
|
||||||
|
if len(s) > 0 {
|
||||||
|
lines = append(lines, s)
|
||||||
|
}
|
||||||
|
|
||||||
|
return strings.Join(lines, "\n")
|
||||||
|
}
|
||||||
818
internal/sarin/sarin.go
Normal file
818
internal/sarin/sarin.go
Normal file
@@ -0,0 +1,818 @@
|
|||||||
|
package sarin
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"net/url"
|
||||||
|
"os"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
"sync/atomic"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/charmbracelet/bubbles/progress"
|
||||||
|
"github.com/charmbracelet/bubbles/spinner"
|
||||||
|
tea "github.com/charmbracelet/bubbletea"
|
||||||
|
"github.com/charmbracelet/lipgloss"
|
||||||
|
"github.com/charmbracelet/x/term"
|
||||||
|
"github.com/valyala/fasthttp"
|
||||||
|
"go.aykhans.me/sarin/internal/script"
|
||||||
|
"go.aykhans.me/sarin/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
// runtimeMessageLevel classifies messages surfaced in the live progress UI.
type runtimeMessageLevel uint8

const (
	// runtimeMessageLevelWarning marks non-fatal notices.
	runtimeMessageLevelWarning runtimeMessageLevel = iota
	// runtimeMessageLevelError marks request/generation failures.
	runtimeMessageLevelError
)
|
||||||
|
|
||||||
|
// runtimeMessage is a timestamped log line produced by workers and rendered
// in the terminal progress view.
type runtimeMessage struct {
	timestamp time.Time           // when the event occurred
	level     runtimeMessageLevel // warning or error
	text      string              // human-readable message body
}
|
||||||
|
|
||||||
|
// messageSender delivers a runtime message to the progress UI; in quiet mode
// it is a no-op.
type messageSender func(level runtimeMessageLevel, text string)
|
||||||
|
|
||||||
|
// sarin bundles the full configuration and runtime state of one load test.
// Construct it via NewSarin; the zero value is not usable.
type sarin struct {
	workers        uint           // number of concurrent worker goroutines (>= 1)
	requestURL     *url.URL       // target URL
	methods        []string       // HTTP methods to pick from per request
	params         types.Params   // query parameters to apply
	headers        types.Headers  // request headers to apply
	cookies        types.Cookies  // request cookies to apply
	bodies         []string       // request bodies to pick from
	totalRequests  *uint64        // fixed request budget; nil means unbounded
	totalDuration  *time.Duration // run-time budget; nil means unbounded
	timeout        time.Duration  // per-request timeout
	quiet          bool           // suppress the terminal progress UI
	skipCertVerify bool           // skip TLS certificate verification
	values         []string       // user-supplied template values
	collectStats   bool           // record per-response latency stats
	dryRun         bool           // generate requests without sending them

	hostClients []*fasthttp.HostClient // pre-built clients (one per proxy, or direct)
	responses   *SarinResponseData     // stats sink; nil unless collectStats
	fileCache   *FileCache             // cache for file-backed template data
	scriptChain *script.Chain          // user Lua/JS transform scripts
}
|
||||||
|
|
||||||
|
// NewSarin creates a new sarin instance for load testing.
// A workers value of 0 is promoted to 1. Host clients and script sources
// are prepared eagerly so configuration errors surface before Start.
// It can return the following errors:
// - types.ProxyDialError
// - types.ErrScriptEmpty
// - types.ScriptLoadError
func NewSarin(
	ctx context.Context,
	methods []string,
	requestURL *url.URL,
	timeout time.Duration,
	workers uint,
	totalRequests *uint64,
	totalDuration *time.Duration,
	quiet bool,
	skipCertVerify bool,
	params types.Params,
	headers types.Headers,
	cookies types.Cookies,
	bodies []string,
	proxies types.Proxies,
	values []string,
	collectStats bool,
	dryRun bool,
	luaScripts []string,
	jsScripts []string,
) (*sarin, error) {
	if workers == 0 {
		workers = 1
	}

	// Build one client pool up front; proxy connectivity problems fail here.
	hostClients, err := newHostClients(ctx, timeout, proxies, workers, requestURL, skipCertVerify)
	if err != nil {
		return nil, err
	}

	// Load script sources
	luaSources, err := script.LoadSources(ctx, luaScripts, script.EngineTypeLua)
	if err != nil {
		return nil, err
	}

	jsSources, err := script.LoadSources(ctx, jsScripts, script.EngineTypeJavaScript)
	if err != nil {
		return nil, err
	}

	scriptChain := script.NewChain(luaSources, jsSources)

	srn := &sarin{
		workers:        workers,
		requestURL:     requestURL,
		methods:        methods,
		params:         params,
		headers:        headers,
		cookies:        cookies,
		bodies:         bodies,
		totalRequests:  totalRequests,
		totalDuration:  totalDuration,
		timeout:        timeout,
		quiet:          quiet,
		skipCertVerify: skipCertVerify,
		values:         values,
		collectStats:   collectStats,
		dryRun:         dryRun,
		hostClients:    hostClients,
		fileCache:      NewFileCache(time.Second * 10),
		scriptChain:    scriptChain,
	}

	// The stats sink is only allocated when stats are requested; workers
	// check collectStats before touching it.
	if collectStats {
		srn.responses = NewSarinResponseData(uint32(100))
	}

	return srn, nil
}
|
||||||
|
|
||||||
|
// GetResponses returns the collected response statistics, or nil when
// stats collection was disabled at construction time.
func (q sarin) GetResponses() *SarinResponseData {
	return q.responses
}
|
||||||
|
|
||||||
|
// Start runs the load test to completion: it spawns the workers, optionally
// starts the terminal progress UI, feeds jobs until the request budget,
// duration budget, or ctx cancellation stops it, then drains and shuts
// everything down in order. It blocks until all workers (and the UI, if
// shown) have fully stopped.
func (q sarin) Start(ctx context.Context) {
	// jobsCtx governs job distribution only; jobsCancel is triggered by the
	// duration timer, Ctrl+C in the UI, or the parent ctx.
	jobsCtx, jobsCancel := context.WithCancel(ctx)

	var workersWG sync.WaitGroup
	jobsCh := make(chan struct{}, max(q.workers, 1))

	// counter tracks completed requests for progress display.
	var counter atomic.Uint64

	totalRequests := uint64(0)
	if q.totalRequests != nil {
		totalRequests = *q.totalRequests
	}

	var streamCtx context.Context
	var streamCancel context.CancelFunc
	var streamCh chan struct{}
	var messageChannel chan runtimeMessage
	var sendMessage messageSender

	// Force quiet mode when stdout is not a TTY (e.g. piped output).
	if !q.quiet && !term.IsTerminal(os.Stdout.Fd()) {
		q.quiet = true
	}

	if q.quiet {
		// No UI: messages are dropped.
		sendMessage = func(level runtimeMessageLevel, text string) {}
	} else {
		// The stream context is detached from ctx on purpose: the UI must
		// outlive job cancellation so it can render the final state.
		streamCtx, streamCancel = context.WithCancel(context.Background())
		defer streamCancel()
		streamCh = make(chan struct{})
		messageChannel = make(chan runtimeMessage, max(q.workers, 1))
		sendMessage = func(level runtimeMessageLevel, text string) {
			messageChannel <- runtimeMessage{
				timestamp: time.Now(),
				level:     level,
				text:      text,
			}
		}
	}

	// Start workers
	q.startWorkers(&workersWG, jobsCh, q.hostClients, &counter, sendMessage)

	if !q.quiet {
		// Start streaming to terminal
		//nolint:contextcheck // streamCtx must remain active until all workers complete to ensure all collected data is streamed
		go q.streamProgress(streamCtx, jobsCancel, streamCh, totalRequests, &counter, messageChannel)
	}

	// Setup duration-based cancellation
	q.setupDurationTimeout(ctx, jobsCancel)
	// Distribute jobs to workers.
	// This blocks until all jobs are sent or the context is canceled.
	q.sendJobs(jobsCtx, jobsCh)

	// Close the jobs channel so workers stop after completing their current job
	close(jobsCh)
	// Wait until all workers stopped
	workersWG.Wait()
	// Workers are the only senders on messageChannel, so it is safe to
	// close it once they have all returned.
	if messageChannel != nil {
		close(messageChannel)
	}

	if !q.quiet {
		// Stop the progress streaming
		streamCancel()
		// Wait until progress streaming has completely stopped
		<-streamCh
	}
}
|
||||||
|
|
||||||
|
// Worker is the body of one worker goroutine. It acquires a request/response
// pair once, builds a per-worker script transformer (script engines are not
// thread-safe), then dispatches to one of eight specialized loops chosen by
// the (dryRun, collectStats, dynamic-request) configuration so the hot loop
// carries no per-iteration branching on configuration.
func (q sarin) Worker(
	jobs <-chan struct{},
	hostClientGenerator HostClientGenerator,
	counter *atomic.Uint64,
	sendMessage messageSender,
) {
	req := fasthttp.AcquireRequest()
	resp := fasthttp.AcquireResponse()
	defer fasthttp.ReleaseRequest(req)
	defer fasthttp.ReleaseResponse(resp)

	// Create script transformer for this worker (engines are not thread-safe)
	// Scripts are pre-validated in NewSarin, so this should not fail
	var scriptTransformer *script.Transformer
	if !q.scriptChain.IsEmpty() {
		var err error
		scriptTransformer, err = q.scriptChain.NewTransformer()
		if err != nil {
			panic(err)
		}
		defer scriptTransformer.Close()
	}

	// isDynamic reports whether the request differs between iterations;
	// static requests are generated once outside the loop.
	requestGenerator, isDynamic := NewRequestGenerator(
		q.methods, q.requestURL, q.params, q.headers, q.cookies, q.bodies, q.values, q.fileCache, scriptTransformer,
	)

	if q.dryRun {
		switch {
		case q.collectStats && isDynamic:
			q.workerDryRunStatsWithDynamic(jobs, req, requestGenerator, counter, sendMessage)
		case q.collectStats && !isDynamic:
			q.workerDryRunStatsWithStatic(jobs, req, requestGenerator, counter, sendMessage)
		case !q.collectStats && isDynamic:
			q.workerDryRunNoStatsWithDynamic(jobs, req, requestGenerator, counter, sendMessage)
		default:
			q.workerDryRunNoStatsWithStatic(jobs, req, requestGenerator, counter, sendMessage)
		}
	} else {
		switch {
		case q.collectStats && isDynamic:
			q.workerStatsWithDynamic(jobs, req, resp, requestGenerator, hostClientGenerator, counter, sendMessage)
		case q.collectStats && !isDynamic:
			q.workerStatsWithStatic(jobs, req, resp, requestGenerator, hostClientGenerator, counter, sendMessage)
		case !q.collectStats && isDynamic:
			q.workerNoStatsWithDynamic(jobs, req, resp, requestGenerator, hostClientGenerator, counter, sendMessage)
		default:
			q.workerNoStatsWithStatic(jobs, req, resp, requestGenerator, hostClientGenerator, counter, sendMessage)
		}
	}
}
|
||||||
|
|
||||||
|
func (q sarin) workerStatsWithDynamic(
|
||||||
|
jobs <-chan struct{},
|
||||||
|
req *fasthttp.Request,
|
||||||
|
resp *fasthttp.Response,
|
||||||
|
requestGenerator RequestGenerator,
|
||||||
|
hostClientGenerator HostClientGenerator,
|
||||||
|
counter *atomic.Uint64,
|
||||||
|
sendMessage messageSender,
|
||||||
|
) {
|
||||||
|
for range jobs {
|
||||||
|
req.Reset()
|
||||||
|
resp.Reset()
|
||||||
|
|
||||||
|
if err := requestGenerator(req); err != nil {
|
||||||
|
q.responses.Add(err.Error(), 0)
|
||||||
|
sendMessage(runtimeMessageLevelError, err.Error())
|
||||||
|
counter.Add(1)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
startTime := time.Now()
|
||||||
|
err := hostClientGenerator().DoTimeout(req, resp, q.timeout)
|
||||||
|
if err != nil {
|
||||||
|
q.responses.Add(err.Error(), time.Since(startTime))
|
||||||
|
} else {
|
||||||
|
q.responses.Add(statusCodeToString(resp.StatusCode()), time.Since(startTime))
|
||||||
|
}
|
||||||
|
counter.Add(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q sarin) workerStatsWithStatic(
|
||||||
|
jobs <-chan struct{},
|
||||||
|
req *fasthttp.Request,
|
||||||
|
resp *fasthttp.Response,
|
||||||
|
requestGenerator RequestGenerator,
|
||||||
|
hostClientGenerator HostClientGenerator,
|
||||||
|
counter *atomic.Uint64,
|
||||||
|
sendMessage messageSender,
|
||||||
|
) {
|
||||||
|
if err := requestGenerator(req); err != nil {
|
||||||
|
// Static request generation failed - record all jobs as errors
|
||||||
|
for range jobs {
|
||||||
|
q.responses.Add(err.Error(), 0)
|
||||||
|
sendMessage(runtimeMessageLevelError, err.Error())
|
||||||
|
counter.Add(1)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for range jobs {
|
||||||
|
resp.Reset()
|
||||||
|
|
||||||
|
startTime := time.Now()
|
||||||
|
err := hostClientGenerator().DoTimeout(req, resp, q.timeout)
|
||||||
|
if err != nil {
|
||||||
|
q.responses.Add(err.Error(), time.Since(startTime))
|
||||||
|
} else {
|
||||||
|
q.responses.Add(statusCodeToString(resp.StatusCode()), time.Since(startTime))
|
||||||
|
}
|
||||||
|
counter.Add(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q sarin) workerNoStatsWithDynamic(
|
||||||
|
jobs <-chan struct{},
|
||||||
|
req *fasthttp.Request,
|
||||||
|
resp *fasthttp.Response,
|
||||||
|
requestGenerator RequestGenerator,
|
||||||
|
hostClientGenerator HostClientGenerator,
|
||||||
|
counter *atomic.Uint64,
|
||||||
|
sendMessage messageSender,
|
||||||
|
) {
|
||||||
|
for range jobs {
|
||||||
|
req.Reset()
|
||||||
|
resp.Reset()
|
||||||
|
if err := requestGenerator(req); err != nil {
|
||||||
|
sendMessage(runtimeMessageLevelError, err.Error())
|
||||||
|
counter.Add(1)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
_ = hostClientGenerator().DoTimeout(req, resp, q.timeout)
|
||||||
|
counter.Add(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q sarin) workerNoStatsWithStatic(
|
||||||
|
jobs <-chan struct{},
|
||||||
|
req *fasthttp.Request,
|
||||||
|
resp *fasthttp.Response,
|
||||||
|
requestGenerator RequestGenerator,
|
||||||
|
hostClientGenerator HostClientGenerator,
|
||||||
|
counter *atomic.Uint64,
|
||||||
|
sendMessage messageSender,
|
||||||
|
) {
|
||||||
|
if err := requestGenerator(req); err != nil {
|
||||||
|
sendMessage(runtimeMessageLevelError, err.Error())
|
||||||
|
|
||||||
|
// Static request generation failed - just count the jobs without sending
|
||||||
|
for range jobs {
|
||||||
|
counter.Add(1)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for range jobs {
|
||||||
|
resp.Reset()
|
||||||
|
_ = hostClientGenerator().DoTimeout(req, resp, q.timeout)
|
||||||
|
counter.Add(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// dryRunResponseKey is the stats key recorded for every successful dry-run iteration.
const dryRunResponseKey = "dry-run"
|
||||||
|
|
||||||
|
// statusCodeStrings contains pre-computed string representations for HTTP status codes 100-599.
// Built once at package init so the hot response-recording path avoids a
// strconv.Itoa call (and its allocation) per request.
var statusCodeStrings = func() map[int]string {
	m := make(map[int]string, 500)
	for i := 100; i < 600; i++ {
		m[i] = strconv.Itoa(i)
	}
	return m
}()
|
||||||
|
|
||||||
|
// statusCodeToString returns a string representation of the HTTP status code.
|
||||||
|
// Uses a pre-computed map for codes 100-599, falls back to strconv.Itoa for others.
|
||||||
|
func statusCodeToString(code int) string {
|
||||||
|
if s, ok := statusCodeStrings[code]; ok {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
return strconv.Itoa(code)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q sarin) workerDryRunStatsWithDynamic(
|
||||||
|
jobs <-chan struct{},
|
||||||
|
req *fasthttp.Request,
|
||||||
|
requestGenerator RequestGenerator,
|
||||||
|
counter *atomic.Uint64,
|
||||||
|
sendMessage messageSender,
|
||||||
|
) {
|
||||||
|
for range jobs {
|
||||||
|
req.Reset()
|
||||||
|
startTime := time.Now()
|
||||||
|
if err := requestGenerator(req); err != nil {
|
||||||
|
q.responses.Add(err.Error(), time.Since(startTime))
|
||||||
|
sendMessage(runtimeMessageLevelError, err.Error())
|
||||||
|
counter.Add(1)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
q.responses.Add(dryRunResponseKey, time.Since(startTime))
|
||||||
|
counter.Add(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q sarin) workerDryRunStatsWithStatic(
|
||||||
|
jobs <-chan struct{},
|
||||||
|
req *fasthttp.Request,
|
||||||
|
requestGenerator RequestGenerator,
|
||||||
|
counter *atomic.Uint64,
|
||||||
|
sendMessage messageSender,
|
||||||
|
) {
|
||||||
|
if err := requestGenerator(req); err != nil {
|
||||||
|
// Static request generation failed - record all jobs as errors
|
||||||
|
for range jobs {
|
||||||
|
q.responses.Add(err.Error(), 0)
|
||||||
|
sendMessage(runtimeMessageLevelError, err.Error())
|
||||||
|
counter.Add(1)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for range jobs {
|
||||||
|
q.responses.Add(dryRunResponseKey, 0)
|
||||||
|
counter.Add(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q sarin) workerDryRunNoStatsWithDynamic(
|
||||||
|
jobs <-chan struct{},
|
||||||
|
req *fasthttp.Request,
|
||||||
|
requestGenerator RequestGenerator,
|
||||||
|
counter *atomic.Uint64,
|
||||||
|
sendMessage messageSender,
|
||||||
|
) {
|
||||||
|
for range jobs {
|
||||||
|
req.Reset()
|
||||||
|
if err := requestGenerator(req); err != nil {
|
||||||
|
sendMessage(runtimeMessageLevelError, err.Error())
|
||||||
|
}
|
||||||
|
counter.Add(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q sarin) workerDryRunNoStatsWithStatic(
|
||||||
|
jobs <-chan struct{},
|
||||||
|
req *fasthttp.Request,
|
||||||
|
requestGenerator RequestGenerator,
|
||||||
|
counter *atomic.Uint64,
|
||||||
|
sendMessage messageSender,
|
||||||
|
) {
|
||||||
|
if err := requestGenerator(req); err != nil {
|
||||||
|
sendMessage(runtimeMessageLevelError, err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
for range jobs {
|
||||||
|
counter.Add(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// newHostClients initializes HTTP clients for the given configuration.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.ProxyDialError
|
||||||
|
func newHostClients(
|
||||||
|
ctx context.Context,
|
||||||
|
timeout time.Duration,
|
||||||
|
proxies types.Proxies,
|
||||||
|
workers uint,
|
||||||
|
requestURL *url.URL,
|
||||||
|
skipCertVerify bool,
|
||||||
|
) ([]*fasthttp.HostClient, error) {
|
||||||
|
proxiesRaw := make([]url.URL, len(proxies))
|
||||||
|
for i, proxy := range proxies {
|
||||||
|
proxiesRaw[i] = url.URL(proxy)
|
||||||
|
}
|
||||||
|
|
||||||
|
maxConns := max(fasthttp.DefaultMaxConnsPerHost, workers)
|
||||||
|
maxConns = ((maxConns * 50 / 100) + maxConns)
|
||||||
|
return NewHostClients(
|
||||||
|
ctx,
|
||||||
|
timeout,
|
||||||
|
proxiesRaw,
|
||||||
|
maxConns,
|
||||||
|
requestURL,
|
||||||
|
skipCertVerify,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q sarin) startWorkers(wg *sync.WaitGroup, jobs <-chan struct{}, hostClients []*fasthttp.HostClient, counter *atomic.Uint64, sendMessage messageSender) {
|
||||||
|
for range max(q.workers, 1) {
|
||||||
|
wg.Go(func() {
|
||||||
|
q.Worker(jobs, NewHostClientGenerator(hostClients...), counter, sendMessage)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q sarin) setupDurationTimeout(ctx context.Context, cancel context.CancelFunc) {
|
||||||
|
if q.totalDuration != nil {
|
||||||
|
go func() {
|
||||||
|
timer := time.NewTimer(*q.totalDuration)
|
||||||
|
defer timer.Stop()
|
||||||
|
select {
|
||||||
|
case <-timer.C:
|
||||||
|
cancel()
|
||||||
|
case <-ctx.Done():
|
||||||
|
// Context cancelled, cleanup
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q sarin) sendJobs(ctx context.Context, jobs chan<- struct{}) {
|
||||||
|
if q.totalRequests != nil && *q.totalRequests > 0 {
|
||||||
|
for range *q.totalRequests {
|
||||||
|
if ctx.Err() != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
jobs <- struct{}{}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
for ctx.Err() == nil {
|
||||||
|
jobs <- struct{}{}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// tickMsg is the periodic refresh message driving the progress bar redraw.
type tickMsg time.Time
|
||||||
|
|
||||||
|
// Shared lipgloss styles for the terminal progress views.
var (
	helpStyle    = lipgloss.NewStyle().Foreground(lipgloss.Color("#d1d1d1")) // footer hint text
	errorStyle   = lipgloss.NewStyle().Foreground(lipgloss.Color("#FC5B5B")).Bold(true)
	warningStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("#FFD93D")).Bold(true)
	// messageChannelStyle frames the rolling worker-message log with a
	// left border.
	messageChannelStyle = lipgloss.NewStyle().
				Border(lipgloss.ThickBorder(), false, false, false, true).
				BorderForeground(lipgloss.Color("#757575")).
				PaddingLeft(1).
				Margin(1, 0, 0, 0).
				Foreground(lipgloss.Color("#888888"))
)
|
||||||
|
|
||||||
|
// progressModel is the Bubble Tea model shown when the total number of
// requests is known: a progress bar plus a rolling message log.
type progressModel struct {
	progress  progress.Model  // the gradient progress bar
	startTime time.Time       // when the run began, for the elapsed display
	messages  []string        // fixed-size rolling buffer of worker messages
	counter   *atomic.Uint64  // completed-request counter shared with workers
	current   uint64          // last value read from counter
	maxValue  uint64          // total requests; denominator of the bar
	ctx       context.Context //nolint:containedctx
	cancel    context.CancelFunc // cancels job distribution (Ctrl+C)
	cancelling bool            // true after Ctrl+C, switches footer text
}
|
||||||
|
|
||||||
|
func (m progressModel) Init() tea.Cmd {
|
||||||
|
return tea.Batch(progressTickCmd())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update handles UI events: Ctrl+C triggers cancellation, window resizes
// re-fit the bar, runtimeMessages rotate into the fixed-size log, and
// ticks keep the view refreshing. Every branch also quits once ctx is done
// so the program exits promptly whatever message arrives last.
func (m progressModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
	switch msg := msg.(type) {
	case tea.KeyMsg:
		if msg.Type == tea.KeyCtrlC {
			m.cancelling = true
			m.cancel()
		}
		return m, nil

	case tea.WindowSizeMsg:
		m.progress.Width = max(10, msg.Width-1)
		if m.ctx.Err() != nil {
			return m, tea.Quit
		}
		return m, nil

	case runtimeMessage:
		// Format "[HH:MM:SS] LEVEL: text".
		var msgBuilder strings.Builder
		msgBuilder.WriteString("[")
		msgBuilder.WriteString(msg.timestamp.Format("15:04:05"))
		msgBuilder.WriteString("] ")
		switch msg.level {
		case runtimeMessageLevelError:
			msgBuilder.WriteString(errorStyle.Render("ERROR: "))
		case runtimeMessageLevelWarning:
			msgBuilder.WriteString(warningStyle.Render("WARNING: "))
		}
		msgBuilder.WriteString(msg.text)
		// Rotate: drop the oldest entry, append the newest, keeping the
		// buffer length constant.
		m.messages = append(m.messages[1:], msgBuilder.String())
		if m.ctx.Err() != nil {
			return m, tea.Quit
		}
		return m, nil

	case tickMsg:
		if m.ctx.Err() != nil {
			return m, tea.Quit
		}
		// Schedule the next refresh.
		return m, progressTickCmd()

	default:
		if m.ctx.Err() != nil {
			return m, tea.Quit
		}
		return m, nil
	}
}
|
||||||
|
|
||||||
|
// View renders the message log (if any), the "current/max - elapsed"
// counter line, the progress bar, and a footer hint.
func (m progressModel) View() string {
	// Join the non-empty rolling-log entries with newlines.
	var messagesBuilder strings.Builder
	for i, msg := range m.messages {
		if len(msg) > 0 {
			messagesBuilder.WriteString(msg)
			if i < len(m.messages)-1 {
				messagesBuilder.WriteString("\n")
			}
		}
	}

	var finalBuilder strings.Builder
	if messagesBuilder.Len() > 0 {
		finalBuilder.WriteString(messageChannelStyle.Render(messagesBuilder.String()))
		finalBuilder.WriteString("\n")
	}

	// Snapshot the shared counter once per render.
	m.current = m.counter.Load()
	finalBuilder.WriteString("\n ")
	finalBuilder.WriteString(strconv.FormatUint(m.current, 10))
	finalBuilder.WriteString("/")
	finalBuilder.WriteString(strconv.FormatUint(m.maxValue, 10))
	finalBuilder.WriteString(" - ")
	// Elapsed time rounded to 0.1s for a stable display.
	finalBuilder.WriteString(time.Since(m.startTime).Round(time.Second / 10).String())
	finalBuilder.WriteString("\n ")
	finalBuilder.WriteString(m.progress.ViewAs(float64(m.current) / float64(m.maxValue)))
	finalBuilder.WriteString("\n\n ")
	if m.cancelling {
		finalBuilder.WriteString(helpStyle.Render("Stopping..."))
	} else {
		finalBuilder.WriteString(helpStyle.Render("Press Ctrl+C to quit"))
	}
	return finalBuilder.String()
}
|
||||||
|
|
||||||
|
func progressTickCmd() tea.Cmd {
|
||||||
|
return tea.Tick(time.Millisecond*250, func(t time.Time) tea.Msg {
|
||||||
|
return tickMsg(t)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// infiniteProgressStyle colors the spinner in the unbounded-run view.
var infiniteProgressStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("#00D4FF"))
|
||||||
|
|
||||||
|
// infiniteProgressModel is the Bubble Tea model shown when no total request
// count is known: a spinner, the running counter, and a rolling message log.
type infiniteProgressModel struct {
	spinner   spinner.Model   // animated dot spinner
	startTime time.Time       // when the run began, for the elapsed display
	counter   *atomic.Uint64  // completed-request counter shared with workers
	messages  []string        // fixed-size rolling buffer of worker messages
	ctx       context.Context //nolint:containedctx
	quit      bool            // true once quitting; freezes the spinner in View
	cancel    context.CancelFunc // cancels job distribution (Ctrl+C)
	cancelling bool            // true after Ctrl+C, switches footer text
}
|
||||||
|
|
||||||
|
// Init starts the spinner animation ticker.
func (m infiniteProgressModel) Init() tea.Cmd {
	return m.spinner.Tick
}
|
||||||
|
|
||||||
|
// Update handles UI events: Ctrl+C triggers cancellation, runtimeMessages
// rotate into the fixed-size log, and any other message (including spinner
// ticks) advances the spinner. Each branch quits once ctx is done, setting
// quit so View renders the final frozen frame.
func (m infiniteProgressModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
	switch msg := msg.(type) {
	case tea.KeyMsg:
		if msg.Type == tea.KeyCtrlC {
			m.cancelling = true
			m.cancel()
		}
		return m, nil

	case runtimeMessage:
		// Format "[HH:MM:SS] LEVEL: text".
		var msgBuilder strings.Builder
		msgBuilder.WriteString("[")
		msgBuilder.WriteString(msg.timestamp.Format("15:04:05"))
		msgBuilder.WriteString("] ")
		switch msg.level {
		case runtimeMessageLevelError:
			msgBuilder.WriteString(errorStyle.Render("ERROR: "))
		case runtimeMessageLevelWarning:
			msgBuilder.WriteString(warningStyle.Render("WARNING: "))
		}
		msgBuilder.WriteString(msg.text)
		// Rotate: drop the oldest entry, append the newest.
		m.messages = append(m.messages[1:], msgBuilder.String())
		if m.ctx.Err() != nil {
			m.quit = true
			return m, tea.Quit
		}
		return m, nil

	default:
		if m.ctx.Err() != nil {
			m.quit = true
			return m, tea.Quit
		}
		var cmd tea.Cmd
		m.spinner, cmd = m.spinner.Update(msg)
		return m, cmd
	}
}
|
||||||
|
|
||||||
|
// View renders the message log (if any), then a "count spinner elapsed"
// line. After quit the spinner is replaced by a static run of dots and the
// footer hint is omitted, producing a clean final frame.
func (m infiniteProgressModel) View() string {
	// Join the non-empty rolling-log entries with newlines.
	var messagesBuilder strings.Builder
	for i, msg := range m.messages {
		if len(msg) > 0 {
			messagesBuilder.WriteString(msg)
			if i < len(m.messages)-1 {
				messagesBuilder.WriteString("\n")
			}
		}
	}

	var finalBuilder strings.Builder
	if messagesBuilder.Len() > 0 {
		finalBuilder.WriteString(messageChannelStyle.Render(messagesBuilder.String()))
		finalBuilder.WriteString("\n")
	}

	if m.quit {
		// Final frame: static dots instead of an animated spinner.
		finalBuilder.WriteString("\n ")
		finalBuilder.WriteString(strconv.FormatUint(m.counter.Load(), 10))
		finalBuilder.WriteString(" ")
		finalBuilder.WriteString(infiniteProgressStyle.Render("∙∙∙∙∙"))
		finalBuilder.WriteString(" ")
		finalBuilder.WriteString(time.Since(m.startTime).Round(time.Second / 10).String())
		finalBuilder.WriteString("\n\n")
	} else {
		finalBuilder.WriteString("\n ")
		finalBuilder.WriteString(strconv.FormatUint(m.counter.Load(), 10))
		finalBuilder.WriteString(" ")
		finalBuilder.WriteString(m.spinner.View())
		finalBuilder.WriteString(" ")
		finalBuilder.WriteString(time.Since(m.startTime).Round(time.Second / 10).String())
		finalBuilder.WriteString("\n\n ")
		if m.cancelling {
			finalBuilder.WriteString(helpStyle.Render("Stopping..."))
		} else {
			finalBuilder.WriteString(helpStyle.Render("Press Ctrl+C to quit"))
		}
	}
	return finalBuilder.String()
}
|
||||||
|
|
||||||
|
func (q sarin) streamProgress(
|
||||||
|
ctx context.Context,
|
||||||
|
cancel context.CancelFunc,
|
||||||
|
done chan<- struct{},
|
||||||
|
total uint64,
|
||||||
|
counter *atomic.Uint64,
|
||||||
|
messageChannel <-chan runtimeMessage,
|
||||||
|
) {
|
||||||
|
var program *tea.Program
|
||||||
|
if total > 0 {
|
||||||
|
model := progressModel{
|
||||||
|
progress: progress.New(progress.WithGradient("#151594", "#00D4FF")),
|
||||||
|
startTime: time.Now(),
|
||||||
|
messages: make([]string, 8),
|
||||||
|
counter: counter,
|
||||||
|
current: 0,
|
||||||
|
maxValue: total,
|
||||||
|
ctx: ctx,
|
||||||
|
cancel: cancel,
|
||||||
|
}
|
||||||
|
|
||||||
|
program = tea.NewProgram(model)
|
||||||
|
} else {
|
||||||
|
model := infiniteProgressModel{
|
||||||
|
spinner: spinner.New(
|
||||||
|
spinner.WithSpinner(
|
||||||
|
spinner.Spinner{
|
||||||
|
Frames: []string{
|
||||||
|
"●∙∙∙∙",
|
||||||
|
"∙●∙∙∙",
|
||||||
|
"∙∙●∙∙",
|
||||||
|
"∙∙∙●∙",
|
||||||
|
"∙∙∙∙●",
|
||||||
|
"∙∙∙●∙",
|
||||||
|
"∙∙●∙∙",
|
||||||
|
"∙●∙∙∙",
|
||||||
|
},
|
||||||
|
FPS: time.Second / 8, //nolint:mnd
|
||||||
|
},
|
||||||
|
),
|
||||||
|
spinner.WithStyle(infiniteProgressStyle),
|
||||||
|
),
|
||||||
|
startTime: time.Now(),
|
||||||
|
counter: counter,
|
||||||
|
messages: make([]string, 8),
|
||||||
|
ctx: ctx,
|
||||||
|
cancel: cancel,
|
||||||
|
quit: false,
|
||||||
|
}
|
||||||
|
|
||||||
|
program = tea.NewProgram(model)
|
||||||
|
}
|
||||||
|
|
||||||
|
go func() {
|
||||||
|
for msg := range messageChannel {
|
||||||
|
program.Send(msg)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
if _, err := program.Run(); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
done <- struct{}{}
|
||||||
|
}
|
||||||
647
internal/sarin/template.go
Normal file
647
internal/sarin/template.go
Normal file
@@ -0,0 +1,647 @@
|
|||||||
|
package sarin
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/base64"
|
||||||
|
"math/rand/v2"
|
||||||
|
"mime/multipart"
|
||||||
|
"strings"
|
||||||
|
"text/template"
|
||||||
|
"text/template/parse"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/brianvoe/gofakeit/v7"
|
||||||
|
"go.aykhans.me/sarin/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
// NewDefaultTemplateFuncMap builds the default template.FuncMap exposed to
// request templates: string helpers, dict/slice constructors, file embedding
// (backed by fileCache), and a large set of gofakeit-based fake-data
// generators seeded from randSource.
// If fileCache is nil, file_Base64 returns types.ErrFileCacheNotInitialized.
// Commented-out entries document gofakeit functions that are deliberately
// not exposed (their signatures don't map cleanly onto template arguments).
func NewDefaultTemplateFuncMap(randSource rand.Source, fileCache *FileCache) template.FuncMap {
	fakeit := gofakeit.NewFaker(randSource, false)

	return template.FuncMap{
		// Strings
		"strings_ToUpper":      strings.ToUpper,
		"strings_ToLower":      strings.ToLower,
		"strings_RemoveSpaces": func(s string) string { return strings.ReplaceAll(s, " ", "") },
		"strings_Replace":      strings.Replace,
		// strings_ToDate parses a "YYYY-MM-DD" string; on a parse error it
		// falls back to time.Now() rather than failing the template.
		"strings_ToDate": func(dateString string) time.Time {
			date, err := time.Parse("2006-01-02", dateString)
			if err != nil {
				return time.Now()
			}
			return date
		},
		// strings_First returns the first n runes (not bytes) of s.
		"strings_First": func(s string, n int) string {
			runes := []rune(s)
			if n <= 0 {
				return ""
			}
			if n >= len(runes) {
				return s
			}
			return string(runes[:n])
		},
		// strings_Last returns the last n runes (not bytes) of s.
		"strings_Last": func(s string, n int) string {
			runes := []rune(s)
			if n <= 0 {
				return ""
			}
			if n >= len(runes) {
				return s
			}
			return string(runes[len(runes)-n:])
		},
		// strings_Truncate cuts s to n runes and appends "..." when it was
		// actually shortened. Note: n <= 0 yields just "...".
		"strings_Truncate": func(s string, n int) string {
			runes := []rune(s)
			if n <= 0 {
				return "..."
			}
			if n >= len(runes) {
				return s
			}
			return string(runes[:n]) + "..."
		},
		"strings_TrimPrefix": strings.TrimPrefix,
		"strings_TrimSuffix": strings.TrimSuffix,
		// Dict
		// dict_Str builds a map from alternating key/value arguments; a
		// trailing key without a value is silently dropped.
		"dict_Str": func(values ...string) map[string]string {
			dict := make(map[string]string)
			for i := 0; i < len(values); i += 2 {
				if i+1 < len(values) {
					key := values[i]
					value := values[i+1]
					dict[key] = value
				}
			}
			return dict
		},

		// Slice
		"slice_Str":  func(values ...string) []string { return values },
		"slice_Int":  func(values ...int) []int { return values },
		"slice_Uint": func(values ...uint) []uint { return values },
		"slice_Join": strings.Join,

		// File
		// file_Base64 reads a file (local or remote URL) and returns its Base64 encoded content.
		// Usage: {{ file_Base64 "/path/to/file.pdf" }}
		//        {{ file_Base64 "https://example.com/image.png" }}
		"file_Base64": func(source string) (string, error) {
			if fileCache == nil {
				return "", types.ErrFileCacheNotInitialized
			}
			cached, err := fileCache.GetOrLoad(source)
			if err != nil {
				return "", err
			}
			return base64.StdEncoding.EncodeToString(cached.Content), nil
		},

		// Fakeit / File
		// "fakeit_CSV": fakeit.CSV(nil),
		// "fakeit_JSON": fakeit.JSON(nil),
		// "fakeit_XML": fakeit.XML(nil),
		"fakeit_FileExtension": fakeit.FileExtension,
		"fakeit_FileMimeType":  fakeit.FileMimeType,

		// Fakeit / ID
		"fakeit_ID":   fakeit.ID,
		"fakeit_UUID": fakeit.UUID,

		// Fakeit / Template
		// "fakeit_Template": fakeit.Template(nil) (string, error),
		// "fakeit_Markdown": fakeit.Markdown(nil) (string, error),
		// "fakeit_EmailText": fakeit.EmailText(nil) (string, error),
		// "fakeit_FixedWidth": fakeit.FixedWidth(nil) (string, error),

		// Fakeit / Product
		// "fakeit_Product": fakeit.Product() *ProductInfo,
		"fakeit_ProductName":        fakeit.ProductName,
		"fakeit_ProductDescription": fakeit.ProductDescription,
		"fakeit_ProductCategory":    fakeit.ProductCategory,
		"fakeit_ProductFeature":     fakeit.ProductFeature,
		"fakeit_ProductMaterial":    fakeit.ProductMaterial,
		"fakeit_ProductUPC":         fakeit.ProductUPC,
		"fakeit_ProductAudience":    fakeit.ProductAudience,
		"fakeit_ProductDimension":   fakeit.ProductDimension,
		"fakeit_ProductUseCase":     fakeit.ProductUseCase,
		"fakeit_ProductBenefit":     fakeit.ProductBenefit,
		"fakeit_ProductSuffix":      fakeit.ProductSuffix,
		"fakeit_ProductISBN":        func() string { return fakeit.ProductISBN(nil) },

		// Fakeit / Person
		// "fakeit_Person": fakeit.Person() *PersonInfo,
		"fakeit_Name":       fakeit.Name,
		"fakeit_NamePrefix": fakeit.NamePrefix,
		"fakeit_NameSuffix": fakeit.NameSuffix,
		"fakeit_FirstName":  fakeit.FirstName,
		"fakeit_MiddleName": fakeit.MiddleName,
		"fakeit_LastName":   fakeit.LastName,
		"fakeit_Gender":     fakeit.Gender,
		"fakeit_Age":        fakeit.Age,
		"fakeit_Ethnicity":  fakeit.Ethnicity,
		"fakeit_SSN":        fakeit.SSN,
		"fakeit_EIN":        fakeit.EIN,
		"fakeit_Hobby":      fakeit.Hobby,
		// "fakeit_Contact": fakeit.Contact() *ContactInfo,
		"fakeit_Email":          fakeit.Email,
		"fakeit_Phone":          fakeit.Phone,
		"fakeit_PhoneFormatted": fakeit.PhoneFormatted,
		// "fakeit_Teams": fakeit.Teams(peopleArray []string, teamsArray []string) map[string][]string,

		// Fakeit / Generate
		// "fakeit_Struct": fakeit.Struct(v any),
		// "fakeit_Slice": fakeit.Slice(v any),
		// "fakeit_Map": fakeit.Map() map[string]any,
		// "fakeit_Generate": fakeit.Generate(value string) string,
		"fakeit_Regex": fakeit.Regex,

		// Fakeit / Auth
		"fakeit_Username": fakeit.Username,
		"fakeit_Password": fakeit.Password,

		// Fakeit / Address
		// "fakeit_Address": fakeit.Address() *AddressInfo,
		"fakeit_City":         fakeit.City,
		"fakeit_Country":      fakeit.Country,
		"fakeit_CountryAbr":   fakeit.CountryAbr,
		"fakeit_State":        fakeit.State,
		"fakeit_StateAbr":     fakeit.StateAbr,
		"fakeit_Street":       fakeit.Street,
		"fakeit_StreetName":   fakeit.StreetName,
		"fakeit_StreetNumber": fakeit.StreetNumber,
		"fakeit_StreetPrefix": fakeit.StreetPrefix,
		"fakeit_StreetSuffix": fakeit.StreetSuffix,
		"fakeit_Unit":         fakeit.Unit,
		"fakeit_Zip":          fakeit.Zip,
		"fakeit_Latitude":     fakeit.Latitude,
		// Returns 0 when the range is invalid (error swallowed so the
		// template keeps rendering).
		"fakeit_LatitudeInRange": func(minLatitude, maxLatitude float64) float64 {
			value, err := fakeit.LatitudeInRange(minLatitude, maxLatitude)
			if err != nil {
				var zero float64
				return zero
			}
			return value
		},
		"fakeit_Longitude": fakeit.Longitude,
		// Same error policy as fakeit_LatitudeInRange: invalid range → 0.
		"fakeit_LongitudeInRange": func(minLongitude, maxLongitude float64) float64 {
			value, err := fakeit.LongitudeInRange(minLongitude, maxLongitude)
			if err != nil {
				var zero float64
				return zero
			}
			return value
		},

		// Fakeit / Game
		"fakeit_Gamertag": fakeit.Gamertag,
		// "fakeit_Dice": fakeit.Dice(numDice uint, sides []uint) []uint,

		// Fakeit / Beer
		"fakeit_BeerAlcohol": fakeit.BeerAlcohol,
		"fakeit_BeerBlg":     fakeit.BeerBlg,
		"fakeit_BeerHop":     fakeit.BeerHop,
		"fakeit_BeerIbu":     fakeit.BeerIbu,
		"fakeit_BeerMalt":    fakeit.BeerMalt,
		"fakeit_BeerName":    fakeit.BeerName,
		"fakeit_BeerStyle":   fakeit.BeerStyle,
		"fakeit_BeerYeast":   fakeit.BeerYeast,

		// Fakeit / Car
		// "fakeit_Car": fakeit.Car() *CarInfo,
		"fakeit_CarMaker":            fakeit.CarMaker,
		"fakeit_CarModel":            fakeit.CarModel,
		"fakeit_CarType":             fakeit.CarType,
		"fakeit_CarFuelType":         fakeit.CarFuelType,
		"fakeit_CarTransmissionType": fakeit.CarTransmissionType,

		// Fakeit / Words
		// Nouns
		"fakeit_Noun":                 fakeit.Noun,
		"fakeit_NounCommon":           fakeit.NounCommon,
		"fakeit_NounConcrete":         fakeit.NounConcrete,
		"fakeit_NounAbstract":         fakeit.NounAbstract,
		"fakeit_NounCollectivePeople": fakeit.NounCollectivePeople,
		"fakeit_NounCollectiveAnimal": fakeit.NounCollectiveAnimal,
		"fakeit_NounCollectiveThing":  fakeit.NounCollectiveThing,
		"fakeit_NounCountable":        fakeit.NounCountable,
		"fakeit_NounUncountable":      fakeit.NounUncountable,

		// Verbs
		"fakeit_Verb":        fakeit.Verb,
		"fakeit_VerbAction":  fakeit.VerbAction,
		"fakeit_VerbLinking": fakeit.VerbLinking,
		"fakeit_VerbHelping": fakeit.VerbHelping,

		// Adverbs
		"fakeit_Adverb":                    fakeit.Adverb,
		"fakeit_AdverbManner":              fakeit.AdverbManner,
		"fakeit_AdverbDegree":              fakeit.AdverbDegree,
		"fakeit_AdverbPlace":               fakeit.AdverbPlace,
		"fakeit_AdverbTimeDefinite":        fakeit.AdverbTimeDefinite,
		"fakeit_AdverbTimeIndefinite":      fakeit.AdverbTimeIndefinite,
		"fakeit_AdverbFrequencyDefinite":   fakeit.AdverbFrequencyDefinite,
		"fakeit_AdverbFrequencyIndefinite": fakeit.AdverbFrequencyIndefinite,

		// Propositions
		"fakeit_Preposition":         fakeit.Preposition,
		"fakeit_PrepositionSimple":   fakeit.PrepositionSimple,
		"fakeit_PrepositionDouble":   fakeit.PrepositionDouble,
		"fakeit_PrepositionCompound": fakeit.PrepositionCompound,

		// Adjectives
		"fakeit_Adjective":              fakeit.Adjective,
		"fakeit_AdjectiveDescriptive":   fakeit.AdjectiveDescriptive,
		"fakeit_AdjectiveQuantitative":  fakeit.AdjectiveQuantitative,
		"fakeit_AdjectiveProper":        fakeit.AdjectiveProper,
		"fakeit_AdjectiveDemonstrative": fakeit.AdjectiveDemonstrative,
		"fakeit_AdjectivePossessive":    fakeit.AdjectivePossessive,
		"fakeit_AdjectiveInterrogative": fakeit.AdjectiveInterrogative,
		"fakeit_AdjectiveIndefinite":    fakeit.AdjectiveIndefinite,

		// Pronouns
		"fakeit_Pronoun":              fakeit.Pronoun,
		"fakeit_PronounPersonal":      fakeit.PronounPersonal,
		"fakeit_PronounObject":        fakeit.PronounObject,
		"fakeit_PronounPossessive":    fakeit.PronounPossessive,
		"fakeit_PronounReflective":    fakeit.PronounReflective,
		"fakeit_PronounDemonstrative": fakeit.PronounDemonstrative,
		"fakeit_PronounInterrogative": fakeit.PronounInterrogative,
		"fakeit_PronounRelative":      fakeit.PronounRelative,

		// Connectives
		"fakeit_Connective":            fakeit.Connective,
		"fakeit_ConnectiveTime":        fakeit.ConnectiveTime,
		"fakeit_ConnectiveComparative": fakeit.ConnectiveComparative,
		"fakeit_ConnectiveComplaint":   fakeit.ConnectiveComplaint,
		"fakeit_ConnectiveListing":     fakeit.ConnectiveListing,
		"fakeit_ConnectiveCasual":      fakeit.ConnectiveCasual,
		"fakeit_ConnectiveExamplify":   fakeit.ConnectiveExamplify,

		// Words
		"fakeit_Word": fakeit.Word,

		// Text
		"fakeit_Sentence":            fakeit.Sentence,
		"fakeit_Paragraph":           fakeit.Paragraph,
		"fakeit_LoremIpsumWord":      fakeit.LoremIpsumWord,
		"fakeit_LoremIpsumSentence":  fakeit.LoremIpsumSentence,
		"fakeit_LoremIpsumParagraph": fakeit.LoremIpsumParagraph,
		"fakeit_Question":            fakeit.Question,
		"fakeit_Quote":               fakeit.Quote,
		"fakeit_Phrase":              fakeit.Phrase,

		// Fakeit / Foods
		"fakeit_Fruit":     fakeit.Fruit,
		"fakeit_Vegetable": fakeit.Vegetable,
		"fakeit_Breakfast": fakeit.Breakfast,
		"fakeit_Lunch":     fakeit.Lunch,
		"fakeit_Dinner":    fakeit.Dinner,
		"fakeit_Snack":     fakeit.Snack,
		"fakeit_Dessert":   fakeit.Dessert,

		// Fakeit / Misc
		"fakeit_Bool": fakeit.Bool,
		// "fakeit_Weighted": fakeit.Weighted(options []any, weights []float32) (any, error),
		"fakeit_FlipACoin": fakeit.FlipACoin,
		// "fakeit_RandomMapKey": fakeit.RandomMapKey(mapI any) any,
		// "fakeit_ShuffleAnySlice": fakeit.ShuffleAnySlice(v any),

		// Fakeit / Colors
		"fakeit_Color":      fakeit.Color,
		"fakeit_HexColor":   fakeit.HexColor,
		"fakeit_RGBColor":   fakeit.RGBColor,
		"fakeit_SafeColor":  fakeit.SafeColor,
		"fakeit_NiceColors": fakeit.NiceColors,

		// Fakeit / Images
		// "fakeit_Image": fakeit.Image(width int, height int) *img.RGBA,
		"fakeit_ImageJpeg": fakeit.ImageJpeg,
		"fakeit_ImagePng":  fakeit.ImagePng,

		// Fakeit / Internet
		"fakeit_URL":                  fakeit.URL,
		"fakeit_UrlSlug":              fakeit.UrlSlug,
		"fakeit_DomainName":           fakeit.DomainName,
		"fakeit_DomainSuffix":         fakeit.DomainSuffix,
		"fakeit_IPv4Address":          fakeit.IPv4Address,
		"fakeit_IPv6Address":          fakeit.IPv6Address,
		"fakeit_MacAddress":           fakeit.MacAddress,
		"fakeit_HTTPStatusCode":       fakeit.HTTPStatusCode,
		"fakeit_HTTPStatusCodeSimple": fakeit.HTTPStatusCodeSimple,
		"fakeit_LogLevel":             fakeit.LogLevel,
		"fakeit_HTTPMethod":           fakeit.HTTPMethod,
		"fakeit_HTTPVersion":          fakeit.HTTPVersion,
		"fakeit_UserAgent":            fakeit.UserAgent,
		"fakeit_ChromeUserAgent":      fakeit.ChromeUserAgent,
		"fakeit_FirefoxUserAgent":     fakeit.FirefoxUserAgent,
		"fakeit_OperaUserAgent":       fakeit.OperaUserAgent,
		"fakeit_SafariUserAgent":      fakeit.SafariUserAgent,
		"fakeit_APIUserAgent":         fakeit.APIUserAgent,

		// Fakeit / HTML
		"fakeit_InputName": fakeit.InputName,
		"fakeit_Svg":       func() string { return fakeit.Svg(nil) },

		// Fakeit / Date/Time
		"fakeit_Date":           fakeit.Date,
		"fakeit_PastDate":       fakeit.PastDate,
		"fakeit_FutureDate":     fakeit.FutureDate,
		"fakeit_DateRange":      fakeit.DateRange,
		"fakeit_NanoSecond":     fakeit.NanoSecond,
		"fakeit_Second":         fakeit.Second,
		"fakeit_Minute":         fakeit.Minute,
		"fakeit_Hour":           fakeit.Hour,
		"fakeit_Month":          fakeit.Month,
		"fakeit_MonthString":    fakeit.MonthString,
		"fakeit_Day":            fakeit.Day,
		"fakeit_WeekDay":        fakeit.WeekDay,
		"fakeit_Year":           fakeit.Year,
		"fakeit_TimeZone":       fakeit.TimeZone,
		"fakeit_TimeZoneAbv":    fakeit.TimeZoneAbv,
		"fakeit_TimeZoneFull":   fakeit.TimeZoneFull,
		"fakeit_TimeZoneOffset": fakeit.TimeZoneOffset,
		"fakeit_TimeZoneRegion": fakeit.TimeZoneRegion,

		// Fakeit / Payment
		"fakeit_Price": fakeit.Price,
		// "fakeit_CreditCard": fakeit.CreditCard() *CreditCardInfo,
		"fakeit_CreditCardCvv": fakeit.CreditCardCvv,
		"fakeit_CreditCardExp": fakeit.CreditCardExp,
		// Wraps CreditCardNumber so templates only choose whether the
		// number is rendered with gaps.
		"fakeit_CreditCardNumber": func(gaps bool) string {
			return fakeit.CreditCardNumber(&gofakeit.CreditCardOptions{Gaps: gaps})
		},
		"fakeit_CreditCardType": fakeit.CreditCardType,
		// "fakeit_Currency": fakeit.Currency() *CurrencyInfo,
		"fakeit_CurrencyLong":      fakeit.CurrencyLong,
		"fakeit_CurrencyShort":     fakeit.CurrencyShort,
		"fakeit_AchRouting":        fakeit.AchRouting,
		"fakeit_AchAccount":        fakeit.AchAccount,
		"fakeit_BitcoinAddress":    fakeit.BitcoinAddress,
		"fakeit_BitcoinPrivateKey": fakeit.BitcoinPrivateKey,
		"fakeit_BankName":          fakeit.BankName,
		"fakeit_BankType":          fakeit.BankType,

		// Fakeit / Finance
		"fakeit_Cusip": fakeit.Cusip,
		"fakeit_Isin":  fakeit.Isin,

		// Fakeit / Company
		"fakeit_BS":            fakeit.BS,
		"fakeit_Blurb":         fakeit.Blurb,
		"fakeit_BuzzWord":      fakeit.BuzzWord,
		"fakeit_Company":       fakeit.Company,
		"fakeit_CompanySuffix": fakeit.CompanySuffix,
		// "fakeit_Job": fakeit.Job() *JobInfo,
		"fakeit_JobDescriptor": fakeit.JobDescriptor,
		"fakeit_JobLevel":      fakeit.JobLevel,
		"fakeit_JobTitle":      fakeit.JobTitle,
		"fakeit_Slogan":        fakeit.Slogan,

		// Fakeit / Hacker
		"fakeit_HackerAbbreviation": fakeit.HackerAbbreviation,
		"fakeit_HackerAdjective":    fakeit.HackerAdjective,
		"fakeit_HackeringVerb":      fakeit.HackeringVerb,
		"fakeit_HackerNoun":         fakeit.HackerNoun,
		"fakeit_HackerPhrase":       fakeit.HackerPhrase,
		"fakeit_HackerVerb":         fakeit.HackerVerb,

		// Fakeit / Hipster
		"fakeit_HipsterWord":      fakeit.HipsterWord,
		"fakeit_HipsterSentence":  fakeit.HipsterSentence,
		"fakeit_HipsterParagraph": fakeit.HipsterParagraph,

		// Fakeit / App
		"fakeit_AppName":    fakeit.AppName,
		"fakeit_AppVersion": fakeit.AppVersion,
		"fakeit_AppAuthor":  fakeit.AppAuthor,

		// Fakeit / Animal
		"fakeit_PetName":    fakeit.PetName,
		"fakeit_Animal":     fakeit.Animal,
		"fakeit_AnimalType": fakeit.AnimalType,
		"fakeit_FarmAnimal": fakeit.FarmAnimal,
		"fakeit_Cat":        fakeit.Cat,
		"fakeit_Dog":        fakeit.Dog,
		"fakeit_Bird":       fakeit.Bird,

		// Fakeit / Emoji
		"fakeit_Emoji":            fakeit.Emoji,
		"fakeit_EmojiCategory":    fakeit.EmojiCategory,
		"fakeit_EmojiAlias":       fakeit.EmojiAlias,
		"fakeit_EmojiTag":         fakeit.EmojiTag,
		"fakeit_EmojiFlag":        fakeit.EmojiFlag,
		"fakeit_EmojiAnimal":      fakeit.EmojiAnimal,
		"fakeit_EmojiFood":        fakeit.EmojiFood,
		"fakeit_EmojiPlant":       fakeit.EmojiPlant,
		"fakeit_EmojiMusic":       fakeit.EmojiMusic,
		"fakeit_EmojiVehicle":     fakeit.EmojiVehicle,
		"fakeit_EmojiSport":       fakeit.EmojiSport,
		"fakeit_EmojiFace":        fakeit.EmojiFace,
		"fakeit_EmojiHand":        fakeit.EmojiHand,
		"fakeit_EmojiClothing":    fakeit.EmojiClothing,
		"fakeit_EmojiLandmark":    fakeit.EmojiLandmark,
		"fakeit_EmojiElectronics": fakeit.EmojiElectronics,
		"fakeit_EmojiGame":        fakeit.EmojiGame,
		"fakeit_EmojiTools":       fakeit.EmojiTools,
		"fakeit_EmojiWeather":     fakeit.EmojiWeather,
		"fakeit_EmojiJob":         fakeit.EmojiJob,
		"fakeit_EmojiPerson":      fakeit.EmojiPerson,
		"fakeit_EmojiGesture":     fakeit.EmojiGesture,
		"fakeit_EmojiCostume":     fakeit.EmojiCostume,
		"fakeit_EmojiSentence":    fakeit.EmojiSentence,

		// Fakeit / Language
		"fakeit_Language":             fakeit.Language,
		"fakeit_LanguageAbbreviation": fakeit.LanguageAbbreviation,
		"fakeit_ProgrammingLanguage":  fakeit.ProgrammingLanguage,

		// Fakeit / Number
		"fakeit_Number":       fakeit.Number,
		"fakeit_Int":          fakeit.Int,
		"fakeit_IntN":         fakeit.IntN,
		"fakeit_Int8":         fakeit.Int8,
		"fakeit_Int16":        fakeit.Int16,
		"fakeit_Int32":        fakeit.Int32,
		"fakeit_Int64":        fakeit.Int64,
		"fakeit_Uint":         fakeit.Uint,
		"fakeit_UintN":        fakeit.UintN,
		"fakeit_Uint8":        fakeit.Uint8,
		"fakeit_Uint16":       fakeit.Uint16,
		"fakeit_Uint32":       fakeit.Uint32,
		"fakeit_Uint64":       fakeit.Uint64,
		"fakeit_Float32":      fakeit.Float32,
		"fakeit_Float32Range": fakeit.Float32Range,
		"fakeit_Float64":      fakeit.Float64,
		"fakeit_Float64Range": fakeit.Float64Range,
		// "fakeit_ShuffleInts": fakeit.ShuffleInts,
		"fakeit_RandomInt": fakeit.RandomInt,
		"fakeit_HexUint":   fakeit.HexUint,

		// Fakeit / String
		"fakeit_Digit":    fakeit.Digit,
		"fakeit_DigitN":   fakeit.DigitN,
		"fakeit_Letter":   fakeit.Letter,
		"fakeit_LetterN":  fakeit.LetterN,
		"fakeit_Lexify":   fakeit.Lexify,
		"fakeit_Numerify": fakeit.Numerify,
		// "fakeit_ShuffleStrings": fakeit.ShuffleStrings,
		"fakeit_RandomString": fakeit.RandomString,

		// Fakeit / Celebrity
		"fakeit_CelebrityActor":    fakeit.CelebrityActor,
		"fakeit_CelebrityBusiness": fakeit.CelebrityBusiness,
		"fakeit_CelebritySport":    fakeit.CelebritySport,

		// Fakeit / Minecraft
		"fakeit_MinecraftOre":             fakeit.MinecraftOre,
		"fakeit_MinecraftWood":            fakeit.MinecraftWood,
		"fakeit_MinecraftArmorTier":       fakeit.MinecraftArmorTier,
		"fakeit_MinecraftArmorPart":       fakeit.MinecraftArmorPart,
		"fakeit_MinecraftWeapon":          fakeit.MinecraftWeapon,
		"fakeit_MinecraftTool":            fakeit.MinecraftTool,
		"fakeit_MinecraftDye":             fakeit.MinecraftDye,
		"fakeit_MinecraftFood":            fakeit.MinecraftFood,
		"fakeit_MinecraftAnimal":          fakeit.MinecraftAnimal,
		"fakeit_MinecraftVillagerJob":     fakeit.MinecraftVillagerJob,
		"fakeit_MinecraftVillagerStation": fakeit.MinecraftVillagerStation,
		"fakeit_MinecraftVillagerLevel":   fakeit.MinecraftVillagerLevel,
		"fakeit_MinecraftMobPassive":      fakeit.MinecraftMobPassive,
		"fakeit_MinecraftMobNeutral":      fakeit.MinecraftMobNeutral,
		"fakeit_MinecraftMobHostile":      fakeit.MinecraftMobHostile,
		"fakeit_MinecraftMobBoss":         fakeit.MinecraftMobBoss,
		"fakeit_MinecraftBiome":           fakeit.MinecraftBiome,
		"fakeit_MinecraftWeather":         fakeit.MinecraftWeather,

		// Fakeit / Book
		// "fakeit_Book": fakeit.Book() *BookInfo,
		"fakeit_BookTitle":  fakeit.BookTitle,
		"fakeit_BookAuthor": fakeit.BookAuthor,
		"fakeit_BookGenre":  fakeit.BookGenre,

		// Fakeit / Movie
		// "fakeit_Movie": fakeit.Movie() *MovieInfo,
		"fakeit_MovieName":  fakeit.MovieName,
		"fakeit_MovieGenre": fakeit.MovieGenre,

		// Fakeit / Error
		// Error generators return the error's message string so they can be
		// interpolated directly into template output.
		"fakeit_Error":           func() string { return fakeit.Error().Error() },
		"fakeit_ErrorDatabase":   func() string { return fakeit.ErrorDatabase().Error() },
		"fakeit_ErrorGRPC":       func() string { return fakeit.ErrorGRPC().Error() },
		"fakeit_ErrorHTTP":       func() string { return fakeit.ErrorHTTP().Error() },
		"fakeit_ErrorHTTPClient": func() string { return fakeit.ErrorHTTPClient().Error() },
		"fakeit_ErrorHTTPServer": func() string { return fakeit.ErrorHTTPServer().Error() },
		// "fakeit_ErrorInput": func() string { return fakeit.ErrorInput().Error() },
		"fakeit_ErrorRuntime": func() string { return fakeit.ErrorRuntime().Error() },

		// Fakeit / School
		"fakeit_School": fakeit.School,

		// Fakeit / Song
		// "fakeit_Song": fakeit.Song() *SongInfo,
		"fakeit_SongName":   fakeit.SongName,
		"fakeit_SongArtist": fakeit.SongArtist,
		"fakeit_SongGenre":  fakeit.SongGenre,
	}
}
|
||||||
|
|
||||||
|
// BodyTemplateFuncMapData carries mutable state shared between body template
// functions and the request builder; currently it records the multipart
// boundary content type produced by body_FormData.
// NOTE(review): "ContenType" looks like a typo for "ContentType"; kept as-is
// because the exported accessor names are part of the public API.
type BodyTemplateFuncMapData struct {
	formDataContenType string // set by body_FormData; empty when no form-data body was rendered
}
|
||||||
|
|
||||||
|
// GetFormDataContenType returns the multipart content type recorded by the
// most recent body_FormData template call, or "" if none was rendered.
func (data BodyTemplateFuncMapData) GetFormDataContenType() string {
	return data.formDataContenType
}
|
||||||
|
|
||||||
|
// ClearFormDataContenType resets the recorded multipart content type so state
// from one rendered request body does not leak into the next.
func (data *BodyTemplateFuncMapData) ClearFormDataContenType() {
	data.formDataContenType = ""
}
|
||||||
|
|
||||||
|
// NewDefaultBodyTemplateFuncMap extends the default template FuncMap with
// body-specific helpers. When data is non-nil, a body_FormData function is
// registered; it builds a multipart/form-data body and records the generated
// boundary content type in data (readable via GetFormDataContenType).
// Note that the content type is recorded as soon as the writer is created,
// so it is set even if a later part fails and the render returns an error.
func NewDefaultBodyTemplateFuncMap(
	randSource rand.Source,
	data *BodyTemplateFuncMapData,
	fileCache *FileCache,
) template.FuncMap {
	funcMap := NewDefaultTemplateFuncMap(randSource, fileCache)

	if data != nil {
		// body_FormData creates a multipart/form-data body from key-value pairs.
		// Usage: {{ body_FormData "field1" "value1" "field2" "value2" ... }}
		//
		// Values starting with "@" are treated as file references:
		//   - "@/path/to/file.txt" - local file
		//   - "@http://example.com/file" - remote file via HTTP
		//   - "@https://example.com/file" - remote file via HTTPS
		//
		// To send a literal string starting with "@", escape it with "@@":
		//   - "@@literal" sends "@literal"
		//
		// Example with mixed text and files:
		//   {{ body_FormData "name" "John" "avatar" "@/path/to/photo.jpg" "doc" "@https://example.com/file.pdf" }}
		funcMap["body_FormData"] = func(pairs ...string) (string, error) {
			if len(pairs)%2 != 0 {
				return "", types.ErrFormDataOddArgs
			}

			var multipartData bytes.Buffer
			writer := multipart.NewWriter(&multipartData)
			// Record the boundary content type up front so the request
			// builder can set the Content-Type header for this body.
			data.formDataContenType = writer.FormDataContentType()

			for i := 0; i < len(pairs); i += 2 {
				key := pairs[i]
				val := pairs[i+1]

				switch {
				case strings.HasPrefix(val, "@@"):
					// Escaped @ - send as literal string without first @
					if err := writer.WriteField(key, val[1:]); err != nil {
						return "", err
					}
				case strings.HasPrefix(val, "@"):
					// File (local path or remote URL)
					if fileCache == nil {
						return "", types.ErrFileCacheNotInitialized
					}
					source := val[1:]
					cached, err := fileCache.GetOrLoad(source)
					if err != nil {
						return "", err
					}
					part, err := writer.CreateFormFile(key, cached.Filename)
					if err != nil {
						return "", err
					}
					if _, err := part.Write(cached.Content); err != nil {
						return "", err
					}
				default:
					// Regular text field
					if err := writer.WriteField(key, val); err != nil {
						return "", err
					}
				}
			}

			// Close writes the terminating boundary; without it the body
			// would be an incomplete multipart message.
			if err := writer.Close(); err != nil {
				return "", err
			}
			return multipartData.String(), nil
		}
	}

	return funcMap
}
|
||||||
|
|
||||||
|
func hasTemplateActions(tmpl *template.Template) bool {
|
||||||
|
if tmpl.Tree == nil || tmpl.Root == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, node := range tmpl.Root.Nodes {
|
||||||
|
switch node.Type() {
|
||||||
|
case parse.NodeAction, parse.NodeIf, parse.NodeRange,
|
||||||
|
parse.NodeWith, parse.NodeTemplate:
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
107
internal/script/chain.go
Normal file
107
internal/script/chain.go
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
package script
|
||||||
|
|
||||||
|
import (
|
||||||
|
"go.aykhans.me/sarin/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Chain holds the loaded script sources and can create engine instances.
// The sources are loaded once, but engines are created per-worker since they're not thread-safe.
type Chain struct {
	luaSources []*Source // Lua scripts, executed first, in the order provided
	jsSources  []*Source // JavaScript scripts, executed after all Lua scripts, in order
}
|
||||||
|
|
||||||
|
// NewChain creates a new script chain from loaded sources.
|
||||||
|
// Lua scripts run first, then JavaScript scripts, in the order provided.
|
||||||
|
func NewChain(luaSources, jsSources []*Source) *Chain {
|
||||||
|
return &Chain{
|
||||||
|
luaSources: luaSources,
|
||||||
|
jsSources: jsSources,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsEmpty returns true if there are no scripts to execute.
|
||||||
|
func (c *Chain) IsEmpty() bool {
|
||||||
|
return len(c.luaSources) == 0 && len(c.jsSources) == 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// Transformer holds instantiated script engines for a single worker.
// It is NOT safe for concurrent use.
type Transformer struct {
	luaEngines []*LuaEngine // one engine per Lua source, in chain order
	jsEngines  []*JsEngine  // one engine per JavaScript source, in chain order
}
|
||||||
|
|
||||||
|
// NewTransformer creates engine instances from the chain's sources.
|
||||||
|
// Call this once per worker goroutine.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.ScriptChainError
|
||||||
|
func (c *Chain) NewTransformer() (*Transformer, error) {
|
||||||
|
if c.IsEmpty() {
|
||||||
|
return &Transformer{}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
t := &Transformer{
|
||||||
|
luaEngines: make([]*LuaEngine, 0, len(c.luaSources)),
|
||||||
|
jsEngines: make([]*JsEngine, 0, len(c.jsSources)),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create Lua engines
|
||||||
|
for i, src := range c.luaSources {
|
||||||
|
engine, err := NewLuaEngine(src.Content)
|
||||||
|
if err != nil {
|
||||||
|
t.Close() // Clean up already created engines
|
||||||
|
return nil, types.NewScriptChainError("lua", i, err)
|
||||||
|
}
|
||||||
|
t.luaEngines = append(t.luaEngines, engine)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create JS engines
|
||||||
|
for i, src := range c.jsSources {
|
||||||
|
engine, err := NewJsEngine(src.Content)
|
||||||
|
if err != nil {
|
||||||
|
t.Close() // Clean up already created engines
|
||||||
|
return nil, types.NewScriptChainError("js", i, err)
|
||||||
|
}
|
||||||
|
t.jsEngines = append(t.jsEngines, engine)
|
||||||
|
}
|
||||||
|
|
||||||
|
return t, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Transform applies all scripts to the request data.
|
||||||
|
// Lua scripts run first, then JavaScript scripts.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.ScriptChainError
|
||||||
|
func (t *Transformer) Transform(req *RequestData) error {
|
||||||
|
// Run Lua scripts
|
||||||
|
for i, engine := range t.luaEngines {
|
||||||
|
if err := engine.Transform(req); err != nil {
|
||||||
|
return types.NewScriptChainError("lua", i, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run JS scripts
|
||||||
|
for i, engine := range t.jsEngines {
|
||||||
|
if err := engine.Transform(req); err != nil {
|
||||||
|
return types.NewScriptChainError("js", i, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close releases all engine resources.
|
||||||
|
func (t *Transformer) Close() {
|
||||||
|
for _, engine := range t.luaEngines {
|
||||||
|
engine.Close()
|
||||||
|
}
|
||||||
|
for _, engine := range t.jsEngines {
|
||||||
|
engine.Close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsEmpty returns true if there are no engines.
|
||||||
|
func (t *Transformer) IsEmpty() bool {
|
||||||
|
return len(t.luaEngines) == 0 && len(t.jsEngines) == 0
|
||||||
|
}
|
||||||
198
internal/script/js.go
Normal file
198
internal/script/js.go
Normal file
@@ -0,0 +1,198 @@
|
|||||||
|
package script
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
|
||||||
|
"github.com/dop251/goja"
|
||||||
|
"go.aykhans.me/sarin/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
// JsEngine implements the Engine interface using goja (JavaScript).
|
||||||
|
type JsEngine struct {
|
||||||
|
runtime *goja.Runtime
|
||||||
|
transform goja.Callable
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewJsEngine creates a new JavaScript script engine with the given script content.
|
||||||
|
// The script must define a global `transform` function that takes a request object
|
||||||
|
// and returns the modified request object.
|
||||||
|
//
|
||||||
|
// Example JavaScript script:
|
||||||
|
//
|
||||||
|
// function transform(req) {
|
||||||
|
// req.headers["X-Custom"] = ["value"];
|
||||||
|
// return req;
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.ErrScriptTransformMissing
|
||||||
|
// - types.ScriptExecutionError
|
||||||
|
func NewJsEngine(scriptContent string) (*JsEngine, error) {
|
||||||
|
vm := goja.New()
|
||||||
|
|
||||||
|
// Execute the script to define the transform function
|
||||||
|
_, err := vm.RunString(scriptContent)
|
||||||
|
if err != nil {
|
||||||
|
return nil, types.NewScriptExecutionError("JavaScript", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the transform function
|
||||||
|
transformVal := vm.Get("transform")
|
||||||
|
if transformVal == nil || goja.IsUndefined(transformVal) || goja.IsNull(transformVal) {
|
||||||
|
return nil, types.ErrScriptTransformMissing
|
||||||
|
}
|
||||||
|
|
||||||
|
transform, ok := goja.AssertFunction(transformVal)
|
||||||
|
if !ok {
|
||||||
|
return nil, types.NewScriptExecutionError("JavaScript", errors.New("'transform' must be a function"))
|
||||||
|
}
|
||||||
|
|
||||||
|
return &JsEngine{
|
||||||
|
runtime: vm,
|
||||||
|
transform: transform,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Transform executes the JavaScript transform function with the given request data.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.ScriptExecutionError
|
||||||
|
func (e *JsEngine) Transform(req *RequestData) error {
|
||||||
|
// Convert RequestData to JavaScript object
|
||||||
|
reqObj := e.requestDataToObject(req)
|
||||||
|
|
||||||
|
// Call transform(req)
|
||||||
|
result, err := e.transform(goja.Undefined(), reqObj)
|
||||||
|
if err != nil {
|
||||||
|
return types.NewScriptExecutionError("JavaScript", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update RequestData from the returned object
|
||||||
|
if err := e.objectToRequestData(result, req); err != nil {
|
||||||
|
return types.NewScriptExecutionError("JavaScript", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close releases the JavaScript runtime resources.
|
||||||
|
func (e *JsEngine) Close() {
|
||||||
|
// goja doesn't have an explicit close method, but we can help GC
|
||||||
|
e.runtime = nil
|
||||||
|
e.transform = nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// requestDataToObject converts RequestData to a goja Value (JavaScript object).
|
||||||
|
func (e *JsEngine) requestDataToObject(req *RequestData) goja.Value {
|
||||||
|
obj := e.runtime.NewObject()
|
||||||
|
|
||||||
|
_ = obj.Set("method", req.Method)
|
||||||
|
_ = obj.Set("path", req.Path)
|
||||||
|
_ = obj.Set("body", req.Body)
|
||||||
|
|
||||||
|
// Headers (map[string][]string -> object of arrays)
|
||||||
|
headers := e.runtime.NewObject()
|
||||||
|
for k, values := range req.Headers {
|
||||||
|
_ = headers.Set(k, e.stringSliceToArray(values))
|
||||||
|
}
|
||||||
|
_ = obj.Set("headers", headers)
|
||||||
|
|
||||||
|
// Params (map[string][]string -> object of arrays)
|
||||||
|
params := e.runtime.NewObject()
|
||||||
|
for k, values := range req.Params {
|
||||||
|
_ = params.Set(k, e.stringSliceToArray(values))
|
||||||
|
}
|
||||||
|
_ = obj.Set("params", params)
|
||||||
|
|
||||||
|
// Cookies (map[string][]string -> object of arrays)
|
||||||
|
cookies := e.runtime.NewObject()
|
||||||
|
for k, values := range req.Cookies {
|
||||||
|
_ = cookies.Set(k, e.stringSliceToArray(values))
|
||||||
|
}
|
||||||
|
_ = obj.Set("cookies", cookies)
|
||||||
|
|
||||||
|
return obj
|
||||||
|
}
|
||||||
|
|
||||||
|
// objectToRequestData updates RequestData from a JavaScript object.
|
||||||
|
func (e *JsEngine) objectToRequestData(val goja.Value, req *RequestData) error {
|
||||||
|
if val == nil || goja.IsUndefined(val) || goja.IsNull(val) {
|
||||||
|
return types.ErrScriptTransformReturnObject
|
||||||
|
}
|
||||||
|
|
||||||
|
obj := val.ToObject(e.runtime)
|
||||||
|
if obj == nil {
|
||||||
|
return types.ErrScriptTransformReturnObject
|
||||||
|
}
|
||||||
|
|
||||||
|
// Method
|
||||||
|
if v := obj.Get("method"); v != nil && !goja.IsUndefined(v) {
|
||||||
|
req.Method = v.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Path
|
||||||
|
if v := obj.Get("path"); v != nil && !goja.IsUndefined(v) {
|
||||||
|
req.Path = v.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Body
|
||||||
|
if v := obj.Get("body"); v != nil && !goja.IsUndefined(v) {
|
||||||
|
req.Body = v.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Headers
|
||||||
|
if v := obj.Get("headers"); v != nil && !goja.IsUndefined(v) && !goja.IsNull(v) {
|
||||||
|
req.Headers = e.objectToStringSliceMap(v.ToObject(e.runtime))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Params
|
||||||
|
if v := obj.Get("params"); v != nil && !goja.IsUndefined(v) && !goja.IsNull(v) {
|
||||||
|
req.Params = e.objectToStringSliceMap(v.ToObject(e.runtime))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cookies
|
||||||
|
if v := obj.Get("cookies"); v != nil && !goja.IsUndefined(v) && !goja.IsNull(v) {
|
||||||
|
req.Cookies = e.objectToStringSliceMap(v.ToObject(e.runtime))
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// stringSliceToArray converts a Go []string to a JavaScript array.
|
||||||
|
func (e *JsEngine) stringSliceToArray(values []string) *goja.Object {
|
||||||
|
ifaces := make([]any, len(values))
|
||||||
|
for i, v := range values {
|
||||||
|
ifaces[i] = v
|
||||||
|
}
|
||||||
|
return e.runtime.NewArray(ifaces...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// objectToStringSliceMap converts a JavaScript object to a Go map[string][]string.
|
||||||
|
// Supports both single string values and array values.
|
||||||
|
func (e *JsEngine) objectToStringSliceMap(obj *goja.Object) map[string][]string {
|
||||||
|
if obj == nil {
|
||||||
|
return make(map[string][]string)
|
||||||
|
}
|
||||||
|
|
||||||
|
result := make(map[string][]string)
|
||||||
|
for _, key := range obj.Keys() {
|
||||||
|
v := obj.Get(key)
|
||||||
|
if v == nil || goja.IsUndefined(v) || goja.IsNull(v) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if it's an array
|
||||||
|
if arr, ok := v.Export().([]any); ok {
|
||||||
|
var values []string
|
||||||
|
for _, item := range arr {
|
||||||
|
if s, ok := item.(string); ok {
|
||||||
|
values = append(values, s)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
result[key] = values
|
||||||
|
} else {
|
||||||
|
// Single value - wrap in slice
|
||||||
|
result[key] = []string{v.String()}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
191
internal/script/lua.go
Normal file
191
internal/script/lua.go
Normal file
@@ -0,0 +1,191 @@
|
|||||||
|
package script
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
lua "github.com/yuin/gopher-lua"
|
||||||
|
"go.aykhans.me/sarin/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
// LuaEngine implements the Engine interface using gopher-lua.
|
||||||
|
type LuaEngine struct {
|
||||||
|
state *lua.LState
|
||||||
|
transform *lua.LFunction
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewLuaEngine creates a new Lua script engine with the given script content.
|
||||||
|
// The script must define a global `transform` function that takes a request table
|
||||||
|
// and returns the modified request table.
|
||||||
|
//
|
||||||
|
// Example Lua script:
|
||||||
|
//
|
||||||
|
// function transform(req)
|
||||||
|
// req.headers["X-Custom"] = {"value"}
|
||||||
|
// return req
|
||||||
|
// end
|
||||||
|
//
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.ErrScriptTransformMissing
|
||||||
|
// - types.ScriptExecutionError
|
||||||
|
func NewLuaEngine(scriptContent string) (*LuaEngine, error) {
|
||||||
|
L := lua.NewState()
|
||||||
|
|
||||||
|
// Execute the script to define the transform function
|
||||||
|
if err := L.DoString(scriptContent); err != nil {
|
||||||
|
L.Close()
|
||||||
|
return nil, types.NewScriptExecutionError("Lua", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the transform function
|
||||||
|
transform := L.GetGlobal("transform")
|
||||||
|
if transform.Type() != lua.LTFunction {
|
||||||
|
L.Close()
|
||||||
|
return nil, types.ErrScriptTransformMissing
|
||||||
|
}
|
||||||
|
|
||||||
|
return &LuaEngine{
|
||||||
|
state: L,
|
||||||
|
transform: transform.(*lua.LFunction),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Transform executes the Lua transform function with the given request data.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.ScriptExecutionError
|
||||||
|
func (e *LuaEngine) Transform(req *RequestData) error {
|
||||||
|
// Convert RequestData to Lua table
|
||||||
|
reqTable := e.requestDataToTable(req)
|
||||||
|
|
||||||
|
// Call transform(req)
|
||||||
|
e.state.Push(e.transform)
|
||||||
|
e.state.Push(reqTable)
|
||||||
|
if err := e.state.PCall(1, 1, nil); err != nil {
|
||||||
|
return types.NewScriptExecutionError("Lua", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the result
|
||||||
|
result := e.state.Get(-1)
|
||||||
|
e.state.Pop(1)
|
||||||
|
|
||||||
|
if result.Type() != lua.LTTable {
|
||||||
|
return types.NewScriptExecutionError("Lua", fmt.Errorf("transform function must return a table, got %s", result.Type()))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update RequestData from the returned table
|
||||||
|
e.tableToRequestData(result.(*lua.LTable), req)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close releases the Lua state resources.
|
||||||
|
func (e *LuaEngine) Close() {
|
||||||
|
if e.state != nil {
|
||||||
|
e.state.Close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// requestDataToTable converts RequestData to a Lua table.
|
||||||
|
func (e *LuaEngine) requestDataToTable(req *RequestData) *lua.LTable {
|
||||||
|
L := e.state
|
||||||
|
t := L.NewTable()
|
||||||
|
|
||||||
|
t.RawSetString("method", lua.LString(req.Method))
|
||||||
|
t.RawSetString("path", lua.LString(req.Path))
|
||||||
|
t.RawSetString("body", lua.LString(req.Body))
|
||||||
|
|
||||||
|
// Headers (map[string][]string -> table of arrays)
|
||||||
|
headers := L.NewTable()
|
||||||
|
for k, values := range req.Headers {
|
||||||
|
arr := L.NewTable()
|
||||||
|
for _, v := range values {
|
||||||
|
arr.Append(lua.LString(v))
|
||||||
|
}
|
||||||
|
headers.RawSetString(k, arr)
|
||||||
|
}
|
||||||
|
t.RawSetString("headers", headers)
|
||||||
|
|
||||||
|
// Params (map[string][]string -> table of arrays)
|
||||||
|
params := L.NewTable()
|
||||||
|
for k, values := range req.Params {
|
||||||
|
arr := L.NewTable()
|
||||||
|
for _, v := range values {
|
||||||
|
arr.Append(lua.LString(v))
|
||||||
|
}
|
||||||
|
params.RawSetString(k, arr)
|
||||||
|
}
|
||||||
|
t.RawSetString("params", params)
|
||||||
|
|
||||||
|
// Cookies (map[string][]string -> table of arrays)
|
||||||
|
cookies := L.NewTable()
|
||||||
|
for k, values := range req.Cookies {
|
||||||
|
arr := L.NewTable()
|
||||||
|
for _, v := range values {
|
||||||
|
arr.Append(lua.LString(v))
|
||||||
|
}
|
||||||
|
cookies.RawSetString(k, arr)
|
||||||
|
}
|
||||||
|
t.RawSetString("cookies", cookies)
|
||||||
|
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
|
||||||
|
// tableToRequestData updates RequestData from a Lua table.
|
||||||
|
func (e *LuaEngine) tableToRequestData(t *lua.LTable, req *RequestData) {
|
||||||
|
// Method
|
||||||
|
if v := t.RawGetString("method"); v.Type() == lua.LTString {
|
||||||
|
req.Method = string(v.(lua.LString))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Path
|
||||||
|
if v := t.RawGetString("path"); v.Type() == lua.LTString {
|
||||||
|
req.Path = string(v.(lua.LString))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Body
|
||||||
|
if v := t.RawGetString("body"); v.Type() == lua.LTString {
|
||||||
|
req.Body = string(v.(lua.LString))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Headers
|
||||||
|
if v := t.RawGetString("headers"); v.Type() == lua.LTTable {
|
||||||
|
req.Headers = e.tableToStringSliceMap(v.(*lua.LTable))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Params
|
||||||
|
if v := t.RawGetString("params"); v.Type() == lua.LTTable {
|
||||||
|
req.Params = e.tableToStringSliceMap(v.(*lua.LTable))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cookies
|
||||||
|
if v := t.RawGetString("cookies"); v.Type() == lua.LTTable {
|
||||||
|
req.Cookies = e.tableToStringSliceMap(v.(*lua.LTable))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// tableToStringSliceMap converts a Lua table to a Go map[string][]string.
|
||||||
|
// Supports both single string values and array values.
|
||||||
|
func (e *LuaEngine) tableToStringSliceMap(t *lua.LTable) map[string][]string {
|
||||||
|
result := make(map[string][]string)
|
||||||
|
t.ForEach(func(k, v lua.LValue) {
|
||||||
|
if k.Type() != lua.LTString {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
key := string(k.(lua.LString))
|
||||||
|
|
||||||
|
switch v.Type() {
|
||||||
|
case lua.LTString:
|
||||||
|
// Single string value
|
||||||
|
result[key] = []string{string(v.(lua.LString))}
|
||||||
|
case lua.LTTable:
|
||||||
|
// Array of strings
|
||||||
|
var values []string
|
||||||
|
v.(*lua.LTable).ForEach(func(_, item lua.LValue) {
|
||||||
|
if item.Type() == lua.LTString {
|
||||||
|
values = append(values, string(item.(lua.LString)))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
result[key] = values
|
||||||
|
}
|
||||||
|
})
|
||||||
|
return result
|
||||||
|
}
|
||||||
197
internal/script/script.go
Normal file
197
internal/script/script.go
Normal file
@@ -0,0 +1,197 @@
|
|||||||
|
package script
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"go.aykhans.me/sarin/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
// RequestData represents the request data passed to scripts for
// transformation. Scripts may modify any field and the changes are applied
// to the actual request. Headers, Params, and Cookies use []string values so
// a key can carry multiple values.
type RequestData struct {
	Method  string              `json:"method"`
	Path    string              `json:"path"`
	Headers map[string][]string `json:"headers"`
	Params  map[string][]string `json:"params"`
	Cookies map[string][]string `json:"cookies"`
	Body    string              `json:"body"`
}
|
||||||
|
|
||||||
|
// Engine defines the interface for script engines (Lua, JavaScript).
|
||||||
|
// Each engine must be able to transform request data using a user-provided script.
|
||||||
|
type Engine interface {
|
||||||
|
// Transform executes the script's transform function with the given request data.
|
||||||
|
// The script should modify the RequestData and return it.
|
||||||
|
Transform(req *RequestData) error
|
||||||
|
|
||||||
|
// Close releases any resources held by the engine.
|
||||||
|
Close()
|
||||||
|
}
|
||||||
|
|
||||||
|
// EngineType represents the type of script engine.
type EngineType string

const (
	// EngineTypeLua selects the gopher-lua engine.
	EngineTypeLua EngineType = "lua"
	// EngineTypeJavaScript selects the goja engine.
	EngineTypeJavaScript EngineType = "js"
)
|
||||||
|
|
||||||
|
// Source represents a loaded script source.
|
||||||
|
type Source struct {
|
||||||
|
Content string
|
||||||
|
EngineType EngineType
|
||||||
|
}
|
||||||
|
|
||||||
|
// LoadSource loads a script from the given source string.
|
||||||
|
// The source can be:
|
||||||
|
// - Inline script: any string not starting with "@"
|
||||||
|
// - Escaped "@": strings starting with "@@" (literal "@" at start, returns string without first @)
|
||||||
|
// - File reference: "@/path/to/file" or "@./relative/path"
|
||||||
|
// - URL reference: "@http://..." or "@https://..."
|
||||||
|
//
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.ErrScriptEmpty
|
||||||
|
// - types.ScriptLoadError
|
||||||
|
func LoadSource(ctx context.Context, source string, engineType EngineType) (*Source, error) {
|
||||||
|
if source == "" {
|
||||||
|
return nil, types.ErrScriptEmpty
|
||||||
|
}
|
||||||
|
|
||||||
|
var content string
|
||||||
|
var err error
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case strings.HasPrefix(source, "@@"):
|
||||||
|
// Escaped @ - it's an inline script starting with literal @
|
||||||
|
content = source[1:] // Remove first @, keep the rest
|
||||||
|
case strings.HasPrefix(source, "@"):
|
||||||
|
// File or URL reference
|
||||||
|
ref := source[1:]
|
||||||
|
if strings.HasPrefix(ref, "http://") || strings.HasPrefix(ref, "https://") {
|
||||||
|
content, err = fetchURL(ctx, ref)
|
||||||
|
} else {
|
||||||
|
content, err = readFile(ref)
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, types.NewScriptLoadError(ref, err)
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
// Inline script
|
||||||
|
content = source
|
||||||
|
}
|
||||||
|
|
||||||
|
return &Source{
|
||||||
|
Content: content,
|
||||||
|
EngineType: engineType,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// LoadSources loads multiple script sources.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.ErrScriptEmpty
|
||||||
|
// - types.ScriptLoadError
|
||||||
|
func LoadSources(ctx context.Context, sources []string, engineType EngineType) ([]*Source, error) {
|
||||||
|
loaded := make([]*Source, 0, len(sources))
|
||||||
|
for _, src := range sources {
|
||||||
|
source, err := LoadSource(ctx, src, engineType)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
loaded = append(loaded, source)
|
||||||
|
}
|
||||||
|
return loaded, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateScript validates a script source by loading it and checking syntax.
|
||||||
|
// It loads the script (from file/URL/inline), parses it, and verifies
|
||||||
|
// that a 'transform' function is defined.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.ErrScriptEmpty
|
||||||
|
// - types.ErrScriptTransformMissing
|
||||||
|
// - types.ScriptLoadError
|
||||||
|
// - types.ScriptExecutionError
|
||||||
|
// - types.ScriptUnknownEngineError
|
||||||
|
func ValidateScript(ctx context.Context, source string, engineType EngineType) error {
|
||||||
|
// Load the script source
|
||||||
|
src, err := LoadSource(ctx, source, engineType)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to create an engine - this validates syntax and transform function
|
||||||
|
var engine Engine
|
||||||
|
switch engineType {
|
||||||
|
case EngineTypeLua:
|
||||||
|
engine, err = NewLuaEngine(src.Content)
|
||||||
|
case EngineTypeJavaScript:
|
||||||
|
engine, err = NewJsEngine(src.Content)
|
||||||
|
default:
|
||||||
|
return types.NewScriptUnknownEngineError(string(engineType))
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clean up the engine - we only needed it for validation
|
||||||
|
engine.Close()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetchURL downloads content from an HTTP/HTTPS URL.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.HTTPFetchError
|
||||||
|
// - types.HTTPStatusError
|
||||||
|
func fetchURL(ctx context.Context, url string) (string, error) {
|
||||||
|
ctx, cancel := context.WithTimeout(ctx, 30*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
|
||||||
|
if err != nil {
|
||||||
|
return "", types.NewHTTPFetchError(url, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
resp, err := http.DefaultClient.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
return "", types.NewHTTPFetchError(url, err)
|
||||||
|
}
|
||||||
|
defer resp.Body.Close() //nolint:errcheck
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
return "", types.NewHTTPStatusError(url, resp.StatusCode, resp.Status)
|
||||||
|
}
|
||||||
|
|
||||||
|
data, err := io.ReadAll(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return "", types.NewHTTPFetchError(url, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return string(data), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// readFile reads content from a local file.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - types.FileReadError
|
||||||
|
func readFile(path string) (string, error) {
|
||||||
|
if !filepath.IsAbs(path) {
|
||||||
|
pwd, err := os.Getwd()
|
||||||
|
if err != nil {
|
||||||
|
return "", types.NewFileReadError(path, err)
|
||||||
|
}
|
||||||
|
path = filepath.Join(pwd, path)
|
||||||
|
}
|
||||||
|
|
||||||
|
data, err := os.ReadFile(path) //nolint:gosec
|
||||||
|
if err != nil {
|
||||||
|
return "", types.NewFileReadError(path, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return string(data), nil
|
||||||
|
}
|
||||||
46
internal/types/config_file.go
Normal file
46
internal/types/config_file.go
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
package types
|
||||||
|
|
||||||
|
import (
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ConfigFileType identifies the format of a config file.
type ConfigFileType string

const (
	ConfigFileTypeUnknown ConfigFileType = "unknown"
	ConfigFileTypeYAML    ConfigFileType = "yaml/yml"
)

// ConfigFile couples a config file path with the type detected from its
// extension.
type ConfigFile struct {
	path  string
	_type ConfigFileType
}

// Path returns the raw path the config file was parsed from.
func (configFile ConfigFile) Path() string {
	return configFile.path
}

// Type returns the detected config file type.
func (configFile ConfigFile) Type() ConfigFileType {
	return configFile._type
}

// ParseConfigFile builds a ConfigFile from a raw path, detecting the type
// from the file extension (case-insensitive).
func ParseConfigFile(configFileRaw string) *ConfigFile {
	// TODO: Improve file type detection
	// (e.g., use magic bytes or content inspection instead of relying solely on file extension)

	fileType := ConfigFileTypeUnknown
	ext := strings.TrimPrefix(filepath.Ext(configFileRaw), ".")
	if ext = strings.ToLower(ext); ext == "yml" || ext == "yaml" {
		fileType = ConfigFileTypeYAML
	}

	return &ConfigFile{path: configFileRaw, _type: fileType}
}
|
||||||
40
internal/types/cookie.go
Normal file
40
internal/types/cookie.go
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
package types
|
||||||
|
|
||||||
|
import "strings"
|
||||||
|
|
||||||
|
type Cookie KeyValue[string, []string]
|
||||||
|
|
||||||
|
type Cookies []Cookie
|
||||||
|
|
||||||
|
func (cookies Cookies) GetValue(key string) *[]string {
|
||||||
|
for i := range cookies {
|
||||||
|
if cookies[i].Key == key {
|
||||||
|
return &cookies[i].Value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (cookies *Cookies) Merge(cookie ...Cookie) {
|
||||||
|
for _, c := range cookie {
|
||||||
|
if item := cookies.GetValue(c.Key); item != nil {
|
||||||
|
*item = append(*item, c.Value...)
|
||||||
|
} else {
|
||||||
|
*cookies = append(*cookies, c)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (cookies *Cookies) Parse(rawValues ...string) {
|
||||||
|
for _, rawValue := range rawValues {
|
||||||
|
*cookies = append(*cookies, *ParseCookie(rawValue))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func ParseCookie(rawValue string) *Cookie {
|
||||||
|
parts := strings.SplitN(rawValue, "=", 2)
|
||||||
|
if len(parts) == 1 {
|
||||||
|
return &Cookie{Key: parts[0], Value: []string{""}}
|
||||||
|
}
|
||||||
|
return &Cookie{Key: parts[0], Value: []string{parts[1]}}
|
||||||
|
}
|
||||||
444
internal/types/errors.go
Normal file
444
internal/types/errors.go
Normal file
@@ -0,0 +1,444 @@
|
|||||||
|
package types
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ======================================== General ========================================
|
||||||
|
|
||||||
|
var (
	// errNoError is substituted for nil causes so Unwrap never yields a
	// wrapped nil error.
	errNoError = errors.New("no error (internal)")
)

// FieldParseError reports that a single field failed to parse.
type FieldParseError struct {
	Field string
	Value string
	Err   error
}

// NewFieldParseError builds a FieldParseError; a nil err is replaced with a
// non-nil internal placeholder.
func NewFieldParseError(field string, value string, err error) FieldParseError {
	if err == nil {
		err = errNoError
	}
	return FieldParseError{Field: field, Value: value, Err: err}
}

func (e FieldParseError) Error() string {
	return fmt.Sprintf("Field '%s' parse failed: %v", e.Field, e.Err)
}

func (e FieldParseError) Unwrap() error {
	return e.Err
}

// FieldParseErrors aggregates several FieldParseError values into one error.
type FieldParseErrors struct {
	Errors []FieldParseError
}

func NewFieldParseErrors(fieldParseErrors []FieldParseError) FieldParseErrors {
	return FieldParseErrors{Errors: fieldParseErrors}
}

// Error joins the contained errors with newlines; zero errors yields a fixed
// placeholder and a single error is reported verbatim.
func (e FieldParseErrors) Error() string {
	switch len(e.Errors) {
	case 0:
		return "No field parse errors"
	case 1:
		return e.Errors[0].Error()
	}

	var sb strings.Builder
	for i, err := range e.Errors {
		if i > 0 {
			sb.WriteByte('\n')
		}
		sb.WriteString(err.Error())
	}
	return sb.String()
}
|
||||||
|
|
||||||
|
type FieldValidationError struct {
|
||||||
|
Field string
|
||||||
|
Value string
|
||||||
|
Err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewFieldValidationError(field string, value string, err error) FieldValidationError {
|
||||||
|
if err == nil {
|
||||||
|
err = errNoError
|
||||||
|
}
|
||||||
|
return FieldValidationError{field, value, err}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e FieldValidationError) Error() string {
|
||||||
|
return fmt.Sprintf("Field '%s' validation failed: %v", e.Field, e.Err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e FieldValidationError) Unwrap() error {
|
||||||
|
return e.Err
|
||||||
|
}
|
||||||
|
|
||||||
|
type FieldValidationErrors struct {
|
||||||
|
Errors []FieldValidationError
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewFieldValidationErrors(fieldValidationErrors []FieldValidationError) FieldValidationErrors {
|
||||||
|
return FieldValidationErrors{fieldValidationErrors}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e FieldValidationErrors) Error() string {
|
||||||
|
if len(e.Errors) == 0 {
|
||||||
|
return "No field validation errors"
|
||||||
|
}
|
||||||
|
if len(e.Errors) == 1 {
|
||||||
|
return e.Errors[0].Error()
|
||||||
|
}
|
||||||
|
|
||||||
|
var builder strings.Builder
|
||||||
|
for i, err := range e.Errors {
|
||||||
|
if i > 0 {
|
||||||
|
builder.WriteString("\n")
|
||||||
|
}
|
||||||
|
builder.WriteString(err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
return builder.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
type UnmarshalError struct {
|
||||||
|
error error
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewUnmarshalError(err error) UnmarshalError {
|
||||||
|
if err == nil {
|
||||||
|
err = errNoError
|
||||||
|
}
|
||||||
|
return UnmarshalError{err}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e UnmarshalError) Error() string {
|
||||||
|
return "Unmarshal error: " + e.error.Error()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e UnmarshalError) Unwrap() error {
|
||||||
|
return e.error
|
||||||
|
}
|
||||||
|
|
||||||
|
// ======================================== General I/O ========================================
|
||||||
|
|
||||||
|
type FileReadError struct {
|
||||||
|
Path string
|
||||||
|
Err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewFileReadError(path string, err error) FileReadError {
|
||||||
|
if err == nil {
|
||||||
|
err = errNoError
|
||||||
|
}
|
||||||
|
return FileReadError{path, err}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e FileReadError) Error() string {
|
||||||
|
return fmt.Sprintf("failed to read file %s: %v", e.Path, e.Err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e FileReadError) Unwrap() error {
|
||||||
|
return e.Err
|
||||||
|
}
|
||||||
|
|
||||||
|
type HTTPFetchError struct {
|
||||||
|
URL string
|
||||||
|
Err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewHTTPFetchError(url string, err error) HTTPFetchError {
|
||||||
|
if err == nil {
|
||||||
|
err = errNoError
|
||||||
|
}
|
||||||
|
return HTTPFetchError{url, err}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e HTTPFetchError) Error() string {
|
||||||
|
return fmt.Sprintf("failed to fetch %s: %v", e.URL, e.Err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e HTTPFetchError) Unwrap() error {
|
||||||
|
return e.Err
|
||||||
|
}
|
||||||
|
|
||||||
|
// HTTPStatusError reports a non-OK HTTP response while fetching a URL.
type HTTPStatusError struct {
	URL        string
	StatusCode int
	Status     string
}

// NewHTTPStatusError builds an HTTPStatusError from the response's numeric
// code and status line.
func NewHTTPStatusError(url string, statusCode int, status string) HTTPStatusError {
	return HTTPStatusError{url, statusCode, status}
}

func (e HTTPStatusError) Error() string {
	// http.Response.Status (e.g. "404 Not Found") already starts with the
	// numeric code; avoid printing it twice ("HTTP 404 404 Not Found").
	if strings.HasPrefix(e.Status, fmt.Sprintf("%d", e.StatusCode)) {
		return fmt.Sprintf("HTTP %s (url: %s)", e.Status, e.URL)
	}
	return fmt.Sprintf("HTTP %d %s (url: %s)", e.StatusCode, e.Status, e.URL)
}
|
||||||
|
|
||||||
|
type URLParseError struct {
|
||||||
|
URL string
|
||||||
|
Err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewURLParseError(url string, err error) URLParseError {
|
||||||
|
if err == nil {
|
||||||
|
err = errNoError
|
||||||
|
}
|
||||||
|
return URLParseError{url, err}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e URLParseError) Error() string {
|
||||||
|
return fmt.Sprintf("invalid URL %q: %v", e.URL, e.Err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e URLParseError) Unwrap() error {
|
||||||
|
return e.Err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ======================================== Template ========================================
|
||||||
|
|
||||||
|
// ErrFileCacheNotInitialized is returned when a template file helper runs
// before the file cache has been set up.
var ErrFileCacheNotInitialized = errors.New("file cache is not initialized")

// ErrFormDataOddArgs is returned when body_FormData receives an odd number
// of arguments instead of key-value pairs.
var ErrFormDataOddArgs = errors.New("body_FormData requires an even number of arguments (key-value pairs)")
|
||||||
|
|
||||||
|
type TemplateParseError struct {
|
||||||
|
Err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewTemplateParseError(err error) TemplateParseError {
|
||||||
|
if err == nil {
|
||||||
|
err = errNoError
|
||||||
|
}
|
||||||
|
return TemplateParseError{err}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e TemplateParseError) Error() string {
|
||||||
|
return "template parse error: " + e.Err.Error()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e TemplateParseError) Unwrap() error {
|
||||||
|
return e.Err
|
||||||
|
}
|
||||||
|
|
||||||
|
type TemplateRenderError struct {
|
||||||
|
Err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewTemplateRenderError(err error) TemplateRenderError {
|
||||||
|
if err == nil {
|
||||||
|
err = errNoError
|
||||||
|
}
|
||||||
|
return TemplateRenderError{err}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e TemplateRenderError) Error() string {
|
||||||
|
return "template rendering: " + e.Err.Error()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e TemplateRenderError) Unwrap() error {
|
||||||
|
return e.Err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ======================================== CLI ========================================
|
||||||
|
|
||||||
|
// CLIUnexpectedArgsError reports positional arguments the CLI did not expect.
type CLIUnexpectedArgsError struct {
	Args []string
}

// NewCLIUnexpectedArgsError constructs the error from the leftover arguments.
func NewCLIUnexpectedArgsError(args []string) CLIUnexpectedArgsError {
	return CLIUnexpectedArgsError{args}
}

// Error implements the error interface.
func (e CLIUnexpectedArgsError) Error() string {
	// strings.Join already yields a string; formatting it through
	// fmt.Sprintf("%v", ...) only added boxing and reflection overhead.
	// Plain concatenation produces the identical message.
	return "CLI received unexpected arguments: " + strings.Join(e.Args, ",")
}
|
||||||
|
|
||||||
|
// ======================================== Config File ========================================
|
||||||
|
|
||||||
|
type ConfigFileReadError struct {
|
||||||
|
error error
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewConfigFileReadError(err error) ConfigFileReadError {
|
||||||
|
if err == nil {
|
||||||
|
err = errNoError
|
||||||
|
}
|
||||||
|
return ConfigFileReadError{err}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e ConfigFileReadError) Error() string {
|
||||||
|
return "Config file read error: " + e.error.Error()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e ConfigFileReadError) Unwrap() error {
|
||||||
|
return e.error
|
||||||
|
}
|
||||||
|
|
||||||
|
// ======================================== Proxy ========================================
|
||||||
|
|
||||||
|
// ProxyUnsupportedSchemeError reports a proxy URL whose scheme is not supported.
type ProxyUnsupportedSchemeError struct {
	Scheme string
}

// NewProxyUnsupportedSchemeError constructs the error for the given scheme.
func NewProxyUnsupportedSchemeError(scheme string) ProxyUnsupportedSchemeError {
	return ProxyUnsupportedSchemeError{Scheme: scheme}
}

// Error implements the error interface.
func (e ProxyUnsupportedSchemeError) Error() string {
	return "unsupported proxy scheme: " + e.Scheme
}
|
||||||
|
|
||||||
|
type ProxyParseError struct {
|
||||||
|
Err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewProxyParseError(err error) ProxyParseError {
|
||||||
|
if err == nil {
|
||||||
|
err = errNoError
|
||||||
|
}
|
||||||
|
return ProxyParseError{err}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e ProxyParseError) Error() string {
|
||||||
|
return "failed to parse proxy URL: " + e.Err.Error()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e ProxyParseError) Unwrap() error {
|
||||||
|
return e.Err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ProxyConnectError reports a rejected HTTP CONNECT handshake; Status holds
// the proxy's response status line.
type ProxyConnectError struct {
	Status string
}

// NewProxyConnectError constructs the error from the proxy's status line.
func NewProxyConnectError(status string) ProxyConnectError {
	return ProxyConnectError{Status: status}
}

// Error implements the error interface.
func (e ProxyConnectError) Error() string {
	return "proxy CONNECT failed: " + e.Status
}
|
||||||
|
|
||||||
|
// ProxyResolveError reports that DNS resolution for Host produced no addresses.
type ProxyResolveError struct {
	Host string
}

// NewProxyResolveError constructs the error for the given host.
func NewProxyResolveError(host string) ProxyResolveError {
	return ProxyResolveError{Host: host}
}

// Error implements the error interface.
func (e ProxyResolveError) Error() string {
	return "no IP addresses found for host: " + e.Host
}
|
||||||
|
|
||||||
|
type ProxyDialError struct {
|
||||||
|
Proxy string
|
||||||
|
Err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewProxyDialError(proxy string, err error) ProxyDialError {
|
||||||
|
if err == nil {
|
||||||
|
err = errNoError
|
||||||
|
}
|
||||||
|
return ProxyDialError{proxy, err}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e ProxyDialError) Error() string {
|
||||||
|
return "proxy \"" + e.Proxy + "\": " + e.Err.Error()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e ProxyDialError) Unwrap() error {
|
||||||
|
return e.Err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ======================================== Script ========================================
|
||||||
|
|
||||||
|
// Sentinel errors for validating user-supplied transform scripts.
var (
	// ErrScriptEmpty: the script string was empty.
	ErrScriptEmpty = errors.New("script cannot be empty")
	// ErrScriptSourceEmpty: an "@" source reference had nothing after it.
	ErrScriptSourceEmpty = errors.New("script source cannot be empty after @")
	// ErrScriptTransformMissing: no global 'transform' function was defined.
	ErrScriptTransformMissing = errors.New("script must define a global 'transform' function")
	// ErrScriptTransformReturnObject: 'transform' returned a non-object value.
	ErrScriptTransformReturnObject = errors.New("transform function must return an object")
	// ErrScriptURLNoHost: a script URL was missing its host component.
	ErrScriptURLNoHost = errors.New("script URL must have a host")
)
|
||||||
|
|
||||||
|
type ScriptLoadError struct {
|
||||||
|
Source string
|
||||||
|
Err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewScriptLoadError(source string, err error) ScriptLoadError {
|
||||||
|
if err == nil {
|
||||||
|
err = errNoError
|
||||||
|
}
|
||||||
|
return ScriptLoadError{source, err}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e ScriptLoadError) Error() string {
|
||||||
|
return fmt.Sprintf("failed to load script from %q: %v", e.Source, e.Err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e ScriptLoadError) Unwrap() error {
|
||||||
|
return e.Err
|
||||||
|
}
|
||||||
|
|
||||||
|
type ScriptExecutionError struct {
|
||||||
|
EngineType string
|
||||||
|
Err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewScriptExecutionError(engineType string, err error) ScriptExecutionError {
|
||||||
|
if err == nil {
|
||||||
|
err = errNoError
|
||||||
|
}
|
||||||
|
return ScriptExecutionError{engineType, err}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e ScriptExecutionError) Error() string {
|
||||||
|
return fmt.Sprintf("%s script error: %v", e.EngineType, e.Err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e ScriptExecutionError) Unwrap() error {
|
||||||
|
return e.Err
|
||||||
|
}
|
||||||
|
|
||||||
|
type ScriptChainError struct {
|
||||||
|
EngineType string
|
||||||
|
Index int
|
||||||
|
Err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewScriptChainError(engineType string, index int, err error) ScriptChainError {
|
||||||
|
if err == nil {
|
||||||
|
err = errNoError
|
||||||
|
}
|
||||||
|
return ScriptChainError{engineType, index, err}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e ScriptChainError) Error() string {
|
||||||
|
return fmt.Sprintf("%s script[%d]: %v", e.EngineType, e.Index, e.Err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e ScriptChainError) Unwrap() error {
|
||||||
|
return e.Err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ScriptUnknownEngineError reports a script engine type that is not recognized.
type ScriptUnknownEngineError struct {
	EngineType string
}

// NewScriptUnknownEngineError constructs the error for the given engine type.
func NewScriptUnknownEngineError(engineType string) ScriptUnknownEngineError {
	return ScriptUnknownEngineError{EngineType: engineType}
}

// Error implements the error interface.
func (e ScriptUnknownEngineError) Error() string {
	return "unknown engine type: " + e.EngineType
}
|
||||||
49
internal/types/header.go
Normal file
49
internal/types/header.go
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
package types
|
||||||
|
|
||||||
|
import "strings"
|
||||||
|
|
||||||
|
type Header KeyValue[string, []string]
|
||||||
|
|
||||||
|
type Headers []Header
|
||||||
|
|
||||||
|
func (headers Headers) Has(key string) bool {
|
||||||
|
for i := range headers {
|
||||||
|
if headers[i].Key == key {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (headers Headers) GetValue(key string) *[]string {
|
||||||
|
for i := range headers {
|
||||||
|
if headers[i].Key == key {
|
||||||
|
return &headers[i].Value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (headers *Headers) Merge(header ...Header) {
|
||||||
|
for _, h := range header {
|
||||||
|
if item := headers.GetValue(h.Key); item != nil {
|
||||||
|
*item = append(*item, h.Value...)
|
||||||
|
} else {
|
||||||
|
*headers = append(*headers, h)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (headers *Headers) Parse(rawValues ...string) {
|
||||||
|
for _, rawValue := range rawValues {
|
||||||
|
*headers = append(*headers, *ParseHeader(rawValue))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func ParseHeader(rawValue string) *Header {
|
||||||
|
parts := strings.SplitN(rawValue, ": ", 2)
|
||||||
|
if len(parts) == 1 {
|
||||||
|
return &Header{Key: parts[0], Value: []string{""}}
|
||||||
|
}
|
||||||
|
return &Header{Key: parts[0], Value: []string{parts[1]}}
|
||||||
|
}
|
||||||
6
internal/types/key_value.go
Normal file
6
internal/types/key_value.go
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
package types
|
||||||
|
|
||||||
|
// KeyValue is a generic pair associating a key of type K with a value of
// type V.
type KeyValue[K, V any] struct {
	Key   K
	Value V
}
|
||||||
40
internal/types/param.go
Normal file
40
internal/types/param.go
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
package types
|
||||||
|
|
||||||
|
import "strings"
|
||||||
|
|
||||||
|
type Param KeyValue[string, []string]
|
||||||
|
|
||||||
|
type Params []Param
|
||||||
|
|
||||||
|
func (params Params) GetValue(key string) *[]string {
|
||||||
|
for i := range params {
|
||||||
|
if params[i].Key == key {
|
||||||
|
return ¶ms[i].Value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (params *Params) Merge(param ...Param) {
|
||||||
|
for _, p := range param {
|
||||||
|
if item := params.GetValue(p.Key); item != nil {
|
||||||
|
*item = append(*item, p.Value...)
|
||||||
|
} else {
|
||||||
|
*params = append(*params, p)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (params *Params) Parse(rawValues ...string) {
|
||||||
|
for _, rawValue := range rawValues {
|
||||||
|
*params = append(*params, *ParseParam(rawValue))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func ParseParam(rawValue string) *Param {
|
||||||
|
parts := strings.SplitN(rawValue, "=", 2)
|
||||||
|
if len(parts) == 1 {
|
||||||
|
return &Param{Key: parts[0], Value: []string{""}}
|
||||||
|
}
|
||||||
|
return &Param{Key: parts[0], Value: []string{parts[1]}}
|
||||||
|
}
|
||||||
43
internal/types/proxy.go
Normal file
43
internal/types/proxy.go
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
package types
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/url"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Proxy url.URL
|
||||||
|
|
||||||
|
func (proxy Proxy) String() string {
|
||||||
|
return (*url.URL)(&proxy).String()
|
||||||
|
}
|
||||||
|
|
||||||
|
type Proxies []Proxy
|
||||||
|
|
||||||
|
func (proxies *Proxies) Append(proxy ...Proxy) {
|
||||||
|
*proxies = append(*proxies, proxy...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse parses a raw proxy string and appends it to the list.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - ProxyParseError
|
||||||
|
func (proxies *Proxies) Parse(rawValue string) error {
|
||||||
|
parsedProxy, err := ParseProxy(rawValue)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
proxies.Append(*parsedProxy)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseProxy parses a raw proxy URL string into a Proxy.
|
||||||
|
// It can return the following errors:
|
||||||
|
// - ProxyParseError
|
||||||
|
func ParseProxy(rawValue string) (*Proxy, error) {
|
||||||
|
urlParsed, err := url.Parse(rawValue)
|
||||||
|
if err != nil {
|
||||||
|
return nil, NewProxyParseError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
proxyParsed := Proxy(*urlParsed)
|
||||||
|
return &proxyParsed, nil
|
||||||
|
}
|
||||||
8
internal/version/version.go
Normal file
8
internal/version/version.go
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
package version
|
||||||
|
|
||||||
|
// Version is the release version of the binary; set via ldflags at build time.
var Version = "unknown"

// GitCommit is the VCS revision the binary was built from; set via ldflags.
var GitCommit = "unknown"

// BuildDate is the timestamp of the build; set via ldflags.
var BuildDate = "unknown"

// GoVersion is the Go toolchain version used for the build; set via ldflags.
var GoVersion = "unknown"
|
||||||
74
main.go
74
main.go
@@ -1,74 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"os/signal"
|
|
||||||
"syscall"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/aykhans/dodo/config"
|
|
||||||
"github.com/aykhans/dodo/requests"
|
|
||||||
"github.com/aykhans/dodo/types"
|
|
||||||
"github.com/aykhans/dodo/utils"
|
|
||||||
"github.com/jedib0t/go-pretty/v6/text"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
conf := config.NewConfig()
|
|
||||||
configFile, err := conf.ReadCLI()
|
|
||||||
if err != nil {
|
|
||||||
utils.PrintErrAndExit(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if configFile.String() != "" {
|
|
||||||
tempConf := config.NewConfig()
|
|
||||||
if err := tempConf.ReadFile(configFile); err != nil {
|
|
||||||
utils.PrintErrAndExit(err)
|
|
||||||
}
|
|
||||||
tempConf.MergeConfig(conf)
|
|
||||||
conf = tempConf
|
|
||||||
}
|
|
||||||
conf.SetDefaults()
|
|
||||||
|
|
||||||
if errs := conf.Validate(); len(errs) > 0 {
|
|
||||||
utils.PrintErrAndExit(errors.Join(errs...))
|
|
||||||
}
|
|
||||||
|
|
||||||
requestConf := config.NewRequestConfig(conf)
|
|
||||||
requestConf.Print()
|
|
||||||
|
|
||||||
if !requestConf.Yes {
|
|
||||||
response := config.CLIYesOrNoReader("Do you want to continue?", false)
|
|
||||||
if !response {
|
|
||||||
utils.PrintAndExit("Exiting...\n")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
ctx, cancel := context.WithCancel(context.Background())
|
|
||||||
go listenForTermination(func() { cancel() })
|
|
||||||
|
|
||||||
if requestConf.Duration > 0 {
|
|
||||||
time.AfterFunc(requestConf.Duration, func() { cancel() })
|
|
||||||
}
|
|
||||||
|
|
||||||
responses, err := requests.Run(ctx, requestConf)
|
|
||||||
if err != nil {
|
|
||||||
if err == types.ErrInterrupt {
|
|
||||||
fmt.Println(text.FgYellow.Sprint(err.Error()))
|
|
||||||
return
|
|
||||||
}
|
|
||||||
utils.PrintErrAndExit(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
responses.Print()
|
|
||||||
}
|
|
||||||
|
|
||||||
func listenForTermination(do func()) {
|
|
||||||
sigChan := make(chan os.Signal, 1)
|
|
||||||
signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM)
|
|
||||||
<-sigChan
|
|
||||||
do()
|
|
||||||
}
|
|
||||||
@@ -1,98 +0,0 @@
|
|||||||
package requests
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"errors"
|
|
||||||
"math/rand"
|
|
||||||
"net/url"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/aykhans/dodo/utils"
|
|
||||||
"github.com/valyala/fasthttp"
|
|
||||||
"github.com/valyala/fasthttp/fasthttpproxy"
|
|
||||||
)
|
|
||||||
|
|
||||||
type ClientGeneratorFunc func() *fasthttp.HostClient
|
|
||||||
|
|
||||||
// getClients initializes and returns a slice of fasthttp.HostClient based on the provided parameters.
|
|
||||||
// It can either return clients with proxies or a single client without proxies.
|
|
||||||
func getClients(
|
|
||||||
ctx context.Context,
|
|
||||||
timeout time.Duration,
|
|
||||||
proxies []url.URL,
|
|
||||||
maxConns uint,
|
|
||||||
URL url.URL,
|
|
||||||
) []*fasthttp.HostClient {
|
|
||||||
isTLS := URL.Scheme == "https"
|
|
||||||
|
|
||||||
if proxiesLen := len(proxies); proxiesLen > 0 {
|
|
||||||
clients := make([]*fasthttp.HostClient, 0, proxiesLen)
|
|
||||||
addr := URL.Host
|
|
||||||
if isTLS && URL.Port() == "" {
|
|
||||||
addr += ":443"
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, proxy := range proxies {
|
|
||||||
dialFunc, err := getDialFunc(&proxy, timeout)
|
|
||||||
if err != nil {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
clients = append(clients, &fasthttp.HostClient{
|
|
||||||
MaxConns: int(maxConns),
|
|
||||||
IsTLS: isTLS,
|
|
||||||
Addr: addr,
|
|
||||||
Dial: dialFunc,
|
|
||||||
MaxIdleConnDuration: timeout,
|
|
||||||
MaxConnDuration: timeout,
|
|
||||||
WriteTimeout: timeout,
|
|
||||||
ReadTimeout: timeout,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return clients
|
|
||||||
}
|
|
||||||
|
|
||||||
client := &fasthttp.HostClient{
|
|
||||||
MaxConns: int(maxConns),
|
|
||||||
IsTLS: isTLS,
|
|
||||||
Addr: URL.Host,
|
|
||||||
MaxIdleConnDuration: timeout,
|
|
||||||
MaxConnDuration: timeout,
|
|
||||||
WriteTimeout: timeout,
|
|
||||||
ReadTimeout: timeout,
|
|
||||||
}
|
|
||||||
return []*fasthttp.HostClient{client}
|
|
||||||
}
|
|
||||||
|
|
||||||
// getDialFunc returns the appropriate fasthttp.DialFunc based on the provided proxy URL scheme.
|
|
||||||
// It supports SOCKS5 ('socks5' or 'socks5h') and HTTP ('http') proxy schemes.
|
|
||||||
// For HTTP proxies, the timeout parameter determines connection timeouts.
|
|
||||||
// Returns an error if the proxy scheme is unsupported.
|
|
||||||
func getDialFunc(proxy *url.URL, timeout time.Duration) (fasthttp.DialFunc, error) {
|
|
||||||
var dialer fasthttp.DialFunc
|
|
||||||
|
|
||||||
if proxy.Scheme == "socks5" || proxy.Scheme == "socks5h" {
|
|
||||||
dialer = fasthttpproxy.FasthttpSocksDialerDualStack(proxy.String())
|
|
||||||
} else if proxy.Scheme == "http" {
|
|
||||||
dialer = fasthttpproxy.FasthttpHTTPDialerDualStackTimeout(proxy.String(), timeout)
|
|
||||||
} else {
|
|
||||||
return nil, errors.New("unsupported proxy scheme")
|
|
||||||
}
|
|
||||||
return dialer, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// getSharedClientFuncMultiple returns a ClientGeneratorFunc that cycles through a list of fasthttp.HostClient instances.
|
|
||||||
// The function uses a local random number generator to determine the starting index and stop index for cycling through the clients.
|
|
||||||
// The returned function isn't thread-safe and should be used in a single-threaded context.
|
|
||||||
func getSharedClientFuncMultiple(clients []*fasthttp.HostClient, localRand *rand.Rand) ClientGeneratorFunc {
|
|
||||||
return utils.RandomValueCycle(clients, localRand)
|
|
||||||
}
|
|
||||||
|
|
||||||
// getSharedClientFuncSingle returns a ClientGeneratorFunc that always returns the provided fasthttp.HostClient instance.
|
|
||||||
// This can be useful for sharing a single client instance across multiple requests.
|
|
||||||
func getSharedClientFuncSingle(client *fasthttp.HostClient) ClientGeneratorFunc {
|
|
||||||
return func() *fasthttp.HostClient {
|
|
||||||
return client
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,56 +0,0 @@
|
|||||||
package requests
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"fmt"
|
|
||||||
"sync"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/jedib0t/go-pretty/v6/progress"
|
|
||||||
)
|
|
||||||
|
|
||||||
// streamProgress streams the progress of a task to the console using a progress bar.
|
|
||||||
// It listens for increments on the provided channel and updates the progress bar accordingly.
|
|
||||||
//
|
|
||||||
// The function will stop and mark the progress as errored if the context is cancelled.
|
|
||||||
// It will also stop and mark the progress as done when the total number of increments is reached.
|
|
||||||
func streamProgress(
|
|
||||||
ctx context.Context,
|
|
||||||
wg *sync.WaitGroup,
|
|
||||||
total uint,
|
|
||||||
message string,
|
|
||||||
increase <-chan int64,
|
|
||||||
) {
|
|
||||||
defer wg.Done()
|
|
||||||
pw := progress.NewWriter()
|
|
||||||
pw.SetTrackerPosition(progress.PositionRight)
|
|
||||||
pw.SetStyle(progress.StyleBlocks)
|
|
||||||
pw.SetTrackerLength(40)
|
|
||||||
pw.SetUpdateFrequency(time.Millisecond * 250)
|
|
||||||
if total == 0 {
|
|
||||||
pw.Style().Visibility.Percentage = false
|
|
||||||
}
|
|
||||||
go pw.Render()
|
|
||||||
dodosTracker := progress.Tracker{
|
|
||||||
Message: message,
|
|
||||||
Total: int64(total),
|
|
||||||
}
|
|
||||||
pw.AppendTracker(&dodosTracker)
|
|
||||||
|
|
||||||
for {
|
|
||||||
select {
|
|
||||||
case <-ctx.Done():
|
|
||||||
if err := ctx.Err(); err == context.Canceled || err == context.DeadlineExceeded {
|
|
||||||
dodosTracker.MarkAsDone()
|
|
||||||
} else {
|
|
||||||
dodosTracker.MarkAsErrored()
|
|
||||||
}
|
|
||||||
time.Sleep(time.Millisecond * 300)
|
|
||||||
fmt.Printf("\r")
|
|
||||||
return
|
|
||||||
|
|
||||||
case value := <-increase:
|
|
||||||
dodosTracker.Increment(value)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,247 +0,0 @@
|
|||||||
package requests
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"math/rand"
|
|
||||||
"net/url"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/aykhans/dodo/config"
|
|
||||||
"github.com/aykhans/dodo/types"
|
|
||||||
"github.com/aykhans/dodo/utils"
|
|
||||||
"github.com/valyala/fasthttp"
|
|
||||||
)
|
|
||||||
|
|
||||||
type RequestGeneratorFunc func() *fasthttp.Request
|
|
||||||
|
|
||||||
// Request represents an HTTP request to be sent using the fasthttp client.
|
|
||||||
// It isn't thread-safe and should be used by a single goroutine.
|
|
||||||
type Request struct {
|
|
||||||
getClient ClientGeneratorFunc
|
|
||||||
getRequest RequestGeneratorFunc
|
|
||||||
}
|
|
||||||
|
|
||||||
// Send sends the HTTP request using the fasthttp client with a specified timeout.
|
|
||||||
// It returns the HTTP response or an error if the request fails or times out.
|
|
||||||
func (r *Request) Send(ctx context.Context, timeout time.Duration) (*fasthttp.Response, error) {
|
|
||||||
client := r.getClient()
|
|
||||||
request := r.getRequest()
|
|
||||||
defer fasthttp.ReleaseRequest(request)
|
|
||||||
|
|
||||||
response := fasthttp.AcquireResponse()
|
|
||||||
ch := make(chan error)
|
|
||||||
go func() {
|
|
||||||
err := client.DoTimeout(request, response, timeout)
|
|
||||||
ch <- err
|
|
||||||
}()
|
|
||||||
select {
|
|
||||||
case err := <-ch:
|
|
||||||
if err != nil {
|
|
||||||
fasthttp.ReleaseResponse(response)
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return response, nil
|
|
||||||
case <-time.After(timeout):
|
|
||||||
fasthttp.ReleaseResponse(response)
|
|
||||||
return nil, types.ErrTimeout
|
|
||||||
case <-ctx.Done():
|
|
||||||
return nil, types.ErrInterrupt
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// newRequest creates a new Request instance based on the provided configuration and clients.
|
|
||||||
// It initializes a random number generator using the current time and a unique identifier (uid).
|
|
||||||
// Depending on the number of clients provided, it sets up a function to select the appropriate client.
|
|
||||||
// It also sets up a function to generate the request based on the provided configuration.
|
|
||||||
func newRequest(
|
|
||||||
requestConfig config.RequestConfig,
|
|
||||||
clients []*fasthttp.HostClient,
|
|
||||||
uid int64,
|
|
||||||
) *Request {
|
|
||||||
localRand := rand.New(rand.NewSource(time.Now().UnixNano() + uid))
|
|
||||||
|
|
||||||
clientsCount := len(clients)
|
|
||||||
if clientsCount < 1 {
|
|
||||||
panic("no clients")
|
|
||||||
}
|
|
||||||
|
|
||||||
getClient := ClientGeneratorFunc(nil)
|
|
||||||
if clientsCount == 1 {
|
|
||||||
getClient = getSharedClientFuncSingle(clients[0])
|
|
||||||
} else {
|
|
||||||
getClient = getSharedClientFuncMultiple(clients, localRand)
|
|
||||||
}
|
|
||||||
|
|
||||||
getRequest := getRequestGeneratorFunc(
|
|
||||||
requestConfig.URL,
|
|
||||||
requestConfig.Params,
|
|
||||||
requestConfig.Headers,
|
|
||||||
requestConfig.Cookies,
|
|
||||||
requestConfig.Method,
|
|
||||||
requestConfig.Body,
|
|
||||||
localRand,
|
|
||||||
)
|
|
||||||
|
|
||||||
requests := &Request{
|
|
||||||
getClient: getClient,
|
|
||||||
getRequest: getRequest,
|
|
||||||
}
|
|
||||||
|
|
||||||
return requests
|
|
||||||
}
|
|
||||||
|
|
||||||
// getRequestGeneratorFunc returns a RequestGeneratorFunc which generates HTTP requests with the specified parameters.
|
|
||||||
// The function uses a local random number generator to select bodies, headers, cookies, and parameters if multiple options are provided.
|
|
||||||
func getRequestGeneratorFunc(
|
|
||||||
URL url.URL,
|
|
||||||
params types.Params,
|
|
||||||
headers types.Headers,
|
|
||||||
cookies types.Cookies,
|
|
||||||
method string,
|
|
||||||
bodies []string,
|
|
||||||
localRand *rand.Rand,
|
|
||||||
) RequestGeneratorFunc {
|
|
||||||
bodiesLen := len(bodies)
|
|
||||||
getBody := func() string { return "" }
|
|
||||||
if bodiesLen == 1 {
|
|
||||||
getBody = func() string { return bodies[0] }
|
|
||||||
} else if bodiesLen > 1 {
|
|
||||||
getBody = utils.RandomValueCycle(bodies, localRand)
|
|
||||||
}
|
|
||||||
|
|
||||||
getParams := getKeyValueGeneratorFunc(params, localRand)
|
|
||||||
getHeaders := getKeyValueGeneratorFunc(headers, localRand)
|
|
||||||
getCookies := getKeyValueGeneratorFunc(cookies, localRand)
|
|
||||||
|
|
||||||
return func() *fasthttp.Request {
|
|
||||||
return newFasthttpRequest(
|
|
||||||
URL,
|
|
||||||
getParams(),
|
|
||||||
getHeaders(),
|
|
||||||
getCookies(),
|
|
||||||
method,
|
|
||||||
getBody(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// newFasthttpRequest creates a new fasthttp.Request object with the provided parameters.
|
|
||||||
// It sets the request URI, host header, headers, cookies, params, method, and body.
|
|
||||||
func newFasthttpRequest(
|
|
||||||
URL url.URL,
|
|
||||||
params []types.KeyValue[string, string],
|
|
||||||
headers []types.KeyValue[string, string],
|
|
||||||
cookies []types.KeyValue[string, string],
|
|
||||||
method string,
|
|
||||||
body string,
|
|
||||||
) *fasthttp.Request {
|
|
||||||
request := fasthttp.AcquireRequest()
|
|
||||||
request.SetRequestURI(URL.Path)
|
|
||||||
|
|
||||||
// Set the host of the request to the host header
|
|
||||||
// If the host header is not set, the request will fail
|
|
||||||
// If there is host header in the headers, it will be overwritten
|
|
||||||
request.Header.SetHost(URL.Host)
|
|
||||||
setRequestParams(request, params)
|
|
||||||
setRequestHeaders(request, headers)
|
|
||||||
setRequestCookies(request, cookies)
|
|
||||||
setRequestMethod(request, method)
|
|
||||||
setRequestBody(request, body)
|
|
||||||
if URL.Scheme == "https" {
|
|
||||||
request.URI().SetScheme("https")
|
|
||||||
}
|
|
||||||
|
|
||||||
return request
|
|
||||||
}
|
|
||||||
|
|
||||||
// setRequestParams adds the query parameters of the given request based on the provided key-value pairs.
|
|
||||||
func setRequestParams(req *fasthttp.Request, params []types.KeyValue[string, string]) {
|
|
||||||
for _, param := range params {
|
|
||||||
req.URI().QueryArgs().Add(param.Key, param.Value)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// setRequestHeaders adds the headers of the given request with the provided key-value pairs.
|
|
||||||
func setRequestHeaders(req *fasthttp.Request, headers []types.KeyValue[string, string]) {
|
|
||||||
for _, header := range headers {
|
|
||||||
req.Header.Add(header.Key, header.Value)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// setRequestCookies adds the cookies of the given request with the provided key-value pairs.
|
|
||||||
func setRequestCookies(req *fasthttp.Request, cookies []types.KeyValue[string, string]) {
|
|
||||||
for _, cookie := range cookies {
|
|
||||||
req.Header.Add("Cookie", cookie.Key+"="+cookie.Value)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// setRequestMethod sets the HTTP request method for the given request.
|
|
||||||
func setRequestMethod(req *fasthttp.Request, method string) {
|
|
||||||
req.Header.SetMethod(method)
|
|
||||||
}
|
|
||||||
|
|
||||||
// setRequestBody sets the request body of the given fasthttp.Request object.
|
|
||||||
// The body parameter is a string that will be converted to a byte slice and set as the request body.
|
|
||||||
func setRequestBody(req *fasthttp.Request, body string) {
|
|
||||||
req.SetBody([]byte(body))
|
|
||||||
}
|
|
||||||
|
|
||||||
// getKeyValueGeneratorFunc creates a function that generates key-value pairs for HTTP requests.
|
|
||||||
// It takes a slice of key-value pairs where each key maps to a slice of possible values,
|
|
||||||
// and a random number generator.
|
|
||||||
//
|
|
||||||
// If any key has multiple possible values, the function will randomly select one value for each
|
|
||||||
// call (using the provided random number generator). If all keys have at most one value, the
|
|
||||||
// function will always return the same set of key-value pairs for efficiency.
|
|
||||||
func getKeyValueGeneratorFunc[
|
|
||||||
T []types.KeyValue[string, string],
|
|
||||||
](
|
|
||||||
keyValueSlice []types.KeyValue[string, []string],
|
|
||||||
localRand *rand.Rand,
|
|
||||||
) func() T {
|
|
||||||
getKeyValueSlice := []map[string]func() string{}
|
|
||||||
isRandom := false
|
|
||||||
|
|
||||||
for _, kv := range keyValueSlice {
|
|
||||||
valuesLen := len(kv.Value)
|
|
||||||
|
|
||||||
getValueFunc := func() string { return "" }
|
|
||||||
if valuesLen == 1 {
|
|
||||||
getValueFunc = func() string { return kv.Value[0] }
|
|
||||||
} else if valuesLen > 1 {
|
|
||||||
getValueFunc = utils.RandomValueCycle(kv.Value, localRand)
|
|
||||||
isRandom = true
|
|
||||||
}
|
|
||||||
|
|
||||||
getKeyValueSlice = append(
|
|
||||||
getKeyValueSlice,
|
|
||||||
map[string]func() string{kv.Key: getValueFunc},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
if isRandom {
|
|
||||||
return func() T {
|
|
||||||
keyValues := make(T, len(getKeyValueSlice))
|
|
||||||
for i, keyValue := range getKeyValueSlice {
|
|
||||||
for key, value := range keyValue {
|
|
||||||
keyValues[i] = types.KeyValue[string, string]{
|
|
||||||
Key: key,
|
|
||||||
Value: value(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return keyValues
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
keyValues := make(T, len(getKeyValueSlice))
|
|
||||||
for i, keyValue := range getKeyValueSlice {
|
|
||||||
for key, value := range keyValue {
|
|
||||||
keyValues[i] = types.KeyValue[string, string]{
|
|
||||||
Key: key,
|
|
||||||
Value: value(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return func() T { return keyValues }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,108 +0,0 @@
|
|||||||
package requests
|
|
||||||
|
|
||||||
import (
|
|
||||||
"os"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/aykhans/dodo/types"
|
|
||||||
"github.com/aykhans/dodo/utils"
|
|
||||||
"github.com/jedib0t/go-pretty/v6/table"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Response struct {
|
|
||||||
Response string
|
|
||||||
Time time.Duration
|
|
||||||
}
|
|
||||||
|
|
||||||
type Responses []*Response
|
|
||||||
|
|
||||||
// Print prints the responses in a tabular format, including information such as
|
|
||||||
// response count, minimum time, maximum time, average time, and latency percentiles.
|
|
||||||
func (responses Responses) Print() {
|
|
||||||
total := struct {
|
|
||||||
Count int
|
|
||||||
Min time.Duration
|
|
||||||
Max time.Duration
|
|
||||||
Sum time.Duration
|
|
||||||
P90 time.Duration
|
|
||||||
P95 time.Duration
|
|
||||||
P99 time.Duration
|
|
||||||
}{
|
|
||||||
Count: len(responses),
|
|
||||||
Min: responses[0].Time,
|
|
||||||
Max: responses[0].Time,
|
|
||||||
}
|
|
||||||
mergedResponses := make(map[string]types.Durations)
|
|
||||||
var allDurations types.Durations
|
|
||||||
|
|
||||||
for _, response := range responses {
|
|
||||||
if response.Time < total.Min {
|
|
||||||
total.Min = response.Time
|
|
||||||
}
|
|
||||||
if response.Time > total.Max {
|
|
||||||
total.Max = response.Time
|
|
||||||
}
|
|
||||||
total.Sum += response.Time
|
|
||||||
|
|
||||||
mergedResponses[response.Response] = append(
|
|
||||||
mergedResponses[response.Response],
|
|
||||||
response.Time,
|
|
||||||
)
|
|
||||||
allDurations = append(allDurations, response.Time)
|
|
||||||
}
|
|
||||||
allDurations.Sort()
|
|
||||||
allDurationsLenAsFloat := float64(len(allDurations) - 1)
|
|
||||||
total.P90 = allDurations[int(0.90*allDurationsLenAsFloat)]
|
|
||||||
total.P95 = allDurations[int(0.95*allDurationsLenAsFloat)]
|
|
||||||
total.P99 = allDurations[int(0.99*allDurationsLenAsFloat)]
|
|
||||||
|
|
||||||
t := table.NewWriter()
|
|
||||||
t.SetOutputMirror(os.Stdout)
|
|
||||||
t.SetStyle(table.StyleLight)
|
|
||||||
t.SetColumnConfigs([]table.ColumnConfig{
|
|
||||||
{Number: 1, WidthMax: 40},
|
|
||||||
})
|
|
||||||
t.AppendHeader(table.Row{
|
|
||||||
"Response",
|
|
||||||
"Count",
|
|
||||||
"Min",
|
|
||||||
"Max",
|
|
||||||
"Average",
|
|
||||||
"P90",
|
|
||||||
"P95",
|
|
||||||
"P99",
|
|
||||||
})
|
|
||||||
|
|
||||||
var roundPrecision int64 = 4
|
|
||||||
for key, durations := range mergedResponses {
|
|
||||||
durations.Sort()
|
|
||||||
durationsLen := len(durations)
|
|
||||||
durationsLenAsFloat := float64(durationsLen - 1)
|
|
||||||
|
|
||||||
t.AppendRow(table.Row{
|
|
||||||
key,
|
|
||||||
durationsLen,
|
|
||||||
utils.DurationRoundBy(*durations.First(), roundPrecision),
|
|
||||||
utils.DurationRoundBy(*durations.Last(), roundPrecision),
|
|
||||||
utils.DurationRoundBy(durations.Avg(), roundPrecision),
|
|
||||||
utils.DurationRoundBy(durations[int(0.90*durationsLenAsFloat)], roundPrecision),
|
|
||||||
utils.DurationRoundBy(durations[int(0.95*durationsLenAsFloat)], roundPrecision),
|
|
||||||
utils.DurationRoundBy(durations[int(0.99*durationsLenAsFloat)], roundPrecision),
|
|
||||||
})
|
|
||||||
t.AppendSeparator()
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(mergedResponses) > 1 {
|
|
||||||
t.AppendRow(table.Row{
|
|
||||||
"Total",
|
|
||||||
total.Count,
|
|
||||||
utils.DurationRoundBy(total.Min, roundPrecision),
|
|
||||||
utils.DurationRoundBy(total.Max, roundPrecision),
|
|
||||||
utils.DurationRoundBy(total.Sum/time.Duration(total.Count), roundPrecision), // Average
|
|
||||||
utils.DurationRoundBy(total.P90, roundPrecision),
|
|
||||||
utils.DurationRoundBy(total.P95, roundPrecision),
|
|
||||||
utils.DurationRoundBy(total.P99, roundPrecision),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
t.Render()
|
|
||||||
}
|
|
||||||
204
requests/run.go
204
requests/run.go
@@ -1,204 +0,0 @@
|
|||||||
package requests
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"strconv"
|
|
||||||
"sync"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/aykhans/dodo/config"
|
|
||||||
"github.com/aykhans/dodo/types"
|
|
||||||
"github.com/aykhans/dodo/utils"
|
|
||||||
"github.com/valyala/fasthttp"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Run executes the main logic for processing requests based on the provided configuration.
|
|
||||||
// It initializes clients based on the request configuration and releases the dodos.
|
|
||||||
// If the context is canceled and no responses are collected, it returns an interrupt error.
|
|
||||||
//
|
|
||||||
// Parameters:
|
|
||||||
// - ctx: The context for managing request lifecycle and cancellation.
|
|
||||||
// - requestConfig: The configuration for the request, including timeout, proxies, and other settings.
|
|
||||||
func Run(ctx context.Context, requestConfig *config.RequestConfig) (Responses, error) {
|
|
||||||
clients := getClients(
|
|
||||||
ctx,
|
|
||||||
requestConfig.Timeout,
|
|
||||||
requestConfig.Proxies,
|
|
||||||
requestConfig.GetMaxConns(fasthttp.DefaultMaxConnsPerHost),
|
|
||||||
requestConfig.URL,
|
|
||||||
)
|
|
||||||
if clients == nil {
|
|
||||||
return nil, types.ErrInterrupt
|
|
||||||
}
|
|
||||||
|
|
||||||
responses := releaseDodos(ctx, requestConfig, clients)
|
|
||||||
if ctx.Err() != nil && len(responses) == 0 {
|
|
||||||
return nil, types.ErrInterrupt
|
|
||||||
}
|
|
||||||
|
|
||||||
return responses, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// releaseDodos sends requests concurrently using multiple dodos (goroutines) and returns the aggregated responses.
|
|
||||||
//
|
|
||||||
// The function performs the following steps:
|
|
||||||
// 1. Initializes wait groups and other necessary variables.
|
|
||||||
// 2. Starts a goroutine to stream progress updates.
|
|
||||||
// 3. Distributes the total request count among the dodos.
|
|
||||||
// 4. Starts a goroutine for each dodo to send requests concurrently.
|
|
||||||
// 5. Waits for all dodos to complete their requests.
|
|
||||||
// 6. Cancels the progress streaming context and waits for the progress goroutine to finish.
|
|
||||||
// 7. Flattens and returns the aggregated responses.
|
|
||||||
func releaseDodos(
|
|
||||||
ctx context.Context,
|
|
||||||
requestConfig *config.RequestConfig,
|
|
||||||
clients []*fasthttp.HostClient,
|
|
||||||
) Responses {
|
|
||||||
var (
|
|
||||||
wg sync.WaitGroup
|
|
||||||
streamWG sync.WaitGroup
|
|
||||||
requestCountPerDodo uint
|
|
||||||
dodosCount uint = requestConfig.GetValidDodosCountForRequests()
|
|
||||||
responses = make([][]*Response, dodosCount)
|
|
||||||
increase = make(chan int64, requestConfig.RequestCount)
|
|
||||||
)
|
|
||||||
|
|
||||||
wg.Add(int(dodosCount))
|
|
||||||
streamWG.Add(1)
|
|
||||||
streamCtx, streamCtxCancel := context.WithCancel(context.Background())
|
|
||||||
|
|
||||||
go streamProgress(streamCtx, &streamWG, requestConfig.RequestCount, "Dodos Working🔥", increase)
|
|
||||||
|
|
||||||
if requestConfig.RequestCount == 0 {
|
|
||||||
for i := range dodosCount {
|
|
||||||
go sendRequest(
|
|
||||||
ctx,
|
|
||||||
newRequest(*requestConfig, clients, int64(i)),
|
|
||||||
requestConfig.Timeout,
|
|
||||||
&responses[i],
|
|
||||||
increase,
|
|
||||||
&wg,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
for i := range dodosCount {
|
|
||||||
if i+1 == dodosCount {
|
|
||||||
requestCountPerDodo = requestConfig.RequestCount - (i * requestConfig.RequestCount / dodosCount)
|
|
||||||
} else {
|
|
||||||
requestCountPerDodo = ((i + 1) * requestConfig.RequestCount / dodosCount) -
|
|
||||||
(i * requestConfig.RequestCount / dodosCount)
|
|
||||||
}
|
|
||||||
|
|
||||||
go sendRequestByCount(
|
|
||||||
ctx,
|
|
||||||
newRequest(*requestConfig, clients, int64(i)),
|
|
||||||
requestConfig.Timeout,
|
|
||||||
requestCountPerDodo,
|
|
||||||
&responses[i],
|
|
||||||
increase,
|
|
||||||
&wg,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
wg.Wait()
|
|
||||||
streamCtxCancel()
|
|
||||||
streamWG.Wait()
|
|
||||||
return utils.Flatten(responses)
|
|
||||||
}
|
|
||||||
|
|
||||||
// sendRequestByCount sends a specified number of HTTP requests concurrently with a given timeout.
|
|
||||||
// It appends the responses to the provided responseData slice and sends the count of completed requests
|
|
||||||
// to the increase channel. The function terminates early if the context is canceled or if a custom
|
|
||||||
// interrupt error is encountered.
|
|
||||||
func sendRequestByCount(
|
|
||||||
ctx context.Context,
|
|
||||||
request *Request,
|
|
||||||
timeout time.Duration,
|
|
||||||
requestCount uint,
|
|
||||||
responseData *[]*Response,
|
|
||||||
increase chan<- int64,
|
|
||||||
wg *sync.WaitGroup,
|
|
||||||
) {
|
|
||||||
defer wg.Done()
|
|
||||||
|
|
||||||
for range requestCount {
|
|
||||||
if ctx.Err() != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
func() {
|
|
||||||
startTime := time.Now()
|
|
||||||
response, err := request.Send(ctx, timeout)
|
|
||||||
completedTime := time.Since(startTime)
|
|
||||||
if response != nil {
|
|
||||||
defer fasthttp.ReleaseResponse(response)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
if err == types.ErrInterrupt {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
*responseData = append(*responseData, &Response{
|
|
||||||
Response: err.Error(),
|
|
||||||
Time: completedTime,
|
|
||||||
})
|
|
||||||
increase <- 1
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
*responseData = append(*responseData, &Response{
|
|
||||||
Response: strconv.Itoa(response.StatusCode()),
|
|
||||||
Time: completedTime,
|
|
||||||
})
|
|
||||||
increase <- 1
|
|
||||||
}()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// sendRequest continuously sends HTTP requests until the context is canceled.
|
|
||||||
// It records the response status code or error message along with the response time,
|
|
||||||
// and signals each completed request through the increase channel.
|
|
||||||
func sendRequest(
|
|
||||||
ctx context.Context,
|
|
||||||
request *Request,
|
|
||||||
timeout time.Duration,
|
|
||||||
responseData *[]*Response,
|
|
||||||
increase chan<- int64,
|
|
||||||
wg *sync.WaitGroup,
|
|
||||||
) {
|
|
||||||
defer wg.Done()
|
|
||||||
|
|
||||||
for {
|
|
||||||
if ctx.Err() != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
func() {
|
|
||||||
startTime := time.Now()
|
|
||||||
response, err := request.Send(ctx, timeout)
|
|
||||||
completedTime := time.Since(startTime)
|
|
||||||
if response != nil {
|
|
||||||
defer fasthttp.ReleaseResponse(response)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
if err == types.ErrInterrupt {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
*responseData = append(*responseData, &Response{
|
|
||||||
Response: err.Error(),
|
|
||||||
Time: completedTime,
|
|
||||||
})
|
|
||||||
increase <- 1
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
*responseData = append(*responseData, &Response{
|
|
||||||
Response: strconv.Itoa(response.StatusCode()),
|
|
||||||
Time: completedTime,
|
|
||||||
})
|
|
||||||
increase <- 1
|
|
||||||
}()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,94 +0,0 @@
|
|||||||
package types
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"github.com/jedib0t/go-pretty/v6/text"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Body []string
|
|
||||||
|
|
||||||
func (body Body) String() string {
|
|
||||||
var buffer bytes.Buffer
|
|
||||||
if len(body) == 0 {
|
|
||||||
return string(buffer.Bytes())
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(body) == 1 {
|
|
||||||
buffer.WriteString(body[0])
|
|
||||||
return string(buffer.Bytes())
|
|
||||||
}
|
|
||||||
|
|
||||||
buffer.WriteString(text.FgBlue.Sprint("Random") + "[\n")
|
|
||||||
|
|
||||||
indent := " "
|
|
||||||
|
|
||||||
displayLimit := 5
|
|
||||||
|
|
||||||
for i, item := range body[:min(len(body), displayLimit)] {
|
|
||||||
if i > 0 {
|
|
||||||
buffer.WriteString(",\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
buffer.WriteString(indent + item)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add remaining count if there are more items
|
|
||||||
if remainingValues := len(body) - displayLimit; remainingValues > 0 {
|
|
||||||
buffer.WriteString(",\n" + indent + text.FgGreen.Sprintf("+%d bodies", remainingValues))
|
|
||||||
}
|
|
||||||
|
|
||||||
buffer.WriteString("\n]")
|
|
||||||
return string(buffer.Bytes())
|
|
||||||
}
|
|
||||||
|
|
||||||
func (body *Body) UnmarshalJSON(b []byte) error {
|
|
||||||
var data any
|
|
||||||
if err := json.Unmarshal(b, &data); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
switch v := data.(type) {
|
|
||||||
case string:
|
|
||||||
*body = []string{v}
|
|
||||||
case []any:
|
|
||||||
var slice []string
|
|
||||||
for _, item := range v {
|
|
||||||
slice = append(slice, fmt.Sprintf("%v", item))
|
|
||||||
}
|
|
||||||
*body = slice
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("invalid type for Body: %T (should be string or []string)", v)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (body *Body) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
|
||||||
var data any
|
|
||||||
if err := unmarshal(&data); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
switch v := data.(type) {
|
|
||||||
case string:
|
|
||||||
*body = []string{v}
|
|
||||||
case []any:
|
|
||||||
var slice []string
|
|
||||||
for _, item := range v {
|
|
||||||
slice = append(slice, fmt.Sprintf("%v", item))
|
|
||||||
}
|
|
||||||
*body = slice
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("invalid type for Body: %T (should be string or []string)", v)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (body *Body) Set(value string) error {
|
|
||||||
*body = append(*body, value)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
@@ -1,32 +0,0 @@
|
|||||||
package types
|
|
||||||
|
|
||||||
import "strings"
|
|
||||||
|
|
||||||
type FileLocationType int
|
|
||||||
|
|
||||||
const (
|
|
||||||
FileLocationTypeLocal FileLocationType = iota
|
|
||||||
FileLocationTypeRemoteHTTP
|
|
||||||
)
|
|
||||||
|
|
||||||
type ConfigFile string
|
|
||||||
|
|
||||||
func (configFile ConfigFile) String() string {
|
|
||||||
return string(configFile)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (configFile ConfigFile) LocationType() FileLocationType {
|
|
||||||
if strings.HasPrefix(string(configFile), "http://") || strings.HasPrefix(string(configFile), "https://") {
|
|
||||||
return FileLocationTypeRemoteHTTP
|
|
||||||
}
|
|
||||||
return FileLocationTypeLocal
|
|
||||||
}
|
|
||||||
|
|
||||||
func (configFile ConfigFile) Extension() string {
|
|
||||||
i := strings.LastIndex(configFile.String(), ".")
|
|
||||||
if i == -1 {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
return configFile.String()[i+1:]
|
|
||||||
}
|
|
||||||
139
types/cookies.go
139
types/cookies.go
@@ -1,139 +0,0 @@
|
|||||||
package types
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/jedib0t/go-pretty/v6/text"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Cookies []KeyValue[string, []string]
|
|
||||||
|
|
||||||
func (cookies Cookies) String() string {
|
|
||||||
var buffer bytes.Buffer
|
|
||||||
if len(cookies) == 0 {
|
|
||||||
return string(buffer.Bytes())
|
|
||||||
}
|
|
||||||
|
|
||||||
indent := " "
|
|
||||||
|
|
||||||
displayLimit := 3
|
|
||||||
|
|
||||||
for i, item := range cookies[:min(len(cookies), displayLimit)] {
|
|
||||||
if i > 0 {
|
|
||||||
buffer.WriteString(",\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(item.Value) == 1 {
|
|
||||||
buffer.WriteString(item.Key + ": " + item.Value[0])
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
buffer.WriteString(item.Key + ": " + text.FgBlue.Sprint("Random") + "[\n")
|
|
||||||
|
|
||||||
for ii, v := range item.Value[:min(len(item.Value), displayLimit)] {
|
|
||||||
if ii == len(item.Value)-1 {
|
|
||||||
buffer.WriteString(indent + v + "\n")
|
|
||||||
} else {
|
|
||||||
buffer.WriteString(indent + v + ",\n")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add remaining values count if needed
|
|
||||||
if remainingValues := len(item.Value) - displayLimit; remainingValues > 0 {
|
|
||||||
buffer.WriteString(indent + text.FgGreen.Sprintf("+%d values", remainingValues) + "\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
buffer.WriteString("]")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add remaining key-value pairs count if needed
|
|
||||||
if remainingPairs := len(cookies) - displayLimit; remainingPairs > 0 {
|
|
||||||
buffer.WriteString(",\n" + text.FgGreen.Sprintf("+%d cookies", remainingPairs))
|
|
||||||
}
|
|
||||||
|
|
||||||
return string(buffer.Bytes())
|
|
||||||
}
|
|
||||||
|
|
||||||
func (cookies *Cookies) AppendByKey(key, value string) {
|
|
||||||
if item := cookies.GetValue(key); item != nil {
|
|
||||||
*item = append(*item, value)
|
|
||||||
} else {
|
|
||||||
*cookies = append(*cookies, KeyValue[string, []string]{Key: key, Value: []string{value}})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (cookies Cookies) GetValue(key string) *[]string {
|
|
||||||
for i := range cookies {
|
|
||||||
if cookies[i].Key == key {
|
|
||||||
return &cookies[i].Value
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (cookies *Cookies) UnmarshalJSON(b []byte) error {
|
|
||||||
var data []map[string]any
|
|
||||||
if err := json.Unmarshal(b, &data); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, item := range data {
|
|
||||||
for key, value := range item {
|
|
||||||
switch parsedValue := value.(type) {
|
|
||||||
case string:
|
|
||||||
*cookies = append(*cookies, KeyValue[string, []string]{Key: key, Value: []string{parsedValue}})
|
|
||||||
case []any:
|
|
||||||
parsedStr := make([]string, len(parsedValue))
|
|
||||||
for i, item := range parsedValue {
|
|
||||||
parsedStr[i] = fmt.Sprintf("%v", item)
|
|
||||||
}
|
|
||||||
*cookies = append(*cookies, KeyValue[string, []string]{Key: key, Value: parsedStr})
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("unsupported type for cookies expected string or []string, got %T", parsedValue)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (cookies *Cookies) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
|
||||||
var raw []map[string]any
|
|
||||||
if err := unmarshal(&raw); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, param := range raw {
|
|
||||||
for key, value := range param {
|
|
||||||
switch parsed := value.(type) {
|
|
||||||
case string:
|
|
||||||
*cookies = append(*cookies, KeyValue[string, []string]{Key: key, Value: []string{parsed}})
|
|
||||||
case []any:
|
|
||||||
var values []string
|
|
||||||
for _, v := range parsed {
|
|
||||||
if str, ok := v.(string); ok {
|
|
||||||
values = append(values, str)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
*cookies = append(*cookies, KeyValue[string, []string]{Key: key, Value: values})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (cookies *Cookies) Set(value string) error {
|
|
||||||
parts := strings.SplitN(value, "=", 2)
|
|
||||||
switch len(parts) {
|
|
||||||
case 0:
|
|
||||||
cookies.AppendByKey("", "")
|
|
||||||
case 1:
|
|
||||||
cookies.AppendByKey(parts[0], "")
|
|
||||||
case 2:
|
|
||||||
cookies.AppendByKey(parts[0], parts[1])
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
@@ -1,57 +0,0 @@
|
|||||||
package types
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"errors"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Duration struct {
|
|
||||||
time.Duration
|
|
||||||
}
|
|
||||||
|
|
||||||
func (duration *Duration) UnmarshalJSON(b []byte) error {
|
|
||||||
var v any
|
|
||||||
if err := json.Unmarshal(b, &v); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
switch value := v.(type) {
|
|
||||||
case float64:
|
|
||||||
duration.Duration = time.Duration(value)
|
|
||||||
return nil
|
|
||||||
case string:
|
|
||||||
var err error
|
|
||||||
duration.Duration, err = time.ParseDuration(value)
|
|
||||||
if err != nil {
|
|
||||||
return errors.New("Duration is invalid (e.g. 400ms, 1s, 5m, 1h)")
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
default:
|
|
||||||
return errors.New("Duration is invalid (e.g. 400ms, 1s, 5m, 1h)")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (duration Duration) MarshalJSON() ([]byte, error) {
|
|
||||||
return json.Marshal(duration.Duration.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
func (duration *Duration) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
|
||||||
var v any
|
|
||||||
if err := unmarshal(&v); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
switch value := v.(type) {
|
|
||||||
case float64:
|
|
||||||
duration.Duration = time.Duration(value)
|
|
||||||
return nil
|
|
||||||
case string:
|
|
||||||
var err error
|
|
||||||
duration.Duration, err = time.ParseDuration(value)
|
|
||||||
if err != nil {
|
|
||||||
return errors.New("Duration is invalid (e.g. 400ms, 1s, 5m, 1h)")
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
default:
|
|
||||||
return errors.New("Duration is invalid (e.g. 400ms, 1s, 5m, 1h)")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,41 +0,0 @@
|
|||||||
package types
|
|
||||||
|
|
||||||
import (
|
|
||||||
"sort"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Durations []time.Duration
|
|
||||||
|
|
||||||
func (d Durations) Sort(ascending ...bool) {
|
|
||||||
// If ascending is provided and is false, sort in descending order
|
|
||||||
if len(ascending) > 0 && ascending[0] == false {
|
|
||||||
sort.Slice(d, func(i, j int) bool {
|
|
||||||
return d[i] > d[j]
|
|
||||||
})
|
|
||||||
} else { // Otherwise, sort in ascending order
|
|
||||||
sort.Slice(d, func(i, j int) bool {
|
|
||||||
return d[i] < d[j]
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d Durations) First() *time.Duration {
|
|
||||||
return &d[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d Durations) Last() *time.Duration {
|
|
||||||
return &d[len(d)-1]
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d Durations) Sum() time.Duration {
|
|
||||||
sum := time.Duration(0)
|
|
||||||
for _, duration := range d {
|
|
||||||
sum += duration
|
|
||||||
}
|
|
||||||
return sum
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d Durations) Avg() time.Duration {
|
|
||||||
return d.Sum() / time.Duration(len(d))
|
|
||||||
}
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
package types
|
|
||||||
|
|
||||||
import (
|
|
||||||
"errors"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
ErrInterrupt = errors.New("interrupted")
|
|
||||||
ErrTimeout = errors.New("timeout")
|
|
||||||
)
|
|
||||||
156
types/headers.go
156
types/headers.go
@@ -1,156 +0,0 @@
|
|||||||
package types
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/jedib0t/go-pretty/v6/text"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Headers []KeyValue[string, []string]
|
|
||||||
|
|
||||||
func (headers Headers) String() string {
|
|
||||||
var buffer bytes.Buffer
|
|
||||||
if len(headers) == 0 {
|
|
||||||
return string(buffer.Bytes())
|
|
||||||
}
|
|
||||||
|
|
||||||
indent := " "
|
|
||||||
|
|
||||||
displayLimit := 3
|
|
||||||
|
|
||||||
for i, item := range headers[:min(len(headers), displayLimit)] {
|
|
||||||
if i > 0 {
|
|
||||||
buffer.WriteString(",\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(item.Value) == 1 {
|
|
||||||
buffer.WriteString(item.Key + ": " + item.Value[0])
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
buffer.WriteString(item.Key + ": " + text.FgBlue.Sprint("Random") + "[\n")
|
|
||||||
|
|
||||||
for ii, v := range item.Value[:min(len(item.Value), displayLimit)] {
|
|
||||||
if ii == len(item.Value)-1 {
|
|
||||||
buffer.WriteString(indent + v + "\n")
|
|
||||||
} else {
|
|
||||||
buffer.WriteString(indent + v + ",\n")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add remaining values count if needed
|
|
||||||
if remainingValues := len(item.Value) - displayLimit; remainingValues > 0 {
|
|
||||||
buffer.WriteString(indent + text.FgGreen.Sprintf("+%d values", remainingValues) + "\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
buffer.WriteString("]")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add remaining key-value pairs count if needed
|
|
||||||
if remainingPairs := len(headers) - displayLimit; remainingPairs > 0 {
|
|
||||||
buffer.WriteString(",\n" + text.FgGreen.Sprintf("+%d headers", remainingPairs))
|
|
||||||
}
|
|
||||||
|
|
||||||
return string(buffer.Bytes())
|
|
||||||
}
|
|
||||||
|
|
||||||
func (headers *Headers) AppendByKey(key, value string) {
|
|
||||||
if item := headers.GetValue(key); item != nil {
|
|
||||||
*item = append(*item, value)
|
|
||||||
} else {
|
|
||||||
*headers = append(*headers, KeyValue[string, []string]{Key: key, Value: []string{value}})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (headers Headers) GetValue(key string) *[]string {
|
|
||||||
for i := range headers {
|
|
||||||
if headers[i].Key == key {
|
|
||||||
return &headers[i].Value
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (headers Headers) Has(key string) bool {
|
|
||||||
for i := range headers {
|
|
||||||
if headers[i].Key == key {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func (headers *Headers) UnmarshalJSON(b []byte) error {
|
|
||||||
var data []map[string]any
|
|
||||||
if err := json.Unmarshal(b, &data); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, item := range data {
|
|
||||||
for key, value := range item {
|
|
||||||
switch parsedValue := value.(type) {
|
|
||||||
case string:
|
|
||||||
*headers = append(*headers, KeyValue[string, []string]{Key: key, Value: []string{parsedValue}})
|
|
||||||
case []any:
|
|
||||||
parsedStr := make([]string, len(parsedValue))
|
|
||||||
for i, item := range parsedValue {
|
|
||||||
parsedStr[i] = fmt.Sprintf("%v", item)
|
|
||||||
}
|
|
||||||
*headers = append(*headers, KeyValue[string, []string]{Key: key, Value: parsedStr})
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("unsupported type for headers expected string or []string, got %T", parsedValue)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (headers *Headers) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
|
||||||
var raw []map[string]any
|
|
||||||
if err := unmarshal(&raw); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, param := range raw {
|
|
||||||
for key, value := range param {
|
|
||||||
switch parsed := value.(type) {
|
|
||||||
case string:
|
|
||||||
*headers = append(*headers, KeyValue[string, []string]{Key: key, Value: []string{parsed}})
|
|
||||||
case []any:
|
|
||||||
var values []string
|
|
||||||
for _, v := range parsed {
|
|
||||||
if str, ok := v.(string); ok {
|
|
||||||
values = append(values, str)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
*headers = append(*headers, KeyValue[string, []string]{Key: key, Value: values})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (headers *Headers) Set(value string) error {
|
|
||||||
parts := strings.SplitN(value, ":", 2)
|
|
||||||
switch len(parts) {
|
|
||||||
case 0:
|
|
||||||
headers.AppendByKey("", "")
|
|
||||||
case 1:
|
|
||||||
headers.AppendByKey(parts[0], "")
|
|
||||||
case 2:
|
|
||||||
headers.AppendByKey(parts[0], parts[1])
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (headers *Headers) SetIfNotExists(key string, value string) bool {
|
|
||||||
if headers.Has(key) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
*headers = append(*headers, KeyValue[string, []string]{Key: key, Value: []string{value}})
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
package types
|
|
||||||
|
|
||||||
type KeyValue[K comparable, V any] struct {
|
|
||||||
Key K
|
|
||||||
Value V
|
|
||||||
}
|
|
||||||
139
types/params.go
139
types/params.go
@@ -1,139 +0,0 @@
|
|||||||
package types
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/jedib0t/go-pretty/v6/text"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Params []KeyValue[string, []string]
|
|
||||||
|
|
||||||
func (params Params) String() string {
|
|
||||||
var buffer bytes.Buffer
|
|
||||||
if len(params) == 0 {
|
|
||||||
return string(buffer.Bytes())
|
|
||||||
}
|
|
||||||
|
|
||||||
indent := " "
|
|
||||||
|
|
||||||
displayLimit := 3
|
|
||||||
|
|
||||||
for i, item := range params[:min(len(params), displayLimit)] {
|
|
||||||
if i > 0 {
|
|
||||||
buffer.WriteString(",\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(item.Value) == 1 {
|
|
||||||
buffer.WriteString(item.Key + ": " + item.Value[0])
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
buffer.WriteString(item.Key + ": " + text.FgBlue.Sprint("Random") + "[\n")
|
|
||||||
|
|
||||||
for ii, v := range item.Value[:min(len(item.Value), displayLimit)] {
|
|
||||||
if ii == len(item.Value)-1 {
|
|
||||||
buffer.WriteString(indent + v + "\n")
|
|
||||||
} else {
|
|
||||||
buffer.WriteString(indent + v + ",\n")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add remaining values count if needed
|
|
||||||
if remainingValues := len(item.Value) - displayLimit; remainingValues > 0 {
|
|
||||||
buffer.WriteString(indent + text.FgGreen.Sprintf("+%d values", remainingValues) + "\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
buffer.WriteString("]")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add remaining key-value pairs count if needed
|
|
||||||
if remainingPairs := len(params) - displayLimit; remainingPairs > 0 {
|
|
||||||
buffer.WriteString(",\n" + text.FgGreen.Sprintf("+%d params", remainingPairs))
|
|
||||||
}
|
|
||||||
|
|
||||||
return string(buffer.Bytes())
|
|
||||||
}
|
|
||||||
|
|
||||||
func (params *Params) AppendByKey(key, value string) {
|
|
||||||
if item := params.GetValue(key); item != nil {
|
|
||||||
*item = append(*item, value)
|
|
||||||
} else {
|
|
||||||
*params = append(*params, KeyValue[string, []string]{Key: key, Value: []string{value}})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (params Params) GetValue(key string) *[]string {
|
|
||||||
for i := range params {
|
|
||||||
if params[i].Key == key {
|
|
||||||
return ¶ms[i].Value
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (params *Params) UnmarshalJSON(b []byte) error {
|
|
||||||
var data []map[string]any
|
|
||||||
if err := json.Unmarshal(b, &data); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, item := range data {
|
|
||||||
for key, value := range item {
|
|
||||||
switch parsedValue := value.(type) {
|
|
||||||
case string:
|
|
||||||
*params = append(*params, KeyValue[string, []string]{Key: key, Value: []string{parsedValue}})
|
|
||||||
case []any:
|
|
||||||
parsedStr := make([]string, len(parsedValue))
|
|
||||||
for i, item := range parsedValue {
|
|
||||||
parsedStr[i] = fmt.Sprintf("%v", item)
|
|
||||||
}
|
|
||||||
*params = append(*params, KeyValue[string, []string]{Key: key, Value: parsedStr})
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("unsupported type for params expected string or []string, got %T", parsedValue)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (params *Params) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
|
||||||
var raw []map[string]any
|
|
||||||
if err := unmarshal(&raw); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, param := range raw {
|
|
||||||
for key, value := range param {
|
|
||||||
switch parsed := value.(type) {
|
|
||||||
case string:
|
|
||||||
*params = append(*params, KeyValue[string, []string]{Key: key, Value: []string{parsed}})
|
|
||||||
case []any:
|
|
||||||
var values []string
|
|
||||||
for _, v := range parsed {
|
|
||||||
if str, ok := v.(string); ok {
|
|
||||||
values = append(values, str)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
*params = append(*params, KeyValue[string, []string]{Key: key, Value: values})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (params *Params) Set(value string) error {
|
|
||||||
parts := strings.SplitN(value, "=", 2)
|
|
||||||
switch len(parts) {
|
|
||||||
case 0:
|
|
||||||
params.AppendByKey("", "")
|
|
||||||
case 1:
|
|
||||||
params.AppendByKey(parts[0], "")
|
|
||||||
case 2:
|
|
||||||
params.AppendByKey(parts[0], parts[1])
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
116
types/proxies.go
116
types/proxies.go
@@ -1,116 +0,0 @@
|
|||||||
package types
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"net/url"
|
|
||||||
|
|
||||||
"github.com/jedib0t/go-pretty/v6/text"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Proxies holds the set of proxy server URLs, populated from config
// decoding (JSON/YAML) or repeated CLI flag values via Set.
type Proxies []url.URL
|
|
||||||
|
|
||||||
func (proxies Proxies) String() string {
|
|
||||||
var buffer bytes.Buffer
|
|
||||||
if len(proxies) == 0 {
|
|
||||||
return string(buffer.Bytes())
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(proxies) == 1 {
|
|
||||||
buffer.WriteString(proxies[0].String())
|
|
||||||
return string(buffer.Bytes())
|
|
||||||
}
|
|
||||||
|
|
||||||
buffer.WriteString(text.FgBlue.Sprint("Random") + "[\n")
|
|
||||||
|
|
||||||
indent := " "
|
|
||||||
|
|
||||||
displayLimit := 5
|
|
||||||
|
|
||||||
for i, item := range proxies[:min(len(proxies), displayLimit)] {
|
|
||||||
if i > 0 {
|
|
||||||
buffer.WriteString(",\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
buffer.WriteString(indent + item.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add remaining count if there are more items
|
|
||||||
if remainingValues := len(proxies) - displayLimit; remainingValues > 0 {
|
|
||||||
buffer.WriteString(",\n" + indent + text.FgGreen.Sprintf("+%d proxies", remainingValues))
|
|
||||||
}
|
|
||||||
|
|
||||||
buffer.WriteString("\n]")
|
|
||||||
return string(buffer.Bytes())
|
|
||||||
}
|
|
||||||
|
|
||||||
func (proxies *Proxies) UnmarshalJSON(b []byte) error {
|
|
||||||
var data any
|
|
||||||
if err := json.Unmarshal(b, &data); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
switch v := data.(type) {
|
|
||||||
case string:
|
|
||||||
parsed, err := url.Parse(v)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
*proxies = []url.URL{*parsed}
|
|
||||||
case []any:
|
|
||||||
var urls []url.URL
|
|
||||||
for _, item := range v {
|
|
||||||
url, err := url.Parse(item.(string))
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
urls = append(urls, *url)
|
|
||||||
}
|
|
||||||
*proxies = urls
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("invalid type for Body: %T (should be URL or []URL)", v)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (proxies *Proxies) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
|
||||||
var data any
|
|
||||||
if err := unmarshal(&data); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
switch v := data.(type) {
|
|
||||||
case string:
|
|
||||||
parsed, err := url.Parse(v)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
*proxies = []url.URL{*parsed}
|
|
||||||
case []any:
|
|
||||||
var urls []url.URL
|
|
||||||
for _, item := range v {
|
|
||||||
url, err := url.Parse(item.(string))
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
urls = append(urls, *url)
|
|
||||||
}
|
|
||||||
*proxies = urls
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("invalid type for Body: %T (should be URL or []URL)", v)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (proxies *Proxies) Set(value string) error {
|
|
||||||
parsedURL, err := url.Parse(value)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
*proxies = append(*proxies, *parsedURL)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
@@ -1,59 +0,0 @@
|
|||||||
package types
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"errors"
|
|
||||||
"net/url"
|
|
||||||
)
|
|
||||||
|
|
||||||
// RequestURL wraps url.URL to support JSON/YAML (un)marshalling and the
// flag.Value interface for the request target URL.
type RequestURL struct {
	url.URL
}
|
|
||||||
|
|
||||||
func (requestURL *RequestURL) UnmarshalJSON(data []byte) error {
|
|
||||||
var urlStr string
|
|
||||||
if err := json.Unmarshal(data, &urlStr); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
parsedURL, err := url.Parse(urlStr)
|
|
||||||
if err != nil {
|
|
||||||
return errors.New("Request URL is invalid")
|
|
||||||
}
|
|
||||||
|
|
||||||
requestURL.URL = *parsedURL
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (requestURL *RequestURL) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
|
||||||
var urlStr string
|
|
||||||
if err := unmarshal(&urlStr); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
parsedURL, err := url.Parse(urlStr)
|
|
||||||
if err != nil {
|
|
||||||
return errors.New("Request URL is invalid")
|
|
||||||
}
|
|
||||||
|
|
||||||
requestURL.URL = *parsedURL
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (requestURL RequestURL) MarshalJSON() ([]byte, error) {
|
|
||||||
return json.Marshal(requestURL.URL.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
func (requestURL RequestURL) String() string {
|
|
||||||
return requestURL.URL.String()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (requestURL *RequestURL) Set(value string) error {
|
|
||||||
parsedURL, err := url.Parse(value)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
requestURL.URL = *parsedURL
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
@@ -1,57 +0,0 @@
|
|||||||
package types
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"errors"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Timeout wraps time.Duration to accept either a bare JSON/YAML number
// (interpreted as nanoseconds) or a duration string such as "400ms" or
// "5m" when decoding.
type Timeout struct {
	time.Duration
}
|
|
||||||
|
|
||||||
func (timeout *Timeout) UnmarshalJSON(b []byte) error {
|
|
||||||
var v any
|
|
||||||
if err := json.Unmarshal(b, &v); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
switch value := v.(type) {
|
|
||||||
case float64:
|
|
||||||
timeout.Duration = time.Duration(value)
|
|
||||||
return nil
|
|
||||||
case string:
|
|
||||||
var err error
|
|
||||||
timeout.Duration, err = time.ParseDuration(value)
|
|
||||||
if err != nil {
|
|
||||||
return errors.New("Timeout is invalid (e.g. 400ms, 1s, 5m, 1h)")
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
default:
|
|
||||||
return errors.New("Timeout is invalid (e.g. 400ms, 1s, 5m, 1h)")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (timeout Timeout) MarshalJSON() ([]byte, error) {
|
|
||||||
return json.Marshal(timeout.Duration.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
func (timeout *Timeout) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
|
||||||
var v any
|
|
||||||
if err := unmarshal(&v); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
switch value := v.(type) {
|
|
||||||
case float64:
|
|
||||||
timeout.Duration = time.Duration(value)
|
|
||||||
return nil
|
|
||||||
case string:
|
|
||||||
var err error
|
|
||||||
timeout.Duration, err = time.ParseDuration(value)
|
|
||||||
if err != nil {
|
|
||||||
return errors.New("Timeout is invalid (e.g. 400ms, 1s, 5m, 1h)")
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
default:
|
|
||||||
return errors.New("Timeout is invalid (e.g. 400ms, 1s, 5m, 1h)")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
package utils
|
|
||||||
|
|
||||||
// IsNilOrZero reports whether the pointer is nil or points at the zero
// value of its type.
func IsNilOrZero[T comparable](value *T) bool {
	if value == nil {
		return true
	}
	// The comparison result is the answer directly; no if/else needed.
	var zero T
	return *value == zero
}
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
package utils
|
|
||||||
|
|
||||||
// ToPtr returns a pointer to a copy of v, useful for taking the address
// of literals and function results.
func ToPtr[T any](v T) *T {
	ptr := &v
	return ptr
}
|
|
||||||
21
utils/int.go
21
utils/int.go
@@ -1,21 +0,0 @@
|
|||||||
package utils
|
|
||||||
|
|
||||||
// Number constrains NumLen to the built-in signed integer types.
type Number interface {
	int | int8 | int16 | int32 | int64
}
|
|
||||||
|
|
||||||
func NumLen[T Number](n T) T {
|
|
||||||
if n < 0 {
|
|
||||||
n = -n
|
|
||||||
}
|
|
||||||
if n == 0 {
|
|
||||||
return 1
|
|
||||||
}
|
|
||||||
|
|
||||||
var count T = 0
|
|
||||||
for n > 0 {
|
|
||||||
n /= 10
|
|
||||||
count++
|
|
||||||
}
|
|
||||||
return count
|
|
||||||
}
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
package utils
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
|
|
||||||
"github.com/jedib0t/go-pretty/v6/text"
|
|
||||||
)
|
|
||||||
|
|
||||||
func PrintErr(err error) {
|
|
||||||
fmt.Fprintln(os.Stderr, text.FgRed.Sprint(err.Error()))
|
|
||||||
}
|
|
||||||
|
|
||||||
func PrintErrAndExit(err error) {
|
|
||||||
if err != nil {
|
|
||||||
PrintErr(err)
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// PrintAndExit writes the message to stdout and terminates the process
// successfully (exit code 0).
func PrintAndExit(message string) {
	fmt.Println(message)
	os.Exit(0)
}
|
|
||||||
@@ -1,41 +0,0 @@
|
|||||||
package utils
|
|
||||||
|
|
||||||
import "math/rand"
|
|
||||||
|
|
||||||
// Flatten concatenates the inner slices of nested into a single slice,
// preserving order.
func Flatten[T any](nested [][]*T) []*T {
	// Pre-size the result to the known total length to avoid repeated
	// append growth copies.
	total := 0
	for _, inner := range nested {
		total += len(inner)
	}

	flattened := make([]*T, 0, total)
	for _, inner := range nested {
		flattened = append(flattened, inner...)
	}
	return flattened
}
|
|
||||||
|
|
||||||
// RandomValueCycle returns a function that cycles through the provided slice of values
|
|
||||||
// in a random order. Each call to the returned function will yield a value from the slice.
|
|
||||||
// The order of values is determined by the provided random number generator.
|
|
||||||
//
|
|
||||||
// The returned function will cycle through the values in a random order until all values
|
|
||||||
// have been returned at least once. After all values have been returned, the function will
|
|
||||||
// reset and start cycling through the values in a random order again.
|
|
||||||
// The returned function isn't thread-safe and should be used in a single-threaded context.
|
|
||||||
func RandomValueCycle[Value any](values []Value, localRand *rand.Rand) func() Value {
|
|
||||||
var (
|
|
||||||
clientsCount int = len(values)
|
|
||||||
currentIndex int = localRand.Intn(clientsCount)
|
|
||||||
stopIndex int = currentIndex
|
|
||||||
)
|
|
||||||
|
|
||||||
return func() Value {
|
|
||||||
client := values[currentIndex]
|
|
||||||
currentIndex++
|
|
||||||
if currentIndex == clientsCount {
|
|
||||||
currentIndex = 0
|
|
||||||
}
|
|
||||||
if currentIndex == stopIndex {
|
|
||||||
currentIndex = localRand.Intn(clientsCount)
|
|
||||||
stopIndex = currentIndex
|
|
||||||
}
|
|
||||||
|
|
||||||
return client
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
package utils
|
|
||||||
|
|
||||||
import "time"
|
|
||||||
|
|
||||||
func DurationRoundBy(duration time.Duration, n int64) time.Duration {
|
|
||||||
if durationLen := NumLen(duration.Nanoseconds()); durationLen > n {
|
|
||||||
roundNum := 1
|
|
||||||
for range durationLen - n {
|
|
||||||
roundNum *= 10
|
|
||||||
}
|
|
||||||
return duration.Round(time.Duration(roundNum))
|
|
||||||
}
|
|
||||||
return duration
|
|
||||||
}
|
|
||||||
Reference in New Issue
Block a user