diff --git a/Makefile b/Makefile
index 252ca05..a72b58b 100644
--- a/Makefile
+++ b/Makefile
@@ -5,7 +5,7 @@ test:
 
 build:
-	GODEBUG=cgocheck=0 go build -o dist/tilty
+	GOOS=linux GOARCH=arm GODEBUG=cgocheck=0 go build -o dist/tilty
 
 run:
 	sudo ./dist/tilty -c test.ini
 
diff --git a/README.md b/README.md
index 1b981e9..040a5b9 100644
--- a/README.md
+++ b/README.md
@@ -5,6 +5,7 @@ Tilty
 [![Docker Pulls](https://img.shields.io/docker/pulls/myoung34/tilty.svg)](https://hub.docker.com/r/myoung34/tilty)
 
 ![](assets/datadog.png)
+![](assets/influxdb.png)
 
 A CLI to capture and emit events from your [tilt hydrometer](https://tilthydrometer.com/)
 
@@ -25,6 +26,7 @@ The Tilt supports writing to a google doc which you could use with something lik
 * Generic (Send to any endpoint with any type)
 * Brewstat.us (Example below)
 * BrewersFriend (Example below)
+* InfluxDB (1.8+)
 * Datadog (dogstatsd)
 * SQLite
 
@@ -87,6 +89,15 @@ enabled = true
 statsd_host = "statsdhost.corp.com"
 statsd_port = 8125
 
+[influxdb]
+url = "http://localhost:8086"
+verify_ssl = true
+bucket = "tilty"
+org = "Mine"
+token = "myuser:password"
+gravity_payload_template = "gravity,color={{.Color}},mac={{.Mac}} sg={{.Gravity}}"
+temperature_payload_template = "temperature,color={{.Color}},mac={{.Mac}} temp={{.Temp}}"
+
 ```
 
 ### Run ###
diff --git a/assets/influxdb.png b/assets/influxdb.png
new file mode 100644
index 0000000..0290c89
Binary files /dev/null and b/assets/influxdb.png differ
diff --git a/emitters/influxdb.go b/emitters/influxdb.go
new file mode 100644
index 0000000..3c4121b
--- /dev/null
+++ b/emitters/influxdb.go
@@ -0,0 +1,82 @@
+package emitters
+
+import (
+	"bytes"
+	"context"
+	"crypto/tls"
+	"encoding/json"
+	"github.com/go-kit/log/level"
+	influxdb2 "github.com/influxdata/influxdb-client-go/v2"
+	"github.com/myoung34/tilty/tilt"
+	"strconv"
+	"text/template"
+)
+
+type InfluxDB struct {
+	Enabled                    bool
+	URL                        string `json:"url"`
+	VerifySSL                  bool   `json:"verify_ssl"`
+	Bucket                     string `json:"bucket"`
+	Org                        string `json:"org"`
+	Token                      string `json:"token"`
+	GravityPayloadTemplate     string `json:"gravity_payload_template"`
+	TemperaturePayloadTemplate string `json:"temperature_payload_template"`
+}
+
+// gravity_payload_template = gravity,color={{.Color}},mac={{.Mac}} sg={{.Gravity}}
+// temperature_payload_template = temperature,color={{.Color}},mac={{.Mac}} temp={{.Temp}}
+
+func InfluxDBEmit(payload tilt.Payload, emitterConfig interface{}) (string, error) {
+	influxdb := InfluxDB{}
+	jsonString, _ := json.Marshal(emitterConfig)
+	json.Unmarshal(jsonString, &influxdb)
+
+	client := influxdb2.NewClientWithOptions(influxdb.URL, influxdb.Token,
+		influxdb2.DefaultOptions().
+			SetTLSConfig(&tls.Config{
+				InsecureSkipVerify: !influxdb.VerifySSL,
+			}))
+	writeAPI := client.WriteAPIBlocking(influxdb.Org, influxdb.Bucket)
+
+	payloadTemplate := Template{
+		Color:     payload.Color,
+		Gravity:   strconv.Itoa(int(payload.Minor)),
+		Mac:       payload.Mac,
+		Temp:      strconv.Itoa(int(payload.Major)),
+		Timestamp: payload.Timestamp,
+	}
+
+	// Generate the gravity body from a template
+	gravityTmpl, err := template.New("influxdb").Parse(`gravity,color={{.Color}},mac={{.Mac}} sg={{.Gravity}}`)
+	if len(influxdb.GravityPayloadTemplate) > 0 {
+		gravityTmpl, err = template.New("influxdb").Parse(influxdb.GravityPayloadTemplate)
+	}
+	if err != nil {
+		level.Error(tilt.Logger).Log("emitters.influxdb", err)
+		return "", err
+	}
+	var gravityTpl bytes.Buffer
+	if err := gravityTmpl.Execute(&gravityTpl, payloadTemplate); err != nil {
+		level.Error(tilt.Logger).Log("emitters.influxdb", err)
+		return "", err
+	}
+	writeAPI.WriteRecord(context.Background(), gravityTpl.String())
+
+	// Generate the temperature body from a template
+	temperatureTmpl, err := template.New("influxdb").Parse(`temperature,color={{.Color}},mac={{.Mac}} temp={{.Temp}}`)
+	if len(influxdb.TemperaturePayloadTemplate) > 0 {
+		temperatureTmpl, err = template.New("influxdb").Parse(influxdb.TemperaturePayloadTemplate)
+	}
+	if err != nil {
+		level.Error(tilt.Logger).Log("emitters.influxdb", err)
+		return "", err
+	}
+	var temperatureTpl bytes.Buffer
+	if err := temperatureTmpl.Execute(&temperatureTpl, payloadTemplate); err != nil {
+		level.Error(tilt.Logger).Log("emitters.influxdb", err)
+		return "", err
+	}
+	writeAPI.WriteRecord(context.Background(), temperatureTpl.String())
+
+	return "", nil
+}
diff --git a/go.mod b/go.mod
index b9b0819..9fc45b2 100644
--- a/go.mod
+++ b/go.mod
@@ -5,8 +5,9 @@ go 1.19
 require (
 	github.com/DataDog/datadog-go/v5 v5.1.1
 	github.com/akamensky/argparse v1.4.0
-	github.com/go-kit/kit v0.12.0
+	github.com/go-kit/log v0.2.0
 	github.com/go-playground/validator/v10 v10.11.0
+	github.com/influxdata/influxdb-client-go/v2 v2.10.0
 	github.com/jarcoal/httpmock v1.2.0
 	github.com/mattn/go-sqlite3 v1.14.15
 	github.com/myoung34/gatt v0.0.0-20220817003501-ce14497a0f85
@@ -17,17 +18,19 @@ require (
 	github.com/Microsoft/go-winio v0.5.0 // indirect
 	github.com/davecgh/go-spew v1.1.1 // indirect
+	github.com/deepmap/oapi-codegen v1.8.2 // indirect
 	github.com/fsnotify/fsnotify v1.5.4 // indirect
-	github.com/go-kit/log v0.2.0 // indirect
 	github.com/go-logfmt/logfmt v0.5.1 // indirect
 	github.com/go-playground/locales v0.14.0 // indirect
 	github.com/go-playground/universal-translator v0.18.0 // indirect
 	github.com/hashicorp/hcl v1.0.0 // indirect
+	github.com/influxdata/line-protocol v0.0.0-20200327222509-2487e7298839 // indirect
 	github.com/leodido/go-urn v1.2.1 // indirect
 	github.com/magiconair/properties v1.8.6 // indirect
 	github.com/mitchellh/mapstructure v1.5.0 // indirect
 	github.com/pelletier/go-toml v1.9.5 // indirect
 	github.com/pelletier/go-toml/v2 v2.0.1 // indirect
+	github.com/pkg/errors v0.9.1 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
 	github.com/spf13/afero v1.8.2 // indirect
 	github.com/spf13/cast v1.5.0 // indirect
@@ -35,6 +38,7 @@ require (
 	github.com/spf13/pflag v1.0.5 // indirect
 	github.com/subosito/gotenv v1.3.0 // indirect
 	golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4 // indirect
+	golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2 // indirect
 	golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a // indirect
 	golang.org/x/text v0.3.7 // indirect
 	gopkg.in/ini.v1
v1.66.4 // indirect diff --git a/go.sum b/go.sum index aaa42a6..b527621 100644 --- a/go.sum +++ b/go.sum @@ -53,9 +53,13 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/cyberdelia/templates v0.0.0-20141128023046-ca7fffd4298c/go.mod h1:GyV+0YP4qX0UQ7r2MoYZ+AvYDp12OF5yg4q8rGnyNh4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/deepmap/oapi-codegen v1.8.2 h1:SegyeYGcdi0jLLrpbCMoJxnUUn8GBXHsvr4rbzjuhfU= +github.com/deepmap/oapi-codegen v1.8.2/go.mod h1:YLgSKSDv/bZQB7N4ws6luhozi3cEdRktEqrX88CvjIw= +github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= @@ -65,15 +69,18 @@ github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7 github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI= github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= +github.com/getkin/kin-openapi v0.61.0/go.mod h1:7Yn5whZr5kJi6t+kShccXS8ae1APpYTW6yheSwk8Yi4= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-chi/chi/v5 v5.0.0/go.mod h1:BBug9lr0cqtdAhsu6R4AAdvufI0/XBzAQSsUqJpoZOs= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-kit/kit v0.12.0 h1:e4o3o3IsBfAKQh5Qbbiqyfu97Ku7jrO/JbohvztANh4= -github.com/go-kit/kit v0.12.0/go.mod h1:lHd+EkCZPIwYItmGDDRdhinkzX2A1sj+M9biaEaizzs= github.com/go-kit/log v0.2.0 h1:7i2K3eKTos3Vc0enKCfnVcgHh2olr/MyfboYq7cAcFw= github.com/go-kit/log v0.2.0/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= github.com/go-logfmt/logfmt v0.5.1 h1:otpy5pqBCBZ1ng9RQ0dPu4PN7ba75Y/aA+UpowDyNVA= github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= +github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A= github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU= @@ -108,6 +115,7 @@ github.com/golang/protobuf v1.4.0/go.mod 
h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvq github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golangci/lint-1 v0.0.0-20181222135242-d2cdd8c08219/go.mod h1:/X8TswGSh1pIozq4ZwCfxS0WA5JGXguxk94ar/4c87Y= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -138,12 +146,17 @@ github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= +github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/influxdata/influxdb-client-go/v2 v2.10.0 h1:bWCwNsp0KxBioW9PTG7LPk7/uXj2auHezuUMpztbpZY= +github.com/influxdata/influxdb-client-go/v2 v2.10.0/go.mod h1:x7Jo5UHHl+w8wu8UnGiNobDDHygojXwJX4mx7rXGKMk= +github.com/influxdata/line-protocol v0.0.0-20200327222509-2487e7298839 h1:W9WBk7wlPfJLvMCdtV4zPulc4uCPrlywQOmbFOhgQNU= +github.com/influxdata/line-protocol v0.0.0-20200327222509-2487e7298839/go.mod h1:xaLFMmpvUxqXtVkUJfg9QmT88cDaCJ3ZKgdZ78oO8Qo= github.com/jarcoal/httpmock v1.2.0 h1:gSvTxxFR/MEMfsGrvRbdfpRUMBStovlSRLw0Ep1bwwc= github.com/jarcoal/httpmock v1.2.0/go.mod h1:oCoTsnAz4+UoOUIf5lJOWV2QQIW5UoeUI6aM2YnWAZk= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= @@ -158,10 +171,21 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/labstack/echo/v4 v4.2.1/go.mod h1:AA49e0DZ8kk5jTOOCKNuPR6oTnBS0dYiM4FW1e6jwpg= +github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod 
h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/matryer/moq v0.0.0-20190312154309-6cfb0558e1bd/go.mod h1:9ELz6aaclSIGnZBoaSLZ3NAl1VTufbOrXBPvtcy6WiQ= +github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.7/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-sqlite3 v1.14.15 h1:vfoHhTN1af61xCRSWzFIWzx2YskyMTwHLrExkBOjvxI= github.com/mattn/go-sqlite3 v1.14.15/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= github.com/maxatome/go-testdeep v1.11.0 h1:Tgh5efyCYyJFGUYiT0qxBSIDeXw0F5zSoatlou685kk= @@ -174,6 +198,8 @@ github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCko github.com/pelletier/go-toml/v2 v2.0.1 h1:8e3L2cCQzLFi2CR4g7vGFuFxX7Jl1kKX8gW+iV0GUKU= github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= @@ -197,6 +223,7 @@ github.com/spf13/viper v1.12.0/go.mod h1:b6COn30jlNxbm/V2IqWiNWkJ+vZNiMNksliPCiu github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= @@ -205,6 +232,9 @@ github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMT github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/subosito/gotenv v1.3.0 h1:mjC+YW8QpAdXibNi+vNWgzmgBH4+5l5dCXv8cNysBLI= github.com/subosito/gotenv v1.3.0/go.mod h1:YzJjq/33h7nrwdY+iHMhEOEEbW0ovIz0tB6t6PwAXzs= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= +github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -221,6 +251,8 @@ 
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= @@ -290,9 +322,12 @@ golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2 h1:NWy5+hlRbC7HK+PmcXVUmW1IMyFce7to56IUvhUFm7Y= +golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -315,6 +350,7 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -322,11 +358,13 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -339,6 +377,7 @@ golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -356,6 +395,7 @@ golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a h1:dGzPydgVsqGcTRVwiLJ1jVbufYwmzD3LfVPLKsKg+0k= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -363,12 +403,15 @@ golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3 golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod 
h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= @@ -517,6 +560,7 @@ gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/ini.v1 v1.66.4 h1:SsAcf+mM7mRZo2nJNGt8mZCjG8ZRaNGMURJw7BsIST4= gopkg.in/ini.v1 v1.66.4/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/main.go b/main.go index be9fbc8..ca7f480 100644 --- a/main.go +++ b/main.go @@ -21,9 +21,10 @@ var config = tilt.Config{} var validate = validator.New() var EmittersMap = map[string]interface{}{ - "webhook.emit": emitters.WebhookEmit, - "sqlite.emit": emitters.SQLiteEmit, - "datadog.emit": emitters.DatadogEmit, + "webhook.emit": emitters.WebhookEmit, + "sqlite.emit": emitters.SQLiteEmit, + "datadog.emit": emitters.DatadogEmit, + "influxdb.emit": emitters.InfluxDBEmit, } func main() { diff --git a/vendor/github.com/deepmap/oapi-codegen/LICENSE b/vendor/github.com/deepmap/oapi-codegen/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/vendor/github.com/deepmap/oapi-codegen/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bind.go b/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bind.go new file mode 100644 index 0000000..3e2a689 --- /dev/null +++ b/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bind.go @@ -0,0 +1,24 @@ +// Copyright 2021 DeepMap, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+package runtime + +// Binder is the interface implemented by types that can be bound to a query string or a parameter string +// The input can be assumed to be a valid string. If you define a Bind method you are responsible for all +// data being completely bound to the type. +// +// By convention, to approximate the behavior of Bind functions themselves, +// Binder implements Bind("") as a no-op. +type Binder interface { + Bind(src string) error +} \ No newline at end of file diff --git a/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindparam.go b/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindparam.go new file mode 100644 index 0000000..751cc7d --- /dev/null +++ b/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindparam.go @@ -0,0 +1,502 @@ +// Copyright 2019 DeepMap, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package runtime + +import ( + "encoding" + "encoding/json" + "fmt" + "net/url" + "reflect" + "strings" + "time" + + "github.com/pkg/errors" + + "github.com/deepmap/oapi-codegen/pkg/types" +) + +// This function binds a parameter as described in the Path Parameters +// section here to a Go object: +// https://swagger.io/docs/specification/serialization/ +// It is a backward compatible function to clients generated with codegen +// up to version v1.5.5. v1.5.6+ calls the function below. +func BindStyledParameter(style string, explode bool, paramName string, + value string, dest interface{}) error { + return BindStyledParameterWithLocation(style, explode, paramName, ParamLocationUndefined, value, dest) +} + +// This function binds a parameter as described in the Path Parameters +// section here to a Go object: +// https://swagger.io/docs/specification/serialization/ +func BindStyledParameterWithLocation(style string, explode bool, paramName string, + paramLocation ParamLocation, value string, dest interface{}) error { + + if value == "" { + return fmt.Errorf("parameter '%s' is empty, can't bind its value", paramName) + } + + // Based on the location of the parameter, we need to unescape it properly. + var err error + switch paramLocation { + case ParamLocationQuery, ParamLocationUndefined: + // We unescape undefined parameter locations here for older generated code, + // since prior to this refactoring, they always query unescaped. + value, err = url.QueryUnescape(value) + if err != nil { + return fmt.Errorf("error unescaping query parameter '%s': %v", paramName, err) + } + case ParamLocationPath: + value, err = url.PathUnescape(value) + if err != nil { + return fmt.Errorf("error unescaping path parameter '%s': %v", paramName, err) + } + default: + // Headers and cookies aren't escaped. 
+ } + + // If the destination implements encoding.TextUnmarshaler we use it for binding + if tu, ok := dest.(encoding.TextUnmarshaler); ok { + if err := tu.UnmarshalText([]byte(value)); err != nil { + return fmt.Errorf("error unmarshaling '%s' text as %T: %s", value, dest, err) + } + + return nil + } + + // Everything comes in by pointer, dereference it + v := reflect.Indirect(reflect.ValueOf(dest)) + + // This is the basic type of the destination object. + t := v.Type() + + if t.Kind() == reflect.Struct { + // We've got a destination object, we'll create a JSON representation + // of the input value, and let the json library deal with the unmarshaling + parts, err := splitStyledParameter(style, explode, true, paramName, value) + if err != nil { + return err + } + + return bindSplitPartsToDestinationStruct(paramName, parts, explode, dest) + } + + if t.Kind() == reflect.Slice { + // Chop up the parameter into parts based on its style + parts, err := splitStyledParameter(style, explode, false, paramName, value) + if err != nil { + return fmt.Errorf("error splitting input '%s' into parts: %s", value, err) + } + + return bindSplitPartsToDestinationArray(parts, dest) + } + + // Try to bind the remaining types as a base type. + return BindStringToObject(value, dest) +} + +// This is a complex set of operations, but each given parameter style can be +// packed together in multiple ways, using different styles of separators, and +// different packing strategies based on the explode flag. This function takes +// as input any parameter format, and unpacks it to a simple list of strings +// or key-values which we can then treat generically. +// Why, oh why, great Swagger gods, did you have to make this so complicated? +func splitStyledParameter(style string, explode bool, object bool, paramName string, value string) ([]string, error) { + switch style { + case "simple": + // In the simple case, we always split on comma + parts := strings.Split(value, ",") + return parts, nil + case "label": + // In the label case, it's more tricky. In the no explode case, we have + // /users/.3,4,5 for arrays + // /users/.role,admin,firstName,Alex for objects + // in the explode case, we have: + // /users/.3.4.5 + // /users/.role=admin.firstName=Alex + if explode { + // In the exploded case, split everything on periods. + parts := strings.Split(value, ".") + // The first part should be an empty string because we have a + // leading period. + if parts[0] != "" { + return nil, fmt.Errorf("invalid format for label parameter '%s', should start with '.'", paramName) + } + return parts[1:], nil + + } else { + // In the unexploded case, we strip off the leading period. + if value[0] != '.' { + return nil, fmt.Errorf("invalid format for label parameter '%s', should start with '.'", paramName) + } + // The rest is comma separated. + return strings.Split(value[1:], ","), nil + } + + case "matrix": + if explode { + // In the exploded case, we break everything up on semicolon + parts := strings.Split(value, ";") + // The first part should always be empty string, since we started + // with ;something + if parts[0] != "" { + return nil, fmt.Errorf("invalid format for matrix parameter '%s', should start with ';'", paramName) + } + parts = parts[1:] + // Now, if we have an object, we just have a list of x=y statements. + // for a non-object, like an array, we have id=x, id=y. id=z, etc, + // so we need to strip the prefix from each of them. 
+ if !object { + prefix := paramName + "=" + for i := range parts { + parts[i] = strings.TrimPrefix(parts[i], prefix) + } + } + return parts, nil + } else { + // In the unexploded case, parameters will start with ;paramName= + prefix := ";" + paramName + "=" + if !strings.HasPrefix(value, prefix) { + return nil, fmt.Errorf("expected parameter '%s' to start with %s", paramName, prefix) + } + str := strings.TrimPrefix(value, prefix) + return strings.Split(str, ","), nil + } + case "form": + var parts []string + if explode { + parts = strings.Split(value, "&") + if !object { + prefix := paramName + "=" + for i := range parts { + parts[i] = strings.TrimPrefix(parts[i], prefix) + } + } + return parts, nil + } else { + parts = strings.Split(value, ",") + prefix := paramName + "=" + for i := range parts { + parts[i] = strings.TrimPrefix(parts[i], prefix) + } + } + return parts, nil + } + + return nil, fmt.Errorf("unhandled parameter style: %s", style) +} + +// Given a set of values as a slice, create a slice to hold them all, and +// assign to each one by one. +func bindSplitPartsToDestinationArray(parts []string, dest interface{}) error { + // Everything comes in by pointer, dereference it + v := reflect.Indirect(reflect.ValueOf(dest)) + + // This is the basic type of the destination object. + t := v.Type() + + // We've got a destination array, bind each object one by one. + // This generates a slice of the correct element type and length to + // hold all the parts. + newArray := reflect.MakeSlice(t, len(parts), len(parts)) + for i, p := range parts { + err := BindStringToObject(p, newArray.Index(i).Addr().Interface()) + if err != nil { + return fmt.Errorf("error setting array element: %s", err) + } + } + v.Set(newArray) + return nil +} + +// Given a set of chopped up parameter parts, bind them to a destination +// struct. The exploded parameter controls whether we send key value pairs +// in the exploded case, or a sequence of values which are interpreted as +// tuples. +// Given the struct Id { firstName string, role string }, as in the canonical +// swagger examples, in the exploded case, we would pass +// ["firstName=Alex", "role=admin"], where in the non-exploded case, we would +// pass "firstName", "Alex", "role", "admin"] +// +// We punt the hard work of binding these values to the object to the json +// library. We'll turn those arrays into JSON strings, and unmarshal +// into the struct. 
+func bindSplitPartsToDestinationStruct(paramName string, parts []string, explode bool, dest interface{}) error { + // We've got a destination object, we'll create a JSON representation + // of the input value, and let the json library deal with the unmarshaling + var fields []string + if explode { + fields = make([]string, len(parts)) + for i, property := range parts { + propertyParts := strings.Split(property, "=") + if len(propertyParts) != 2 { + return fmt.Errorf("parameter '%s' has invalid exploded format", paramName) + } + fields[i] = "\"" + propertyParts[0] + "\":\"" + propertyParts[1] + "\"" + } + } else { + if len(parts)%2 != 0 { + return fmt.Errorf("parameter '%s' has invalid format, property/values need to be pairs", paramName) + } + fields = make([]string, len(parts)/2) + for i := 0; i < len(parts); i += 2 { + key := parts[i] + value := parts[i+1] + fields[i/2] = "\"" + key + "\":\"" + value + "\"" + } + } + jsonParam := "{" + strings.Join(fields, ",") + "}" + err := json.Unmarshal([]byte(jsonParam), dest) + if err != nil { + return fmt.Errorf("error binding parameter %s fields: %s", paramName, err) + } + return nil +} + +// This works much like BindStyledParameter, however it takes a query argument +// input array from the url package, since query arguments come through a +// different path than the styled arguments. They're also exceptionally fussy. +// For example, consider the exploded and unexploded form parameter examples: +// (exploded) /users?role=admin&firstName=Alex +// (unexploded) /users?id=role,admin,firstName,Alex +// +// In the first case, we can pull the "id" parameter off the context, +// and unmarshal via json as an intermediate. Easy. In the second case, we +// don't have the id QueryParam present, but must find "role", and "firstName". +// what if there is another parameter similar to "ID" named "role"? We can't +// tell them apart. This code tries to fail, but the moral of the story is that +// you shouldn't pass objects via form styled query arguments, just use +// the Content parameter form. +func BindQueryParameter(style string, explode bool, required bool, paramName string, + queryParams url.Values, dest interface{}) error { + + // dv = destination value. + dv := reflect.Indirect(reflect.ValueOf(dest)) + + // intermediate value form which is either dv or dv dereferenced. + v := dv + + // inner code will bind the string's value to this interface. + var output interface{} + + if required { + // If the parameter is required, then the generated code will pass us + // a pointer to it: &int, &object, and so forth. We can directly set + // them. + output = dest + } else { + // For optional parameters, we have an extra indirect. An optional + // parameter of type "int" will be *int on the struct. We pass that + // in by pointer, and have **int. + + // If the destination, is a nil pointer, we need to allocate it. + if v.IsNil() { + t := v.Type() + newValue := reflect.New(t.Elem()) + // for now, hang onto the output buffer separately from destination, + // as we don't want to write anything to destination until we can + // unmarshal successfully, and check whether a field is required. + output = newValue.Interface() + } else { + // If the destination isn't nil, just use that. + output = v.Interface() + } + + // Get rid of that extra indirect as compared to the required case, + // so the code below doesn't have to care. + v = reflect.Indirect(reflect.ValueOf(output)) + } + + // This is the basic type of the destination object. 
+ t := v.Type() + k := t.Kind() + + switch style { + case "form": + var parts []string + if explode { + // ok, the explode case in query arguments is very, very annoying, + // because an exploded object, such as /users?role=admin&firstName=Alex + // isn't actually present in the parameter array. We have to do + // different things based on destination type. + values, found := queryParams[paramName] + var err error + + switch k { + case reflect.Slice: + // In the slice case, we simply use the arguments provided by + // http library. + if !found { + if required { + return fmt.Errorf("query parameter '%s' is required", paramName) + } else { + return nil + } + } + err = bindSplitPartsToDestinationArray(values, output) + case reflect.Struct: + // This case is really annoying, and error prone, but the + // form style object binding doesn't tell us which arguments + // in the query string correspond to the object's fields. We'll + // try to bind field by field. + err = bindParamsToExplodedObject(paramName, queryParams, output) + default: + // Primitive object case. We expect to have 1 value to + // unmarshal. + if len(values) == 0 { + if required { + return fmt.Errorf("query parameter '%s' is required", paramName) + } else { + return nil + } + } + if len(values) != 1 { + return fmt.Errorf("multiple values for single value parameter '%s'", paramName) + } + err = BindStringToObject(values[0], output) + } + if err != nil { + return err + } + // If the parameter is required, and we've successfully unmarshaled + // it, this assigns the new object to the pointer pointer. + if !required { + dv.Set(reflect.ValueOf(output)) + } + return nil + } else { + values, found := queryParams[paramName] + if !found { + if required { + return fmt.Errorf("query parameter '%s' is required", paramName) + } else { + return nil + } + } + if len(values) != 1 { + return fmt.Errorf("parameter '%s' is not exploded, but is specified multiple times", paramName) + } + parts = strings.Split(values[0], ",") + } + var err error + switch k { + case reflect.Slice: + err = bindSplitPartsToDestinationArray(parts, output) + case reflect.Struct: + err = bindSplitPartsToDestinationStruct(paramName, parts, explode, output) + default: + if len(parts) == 0 { + if required { + return fmt.Errorf("query parameter '%s' is required", paramName) + } else { + return nil + } + } + if len(parts) != 1 { + return fmt.Errorf("multiple values for single value parameter '%s'", paramName) + } + err = BindStringToObject(parts[0], output) + } + if err != nil { + return err + } + if !required { + dv.Set(reflect.ValueOf(output)) + } + return nil + case "deepObject": + if !explode { + return errors.New("deepObjects must be exploded") + } + return UnmarshalDeepObject(dest, paramName, queryParams) + case "spaceDelimited", "pipeDelimited": + return fmt.Errorf("query arguments of style '%s' aren't yet supported", style) + default: + return fmt.Errorf("style '%s' on parameter '%s' is invalid", style, paramName) + + } +} + +// This function reflects the destination structure, and pulls the value for +// each settable field from the given parameters map. This is to deal with the +// exploded form styled object which may occupy any number of parameter names. +// We don't try to be smart here, if the field exists as a query argument, +// set its value. 
+func bindParamsToExplodedObject(paramName string, values url.Values, dest interface{}) error { + // Dereference pointers to their destination values + binder, v, t := indirect(dest) + if binder != nil { + return BindStringToObject(values.Get(paramName), dest) + } + if t.Kind() != reflect.Struct { + return fmt.Errorf("unmarshaling query arg '%s' into wrong type", paramName) + } + + for i := 0; i < t.NumField(); i++ { + fieldT := t.Field(i) + + // Skip unsettable fields, such as internal ones. + if !v.Field(i).CanSet() { + continue + } + + // Find the json annotation on the field, and use the json specified + // name if available, otherwise, just the field name. + tag := fieldT.Tag.Get("json") + fieldName := fieldT.Name + if tag != "" { + tagParts := strings.Split(tag, ",") + name := tagParts[0] + if name != "" { + fieldName = name + } + } + + // At this point, we look up field name in the parameter list. + fieldVal, found := values[fieldName] + if found { + if len(fieldVal) != 1 { + return fmt.Errorf("field '%s' specified multiple times for param '%s'", fieldName, paramName) + } + err := BindStringToObject(fieldVal[0], v.Field(i).Addr().Interface()) + if err != nil { + return fmt.Errorf("could not bind query arg '%s' to request object: %s'", paramName, err) + } + } + } + return nil +} + +// indirect +func indirect(dest interface{}) (interface{}, reflect.Value, reflect.Type) { + v := reflect.ValueOf(dest) + if v.Type().NumMethod() > 0 && v.CanInterface() { + if u, ok := v.Interface().(Binder); ok { + return u, reflect.Value{}, nil + } + } + v = reflect.Indirect(v) + t := v.Type() + // special handling for custom types which might look like an object. We + // don't want to use object binding on them, but rather treat them as + // primitive types. time.Time{} is a unique case since we can't add a Binder + // to it without changing the underlying generated code. + if t.ConvertibleTo(reflect.TypeOf(time.Time{})) { + return dest, reflect.Value{}, nil + } + if t.ConvertibleTo(reflect.TypeOf(types.Date{})) { + return dest, reflect.Value{}, nil + } + return nil, v, t +} diff --git a/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindstring.go b/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindstring.go new file mode 100644 index 0000000..e75964b --- /dev/null +++ b/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindstring.go @@ -0,0 +1,143 @@ +// Copyright 2019 DeepMap, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package runtime + +import ( + "errors" + "fmt" + "reflect" + "strconv" + "time" + + "github.com/deepmap/oapi-codegen/pkg/types" +) + +// This function takes a string, and attempts to assign it to the destination +// interface via whatever type conversion is necessary. We have to do this +// via reflection instead of a much simpler type switch so that we can handle +// type aliases. This function was the easy way out, the better way, since we +// know the destination type each place that we use this, is to generate code +// to read each specific type. 
+func BindStringToObject(src string, dst interface{}) error { + var err error + + v := reflect.ValueOf(dst) + t := reflect.TypeOf(dst) + + // We need to dereference pointers + if t.Kind() == reflect.Ptr { + v = reflect.Indirect(v) + t = v.Type() + } + + // The resulting type must be settable. reflect will catch issues like + // passing the destination by value. + if !v.CanSet() { + return errors.New("destination is not settable") + } + + switch t.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + var val int64 + val, err = strconv.ParseInt(src, 10, 64) + if err == nil { + v.SetInt(val) + } + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + var val uint64 + val, err = strconv.ParseUint(src, 10, 64) + if err == nil { + v.SetUint(val) + } + case reflect.String: + v.SetString(src) + err = nil + case reflect.Float64, reflect.Float32: + var val float64 + val, err = strconv.ParseFloat(src, 64) + if err == nil { + v.SetFloat(val) + } + case reflect.Bool: + var val bool + val, err = strconv.ParseBool(src) + if err == nil { + v.SetBool(val) + } + case reflect.Struct: + // if this is not of type Time or of type Date look to see if this is of type Binder. + if dstType, ok := dst.(Binder); ok { + return dstType.Bind(src) + } + + if t.ConvertibleTo(reflect.TypeOf(time.Time{})) { + // Don't fail on empty string. + if src == "" { + return nil + } + // Time is a special case of a struct that we handle + parsedTime, err := time.Parse(time.RFC3339Nano, src) + if err != nil { + parsedTime, err = time.Parse(types.DateFormat, src) + if err != nil { + return fmt.Errorf("error parsing '%s' as RFC3339 or 2006-01-02 time: %s", src, err) + } + } + // So, assigning this gets a little fun. We have a value to the + // dereference destination. We can't do a conversion to + // time.Time because the result isn't assignable, so we need to + // convert pointers. + if t != reflect.TypeOf(time.Time{}) { + vPtr := v.Addr() + vtPtr := vPtr.Convert(reflect.TypeOf(&time.Time{})) + v = reflect.Indirect(vtPtr) + } + v.Set(reflect.ValueOf(parsedTime)) + return nil + } + + if t.ConvertibleTo(reflect.TypeOf(types.Date{})) { + // Don't fail on empty string. + if src == "" { + return nil + } + parsedTime, err := time.Parse(types.DateFormat, src) + if err != nil { + return fmt.Errorf("error parsing '%s' as date: %s", src, err) + } + parsedDate := types.Date{Time: parsedTime} + + // We have to do the same dance here to assign, just like with times + // above. + if t != reflect.TypeOf(types.Date{}) { + vPtr := v.Addr() + vtPtr := vPtr.Convert(reflect.TypeOf(&types.Date{})) + v = reflect.Indirect(vtPtr) + } + v.Set(reflect.ValueOf(parsedDate)) + return nil + } + + // We fall through to the error case below if we haven't handled the + // destination type above. + fallthrough + default: + // We've got a bunch of types unimplemented, don't fail silently. 
+ err = fmt.Errorf("can not bind to destination of type: %s", t.Kind()) + } + if err != nil { + return fmt.Errorf("error binding string parameter: %s", err) + } + return nil +} diff --git a/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/deepobject.go b/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/deepobject.go new file mode 100644 index 0000000..e13c795 --- /dev/null +++ b/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/deepobject.go @@ -0,0 +1,357 @@ +package runtime + +import ( + "encoding/json" + "fmt" + "github.com/deepmap/oapi-codegen/pkg/types" + "net/url" + "reflect" + "sort" + "strconv" + "strings" + "time" + + "github.com/pkg/errors" +) + +func marshalDeepObject(in interface{}, path []string) ([]string, error) { + var result []string + + switch t := in.(type) { + case []interface{}: + // For the array, we will use numerical subscripts of the form [x], + // in the same order as the array. + for i, iface := range t { + newPath := append(path, strconv.Itoa(i)) + fields, err := marshalDeepObject(iface, newPath) + if err != nil { + return nil, errors.Wrap(err, "error traversing array") + } + result = append(result, fields...) + } + case map[string]interface{}: + // For a map, each key (field name) becomes a member of the path, and + // we recurse. First, sort the keys. + keys := make([]string, len(t)) + i := 0 + for k := range t { + keys[i] = k + i++ + } + sort.Strings(keys) + + // Now, for each key, we recursively marshal it. + for _, k := range keys { + newPath := append(path, k) + fields, err := marshalDeepObject(t[k], newPath) + if err != nil { + return nil, errors.Wrap(err, "error traversing map") + } + result = append(result, fields...) + } + default: + // Now, for a concrete value, we will turn the path elements + // into a deepObject style set of subscripts. [a, b, c] turns into + // [a][b][c] + prefix := "[" + strings.Join(path, "][") + "]" + result = []string{ + prefix + fmt.Sprintf("=%v", t), + } + } + return result, nil +} + +func MarshalDeepObject(i interface{}, paramName string) (string, error) { + // We're going to marshal to JSON and unmarshal into an interface{}, + // which will use the json pkg to deal with all the field annotations. We + // can then walk the generic object structure to produce a deepObject. This + // isn't efficient and it would be more efficient to reflect on our own, + // but it's complicated, error-prone code. + buf, err := json.Marshal(i) + if err != nil { + return "", errors.Wrap(err, "failed to marshal input to JSON") + } + var i2 interface{} + err = json.Unmarshal(buf, &i2) + if err != nil { + return "", errors.Wrap(err, "failed to unmarshal JSON") + } + fields, err := marshalDeepObject(i2, nil) + if err != nil { + return "", errors.Wrap(err, "error traversing JSON structure") + } + + // Prefix the param name to each subscripted field. 
+ for i := range fields { + fields[i] = paramName + fields[i] + } + return strings.Join(fields, "&"), nil +} + +type fieldOrValue struct { + fields map[string]fieldOrValue + value string +} + +func (f *fieldOrValue) appendPathValue(path []string, value string) { + fieldName := path[0] + if len(path) == 1 { + f.fields[fieldName] = fieldOrValue{value: value} + return + } + + pv, found := f.fields[fieldName] + if !found { + pv = fieldOrValue{ + fields: make(map[string]fieldOrValue), + } + f.fields[fieldName] = pv + } + pv.appendPathValue(path[1:], value) +} + +func makeFieldOrValue(paths [][]string, values []string) fieldOrValue { + + f := fieldOrValue{ + fields: make(map[string]fieldOrValue), + } + for i := range paths { + path := paths[i] + value := values[i] + f.appendPathValue(path, value) + } + return f +} + +func UnmarshalDeepObject(dst interface{}, paramName string, params url.Values) error { + // Params are all the query args, so we need those that look like + // "paramName["... + var fieldNames []string + var fieldValues []string + searchStr := paramName + "[" + for pName, pValues := range params { + if strings.HasPrefix(pName, searchStr) { + // trim the parameter name from the full name. + pName = pName[len(paramName):] + fieldNames = append(fieldNames, pName) + if len(pValues) != 1 { + return fmt.Errorf("%s has multiple values", pName) + } + fieldValues = append(fieldValues, pValues[0]) + } + } + + // Now, for each field, reconstruct its subscript path and value + paths := make([][]string, len(fieldNames)) + for i, path := range fieldNames { + path = strings.TrimLeft(path, "[") + path = strings.TrimRight(path, "]") + paths[i] = strings.Split(path, "][") + } + + fieldPaths := makeFieldOrValue(paths, fieldValues) + err := assignPathValues(dst, fieldPaths) + if err != nil { + return errors.Wrap(err, "error assigning value to destination") + } + + return nil +} + +// This returns a field name, either using the variable name, or the json +// annotation if that exists. +func getFieldName(f reflect.StructField) string { + n := f.Name + tag, found := f.Tag.Lookup("json") + if found { + // If we have a json field, and the first part of it before the + // first comma is non-empty, that's our field name. + parts := strings.Split(tag, ",") + if parts[0] != "" { + n = parts[0] + } + } + return n +} + +// Create a map of field names that we'll see in the deepObject to reflect +// field indices on the given type. +func fieldIndicesByJsonTag(i interface{}) (map[string]int, error) { + t := reflect.TypeOf(i) + if t.Kind() != reflect.Struct { + return nil, errors.New("expected a struct as input") + } + + n := t.NumField() + fieldMap := make(map[string]int) + for i := 0; i < n; i++ { + field := t.Field(i) + fieldName := getFieldName(field) + fieldMap[fieldName] = i + } + return fieldMap, nil +} + +func assignPathValues(dst interface{}, pathValues fieldOrValue) error { + //t := reflect.TypeOf(dst) + v := reflect.ValueOf(dst) + + iv := reflect.Indirect(v) + it := iv.Type() + + switch it.Kind() { + case reflect.Slice: + sliceLength := len(pathValues.fields) + dstSlice := reflect.MakeSlice(it, sliceLength, sliceLength) + err := assignSlice(dstSlice, pathValues) + if err != nil { + return errors.Wrap(err, "error assigning slice") + } + iv.Set(dstSlice) + return nil + case reflect.Struct: + // Some special types we care about are structs. Handle them + // here. They may be redefined, so we need to do some hoop + // jumping. 
If the types are aliased, we need to type convert + // the pointer, then set the value of the dereference pointer. + + // We check to see if the object implements the Binder interface first. + if dst, isBinder := v.Interface().(Binder); isBinder { + return dst.Bind(pathValues.value) + } + // Then check the legacy types + if it.ConvertibleTo(reflect.TypeOf(types.Date{})) { + var date types.Date + var err error + date.Time, err = time.Parse(types.DateFormat, pathValues.value) + if err != nil { + return errors.Wrap(err, "invalid date format") + } + dst := iv + if it != reflect.TypeOf(types.Date{}) { + // Types are aliased, convert the pointers. + ivPtr := iv.Addr() + aPtr := ivPtr.Convert(reflect.TypeOf(&types.Date{})) + dst = reflect.Indirect(aPtr) + } + dst.Set(reflect.ValueOf(date)) + } + if it.ConvertibleTo(reflect.TypeOf(time.Time{})) { + var tm time.Time + var err error + tm, err = time.Parse(time.RFC3339Nano, pathValues.value) + if err != nil { + // Fall back to parsing it as a date. + tm, err = time.Parse(types.DateFormat, pathValues.value) + if err != nil { + return fmt.Errorf("error parsing tim as RFC3339 or 2006-01-02 time: %s", err) + } + return errors.Wrap(err, "invalid date format") + } + dst := iv + if it != reflect.TypeOf(time.Time{}) { + // Types are aliased, convert the pointers. + ivPtr := iv.Addr() + aPtr := ivPtr.Convert(reflect.TypeOf(&time.Time{})) + dst = reflect.Indirect(aPtr) + } + dst.Set(reflect.ValueOf(tm)) + } + fieldMap, err := fieldIndicesByJsonTag(iv.Interface()) + if err != nil { + return errors.Wrap(err, "failed enumerating fields") + } + for _, fieldName := range sortedFieldOrValueKeys(pathValues.fields) { + fieldValue := pathValues.fields[fieldName] + fieldIndex, found := fieldMap[fieldName] + if !found { + return fmt.Errorf("field [%s] is not present in destination object", fieldName) + } + field := iv.Field(fieldIndex) + err = assignPathValues(field.Addr().Interface(), fieldValue) + if err != nil { + return errors.Wrapf(err, "error assigning field [%s]", fieldName) + } + } + return nil + case reflect.Ptr: + // If we have a pointer after redirecting, it means we're dealing with + // an optional field, such as *string, which was passed in as &foo. We + // will allocate it if necessary, and call ourselves with a different + // interface. 
+ dstVal := reflect.New(it.Elem()) + dstPtr := dstVal.Interface() + err := assignPathValues(dstPtr, pathValues) + iv.Set(dstVal) + return err + case reflect.Bool: + val, err := strconv.ParseBool(pathValues.value) + if err != nil { + return fmt.Errorf("expected a valid bool, got %s", pathValues.value) + } + iv.SetBool(val) + return nil + case reflect.Float32: + val, err := strconv.ParseFloat(pathValues.value, 32) + if err != nil { + return fmt.Errorf("expected a valid float, got %s", pathValues.value) + } + iv.SetFloat(val) + return nil + case reflect.Float64: + val, err := strconv.ParseFloat(pathValues.value, 64) + if err != nil { + return fmt.Errorf("expected a valid float, got %s", pathValues.value) + } + iv.SetFloat(val) + return nil + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + val, err := strconv.ParseInt(pathValues.value, 10, 64) + if err != nil { + return fmt.Errorf("expected a valid int, got %s", pathValues.value) + } + iv.SetInt(val) + return nil + case reflect.String: + iv.SetString(pathValues.value) + return nil + default: + return errors.New("unhandled type: " + it.String()) + } +} + +func assignSlice(dst reflect.Value, pathValues fieldOrValue) error { + // Gather up the values + nValues := len(pathValues.fields) + values := make([]string, nValues) + // We expect to have consecutive array indices in the map + for i := 0; i < nValues; i++ { + indexStr := strconv.Itoa(i) + fv, found := pathValues.fields[indexStr] + if !found { + return errors.New("array deepObjects must have consecutive indices") + } + values[i] = fv.value + } + + // This could be cleaner, but we can call into assignPathValues to + // avoid recreating this logic. + for i := 0; i < nValues; i++ { + dstElem := dst.Index(i).Addr() + err := assignPathValues(dstElem.Interface(), fieldOrValue{value: values[i]}) + if err != nil { + return errors.Wrap(err, "error binding array") + } + } + + return nil +} + +func sortedFieldOrValueKeys(m map[string]fieldOrValue) []string { + keys := make([]string, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + sort.Strings(keys) + return keys +} diff --git a/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/styleparam.go b/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/styleparam.go new file mode 100644 index 0000000..446e42a --- /dev/null +++ b/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/styleparam.go @@ -0,0 +1,390 @@ +// Copyright 2019 DeepMap, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package runtime + +import ( + "fmt" + "net/url" + "reflect" + "sort" + "strconv" + "strings" + "time" + + "github.com/pkg/errors" + + "github.com/deepmap/oapi-codegen/pkg/types" +) + +// Parameter escaping works differently based on where a header is found +type ParamLocation int + +const ( + ParamLocationUndefined ParamLocation = iota + ParamLocationQuery + ParamLocationPath + ParamLocationHeader + ParamLocationCookie +) + +// This function is used by older generated code, and must remain compatible +// with that code. 
It is not to be used in new templates. Please see the +// function below, which can specialize its output based on the location of +// the parameter. +func StyleParam(style string, explode bool, paramName string, value interface{}) (string, error) { + return StyleParamWithLocation(style, explode, paramName, ParamLocationUndefined, value) +} + +// Given an input value, such as a primitive type, array or object, turn it +// into a parameter based on style/explode definition, performing whatever +// escaping is necessary based on parameter location +func StyleParamWithLocation(style string, explode bool, paramName string, paramLocation ParamLocation, value interface{}) (string, error) { + t := reflect.TypeOf(value) + v := reflect.ValueOf(value) + + // Things may be passed in by pointer, we need to dereference, so return + // error on nil. + if t.Kind() == reflect.Ptr { + if v.IsNil() { + return "", fmt.Errorf("value is a nil pointer") + } + v = reflect.Indirect(v) + t = v.Type() + } + + switch t.Kind() { + case reflect.Slice: + n := v.Len() + sliceVal := make([]interface{}, n) + for i := 0; i < n; i++ { + sliceVal[i] = v.Index(i).Interface() + } + return styleSlice(style, explode, paramName, paramLocation, sliceVal) + case reflect.Struct: + return styleStruct(style, explode, paramName, paramLocation, value) + case reflect.Map: + return styleMap(style, explode, paramName, paramLocation, value) + default: + return stylePrimitive(style, explode, paramName, paramLocation, value) + } +} + +func styleSlice(style string, explode bool, paramName string, paramLocation ParamLocation, values []interface{}) (string, error) { + if style == "deepObject" { + if !explode { + return "", errors.New("deepObjects must be exploded") + } + return MarshalDeepObject(values, paramName) + } + + var prefix string + var separator string + + switch style { + case "simple": + separator = "," + case "label": + prefix = "." + if explode { + separator = "." + } else { + separator = "," + } + case "matrix": + prefix = fmt.Sprintf(";%s=", paramName) + if explode { + separator = prefix + } else { + separator = "," + } + case "form": + prefix = fmt.Sprintf("%s=", paramName) + if explode { + separator = "&" + prefix + } else { + separator = "," + } + case "spaceDelimited": + prefix = fmt.Sprintf("%s=", paramName) + if explode { + separator = "&" + prefix + } else { + separator = " " + } + case "pipeDelimited": + prefix = fmt.Sprintf("%s=", paramName) + if explode { + separator = "&" + prefix + } else { + separator = "|" + } + default: + return "", fmt.Errorf("unsupported style '%s'", style) + } + + // We're going to assume here that the array is one of simple types. + var err error + var part string + parts := make([]string, len(values)) + for i, v := range values { + part, err = primitiveToString(v) + part = escapeParameterString(part, paramLocation) + parts[i] = part + if err != nil { + return "", fmt.Errorf("error formatting '%s': %s", paramName, err) + } + } + return prefix + strings.Join(parts, separator), nil +} + +func sortedKeys(strMap map[string]string) []string { + keys := make([]string, len(strMap)) + i := 0 + for k := range strMap { + keys[i] = k + i++ + } + sort.Strings(keys) + return keys +} + +// This is a special case. The struct may be a date or time, in +// which case, marshal it in correct format. 
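To make the style/explode combinations above concrete, here is an illustrative sketch of StyleParamWithLocation on a slice; the expected strings in the comments follow from the rules implemented above and are assumptions, not captured output:

```go
// Sketch only: styling the same slice with different style/explode settings.
package main

import (
	"fmt"

	"github.com/deepmap/oapi-codegen/pkg/runtime"
)

func main() {
	ids := []int{3, 4, 5}

	s, _ := runtime.StyleParamWithLocation("form", false, "id", runtime.ParamLocationQuery, ids)
	fmt.Println(s) // id=3,4,5

	s, _ = runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, ids)
	fmt.Println(s) // id=3&id=4&id=5

	s, _ = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, ids)
	fmt.Println(s) // 3,4,5
}
```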
+func marshalDateTimeValue(value interface{}) (string, bool) { + v := reflect.Indirect(reflect.ValueOf(value)) + t := v.Type() + + if t.ConvertibleTo(reflect.TypeOf(time.Time{})) { + tt := v.Convert(reflect.TypeOf(time.Time{})) + timeVal := tt.Interface().(time.Time) + return timeVal.Format(time.RFC3339Nano), true + } + + if t.ConvertibleTo(reflect.TypeOf(types.Date{})) { + d := v.Convert(reflect.TypeOf(types.Date{})) + dateVal := d.Interface().(types.Date) + return dateVal.Format(types.DateFormat), true + } + + return "", false +} + +func styleStruct(style string, explode bool, paramName string, paramLocation ParamLocation, value interface{}) (string, error) { + + if timeVal, ok := marshalDateTimeValue(value); ok { + styledVal, err := stylePrimitive(style, explode, paramName, paramLocation, timeVal) + if err != nil { + return "", errors.Wrap(err, "failed to style time") + } + return styledVal, nil + } + + if style == "deepObject" { + if !explode { + return "", errors.New("deepObjects must be exploded") + } + return MarshalDeepObject(value, paramName) + } + + // Otherwise, we need to build a dictionary of the struct's fields. Each + // field may only be a primitive value. + v := reflect.ValueOf(value) + t := reflect.TypeOf(value) + fieldDict := make(map[string]string) + + for i := 0; i < t.NumField(); i++ { + fieldT := t.Field(i) + // Find the json annotation on the field, and use the json specified + // name if available, otherwise, just the field name. + tag := fieldT.Tag.Get("json") + fieldName := fieldT.Name + if tag != "" { + tagParts := strings.Split(tag, ",") + name := tagParts[0] + if name != "" { + fieldName = name + } + } + f := v.Field(i) + + // Unset optional fields will be nil pointers, skip over those. + if f.Type().Kind() == reflect.Ptr && f.IsNil() { + continue + } + str, err := primitiveToString(f.Interface()) + if err != nil { + return "", fmt.Errorf("error formatting '%s': %s", paramName, err) + } + fieldDict[fieldName] = str + } + + return processFieldDict(style, explode, paramName, paramLocation, fieldDict) +} + +func styleMap(style string, explode bool, paramName string, paramLocation ParamLocation, value interface{}) (string, error) { + if style == "deepObject" { + if !explode { + return "", errors.New("deepObjects must be exploded") + } + return MarshalDeepObject(value, paramName) + } + + dict, ok := value.(map[string]interface{}) + if !ok { + return "", errors.New("map not of type map[string]interface{}") + } + + fieldDict := make(map[string]string) + for fieldName, value := range dict { + str, err := primitiveToString(value) + if err != nil { + return "", fmt.Errorf("error formatting '%s': %s", paramName, err) + } + fieldDict[fieldName] = str + } + return processFieldDict(style, explode, paramName, paramLocation, fieldDict) +} + +func processFieldDict(style string, explode bool, paramName string, paramLocation ParamLocation, fieldDict map[string]string) (string, error) { + var parts []string + + // This works for everything except deepObject. We'll handle that one + // separately. + if style != "deepObject" { + if explode { + for _, k := range sortedKeys(fieldDict) { + v := escapeParameterString(fieldDict[k], paramLocation) + parts = append(parts, k+"="+v) + } + } else { + for _, k := range sortedKeys(fieldDict) { + v := escapeParameterString(fieldDict[k], paramLocation) + parts = append(parts, k) + parts = append(parts, v) + } + } + } + + var prefix string + var separator string + + switch style { + case "simple": + separator = "," + case "label": + prefix = "." 
+ if explode { + separator = prefix + } else { + separator = "," + } + case "matrix": + if explode { + separator = ";" + prefix = ";" + } else { + separator = "," + prefix = fmt.Sprintf(";%s=", paramName) + } + case "form": + if explode { + separator = "&" + } else { + prefix = fmt.Sprintf("%s=", paramName) + separator = "," + } + case "deepObject": + { + if !explode { + return "", fmt.Errorf("deepObject parameters must be exploded") + } + for _, k := range sortedKeys(fieldDict) { + v := fieldDict[k] + part := fmt.Sprintf("%s[%s]=%s", paramName, k, v) + parts = append(parts, part) + } + separator = "&" + } + default: + return "", fmt.Errorf("unsupported style '%s'", style) + } + + return prefix + strings.Join(parts, separator), nil +} + +func stylePrimitive(style string, explode bool, paramName string, paramLocation ParamLocation, value interface{}) (string, error) { + strVal, err := primitiveToString(value) + if err != nil { + return "", err + } + + var prefix string + switch style { + case "simple": + case "label": + prefix = "." + case "matrix": + prefix = fmt.Sprintf(";%s=", paramName) + case "form": + prefix = fmt.Sprintf("%s=", paramName) + default: + return "", fmt.Errorf("unsupported style '%s'", style) + } + return prefix + escapeParameterString(strVal, paramLocation), nil +} + +// Converts a primitive value to a string. We need to do this based on the +// Kind of an interface, not the Type to work with aliased types. +func primitiveToString(value interface{}) (string, error) { + var output string + + // Values may come in by pointer for optionals, so make sure to dereferene. + v := reflect.Indirect(reflect.ValueOf(value)) + t := v.Type() + kind := t.Kind() + + switch kind { + case reflect.Int8, reflect.Int32, reflect.Int64, reflect.Int: + output = strconv.FormatInt(v.Int(), 10) + case reflect.Float64: + output = strconv.FormatFloat(v.Float(), 'f', -1, 64) + case reflect.Float32: + output = strconv.FormatFloat(v.Float(), 'f', -1, 32) + case reflect.Bool: + if v.Bool() { + output = "true" + } else { + output = "false" + } + case reflect.String: + output = v.String() + default: + return "", fmt.Errorf("unsupported type %s", reflect.TypeOf(value).String()) + } + return output, nil +} + +// This function escapes a parameter value bas on the location of that parameter. +// Query params and path params need different kinds of escaping, while header +// and cookie params seem not to need escaping. 
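As a small sketch of the location-aware escaping described above, the same value is query-escaped, path-escaped, or passed through untouched depending on where the parameter lives; the sample value and expected outputs are assumptions for illustration:

```go
// Sketch only: how escaping differs by parameter location.
package main

import (
	"fmt"

	"github.com/deepmap/oapi-codegen/pkg/runtime"
)

func main() {
	v := "red wine/dry"

	q, _ := runtime.StyleParamWithLocation("form", true, "flavor", runtime.ParamLocationQuery, v)
	fmt.Println(q) // flavor=red+wine%2Fdry

	p, _ := runtime.StyleParamWithLocation("simple", false, "flavor", runtime.ParamLocationPath, v)
	fmt.Println(p) // red%20wine%2Fdry

	h, _ := runtime.StyleParamWithLocation("simple", false, "flavor", runtime.ParamLocationHeader, v)
	fmt.Println(h) // red wine/dry
}
```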
+func escapeParameterString(value string, paramLocation ParamLocation) string { + switch paramLocation { + case ParamLocationQuery: + return url.QueryEscape(value) + case ParamLocationPath: + return url.PathEscape(value) + default: + return value + } +} diff --git a/vendor/github.com/deepmap/oapi-codegen/pkg/types/date.go b/vendor/github.com/deepmap/oapi-codegen/pkg/types/date.go new file mode 100644 index 0000000..bdf94a9 --- /dev/null +++ b/vendor/github.com/deepmap/oapi-codegen/pkg/types/date.go @@ -0,0 +1,30 @@ +package types + +import ( + "encoding/json" + "time" +) + +const DateFormat = "2006-01-02" + +type Date struct { + time.Time +} + +func (d Date) MarshalJSON() ([]byte, error) { + return json.Marshal(d.Time.Format(DateFormat)) +} + +func (d *Date) UnmarshalJSON(data []byte) error { + var dateStr string + err := json.Unmarshal(data, &dateStr) + if err != nil { + return err + } + parsed, err := time.Parse(DateFormat, dateStr) + if err != nil { + return err + } + d.Time = parsed + return nil +} diff --git a/vendor/github.com/deepmap/oapi-codegen/pkg/types/email.go b/vendor/github.com/deepmap/oapi-codegen/pkg/types/email.go new file mode 100644 index 0000000..00a4cf6 --- /dev/null +++ b/vendor/github.com/deepmap/oapi-codegen/pkg/types/email.go @@ -0,0 +1,27 @@ +package types + +import ( + "encoding/json" + "errors" +) + +type Email string + +func (e Email) MarshalJSON() ([]byte, error) { + if !emailRegex.MatchString(string(e)) { + return nil, errors.New("email: failed to pass regex validation") + } + return json.Marshal(string(e)) +} + +func (e *Email) UnmarshalJSON(data []byte) error { + var s string + if err := json.Unmarshal(data, &s); err != nil { + return err + } + if !emailRegex.MatchString(s) { + return errors.New("email: failed to pass regex validation") + } + *e = Email(s) + return nil +} diff --git a/vendor/github.com/deepmap/oapi-codegen/pkg/types/regexes.go b/vendor/github.com/deepmap/oapi-codegen/pkg/types/regexes.go new file mode 100644 index 0000000..94f17df --- /dev/null +++ b/vendor/github.com/deepmap/oapi-codegen/pkg/types/regexes.go @@ -0,0 +1,11 @@ +package types + +import "regexp" + +const ( + emailRegexString = "^(?:(?:(?:(?:[a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(?:\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|(?:(?:\\x22)(?:(?:(?:(?:\\x20|\\x09)*(?:\\x0d\\x0a))?(?:\\x20|\\x09)+)?(?:(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(?:(?:[\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(?:(?:(?:\\x20|\\x09)*(?:\\x0d\\x0a))?(\\x20|\\x09)+)?(?:\\x22))))@(?:(?:(?:[a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(?:(?:[a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])(?:[a-zA-Z]|\\d|-|\\.|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*(?:[a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(?:(?:[a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(?:(?:[a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])(?:[a-zA-Z]|\\d|-|\\.|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*(?:[a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$" +) + +var ( + emailRegex = regexp.MustCompile(emailRegexString) +) diff --git 
a/vendor/github.com/go-kit/kit/log/README.md b/vendor/github.com/go-kit/kit/log/README.md deleted file mode 100644 index 5492dd9..0000000 --- a/vendor/github.com/go-kit/kit/log/README.md +++ /dev/null @@ -1,160 +0,0 @@ -# package log - -**Deprecation notice:** The core Go kit log packages (log, log/level, log/term, and -log/syslog) have been moved to their own repository at github.com/go-kit/log. -The corresponding packages in this directory remain for backwards compatibility. -Their types alias the types and their functions call the functions provided by -the new repository. Using either import path should be equivalent. Prefer the -new import path when practical. - -______ - -`package log` provides a minimal interface for structured logging in services. -It may be wrapped to encode conventions, enforce type-safety, provide leveled -logging, and so on. It can be used for both typical application log events, -and log-structured data streams. - -## Structured logging - -Structured logging is, basically, conceding to the reality that logs are -_data_, and warrant some level of schematic rigor. Using a stricter, -key/value-oriented message format for our logs, containing contextual and -semantic information, makes it much easier to get insight into the -operational activity of the systems we build. Consequently, `package log` is -of the strong belief that "[the benefits of structured logging outweigh the -minimal effort involved](https://www.thoughtworks.com/radar/techniques/structured-logging)". - -Migrating from unstructured to structured logging is probably a lot easier -than you'd expect. - -```go -// Unstructured -log.Printf("HTTP server listening on %s", addr) - -// Structured -logger.Log("transport", "HTTP", "addr", addr, "msg", "listening") -``` - -## Usage - -### Typical application logging - -```go -w := log.NewSyncWriter(os.Stderr) -logger := log.NewLogfmtLogger(w) -logger.Log("question", "what is the meaning of life?", "answer", 42) - -// Output: -// question="what is the meaning of life?" answer=42 -``` - -### Contextual Loggers - -```go -func main() { - var logger log.Logger - logger = log.NewLogfmtLogger(log.NewSyncWriter(os.Stderr)) - logger = log.With(logger, "instance_id", 123) - - logger.Log("msg", "starting") - NewWorker(log.With(logger, "component", "worker")).Run() - NewSlacker(log.With(logger, "component", "slacker")).Run() -} - -// Output: -// instance_id=123 msg=starting -// instance_id=123 component=worker msg=running -// instance_id=123 component=slacker msg=running -``` - -### Interact with stdlib logger - -Redirect stdlib logger to Go kit logger. - -```go -import ( - "os" - stdlog "log" - kitlog "github.com/go-kit/kit/log" -) - -func main() { - logger := kitlog.NewJSONLogger(kitlog.NewSyncWriter(os.Stdout)) - stdlog.SetOutput(kitlog.NewStdlibAdapter(logger)) - stdlog.Print("I sure like pie") -} - -// Output: -// {"msg":"I sure like pie","ts":"2016/01/01 12:34:56"} -``` - -Or, if, for legacy reasons, you need to pipe all of your logging through the -stdlib log package, you can redirect Go kit logger to the stdlib logger. 
- -```go -logger := kitlog.NewLogfmtLogger(kitlog.StdlibWriter{}) -logger.Log("legacy", true, "msg", "at least it's something") - -// Output: -// 2016/01/01 12:34:56 legacy=true msg="at least it's something" -``` - -### Timestamps and callers - -```go -var logger log.Logger -logger = log.NewLogfmtLogger(log.NewSyncWriter(os.Stderr)) -logger = log.With(logger, "ts", log.DefaultTimestampUTC, "caller", log.DefaultCaller) - -logger.Log("msg", "hello") - -// Output: -// ts=2016-01-01T12:34:56Z caller=main.go:15 msg=hello -``` - -## Levels - -Log levels are supported via the [level package](https://godoc.org/github.com/go-kit/kit/log/level). - -## Supported output formats - -- [Logfmt](https://brandur.org/logfmt) ([see also](https://blog.codeship.com/logfmt-a-log-format-thats-easy-to-read-and-write)) -- JSON - -## Enhancements - -`package log` is centered on the one-method Logger interface. - -```go -type Logger interface { - Log(keyvals ...interface{}) error -} -``` - -This interface, and its supporting code like is the product of much iteration -and evaluation. For more details on the evolution of the Logger interface, -see [The Hunt for a Logger Interface](http://go-talks.appspot.com/github.com/ChrisHines/talks/structured-logging/structured-logging.slide#1), -a talk by [Chris Hines](https://github.com/ChrisHines). -Also, please see -[#63](https://github.com/go-kit/kit/issues/63), -[#76](https://github.com/go-kit/kit/pull/76), -[#131](https://github.com/go-kit/kit/issues/131), -[#157](https://github.com/go-kit/kit/pull/157), -[#164](https://github.com/go-kit/kit/issues/164), and -[#252](https://github.com/go-kit/kit/pull/252) -to review historical conversations about package log and the Logger interface. - -Value-add packages and suggestions, -like improvements to [the leveled logger](https://godoc.org/github.com/go-kit/kit/log/level), -are of course welcome. Good proposals should - -- Be composable with [contextual loggers](https://godoc.org/github.com/go-kit/kit/log#With), -- Not break the behavior of [log.Caller](https://godoc.org/github.com/go-kit/kit/log#Caller) in any wrapped contextual loggers, and -- Be friendly to packages that accept only an unadorned log.Logger. - -## Benchmarks & comparisons - -There are a few Go logging benchmarks and comparisons that include Go kit's package log. - -- [imkira/go-loggers-bench](https://github.com/imkira/go-loggers-bench) includes kit/log -- [uber-common/zap](https://github.com/uber-common/zap), a zero-alloc logging library, includes a comparison with kit/log diff --git a/vendor/github.com/go-kit/kit/log/doc.go b/vendor/github.com/go-kit/kit/log/doc.go deleted file mode 100644 index c9873f4..0000000 --- a/vendor/github.com/go-kit/kit/log/doc.go +++ /dev/null @@ -1,118 +0,0 @@ -// Package log provides a structured logger. -// -// Deprecated: Use github.com/go-kit/log instead. -// -// Structured logging produces logs easily consumed later by humans or -// machines. Humans might be interested in debugging errors, or tracing -// specific requests. Machines might be interested in counting interesting -// events, or aggregating information for off-line processing. In both cases, -// it is important that the log messages are structured and actionable. -// Package log is designed to encourage both of these best practices. -// -// Basic Usage -// -// The fundamental interface is Logger. Loggers create log events from -// key/value data. 
The Logger interface has a single method, Log, which -// accepts a sequence of alternating key/value pairs, which this package names -// keyvals. -// -// type Logger interface { -// Log(keyvals ...interface{}) error -// } -// -// Here is an example of a function using a Logger to create log events. -// -// func RunTask(task Task, logger log.Logger) string { -// logger.Log("taskID", task.ID, "event", "starting task") -// ... -// logger.Log("taskID", task.ID, "event", "task complete") -// } -// -// The keys in the above example are "taskID" and "event". The values are -// task.ID, "starting task", and "task complete". Every key is followed -// immediately by its value. -// -// Keys are usually plain strings. Values may be any type that has a sensible -// encoding in the chosen log format. With structured logging it is a good -// idea to log simple values without formatting them. This practice allows -// the chosen logger to encode values in the most appropriate way. -// -// Contextual Loggers -// -// A contextual logger stores keyvals that it includes in all log events. -// Building appropriate contextual loggers reduces repetition and aids -// consistency in the resulting log output. With, WithPrefix, and WithSuffix -// add context to a logger. We can use With to improve the RunTask example. -// -// func RunTask(task Task, logger log.Logger) string { -// logger = log.With(logger, "taskID", task.ID) -// logger.Log("event", "starting task") -// ... -// taskHelper(task.Cmd, logger) -// ... -// logger.Log("event", "task complete") -// } -// -// The improved version emits the same log events as the original for the -// first and last calls to Log. Passing the contextual logger to taskHelper -// enables each log event created by taskHelper to include the task.ID even -// though taskHelper does not have access to that value. Using contextual -// loggers this way simplifies producing log output that enables tracing the -// life cycle of individual tasks. (See the Contextual example for the full -// code of the above snippet.) -// -// Dynamic Contextual Values -// -// A Valuer function stored in a contextual logger generates a new value each -// time an event is logged. The Valuer example demonstrates how this feature -// works. -// -// Valuers provide the basis for consistently logging timestamps and source -// code location. The log package defines several valuers for that purpose. -// See Timestamp, DefaultTimestamp, DefaultTimestampUTC, Caller, and -// DefaultCaller. A common logger initialization sequence that ensures all log -// entries contain a timestamp and source location looks like this: -// -// logger := log.NewLogfmtLogger(log.NewSyncWriter(os.Stdout)) -// logger = log.With(logger, "ts", log.DefaultTimestampUTC, "caller", log.DefaultCaller) -// -// Concurrent Safety -// -// Applications with multiple goroutines want each log event written to the -// same logger to remain separate from other log events. Package log provides -// two simple solutions for concurrent safe logging. -// -// NewSyncWriter wraps an io.Writer and serializes each call to its Write -// method. Using a SyncWriter has the benefit that the smallest practical -// portion of the logging logic is performed within a mutex, but it requires -// the formatting Logger to make only one call to Write per log event. -// -// NewSyncLogger wraps any Logger and serializes each call to its Log method. 
-// Using a SyncLogger has the benefit that it guarantees each log event is -// handled atomically within the wrapped logger, but it typically serializes -// both the formatting and output logic. Use a SyncLogger if the formatting -// logger may perform multiple writes per log event. -// -// Error Handling -// -// This package relies on the practice of wrapping or decorating loggers with -// other loggers to provide composable pieces of functionality. It also means -// that Logger.Log must return an error because some -// implementations—especially those that output log data to an io.Writer—may -// encounter errors that cannot be handled locally. This in turn means that -// Loggers that wrap other loggers should return errors from the wrapped -// logger up the stack. -// -// Fortunately, the decorator pattern also provides a way to avoid the -// necessity to check for errors every time an application calls Logger.Log. -// An application required to panic whenever its Logger encounters -// an error could initialize its logger as follows. -// -// fmtlogger := log.NewLogfmtLogger(log.NewSyncWriter(os.Stdout)) -// logger := log.LoggerFunc(func(keyvals ...interface{}) error { -// if err := fmtlogger.Log(keyvals...); err != nil { -// panic(err) -// } -// return nil -// }) -package log diff --git a/vendor/github.com/go-kit/kit/log/json_logger.go b/vendor/github.com/go-kit/kit/log/json_logger.go deleted file mode 100644 index edfde2f..0000000 --- a/vendor/github.com/go-kit/kit/log/json_logger.go +++ /dev/null @@ -1,15 +0,0 @@ -package log - -import ( - "io" - - "github.com/go-kit/log" -) - -// NewJSONLogger returns a Logger that encodes keyvals to the Writer as a -// single JSON object. Each log event produces no more than one call to -// w.Write. The passed Writer must be safe for concurrent use by multiple -// goroutines if the returned Logger will be used concurrently. -func NewJSONLogger(w io.Writer) Logger { - return log.NewJSONLogger(w) -} diff --git a/vendor/github.com/go-kit/kit/log/level/doc.go b/vendor/github.com/go-kit/kit/log/level/doc.go deleted file mode 100644 index 7baf870..0000000 --- a/vendor/github.com/go-kit/kit/log/level/doc.go +++ /dev/null @@ -1,25 +0,0 @@ -// Package level implements leveled logging on top of Go kit's log package. -// -// Deprecated: Use github.com/go-kit/log/level instead. -// -// To use the level package, create a logger as per normal in your func main, -// and wrap it with level.NewFilter. -// -// var logger log.Logger -// logger = log.NewLogfmtLogger(os.Stderr) -// logger = level.NewFilter(logger, level.AllowInfo()) // <-- -// logger = log.With(logger, "ts", log.DefaultTimestampUTC) -// -// Then, at the callsites, use one of the level.Debug, Info, Warn, or Error -// helper methods to emit leveled log events. -// -// logger.Log("foo", "bar") // as normal, no level -// level.Debug(logger).Log("request_id", reqID, "trace_data", trace.Get()) -// if value > 100 { -// level.Error(logger).Log("value", value) -// } -// -// NewFilter allows precise control over what happens when a log event is -// emitted without a level key, or if a squelched level is used. Check the -// Option functions for details. 
-package level diff --git a/vendor/github.com/go-kit/kit/log/level/level.go b/vendor/github.com/go-kit/kit/log/level/level.go deleted file mode 100644 index 803e8b9..0000000 --- a/vendor/github.com/go-kit/kit/log/level/level.go +++ /dev/null @@ -1,120 +0,0 @@ -package level - -import ( - "github.com/go-kit/log" - "github.com/go-kit/log/level" -) - -// Error returns a logger that includes a Key/ErrorValue pair. -func Error(logger log.Logger) log.Logger { - return level.Error(logger) -} - -// Warn returns a logger that includes a Key/WarnValue pair. -func Warn(logger log.Logger) log.Logger { - return level.Warn(logger) -} - -// Info returns a logger that includes a Key/InfoValue pair. -func Info(logger log.Logger) log.Logger { - return level.Info(logger) -} - -// Debug returns a logger that includes a Key/DebugValue pair. -func Debug(logger log.Logger) log.Logger { - return level.Debug(logger) -} - -// NewFilter wraps next and implements level filtering. See the commentary on -// the Option functions for a detailed description of how to configure levels. -// If no options are provided, all leveled log events created with Debug, -// Info, Warn or Error helper methods are squelched and non-leveled log -// events are passed to next unmodified. -func NewFilter(next log.Logger, options ...Option) log.Logger { - return level.NewFilter(next, options...) -} - -// Option sets a parameter for the leveled logger. -type Option = level.Option - -// AllowAll is an alias for AllowDebug. -func AllowAll() Option { - return level.AllowAll() -} - -// AllowDebug allows error, warn, info and debug level log events to pass. -func AllowDebug() Option { - return level.AllowDebug() -} - -// AllowInfo allows error, warn and info level log events to pass. -func AllowInfo() Option { - return level.AllowInfo() -} - -// AllowWarn allows error and warn level log events to pass. -func AllowWarn() Option { - return level.AllowWarn() -} - -// AllowError allows only error level log events to pass. -func AllowError() Option { - return level.AllowError() -} - -// AllowNone allows no leveled log events to pass. -func AllowNone() Option { - return level.AllowNone() -} - -// ErrNotAllowed sets the error to return from Log when it squelches a log -// event disallowed by the configured Allow[Level] option. By default, -// ErrNotAllowed is nil; in this case the log event is squelched with no -// error. -func ErrNotAllowed(err error) Option { - return level.ErrNotAllowed(err) -} - -// SquelchNoLevel instructs Log to squelch log events with no level, so that -// they don't proceed through to the wrapped logger. If SquelchNoLevel is set -// to true and a log event is squelched in this way, the error value -// configured with ErrNoLevel is returned to the caller. -func SquelchNoLevel(squelch bool) Option { - return level.SquelchNoLevel(squelch) -} - -// ErrNoLevel sets the error to return from Log when it squelches a log event -// with no level. By default, ErrNoLevel is nil; in this case the log event is -// squelched with no error. -func ErrNoLevel(err error) Option { - return level.ErrNoLevel(err) -} - -// NewInjector wraps next and returns a logger that adds a Key/level pair to -// the beginning of log events that don't already contain a level. In effect, -// this gives a default level to logs without a level. -func NewInjector(next log.Logger, lvl Value) log.Logger { - return level.NewInjector(next, lvl) -} - -// Value is the interface that each of the canonical level values implement. 
-// It contains unexported methods that prevent types from other packages from -// implementing it and guaranteeing that NewFilter can distinguish the levels -// defined in this package from all other values. -type Value = level.Value - -// Key returns the unique key added to log events by the loggers in this -// package. -func Key() interface{} { return level.Key() } - -// ErrorValue returns the unique value added to log events by Error. -func ErrorValue() Value { return level.ErrorValue() } - -// WarnValue returns the unique value added to log events by Warn. -func WarnValue() Value { return level.WarnValue() } - -// InfoValue returns the unique value added to log events by Info. -func InfoValue() Value { return level.InfoValue() } - -// DebugValue returns the unique value added to log events by Debug. -func DebugValue() Value { return level.DebugValue() } diff --git a/vendor/github.com/go-kit/kit/log/log.go b/vendor/github.com/go-kit/kit/log/log.go deleted file mode 100644 index 164a4f9..0000000 --- a/vendor/github.com/go-kit/kit/log/log.go +++ /dev/null @@ -1,51 +0,0 @@ -package log - -import ( - "github.com/go-kit/log" -) - -// Logger is the fundamental interface for all log operations. Log creates a -// log event from keyvals, a variadic sequence of alternating keys and values. -// Implementations must be safe for concurrent use by multiple goroutines. In -// particular, any implementation of Logger that appends to keyvals or -// modifies or retains any of its elements must make a copy first. -type Logger = log.Logger - -// ErrMissingValue is appended to keyvals slices with odd length to substitute -// the missing value. -var ErrMissingValue = log.ErrMissingValue - -// With returns a new contextual logger with keyvals prepended to those passed -// to calls to Log. If logger is also a contextual logger created by With, -// WithPrefix, or WithSuffix, keyvals is appended to the existing context. -// -// The returned Logger replaces all value elements (odd indexes) containing a -// Valuer with their generated value for each call to its Log method. -func With(logger Logger, keyvals ...interface{}) Logger { - return log.With(logger, keyvals...) -} - -// WithPrefix returns a new contextual logger with keyvals prepended to those -// passed to calls to Log. If logger is also a contextual logger created by -// With, WithPrefix, or WithSuffix, keyvals is prepended to the existing context. -// -// The returned Logger replaces all value elements (odd indexes) containing a -// Valuer with their generated value for each call to its Log method. -func WithPrefix(logger Logger, keyvals ...interface{}) Logger { - return log.WithPrefix(logger, keyvals...) -} - -// WithSuffix returns a new contextual logger with keyvals appended to those -// passed to calls to Log. If logger is also a contextual logger created by -// With, WithPrefix, or WithSuffix, keyvals is appended to the existing context. -// -// The returned Logger replaces all value elements (odd indexes) containing a -// Valuer with their generated value for each call to its Log method. -func WithSuffix(logger Logger, keyvals ...interface{}) Logger { - return log.WithSuffix(logger, keyvals...) -} - -// LoggerFunc is an adapter to allow use of ordinary functions as Loggers. If -// f is a function with the appropriate signature, LoggerFunc(f) is a Logger -// object that calls f. 
-type LoggerFunc = log.LoggerFunc diff --git a/vendor/github.com/go-kit/kit/log/logfmt_logger.go b/vendor/github.com/go-kit/kit/log/logfmt_logger.go deleted file mode 100644 index 51cde2c..0000000 --- a/vendor/github.com/go-kit/kit/log/logfmt_logger.go +++ /dev/null @@ -1,15 +0,0 @@ -package log - -import ( - "io" - - "github.com/go-kit/log" -) - -// NewLogfmtLogger returns a logger that encodes keyvals to the Writer in -// logfmt format. Each log event produces no more than one call to w.Write. -// The passed Writer must be safe for concurrent use by multiple goroutines if -// the returned Logger will be used concurrently. -func NewLogfmtLogger(w io.Writer) Logger { - return log.NewLogfmtLogger(w) -} diff --git a/vendor/github.com/go-kit/kit/log/nop_logger.go b/vendor/github.com/go-kit/kit/log/nop_logger.go deleted file mode 100644 index b02c686..0000000 --- a/vendor/github.com/go-kit/kit/log/nop_logger.go +++ /dev/null @@ -1,8 +0,0 @@ -package log - -import "github.com/go-kit/log" - -// NewNopLogger returns a logger that doesn't do anything. -func NewNopLogger() Logger { - return log.NewNopLogger() -} diff --git a/vendor/github.com/go-kit/kit/log/stdlib.go b/vendor/github.com/go-kit/kit/log/stdlib.go deleted file mode 100644 index cb604a7..0000000 --- a/vendor/github.com/go-kit/kit/log/stdlib.go +++ /dev/null @@ -1,54 +0,0 @@ -package log - -import ( - "io" - - "github.com/go-kit/log" -) - -// StdlibWriter implements io.Writer by invoking the stdlib log.Print. It's -// designed to be passed to a Go kit logger as the writer, for cases where -// it's necessary to redirect all Go kit log output to the stdlib logger. -// -// If you have any choice in the matter, you shouldn't use this. Prefer to -// redirect the stdlib log to the Go kit logger via NewStdlibAdapter. -type StdlibWriter = log.StdlibWriter - -// StdlibAdapter wraps a Logger and allows it to be passed to the stdlib -// logger's SetOutput. It will extract date/timestamps, filenames, and -// messages, and place them under relevant keys. -type StdlibAdapter = log.StdlibAdapter - -// StdlibAdapterOption sets a parameter for the StdlibAdapter. -type StdlibAdapterOption = log.StdlibAdapterOption - -// TimestampKey sets the key for the timestamp field. By default, it's "ts". -func TimestampKey(key string) StdlibAdapterOption { - return log.TimestampKey(key) -} - -// FileKey sets the key for the file and line field. By default, it's "caller". -func FileKey(key string) StdlibAdapterOption { - return log.FileKey(key) -} - -// MessageKey sets the key for the actual log message. By default, it's "msg". -func MessageKey(key string) StdlibAdapterOption { - return log.MessageKey(key) -} - -// Prefix configures the adapter to parse a prefix from stdlib log events. If -// you provide a non-empty prefix to the stdlib logger, then your should provide -// that same prefix to the adapter via this option. -// -// By default, the prefix isn't included in the msg key. Set joinPrefixToMsg to -// true if you want to include the parsed prefix in the msg. -func Prefix(prefix string, joinPrefixToMsg bool) StdlibAdapterOption { - return log.Prefix(prefix, joinPrefixToMsg) -} - -// NewStdlibAdapter returns a new StdlibAdapter wrapper around the passed -// logger. It's designed to be passed to log.SetOutput. -func NewStdlibAdapter(logger Logger, options ...StdlibAdapterOption) io.Writer { - return log.NewStdlibAdapter(logger, options...) 
-} diff --git a/vendor/github.com/go-kit/kit/log/sync.go b/vendor/github.com/go-kit/kit/log/sync.go deleted file mode 100644 index bcfee2b..0000000 --- a/vendor/github.com/go-kit/kit/log/sync.go +++ /dev/null @@ -1,37 +0,0 @@ -package log - -import ( - "io" - - "github.com/go-kit/log" -) - -// SwapLogger wraps another logger that may be safely replaced while other -// goroutines use the SwapLogger concurrently. The zero value for a SwapLogger -// will discard all log events without error. -// -// SwapLogger serves well as a package global logger that can be changed by -// importers. -type SwapLogger = log.SwapLogger - -// NewSyncWriter returns a new writer that is safe for concurrent use by -// multiple goroutines. Writes to the returned writer are passed on to w. If -// another write is already in progress, the calling goroutine blocks until -// the writer is available. -// -// If w implements the following interface, so does the returned writer. -// -// interface { -// Fd() uintptr -// } -func NewSyncWriter(w io.Writer) io.Writer { - return log.NewSyncWriter(w) -} - -// NewSyncLogger returns a logger that synchronizes concurrent use of the -// wrapped logger. When multiple goroutines use the SyncLogger concurrently -// only one goroutine will be allowed to log to the wrapped logger at a time. -// The other goroutines will block until the logger is available. -func NewSyncLogger(logger Logger) Logger { - return log.NewSyncLogger(logger) -} diff --git a/vendor/github.com/go-kit/kit/log/value.go b/vendor/github.com/go-kit/kit/log/value.go deleted file mode 100644 index 96d783b..0000000 --- a/vendor/github.com/go-kit/kit/log/value.go +++ /dev/null @@ -1,52 +0,0 @@ -package log - -import ( - "time" - - "github.com/go-kit/log" -) - -// A Valuer generates a log value. When passed to With, WithPrefix, or -// WithSuffix in a value element (odd indexes), it represents a dynamic -// value which is re-evaluated with each log event. -type Valuer = log.Valuer - -// Timestamp returns a timestamp Valuer. It invokes the t function to get the -// time; unless you are doing something tricky, pass time.Now. -// -// Most users will want to use DefaultTimestamp or DefaultTimestampUTC, which -// are TimestampFormats that use the RFC3339Nano format. -func Timestamp(t func() time.Time) Valuer { - return log.Timestamp(t) -} - -// TimestampFormat returns a timestamp Valuer with a custom time format. It -// invokes the t function to get the time to format; unless you are doing -// something tricky, pass time.Now. The layout string is passed to -// Time.Format. -// -// Most users will want to use DefaultTimestamp or DefaultTimestampUTC, which -// are TimestampFormats that use the RFC3339Nano format. -func TimestampFormat(t func() time.Time, layout string) Valuer { - return log.TimestampFormat(t, layout) -} - -// Caller returns a Valuer that returns a file and line from a specified depth -// in the callstack. Users will probably want to use DefaultCaller. -func Caller(depth int) Valuer { - return log.Caller(depth) -} - -var ( - // DefaultTimestamp is a Valuer that returns the current wallclock time, - // respecting time zones, when bound. - DefaultTimestamp = log.DefaultTimestamp - - // DefaultTimestampUTC is a Valuer that returns the current time in UTC - // when bound. - DefaultTimestampUTC = log.DefaultTimestampUTC - - // DefaultCaller is a Valuer that returns the file and line where the Log - // method was invoked. It can only be used with log.With. 
- DefaultCaller = log.DefaultCaller -) diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/.gitignore b/vendor/github.com/influxdata/influxdb-client-go/v2/.gitignore new file mode 100644 index 0000000..7f892c7 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/.gitignore @@ -0,0 +1,20 @@ +# Binaries for programs and plugins +*.exe +*.bat +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ + +# IntelliJ IDEA +.IDEA +*.IML diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/CHANGELOG.md b/vendor/github.com/influxdata/influxdb-client-go/v2/CHANGELOG.md new file mode 100644 index 0000000..ba87a02 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/CHANGELOG.md @@ -0,0 +1,233 @@ +## 2.10.0 [2022-08-25] +### Features +- [#348](https://github.com/influxdata/influxdb-client-go/pull/348) Added `write.Options.Consitency` parameter to support InfluxDB Enterprise. +- [#350](https://github.com/influxdata/influxdb-client-go/pull/350) Added support for implicit batching to `WriteAPIBlocking`. It's off by default, enabled by `EnableBatching()`. + +### Bug fixes +- [#349](https://github.com/influxdata/influxdb-client-go/pull/349) Skip retrying on specific write errors (mostly partial write error). + +### Breaking change +- [#350](https://github.com/influxdata/influxdb-client-go/pull/350) Interface `WriteAPIBlocking` is extend with `EnableBatching()` and `Flush()`. + +## 2.9.2 [2022-07-29] +### Bug fixes +- [#341](https://github.com/influxdata/influxdb-client-go/pull/341) Changing logging level of messages about discarding batch to Error. +- [#344](https://github.com/influxdata/influxdb-client-go/pull/344) `WriteAPI.Flush()` writes also batches from the retry queue. + +### Test +- [#345](https://github.com/influxdata/influxdb-client-go/pul/345) Added makefile for simplifing testing from command line. + +## 2.9.1 [2022-06-24] +### Bug fixes +- [#332](https://github.com/influxdata/influxdb-client-go/pull/332) Retry strategy drops expired batches as soon as they expire. +- [#335](https://github.com/influxdata/influxdb-client-go/pull/335) Retry strategy keeps max retry delay for new batches. + +## 2.9.0 [2022-05-20] +### Features +- [#323](https://github.com/influxdata/influxdb-client-go/pull/323) Added `TasksAPI.CreateTaskByFlux` to allow full control of task script. +- [#328](https://github.com/influxdata/influxdb-client-go/pull/328) Added `Client.SetupWithToken` allowing to specify a custom token. + +### Bug fixes +- [#324](https://github.com/influxdata/influxdb-client-go/pull/324) Non-empty error channel will not block writes + +## 2.8.2 [2022-04-19] +### Bug fixes +- [#319](https://github.com/influxdata/influxdb-client-go/pull/319) Synchronize `WriteAPIImpl.Close` to prevent panic when closing client by multiple go-routines. 
+ +## 2.8.1 [2022-03-21] +### Bug fixes +- [#311](https://github.com/influxdata/influxdb-client-go/pull/311) Correctly unwrapping http.Error from Server API calls +- [#315](https://github.com/influxdata/influxdb-client-go/pull/315) Masking authorization token in log + +## 2.8.0 [2022-02-18] +### Features +- [#304](https://github.com/influxdata/influxdb-client-go/pull/304) Added public constructor for `QueryTableResult` +- [#307](https://github.com/influxdata/influxdb-client-go/pull/307) Synced generated server API with the latest [oss.yml](https://github.com/influxdata/openapi/blob/master/contracts/oss.yml). +- [#308](https://github.com/influxdata/influxdb-client-go/pull/308) Added Flux query parameters. Supported by InfluxDB Cloud only now. +- [#308](https://github.com/influxdata/influxdb-client-go/pull/308) Go 1.17 is required + +## 2.7.0[2022-01-20] +### Features +- [#297](https://github.com/influxdata/influxdb-client-go/pull/297),[#298](https://github.com/influxdata/influxdb-client-go/pull/298) Optimized `WriteRecord` of [WriteAPIBlocking](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2/api#WriteAPIBlocking). Custom batch can be written by single argument. + +### Bug fixes +- [#294](https://github.com/influxdata/influxdb-client-go/pull/294) `WritePoint` and `WriteRecord` of [WriteAPIBlocking](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2/api#WriteAPIBlocking) returns always full error information. +- [300](https://github.com/influxdata/influxdb-client-go/pull/300) Closing the response body after write batch. +- [302](https://github.com/influxdata/influxdb-client-go/pull/302) FluxRecord.Table() returns value of the table column. + +## 2.6.0[2021-11-26] +### Features +- [#285](https://github.com/influxdata/influxdb-client-go/pull/285) Added *Client.Ping()* function as the only validation method available in both OSS and Cloud. +- [#286](https://github.com/influxdata/influxdb-client-go/pull/286) Synced generated server API with the latest [oss.yml](https://github.com/influxdata/openapi/blob/master/contracts/oss.yml). +- [#287](https://github.com/influxdata/influxdb-client-go/pull/287) Added *FluxRecord.Result()* function as a convenient way to retrieve the Flux result name of data. + +### Bug fixes +- [#285](https://github.com/influxdata/influxdb-client-go/pull/285) Functions *Client.Health()* and *Client.Ready()* correctly report an error when called against InfluxDB Cloud. + +### Breaking change +- [#285](https://github.com/influxdata/influxdb-client-go/pull/285) Function *Client.Ready()* now returns `*domain.Ready` with full uptime info. + +## 2.5.1[2021-09-17] +### Bug fixes + - [#276](https://github.com/influxdata/influxdb-client-go/pull/276) Synchronized logging methods of _log.Logger_. + +## 2.5.0 [2021-08-20] +### Features + - [#264](https://github.com/influxdata/influxdb-client-go/pull/264) Synced generated server API with the latest [oss.yml](https://github.com/influxdata/openapi/blob/master/contracts/oss.yml). + - [#271](https://github.com/influxdata/influxdb-client-go/pull/271) Use exponential _random_ retry strategy + - [#273](https://github.com/influxdata/influxdb-client-go/pull/273) Added `WriteFailedCallback` for `WriteAPI` allowing to be _synchronously_ notified about failed writes and decide on further batch processing. 
+ +### Bug fixes + - [#269](https://github.com/influxdata/influxdb-client-go/pull/269) Synchronized setters of _log.Logger_ to allow concurrent usage + - [#270](https://github.com/influxdata/influxdb-client-go/pull/270) Fixed duplicate `Content-Type` header in requests to managemet API + +### Documentation + - [#261](https://github.com/influxdata/influxdb-client-go/pull/261) Update Line Protocol document link to v2.0 + - [#274](https://github.com/influxdata/influxdb-client-go/pull/274) Documenting proxy configuration and HTTP redirects handling + +## 2.4.0 [2021-06-04] +### Features + - [#256](https://github.com/influxdata/influxdb-client-go/pull/256) Allowing 'Doer' interface for HTTP requests + +### Bug fixes + - [#259](https://github.com/influxdata/influxdb-client-go/pull/259) Fixed leaking connection in case of not reading whole query result on TLS connection + + +## 2.3.0 [2021-04-30] +### Breaking change + - [#253](https://github.com/influxdata/influxdb-client-go/pull/253) Interface 'Logger' extended with 'LogLevel() uint' getter. + +### Features + - [#241](https://github.com/influxdata/influxdb-client-go/pull/241),[#248](https://github.com/influxdata/influxdb-client-go/pull/248) Synced with InfluxDB 2.0.5 swagger: + - Setup (onboarding) now sends correctly retentionDuration if specified + - `RetentionRule` used in `Bucket` now contains `ShardGroupDurationSeconds` to specify the shard group duration. + +### Documentation +1. [#242](https://github.com/influxdata/influxdb-client-go/pull/242) Documentation improvements: + - [Custom server API example](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2#example-Client-CustomServerAPICall) now shows how to create DBRP mapping + - Improved documentation about concurrency +1. [#251](https://github.com/influxdata/influxdb-client-go/pull/251) Fixed Readme.md formatting + +### Bug fixes +1. [#252](https://github.com/influxdata/influxdb-client-go/pull/252) Fixed panic when getting not present standard Flux columns +1. [#253](https://github.com/influxdata/influxdb-client-go/pull/253) Conditional debug logging of buffers +1. [#254](https://github.com/influxdata/influxdb-client-go/pull/254) Fixed golint pull + +## 2.2.3 [2021-04-01] +### Bug fixes +1. [#236](https://github.com/influxdata/influxdb-client-go/pull/236) Setting MaxRetries to zero value disables retry strategy. +1. [#239](https://github.com/influxdata/influxdb-client-go/pull/239) Blocking write client doesn't use retry handling. + +## 2.2.2 [2021-01-29] +### Bug fixes +1. [#229](https://github.com/influxdata/influxdb-client-go/pull/229) Connection errors are also subject for retrying. + +## 2.2.1 [2020-12-24] +### Bug fixes +1. [#220](https://github.com/influxdata/influxdb-client-go/pull/220) Fixed runtime error occurring when calling v2 API on v1 server. + +### Documentation +1. [#218](https://github.com/influxdata/influxdb-client-go/pull/218), [#221](https://github.com/influxdata/influxdb-client-go/pull/221), [#222](https://github.com/influxdata/influxdb-client-go/pull/222), Changed links leading to sources to point to API docs in Readme, fixed broken links to InfluxDB docs. + +## 2.2.0 [2020-10-30] +### Features +1. [#206](https://github.com/influxdata/influxdb-client-go/pull/206) Adding TasksAPI for managing tasks and associated logs and runs. + +### Bug fixes +1. [#209](https://github.com/influxdata/influxdb-client-go/pull/209) Synchronizing access to the write service in WriteAPIBlocking. + +## 2.1.0 [2020-10-02] +### Features +1. 
[#193](https://github.com/influxdata/influxdb-client-go/pull/193) Added authentication using username and password. See `UsersAPI.SignIn()` and `UsersAPI.SignOut()` +1. [#204](https://github.com/influxdata/influxdb-client-go/pull/204) Synced with InfluxDB 2 RC0 swagger. Added pagination to Organizations API and `After` paging param to Buckets API. + +### Bug fixes +1. [#191](https://github.com/influxdata/influxdb-client-go/pull/191) Fixed QueryTableResult.Next() failed to parse boolean datatype. +1. [#192](https://github.com/influxdata/influxdb-client-go/pull/192) Client.Close() closes idle connections of internally created HTTP client + +### Documentation +1. [#189](https://github.com/influxdata/influxdb-client-go/pull/189) Added clarification that server URL has to be the InfluxDB server base URL to API docs and all examples. +1. [#196](https://github.com/influxdata/influxdb-client-go/pull/196) Changed default server port 9999 to 8086 in docs and examples +1. [#200](https://github.com/influxdata/influxdb-client-go/pull/200) Fix example code in the Readme + +## 2.0.1 [2020-08-14] +### Bug fixes +1. [#187](https://github.com/influxdata/influxdb-client-go/pull/187) Properly updated library for new major version. + +## 2.0.0 [2020-08-14] +### Breaking changes +1. [#173](https://github.com/influxdata/influxdb-client-go/pull/173) Removed deprecated API. +1. [#174](https://github.com/influxdata/influxdb-client-go/pull/174) Removed orgs labels API cause [it has been removed from the server API](https://github.com/influxdata/influxdb/pull/19104) +1. [#175](https://github.com/influxdata/influxdb-client-go/pull/175) Removed WriteAPI.Close() + +### Features +1. [#165](https://github.com/influxdata/influxdb-client-go/pull/165) Allow overriding the http.Client for the http service. +1. [#179](https://github.com/influxdata/influxdb-client-go/pull/179) Unifying retry strategy among InfluxDB 2 clients: added exponential backoff. +1. [#180](https://github.com/influxdata/influxdb-client-go/pull/180) Provided public logger API to enable overriding logging. It is also possible to disable logging. +1. [#181](https://github.com/influxdata/influxdb-client-go/pull/181) Exposed HTTP service to allow custom server API calls. Added example. + +### Bug fixes +1. [#175](https://github.com/influxdata/influxdb-client-go/pull/175) Fixed WriteAPIs management. Keeping single instance for each org and bucket pair. + +### Documentation +1. [#185](https://github.com/influxdata/influxdb-client-go/pull/185) DeleteAPI and sample WriteAPIBlocking wrapper for implicit batching + +## 1.4.0 [2020-07-17] +### Breaking changes +1. [#156](https://github.com/influxdata/influxdb-client-go/pull/156) Fixing Go naming and code style violations: +- Introducing new *API interfaces with proper name of types, methods and arguments. +- This also affects the `Client` interface and the `Options` type. +- Affected types and methods have been deprecated and they will be removed in the next release. + +### Bug fixes +1. [#152](https://github.com/influxdata/influxdb-client-go/pull/152) Allow connecting to server on a URL path +1. [#154](https://github.com/influxdata/influxdb-client-go/pull/154) Use idiomatic go style for write channels (internal) +1. [#155](https://github.com/influxdata/influxdb-client-go/pull/155) Fix panic in FindOrganizationByName in case of no permissions + + +## 1.3.0 [2020-06-19] +### Features +1. [#131](https://github.com/influxdata/influxdb-client-go/pull/131) Labels API +1. 
[#136](https://github.com/influxdata/influxdb-client-go/pull/136) Possibility to specify default tags +1. [#138](https://github.com/influxdata/influxdb-client-go/pull/138) Fix errors from InfluxDB 1.8 being empty + +### Bug fixes +1. [#132](https://github.com/influxdata/influxdb-client-go/pull/132) Handle unsupported write type as string instead of generating panic +1. [#134](https://github.com/influxdata/influxdb-client-go/pull/134) FluxQueryResult: support reordering of annotations + +## 1.2.0 [2020-05-15] +### Breaking Changes + - [#107](https://github.com/influxdata/influxdb-client-go/pull/107) Renamed `InfluxDBClient` interface to `Client`, so the full name `influxdb2.Client` suits better to Go naming conventions + - [#125](https://github.com/influxdata/influxdb-client-go/pull/125) `WriteApi`,`WriteApiBlocking`,`QueryApi` interfaces and related objects like `Point`, `FluxTableMetadata`, `FluxTableColumn`, `FluxRecord`, moved to the `api` ( and `api/write`, `api/query`) packages + to provide consistent interface + +### Features +1. [#120](https://github.com/influxdata/influxdb-client-go/pull/120) Health check API +1. [#122](https://github.com/influxdata/influxdb-client-go/pull/122) Delete API +1. [#124](https://github.com/influxdata/influxdb-client-go/pull/124) Buckets API + +### Bug fixes +1. [#108](https://github.com/influxdata/influxdb-client-go/pull/108) Fix default retry interval doc +1. [#110](https://github.com/influxdata/influxdb-client-go/pull/110) Allowing empty (nil) values in query result + +### Documentation + - [#112](https://github.com/influxdata/influxdb-client-go/pull/112) Clarify how to use client with InfluxDB 1.8+ + - [#115](https://github.com/influxdata/influxdb-client-go/pull/115) Doc and examples for reading write api errors + +## 1.1.0 [2020-04-24] +### Features +1. [#100](https://github.com/influxdata/influxdb-client-go/pull/100) HTTP request timeout made configurable +1. [#99](https://github.com/influxdata/influxdb-client-go/pull/99) Organizations API and Users API +1. [#96](https://github.com/influxdata/influxdb-client-go/pull/96) Authorization API + +### Docs +1. [#101](https://github.com/influxdata/influxdb-client-go/pull/101) Added examples to API docs + +## 1.0.0 [2020-04-01] +### Core + +- initial release of new client version + +### APIs + +- initial release of new client version diff --git a/vendor/github.com/go-kit/kit/LICENSE b/vendor/github.com/influxdata/influxdb-client-go/v2/LICENSE similarity index 94% rename from vendor/github.com/go-kit/kit/LICENSE rename to vendor/github.com/influxdata/influxdb-client-go/v2/LICENSE index 9d83342..6b1f43f 100644 --- a/vendor/github.com/go-kit/kit/LICENSE +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/LICENSE @@ -1,6 +1,6 @@ -The MIT License (MIT) +MIT License -Copyright (c) 2015 Peter Bourgon +Copyright (c) 2020-2021 Influxdata, Inc. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -19,4 +19,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/Makefile b/vendor/github.com/influxdata/influxdb-client-go/v2/Makefile new file mode 100644 index 0000000..849aef1 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/Makefile @@ -0,0 +1,31 @@ +artifacts_path := /tmp/artifacts + +help: + @echo 'Targets:' + @echo ' all - runs lint, server, coverage' + @echo ' lint - runs code style checks' + @echo ' shorttest - runs unit and integration tests' + @echo ' test - runs all tests, including e2e tests - requires running influxdb 2 server' + @echo ' coverage - runs all tests, including e2e tests, with coverage report - requires running influxdb 2 server' + @echo ' server - prepares InfluxDB in docker environment' + +lint: + go vet ./... + go install honnef.co/go/tools/cmd/staticcheck@latest && staticcheck --checks='all' --tags e2e ./... + go install golang.org/x/lint/golint@latest && golint ./... + +shorttest: + go test -race -v -count=1 ./... + +test: + go test -race -v -count=1 --tags e2e ./... + +coverage: + go install gotest.tools/gotestsum@latest && gotestsum --junitfile /tmp/test-results/unit-tests.xml -- -race -coverprofile=coverage.txt -covermode=atomic -coverpkg '.,./api/...,./internal/.../,./log/...' -tags e2e ./... + if test ! -e $(artifacts_path); then mkdir $(artifacts_path); fi + go tool cover -html=coverage.txt -o $(artifacts_path)/coverage.html + +server: + ./scripts/influxdb-restart.sh + +all: lint server coverage diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/README.md b/vendor/github.com/influxdata/influxdb-client-go/v2/README.md new file mode 100644 index 0000000..adaed29 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/README.md @@ -0,0 +1,697 @@ +# InfluxDB Client Go + +[![CircleCI](https://circleci.com/gh/influxdata/influxdb-client-go.svg?style=svg)](https://circleci.com/gh/influxdata/influxdb-client-go) +[![codecov](https://codecov.io/gh/influxdata/influxdb-client-go/branch/master/graph/badge.svg)](https://codecov.io/gh/influxdata/influxdb-client-go) +[![License](https://img.shields.io/github/license/influxdata/influxdb-client-go.svg)](https://github.com/influxdata/influxdb-client-go/blob/master/LICENSE) +[![Slack Status](https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social)](https://www.influxdata.com/slack) + +This repository contains the reference Go client for InfluxDB 2. + +#### Note: Use this client library with InfluxDB 2.x and InfluxDB 1.8+ ([see details](#influxdb-18-api-compatibility)). For connecting to InfluxDB 1.7 or earlier instances, use the [influxdb1-go](https://github.com/influxdata/influxdb1-client) client library. 
+ +- [Features](#features) +- [Documentation](#documentation) + - [Examples](#examples) +- [How To Use](#how-to-use) + - [Basic Example](#basic-example) + - [Writes in Detail](#writes) + - [Queries in Detail](#queries) + - [Parametrized Queries](#parametrized-queries) + - [Concurrency](#concurrency) + - [Proxy and redirects](#proxy-and-redirects) + - [Checking Server State](#checking-server-state) +- [InfluxDB 1.8 API compatibility](#influxdb-18-api-compatibility) +- [Contributing](#contributing) +- [License](#license) + +## Features + +- InfluxDB 2 client + - Querying data + - using the Flux language + - into raw data, flux table representation + - [How to queries](#queries) + - Writing data using + - [Line Protocol](https://docs.influxdata.com/influxdb/v2.0/reference/syntax/line-protocol/) + - [Data Point](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2/api/write#Point) + - Both [asynchronous](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2/api#WriteAPI) or [synchronous](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2/api#WriteAPIBlocking) ways + - [How to writes](#writes) + - InfluxDB 2 API + - setup, ready, health + - authotizations, users, organizations + - buckets, delete + - ... + +## Documentation + +This section contains links to the client library documentation. + +- [Product documentation](https://docs.influxdata.com/influxdb/v2.0/tools/client-libraries/), [Getting Started](#how-to-use) +- [Examples](#examples) +- [API Reference](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2) +- [Changelog](CHANGELOG.md) + +### Examples + +Examples for basic writing and querying data are shown below in this document + +There are also other examples in the API docs: + - [Client usage](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2?tab=doc#pkg-examples) + - [Management APIs](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2/api?tab=doc#pkg-examples) + +## How To Use + +### Installation +**Go 1.17** or later is required. + +1. Add the client package your to your project dependencies (go.mod). + ```sh + go get github.com/influxdata/influxdb-client-go/v2 + ``` +1. Add import `github.com/influxdata/influxdb-client-go/v2` to your source code. + +### Basic Example +The following example demonstrates how to write data to InfluxDB 2 and read them back using the Flux language: +```go +package main + +import ( + "context" + "fmt" + "time" + + "github.com/influxdata/influxdb-client-go/v2" +) + +func main() { + // Create a new client using an InfluxDB server base URL and an authentication token + client := influxdb2.NewClient("http://localhost:8086", "my-token") + // Use blocking write client for writes to desired bucket + writeAPI := client.WriteAPIBlocking("my-org", "my-bucket") + // Create point using full params constructor + p := influxdb2.NewPoint("stat", + map[string]string{"unit": "temperature"}, + map[string]interface{}{"avg": 24.5, "max": 45.0}, + time.Now()) + // write point immediately + writeAPI.WritePoint(context.Background(), p) + // Create point using fluent style + p = influxdb2.NewPointWithMeasurement("stat"). + AddTag("unit", "temperature"). + AddField("avg", 23.2). + AddField("max", 45.0). 
+ SetTime(time.Now()) + writeAPI.WritePoint(context.Background(), p) + + // Or write directly line protocol + line := fmt.Sprintf("stat,unit=temperature avg=%f,max=%f", 23.5, 45.0) + writeAPI.WriteRecord(context.Background(), line) + + // Get query client + queryAPI := client.QueryAPI("my-org") + // Get parser flux query result + result, err := queryAPI.Query(context.Background(), `from(bucket:"my-bucket")|> range(start: -1h) |> filter(fn: (r) => r._measurement == "stat")`) + if err == nil { + // Use Next() to iterate over query result lines + for result.Next() { + // Observe when there is new grouping key producing new table + if result.TableChanged() { + fmt.Printf("table: %s\n", result.TableMetadata().String()) + } + // read result + fmt.Printf("row: %s\n", result.Record().String()) + } + if result.Err() != nil { + fmt.Printf("Query error: %s\n", result.Err().Error()) + } + } + // Ensures background processes finishes + client.Close() +} +``` +### Options +The InfluxDBClient uses set of options to configure behavior. These are available in the [Options](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2#Options) object +Creating a client instance using +```go +client := influxdb2.NewClient("http://localhost:8086", "my-token") +``` +will use the default options. + +To set different configuration values, e.g. to set gzip compression and trust all server certificates, get default options +and change what is needed: +```go +client := influxdb2.NewClientWithOptions("http://localhost:8086", "my-token", + influxdb2.DefaultOptions(). + SetUseGZip(true). + SetTLSConfig(&tls.Config{ + InsecureSkipVerify: true, + })) +``` +### Writes + +Client offers two ways of writing, non-blocking and blocking. + +### Non-blocking write client +Non-blocking write client uses implicit batching. Data are asynchronously +written to the underlying buffer and they are automatically sent to a server when the size of the write buffer reaches the batch size, default 5000, or the flush interval, default 1s, times out. +Writes are automatically retried on server back pressure. + +This write client also offers synchronous blocking method to ensure that write buffer is flushed and all pending writes are finished, +see [Flush()](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2/api#WriteAPI.Flush) method. +Always use [Close()](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2#Client.Close) method of the client to stop all background processes. + +Asynchronous write client is recommended for frequent periodic writes. 
+ +```go +package main + +import ( + "fmt" + "math/rand" + "time" + + "github.com/influxdata/influxdb-client-go/v2" +) + +func main() { + // Create a new client using an InfluxDB server base URL and an authentication token + // and set batch size to 20 + client := influxdb2.NewClientWithOptions("http://localhost:8086", "my-token", + influxdb2.DefaultOptions().SetBatchSize(20)) + // Get non-blocking write client + writeAPI := client.WriteAPI("my-org","my-bucket") + // write some points + for i := 0; i <100; i++ { + // create point + p := influxdb2.NewPoint( + "system", + map[string]string{ + "id": fmt.Sprintf("rack_%v", i%10), + "vendor": "AWS", + "hostname": fmt.Sprintf("host_%v", i%100), + }, + map[string]interface{}{ + "temperature": rand.Float64() * 80.0, + "disk_free": rand.Float64() * 1000.0, + "disk_total": (i/10 + 1) * 1000000, + "mem_total": (i/100 + 1) * 10000000, + "mem_free": rand.Uint64(), + }, + time.Now()) + // write asynchronously + writeAPI.WritePoint(p) + } + // Force all unwritten data to be sent + writeAPI.Flush() + // Ensures background processes finishes + client.Close() +} +``` +### Handling of failed async writes +WriteAPI by default continues with retrying of failed writes. +Retried are automatically writes that fail on a connection failure or when server returns response HTTP status code >= 429. + +Retrying algorithm uses random exponential strategy to set retry time. +The delay for the next retry attempt is a random value in the interval _retryInterval * exponentialBase^(attempts)_ and _retryInterval * exponentialBase^(attempts+1)_. +If writes of batch repeatedly fails, WriteAPI continues with retrying until _maxRetries_ is reached or the overall retry time of batch exceeds _maxRetryTime_. + +The defaults parameters (part of the WriteOptions) are: + - _retryInterval_=5,000ms + - _exponentialBase_=2 + - _maxRetryDelay_=125,000ms + - _maxRetries_=5 + - _maxRetryTime_=180,000ms + +Retry delays are by default randomly distributed within the ranges: + 1. 5,000-10,000 + 1. 10,000-20,000 + 1. 20,000-40,000 + 1. 40,000-80,000 + 1. 80,000-125,000 + +Setting _retryInterval_ to 0 disables retry strategy and any failed write will discard the batch. + +[WriteFailedCallback](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2/api#WriteFailedCallback) allows advanced controlling of retrying. +It is synchronously notified in case async write fails. +It controls further batch handling by its return value. If it returns `true`, WriteAPI continues with retrying of writes of this batch. Returned `false` means the batch should be discarded. + +### Reading async errors +WriteAPI automatically logs write errors. 
Use [Errors()](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2/api#WriteAPI.Errors) method, which returns the channel for reading errors occuring during async writes, for writing write error to a custom target: + +```go +package main + +import ( + "fmt" + "math/rand" + "time" + + "github.com/influxdata/influxdb-client-go/v2" +) + +func main() { + // Create a new client using an InfluxDB server base URL and an authentication token + client := influxdb2.NewClient("http://localhost:8086", "my-token") + // Get non-blocking write client + writeAPI := client.WriteAPI("my-org", "my-bucket") + // Get errors channel + errorsCh := writeAPI.Errors() + // Create go proc for reading and logging errors + go func() { + for err := range errorsCh { + fmt.Printf("write error: %s\n", err.Error()) + } + }() + // write some points + for i := 0; i < 100; i++ { + // create point + p := influxdb2.NewPointWithMeasurement("stat"). + AddTag("id", fmt.Sprintf("rack_%v", i%10)). + AddTag("vendor", "AWS"). + AddTag("hostname", fmt.Sprintf("host_%v", i%100)). + AddField("temperature", rand.Float64()*80.0). + AddField("disk_free", rand.Float64()*1000.0). + AddField("disk_total", (i/10+1)*1000000). + AddField("mem_total", (i/100+1)*10000000). + AddField("mem_free", rand.Uint64()). + SetTime(time.Now()) + // write asynchronously + writeAPI.WritePoint(p) + } + // Force all unwritten data to be sent + writeAPI.Flush() + // Ensures background processes finishes + client.Close() +} +``` + +### Blocking write client +Blocking write client writes given point(s) synchronously. It doesn't do implicit batching. Batch is created from given set of points. +Implicit batching can be enabled with `WriteAPIBlocking.EnableBatching()`. + +```go +package main + +import ( + "context" + "fmt" + "math/rand" + "time" + + "github.com/influxdata/influxdb-client-go/v2" +) + +func main() { + // Create a new client using an InfluxDB server base URL and an authentication token + client := influxdb2.NewClient("http://localhost:8086", "my-token") + // Get blocking write client + writeAPI := client.WriteAPIBlocking("my-org","my-bucket") + // write some points + for i := 0; i <100; i++ { + // create data point + p := influxdb2.NewPoint( + "system", + map[string]string{ + "id": fmt.Sprintf("rack_%v", i%10), + "vendor": "AWS", + "hostname": fmt.Sprintf("host_%v", i%100), + }, + map[string]interface{}{ + "temperature": rand.Float64() * 80.0, + "disk_free": rand.Float64() * 1000.0, + "disk_total": (i/10 + 1) * 1000000, + "mem_total": (i/100 + 1) * 10000000, + "mem_free": rand.Uint64(), + }, + time.Now()) + // write synchronously + err := writeAPI.WritePoint(context.Background(), p) + if err != nil { + panic(err) + } + } + // Ensures background processes finishes + client.Close() +} +``` + +### Queries +Query client offers retrieving of query results to a parsed representation in a [QueryTableResult](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2/api#QueryTableResult) or to a raw string. + +### QueryTableResult +QueryTableResult offers comfortable way how to deal with flux query CSV response. It parses CSV stream into FluxTableMetaData, FluxColumn and FluxRecord objects +for easy reading the result. 
+ +```go +package main + +import ( + "context" + "fmt" + + "github.com/influxdata/influxdb-client-go/v2" +) + +func main() { + // Create a new client using an InfluxDB server base URL and an authentication token + client := influxdb2.NewClient("http://localhost:8086", "my-token") + // Get query client + queryAPI := client.QueryAPI("my-org") + // get QueryTableResult + result, err := queryAPI.Query(context.Background(), `from(bucket:"my-bucket")|> range(start: -1h) |> filter(fn: (r) => r._measurement == "stat")`) + if err == nil { + // Iterate over query response + for result.Next() { + // Notice when group key has changed + if result.TableChanged() { + fmt.Printf("table: %s\n", result.TableMetadata().String()) + } + // Access data + fmt.Printf("value: %v\n", result.Record().Value()) + } + // check for an error + if result.Err() != nil { + fmt.Printf("query parsing error: %s\n", result.Err().Error()) + } + } else { + panic(err) + } + // Ensures background processes finishes + client.Close() +} +``` + +### Raw +[QueryRaw()](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2/api#QueryAPI.QueryRaw) returns raw, unparsed, query result string and process it on your own. Returned csv format +can be controlled by the third parameter, query dialect. + +```go +package main + +import ( + "context" + "fmt" + + "github.com/influxdata/influxdb-client-go/v2" +) + +func main() { + // Create a new client using an InfluxDB server base URL and an authentication token + client := influxdb2.NewClient("http://localhost:8086", "my-token") + // Get query client + queryAPI := client.QueryAPI("my-org") + // Query and get complete result as a string + // Use default dialect + result, err := queryAPI.QueryRaw(context.Background(), `from(bucket:"my-bucket")|> range(start: -1h) |> filter(fn: (r) => r._measurement == "stat")`, influxdb2.DefaultDialect()) + if err == nil { + fmt.Println("QueryResult:") + fmt.Println(result) + } else { + panic(err) + } + // Ensures background processes finishes + client.Close() +} +``` +### Parametrized Queries +InfluxDB Cloud supports [Parameterized Queries](https://docs.influxdata.com/influxdb/cloud/query-data/parameterized-queries/) +that let you dynamically change values in a query using the InfluxDB API. Parameterized queries make Flux queries more +reusable and can also be used to help prevent injection attacks. + +InfluxDB Cloud inserts the params object into the Flux query as a Flux record named `params`. Use dot or bracket +notation to access parameters in the `params` record in your Flux query. Parameterized Flux queries support only `int` +, `float`, and `string` data types. To convert the supported data types into +other [Flux basic data types, use Flux type conversion functions](https://docs.influxdata.com/influxdb/cloud/query-data/parameterized-queries/#supported-parameter-data-types). + +Query parameters can be passed as a struct or map. Param values can be only simple types or `time.Time`. +The name of the parameter represented by a struct field can be specified by JSON annotation. + +Parameterized query example: +> :warning: Parameterized Queries are supported only in InfluxDB Cloud. There is no support in InfluxDB OSS currently. 
+```go +package main + +import ( + "context" + "fmt" + + "github.com/influxdata/influxdb-client-go/v2" +) + +func main() { + // Create a new client using an InfluxDB server base URL and an authentication token + client := influxdb2.NewClient("http://localhost:8086", "my-token") + // Get query client + queryAPI := client.QueryAPI("my-org") + // Define parameters + parameters := struct { + Start string `json:"start"` + Field string `json:"field"` + Value float64 `json:"value"` + }{ + "-1h", + "temperature", + 25, + } + // Query with parameters + query := `from(bucket:"my-bucket") + |> range(start: duration(params.start)) + |> filter(fn: (r) => r._measurement == "stat") + |> filter(fn: (r) => r._field == params.field) + |> filter(fn: (r) => r._value > params.value)` + + // Get result + result, err := queryAPI.QueryWithParams(context.Background(), query, parameters) + if err == nil { + // Iterate over query response + for result.Next() { + // Notice when group key has changed + if result.TableChanged() { + fmt.Printf("table: %s\n", result.TableMetadata().String()) + } + // Access data + fmt.Printf("value: %v\n", result.Record().Value()) + } + // check for an error + if result.Err() != nil { + fmt.Printf("query parsing error: %s\n", result.Err().Error()) + } + } else { + panic(err) + } + // Ensures background processes finishes + client.Close() +} +``` + +### Concurrency +InfluxDB Go Client can be used in a concurrent environment. All its functions are thread-safe. + +The best practise is to use a single `Client` instance per server URL. This ensures optimized resources usage, +most importantly reusing HTTP connections. + +For efficient reuse of HTTP resources among multiple clients, create an HTTP client and use `Options.SetHTTPClient()` for setting it to all clients: +```go + // Create HTTP client + httpClient := &http.Client{ + Timeout: time.Second * time.Duration(60), + Transport: &http.Transport{ + DialContext: (&net.Dialer{ + Timeout: 5 * time.Second, + }).DialContext, + TLSHandshakeTimeout: 5 * time.Second, + TLSClientConfig: &tls.Config{ + InsecureSkipVerify: true, + }, + MaxIdleConns: 100, + MaxIdleConnsPerHost: 100, + IdleConnTimeout: 90 * time.Second, + }, + } + // Client for server 1 + client1 := influxdb2.NewClientWithOptions("https://server:8086", "my-token", influxdb2.DefaultOptions().SetHTTPClient(httpClient)) + // Client for server 2 + client2 := influxdb2.NewClientWithOptions("https://server:9999", "my-token2", influxdb2.DefaultOptions().SetHTTPClient(httpClient)) +``` + +Client ensures that there is a single instance of each server API sub-client for the specific area. E.g. a single `WriteAPI` instance for each org/bucket pair, +a single `QueryAPI` for each org. 
+ +Such a single API sub-client instance can be used concurrently: +```go +package main + +import ( + "math/rand" + "sync" + "time" + + influxdb2 "github.com/influxdata/influxdb-client-go" + "github.com/influxdata/influxdb-client-go/v2/api/write" +) + +func main() { + // Create client + client := influxdb2.NewClient("http://localhost:8086", "my-token") + // Ensure closing the client + defer client.Close() + + // Get write client + writeApi := client.WriteAPI("my-org", "my-bucket") + + // Create channel for points feeding + pointsCh := make(chan *write.Point, 200) + + threads := 5 + + var wg sync.WaitGroup + go func(points int) { + for i := 0; i < points; i++ { + p := influxdb2.NewPoint("meas", + map[string]string{"tag": "tagvalue"}, + map[string]interface{}{"val1": rand.Int63n(1000), "val2": rand.Float64()*100.0 - 50.0}, + time.Now()) + pointsCh <- p + } + close(pointsCh) + }(1000000) + + // Launch write routines + for t := 0; t < threads; t++ { + wg.Add(1) + go func() { + for p := range pointsCh { + writeApi.WritePoint(p) + } + wg.Done() + }() + } + // Wait for writes complete + wg.Wait() +} +``` + +### Proxy and redirects +You can configure InfluxDB Go client behind a proxy in two ways: + 1. Using environment variable + Set environment variable `HTTP_PROXY` (or `HTTPS_PROXY` based on the scheme of your server url). + e.g. (linux) `export HTTP_PROXY=http://my-proxy:8080` or in Go code `os.Setenv("HTTP_PROXY","http://my-proxy:8080")` + + 1. Configure `http.Client` to use proxy
+ Create a custom `http.Client` with a proxy configuration: + ```go + proxyUrl, err := url.Parse("http://my-proxy:8080") + httpClient := &http.Client{ + Transport: &http.Transport{ + Proxy: http.ProxyURL(proxyUrl) + } + } + client := influxdb2.NewClientWithOptions("http://localhost:8086", token, influxdb2.DefaultOptions().SetHTTPClient(httpClient)) + ``` + + Client automatically follows HTTP redirects. The default redirect policy is to follow up to 10 consecutive requests. + Due to a security reason _Authorization_ header is not forwarded when redirect leads to a different domain. + To overcome this limitation you have to set a custom redirect handler: +```go +token := "my-token" + +httpClient := &http.Client{ + CheckRedirect: func(req *http.Request, via []*http.Request) error { + req.Header.Add("Authorization","Token " + token) + return nil + }, +} +client := influxdb2.NewClientWithOptions("http://localhost:8086", token, influxdb2.DefaultOptions().SetHTTPClient(httpClient)) +``` + +### Checking Server State +There are three functions for checking whether a server is up and ready for communication: + +| Function| Description | Availability | +|:----------|:----------|:----------| +| [Health()](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2#Client.Health) | Detailed info about the server status, along with version string | OSS | +| [Ready()](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2#Client.Ready) | Server uptime info | OSS | +| [Ping()](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2#Client.Ping) | Whether a server is up | OSS, Cloud | + +Only the [Ping()](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2#Client.Ping) function works in InfluxDB Cloud server. + +## InfluxDB 1.8 API compatibility + + [InfluxDB 1.8.0 introduced forward compatibility APIs](https://docs.influxdata.com/influxdb/latest/tools/api/#influxdb-2-0-api-compatibility-endpoints) for InfluxDB 2.0. This allow you to easily move from InfluxDB 1.x to InfluxDB 2.0 Cloud or open source. + + Client API usage differences summary: + 1. Use the form `username:password` for an **authentication token**. Example: `my-user:my-password`. Use an empty string (`""`) if the server doesn't require authentication. + 1. The organization parameter is not used. Use an empty string (`""`) where necessary. + 1. Use the form `database/retention-policy` where a **bucket** is required. Skip retention policy if the default retention policy should be used. Examples: `telegraf/autogen`, `telegraf`.   
+ + The following forward compatible APIs are available: + + | API | Endpoint | Description | + |:----------|:----------|:----------| + | [WriteAPI](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2/api#WriteAPI) (also [WriteAPIBlocking](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2/api#WriteAPIBlocking))| [/api/v2/write](https://docs.influxdata.com/influxdb/v2.0/write-data/developer-tools/api/) | Write data to InfluxDB 1.8.0+ using the InfluxDB 2.0 API | + | [QueryAPI](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2/api#QueryAPI) | [/api/v2/query](https://docs.influxdata.com/influxdb/v2.0/query-data/execute-queries/influx-api/) | Query data in InfluxDB 1.8.0+ using the InfluxDB 2.0 API and [Flux](https://docs.influxdata.com/flux/latest/) endpoint should be enabled by the [`flux-enabled` option](https://docs.influxdata.com/influxdb/v1.8/administration/config/#flux-enabled-false) + | [Health()](https://pkg.go.dev/github.com/influxdata/influxdb-client-go/v2#Client.Health) | [/health](https://docs.influxdata.com/influxdb/v2.0/api/#tag/Health) | Check the health of your InfluxDB instance | + + +### Example +```go +package main + +import ( + "context" + "fmt" + "time" + + "github.com/influxdata/influxdb-client-go/v2" +) + +func main() { + userName := "my-user" + password := "my-password" + // Create a new client using an InfluxDB server base URL and an authentication token + // For authentication token supply a string in the form: "username:password" as a token. Set empty value for an unauthenticated server + client := influxdb2.NewClient("http://localhost:8086", fmt.Sprintf("%s:%s",userName, password)) + // Get the blocking write client + // Supply a string in the form database/retention-policy as a bucket. Skip retention policy for the default one, use just a database name (without the slash character) + // Org name is not used + writeAPI := client.WriteAPIBlocking("", "test/autogen") + // create point using full params constructor + p := influxdb2.NewPoint("stat", + map[string]string{"unit": "temperature"}, + map[string]interface{}{"avg": 24.5, "max": 45}, + time.Now()) + // Write data + err := writeAPI.WritePoint(context.Background(), p) + if err != nil { + fmt.Printf("Write error: %s\n", err.Error()) + } + + // Get query client. Org name is not used + queryAPI := client.QueryAPI("") + // Supply string in a form database/retention-policy as a bucket. Skip retention policy for the default one, use just a database name (without the slash character) + result, err := queryAPI.Query(context.Background(), `from(bucket:"test")|> range(start: -1h) |> filter(fn: (r) => r._measurement == "stat")`) + if err == nil { + for result.Next() { + if result.TableChanged() { + fmt.Printf("table: %s\n", result.TableMetadata().String()) + } + fmt.Printf("row: %s\n", result.Record().String()) + } + if result.Err() != nil { + fmt.Printf("Query error: %s\n", result.Err().Error()) + } + } else { + fmt.Printf("Query error: %s\n", err.Error()) + } + // Close client + client.Close() +} +``` + +## Contributing + +If you would like to contribute code you can do through GitHub by forking the repository and sending a pull request into the `master` branch. + +## License + +The InfluxDB 2 Go Client is released under the [MIT License](https://opensource.org/licenses/MIT). 
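The "Checking Server State" section of the vendored README above compares Health(), Ready() and Ping() in a table but never shows them in code. The sketch below illustrates how such a check might look when wiring this client into an emitter; the server URL and token are placeholders, and the exact shape of the value returned by Health() (Status, Version) is an assumption based on the generated domain package rather than something stated in this diff.

```go
package main

import (
	"context"
	"fmt"
	"time"

	influxdb2 "github.com/influxdata/influxdb-client-go/v2"
)

func main() {
	// Placeholder URL and token; point these at a real InfluxDB 2.x (or 1.8+) server.
	client := influxdb2.NewClient("http://localhost:8086", "my-token")
	defer client.Close()

	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	// Ping() is the only check available on both OSS and Cloud; it reports reachability only.
	up, err := client.Ping(ctx)
	if err != nil {
		fmt.Printf("ping failed: %s\n", err.Error())
		return
	}
	fmt.Printf("server reachable: %v\n", up)

	// Health() (OSS only) returns richer status details; the field names used here are assumed.
	if health, err := client.Health(ctx); err == nil {
		fmt.Printf("health status: %v\n", health.Status)
		if health.Version != nil {
			fmt.Printf("server version: %s\n", *health.Version)
		}
	}
}
```

Such a probe could run once at emitter start-up to fail fast on a bad URL or token before any gravity or temperature records are written.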
diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/authorizations.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/authorizations.go new file mode 100644 index 0000000..692659f --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/authorizations.go @@ -0,0 +1,149 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +package api + +import ( + "context" + + "github.com/influxdata/influxdb-client-go/v2/domain" +) + +// AuthorizationsAPI provides methods for organizing Authorization in a InfluxDB server +type AuthorizationsAPI interface { + // GetAuthorizations returns all authorizations + GetAuthorizations(ctx context.Context) (*[]domain.Authorization, error) + // FindAuthorizationsByUserName returns all authorizations for given userName + FindAuthorizationsByUserName(ctx context.Context, userName string) (*[]domain.Authorization, error) + // FindAuthorizationsByUserID returns all authorizations for given userID + FindAuthorizationsByUserID(ctx context.Context, userID string) (*[]domain.Authorization, error) + // FindAuthorizationsByOrgName returns all authorizations for given organization name + FindAuthorizationsByOrgName(ctx context.Context, orgName string) (*[]domain.Authorization, error) + // FindAuthorizationsByOrgID returns all authorizations for given organization id + FindAuthorizationsByOrgID(ctx context.Context, orgID string) (*[]domain.Authorization, error) + // CreateAuthorization creates new authorization + CreateAuthorization(ctx context.Context, authorization *domain.Authorization) (*domain.Authorization, error) + // CreateAuthorizationWithOrgID creates new authorization with given permissions scoped to given orgID + CreateAuthorizationWithOrgID(ctx context.Context, orgID string, permissions []domain.Permission) (*domain.Authorization, error) + // UpdateAuthorizationStatus updates status of authorization + UpdateAuthorizationStatus(ctx context.Context, authorization *domain.Authorization, status domain.AuthorizationUpdateRequestStatus) (*domain.Authorization, error) + // UpdateAuthorizationStatusWithID updates status of authorization with authID + UpdateAuthorizationStatusWithID(ctx context.Context, authID string, status domain.AuthorizationUpdateRequestStatus) (*domain.Authorization, error) + // DeleteAuthorization deletes authorization + DeleteAuthorization(ctx context.Context, authorization *domain.Authorization) error + // DeleteAuthorization deletes authorization with authID + DeleteAuthorizationWithID(ctx context.Context, authID string) error +} + +// authorizationsAPI implements AuthorizationsAPI +type authorizationsAPI struct { + apiClient *domain.ClientWithResponses +} + +// NewAuthorizationsAPI creates new instance of AuthorizationsAPI +func NewAuthorizationsAPI(apiClient *domain.ClientWithResponses) AuthorizationsAPI { + return &authorizationsAPI{ + apiClient: apiClient, + } +} + +func (a *authorizationsAPI) GetAuthorizations(ctx context.Context) (*[]domain.Authorization, error) { + authQuery := &domain.GetAuthorizationsParams{} + return a.listAuthorizations(ctx, authQuery) +} + +func (a *authorizationsAPI) FindAuthorizationsByUserName(ctx context.Context, userName string) (*[]domain.Authorization, error) { + authQuery := &domain.GetAuthorizationsParams{User: &userName} + return a.listAuthorizations(ctx, authQuery) +} + +func (a *authorizationsAPI) FindAuthorizationsByUserID(ctx context.Context, userID 
string) (*[]domain.Authorization, error) { + authQuery := &domain.GetAuthorizationsParams{UserID: &userID} + return a.listAuthorizations(ctx, authQuery) +} + +func (a *authorizationsAPI) FindAuthorizationsByOrgName(ctx context.Context, orgName string) (*[]domain.Authorization, error) { + authQuery := &domain.GetAuthorizationsParams{Org: &orgName} + return a.listAuthorizations(ctx, authQuery) +} + +func (a *authorizationsAPI) FindAuthorizationsByOrgID(ctx context.Context, orgID string) (*[]domain.Authorization, error) { + authQuery := &domain.GetAuthorizationsParams{OrgID: &orgID} + return a.listAuthorizations(ctx, authQuery) +} + +func (a *authorizationsAPI) listAuthorizations(ctx context.Context, query *domain.GetAuthorizationsParams) (*[]domain.Authorization, error) { + response, err := a.apiClient.GetAuthorizationsWithResponse(ctx, query) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON200.Authorizations, nil +} + +func (a *authorizationsAPI) CreateAuthorization(ctx context.Context, authorization *domain.Authorization) (*domain.Authorization, error) { + params := &domain.PostAuthorizationsParams{} + req := domain.PostAuthorizationsJSONRequestBody{ + AuthorizationUpdateRequest: authorization.AuthorizationUpdateRequest, + OrgID: authorization.OrgID, + Permissions: authorization.Permissions, + UserID: authorization.UserID, + } + response, err := a.apiClient.PostAuthorizationsWithResponse(ctx, params, req) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + if response.JSON400 != nil { + return nil, domain.ErrorToHTTPError(response.JSON400, response.StatusCode()) + } + return response.JSON201, nil +} + +func (a *authorizationsAPI) CreateAuthorizationWithOrgID(ctx context.Context, orgID string, permissions []domain.Permission) (*domain.Authorization, error) { + status := domain.AuthorizationUpdateRequestStatusActive + auth := &domain.Authorization{ + AuthorizationUpdateRequest: domain.AuthorizationUpdateRequest{Status: &status}, + OrgID: &orgID, + Permissions: &permissions, + } + return a.CreateAuthorization(ctx, auth) +} + +func (a *authorizationsAPI) UpdateAuthorizationStatusWithID(ctx context.Context, authID string, status domain.AuthorizationUpdateRequestStatus) (*domain.Authorization, error) { + params := &domain.PatchAuthorizationsIDParams{} + body := &domain.PatchAuthorizationsIDJSONRequestBody{Status: &status} + response, err := a.apiClient.PatchAuthorizationsIDWithResponse(ctx, authID, params, *body) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON200, nil +} + +func (a *authorizationsAPI) UpdateAuthorizationStatus(ctx context.Context, authorization *domain.Authorization, status domain.AuthorizationUpdateRequestStatus) (*domain.Authorization, error) { + return a.UpdateAuthorizationStatusWithID(ctx, *authorization.Id, status) +} + +func (a *authorizationsAPI) DeleteAuthorization(ctx context.Context, authorization *domain.Authorization) error { + return a.DeleteAuthorizationWithID(ctx, *authorization.Id) +} + +func (a *authorizationsAPI) DeleteAuthorizationWithID(ctx context.Context, authID string) error { + params := &domain.DeleteAuthorizationsIDParams{} + response, err := 
a.apiClient.DeleteAuthorizationsIDWithResponse(ctx, authID, params) + if err != nil { + return err + } + if response.JSONDefault != nil { + return domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return nil +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/buckets.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/buckets.go new file mode 100644 index 0000000..1befc37 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/buckets.go @@ -0,0 +1,323 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +package api + +import ( + "context" + "fmt" + "github.com/influxdata/influxdb-client-go/v2/domain" +) + +// BucketsAPI provides methods for managing Buckets in a InfluxDB server. +type BucketsAPI interface { + // GetBuckets returns all buckets. + // GetBuckets supports PagingOptions: Offset, Limit, After. Empty pagingOptions means the default paging (first 20 results). + GetBuckets(ctx context.Context, pagingOptions ...PagingOption) (*[]domain.Bucket, error) + // FindBucketByName returns a bucket found using bucketName. + FindBucketByName(ctx context.Context, bucketName string) (*domain.Bucket, error) + // FindBucketByID returns a bucket found using bucketID. + FindBucketByID(ctx context.Context, bucketID string) (*domain.Bucket, error) + // FindBucketsByOrgID returns buckets belonging to the organization with ID orgID. + // FindBucketsByOrgID supports PagingOptions: Offset, Limit, After. Empty pagingOptions means the default paging (first 20 results). + FindBucketsByOrgID(ctx context.Context, orgID string, pagingOptions ...PagingOption) (*[]domain.Bucket, error) + // FindBucketsByOrgName returns buckets belonging to the organization with name orgName, with the specified paging. Empty pagingOptions means the default paging (first 20 results). + FindBucketsByOrgName(ctx context.Context, orgName string, pagingOptions ...PagingOption) (*[]domain.Bucket, error) + // CreateBucket creates a new bucket. + CreateBucket(ctx context.Context, bucket *domain.Bucket) (*domain.Bucket, error) + // CreateBucketWithName creates a new bucket with bucketName in organization org, with retention specified in rules. Empty rules means infinite retention. + CreateBucketWithName(ctx context.Context, org *domain.Organization, bucketName string, rules ...domain.RetentionRule) (*domain.Bucket, error) + // CreateBucketWithNameWithID creates a new bucket with bucketName in organization with orgID, with retention specified in rules. Empty rules means infinite retention. + CreateBucketWithNameWithID(ctx context.Context, orgID, bucketName string, rules ...domain.RetentionRule) (*domain.Bucket, error) + // UpdateBucket updates a bucket. + UpdateBucket(ctx context.Context, bucket *domain.Bucket) (*domain.Bucket, error) + // DeleteBucket deletes a bucket. + DeleteBucket(ctx context.Context, bucket *domain.Bucket) error + // DeleteBucketWithID deletes a bucket with bucketID. + DeleteBucketWithID(ctx context.Context, bucketID string) error + // GetMembers returns members of a bucket. + GetMembers(ctx context.Context, bucket *domain.Bucket) (*[]domain.ResourceMember, error) + // GetMembersWithID returns members of a bucket with bucketID. + GetMembersWithID(ctx context.Context, bucketID string) (*[]domain.ResourceMember, error) + // AddMember adds a member to a bucket. 
+ AddMember(ctx context.Context, bucket *domain.Bucket, user *domain.User) (*domain.ResourceMember, error) + // AddMemberWithID adds a member with id memberID to a bucket with bucketID. + AddMemberWithID(ctx context.Context, bucketID, memberID string) (*domain.ResourceMember, error) + // RemoveMember removes a member from a bucket. + RemoveMember(ctx context.Context, bucket *domain.Bucket, user *domain.User) error + // RemoveMemberWithID removes a member with id memberID from a bucket with bucketID. + RemoveMemberWithID(ctx context.Context, bucketID, memberID string) error + // GetOwners returns owners of a bucket. + GetOwners(ctx context.Context, bucket *domain.Bucket) (*[]domain.ResourceOwner, error) + // GetOwnersWithID returns owners of a bucket with bucketID. + GetOwnersWithID(ctx context.Context, bucketID string) (*[]domain.ResourceOwner, error) + // AddOwner adds an owner to a bucket. + AddOwner(ctx context.Context, bucket *domain.Bucket, user *domain.User) (*domain.ResourceOwner, error) + // AddOwnerWithID adds an owner with id memberID to a bucket with bucketID. + AddOwnerWithID(ctx context.Context, bucketID, memberID string) (*domain.ResourceOwner, error) + // RemoveOwner removes an owner from a bucket. + RemoveOwner(ctx context.Context, bucket *domain.Bucket, user *domain.User) error + // RemoveOwnerWithID removes a member with id memberID from a bucket with bucketID. + RemoveOwnerWithID(ctx context.Context, bucketID, memberID string) error +} + +// bucketsAPI implements BucketsAPI +type bucketsAPI struct { + apiClient *domain.ClientWithResponses +} + +// NewBucketsAPI creates new instance of BucketsAPI +func NewBucketsAPI(apiClient *domain.ClientWithResponses) BucketsAPI { + return &bucketsAPI{ + apiClient: apiClient, + } +} + +func (b *bucketsAPI) GetBuckets(ctx context.Context, pagingOptions ...PagingOption) (*[]domain.Bucket, error) { + return b.getBuckets(ctx, nil, pagingOptions...) 
+} + +func (b *bucketsAPI) getBuckets(ctx context.Context, params *domain.GetBucketsParams, pagingOptions ...PagingOption) (*[]domain.Bucket, error) { + if params == nil { + params = &domain.GetBucketsParams{} + } + options := defaultPaging() + for _, opt := range pagingOptions { + opt(options) + } + if options.limit > 0 { + params.Limit = &options.limit + } + params.Offset = &options.offset + + response, err := b.apiClient.GetBucketsWithResponse(ctx, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON200.Buckets, nil +} + +func (b *bucketsAPI) FindBucketByName(ctx context.Context, bucketName string) (*domain.Bucket, error) { + params := &domain.GetBucketsParams{Name: &bucketName} + response, err := b.apiClient.GetBucketsWithResponse(ctx, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + if response.JSON200.Buckets != nil && len(*response.JSON200.Buckets) > 0 { + return &(*response.JSON200.Buckets)[0], nil + } + return nil, fmt.Errorf("bucket '%s' not found", bucketName) +} + +func (b *bucketsAPI) FindBucketByID(ctx context.Context, bucketID string) (*domain.Bucket, error) { + params := &domain.GetBucketsIDParams{} + response, err := b.apiClient.GetBucketsIDWithResponse(ctx, bucketID, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON200, nil +} + +func (b *bucketsAPI) FindBucketsByOrgID(ctx context.Context, orgID string, pagingOptions ...PagingOption) (*[]domain.Bucket, error) { + params := &domain.GetBucketsParams{OrgID: &orgID} + return b.getBuckets(ctx, params, pagingOptions...) +} + +func (b *bucketsAPI) FindBucketsByOrgName(ctx context.Context, orgName string, pagingOptions ...PagingOption) (*[]domain.Bucket, error) { + params := &domain.GetBucketsParams{Org: &orgName} + return b.getBuckets(ctx, params, pagingOptions...) 
+} + +func (b *bucketsAPI) createBucket(ctx context.Context, bucketReq *domain.PostBucketRequest) (*domain.Bucket, error) { + params := &domain.PostBucketsParams{} + response, err := b.apiClient.PostBucketsWithResponse(ctx, params, domain.PostBucketsJSONRequestBody(*bucketReq)) + if err != nil { + return nil, err + } + if response.JSON422 != nil { + return nil, domain.ErrorToHTTPError(response.JSON422, response.StatusCode()) + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON201, nil +} + +func (b *bucketsAPI) CreateBucket(ctx context.Context, bucket *domain.Bucket) (*domain.Bucket, error) { + bucketReq := &domain.PostBucketRequest{ + Description: bucket.Description, + Name: bucket.Name, + OrgID: *bucket.OrgID, + RetentionRules: bucket.RetentionRules, + Rp: bucket.Rp, + } + return b.createBucket(ctx, bucketReq) +} + +func (b *bucketsAPI) CreateBucketWithNameWithID(ctx context.Context, orgID, bucketName string, rules ...domain.RetentionRule) (*domain.Bucket, error) { + bucket := &domain.PostBucketRequest{Name: bucketName, OrgID: orgID, RetentionRules: rules} + return b.createBucket(ctx, bucket) +} +func (b *bucketsAPI) CreateBucketWithName(ctx context.Context, org *domain.Organization, bucketName string, rules ...domain.RetentionRule) (*domain.Bucket, error) { + return b.CreateBucketWithNameWithID(ctx, *org.Id, bucketName, rules...) +} + +func (b *bucketsAPI) DeleteBucket(ctx context.Context, bucket *domain.Bucket) error { + return b.DeleteBucketWithID(ctx, *bucket.Id) +} + +func (b *bucketsAPI) DeleteBucketWithID(ctx context.Context, bucketID string) error { + params := &domain.DeleteBucketsIDParams{} + response, err := b.apiClient.DeleteBucketsIDWithResponse(ctx, bucketID, params) + if err != nil { + return err + } + if response.JSONDefault != nil { + return domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + if response.JSON404 != nil { + return domain.ErrorToHTTPError(response.JSON404, response.StatusCode()) + } + return nil +} + +func (b *bucketsAPI) UpdateBucket(ctx context.Context, bucket *domain.Bucket) (*domain.Bucket, error) { + params := &domain.PatchBucketsIDParams{} + req := domain.PatchBucketsIDJSONRequestBody{ + Description: bucket.Description, + Name: &bucket.Name, + RetentionRules: retentionRulesToPatchRetentionRules(&bucket.RetentionRules), + } + response, err := b.apiClient.PatchBucketsIDWithResponse(ctx, *bucket.Id, params, req) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON200, nil +} + +func (b *bucketsAPI) GetMembers(ctx context.Context, bucket *domain.Bucket) (*[]domain.ResourceMember, error) { + return b.GetMembersWithID(ctx, *bucket.Id) +} + +func (b *bucketsAPI) GetMembersWithID(ctx context.Context, bucketID string) (*[]domain.ResourceMember, error) { + params := &domain.GetBucketsIDMembersParams{} + response, err := b.apiClient.GetBucketsIDMembersWithResponse(ctx, bucketID, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON200.Users, nil +} + +func (b *bucketsAPI) AddMember(ctx context.Context, bucket *domain.Bucket, user *domain.User) (*domain.ResourceMember, error) { + return b.AddMemberWithID(ctx, *bucket.Id, *user.Id) +} + +func (b *bucketsAPI) AddMemberWithID(ctx 
context.Context, bucketID, memberID string) (*domain.ResourceMember, error) { + params := &domain.PostBucketsIDMembersParams{} + body := &domain.PostBucketsIDMembersJSONRequestBody{Id: memberID} + response, err := b.apiClient.PostBucketsIDMembersWithResponse(ctx, bucketID, params, *body) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON201, nil +} + +func (b *bucketsAPI) RemoveMember(ctx context.Context, bucket *domain.Bucket, user *domain.User) error { + return b.RemoveMemberWithID(ctx, *bucket.Id, *user.Id) +} + +func (b *bucketsAPI) RemoveMemberWithID(ctx context.Context, bucketID, memberID string) error { + params := &domain.DeleteBucketsIDMembersIDParams{} + response, err := b.apiClient.DeleteBucketsIDMembersIDWithResponse(ctx, bucketID, memberID, params) + if err != nil { + return err + } + if response.JSONDefault != nil { + return domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return nil +} + +func (b *bucketsAPI) GetOwners(ctx context.Context, bucket *domain.Bucket) (*[]domain.ResourceOwner, error) { + return b.GetOwnersWithID(ctx, *bucket.Id) +} + +func (b *bucketsAPI) GetOwnersWithID(ctx context.Context, bucketID string) (*[]domain.ResourceOwner, error) { + params := &domain.GetBucketsIDOwnersParams{} + response, err := b.apiClient.GetBucketsIDOwnersWithResponse(ctx, bucketID, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON200.Users, nil +} + +func (b *bucketsAPI) AddOwner(ctx context.Context, bucket *domain.Bucket, user *domain.User) (*domain.ResourceOwner, error) { + return b.AddOwnerWithID(ctx, *bucket.Id, *user.Id) +} + +func (b *bucketsAPI) AddOwnerWithID(ctx context.Context, bucketID, memberID string) (*domain.ResourceOwner, error) { + params := &domain.PostBucketsIDOwnersParams{} + body := &domain.PostBucketsIDOwnersJSONRequestBody{Id: memberID} + response, err := b.apiClient.PostBucketsIDOwnersWithResponse(ctx, bucketID, params, *body) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON201, nil +} + +func (b *bucketsAPI) RemoveOwner(ctx context.Context, bucket *domain.Bucket, user *domain.User) error { + return b.RemoveOwnerWithID(ctx, *bucket.Id, *user.Id) +} + +func (b *bucketsAPI) RemoveOwnerWithID(ctx context.Context, bucketID, memberID string) error { + params := &domain.DeleteBucketsIDOwnersIDParams{} + response, err := b.apiClient.DeleteBucketsIDOwnersIDWithResponse(ctx, bucketID, memberID, params) + if err != nil { + return err + } + if response.JSONDefault != nil { + return domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return nil +} + +func retentionRulesToPatchRetentionRules(rrs *domain.RetentionRules) *domain.PatchRetentionRules { + if rrs == nil { + return nil + } + prrs := make([]domain.PatchRetentionRule, len(*rrs)) + for i, rr := range *rrs { + prrs[i] = domain.PatchRetentionRule{ + EverySeconds: &rr.EverySeconds, + ShardGroupDurationSeconds: rr.ShardGroupDurationSeconds, + Type: domain.PatchRetentionRuleType(rr.Type), + } + } + dprrs := domain.PatchRetentionRules(prrs) + return &dprrs +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/delete.go 
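For context, a minimal sketch of how a consumer might use the vendored buckets API above to find or create its target bucket. The server URL, token, and org/bucket names are placeholders, and influxdb2.NewClient is the root package's usual constructor:

```go
package main

import (
	"context"
	"fmt"

	influxdb2 "github.com/influxdata/influxdb-client-go/v2"
)

func main() {
	// Placeholder connection details.
	client := influxdb2.NewClient("http://localhost:8086", "my-token")
	defer client.Close()

	ctx := context.Background()
	bucketsAPI := client.BucketsAPI()

	// Look the bucket up by name; if the lookup fails (e.g. not found),
	// try to create it under the organization instead.
	bucket, err := bucketsAPI.FindBucketByName(ctx, "my-bucket")
	if err != nil {
		org, orgErr := client.OrganizationsAPI().FindOrganizationByName(ctx, "my-org")
		if orgErr != nil {
			panic(orgErr)
		}
		bucket, err = bucketsAPI.CreateBucketWithName(ctx, org, "my-bucket")
		if err != nil {
			panic(err)
		}
	}
	fmt.Println("bucket ID:", *bucket.Id)
}
```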
b/vendor/github.com/influxdata/influxdb-client-go/v2/api/delete.go new file mode 100644 index 0000000..f40b571 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/delete.go @@ -0,0 +1,96 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +package api + +import ( + "context" + "github.com/influxdata/influxdb-client-go/v2/domain" + "time" +) + +// DeleteAPI provides methods for deleting time series data from buckets. +// Deleted series are selected by the time range specified by the start and stop arguments and an optional predicate string, which contains the condition for selecting data for deletion, such as: +// tag1="value1" and (tag2="value2" and tag3!="value3") +// An empty predicate string means all data from the given time range will be deleted. See https://v2.docs.influxdata.com/v2.0/reference/syntax/delete-predicate/ +// for more info about predicate syntax. +type DeleteAPI interface { + // Delete deletes series selected by the time range specified by the start and stop arguments and an optional predicate string from the bucket bucket belonging to the organization org. + Delete(ctx context.Context, org *domain.Organization, bucket *domain.Bucket, start, stop time.Time, predicate string) error + // DeleteWithID deletes series selected by the time range specified by the start and stop arguments and an optional predicate string from the bucket with ID bucketID belonging to the organization with ID orgID. + DeleteWithID(ctx context.Context, orgID, bucketID string, start, stop time.Time, predicate string) error + // DeleteWithName deletes series selected by the time range specified by the start and stop arguments and an optional predicate string from the bucket with name bucketName belonging to the organization with name orgName.
+ DeleteWithName(ctx context.Context, orgName, bucketName string, start, stop time.Time, predicate string) error +} + +// deleteAPI implements DeleteAPI +type deleteAPI struct { + apiClient *domain.ClientWithResponses +} + +// NewDeleteAPI creates new instance of DeleteAPI +func NewDeleteAPI(apiClient *domain.ClientWithResponses) DeleteAPI { + return &deleteAPI{ + apiClient: apiClient, + } +} + +func (d *deleteAPI) delete(ctx context.Context, params *domain.PostDeleteParams, conditions *domain.DeletePredicateRequest) error { + resp, err := d.apiClient.PostDeleteWithResponse(ctx, params, domain.PostDeleteJSONRequestBody(*conditions)) + if err != nil { + return err + } + if resp.JSON404 != nil { + return domain.ErrorToHTTPError(resp.JSON404, resp.StatusCode()) + } + if resp.JSON403 != nil { + return domain.ErrorToHTTPError(resp.JSON403, resp.StatusCode()) + } + if resp.JSON400 != nil { + return domain.ErrorToHTTPError(resp.JSON400, resp.StatusCode()) + } + if resp.JSONDefault != nil { + return domain.ErrorToHTTPError(resp.JSONDefault, resp.StatusCode()) + } + return nil +} + +func (d *deleteAPI) Delete(ctx context.Context, org *domain.Organization, bucket *domain.Bucket, start, stop time.Time, predicate string) error { + params := &domain.PostDeleteParams{ + OrgID: org.Id, + BucketID: bucket.Id, + } + conditions := &domain.DeletePredicateRequest{ + Predicate: &predicate, + Start: start, + Stop: stop, + } + return d.delete(ctx, params, conditions) +} + +func (d *deleteAPI) DeleteWithID(ctx context.Context, orgID, bucketID string, start, stop time.Time, predicate string) error { + params := &domain.PostDeleteParams{ + OrgID: &orgID, + BucketID: &bucketID, + } + conditions := &domain.DeletePredicateRequest{ + Predicate: &predicate, + Start: start, + Stop: stop, + } + return d.delete(ctx, params, conditions) +} + +func (d *deleteAPI) DeleteWithName(ctx context.Context, orgName, bucketName string, start, stop time.Time, predicate string) error { + params := &domain.PostDeleteParams{ + Org: &orgName, + Bucket: &bucketName, + } + conditions := &domain.DeletePredicateRequest{ + Predicate: &predicate, + Start: start, + Stop: stop, + } + return d.delete(ctx, params, conditions) +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/doc.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/doc.go new file mode 100644 index 0000000..df24f95 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/doc.go @@ -0,0 +1,6 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +// Package api provides clients for InfluxDB server APIs. +package api diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/http/error.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/http/error.go new file mode 100644 index 0000000..2cbcae4 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/http/error.go @@ -0,0 +1,49 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. 
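A short, hypothetical use of the delete API above, dropping the last hour of data for one tag value (URL, token, org, bucket, and the predicate are placeholders):

```go
package main

import (
	"context"
	"time"

	influxdb2 "github.com/influxdata/influxdb-client-go/v2"
)

func main() {
	client := influxdb2.NewClient("http://localhost:8086", "my-token")
	defer client.Close()

	// Delete by org and bucket name; an empty predicate would remove
	// everything in the time range.
	err := client.DeleteAPI().DeleteWithName(
		context.Background(),
		"my-org", "my-bucket",
		time.Now().Add(-time.Hour), time.Now(),
		`color="BLACK"`,
	)
	if err != nil {
		panic(err)
	}
}
```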
+ +package http + +import ( + "fmt" + "strconv" +) + +// Error represent error response from InfluxDBServer or http error +type Error struct { + StatusCode int + Code string + Message string + Err error + RetryAfter uint +} + +// Error fulfils error interface +func (e *Error) Error() string { + switch { + case e.Err != nil: + return e.Err.Error() + case e.Code != "" && e.Message != "": + return fmt.Sprintf("%s: %s", e.Code, e.Message) + default: + return "Unexpected status code " + strconv.Itoa(e.StatusCode) + } +} + +func (e *Error) Unwrap() error { + if e.Err != nil { + return e.Err + } + return nil +} + +// NewError returns newly created Error initialised with nested error and default values +func NewError(err error) *Error { + return &Error{ + StatusCode: 0, + Code: "", + Message: "", + Err: err, + RetryAfter: 0, + } +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/http/options.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/http/options.go new file mode 100644 index 0000000..66ef539 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/http/options.go @@ -0,0 +1,125 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +package http + +import ( + "crypto/tls" + "net" + "net/http" + "time" +) + +// Options holds http configuration properties for communicating with InfluxDB server +type Options struct { + // HTTP client. Default is http.DefaultClient. + httpClient *http.Client + // doer is an http Doer - if set it overrides httpClient + doer Doer + // Flag whether http client was created internally + ownClient bool + // TLS configuration for secure connection. Default nil + tlsConfig *tls.Config + // HTTP request timeout in sec. Default 20 + httpRequestTimeout uint +} + +// HTTPClient returns the http.Client that is configured to be used +// for HTTP requests. It will return the one that has been set using +// SetHTTPClient or it will construct a default client using the +// other configured options. +// HTTPClient panics if SetHTTPDoer was called. +func (o *Options) HTTPClient() *http.Client { + if o.doer != nil { + panic("HTTPClient called after SetHTTPDoer") + } + if o.httpClient == nil { + o.httpClient = &http.Client{ + Timeout: time.Second * time.Duration(o.HTTPRequestTimeout()), + Transport: &http.Transport{ + DialContext: (&net.Dialer{ + Timeout: 5 * time.Second, + }).DialContext, + TLSHandshakeTimeout: 5 * time.Second, + TLSClientConfig: o.TLSConfig(), + MaxIdleConns: 100, + MaxIdleConnsPerHost: 100, + IdleConnTimeout: 90 * time.Second, + }, + } + o.ownClient = true + } + return o.httpClient +} + +// SetHTTPClient will configure the http.Client that is used +// for HTTP requests. If set to nil, an HTTPClient will be +// generated. +// +// Setting the HTTPClient will cause the other HTTP options +// to be ignored. +// In case of UsersAPI.SignIn() is used, HTTPClient.Jar will be used for storing session cookie. +func (o *Options) SetHTTPClient(c *http.Client) *Options { + o.httpClient = c + o.ownClient = false + return o +} + +// OwnHTTPClient returns true of HTTP client was created internally. False if it was set externally. +func (o *Options) OwnHTTPClient() bool { + return o.ownClient +} + +// Doer allows proving custom Do for HTTP operations +type Doer interface { + Do(*http.Request) (*http.Response, error) +} + +// SetHTTPDoer will configure the http.Client that is used +// for HTTP requests. 
If set to nil, this has no effect. +// +// Setting the HTTPDoer will cause the other HTTP options +// to be ignored. +func (o *Options) SetHTTPDoer(d Doer) *Options { + if d != nil { + o.doer = d + o.ownClient = false + } + return o +} + +// HTTPDoer returns actual Doer if set, or http.Client +func (o *Options) HTTPDoer() Doer { + if o.doer != nil { + return o.doer + } + return o.HTTPClient() +} + +// TLSConfig returns tls.Config +func (o *Options) TLSConfig() *tls.Config { + return o.tlsConfig +} + +// SetTLSConfig sets TLS configuration for secure connection +func (o *Options) SetTLSConfig(tlsConfig *tls.Config) *Options { + o.tlsConfig = tlsConfig + return o +} + +// HTTPRequestTimeout returns HTTP request timeout +func (o *Options) HTTPRequestTimeout() uint { + return o.httpRequestTimeout +} + +// SetHTTPRequestTimeout sets HTTP request timeout in sec +func (o *Options) SetHTTPRequestTimeout(httpRequestTimeout uint) *Options { + o.httpRequestTimeout = httpRequestTimeout + return o +} + +// DefaultOptions returns Options object with default values +func DefaultOptions() *Options { + return &Options{httpRequestTimeout: 20} +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/http/service.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/http/service.go new file mode 100644 index 0000000..aa8cd1b --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/http/service.go @@ -0,0 +1,190 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +// Package http provides HTTP servicing related code. +// +// Important type is Service which handles HTTP operations. It is internally used by library and it is not necessary to use it directly for common operations. +// It can be useful when creating custom InfluxDB2 server API calls using generated code from the domain package, that are not yet exposed by API of this library. +// +// Service can be obtained from client using HTTPService() method. +// It can be also created directly. To instantiate a Service use NewService(). Remember, the authorization param is in form "Token your-auth-token". e.g. "Token DXnd7annkGteV5Wqx9G3YjO9Ezkw87nHk8OabcyHCxF5451kdBV0Ag2cG7OmZZgCUTHroagUPdxbuoyen6TSPw==". 
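As an illustration of the HTTP options type above, a sketch that chains the setters (the permissive TLS config is only appropriate against test servers):

```go
package main

import (
	"crypto/tls"
	"fmt"

	ihttp "github.com/influxdata/influxdb-client-go/v2/api/http"
)

func main() {
	// Skip TLS verification (test use only) and allow requests
	// to run for up to 30 seconds.
	opts := ihttp.DefaultOptions().
		SetTLSConfig(&tls.Config{InsecureSkipVerify: true}).
		SetHTTPRequestTimeout(30)

	fmt.Println("timeout:", opts.HTTPRequestTimeout(), "tls set:", opts.TLSConfig() != nil)
}
```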
+// srv := http.NewService("http://localhost:8086", "Token my-token", http.DefaultOptions()) +package http + +import ( + "context" + "encoding/json" + "io" + "io/ioutil" + "mime" + "net/http" + "net/url" + "strconv" + + http2 "github.com/influxdata/influxdb-client-go/v2/internal/http" + "github.com/influxdata/influxdb-client-go/v2/internal/log" +) + +// RequestCallback defines function called after a request is created before any call +type RequestCallback func(req *http.Request) + +// ResponseCallback defines function called after a successful response was received +type ResponseCallback func(resp *http.Response) error + +// Service handles HTTP operations with taking care of mandatory request headers and known errors +type Service interface { + // DoPostRequest sends HTTP POST request to the given url with body + DoPostRequest(ctx context.Context, url string, body io.Reader, requestCallback RequestCallback, responseCallback ResponseCallback) *Error + // DoHTTPRequest sends given HTTP request and handles response + DoHTTPRequest(req *http.Request, requestCallback RequestCallback, responseCallback ResponseCallback) *Error + // DoHTTPRequestWithResponse sends given HTTP request and returns response + DoHTTPRequestWithResponse(req *http.Request, requestCallback RequestCallback) (*http.Response, error) + // SetAuthorization sets the authorization header value + SetAuthorization(authorization string) + // Authorization returns current authorization header value + Authorization() string + // ServerAPIURL returns URL to InfluxDB2 server API space + ServerAPIURL() string + // ServerURL returns URL to InfluxDB2 server + ServerURL() string +} + +// service implements Service interface +type service struct { + serverAPIURL string + serverURL string + authorization string + client Doer +} + +// NewService creates instance of http Service with given parameters +func NewService(serverURL, authorization string, httpOptions *Options) Service { + apiURL, err := url.Parse(serverURL) + serverAPIURL := serverURL + if err == nil { + apiURL, err = apiURL.Parse("api/v2/") + if err == nil { + serverAPIURL = apiURL.String() + } + } + return &service{ + serverAPIURL: serverAPIURL, + serverURL: serverURL, + authorization: authorization, + client: httpOptions.HTTPDoer(), + } +} + +func (s *service) ServerAPIURL() string { + return s.serverAPIURL +} + +func (s *service) ServerURL() string { + return s.serverURL +} + +func (s *service) SetAuthorization(authorization string) { + s.authorization = authorization +} + +func (s *service) Authorization() string { + return s.authorization +} + +func (s *service) DoPostRequest(ctx context.Context, url string, body io.Reader, requestCallback RequestCallback, responseCallback ResponseCallback) *Error { + return s.doHTTPRequestWithURL(ctx, http.MethodPost, url, body, requestCallback, responseCallback) +} + +func (s *service) doHTTPRequestWithURL(ctx context.Context, method, url string, body io.Reader, requestCallback RequestCallback, responseCallback ResponseCallback) *Error { + req, err := http.NewRequestWithContext(ctx, method, url, body) + if err != nil { + return NewError(err) + } + return s.DoHTTPRequest(req, requestCallback, responseCallback) +} + +func (s *service) DoHTTPRequest(req *http.Request, requestCallback RequestCallback, responseCallback ResponseCallback) *Error { + resp, err := s.DoHTTPRequestWithResponse(req, requestCallback) + if err != nil { + return NewError(err) + } + + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + return s.parseHTTPError(resp) + } 
+ if responseCallback != nil { + err := responseCallback(resp) + if err != nil { + return NewError(err) + } + } + return nil +} + +func (s *service) DoHTTPRequestWithResponse(req *http.Request, requestCallback RequestCallback) (*http.Response, error) { + log.Infof("HTTP %s req to %s", req.Method, req.URL.String()) + if len(s.authorization) > 0 { + req.Header.Set("Authorization", s.authorization) + } + if req.Header.Get("User-Agent") == "" { + req.Header.Set("User-Agent", http2.UserAgent) + } + if requestCallback != nil { + requestCallback(req) + } + return s.client.Do(req) +} + +func (s *service) parseHTTPError(r *http.Response) *Error { + // successful status code range + if r.StatusCode >= 200 && r.StatusCode < 300 { + return nil + } + defer func() { + // discard body so connection can be reused + _, _ = io.Copy(ioutil.Discard, r.Body) + _ = r.Body.Close() + }() + + perror := NewError(nil) + perror.StatusCode = r.StatusCode + + if v := r.Header.Get("Retry-After"); v != "" { + r, err := strconv.ParseUint(v, 10, 32) + if err == nil { + perror.RetryAfter = uint(r) + } + } + + // json encoded error + ctype, _, _ := mime.ParseMediaType(r.Header.Get("Content-Type")) + if ctype == "application/json" { + perror.Err = json.NewDecoder(r.Body).Decode(perror) + } else { + body, err := ioutil.ReadAll(r.Body) + if err != nil { + perror.Err = err + return perror + } + + perror.Code = r.Status + perror.Message = string(body) + } + + if perror.Code == "" && perror.Message == "" { + switch r.StatusCode { + case http.StatusTooManyRequests: + perror.Code = "too many requests" + perror.Message = "exceeded rate limit" + case http.StatusServiceUnavailable: + perror.Code = "unavailable" + perror.Message = "service temporarily unavailable" + default: + perror.Code = r.Status + perror.Message = r.Header.Get("X-Influxdb-Error") + } + } + + return perror +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/labels.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/labels.go new file mode 100644 index 0000000..ee57748 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/labels.go @@ -0,0 +1,171 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +package api + +import ( + "context" + "fmt" + + "github.com/influxdata/influxdb-client-go/v2/domain" +) + +// LabelsAPI provides methods for managing labels in a InfluxDB server. +type LabelsAPI interface { + // GetLabels returns all labels. + GetLabels(ctx context.Context) (*[]domain.Label, error) + // FindLabelsByOrg returns labels belonging to organization org. + FindLabelsByOrg(ctx context.Context, org *domain.Organization) (*[]domain.Label, error) + // FindLabelsByOrgID returns labels belonging to organization with id orgID. + FindLabelsByOrgID(ctx context.Context, orgID string) (*[]domain.Label, error) + // FindLabelByID returns a label with labelID. + FindLabelByID(ctx context.Context, labelID string) (*domain.Label, error) + // FindLabelByName returns a label with name labelName under an organization orgID. + FindLabelByName(ctx context.Context, orgID, labelName string) (*domain.Label, error) + // CreateLabel creates a new label. + CreateLabel(ctx context.Context, label *domain.LabelCreateRequest) (*domain.Label, error) + // CreateLabelWithName creates a new label with label labelName and properties, under the organization org. 
+ // Properties example: {"color": "ffb3b3", "description": "this is a description"}. + CreateLabelWithName(ctx context.Context, org *domain.Organization, labelName string, properties map[string]string) (*domain.Label, error) + // CreateLabelWithName creates a new label with label labelName and properties, under the organization with id orgID. + // Properties example: {"color": "ffb3b3", "description": "this is a description"}. + CreateLabelWithNameWithID(ctx context.Context, orgID, labelName string, properties map[string]string) (*domain.Label, error) + // UpdateLabel updates the label. + // Properties can be removed by sending an update with an empty value. + UpdateLabel(ctx context.Context, label *domain.Label) (*domain.Label, error) + // DeleteLabelWithID deletes a label with labelID. + DeleteLabelWithID(ctx context.Context, labelID string) error + // DeleteLabel deletes a label. + DeleteLabel(ctx context.Context, label *domain.Label) error +} + +// labelsAPI implements LabelsAPI +type labelsAPI struct { + apiClient *domain.ClientWithResponses +} + +// NewLabelsAPI creates new instance of LabelsAPI +func NewLabelsAPI(apiClient *domain.ClientWithResponses) LabelsAPI { + return &labelsAPI{ + apiClient: apiClient, + } +} + +func (u *labelsAPI) GetLabels(ctx context.Context) (*[]domain.Label, error) { + params := &domain.GetLabelsParams{} + return u.getLabels(ctx, params) +} + +func (u *labelsAPI) getLabels(ctx context.Context, params *domain.GetLabelsParams) (*[]domain.Label, error) { + response, err := u.apiClient.GetLabelsWithResponse(ctx, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return (*[]domain.Label)(response.JSON200.Labels), nil +} + +func (u *labelsAPI) FindLabelsByOrg(ctx context.Context, org *domain.Organization) (*[]domain.Label, error) { + return u.FindLabelsByOrgID(ctx, *org.Id) +} + +func (u *labelsAPI) FindLabelsByOrgID(ctx context.Context, orgID string) (*[]domain.Label, error) { + params := &domain.GetLabelsParams{OrgID: &orgID} + return u.getLabels(ctx, params) +} + +func (u *labelsAPI) FindLabelByID(ctx context.Context, labelID string) (*domain.Label, error) { + params := &domain.GetLabelsIDParams{} + response, err := u.apiClient.GetLabelsIDWithResponse(ctx, labelID, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON200.Label, nil +} + +func (u *labelsAPI) FindLabelByName(ctx context.Context, orgID, labelName string) (*domain.Label, error) { + labels, err := u.FindLabelsByOrgID(ctx, orgID) + if err != nil { + return nil, err + } + var label *domain.Label + for _, u := range *labels { + if *u.Name == labelName { + label = &u + break + } + } + if label == nil { + return nil, fmt.Errorf("label '%s' not found", labelName) + } + return label, nil +} + +func (u *labelsAPI) CreateLabelWithName(ctx context.Context, org *domain.Organization, labelName string, properties map[string]string) (*domain.Label, error) { + return u.CreateLabelWithNameWithID(ctx, *org.Id, labelName, properties) +} + +func (u *labelsAPI) CreateLabelWithNameWithID(ctx context.Context, orgID, labelName string, properties map[string]string) (*domain.Label, error) { + props := &domain.LabelCreateRequest_Properties{AdditionalProperties: properties} + label := &domain.LabelCreateRequest{Name: labelName, OrgID: orgID, Properties: props} + 
return u.CreateLabel(ctx, label) +} + +func (u *labelsAPI) CreateLabel(ctx context.Context, label *domain.LabelCreateRequest) (*domain.Label, error) { + response, err := u.apiClient.PostLabelsWithResponse(ctx, domain.PostLabelsJSONRequestBody(*label)) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON201.Label, nil +} + +func (u *labelsAPI) UpdateLabel(ctx context.Context, label *domain.Label) (*domain.Label, error) { + var props *domain.LabelUpdate_Properties + params := &domain.PatchLabelsIDParams{} + if label.Properties != nil { + props = &domain.LabelUpdate_Properties{AdditionalProperties: label.Properties.AdditionalProperties} + } + body := &domain.LabelUpdate{ + Name: label.Name, + Properties: props, + } + response, err := u.apiClient.PatchLabelsIDWithResponse(ctx, *label.Id, params, domain.PatchLabelsIDJSONRequestBody(*body)) + if err != nil { + return nil, err + } + if response.JSON404 != nil { + return nil, domain.ErrorToHTTPError(response.JSON404, response.StatusCode()) + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON200.Label, nil +} + +func (u *labelsAPI) DeleteLabel(ctx context.Context, label *domain.Label) error { + return u.DeleteLabelWithID(ctx, *label.Id) +} + +func (u *labelsAPI) DeleteLabelWithID(ctx context.Context, labelID string) error { + params := &domain.DeleteLabelsIDParams{} + response, err := u.apiClient.DeleteLabelsIDWithResponse(ctx, labelID, params) + if err != nil { + return err + } + if response.JSON404 != nil { + return domain.ErrorToHTTPError(response.JSON404, response.StatusCode()) + } + if response.JSONDefault != nil { + return domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return nil +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/organizations.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/organizations.go new file mode 100644 index 0000000..75bc847 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/organizations.go @@ -0,0 +1,286 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +package api + +import ( + "context" + "fmt" + + "github.com/influxdata/influxdb-client-go/v2/domain" +) + +// OrganizationsAPI provides methods for managing Organizations in a InfluxDB server. +type OrganizationsAPI interface { + // GetOrganizations returns all organizations. + // GetOrganizations supports PagingOptions: Offset, Limit, Descending + GetOrganizations(ctx context.Context, pagingOptions ...PagingOption) (*[]domain.Organization, error) + // FindOrganizationByName returns an organization found using orgName. + FindOrganizationByName(ctx context.Context, orgName string) (*domain.Organization, error) + // FindOrganizationByID returns an organization found using orgID. + FindOrganizationByID(ctx context.Context, orgID string) (*domain.Organization, error) + // FindOrganizationsByUserID returns organizations an user with userID belongs to. + // FindOrganizationsByUserID supports PagingOptions: Offset, Limit, Descending + FindOrganizationsByUserID(ctx context.Context, userID string, pagingOptions ...PagingOption) (*[]domain.Organization, error) + // CreateOrganization creates new organization. 
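A hypothetical sketch of the labels API above, creating a label with display properties under an organization (the org ID and names are placeholders):

```go
package main

import (
	"context"
	"fmt"

	influxdb2 "github.com/influxdata/influxdb-client-go/v2"
)

func main() {
	client := influxdb2.NewClient("http://localhost:8086", "my-token")
	defer client.Close()

	// "0000000000000001" stands in for a real organization ID.
	label, err := client.LabelsAPI().CreateLabelWithNameWithID(
		context.Background(),
		"0000000000000001",
		"fermenting",
		map[string]string{"color": "ffb3b3", "description": "active fermentations"},
	)
	if err != nil {
		panic(err)
	}
	fmt.Println("label ID:", *label.Id)
}
```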
+ CreateOrganization(ctx context.Context, org *domain.Organization) (*domain.Organization, error) + // CreateOrganizationWithName creates new organization with orgName and with status active. + CreateOrganizationWithName(ctx context.Context, orgName string) (*domain.Organization, error) + // UpdateOrganization updates organization. + UpdateOrganization(ctx context.Context, org *domain.Organization) (*domain.Organization, error) + // DeleteOrganization deletes an organization. + DeleteOrganization(ctx context.Context, org *domain.Organization) error + // DeleteOrganizationWithID deletes an organization with orgID. + DeleteOrganizationWithID(ctx context.Context, orgID string) error + // GetMembers returns members of an organization. + GetMembers(ctx context.Context, org *domain.Organization) (*[]domain.ResourceMember, error) + // GetMembersWithID returns members of an organization with orgID. + GetMembersWithID(ctx context.Context, orgID string) (*[]domain.ResourceMember, error) + // AddMember adds a member to an organization. + AddMember(ctx context.Context, org *domain.Organization, user *domain.User) (*domain.ResourceMember, error) + // AddMemberWithID adds a member with id memberID to an organization with orgID. + AddMemberWithID(ctx context.Context, orgID, memberID string) (*domain.ResourceMember, error) + // RemoveMember removes a member from an organization. + RemoveMember(ctx context.Context, org *domain.Organization, user *domain.User) error + // RemoveMemberWithID removes a member with id memberID from an organization with orgID. + RemoveMemberWithID(ctx context.Context, orgID, memberID string) error + // GetOwners returns owners of an organization. + GetOwners(ctx context.Context, org *domain.Organization) (*[]domain.ResourceOwner, error) + // GetOwnersWithID returns owners of an organization with orgID. + GetOwnersWithID(ctx context.Context, orgID string) (*[]domain.ResourceOwner, error) + // AddOwner adds an owner to an organization. + AddOwner(ctx context.Context, org *domain.Organization, user *domain.User) (*domain.ResourceOwner, error) + // AddOwnerWithID adds an owner with id memberID to an organization with orgID. + AddOwnerWithID(ctx context.Context, orgID, memberID string) (*domain.ResourceOwner, error) + // RemoveOwner removes an owner from an organization. + RemoveOwner(ctx context.Context, org *domain.Organization, user *domain.User) error + // RemoveOwnerWithID removes an owner with id memberID from an organization with orgID. 
+ RemoveOwnerWithID(ctx context.Context, orgID, memberID string) error +} + +// organizationsAPI implements OrganizationsAPI +type organizationsAPI struct { + apiClient *domain.ClientWithResponses +} + +// NewOrganizationsAPI creates new instance of OrganizationsAPI +func NewOrganizationsAPI(apiClient *domain.ClientWithResponses) OrganizationsAPI { + return &organizationsAPI{ + apiClient: apiClient, + } +} + +func (o *organizationsAPI) getOrganizations(ctx context.Context, params *domain.GetOrgsParams, pagingOptions ...PagingOption) (*[]domain.Organization, error) { + options := defaultPaging() + for _, opt := range pagingOptions { + opt(options) + } + if options.limit > 0 { + params.Limit = &options.limit + } + params.Offset = &options.offset + params.Descending = &options.descending + response, err := o.apiClient.GetOrgsWithResponse(ctx, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON200.Orgs, nil +} +func (o *organizationsAPI) GetOrganizations(ctx context.Context, pagingOptions ...PagingOption) (*[]domain.Organization, error) { + params := &domain.GetOrgsParams{} + return o.getOrganizations(ctx, params, pagingOptions...) +} + +func (o *organizationsAPI) FindOrganizationByName(ctx context.Context, orgName string) (*domain.Organization, error) { + params := &domain.GetOrgsParams{Org: &orgName} + organizations, err := o.getOrganizations(ctx, params) + if err != nil { + return nil, err + } + if organizations != nil && len(*organizations) > 0 { + return &(*organizations)[0], nil + } + return nil, fmt.Errorf("organization '%s' not found", orgName) +} + +func (o *organizationsAPI) FindOrganizationByID(ctx context.Context, orgID string) (*domain.Organization, error) { + params := &domain.GetOrgsIDParams{} + response, err := o.apiClient.GetOrgsIDWithResponse(ctx, orgID, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON200, nil +} + +func (o *organizationsAPI) FindOrganizationsByUserID(ctx context.Context, userID string, pagingOptions ...PagingOption) (*[]domain.Organization, error) { + params := &domain.GetOrgsParams{UserID: &userID} + return o.getOrganizations(ctx, params, pagingOptions...) 
+} + +func (o *organizationsAPI) CreateOrganization(ctx context.Context, org *domain.Organization) (*domain.Organization, error) { + params := &domain.PostOrgsParams{} + req := domain.PostOrgsJSONRequestBody{ + Name: org.Name, + Description: org.Description, + } + response, err := o.apiClient.PostOrgsWithResponse(ctx, params, req) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON201, nil +} + +func (o *organizationsAPI) CreateOrganizationWithName(ctx context.Context, orgName string) (*domain.Organization, error) { + status := domain.OrganizationStatusActive + org := &domain.Organization{Name: orgName, Status: &status} + return o.CreateOrganization(ctx, org) +} + +func (o *organizationsAPI) DeleteOrganization(ctx context.Context, org *domain.Organization) error { + return o.DeleteOrganizationWithID(ctx, *org.Id) +} + +func (o *organizationsAPI) DeleteOrganizationWithID(ctx context.Context, orgID string) error { + params := &domain.DeleteOrgsIDParams{} + response, err := o.apiClient.DeleteOrgsIDWithResponse(ctx, orgID, params) + if err != nil { + return err + } + if response.JSONDefault != nil { + return domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + if response.JSON404 != nil { + return domain.ErrorToHTTPError(response.JSON404, response.StatusCode()) + } + return nil +} + +func (o *organizationsAPI) UpdateOrganization(ctx context.Context, org *domain.Organization) (*domain.Organization, error) { + params := &domain.PatchOrgsIDParams{} + req := domain.PatchOrgsIDJSONRequestBody{ + Name: &org.Name, + Description: org.Description, + } + response, err := o.apiClient.PatchOrgsIDWithResponse(ctx, *org.Id, params, req) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON200, nil +} + +func (o *organizationsAPI) GetMembers(ctx context.Context, org *domain.Organization) (*[]domain.ResourceMember, error) { + return o.GetMembersWithID(ctx, *org.Id) +} + +func (o *organizationsAPI) GetMembersWithID(ctx context.Context, orgID string) (*[]domain.ResourceMember, error) { + params := &domain.GetOrgsIDMembersParams{} + response, err := o.apiClient.GetOrgsIDMembersWithResponse(ctx, orgID, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + if response.JSON404 != nil { + return nil, domain.ErrorToHTTPError(response.JSON404, response.StatusCode()) + } + return response.JSON200.Users, nil +} + +func (o *organizationsAPI) AddMember(ctx context.Context, org *domain.Organization, user *domain.User) (*domain.ResourceMember, error) { + return o.AddMemberWithID(ctx, *org.Id, *user.Id) +} + +func (o *organizationsAPI) AddMemberWithID(ctx context.Context, orgID, memberID string) (*domain.ResourceMember, error) { + params := &domain.PostOrgsIDMembersParams{} + body := &domain.PostOrgsIDMembersJSONRequestBody{Id: memberID} + response, err := o.apiClient.PostOrgsIDMembersWithResponse(ctx, orgID, params, *body) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON201, nil +} + +func (o *organizationsAPI) RemoveMember(ctx context.Context, org *domain.Organization, user *domain.User) error 
{ + return o.RemoveMemberWithID(ctx, *org.Id, *user.Id) +} + +func (o *organizationsAPI) RemoveMemberWithID(ctx context.Context, orgID, memberID string) error { + params := &domain.DeleteOrgsIDMembersIDParams{} + response, err := o.apiClient.DeleteOrgsIDMembersIDWithResponse(ctx, orgID, memberID, params) + if err != nil { + return err + } + if response.JSONDefault != nil { + return domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return nil +} + +func (o *organizationsAPI) GetOwners(ctx context.Context, org *domain.Organization) (*[]domain.ResourceOwner, error) { + return o.GetOwnersWithID(ctx, *org.Id) +} + +func (o *organizationsAPI) GetOwnersWithID(ctx context.Context, orgID string) (*[]domain.ResourceOwner, error) { + params := &domain.GetOrgsIDOwnersParams{} + response, err := o.apiClient.GetOrgsIDOwnersWithResponse(ctx, orgID, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + if response.JSON404 != nil { + return nil, domain.ErrorToHTTPError(response.JSON404, response.StatusCode()) + } + return response.JSON200.Users, nil +} + +func (o *organizationsAPI) AddOwner(ctx context.Context, org *domain.Organization, user *domain.User) (*domain.ResourceOwner, error) { + return o.AddOwnerWithID(ctx, *org.Id, *user.Id) +} + +func (o *organizationsAPI) AddOwnerWithID(ctx context.Context, orgID, memberID string) (*domain.ResourceOwner, error) { + params := &domain.PostOrgsIDOwnersParams{} + body := &domain.PostOrgsIDOwnersJSONRequestBody{Id: memberID} + response, err := o.apiClient.PostOrgsIDOwnersWithResponse(ctx, orgID, params, *body) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON201, nil +} + +func (o *organizationsAPI) RemoveOwner(ctx context.Context, org *domain.Organization, user *domain.User) error { + return o.RemoveOwnerWithID(ctx, *org.Id, *user.Id) +} + +func (o *organizationsAPI) RemoveOwnerWithID(ctx context.Context, orgID, memberID string) error { + params := &domain.DeleteOrgsIDOwnersIDParams{} + response, err := o.apiClient.DeleteOrgsIDOwnersIDWithResponse(ctx, orgID, memberID, params) + if err != nil { + return err + } + if response.JSONDefault != nil { + return domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return nil +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/paging.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/paging.go new file mode 100644 index 0000000..c5a4f92 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/paging.go @@ -0,0 +1,69 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +package api + +import "github.com/influxdata/influxdb-client-go/v2/domain" + +// PagingOption is the function type for applying paging option +type PagingOption func(p *Paging) + +// Paging holds pagination parameters for various Get* functions of InfluxDB 2 API +// Not the all options are usable for some Get* functions +type Paging struct { + // Starting offset for returning items + // Default 0. + offset domain.Offset + // Maximum number of items returned. 
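A minimal sketch of the organizations API above: resolve an organization by name, or create it if the lookup fails (all values are placeholders):

```go
package main

import (
	"context"
	"fmt"

	influxdb2 "github.com/influxdata/influxdb-client-go/v2"
)

func main() {
	client := influxdb2.NewClient("http://localhost:8086", "my-token")
	defer client.Close()

	ctx := context.Background()
	orgsAPI := client.OrganizationsAPI()

	org, err := orgsAPI.FindOrganizationByName(ctx, "my-org")
	if err != nil {
		// Not found (or another error): try to create it instead.
		org, err = orgsAPI.CreateOrganizationWithName(ctx, "my-org")
		if err != nil {
			panic(err)
		}
	}
	fmt.Println("organization ID:", *org.Id)
}
```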
+ // Default 0 - not applied + limit domain.Limit + // What field should be used for sorting + sortBy string + // Changes sorting direction + descending domain.Descending + // The last resource ID from which to seek from (but not including). + // This is to be used instead of `offset`. + after domain.After +} + +// defaultPagingOptions returns default paging options: offset 0, limit 0 (not applied), default sorting, ascending +func defaultPaging() *Paging { + return &Paging{limit: 0, offset: 0, sortBy: "", descending: false, after: ""} +} + +// PagingWithLimit sets limit option - maximum number of items returned. +func PagingWithLimit(limit int) PagingOption { + return func(p *Paging) { + p.limit = domain.Limit(limit) + } +} + +// PagingWithOffset set starting offset for returning items. Default 0. +func PagingWithOffset(offset int) PagingOption { + return func(p *Paging) { + p.offset = domain.Offset(offset) + } +} + +// PagingWithSortBy sets field name which should be used for sorting +func PagingWithSortBy(sortBy string) PagingOption { + return func(p *Paging) { + p.sortBy = sortBy + } +} + +// PagingWithDescending changes sorting direction +func PagingWithDescending(descending bool) PagingOption { + return func(p *Paging) { + p.descending = domain.Descending(descending) + } +} + +// PagingWithAfter set after option - the last resource ID from which to seek from (but not including). +// This is to be used instead of `offset`. +func PagingWithAfter(after string) PagingOption { + return func(p *Paging) { + p.after = domain.After(after) + } +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/query.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/query.go new file mode 100644 index 0000000..9582d45 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/query.go @@ -0,0 +1,532 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +package api + +import ( + "bytes" + "compress/gzip" + "context" + "encoding/base64" + "encoding/csv" + "encoding/json" + "errors" + "fmt" + "io" + "io/ioutil" + "net/http" + "net/url" + "path" + "reflect" + "strconv" + "strings" + "sync" + "time" + + http2 "github.com/influxdata/influxdb-client-go/v2/api/http" + "github.com/influxdata/influxdb-client-go/v2/api/query" + "github.com/influxdata/influxdb-client-go/v2/domain" + "github.com/influxdata/influxdb-client-go/v2/internal/log" + ilog "github.com/influxdata/influxdb-client-go/v2/log" +) + +const ( + stringDatatype = "string" + doubleDatatype = "double" + boolDatatype = "boolean" + longDatatype = "long" + uLongDatatype = "unsignedLong" + durationDatatype = "duration" + base64BinaryDataType = "base64Binary" + timeDatatypeRFC = "dateTime:RFC3339" + timeDatatypeRFCNano = "dateTime:RFC3339Nano" +) + +// QueryAPI provides methods for performing synchronously flux query against InfluxDB server. +// +// Flux query can contain reference to parameters, which must be passed via queryParams. +// it can be a struct or map. Param values can be only simple types or time.Time. +// The name of a struct field or a map key (must be a string) will be a param name. 
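The paging options above apply to the various Get*/Find* calls; a sketch of walking buckets in pages of 20 (client setup and names as in the earlier placeholder examples):

```go
package main

import (
	"context"
	"fmt"

	influxdb2 "github.com/influxdata/influxdb-client-go/v2"
	"github.com/influxdata/influxdb-client-go/v2/api"
)

func main() {
	client := influxdb2.NewClient("http://localhost:8086", "my-token")
	defer client.Close()

	bucketsAPI := client.BucketsAPI()
	for offset := 0; ; offset += 20 {
		// Fetch one page of buckets belonging to the org.
		page, err := bucketsAPI.FindBucketsByOrgName(context.Background(), "my-org",
			api.PagingWithOffset(offset), api.PagingWithLimit(20))
		if err != nil {
			panic(err)
		}
		if page == nil || len(*page) == 0 {
			break
		}
		for _, b := range *page {
			fmt.Println(b.Name)
		}
	}
}
```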
+// The name of the parameter represented by a struct field can be specified by JSON annotation: +// +// type Condition struct { +// Start time.Time `json:"start"` +// Field string `json:"field"` +// Value float64 `json:"value"` +// } +// +// Parameters are then accessed via the Flux params object: +// +// query:= `from(bucket: "environment") +// |> range(start: time(v: params.start)) +// |> filter(fn: (r) => r._measurement == "air") +// |> filter(fn: (r) => r._field == params.field) +// |> filter(fn: (r) => r._value > params.value)` +// +type QueryAPI interface { + // QueryRaw executes flux query on the InfluxDB server and returns complete query result as a string with table annotations according to dialect + QueryRaw(ctx context.Context, query string, dialect *domain.Dialect) (string, error) + // QueryRawWithParams executes flux parametrized query on the InfluxDB server and returns complete query result as a string with table annotations according to dialect + QueryRawWithParams(ctx context.Context, query string, dialect *domain.Dialect, params interface{}) (string, error) + // Query executes flux query on the InfluxDB server and returns QueryTableResult which parses streamed response into structures representing flux table parts + Query(ctx context.Context, query string) (*QueryTableResult, error) + // QueryWithParams executes flux parametrized query on the InfluxDB server and returns QueryTableResult which parses streamed response into structures representing flux table parts + QueryWithParams(ctx context.Context, query string, params interface{}) (*QueryTableResult, error) +} + +// NewQueryAPI returns new query client for querying buckets belonging to org +func NewQueryAPI(org string, service http2.Service) QueryAPI { + return &queryAPI{ + org: org, + httpService: service, + } +} + +// QueryTableResult parses streamed flux query response into structures representing flux table parts +// Walking though the result is done by repeatedly calling Next() until returns false. +// Actual flux table info (columns with names, data types, etc) is returned by TableMetadata() method. +// Data are acquired by Record() method. +// Preliminary end can be caused by an error, so when Next() return false, check Err() for an error +type QueryTableResult struct { + io.Closer + csvReader *csv.Reader + tablePosition int + tableChanged bool + table *query.FluxTableMetadata + record *query.FluxRecord + err error +} + +// NewQueryTableResult returns new QueryTableResult +func NewQueryTableResult(rawResponse io.ReadCloser) *QueryTableResult { + csvReader := csv.NewReader(rawResponse) + csvReader.FieldsPerRecord = -1 + return &QueryTableResult{Closer: rawResponse, csvReader: csvReader} +} + +// queryAPI implements QueryAPI interface +type queryAPI struct { + org string + httpService http2.Service + url string + lock sync.Mutex +} + +// queryBody holds the body for an HTTP query request. 
+type queryBody struct { + Dialect *domain.Dialect `json:"dialect,omitempty"` + Query string `json:"query"` + Type domain.QueryType `json:"type"` + Params interface{} `json:"params,omitempty"` +} + +func (q *queryAPI) QueryRaw(ctx context.Context, query string, dialect *domain.Dialect) (string, error) { + return q.QueryRawWithParams(ctx, query, dialect, nil) +} + +func (q *queryAPI) QueryRawWithParams(ctx context.Context, query string, dialect *domain.Dialect, params interface{}) (string, error) { + if err := checkParamsType(params); err != nil { + return "", err + } + queryURL, err := q.queryURL() + if err != nil { + return "", err + } + qr := queryBody{ + Query: query, + Type: domain.QueryTypeFlux, + Dialect: dialect, + Params: params, + } + qrJSON, err := json.Marshal(qr) + if err != nil { + return "", err + } + if log.Level() >= ilog.DebugLevel { + log.Debugf("Query: %s", qrJSON) + } + var body string + perror := q.httpService.DoPostRequest(ctx, queryURL, bytes.NewReader(qrJSON), func(req *http.Request) { + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Accept-Encoding", "gzip") + }, + func(resp *http.Response) error { + if resp.Header.Get("Content-Encoding") == "gzip" { + resp.Body, err = gzip.NewReader(resp.Body) + if err != nil { + return err + } + } + respBody, err := ioutil.ReadAll(resp.Body) + if err != nil { + return err + } + body = string(respBody) + return nil + }) + if perror != nil { + return "", perror + } + return body, nil +} + +// DefaultDialect return flux query Dialect with full annotations (datatype, group, default), header and comma char as a delimiter +func DefaultDialect() *domain.Dialect { + annotations := []domain.DialectAnnotations{domain.DialectAnnotationsDatatype, domain.DialectAnnotationsGroup, domain.DialectAnnotationsDefault} + delimiter := "," + header := true + return &domain.Dialect{ + Annotations: &annotations, + Delimiter: &delimiter, + Header: &header, + } +} + +func (q *queryAPI) Query(ctx context.Context, query string) (*QueryTableResult, error) { + return q.QueryWithParams(ctx, query, nil) +} + +func (q *queryAPI) QueryWithParams(ctx context.Context, query string, params interface{}) (*QueryTableResult, error) { + var queryResult *QueryTableResult + if err := checkParamsType(params); err != nil { + return nil, err + } + queryURL, err := q.queryURL() + if err != nil { + return nil, err + } + qr := queryBody{ + Query: query, + Type: domain.QueryTypeFlux, + Dialect: DefaultDialect(), + Params: params, + } + qrJSON, err := json.Marshal(qr) + if err != nil { + return nil, err + } + if log.Level() >= ilog.DebugLevel { + log.Debugf("Query: %s", qrJSON) + } + perror := q.httpService.DoPostRequest(ctx, queryURL, bytes.NewReader(qrJSON), func(req *http.Request) { + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Accept-Encoding", "gzip") + }, + func(resp *http.Response) error { + if resp.Header.Get("Content-Encoding") == "gzip" { + resp.Body, err = gzip.NewReader(resp.Body) + if err != nil { + return err + } + } + csvReader := csv.NewReader(resp.Body) + csvReader.FieldsPerRecord = -1 + queryResult = &QueryTableResult{Closer: resp.Body, csvReader: csvReader} + return nil + }) + if perror != nil { + return queryResult, perror + } + return queryResult, nil +} + +func (q *queryAPI) queryURL() (string, error) { + if q.url == "" { + u, err := url.Parse(q.httpService.ServerAPIURL()) + if err != nil { + return "", err + } + u.Path = path.Join(u.Path, "query") + + params := u.Query() + params.Set("org", q.org) + u.RawQuery 
= params.Encode() + q.lock.Lock() + q.url = u.String() + q.lock.Unlock() + } + return q.url, nil +} + +// checkParamsType validates the value is struct with simple type fields +// or a map with key as string and value as a simple type +func checkParamsType(p interface{}) error { + if p == nil { + return nil + } + t := reflect.TypeOf(p) + v := reflect.ValueOf(p) + if t.Kind() == reflect.Ptr { + t = t.Elem() + v = v.Elem() + } + if t.Kind() != reflect.Struct && t.Kind() != reflect.Map { + return fmt.Errorf("cannot use %v as query params", t) + } + switch t.Kind() { + case reflect.Struct: + fields := reflect.VisibleFields(t) + for _, f := range fields { + fv := v.FieldByIndex(f.Index) + t := getFieldType(fv) + if !validParamType(t) { + return fmt.Errorf("cannot use field '%s' of type '%v' as a query param", f.Name, t) + } + + } + case reflect.Map: + key := t.Key() + if key.Kind() != reflect.String { + return fmt.Errorf("cannot use map key of type '%v' for query param name", key) + } + for _, k := range v.MapKeys() { + f := v.MapIndex(k) + t := getFieldType(f) + if !validParamType(t) { + return fmt.Errorf("cannot use map value type '%v' as a query param", t) + } + } + } + return nil +} + +// getFieldType extracts type of value +func getFieldType(v reflect.Value) reflect.Type { + t := v.Type() + if t.Kind() == reflect.Ptr { + t = t.Elem() + v = v.Elem() + } + if t.Kind() == reflect.Interface && !v.IsNil() { + t = reflect.ValueOf(v.Interface()).Type() + } + return t +} + +// timeType is the exact type for the Time +var timeType = reflect.TypeOf(time.Time{}) + +// validParamType validates that t is primitive type or string or interface +func validParamType(t reflect.Type) bool { + return (t.Kind() > reflect.Invalid && t.Kind() < reflect.Complex64) || + t.Kind() == reflect.String || + t == timeType +} + +// TablePosition returns actual flux table position in the result, or -1 if no table was found yet +// Each new table is introduced by an annotation in csv +func (q *QueryTableResult) TablePosition() int { + if q.table != nil { + return q.table.Position() + } + return -1 +} + +// TableMetadata returns actual flux table metadata +func (q *QueryTableResult) TableMetadata() *query.FluxTableMetadata { + return q.table +} + +// TableChanged returns true if last call of Next() found also new result table +// Table information is available via TableMetadata method +func (q *QueryTableResult) TableChanged() bool { + return q.tableChanged +} + +// Record returns last parsed flux table data row +// Use Record methods to access value and row properties +func (q *QueryTableResult) Record() *query.FluxRecord { + return q.record +} + +type parsingState int + +const ( + parsingStateNormal parsingState = iota + parsingStateAnnotation + parsingStateNameRow + parsingStateError +) + +// Next advances to next row in query result. 
+// During the first time it is called, Next creates also table metadata +// Actual parsed row is available through Record() function +// Returns false in case of end or an error, otherwise true +func (q *QueryTableResult) Next() bool { + var row []string + // set closing query in case of preliminary return + closer := func() { + if err := q.Close(); err != nil { + message := err.Error() + if q.err != nil { + message = fmt.Sprintf("%s,%s", message, q.err.Error()) + } + q.err = errors.New(message) + } + } + defer func() { + closer() + }() + parsingState := parsingStateNormal + q.tableChanged = false + dataTypeAnnotationFound := false +readRow: + row, q.err = q.csvReader.Read() + if q.err == io.EOF { + q.err = nil + return false + } + if q.err != nil { + return false + } + + if len(row) <= 1 { + goto readRow + } + if len(row[0]) > 0 && row[0][0] == '#' { + if parsingState == parsingStateNormal { + q.table = query.NewFluxTableMetadata(q.tablePosition) + q.tablePosition++ + q.tableChanged = true + for i := range row[1:] { + q.table.AddColumn(query.NewFluxColumn(i)) + } + parsingState = parsingStateAnnotation + } + } + if q.table == nil { + q.err = errors.New("parsing error, annotations not found") + return false + } + if len(row)-1 != len(q.table.Columns()) { + q.err = fmt.Errorf("parsing error, row has different number of columns than the table: %d vs %d", len(row)-1, len(q.table.Columns())) + return false + } + switch row[0] { + case "": + switch parsingState { + case parsingStateAnnotation: + if !dataTypeAnnotationFound { + q.err = errors.New("parsing error, datatype annotation not found") + return false + } + parsingState = parsingStateNameRow + fallthrough + case parsingStateNameRow: + if row[1] == "error" { + parsingState = parsingStateError + } else { + for i, n := range row[1:] { + if q.table.Column(i) != nil { + q.table.Column(i).SetName(n) + } + } + parsingState = parsingStateNormal + } + goto readRow + case parsingStateError: + var message string + if len(row) > 1 && len(row[1]) > 0 { + message = row[1] + } else { + message = "unknown query error" + } + reference := "" + if len(row) > 2 && len(row[2]) > 0 { + reference = fmt.Sprintf(",%s", row[2]) + } + q.err = fmt.Errorf("%s%s", message, reference) + return false + } + values := make(map[string]interface{}) + for i, v := range row[1:] { + if q.table.Column(i) != nil { + values[q.table.Column(i).Name()], q.err = toValue(stringTernary(v, q.table.Column(i).DefaultValue()), q.table.Column(i).DataType(), q.table.Column(i).Name()) + if q.err != nil { + return false + } + } + } + q.record = query.NewFluxRecord(q.table.Position(), values) + case "#datatype": + dataTypeAnnotationFound = true + for i, d := range row[1:] { + if q.table.Column(i) != nil { + q.table.Column(i).SetDataType(d) + } + } + goto readRow + case "#group": + for i, g := range row[1:] { + if q.table.Column(i) != nil { + q.table.Column(i).SetGroup(g == "true") + } + } + goto readRow + case "#default": + for i, c := range row[1:] { + if q.table.Column(i) != nil { + q.table.Column(i).SetDefaultValue(c) + } + } + goto readRow + } + // don't close query + closer = func() {} + return true +} + +// Err returns an error raised during flux query response parsing +func (q *QueryTableResult) Err() error { + return q.err +} + +// Close reads remaining data and closes underlying Closer +func (q *QueryTableResult) Close() error { + var err error + for err == nil { + _, err = q.csvReader.Read() + } + return q.Closer.Close() +} + +// stringTernary returns a if not empty, otherwise b 
+func stringTernary(a, b string) string { + if a == "" { + return b + } + return a +} + +// toValues converts s into type by t +func toValue(s, t, name string) (interface{}, error) { + if s == "" { + return nil, nil + } + switch t { + case stringDatatype: + return s, nil + case timeDatatypeRFC: + return time.Parse(time.RFC3339, s) + case timeDatatypeRFCNano: + return time.Parse(time.RFC3339Nano, s) + case durationDatatype: + return time.ParseDuration(s) + case doubleDatatype: + return strconv.ParseFloat(s, 64) + case boolDatatype: + if strings.ToLower(s) == "false" { + return false, nil + } + return true, nil + case longDatatype: + return strconv.ParseInt(s, 10, 64) + case uLongDatatype: + return strconv.ParseUint(s, 10, 64) + case base64BinaryDataType: + return base64.StdEncoding.DecodeString(s) + default: + return nil, fmt.Errorf("%s has unknown data type %s", name, t) + } +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/query/table.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/query/table.go new file mode 100644 index 0000000..58cd3d9 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/query/table.go @@ -0,0 +1,260 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +// Package query defined types for representing flux query result +package query + +import ( + "fmt" + "sort" + "strings" + "time" +) + +// FluxTableMetadata holds flux query result table information represented by collection of columns. +// Each new table is introduced by annotations +type FluxTableMetadata struct { + position int + columns []*FluxColumn +} + +// FluxColumn holds flux query table column properties +type FluxColumn struct { + index int + name string + dataType string + group bool + defaultValue string +} + +// FluxRecord represents row in the flux query result table +type FluxRecord struct { + table int + values map[string]interface{} +} + +// NewFluxTableMetadata creates FluxTableMetadata for the table on position +func NewFluxTableMetadata(position int) *FluxTableMetadata { + return NewFluxTableMetadataFull(position, make([]*FluxColumn, 0, 10)) +} + +// NewFluxTableMetadataFull creates FluxTableMetadata +func NewFluxTableMetadataFull(position int, columns []*FluxColumn) *FluxTableMetadata { + return &FluxTableMetadata{position: position, columns: columns} +} + +// Position returns position of the table in the flux query result +func (f *FluxTableMetadata) Position() int { + return f.position +} + +// Columns returns slice of flux query result table +func (f *FluxTableMetadata) Columns() []*FluxColumn { + return f.columns +} + +// AddColumn adds column definition to table metadata +func (f *FluxTableMetadata) AddColumn(column *FluxColumn) *FluxTableMetadata { + f.columns = append(f.columns, column) + return f +} + +// Column returns flux table column by index. +// Returns nil if index is out of the bounds. 
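To round out the query API and result types above, a sketch that reads recent points back and iterates the streamed tables (bucket, org, and measurement names are placeholders):

```go
package main

import (
	"context"
	"fmt"

	influxdb2 "github.com/influxdata/influxdb-client-go/v2"
)

func main() {
	client := influxdb2.NewClient("http://localhost:8086", "my-token")
	defer client.Close()

	queryAPI := client.QueryAPI("my-org")
	result, err := queryAPI.Query(context.Background(), `
		from(bucket: "my-bucket")
		  |> range(start: -1h)
		  |> filter(fn: (r) => r._measurement == "gravity")`)
	if err != nil {
		panic(err)
	}
	for result.Next() {
		if result.TableChanged() {
			// New table annotations were parsed; the metadata describes its columns.
			fmt.Println("table:", result.TableMetadata().String())
		}
		fmt.Println(result.Record().Time(), result.Record().Value())
	}
	// Next() returns false on both end-of-data and errors, so check Err().
	if result.Err() != nil {
		panic(result.Err())
	}
}
```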
+func (f *FluxTableMetadata) Column(index int) *FluxColumn { + if len(f.columns) == 0 || index < 0 || index >= len(f.columns) { + return nil + } + return f.columns[index] +} + +// String returns FluxTableMetadata string dump +func (f *FluxTableMetadata) String() string { + var buffer strings.Builder + for i, c := range f.columns { + if i > 0 { + buffer.WriteString(",") + } + buffer.WriteString("col") + buffer.WriteString(c.String()) + } + return buffer.String() +} + +// NewFluxColumn creates FluxColumn for position +func NewFluxColumn(index int) *FluxColumn { + return &FluxColumn{index: index} +} + +// NewFluxColumnFull creates FluxColumn +func NewFluxColumnFull(dataType string, defaultValue string, name string, group bool, index int) *FluxColumn { + return &FluxColumn{index: index, name: name, dataType: dataType, group: group, defaultValue: defaultValue} +} + +// SetDefaultValue sets default value for the column +func (f *FluxColumn) SetDefaultValue(defaultValue string) { + f.defaultValue = defaultValue +} + +// SetGroup set group flag for the column +func (f *FluxColumn) SetGroup(group bool) { + f.group = group +} + +// SetDataType sets data type for the column +func (f *FluxColumn) SetDataType(dataType string) { + f.dataType = dataType +} + +// SetName sets name of the column +func (f *FluxColumn) SetName(name string) { + f.name = name +} + +// DefaultValue returns default value of the column +func (f *FluxColumn) DefaultValue() string { + return f.defaultValue +} + +// IsGroup return true if the column is grouping column +func (f *FluxColumn) IsGroup() bool { + return f.group +} + +// DataType returns data type of the column +func (f *FluxColumn) DataType() string { + return f.dataType +} + +// Name returns name of the column +func (f *FluxColumn) Name() string { + return f.name +} + +// Index returns index of the column +func (f *FluxColumn) Index() int { + return f.index +} + +// String returns FluxColumn string dump +func (f *FluxColumn) String() string { + return fmt.Sprintf("{%d: name: %s, datatype: %s, defaultValue: %s, group: %v}", f.index, f.name, f.dataType, f.defaultValue, f.group) +} + +// NewFluxRecord returns new record for the table with values +func NewFluxRecord(table int, values map[string]interface{}) *FluxRecord { + return &FluxRecord{table: table, values: values} +} + +// Table returns value of the table column +// It returns zero if the table column is not found +func (r *FluxRecord) Table() int { + return int(intValue(r.values, "table")) +} + +// Start returns the inclusive lower time bound of all records in the current table. +// Returns empty time.Time if there is no column "_start". +func (r *FluxRecord) Start() time.Time { + return timeValue(r.values, "_start") +} + +// Stop returns the exclusive upper time bound of all records in the current table. +// Returns empty time.Time if there is no column "_stop". +func (r *FluxRecord) Stop() time.Time { + return timeValue(r.values, "_stop") +} + +// Time returns the time of the record. +// Returns empty time.Time if there is no column "_time". +func (r *FluxRecord) Time() time.Time { + return timeValue(r.values, "_time") +} + +// Value returns the default _value column value or nil if not present +func (r *FluxRecord) Value() interface{} { + return r.ValueByKey("_value") +} + +// Field returns the field name. +// Returns empty string if there is no column "_field". 
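+// A usage sketch (assumes record was obtained from QueryTableResult.Record() while iterating):
+//   fmt.Printf("%s %s=%v at %v\n", record.Measurement(), record.Field(), record.Value(), record.Time())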
+func (r *FluxRecord) Field() string { + return stringValue(r.values, "_field") +} + +// Result returns the value of the _result column, which represents result name. +// Returns empty string if there is no column "result". +func (r *FluxRecord) Result() string { + return stringValue(r.values, "result") +} + +// Measurement returns the measurement name of the record +// Returns empty string if there is no column "_measurement". +func (r *FluxRecord) Measurement() string { + return stringValue(r.values, "_measurement") +} + +// Values returns map of the values where key is the column name +func (r *FluxRecord) Values() map[string]interface{} { + return r.values +} + +// ValueByKey returns value for given column key for the record or nil of result has no value the column key +func (r *FluxRecord) ValueByKey(key string) interface{} { + return r.values[key] +} + +// String returns FluxRecord string dump +func (r *FluxRecord) String() string { + if len(r.values) == 0 { + return "" + } + + i := 0 + keys := make([]string, len(r.values)) + for k := range r.values { + keys[i] = k + i++ + } + sort.Strings(keys) + var buffer strings.Builder + buffer.WriteString(fmt.Sprintf("%s:%v", keys[0], r.values[keys[0]])) + for _, k := range keys[1:] { + buffer.WriteString(",") + buffer.WriteString(fmt.Sprintf("%s:%v", k, r.values[k])) + } + return buffer.String() +} + +// timeValue returns time.Time value from values map according to the key +// Empty time.Time value is returned if key is not found +func timeValue(values map[string]interface{}, key string) time.Time { + if val, ok := values[key]; ok { + if t, ok := val.(time.Time); ok { + return t + } + } + return time.Time{} +} + +// stringValue returns string value from values map according to the key +// Empty string is returned if key is not found +func stringValue(values map[string]interface{}, key string) string { + if val, ok := values[key]; ok { + if s, ok := val.(string); ok { + return s + } + } + return "" +} + +// intValue returns int64 value from values map according to the key +// Zero value is returned if key is not found +func intValue(values map[string]interface{}, key string) int64 { + if val, ok := values[key]; ok { + if i, ok := val.(int64); ok { + return i + } + } + return 0 +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/tasks.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/tasks.go new file mode 100644 index 0000000..e584fa3 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/tasks.go @@ -0,0 +1,592 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +package api + +import ( + "context" + "errors" + "fmt" + "time" + + "github.com/influxdata/influxdb-client-go/v2/domain" +) + +// TaskFilter defines filtering options for FindTasks functions. +type TaskFilter struct { + // Returns task with a specific name + Name string + // Filter tasks to a specific organization name. + OrgName string + // Filter tasks to a specific organization ID. + OrgID string + // Filter tasks to a specific user ID. + User string + // Filter tasks by a status--"inactive" or "active". + Status domain.TaskStatusType + // Return tasks after a specified ID. + After string + // The number of tasks to return. + // Default 100, minimum: 1, maximum 500 + Limit int +} + +// RunFilter defines filtering options for FindRun* functions. +type RunFilter struct { + // Return runs after a specified ID. 
+ After string + // The number of runs to return. + // Default 100, minimum 1, maximum 500. + Limit int + // Filter runs to those scheduled before this time. + BeforeTime time.Time + // Filter runs to those scheduled after this time. + AfterTime time.Time +} + +// TasksAPI provides methods for managing tasks and task runs in an InfluxDB server. +type TasksAPI interface { + // FindTasks retrieves tasks according to the filter. More fields can be applied. Filter can be nil. + FindTasks(ctx context.Context, filter *TaskFilter) ([]domain.Task, error) + // GetTask retrieves a refreshed instance of task. + GetTask(ctx context.Context, task *domain.Task) (*domain.Task, error) + // GetTaskByID retrieves a task found using taskID. + GetTaskByID(ctx context.Context, taskID string) (*domain.Task, error) + // CreateTask creates a new task according the task object. + // It copies OrgId, Name, Description, Flux, Status and Every or Cron properties. Every and Cron are mutually exclusive. + // Every has higher priority. + CreateTask(ctx context.Context, task *domain.Task) (*domain.Task, error) + // CreateTaskWithEvery creates a new task with the name, flux script and every repetition setting, in the org orgID. + // Every holds duration values. + CreateTaskWithEvery(ctx context.Context, name, flux, every, orgID string) (*domain.Task, error) + // CreateTaskWithCron creates a new task with the name, flux script and cron repetition setting, in the org orgID + // Cron holds cron-like setting, e.g. once an hour at beginning of the hour "0 * * * *". + CreateTaskWithCron(ctx context.Context, name, flux, cron, orgID string) (*domain.Task, error) + // CreateTaskByFlux creates a new task with complete definition in flux script, in the org orgID + CreateTaskByFlux(ctx context.Context, flux, orgID string) (*domain.Task, error) + // UpdateTask updates a task. + // It copies Description, Flux, Status, Offset and Every or Cron properties. Every and Cron are mutually exclusive. + // Every has higher priority. + UpdateTask(ctx context.Context, task *domain.Task) (*domain.Task, error) + // DeleteTask deletes a task. + DeleteTask(ctx context.Context, task *domain.Task) error + // DeleteTaskWithID deletes a task with taskID. + DeleteTaskWithID(ctx context.Context, taskID string) error + // FindMembers retrieves members of a task. + FindMembers(ctx context.Context, task *domain.Task) ([]domain.ResourceMember, error) + // FindMembersWithID retrieves members of a task with taskID. + FindMembersWithID(ctx context.Context, taskID string) ([]domain.ResourceMember, error) + // AddMember adds a member to a task. + AddMember(ctx context.Context, task *domain.Task, user *domain.User) (*domain.ResourceMember, error) + // AddMemberWithID adds a member with id memberID to a task with taskID. + AddMemberWithID(ctx context.Context, taskID, memberID string) (*domain.ResourceMember, error) + // RemoveMember removes a member from a task. + RemoveMember(ctx context.Context, task *domain.Task, user *domain.User) error + // RemoveMemberWithID removes a member with id memberID from a task with taskID. + RemoveMemberWithID(ctx context.Context, taskID, memberID string) error + // FindOwners retrieves owners of a task. + FindOwners(ctx context.Context, task *domain.Task) ([]domain.ResourceOwner, error) + // FindOwnersWithID retrieves owners of a task with taskID. + FindOwnersWithID(ctx context.Context, taskID string) ([]domain.ResourceOwner, error) + // AddOwner adds an owner to a task. 
+ AddOwner(ctx context.Context, task *domain.Task, user *domain.User) (*domain.ResourceOwner, error) + // AddOwnerWithID adds an owner with id memberID to a task with taskID. + AddOwnerWithID(ctx context.Context, taskID, memberID string) (*domain.ResourceOwner, error) + // RemoveOwner removes an owner from a task. + RemoveOwner(ctx context.Context, task *domain.Task, user *domain.User) error + // RemoveOwnerWithID removes a member with id memberID from a task with taskID. + RemoveOwnerWithID(ctx context.Context, taskID, memberID string) error + // FindRuns retrieves a task runs according the filter. More fields can be applied. Filter can be nil. + FindRuns(ctx context.Context, task *domain.Task, filter *RunFilter) ([]domain.Run, error) + // FindRunsWithID retrieves runs of a task with taskID according the filter. More fields can be applied. Filter can be nil. + FindRunsWithID(ctx context.Context, taskID string, filter *RunFilter) ([]domain.Run, error) + // GetRun retrieves a refreshed instance if a task run. + GetRun(ctx context.Context, run *domain.Run) (*domain.Run, error) + // GetRunByID retrieves a specific task run by taskID and runID + GetRunByID(ctx context.Context, taskID, runID string) (*domain.Run, error) + // FindRunLogs return all log events for a task run. + FindRunLogs(ctx context.Context, run *domain.Run) ([]domain.LogEvent, error) + // FindRunLogsWithID return all log events for a run with runID of a task with taskID. + FindRunLogsWithID(ctx context.Context, taskID, runID string) ([]domain.LogEvent, error) + // RunManually manually start a run of the task now, overriding the current schedule. + RunManually(ctx context.Context, task *domain.Task) (*domain.Run, error) + // RunManuallyWithID manually start a run of a task with taskID now, overriding the current schedule. + RunManuallyWithID(ctx context.Context, taskID string) (*domain.Run, error) + // RetryRun retry a task run. + RetryRun(ctx context.Context, run *domain.Run) (*domain.Run, error) + // RetryRunWithID retry a run with runID of a task with taskID. + RetryRunWithID(ctx context.Context, taskID, runID string) (*domain.Run, error) + // CancelRun cancels a running task. + CancelRun(ctx context.Context, run *domain.Run) error + // CancelRunWithID cancels a running task. + CancelRunWithID(ctx context.Context, taskID, runID string) error + // FindLogs retrieves all logs for a task. + FindLogs(ctx context.Context, task *domain.Task) ([]domain.LogEvent, error) + // FindLogsWithID retrieves all logs for a task with taskID. + FindLogsWithID(ctx context.Context, taskID string) ([]domain.LogEvent, error) + // FindLabels retrieves labels of a task. + FindLabels(ctx context.Context, task *domain.Task) ([]domain.Label, error) + // FindLabelsWithID retrieves labels of an task with taskID. + FindLabelsWithID(ctx context.Context, taskID string) ([]domain.Label, error) + // AddLabel adds a label to a task. + AddLabel(ctx context.Context, task *domain.Task, label *domain.Label) (*domain.Label, error) + // AddLabelWithID adds a label with id labelID to a task with taskID. + AddLabelWithID(ctx context.Context, taskID, labelID string) (*domain.Label, error) + // RemoveLabel removes a label from a task. + RemoveLabel(ctx context.Context, task *domain.Task, label *domain.Label) error + // RemoveLabelWithID removes a label with id labelID from a task with taskID. 
+ RemoveLabelWithID(ctx context.Context, taskID, labelID string) error +} + +// tasksAPI implements TasksAPI +type tasksAPI struct { + apiClient *domain.ClientWithResponses +} + +// NewTasksAPI creates new instance of TasksAPI +func NewTasksAPI(apiClient *domain.ClientWithResponses) TasksAPI { + return &tasksAPI{ + apiClient: apiClient, + } +} + +func (t *tasksAPI) FindTasks(ctx context.Context, filter *TaskFilter) ([]domain.Task, error) { + params := &domain.GetTasksParams{} + if filter != nil { + if filter.Name != "" { + params.Name = &filter.Name + } + if filter.User != "" { + params.User = &filter.User + } + if filter.OrgID != "" { + params.OrgID = &filter.OrgID + } + if filter.OrgName != "" { + params.Org = &filter.OrgName + } + if filter.Status != "" { + status := domain.GetTasksParamsStatus(filter.Status) + params.Status = &status + } + if filter.Limit > 0 { + params.Limit = &filter.Limit + } + if filter.After != "" { + params.After = &filter.After + } + } + + response, err := t.apiClient.GetTasksWithResponse(ctx, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + if response.JSON200.Tasks == nil { + return nil, errors.New("tasks not found") + } + return *response.JSON200.Tasks, nil +} + +func (t *tasksAPI) GetTask(ctx context.Context, task *domain.Task) (*domain.Task, error) { + return t.GetTaskByID(ctx, task.Id) +} + +func (t *tasksAPI) GetTaskByID(ctx context.Context, taskID string) (*domain.Task, error) { + params := &domain.GetTasksIDParams{} + response, err := t.apiClient.GetTasksIDWithResponse(ctx, taskID, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON200, nil +} + +func (t *tasksAPI) createTask(ctx context.Context, taskReq *domain.TaskCreateRequest) (*domain.Task, error) { + params := &domain.PostTasksParams{} + response, err := t.apiClient.PostTasksWithResponse(ctx, params, domain.PostTasksJSONRequestBody(*taskReq)) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON201, nil +} + +func createTaskReqDetailed(name, flux string, every, cron *string, orgID string) *domain.TaskCreateRequest { + repetition := "" + if every != nil { + repetition = fmt.Sprintf("every: %s", *every) + } else if cron != nil { + repetition = fmt.Sprintf(`cron: "%s"`, *cron) + } + fullFlux := fmt.Sprintf(`option task = { name: "%s", %s } %s`, name, repetition, flux) + return createTaskReq(fullFlux, orgID) +} +func createTaskReq(flux string, orgID string) *domain.TaskCreateRequest { + + status := domain.TaskStatusTypeActive + taskReq := &domain.TaskCreateRequest{ + Flux: flux, + Status: &status, + OrgID: &orgID, + } + return taskReq +} + +func (t *tasksAPI) CreateTask(ctx context.Context, task *domain.Task) (*domain.Task, error) { + taskReq := createTaskReqDetailed(task.Name, task.Flux, task.Every, task.Cron, task.OrgID) + taskReq.Description = task.Description + taskReq.Status = task.Status + return t.createTask(ctx, taskReq) +} + +func (t *tasksAPI) CreateTaskWithEvery(ctx context.Context, name, flux, every, orgID string) (*domain.Task, error) { + taskReq := createTaskReqDetailed(name, flux, &every, nil, orgID) + return t.createTask(ctx, taskReq) +} + +func (t *tasksAPI) CreateTaskWithCron(ctx context.Context, 
name, flux, cron, orgID string) (*domain.Task, error) { + taskReq := createTaskReqDetailed(name, flux, nil, &cron, orgID) + return t.createTask(ctx, taskReq) +} + +func (t *tasksAPI) CreateTaskByFlux(ctx context.Context, flux, orgID string) (*domain.Task, error) { + taskReq := createTaskReq(flux, orgID) + return t.createTask(ctx, taskReq) +} + +func (t *tasksAPI) DeleteTask(ctx context.Context, task *domain.Task) error { + return t.DeleteTaskWithID(ctx, task.Id) +} + +func (t *tasksAPI) DeleteTaskWithID(ctx context.Context, taskID string) error { + params := &domain.DeleteTasksIDParams{} + response, err := t.apiClient.DeleteTasksIDWithResponse(ctx, taskID, params) + if err != nil { + return err + } + if response.JSONDefault != nil { + return domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return nil +} + +func (t *tasksAPI) UpdateTask(ctx context.Context, task *domain.Task) (*domain.Task, error) { + params := &domain.PatchTasksIDParams{} + updateReq := &domain.TaskUpdateRequest{ + Description: task.Description, + Flux: &task.Flux, + Name: &task.Name, + Offset: task.Offset, + Status: task.Status, + } + if task.Every != nil { + updateReq.Every = task.Every + } else { + updateReq.Cron = task.Cron + } + response, err := t.apiClient.PatchTasksIDWithResponse(ctx, task.Id, params, domain.PatchTasksIDJSONRequestBody(*updateReq)) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON200, nil +} + +func (t *tasksAPI) FindMembers(ctx context.Context, task *domain.Task) ([]domain.ResourceMember, error) { + return t.FindMembersWithID(ctx, task.Id) +} + +func (t *tasksAPI) FindMembersWithID(ctx context.Context, taskID string) ([]domain.ResourceMember, error) { + params := &domain.GetTasksIDMembersParams{} + response, err := t.apiClient.GetTasksIDMembersWithResponse(ctx, taskID, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + if response.JSON200.Users == nil { + return nil, fmt.Errorf("members for task '%s' not found", taskID) + } + return *response.JSON200.Users, nil +} + +func (t *tasksAPI) AddMember(ctx context.Context, task *domain.Task, user *domain.User) (*domain.ResourceMember, error) { + return t.AddMemberWithID(ctx, task.Id, *user.Id) +} + +func (t *tasksAPI) AddMemberWithID(ctx context.Context, taskID, memberID string) (*domain.ResourceMember, error) { + params := &domain.PostTasksIDMembersParams{} + body := &domain.PostTasksIDMembersJSONRequestBody{Id: memberID} + response, err := t.apiClient.PostTasksIDMembersWithResponse(ctx, taskID, params, *body) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON201, nil +} + +func (t *tasksAPI) RemoveMember(ctx context.Context, task *domain.Task, user *domain.User) error { + return t.RemoveMemberWithID(ctx, task.Id, *user.Id) +} + +func (t *tasksAPI) RemoveMemberWithID(ctx context.Context, taskID, memberID string) error { + params := &domain.DeleteTasksIDMembersIDParams{} + response, err := t.apiClient.DeleteTasksIDMembersIDWithResponse(ctx, taskID, memberID, params) + if err != nil { + return err + } + if response.JSONDefault != nil { + return domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return nil +} + +func (t 
*tasksAPI) FindOwners(ctx context.Context, task *domain.Task) ([]domain.ResourceOwner, error) { + return t.FindOwnersWithID(ctx, task.Id) +} + +func (t *tasksAPI) FindOwnersWithID(ctx context.Context, taskID string) ([]domain.ResourceOwner, error) { + params := &domain.GetTasksIDOwnersParams{} + response, err := t.apiClient.GetTasksIDOwnersWithResponse(ctx, taskID, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + if response.JSON200.Users == nil { + return nil, fmt.Errorf("owners for task '%s' not found", taskID) + } + return *response.JSON200.Users, nil +} + +func (t *tasksAPI) AddOwner(ctx context.Context, task *domain.Task, user *domain.User) (*domain.ResourceOwner, error) { + return t.AddOwnerWithID(ctx, task.Id, *user.Id) +} + +func (t *tasksAPI) AddOwnerWithID(ctx context.Context, taskID, memberID string) (*domain.ResourceOwner, error) { + params := &domain.PostTasksIDOwnersParams{} + body := &domain.PostTasksIDOwnersJSONRequestBody{Id: memberID} + response, err := t.apiClient.PostTasksIDOwnersWithResponse(ctx, taskID, params, *body) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON201, nil +} + +func (t *tasksAPI) RemoveOwner(ctx context.Context, task *domain.Task, user *domain.User) error { + return t.RemoveOwnerWithID(ctx, task.Id, *user.Id) +} + +func (t *tasksAPI) RemoveOwnerWithID(ctx context.Context, taskID, memberID string) error { + params := &domain.DeleteTasksIDOwnersIDParams{} + response, err := t.apiClient.DeleteTasksIDOwnersIDWithResponse(ctx, taskID, memberID, params) + if err != nil { + return err + } + if response.JSONDefault != nil { + return domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return nil +} + +func (t *tasksAPI) FindRuns(ctx context.Context, task *domain.Task, filter *RunFilter) ([]domain.Run, error) { + return t.FindRunsWithID(ctx, task.Id, filter) +} + +func (t *tasksAPI) FindRunsWithID(ctx context.Context, taskID string, filter *RunFilter) ([]domain.Run, error) { + params := &domain.GetTasksIDRunsParams{} + if filter != nil { + if !filter.AfterTime.IsZero() { + params.AfterTime = &filter.AfterTime + } + if !filter.BeforeTime.IsZero() { + params.BeforeTime = &filter.BeforeTime + } + if filter.Limit > 0 { + params.Limit = &filter.Limit + } + if filter.After != "" { + params.After = &filter.After + } + } + response, err := t.apiClient.GetTasksIDRunsWithResponse(ctx, taskID, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return *response.JSON200.Runs, nil +} + +func (t *tasksAPI) GetRun(ctx context.Context, run *domain.Run) (*domain.Run, error) { + return t.GetRunByID(ctx, *run.TaskID, *run.Id) +} + +func (t *tasksAPI) GetRunByID(ctx context.Context, taskID, runID string) (*domain.Run, error) { + params := &domain.GetTasksIDRunsIDParams{} + response, err := t.apiClient.GetTasksIDRunsIDWithResponse(ctx, taskID, runID, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON200, nil +} + +func (t *tasksAPI) FindRunLogs(ctx context.Context, run *domain.Run) ([]domain.LogEvent, error) { + return t.FindRunLogsWithID(ctx, 
*run.TaskID, *run.Id)
+}
+func (t *tasksAPI) FindRunLogsWithID(ctx context.Context, taskID, runID string) ([]domain.LogEvent, error) {
+ params := &domain.GetTasksIDRunsIDLogsParams{}
+
+ response, err := t.apiClient.GetTasksIDRunsIDLogsWithResponse(ctx, taskID, runID, params)
+ if err != nil {
+ return nil, err
+ }
+ if response.JSONDefault != nil {
+ return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode())
+ }
+ if response.JSON200.Events == nil {
+ return nil, fmt.Errorf("logs for task '%s' run '%s 'not found", taskID, runID)
+ }
+ return *response.JSON200.Events, nil
+}
+
+func (t *tasksAPI) RunManually(ctx context.Context, task *domain.Task) (*domain.Run, error) {
+ return t.RunManuallyWithID(ctx, task.Id)
+}
+
+func (t *tasksAPI) RunManuallyWithID(ctx context.Context, taskID string) (*domain.Run, error) {
+ params := domain.PostTasksIDRunsParams{}
+ response, err := t.apiClient.PostTasksIDRunsWithResponse(ctx, taskID, &params, domain.PostTasksIDRunsJSONRequestBody{})
+ if err != nil {
+ return nil, err
+ }
+ if response.JSONDefault != nil {
+ return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode())
+ }
+ return response.JSON201, nil
+}
+
+func (t *tasksAPI) RetryRun(ctx context.Context, run *domain.Run) (*domain.Run, error) {
+ return t.RetryRunWithID(ctx, *run.TaskID, *run.Id)
+}
+
+func (t *tasksAPI) RetryRunWithID(ctx context.Context, taskID, runID string) (*domain.Run, error) {
+ params := &domain.PostTasksIDRunsIDRetryParams{}
+ response, err := t.apiClient.PostTasksIDRunsIDRetryWithBodyWithResponse(ctx, taskID, runID, params, "application/json; charset=utf-8", nil)
+ if err != nil {
+ return nil, err
+ }
+ if response.JSONDefault != nil {
+ return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode())
+ }
+ return response.JSON200, nil
+}
+
+func (t *tasksAPI) CancelRun(ctx context.Context, run *domain.Run) error {
+ return t.CancelRunWithID(ctx, *run.TaskID, *run.Id)
+}
+
+func (t *tasksAPI) CancelRunWithID(ctx context.Context, taskID, runID string) error {
+ params := &domain.DeleteTasksIDRunsIDParams{}
+ response, err := t.apiClient.DeleteTasksIDRunsIDWithResponse(ctx, taskID, runID, params)
+ if err != nil {
+ return err
+ }
+ if response.JSONDefault != nil {
+ return domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode())
+ }
+ return nil
+}
+
+func (t *tasksAPI) FindLogs(ctx context.Context, task *domain.Task) ([]domain.LogEvent, error) {
+ return t.FindLogsWithID(ctx, task.Id)
+}
+
+func (t *tasksAPI) FindLogsWithID(ctx context.Context, taskID string) ([]domain.LogEvent, error) {
+ params := &domain.GetTasksIDLogsParams{}
+
+ response, err := t.apiClient.GetTasksIDLogsWithResponse(ctx, taskID, params)
+ if err != nil {
+ return nil, err
+ }
+ if response.JSONDefault != nil {
+ return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode())
+ }
+ if response.JSON200.Events == nil {
+ return nil, fmt.Errorf("logs for task '%s' not found", taskID)
+ }
+ return *response.JSON200.Events, nil
+}
+
+func (t *tasksAPI) FindLabels(ctx context.Context, task *domain.Task) ([]domain.Label, error) {
+ return t.FindLabelsWithID(ctx, task.Id)
+}
+
+func (t *tasksAPI) FindLabelsWithID(ctx context.Context, taskID string) ([]domain.Label, error) {
+ params := &domain.GetTasksIDLabelsParams{}
+ response, err := t.apiClient.GetTasksIDLabelsWithResponse(ctx, taskID, params)
+ if err != nil {
+ return nil, err
+ }
+ if response.JSONDefault != nil {
+ return nil,
domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + if response.JSON200.Labels == nil { + return nil, fmt.Errorf("lables for task '%s' not found", taskID) + } + return *response.JSON200.Labels, nil +} + +func (t *tasksAPI) AddLabel(ctx context.Context, task *domain.Task, label *domain.Label) (*domain.Label, error) { + return t.AddLabelWithID(ctx, task.Id, *label.Id) +} + +func (t *tasksAPI) AddLabelWithID(ctx context.Context, taskID, labelID string) (*domain.Label, error) { + params := &domain.PostTasksIDLabelsParams{} + body := &domain.PostTasksIDLabelsJSONRequestBody{LabelID: &labelID} + response, err := t.apiClient.PostTasksIDLabelsWithResponse(ctx, taskID, params, *body) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return response.JSON201.Label, nil +} + +func (t *tasksAPI) RemoveLabel(ctx context.Context, task *domain.Task, label *domain.Label) error { + return t.RemoveLabelWithID(ctx, task.Id, *label.Id) +} + +func (t *tasksAPI) RemoveLabelWithID(ctx context.Context, taskID, memberID string) error { + params := &domain.DeleteTasksIDLabelsIDParams{} + response, err := t.apiClient.DeleteTasksIDLabelsIDWithResponse(ctx, taskID, memberID, params) + if err != nil { + return err + } + if response.JSONDefault != nil { + return domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return nil +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/users.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/users.go new file mode 100644 index 0000000..11d54ea --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/users.go @@ -0,0 +1,288 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. 
+ +package api + +import ( + "context" + "encoding/base64" + "fmt" + nethttp "net/http" + "net/http/cookiejar" + "sync" + + "github.com/influxdata/influxdb-client-go/v2/api/http" + "github.com/influxdata/influxdb-client-go/v2/domain" + "golang.org/x/net/publicsuffix" +) + +// UsersAPI provides methods for managing users in a InfluxDB server +type UsersAPI interface { + // GetUsers returns all users + GetUsers(ctx context.Context) (*[]domain.User, error) + // FindUserByID returns user with userID + FindUserByID(ctx context.Context, userID string) (*domain.User, error) + // FindUserByName returns user with name userName + FindUserByName(ctx context.Context, userName string) (*domain.User, error) + // CreateUser creates new user + CreateUser(ctx context.Context, user *domain.User) (*domain.User, error) + // CreateUserWithName creates new user with userName + CreateUserWithName(ctx context.Context, userName string) (*domain.User, error) + // UpdateUser updates user + UpdateUser(ctx context.Context, user *domain.User) (*domain.User, error) + // UpdateUserPassword sets password for an user + UpdateUserPassword(ctx context.Context, user *domain.User, password string) error + // UpdateUserPasswordWithID sets password for an user with userID + UpdateUserPasswordWithID(ctx context.Context, userID string, password string) error + // DeleteUserWithID deletes an user with userID + DeleteUserWithID(ctx context.Context, userID string) error + // DeleteUser deletes an user + DeleteUser(ctx context.Context, user *domain.User) error + // Me returns actual user + Me(ctx context.Context) (*domain.User, error) + // MeUpdatePassword set password of actual user + MeUpdatePassword(ctx context.Context, oldPassword, newPassword string) error + // SignIn exchanges username and password credentials to establish an authenticated session with the InfluxDB server. The Client's authentication token is then ignored, it can be empty. 
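+ // A minimal sketch (client is an initialized influxdb2 Client; the credentials are placeholders):
+ //   usersAPI := client.UsersAPI()
+ //   if err := usersAPI.SignIn(ctx, "myuser", "mypassword"); err != nil {
+ //       // handle the authentication failure
+ //   }
+ //   defer usersAPI.SignOut(ctx)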
+ SignIn(ctx context.Context, username, password string) error + // SignOut signs out previously signed in user + SignOut(ctx context.Context) error +} + +// usersAPI implements UsersAPI +type usersAPI struct { + apiClient *domain.ClientWithResponses + httpService http.Service + httpClient *nethttp.Client + deleteCookieJar bool + lock sync.Mutex +} + +// NewUsersAPI creates new instance of UsersAPI +func NewUsersAPI(apiClient *domain.ClientWithResponses, httpService http.Service, httpClient *nethttp.Client) UsersAPI { + return &usersAPI{ + apiClient: apiClient, + httpService: httpService, + httpClient: httpClient, + } +} + +func (u *usersAPI) GetUsers(ctx context.Context) (*[]domain.User, error) { + params := &domain.GetUsersParams{} + response, err := u.apiClient.GetUsersWithResponse(ctx, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return userResponsesToUsers(response.JSON200.Users), nil +} + +func (u *usersAPI) FindUserByID(ctx context.Context, userID string) (*domain.User, error) { + params := &domain.GetUsersIDParams{} + response, err := u.apiClient.GetUsersIDWithResponse(ctx, userID, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return userResponseToUser(response.JSON200), nil +} + +func (u *usersAPI) FindUserByName(ctx context.Context, userName string) (*domain.User, error) { + users, err := u.GetUsers(ctx) + if err != nil { + return nil, err + } + var user *domain.User + for _, u := range *users { + if u.Name == userName { + user = &u + break + } + } + if user == nil { + return nil, fmt.Errorf("user '%s' not found", userName) + } + return user, nil +} + +func (u *usersAPI) CreateUserWithName(ctx context.Context, userName string) (*domain.User, error) { + user := &domain.User{Name: userName} + return u.CreateUser(ctx, user) +} + +func (u *usersAPI) CreateUser(ctx context.Context, user *domain.User) (*domain.User, error) { + params := &domain.PostUsersParams{} + response, err := u.apiClient.PostUsersWithResponse(ctx, params, domain.PostUsersJSONRequestBody(*user)) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return userResponseToUser(response.JSON201), nil +} + +func (u *usersAPI) UpdateUser(ctx context.Context, user *domain.User) (*domain.User, error) { + params := &domain.PatchUsersIDParams{} + response, err := u.apiClient.PatchUsersIDWithResponse(ctx, *user.Id, params, domain.PatchUsersIDJSONRequestBody(*user)) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return userResponseToUser(response.JSON200), nil +} + +func (u *usersAPI) UpdateUserPassword(ctx context.Context, user *domain.User, password string) error { + return u.UpdateUserPasswordWithID(ctx, *user.Id, password) +} + +func (u *usersAPI) UpdateUserPasswordWithID(ctx context.Context, userID string, password string) error { + params := &domain.PostUsersIDPasswordParams{} + body := &domain.PasswordResetBody{Password: password} + response, err := u.apiClient.PostUsersIDPasswordWithResponse(ctx, userID, params, domain.PostUsersIDPasswordJSONRequestBody(*body)) + if err != nil { + return err + } + if response.JSONDefault != nil { + return 
domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return nil +} + +func (u *usersAPI) DeleteUser(ctx context.Context, user *domain.User) error { + return u.DeleteUserWithID(ctx, *user.Id) +} + +func (u *usersAPI) DeleteUserWithID(ctx context.Context, userID string) error { + params := &domain.DeleteUsersIDParams{} + response, err := u.apiClient.DeleteUsersIDWithResponse(ctx, userID, params) + if err != nil { + return err + } + if response.JSONDefault != nil { + return domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return nil +} + +func (u *usersAPI) Me(ctx context.Context) (*domain.User, error) { + params := &domain.GetMeParams{} + response, err := u.apiClient.GetMeWithResponse(ctx, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return userResponseToUser(response.JSON200), nil +} + +func (u *usersAPI) MeUpdatePassword(ctx context.Context, oldPassword, newPassword string) error { + u.lock.Lock() + defer u.lock.Unlock() + me, err := u.Me(ctx) + if err != nil { + return err + } + creds := base64.StdEncoding.EncodeToString([]byte(me.Name + ":" + oldPassword)) + auth := u.httpService.Authorization() + defer u.httpService.SetAuthorization(auth) + u.httpService.SetAuthorization("Basic " + creds) + params := &domain.PutMePasswordParams{} + body := &domain.PasswordResetBody{Password: newPassword} + response, err := u.apiClient.PutMePasswordWithResponse(ctx, params, domain.PutMePasswordJSONRequestBody(*body)) + if err != nil { + return err + } + if response.JSONDefault != nil { + return domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + return nil +} + +func (u *usersAPI) SignIn(ctx context.Context, username, password string) error { + u.lock.Lock() + defer u.lock.Unlock() + if u.httpClient.Jar == nil { + jar, err := cookiejar.New(&cookiejar.Options{PublicSuffixList: publicsuffix.List}) + if err != nil { + return err + } + u.httpClient.Jar = jar + u.deleteCookieJar = true + } + creds := base64.StdEncoding.EncodeToString([]byte(username + ":" + password)) + u.httpService.SetAuthorization("Basic " + creds) + defer u.httpService.SetAuthorization("") + resp, err := u.apiClient.PostSigninWithResponse(ctx, &domain.PostSigninParams{}) + if err != nil { + return err + } + if resp.JSONDefault != nil { + return domain.ErrorToHTTPError(resp.JSONDefault, resp.StatusCode()) + } + if resp.JSON401 != nil { + return domain.ErrorToHTTPError(resp.JSON401, resp.StatusCode()) + } + if resp.JSON403 != nil { + return domain.ErrorToHTTPError(resp.JSON403, resp.StatusCode()) + } + return nil +} + +func (u *usersAPI) SignOut(ctx context.Context) error { + u.lock.Lock() + defer u.lock.Unlock() + resp, err := u.apiClient.PostSignoutWithResponse(ctx, &domain.PostSignoutParams{}) + if err != nil { + return err + } + if resp.JSONDefault != nil { + return domain.ErrorToHTTPError(resp.JSONDefault, resp.StatusCode()) + } + if resp.JSON401 != nil { + return domain.ErrorToHTTPError(resp.JSON401, resp.StatusCode()) + } + if u.deleteCookieJar { + u.httpClient.Jar = nil + } + return nil +} + +func userResponseToUser(ur *domain.UserResponse) *domain.User { + if ur == nil { + return nil + } + user := &domain.User{ + Id: ur.Id, + Name: ur.Name, + OauthID: ur.OauthID, + Status: userResponseStatusToUserStatus(ur.Status), + } + return user +} + +func userResponseStatusToUserStatus(urs *domain.UserResponseStatus) *domain.UserStatus { + if urs == 
nil { + return nil + } + us := domain.UserStatus(*urs) + return &us +} + +func userResponsesToUsers(urs *[]domain.UserResponse) *[]domain.User { + if urs == nil { + return nil + } + us := make([]domain.User, len(*urs)) + for i, ur := range *urs { + us[i] = *userResponseToUser(&ur) + } + return &us +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/write.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/write.go new file mode 100644 index 0000000..b6d60ce --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/write.go @@ -0,0 +1,267 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +package api + +import ( + "context" + "strings" + "sync" + "sync/atomic" + "time" + + http2 "github.com/influxdata/influxdb-client-go/v2/api/http" + "github.com/influxdata/influxdb-client-go/v2/api/write" + "github.com/influxdata/influxdb-client-go/v2/internal/log" + iwrite "github.com/influxdata/influxdb-client-go/v2/internal/write" +) + +// WriteFailedCallback is synchronously notified in case non-blocking write fails. +// batch contains complete payload, error holds detailed error information, +// retryAttempts means number of retries, 0 if it failed during first write. +// It must return true if WriteAPI should continue with retrying, false will discard the batch. +type WriteFailedCallback func(batch string, error http2.Error, retryAttempts uint) bool + +// WriteAPI is Write client interface with non-blocking methods for writing time series data asynchronously in batches into an InfluxDB server. +// WriteAPI can be used concurrently. +// When using multiple goroutines for writing, use a single WriteAPI instance in all goroutines. +type WriteAPI interface { + // WriteRecord writes asynchronously line protocol record into bucket. + // WriteRecord adds record into the buffer which is sent on the background when it reaches the batch size. + // Blocking alternative is available in the WriteAPIBlocking interface + WriteRecord(line string) + // WritePoint writes asynchronously Point into bucket. + // WritePoint adds Point into the buffer which is sent on the background when it reaches the batch size. + // Blocking alternative is available in the WriteAPIBlocking interface + WritePoint(point *write.Point) + // Flush forces all pending writes from the buffer to be sent + Flush() + // Errors returns a channel for reading errors which occurs during async writes. + // Must be called before performing any writes for errors to be collected. + // The chan is unbuffered and must be drained or the writer will block. + Errors() <-chan error + // SetWriteFailedCallback sets callback allowing custom handling of failed writes. + // If callback returns true, failed batch will be retried, otherwise discarded. 
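+ // A hedged sketch (the retry limit is illustrative):
+ //   writeAPI.SetWriteFailedCallback(func(batch string, err http2.Error, retryAttempts uint) bool {
+ //       return retryAttempts < 3 // retry the batch up to three times, then discard it
+ //   })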
+ SetWriteFailedCallback(cb WriteFailedCallback) +} + +// WriteAPIImpl provides main implementation for WriteAPI +type WriteAPIImpl struct { + service *iwrite.Service + writeBuffer []string + + errCh chan error + writeCh chan *iwrite.Batch + bufferCh chan string + writeStop chan struct{} + bufferStop chan struct{} + bufferFlush chan struct{} + doneCh chan struct{} + bufferInfoCh chan writeBuffInfoReq + writeInfoCh chan writeBuffInfoReq + writeOptions *write.Options + closingMu *sync.Mutex + isErrChReader int32 +} + +type writeBuffInfoReq struct { + writeBuffLen int +} + +// NewWriteAPI returns new non-blocking write client for writing data to bucket belonging to org +func NewWriteAPI(org string, bucket string, service http2.Service, writeOptions *write.Options) *WriteAPIImpl { + w := &WriteAPIImpl{ + service: iwrite.NewService(org, bucket, service, writeOptions), + errCh: make(chan error, 1), + writeBuffer: make([]string, 0, writeOptions.BatchSize()+1), + writeCh: make(chan *iwrite.Batch), + bufferCh: make(chan string), + bufferStop: make(chan struct{}), + writeStop: make(chan struct{}), + bufferFlush: make(chan struct{}), + doneCh: make(chan struct{}), + bufferInfoCh: make(chan writeBuffInfoReq), + writeInfoCh: make(chan writeBuffInfoReq), + writeOptions: writeOptions, + closingMu: &sync.Mutex{}, + } + + go w.bufferProc() + go w.writeProc() + + return w +} + +// SetWriteFailedCallback sets callback allowing custom handling of failed writes. +// If callback returns true, failed batch will be retried, otherwise discarded. +func (w *WriteAPIImpl) SetWriteFailedCallback(cb WriteFailedCallback) { + w.service.SetBatchErrorCallback(func(batch *iwrite.Batch, error2 http2.Error) bool { + return cb(batch.Batch, error2, batch.RetryAttempts) + }) +} + +// Errors returns a channel for reading errors which occurs during async writes. +// Must be called before performing any writes for errors to be collected. +// New error is skipped when channel is not read. +func (w *WriteAPIImpl) Errors() <-chan error { + w.setErrChanRead() + return w.errCh +} + +// Flush forces all pending writes from the buffer to be sent. +// Flush also tries sending batches from retry queue without additional retrying. 
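+// A minimal non-blocking write sketch (assumes an initialized Client named client;
+// the org, bucket and record are illustrative):
+//   writeAPI := client.WriteAPI("my-org", "my-bucket")
+//   writeAPI.WriteRecord("temperature,color=RED temp=68")
+//   writeAPI.Flush() // push the buffered record out before the program exits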
+func (w *WriteAPIImpl) Flush() { + w.bufferFlush <- struct{}{} + w.waitForFlushing() + w.service.Flush() +} + +func (w *WriteAPIImpl) waitForFlushing() { + for { + w.bufferInfoCh <- writeBuffInfoReq{} + writeBuffInfo := <-w.bufferInfoCh + if writeBuffInfo.writeBuffLen == 0 { + break + } + log.Info("Waiting buffer is flushed") + <-time.After(time.Millisecond) + } + for { + w.writeInfoCh <- writeBuffInfoReq{} + writeBuffInfo := <-w.writeInfoCh + if writeBuffInfo.writeBuffLen == 0 { + break + } + log.Info("Waiting buffer is flushed") + <-time.After(time.Millisecond) + } +} + +func (w *WriteAPIImpl) bufferProc() { + log.Info("Buffer proc started") + ticker := time.NewTicker(time.Duration(w.writeOptions.FlushInterval()) * time.Millisecond) +x: + for { + select { + case line := <-w.bufferCh: + w.writeBuffer = append(w.writeBuffer, line) + if len(w.writeBuffer) == int(w.writeOptions.BatchSize()) { + w.flushBuffer() + } + case <-ticker.C: + w.flushBuffer() + case <-w.bufferFlush: + w.flushBuffer() + case <-w.bufferStop: + ticker.Stop() + w.flushBuffer() + break x + case buffInfo := <-w.bufferInfoCh: + buffInfo.writeBuffLen = len(w.bufferInfoCh) + w.bufferInfoCh <- buffInfo + } + } + log.Info("Buffer proc finished") + w.doneCh <- struct{}{} +} + +func (w *WriteAPIImpl) flushBuffer() { + if len(w.writeBuffer) > 0 { + log.Info("sending batch") + batch := iwrite.NewBatch(buffer(w.writeBuffer), w.writeOptions.MaxRetryTime()) + w.writeCh <- batch + w.writeBuffer = w.writeBuffer[:0] + } +} +func (w *WriteAPIImpl) isErrChanRead() bool { + return atomic.LoadInt32(&w.isErrChReader) > 0 +} + +func (w *WriteAPIImpl) setErrChanRead() { + atomic.StoreInt32(&w.isErrChReader, 1) +} + +func (w *WriteAPIImpl) writeProc() { + log.Info("Write proc started") +x: + for { + select { + case batch := <-w.writeCh: + err := w.service.HandleWrite(context.Background(), batch) + if err != nil && w.isErrChanRead() { + select { + case w.errCh <- err: + default: + log.Warn("Cannot write error to error channel, it is not read") + } + } + case <-w.writeStop: + log.Info("Write proc: received stop") + break x + case buffInfo := <-w.writeInfoCh: + buffInfo.writeBuffLen = len(w.writeCh) + w.writeInfoCh <- buffInfo + } + } + log.Info("Write proc finished") + w.doneCh <- struct{}{} +} + +// Close finishes outstanding write operations, +// stop background routines and closes all channels +func (w *WriteAPIImpl) Close() { + w.closingMu.Lock() + defer w.closingMu.Unlock() + if w.writeCh != nil { + // Flush outstanding metrics + w.Flush() + + // stop and wait for buffer proc + close(w.bufferStop) + <-w.doneCh + + close(w.bufferFlush) + close(w.bufferCh) + + // stop and wait for write proc + close(w.writeStop) + <-w.doneCh + + close(w.writeCh) + close(w.writeInfoCh) + close(w.bufferInfoCh) + w.writeCh = nil + + close(w.errCh) + w.errCh = nil + } +} + +// WriteRecord writes asynchronously line protocol record into bucket. +// WriteRecord adds record into the buffer which is sent on the background when it reaches the batch size. +// Blocking alternative is available in the WriteAPIBlocking interface +func (w *WriteAPIImpl) WriteRecord(line string) { + b := []byte(line) + b = append(b, 0xa) + w.bufferCh <- string(b) +} + +// WritePoint writes asynchronously Point into bucket. +// WritePoint adds Point into the buffer which is sent on the background when it reaches the batch size. 
+// Blocking alternative is available in the WriteAPIBlocking interface +func (w *WriteAPIImpl) WritePoint(point *write.Point) { + line, err := w.service.EncodePoints(point) + if err != nil { + log.Errorf("point encoding error: %s\n", err.Error()) + if w.errCh != nil { + w.errCh <- err + } + } else { + w.bufferCh <- line + } +} + +func buffer(lines []string) string { + return strings.Join(lines, "") +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/write/ext.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/write/ext.go new file mode 100644 index 0000000..f600f53 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/write/ext.go @@ -0,0 +1,106 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +package write + +import ( + "fmt" + "strconv" + "strings" + "time" +) + +// Point extension methods for test + +// PointToLineProtocolBuffer creates InfluxDB line protocol string from the Point, converting associated timestamp according to precision +// and write result to the string builder +func PointToLineProtocolBuffer(p *Point, sb *strings.Builder, precision time.Duration) { + escapeKey(sb, p.Name(), false) + sb.WriteRune(',') + for i, t := range p.TagList() { + if i > 0 { + sb.WriteString(",") + } + escapeKey(sb, t.Key, true) + sb.WriteString("=") + escapeKey(sb, t.Value, true) + } + sb.WriteString(" ") + for i, f := range p.FieldList() { + if i > 0 { + sb.WriteString(",") + } + escapeKey(sb, f.Key, true) + sb.WriteString("=") + switch f.Value.(type) { + case string: + sb.WriteString(`"`) + escapeValue(sb, f.Value.(string)) + sb.WriteString(`"`) + default: + sb.WriteString(fmt.Sprintf("%v", f.Value)) + } + switch f.Value.(type) { + case int64: + sb.WriteString("i") + case uint64: + sb.WriteString("u") + } + } + if !p.Time().IsZero() { + sb.WriteString(" ") + switch precision { + case time.Microsecond: + sb.WriteString(strconv.FormatInt(p.Time().UnixNano()/1000, 10)) + case time.Millisecond: + sb.WriteString(strconv.FormatInt(p.Time().UnixNano()/1000000, 10)) + case time.Second: + sb.WriteString(strconv.FormatInt(p.Time().Unix(), 10)) + default: + sb.WriteString(strconv.FormatInt(p.Time().UnixNano(), 10)) + } + } + sb.WriteString("\n") +} + +// PointToLineProtocol creates InfluxDB line protocol string from the Point, converting associated timestamp according to precision +func PointToLineProtocol(p *Point, precision time.Duration) string { + var sb strings.Builder + sb.Grow(1024) + PointToLineProtocolBuffer(p, &sb, precision) + return sb.String() +} + +func escapeKey(sb *strings.Builder, key string, escapeEqual bool) { + for _, r := range key { + switch r { + case '\n': + sb.WriteString(`\\n`) + continue + case '\r': + sb.WriteString(`\\r`) + continue + case '\t': + sb.WriteString(`\\t`) + continue + case ' ', ',': + sb.WriteString(`\`) + case '=': + if escapeEqual { + sb.WriteString(`\`) + } + } + sb.WriteRune(r) + } +} + +func escapeValue(sb *strings.Builder, value string) { + for _, r := range value { + switch r { + case '\\', '"': + sb.WriteString(`\`) + } + sb.WriteRune(r) + } +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/write/options.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/write/options.go new file mode 100644 index 0000000..7d85ad3 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/write/options.go @@ -0,0 +1,199 @@ +// Copyright 2020-2021 InfluxData, Inc. 
All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +package write + +import ( + "time" +) + +// Options holds write configuration properties +type Options struct { + // Maximum number of points sent to server in single request. Default 5000 + batchSize uint + // Interval, in ms, in which is buffer flushed if it has not been already written (by reaching batch size) . Default 1000ms + flushInterval uint + // Precision to use in writes for timestamp. In unit of duration: time.Nanosecond, time.Microsecond, time.Millisecond, time.Second + // Default time.Nanosecond + precision time.Duration + // Whether to use GZip compression in requests. Default false + useGZip bool + // Tags added to each point during writing. If a point already has a tag with the same key, it is left unchanged. + defaultTags map[string]string + // Default retry interval in ms, if not sent by server. Default 5,000. + retryInterval uint + // Maximum count of retry attempts of failed writes, default 5. + maxRetries uint + // Maximum number of points to keep for retry. Should be multiple of BatchSize. Default 50,000. + retryBufferLimit uint + // The maximum delay between each retry attempt in milliseconds, default 125,000. + maxRetryInterval uint + // The maximum total retry timeout in millisecond, default 180,000. + maxRetryTime uint + // The base for the exponential retry delay + exponentialBase uint + // InfluxDB Enterprise write consistency as explained in https://docs.influxdata.com/enterprise_influxdb/v1.9/concepts/clustering/#write-consistency + consistency Consistency +} + +const ( + // ConsistencyOne requires at least one data node acknowledged a write. + ConsistencyOne Consistency = "one" + + // ConsistencyAll requires all data nodes to acknowledge a write. + ConsistencyAll Consistency = "all" + + // ConsistencyQuorum requires a quorum of data nodes to acknowledge a write. + ConsistencyQuorum Consistency = "quorum" + + // ConsistencyAny allows for hinted hand off, potentially no write happened yet. + ConsistencyAny Consistency = "any" +) + +// Consistency defines enum for allows consistency values for InfluxDB Enterprise, as explained https://docs.influxdata.com/enterprise_influxdb/v1.9/concepts/clustering/#write-consistency +type Consistency string + +// BatchSize returns size of batch +func (o *Options) BatchSize() uint { + return o.batchSize +} + +// SetBatchSize sets number of points sent in single request +func (o *Options) SetBatchSize(batchSize uint) *Options { + o.batchSize = batchSize + return o +} + +// FlushInterval returns flush interval in ms +func (o *Options) FlushInterval() uint { + return o.flushInterval +} + +// SetFlushInterval sets flush interval in ms in which is buffer flushed if it has not been already written +func (o *Options) SetFlushInterval(flushIntervalMs uint) *Options { + o.flushInterval = flushIntervalMs + return o +} + +// RetryInterval returns the default retry interval in ms, if not sent by server. Default 5,000. +func (o *Options) RetryInterval() uint { + return o.retryInterval +} + +// SetRetryInterval sets the time to wait before retry unsuccessful write in ms, if not sent by server +func (o *Options) SetRetryInterval(retryIntervalMs uint) *Options { + o.retryInterval = retryIntervalMs + return o +} + +// MaxRetries returns maximum count of retry attempts of failed writes, default 5. 
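+// A retry configuration sketch using the fluent setters in this file (values are illustrative):
+//   opts := DefaultOptions().SetMaxRetries(3).SetRetryInterval(10_000).SetMaxRetryTime(60_000)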
+func (o *Options) MaxRetries() uint { + return o.maxRetries +} + +// SetMaxRetries sets maximum count of retry attempts of failed writes. +// Setting zero value disables retry strategy. +func (o *Options) SetMaxRetries(maxRetries uint) *Options { + o.maxRetries = maxRetries + return o +} + +// RetryBufferLimit returns retry buffer limit. +func (o *Options) RetryBufferLimit() uint { + return o.retryBufferLimit +} + +// SetRetryBufferLimit sets maximum number of points to keep for retry. Should be multiple of BatchSize. +func (o *Options) SetRetryBufferLimit(retryBufferLimit uint) *Options { + o.retryBufferLimit = retryBufferLimit + return o +} + +// MaxRetryInterval returns the maximum delay between each retry attempt in milliseconds, default 125,000. +func (o *Options) MaxRetryInterval() uint { + return o.maxRetryInterval +} + +// SetMaxRetryInterval sets the maximum delay between each retry attempt in millisecond +func (o *Options) SetMaxRetryInterval(maxRetryIntervalMs uint) *Options { + o.maxRetryInterval = maxRetryIntervalMs + return o +} + +// MaxRetryTime returns the maximum total retry timeout in millisecond, default 180,000. +func (o *Options) MaxRetryTime() uint { + return o.maxRetryTime +} + +// SetMaxRetryTime sets the maximum total retry timeout in millisecond. +func (o *Options) SetMaxRetryTime(maxRetryTimeMs uint) *Options { + o.maxRetryTime = maxRetryTimeMs + return o +} + +// ExponentialBase returns the base for the exponential retry delay. Default 2. +func (o *Options) ExponentialBase() uint { + return o.exponentialBase +} + +// SetExponentialBase sets the base for the exponential retry delay. +func (o *Options) SetExponentialBase(retryExponentialBase uint) *Options { + o.exponentialBase = retryExponentialBase + return o +} + +// Precision returns time precision for writes +func (o *Options) Precision() time.Duration { + return o.precision +} + +// SetPrecision sets time precision to use in writes for timestamp. In unit of duration: time.Nanosecond, time.Microsecond, time.Millisecond, time.Second +func (o *Options) SetPrecision(precision time.Duration) *Options { + o.precision = precision + return o +} + +// UseGZip returns true if write request are gzip`ed +func (o *Options) UseGZip() bool { + return o.useGZip +} + +// SetUseGZip specifies whether to use GZip compression in write requests. +func (o *Options) SetUseGZip(useGZip bool) *Options { + o.useGZip = useGZip + return o +} + +// AddDefaultTag adds a default tag. DefaultTags are added to each written point. +// If a tag with the same key already exist it is overwritten. +// If a point already defines such a tag, it is left unchanged. 
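+// Example sketch (the tag key and value are illustrative):
+//   DefaultOptions().AddDefaultTag("host", "brewhouse-pi")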
+func (o *Options) AddDefaultTag(key, value string) *Options { + o.DefaultTags()[key] = value + return o +} + +// DefaultTags returns set of default tags +func (o *Options) DefaultTags() map[string]string { + if o.defaultTags == nil { + o.defaultTags = make(map[string]string) + } + return o.defaultTags +} + +// Consistency returns consistency for param value +func (o *Options) Consistency() Consistency { + return o.consistency +} + +// SetConsistency allows setting InfluxDB Enterprise write consistency, as explained in https://docs.influxdata.com/enterprise_influxdb/v1.9/concepts/clustering/#write-consistency */ +func (o *Options) SetConsistency(consistency Consistency) *Options { + o.consistency = consistency + return o +} + +// DefaultOptions returns Options object with default values +func DefaultOptions() *Options { + return &Options{batchSize: 5_000, flushInterval: 1_000, precision: time.Nanosecond, useGZip: false, retryBufferLimit: 50_000, defaultTags: make(map[string]string), + maxRetries: 5, retryInterval: 5_000, maxRetryInterval: 125_000, maxRetryTime: 180_000, exponentialBase: 2} +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/write/point.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/write/point.go new file mode 100644 index 0000000..91c9c0e --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/write/point.go @@ -0,0 +1,162 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +// Package write provides the Point struct +package write + +import ( + "fmt" + "sort" + "time" + + lp "github.com/influxdata/line-protocol" +) + +// Point is represents InfluxDB time series point, holding tags and fields +type Point struct { + measurement string + tags []*lp.Tag + fields []*lp.Field + timestamp time.Time +} + +// TagList returns a slice containing tags of a Point. +func (m *Point) TagList() []*lp.Tag { + return m.tags +} + +// FieldList returns a slice containing the fields of a Point. +func (m *Point) FieldList() []*lp.Field { + return m.fields +} + +// SetTime set timestamp for a Point. +func (m *Point) SetTime(timestamp time.Time) *Point { + m.timestamp = timestamp + return m +} + +// Time is the timestamp of a Point. +func (m *Point) Time() time.Time { + return m.timestamp +} + +// SortTags orders the tags of a point alphanumerically by key. +// This is just here as a helper, to make it easy to keep tags sorted if you are creating a Point manually. +func (m *Point) SortTags() *Point { + sort.Slice(m.tags, func(i, j int) bool { return m.tags[i].Key < m.tags[j].Key }) + return m +} + +// SortFields orders the fields of a point alphanumerically by key. +func (m *Point) SortFields() *Point { + sort.Slice(m.fields, func(i, j int) bool { return m.fields[i].Key < m.fields[j].Key }) + return m +} + +// AddTag adds a tag to a point. +func (m *Point) AddTag(k, v string) *Point { + for i, tag := range m.tags { + if k == tag.Key { + m.tags[i].Value = v + return m + } + } + m.tags = append(m.tags, &lp.Tag{Key: k, Value: v}) + return m +} + +// AddField adds a field to a point. +func (m *Point) AddField(k string, v interface{}) *Point { + for i, field := range m.fields { + if k == field.Key { + m.fields[i].Value = v + return m + } + } + m.fields = append(m.fields, &lp.Field{Key: k, Value: convertField(v)}) + return m +} + +// Name returns the name of measurement of a point. 
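The write.Options accessors vendored above are normally driven through DefaultOptions() plus the fluent setters. A minimal usage sketch follows; the batch size, flush interval, and the "host"/"fermenter-pi" tag are illustrative assumptions, not values taken from this change set.

package main

import (
	"fmt"
	"time"

	"github.com/influxdata/influxdb-client-go/v2/api/write"
)

func main() {
	// Start from the vendored defaults (batch size 5000, flush every 1000 ms,
	// nanosecond precision) and override a few settings via the fluent setters.
	opts := write.DefaultOptions().
		SetBatchSize(1000).
		SetFlushInterval(2000). // milliseconds
		SetPrecision(time.Second).
		SetUseGZip(true).
		SetConsistency(write.ConsistencyQuorum). // InfluxDB Enterprise clusters only
		AddDefaultTag("host", "fermenter-pi")    // hypothetical tag stamped on every written point

	fmt.Println(opts.BatchSize(), opts.FlushInterval(), opts.Precision(), opts.UseGZip())
}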
+func (m *Point) Name() string { + return m.measurement +} + +// NewPointWithMeasurement creates a empty Point +// Use AddTag and AddField to fill point with data +func NewPointWithMeasurement(measurement string) *Point { + return &Point{measurement: measurement} +} + +// NewPoint creates a Point from measurement name, tags, fields and a timestamp. +func NewPoint( + measurement string, + tags map[string]string, + fields map[string]interface{}, + ts time.Time, +) *Point { + m := &Point{ + measurement: measurement, + tags: nil, + fields: nil, + timestamp: ts, + } + + if len(tags) > 0 { + m.tags = make([]*lp.Tag, 0, len(tags)) + for k, v := range tags { + m.tags = append(m.tags, + &lp.Tag{Key: k, Value: v}) + } + } + + m.fields = make([]*lp.Field, 0, len(fields)) + for k, v := range fields { + v := convertField(v) + if v == nil { + continue + } + m.fields = append(m.fields, &lp.Field{Key: k, Value: v}) + } + m.SortFields() + m.SortTags() + return m +} + +// convertField converts any primitive type to types supported by line protocol +func convertField(v interface{}) interface{} { + switch v := v.(type) { + case bool, int64, string, float64: + return v + case int: + return int64(v) + case uint: + return uint64(v) + case uint64: + return v + case []byte: + return string(v) + case int32: + return int64(v) + case int16: + return int64(v) + case int8: + return int64(v) + case uint32: + return uint64(v) + case uint16: + return uint64(v) + case uint8: + return uint64(v) + case float32: + return float64(v) + case time.Time: + return v.Format(time.RFC3339Nano) + case time.Duration: + return v.String() + default: + return fmt.Sprintf("%v", v) + } +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/api/writeAPIBlocking.go b/vendor/github.com/influxdata/influxdb-client-go/v2/api/writeAPIBlocking.go new file mode 100644 index 0000000..d348aa8 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/api/writeAPIBlocking.go @@ -0,0 +1,124 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +package api + +import ( + "context" + "strings" + "sync" + + http2 "github.com/influxdata/influxdb-client-go/v2/api/http" + "github.com/influxdata/influxdb-client-go/v2/api/write" + iwrite "github.com/influxdata/influxdb-client-go/v2/internal/write" +) + +// WriteAPIBlocking offers blocking methods for writing time series data synchronously into an InfluxDB server. +// It doesn't implicitly create batches of points by default. Batches are created from array of points/records. +// +// Implicit batching is enabled with EnableBatching(). In this mode, each call to WritePoint or WriteRecord adds a line +// to internal buffer. If length ot the buffer is equal to the batch-size (set in write.Options), the buffer is sent to the server +// and the result of the operation is returned. +// When a point is written to the buffer, nil error is always returned. +// Flush() can be used to trigger sending of batch when it doesn't have the batch-size. +// +// Synchronous writing is intended to use for writing less frequent data, such as a weather sensing, or if there is a need to have explicit control of failed batches. + +// +// WriteAPIBlocking can be used concurrently. +// When using multiple goroutines for writing, use a single WriteAPIBlocking instance in all goroutines. +type WriteAPIBlocking interface { + // WriteRecord writes line protocol record(s) into bucket. 
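The Point constructors and convertField conversion above can be exercised directly. A minimal sketch, using made-up measurement, tag, and field names:

package main

import (
	"fmt"
	"time"

	"github.com/influxdata/influxdb-client-go/v2/api/write"
)

func main() {
	// NewPoint copies the tag and field maps and sorts both by key;
	// convertField normalizes the int field value to int64.
	p := write.NewPoint(
		"temperature",
		map[string]string{"color": "PURPLE", "mac": "de:ad:be:ef:00:01"},
		map[string]interface{}{"temp": 68, "valid": true},
		time.Now(),
	)

	// Tags and fields can also be added (or overwritten) after construction.
	p.AddTag("host", "fermenter-pi").AddField("sg", 1050)

	fmt.Println(p.Name(), p.Time().Unix(), len(p.TagList()), len(p.FieldList()))
}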
+ // WriteRecord writes lines without implicit batching by default, batch is created from given number of records. + // Automatic batching can be enabled by EnableBatching() + // Individual arguments can also be batches (multiple records separated by newline). + // Non-blocking alternative is available in the WriteAPI interface + WriteRecord(ctx context.Context, line ...string) error + // WritePoint data point into bucket. + // WriteRecord writes points without implicit batching by default, batch is created from given number of points. + // Automatic batching can be enabled by EnableBatching(). + // Non-blocking alternative is available in the WriteAPI interface + WritePoint(ctx context.Context, point ...*write.Point) error + // EnableBatching turns on implicit batching + // Batch size is controlled via write.Options + EnableBatching() + // Flush forces write of buffer if batching is enabled, even buffer doesn't have the batch-size. + Flush(ctx context.Context) error +} + +// writeAPIBlocking implements WriteAPIBlocking interface +type writeAPIBlocking struct { + service *iwrite.Service + writeOptions *write.Options + batching bool + batch []string + mu sync.Mutex +} + +// NewWriteAPIBlocking creates new instance of blocking write client for writing data to bucket belonging to org +func NewWriteAPIBlocking(org string, bucket string, service http2.Service, writeOptions *write.Options) WriteAPIBlocking { + return &writeAPIBlocking{service: iwrite.NewService(org, bucket, service, writeOptions), writeOptions: writeOptions} +} + +// NewWriteAPIBlockingWithBatching creates new instance of blocking write client for writing data to bucket belonging to org with batching enabled +func NewWriteAPIBlockingWithBatching(org string, bucket string, service http2.Service, writeOptions *write.Options) WriteAPIBlocking { + api := &writeAPIBlocking{service: iwrite.NewService(org, bucket, service, writeOptions), writeOptions: writeOptions} + api.EnableBatching() + return api +} + +func (w *writeAPIBlocking) EnableBatching() { + w.mu.Lock() + defer w.mu.Unlock() + if !w.batching { + w.batching = true + w.batch = make([]string, 0, w.writeOptions.BatchSize()) + } +} + +func (w *writeAPIBlocking) write(ctx context.Context, line string) error { + w.mu.Lock() + defer w.mu.Unlock() + body := line + if w.batching { + w.batch = append(w.batch, line) + if len(w.batch) == int(w.writeOptions.BatchSize()) { + body = strings.Join(w.batch, "\n") + w.batch = w.batch[:0] + } else { + return nil + } + } + err := w.service.WriteBatch(ctx, iwrite.NewBatch(body, w.writeOptions.MaxRetryTime())) + if err != nil { + return err + } + return nil +} + +func (w *writeAPIBlocking) WriteRecord(ctx context.Context, line ...string) error { + if len(line) == 0 { + return nil + } + return w.write(ctx, strings.Join(line, "\n")) +} + +func (w *writeAPIBlocking) WritePoint(ctx context.Context, point ...*write.Point) error { + line, err := w.service.EncodePoints(point...) 
+ if err != nil { + return err + } + return w.write(ctx, line) +} + +func (w *writeAPIBlocking) Flush(ctx context.Context) error { + w.mu.Lock() + defer w.mu.Unlock() + if w.batching && len(w.batch) > 0 { + body := strings.Join(w.batch, "\n") + w.batch = w.batch[:0] + return w.service.WriteBatch(ctx, iwrite.NewBatch(body, w.writeOptions.MaxRetryTime())) + } + return nil +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/client.go b/vendor/github.com/influxdata/influxdb-client-go/v2/client.go new file mode 100644 index 0000000..9ff6a62 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/client.go @@ -0,0 +1,335 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +// Package influxdb2 provides API for using InfluxDB client in Go. +// It's intended to use with InfluxDB 2 server. WriteAPI, QueryAPI and Health work also with InfluxDB 1.8 +package influxdb2 + +import ( + "context" + "errors" + "strings" + "sync" + "time" + + "github.com/influxdata/influxdb-client-go/v2/api" + "github.com/influxdata/influxdb-client-go/v2/api/http" + "github.com/influxdata/influxdb-client-go/v2/domain" + ilog "github.com/influxdata/influxdb-client-go/v2/internal/log" + "github.com/influxdata/influxdb-client-go/v2/log" +) + +// Client provides API to communicate with InfluxDBServer. +// There two APIs for writing, WriteAPI and WriteAPIBlocking. +// WriteAPI provides asynchronous, non-blocking, methods for writing time series data. +// WriteAPIBlocking provides blocking methods for writing time series data. +type Client interface { + // Setup sends request to initialise new InfluxDB server with user, org and bucket, and data retention period + // and returns details about newly created entities along with the authorization object. + // Retention period of zero will result to infinite retention. + Setup(ctx context.Context, username, password, org, bucket string, retentionPeriodHours int) (*domain.OnboardingResponse, error) + // SetupWithToken sends request to initialise new InfluxDB server with user, org and bucket, data retention period and token + // and returns details about newly created entities along with the authorization object. + // Retention period of zero will result to infinite retention. + SetupWithToken(ctx context.Context, username, password, org, bucket string, retentionPeriodHours int, token string) (*domain.OnboardingResponse, error) + // Ready returns InfluxDB uptime info of server. It doesn't validate authentication params. + Ready(ctx context.Context) (*domain.Ready, error) + // Health returns an InfluxDB server health check result. Read the HealthCheck.Status field to get server status. + // Health doesn't validate authentication params. + Health(ctx context.Context) (*domain.HealthCheck, error) + // Ping validates whether InfluxDB server is running. It doesn't validate authentication params. + Ping(ctx context.Context) (bool, error) + // Close ensures all ongoing asynchronous write clients finish. + // Also closes all idle connections, in case of HTTP client was created internally. + Close() + // Options returns the options associated with client + Options() *Options + // ServerURL returns the url of the server url client talks to + ServerURL() string + // HTTPService returns underlying HTTP service object used by client + HTTPService() http.Service + // WriteAPI returns the asynchronous, non-blocking, Write client. 
+ // Ensures using a single WriteAPI instance for each org/bucket pair. + WriteAPI(org, bucket string) api.WriteAPI + // WriteAPIBlocking returns the synchronous, blocking, Write client. + // Ensures using a single WriteAPIBlocking instance for each org/bucket pair. + WriteAPIBlocking(org, bucket string) api.WriteAPIBlocking + // QueryAPI returns Query client. + // Ensures using a single QueryAPI instance each org. + QueryAPI(org string) api.QueryAPI + // AuthorizationsAPI returns Authorizations API client. + AuthorizationsAPI() api.AuthorizationsAPI + // OrganizationsAPI returns Organizations API client + OrganizationsAPI() api.OrganizationsAPI + // UsersAPI returns Users API client. + UsersAPI() api.UsersAPI + // DeleteAPI returns Delete API client + DeleteAPI() api.DeleteAPI + // BucketsAPI returns Buckets API client + BucketsAPI() api.BucketsAPI + // LabelsAPI returns Labels API client + LabelsAPI() api.LabelsAPI + // TasksAPI returns Tasks API client + TasksAPI() api.TasksAPI +} + +// clientImpl implements Client interface +type clientImpl struct { + serverURL string + options *Options + writeAPIs map[string]api.WriteAPI + syncWriteAPIs map[string]api.WriteAPIBlocking + lock sync.Mutex + httpService http.Service + apiClient *domain.ClientWithResponses + authAPI api.AuthorizationsAPI + orgAPI api.OrganizationsAPI + usersAPI api.UsersAPI + deleteAPI api.DeleteAPI + bucketsAPI api.BucketsAPI + labelsAPI api.LabelsAPI + tasksAPI api.TasksAPI +} + +// NewClient creates Client for connecting to given serverURL with provided authentication token, with the default options. +// serverURL is the InfluxDB server base URL, e.g. http://localhost:8086, +// authToken is an authentication token. It can be empty in case of connecting to newly installed InfluxDB server, which has not been set up yet. +// In such case, calling Setup() will set the authentication token. +func NewClient(serverURL string, authToken string) Client { + return NewClientWithOptions(serverURL, authToken, DefaultOptions()) +} + +// NewClientWithOptions creates Client for connecting to given serverURL with provided authentication token +// and configured with custom Options. +// serverURL is the InfluxDB server base URL, e.g. http://localhost:8086, +// authToken is an authentication token. It can be empty in case of connecting to newly installed InfluxDB server, which has not been set up yet. 
+// In such case, calling Setup() will set authentication token +func NewClientWithOptions(serverURL string, authToken string, options *Options) Client { + normServerURL := serverURL + if !strings.HasSuffix(normServerURL, "/") { + // For subsequent path parts concatenation, url has to end with '/' + normServerURL = serverURL + "/" + } + authorization := "" + if len(authToken) > 0 { + authorization = "Token " + authToken + } + service := http.NewService(normServerURL, authorization, options.httpOptions) + client := &clientImpl{ + serverURL: serverURL, + options: options, + writeAPIs: make(map[string]api.WriteAPI, 5), + syncWriteAPIs: make(map[string]api.WriteAPIBlocking, 5), + httpService: service, + apiClient: domain.NewClientWithResponses(service), + } + if log.Log != nil { + log.Log.SetLogLevel(options.LogLevel()) + } + if ilog.Level() >= log.InfoLevel { + tokenStr := "" + if len(authToken) > 0 { + tokenStr = ", token '******'" + } + ilog.Infof("Using URL '%s'%s", serverURL, tokenStr) + } + return client +} +func (c *clientImpl) Options() *Options { + return c.options +} + +func (c *clientImpl) ServerURL() string { + return c.serverURL +} + +func (c *clientImpl) HTTPService() http.Service { + return c.httpService +} + +func (c *clientImpl) Ready(ctx context.Context) (*domain.Ready, error) { + params := &domain.GetReadyParams{} + response, err := c.apiClient.GetReadyWithResponse(ctx, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + if response.JSON200 == nil { //response with status 2xx, but not JSON + return nil, errors.New("cannot read Ready response") + + } + return response.JSON200, nil +} + +func (c *clientImpl) Setup(ctx context.Context, username, password, org, bucket string, retentionPeriodHours int) (*domain.OnboardingResponse, error) { + return c.SetupWithToken(ctx, username, password, org, bucket, retentionPeriodHours, "") +} + +func (c *clientImpl) SetupWithToken(ctx context.Context, username, password, org, bucket string, retentionPeriodHours int, token string) (*domain.OnboardingResponse, error) { + if username == "" || password == "" { + return nil, errors.New("a username and a password is required for a setup") + } + c.lock.Lock() + defer c.lock.Unlock() + params := &domain.PostSetupParams{} + retentionPeriodSeconds := int64(retentionPeriodHours * 3600) + retentionPeriodHrs := int(time.Duration(retentionPeriodSeconds) * time.Second) + body := &domain.PostSetupJSONRequestBody{ + Bucket: bucket, + Org: org, + Password: &password, + RetentionPeriodSeconds: &retentionPeriodSeconds, + RetentionPeriodHrs: &retentionPeriodHrs, + Username: username, + } + if token != "" { + body.Token = &token + } + response, err := c.apiClient.PostSetupWithResponse(ctx, params, *body) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + c.httpService.SetAuthorization("Token " + *response.JSON201.Auth.Token) + return response.JSON201, nil +} + +func (c *clientImpl) Health(ctx context.Context) (*domain.HealthCheck, error) { + params := &domain.GetHealthParams{} + response, err := c.apiClient.GetHealthWithResponse(ctx, params) + if err != nil { + return nil, err + } + if response.JSONDefault != nil { + return nil, domain.ErrorToHTTPError(response.JSONDefault, response.StatusCode()) + } + if response.JSON503 != nil { + //unhealthy server + return response.JSON503, nil + 
} + if response.JSON200 == nil { //response with status 2xx, but not JSON + return nil, errors.New("cannot read Health response") + } + + return response.JSON200, nil +} + +func (c *clientImpl) Ping(ctx context.Context) (bool, error) { + resp, err := c.apiClient.GetPingWithResponse(ctx) + if err != nil { + return false, err + } + return resp.StatusCode() == 204, nil +} + +func createKey(org, bucket string) string { + return org + "\t" + bucket +} + +func (c *clientImpl) WriteAPI(org, bucket string) api.WriteAPI { + c.lock.Lock() + defer c.lock.Unlock() + key := createKey(org, bucket) + if _, ok := c.writeAPIs[key]; !ok { + w := api.NewWriteAPI(org, bucket, c.httpService, c.options.writeOptions) + c.writeAPIs[key] = w + } + return c.writeAPIs[key] +} + +func (c *clientImpl) WriteAPIBlocking(org, bucket string) api.WriteAPIBlocking { + c.lock.Lock() + defer c.lock.Unlock() + key := createKey(org, bucket) + if _, ok := c.syncWriteAPIs[key]; !ok { + w := api.NewWriteAPIBlocking(org, bucket, c.httpService, c.options.writeOptions) + c.syncWriteAPIs[key] = w + } + return c.syncWriteAPIs[key] +} + +func (c *clientImpl) Close() { + for key, w := range c.writeAPIs { + wa := w.(*api.WriteAPIImpl) + wa.Close() + delete(c.writeAPIs, key) + } + for key := range c.syncWriteAPIs { + delete(c.syncWriteAPIs, key) + } + if c.options.HTTPOptions().OwnHTTPClient() { + c.options.HTTPOptions().HTTPClient().CloseIdleConnections() + } +} + +func (c *clientImpl) QueryAPI(org string) api.QueryAPI { + return api.NewQueryAPI(org, c.httpService) +} + +func (c *clientImpl) AuthorizationsAPI() api.AuthorizationsAPI { + c.lock.Lock() + defer c.lock.Unlock() + if c.authAPI == nil { + c.authAPI = api.NewAuthorizationsAPI(c.apiClient) + } + return c.authAPI +} + +func (c *clientImpl) OrganizationsAPI() api.OrganizationsAPI { + c.lock.Lock() + defer c.lock.Unlock() + if c.orgAPI == nil { + c.orgAPI = api.NewOrganizationsAPI(c.apiClient) + } + return c.orgAPI +} + +func (c *clientImpl) UsersAPI() api.UsersAPI { + c.lock.Lock() + defer c.lock.Unlock() + if c.usersAPI == nil { + c.usersAPI = api.NewUsersAPI(c.apiClient, c.httpService, c.options.HTTPClient()) + } + return c.usersAPI +} + +func (c *clientImpl) DeleteAPI() api.DeleteAPI { + c.lock.Lock() + defer c.lock.Unlock() + if c.deleteAPI == nil { + c.deleteAPI = api.NewDeleteAPI(c.apiClient) + } + return c.deleteAPI +} + +func (c *clientImpl) BucketsAPI() api.BucketsAPI { + c.lock.Lock() + defer c.lock.Unlock() + if c.bucketsAPI == nil { + c.bucketsAPI = api.NewBucketsAPI(c.apiClient) + } + return c.bucketsAPI +} + +func (c *clientImpl) LabelsAPI() api.LabelsAPI { + c.lock.Lock() + defer c.lock.Unlock() + if c.labelsAPI == nil { + c.labelsAPI = api.NewLabelsAPI(c.apiClient) + } + return c.labelsAPI +} + +func (c *clientImpl) TasksAPI() api.TasksAPI { + c.lock.Lock() + defer c.lock.Unlock() + if c.tasksAPI == nil { + c.tasksAPI = api.NewTasksAPI(c.apiClient) + } + return c.tasksAPI +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/compatibility.go b/vendor/github.com/influxdata/influxdb-client-go/v2/compatibility.go new file mode 100644 index 0000000..62d993f --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/compatibility.go @@ -0,0 +1,35 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. 
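Putting the client and the blocking write API together, a minimal end-to-end sketch of the call pattern: create a Client, obtain WriteAPIBlocking for an org/bucket pair, and send line-protocol records. The URL, token, org, bucket, and record contents are placeholders for illustration only.

package main

import (
	"context"
	"crypto/tls"
	"log"

	influxdb2 "github.com/influxdata/influxdb-client-go/v2"
)

func main() {
	// Placeholder connection details; InsecureSkipVerify disables server
	// certificate verification and should only be used for testing.
	client := influxdb2.NewClientWithOptions("http://localhost:8086", "my-token",
		influxdb2.DefaultOptions().SetTLSConfig(&tls.Config{InsecureSkipVerify: true}))
	defer client.Close() // also closes idle connections of the internally created HTTP client

	// One WriteAPIBlocking instance per org/bucket pair; safe to share across goroutines.
	writeAPI := client.WriteAPIBlocking("my-org", "my-bucket")

	// Without batching, each WriteRecord call is sent to the server immediately.
	rec := "gravity,color=PURPLE,mac=de:ad:be:ef:00:01 sg=1050"
	if err := writeAPI.WriteRecord(context.Background(), rec); err != nil {
		log.Fatal(err)
	}

	// With implicit batching enabled, records are buffered until the batch size
	// is reached; Flush forces out a partial batch.
	writeAPI.EnableBatching()
	_ = writeAPI.WriteRecord(context.Background(), "temperature,color=PURPLE,mac=de:ad:be:ef:00:01 temp=68")
	if err := writeAPI.Flush(context.Background()); err != nil {
		log.Fatal(err)
	}
}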
+ +package influxdb2 + +import ( + "github.com/influxdata/influxdb-client-go/v2/api" + "github.com/influxdata/influxdb-client-go/v2/api/write" + "github.com/influxdata/influxdb-client-go/v2/domain" + "time" +) + +// Proxy methods for backward compatibility + +// NewPointWithMeasurement creates a empty Point +// Use AddTag and AddField to fill point with data +func NewPointWithMeasurement(measurement string) *write.Point { + return write.NewPointWithMeasurement(measurement) +} + +// NewPoint creates a Point from measurement name, tags, fields and a timestamp. +func NewPoint( + measurement string, + tags map[string]string, + fields map[string]interface{}, + ts time.Time, +) *write.Point { + return write.NewPoint(measurement, tags, fields, ts) +} + +// DefaultDialect return flux query Dialect with full annotations (datatype, group, default), header and comma char as a delimiter +func DefaultDialect() *domain.Dialect { + return api.DefaultDialect() +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/domain/Readme.md b/vendor/github.com/influxdata/influxdb-client-go/v2/domain/Readme.md new file mode 100644 index 0000000..4209218 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/domain/Readme.md @@ -0,0 +1,23 @@ +# Generated types and API client + +`oss.yml` is copied from InfluxDB and customized, until changes are meged. Must be periodically sync with latest changes + and types and client must be re-generated + + +## Install oapi generator +`git clone git@github.com:bonitoo-io/oapi-codegen.git` +`cd oapi-codegen` +`git checkout dev-master` +`go install ./cmd/oapi-codegen/oapi-codegen.go` +## Download and sync latest swagger +`wget https://raw.githubusercontent.com/influxdata/openapi/master/contracts/oss.yml` + +## Generate +`cd domain` + +Generate types +`oapi-codegen -generate types -o types.gen.go -package domain oss.yml` + +Generate client +`oapi-codegen -generate client -o client.gen.go -package domain -templates .\templates oss.yml` + diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/domain/client.gen.go b/vendor/github.com/influxdata/influxdb-client-go/v2/domain/client.gen.go new file mode 100644 index 0000000..f3e06e7 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/domain/client.gen.go @@ -0,0 +1,33155 @@ +// Package domain provides primitives to interact with the openapi HTTP API. +// +// Code generated by github.com/deepmap/oapi-codegen version (devel) DO NOT EDIT. +package domain + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "io/ioutil" + "net/http" + "net/url" + "strings" + + "gopkg.in/yaml.v2" + + "github.com/deepmap/oapi-codegen/pkg/runtime" + ihttp "github.com/influxdata/influxdb-client-go/v2/api/http" +) + +// Client which conforms to the OpenAPI3 specification for this service. +type Client struct { + service ihttp.Service +} + +// Creates a new Client, with reasonable defaults +func NewClient(service ihttp.Service) *Client { + // create a client with sane default values + client := Client{ + service: service, + } + return &client +} + +// The interface specification for the client above. 
+type ClientInterface interface { + // GetRoutes request + GetRoutes(ctx context.Context, params *GetRoutesParams) (*http.Response, error) + + // GetAuthorizations request + GetAuthorizations(ctx context.Context, params *GetAuthorizationsParams) (*http.Response, error) + + // PostAuthorizations request with any body + PostAuthorizationsWithBody(ctx context.Context, params *PostAuthorizationsParams, contentType string, body io.Reader) (*http.Response, error) + + PostAuthorizations(ctx context.Context, params *PostAuthorizationsParams, body PostAuthorizationsJSONRequestBody) (*http.Response, error) + + // DeleteAuthorizationsID request + DeleteAuthorizationsID(ctx context.Context, authID string, params *DeleteAuthorizationsIDParams) (*http.Response, error) + + // GetAuthorizationsID request + GetAuthorizationsID(ctx context.Context, authID string, params *GetAuthorizationsIDParams) (*http.Response, error) + + // PatchAuthorizationsID request with any body + PatchAuthorizationsIDWithBody(ctx context.Context, authID string, params *PatchAuthorizationsIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchAuthorizationsID(ctx context.Context, authID string, params *PatchAuthorizationsIDParams, body PatchAuthorizationsIDJSONRequestBody) (*http.Response, error) + + // GetBackupKV request + GetBackupKV(ctx context.Context, params *GetBackupKVParams) (*http.Response, error) + + // GetBackupMetadata request + GetBackupMetadata(ctx context.Context, params *GetBackupMetadataParams) (*http.Response, error) + + // GetBackupShardId request + GetBackupShardId(ctx context.Context, shardID int64, params *GetBackupShardIdParams) (*http.Response, error) + + // GetBuckets request + GetBuckets(ctx context.Context, params *GetBucketsParams) (*http.Response, error) + + // PostBuckets request with any body + PostBucketsWithBody(ctx context.Context, params *PostBucketsParams, contentType string, body io.Reader) (*http.Response, error) + + PostBuckets(ctx context.Context, params *PostBucketsParams, body PostBucketsJSONRequestBody) (*http.Response, error) + + // DeleteBucketsID request + DeleteBucketsID(ctx context.Context, bucketID string, params *DeleteBucketsIDParams) (*http.Response, error) + + // GetBucketsID request + GetBucketsID(ctx context.Context, bucketID string, params *GetBucketsIDParams) (*http.Response, error) + + // PatchBucketsID request with any body + PatchBucketsIDWithBody(ctx context.Context, bucketID string, params *PatchBucketsIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchBucketsID(ctx context.Context, bucketID string, params *PatchBucketsIDParams, body PatchBucketsIDJSONRequestBody) (*http.Response, error) + + // GetBucketsIDLabels request + GetBucketsIDLabels(ctx context.Context, bucketID string, params *GetBucketsIDLabelsParams) (*http.Response, error) + + // PostBucketsIDLabels request with any body + PostBucketsIDLabelsWithBody(ctx context.Context, bucketID string, params *PostBucketsIDLabelsParams, contentType string, body io.Reader) (*http.Response, error) + + PostBucketsIDLabels(ctx context.Context, bucketID string, params *PostBucketsIDLabelsParams, body PostBucketsIDLabelsJSONRequestBody) (*http.Response, error) + + // DeleteBucketsIDLabelsID request + DeleteBucketsIDLabelsID(ctx context.Context, bucketID string, labelID string, params *DeleteBucketsIDLabelsIDParams) (*http.Response, error) + + // GetBucketsIDMembers request + GetBucketsIDMembers(ctx context.Context, bucketID string, params *GetBucketsIDMembersParams) 
(*http.Response, error) + + // PostBucketsIDMembers request with any body + PostBucketsIDMembersWithBody(ctx context.Context, bucketID string, params *PostBucketsIDMembersParams, contentType string, body io.Reader) (*http.Response, error) + + PostBucketsIDMembers(ctx context.Context, bucketID string, params *PostBucketsIDMembersParams, body PostBucketsIDMembersJSONRequestBody) (*http.Response, error) + + // DeleteBucketsIDMembersID request + DeleteBucketsIDMembersID(ctx context.Context, bucketID string, userID string, params *DeleteBucketsIDMembersIDParams) (*http.Response, error) + + // GetBucketsIDOwners request + GetBucketsIDOwners(ctx context.Context, bucketID string, params *GetBucketsIDOwnersParams) (*http.Response, error) + + // PostBucketsIDOwners request with any body + PostBucketsIDOwnersWithBody(ctx context.Context, bucketID string, params *PostBucketsIDOwnersParams, contentType string, body io.Reader) (*http.Response, error) + + PostBucketsIDOwners(ctx context.Context, bucketID string, params *PostBucketsIDOwnersParams, body PostBucketsIDOwnersJSONRequestBody) (*http.Response, error) + + // DeleteBucketsIDOwnersID request + DeleteBucketsIDOwnersID(ctx context.Context, bucketID string, userID string, params *DeleteBucketsIDOwnersIDParams) (*http.Response, error) + + // GetChecks request + GetChecks(ctx context.Context, params *GetChecksParams) (*http.Response, error) + + // CreateCheck request with any body + CreateCheckWithBody(ctx context.Context, contentType string, body io.Reader) (*http.Response, error) + + CreateCheck(ctx context.Context, body CreateCheckJSONRequestBody) (*http.Response, error) + + // DeleteChecksID request + DeleteChecksID(ctx context.Context, checkID string, params *DeleteChecksIDParams) (*http.Response, error) + + // GetChecksID request + GetChecksID(ctx context.Context, checkID string, params *GetChecksIDParams) (*http.Response, error) + + // PatchChecksID request with any body + PatchChecksIDWithBody(ctx context.Context, checkID string, params *PatchChecksIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchChecksID(ctx context.Context, checkID string, params *PatchChecksIDParams, body PatchChecksIDJSONRequestBody) (*http.Response, error) + + // PutChecksID request with any body + PutChecksIDWithBody(ctx context.Context, checkID string, params *PutChecksIDParams, contentType string, body io.Reader) (*http.Response, error) + + PutChecksID(ctx context.Context, checkID string, params *PutChecksIDParams, body PutChecksIDJSONRequestBody) (*http.Response, error) + + // GetChecksIDLabels request + GetChecksIDLabels(ctx context.Context, checkID string, params *GetChecksIDLabelsParams) (*http.Response, error) + + // PostChecksIDLabels request with any body + PostChecksIDLabelsWithBody(ctx context.Context, checkID string, params *PostChecksIDLabelsParams, contentType string, body io.Reader) (*http.Response, error) + + PostChecksIDLabels(ctx context.Context, checkID string, params *PostChecksIDLabelsParams, body PostChecksIDLabelsJSONRequestBody) (*http.Response, error) + + // DeleteChecksIDLabelsID request + DeleteChecksIDLabelsID(ctx context.Context, checkID string, labelID string, params *DeleteChecksIDLabelsIDParams) (*http.Response, error) + + // GetChecksIDQuery request + GetChecksIDQuery(ctx context.Context, checkID string, params *GetChecksIDQueryParams) (*http.Response, error) + + // GetConfig request + GetConfig(ctx context.Context, params *GetConfigParams) (*http.Response, error) + + // GetDashboards request + 
GetDashboards(ctx context.Context, params *GetDashboardsParams) (*http.Response, error) + + // PostDashboards request with any body + PostDashboardsWithBody(ctx context.Context, params *PostDashboardsParams, contentType string, body io.Reader) (*http.Response, error) + + PostDashboards(ctx context.Context, params *PostDashboardsParams, body PostDashboardsJSONRequestBody) (*http.Response, error) + + // DeleteDashboardsID request + DeleteDashboardsID(ctx context.Context, dashboardID string, params *DeleteDashboardsIDParams) (*http.Response, error) + + // GetDashboardsID request + GetDashboardsID(ctx context.Context, dashboardID string, params *GetDashboardsIDParams) (*http.Response, error) + + // PatchDashboardsID request with any body + PatchDashboardsIDWithBody(ctx context.Context, dashboardID string, params *PatchDashboardsIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchDashboardsID(ctx context.Context, dashboardID string, params *PatchDashboardsIDParams, body PatchDashboardsIDJSONRequestBody) (*http.Response, error) + + // PostDashboardsIDCells request with any body + PostDashboardsIDCellsWithBody(ctx context.Context, dashboardID string, params *PostDashboardsIDCellsParams, contentType string, body io.Reader) (*http.Response, error) + + PostDashboardsIDCells(ctx context.Context, dashboardID string, params *PostDashboardsIDCellsParams, body PostDashboardsIDCellsJSONRequestBody) (*http.Response, error) + + // PutDashboardsIDCells request with any body + PutDashboardsIDCellsWithBody(ctx context.Context, dashboardID string, params *PutDashboardsIDCellsParams, contentType string, body io.Reader) (*http.Response, error) + + PutDashboardsIDCells(ctx context.Context, dashboardID string, params *PutDashboardsIDCellsParams, body PutDashboardsIDCellsJSONRequestBody) (*http.Response, error) + + // DeleteDashboardsIDCellsID request + DeleteDashboardsIDCellsID(ctx context.Context, dashboardID string, cellID string, params *DeleteDashboardsIDCellsIDParams) (*http.Response, error) + + // PatchDashboardsIDCellsID request with any body + PatchDashboardsIDCellsIDWithBody(ctx context.Context, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchDashboardsIDCellsID(ctx context.Context, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDParams, body PatchDashboardsIDCellsIDJSONRequestBody) (*http.Response, error) + + // GetDashboardsIDCellsIDView request + GetDashboardsIDCellsIDView(ctx context.Context, dashboardID string, cellID string, params *GetDashboardsIDCellsIDViewParams) (*http.Response, error) + + // PatchDashboardsIDCellsIDView request with any body + PatchDashboardsIDCellsIDViewWithBody(ctx context.Context, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDViewParams, contentType string, body io.Reader) (*http.Response, error) + + PatchDashboardsIDCellsIDView(ctx context.Context, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDViewParams, body PatchDashboardsIDCellsIDViewJSONRequestBody) (*http.Response, error) + + // GetDashboardsIDLabels request + GetDashboardsIDLabels(ctx context.Context, dashboardID string, params *GetDashboardsIDLabelsParams) (*http.Response, error) + + // PostDashboardsIDLabels request with any body + PostDashboardsIDLabelsWithBody(ctx context.Context, dashboardID string, params *PostDashboardsIDLabelsParams, contentType string, body io.Reader) (*http.Response, error) + + PostDashboardsIDLabels(ctx 
context.Context, dashboardID string, params *PostDashboardsIDLabelsParams, body PostDashboardsIDLabelsJSONRequestBody) (*http.Response, error) + + // DeleteDashboardsIDLabelsID request + DeleteDashboardsIDLabelsID(ctx context.Context, dashboardID string, labelID string, params *DeleteDashboardsIDLabelsIDParams) (*http.Response, error) + + // GetDashboardsIDMembers request + GetDashboardsIDMembers(ctx context.Context, dashboardID string, params *GetDashboardsIDMembersParams) (*http.Response, error) + + // PostDashboardsIDMembers request with any body + PostDashboardsIDMembersWithBody(ctx context.Context, dashboardID string, params *PostDashboardsIDMembersParams, contentType string, body io.Reader) (*http.Response, error) + + PostDashboardsIDMembers(ctx context.Context, dashboardID string, params *PostDashboardsIDMembersParams, body PostDashboardsIDMembersJSONRequestBody) (*http.Response, error) + + // DeleteDashboardsIDMembersID request + DeleteDashboardsIDMembersID(ctx context.Context, dashboardID string, userID string, params *DeleteDashboardsIDMembersIDParams) (*http.Response, error) + + // GetDashboardsIDOwners request + GetDashboardsIDOwners(ctx context.Context, dashboardID string, params *GetDashboardsIDOwnersParams) (*http.Response, error) + + // PostDashboardsIDOwners request with any body + PostDashboardsIDOwnersWithBody(ctx context.Context, dashboardID string, params *PostDashboardsIDOwnersParams, contentType string, body io.Reader) (*http.Response, error) + + PostDashboardsIDOwners(ctx context.Context, dashboardID string, params *PostDashboardsIDOwnersParams, body PostDashboardsIDOwnersJSONRequestBody) (*http.Response, error) + + // DeleteDashboardsIDOwnersID request + DeleteDashboardsIDOwnersID(ctx context.Context, dashboardID string, userID string, params *DeleteDashboardsIDOwnersIDParams) (*http.Response, error) + + // GetDBRPs request + GetDBRPs(ctx context.Context, params *GetDBRPsParams) (*http.Response, error) + + // PostDBRP request with any body + PostDBRPWithBody(ctx context.Context, params *PostDBRPParams, contentType string, body io.Reader) (*http.Response, error) + + PostDBRP(ctx context.Context, params *PostDBRPParams, body PostDBRPJSONRequestBody) (*http.Response, error) + + // DeleteDBRPID request + DeleteDBRPID(ctx context.Context, dbrpID string, params *DeleteDBRPIDParams) (*http.Response, error) + + // GetDBRPsID request + GetDBRPsID(ctx context.Context, dbrpID string, params *GetDBRPsIDParams) (*http.Response, error) + + // PatchDBRPID request with any body + PatchDBRPIDWithBody(ctx context.Context, dbrpID string, params *PatchDBRPIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchDBRPID(ctx context.Context, dbrpID string, params *PatchDBRPIDParams, body PatchDBRPIDJSONRequestBody) (*http.Response, error) + + // PostDelete request with any body + PostDeleteWithBody(ctx context.Context, params *PostDeleteParams, contentType string, body io.Reader) (*http.Response, error) + + PostDelete(ctx context.Context, params *PostDeleteParams, body PostDeleteJSONRequestBody) (*http.Response, error) + + // GetFlags request + GetFlags(ctx context.Context, params *GetFlagsParams) (*http.Response, error) + + // GetHealth request + GetHealth(ctx context.Context, params *GetHealthParams) (*http.Response, error) + + // GetLabels request + GetLabels(ctx context.Context, params *GetLabelsParams) (*http.Response, error) + + // PostLabels request with any body + PostLabelsWithBody(ctx context.Context, contentType string, body io.Reader) (*http.Response, 
error) + + PostLabels(ctx context.Context, body PostLabelsJSONRequestBody) (*http.Response, error) + + // DeleteLabelsID request + DeleteLabelsID(ctx context.Context, labelID string, params *DeleteLabelsIDParams) (*http.Response, error) + + // GetLabelsID request + GetLabelsID(ctx context.Context, labelID string, params *GetLabelsIDParams) (*http.Response, error) + + // PatchLabelsID request with any body + PatchLabelsIDWithBody(ctx context.Context, labelID string, params *PatchLabelsIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchLabelsID(ctx context.Context, labelID string, params *PatchLabelsIDParams, body PatchLabelsIDJSONRequestBody) (*http.Response, error) + + // GetLegacyAuthorizations request + GetLegacyAuthorizations(ctx context.Context, params *GetLegacyAuthorizationsParams) (*http.Response, error) + + // PostLegacyAuthorizations request with any body + PostLegacyAuthorizationsWithBody(ctx context.Context, params *PostLegacyAuthorizationsParams, contentType string, body io.Reader) (*http.Response, error) + + PostLegacyAuthorizations(ctx context.Context, params *PostLegacyAuthorizationsParams, body PostLegacyAuthorizationsJSONRequestBody) (*http.Response, error) + + // DeleteLegacyAuthorizationsID request + DeleteLegacyAuthorizationsID(ctx context.Context, authID string, params *DeleteLegacyAuthorizationsIDParams) (*http.Response, error) + + // GetLegacyAuthorizationsID request + GetLegacyAuthorizationsID(ctx context.Context, authID string, params *GetLegacyAuthorizationsIDParams) (*http.Response, error) + + // PatchLegacyAuthorizationsID request with any body + PatchLegacyAuthorizationsIDWithBody(ctx context.Context, authID string, params *PatchLegacyAuthorizationsIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchLegacyAuthorizationsID(ctx context.Context, authID string, params *PatchLegacyAuthorizationsIDParams, body PatchLegacyAuthorizationsIDJSONRequestBody) (*http.Response, error) + + // PostLegacyAuthorizationsIDPassword request with any body + PostLegacyAuthorizationsIDPasswordWithBody(ctx context.Context, authID string, params *PostLegacyAuthorizationsIDPasswordParams, contentType string, body io.Reader) (*http.Response, error) + + PostLegacyAuthorizationsIDPassword(ctx context.Context, authID string, params *PostLegacyAuthorizationsIDPasswordParams, body PostLegacyAuthorizationsIDPasswordJSONRequestBody) (*http.Response, error) + + // GetMe request + GetMe(ctx context.Context, params *GetMeParams) (*http.Response, error) + + // PutMePassword request with any body + PutMePasswordWithBody(ctx context.Context, params *PutMePasswordParams, contentType string, body io.Reader) (*http.Response, error) + + PutMePassword(ctx context.Context, params *PutMePasswordParams, body PutMePasswordJSONRequestBody) (*http.Response, error) + + // GetMetrics request + GetMetrics(ctx context.Context, params *GetMetricsParams) (*http.Response, error) + + // GetNotificationEndpoints request + GetNotificationEndpoints(ctx context.Context, params *GetNotificationEndpointsParams) (*http.Response, error) + + // CreateNotificationEndpoint request with any body + CreateNotificationEndpointWithBody(ctx context.Context, contentType string, body io.Reader) (*http.Response, error) + + CreateNotificationEndpoint(ctx context.Context, body CreateNotificationEndpointJSONRequestBody) (*http.Response, error) + + // DeleteNotificationEndpointsID request + DeleteNotificationEndpointsID(ctx context.Context, endpointID string, params 
*DeleteNotificationEndpointsIDParams) (*http.Response, error) + + // GetNotificationEndpointsID request + GetNotificationEndpointsID(ctx context.Context, endpointID string, params *GetNotificationEndpointsIDParams) (*http.Response, error) + + // PatchNotificationEndpointsID request with any body + PatchNotificationEndpointsIDWithBody(ctx context.Context, endpointID string, params *PatchNotificationEndpointsIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchNotificationEndpointsID(ctx context.Context, endpointID string, params *PatchNotificationEndpointsIDParams, body PatchNotificationEndpointsIDJSONRequestBody) (*http.Response, error) + + // PutNotificationEndpointsID request with any body + PutNotificationEndpointsIDWithBody(ctx context.Context, endpointID string, params *PutNotificationEndpointsIDParams, contentType string, body io.Reader) (*http.Response, error) + + PutNotificationEndpointsID(ctx context.Context, endpointID string, params *PutNotificationEndpointsIDParams, body PutNotificationEndpointsIDJSONRequestBody) (*http.Response, error) + + // GetNotificationEndpointsIDLabels request + GetNotificationEndpointsIDLabels(ctx context.Context, endpointID string, params *GetNotificationEndpointsIDLabelsParams) (*http.Response, error) + + // PostNotificationEndpointIDLabels request with any body + PostNotificationEndpointIDLabelsWithBody(ctx context.Context, endpointID string, params *PostNotificationEndpointIDLabelsParams, contentType string, body io.Reader) (*http.Response, error) + + PostNotificationEndpointIDLabels(ctx context.Context, endpointID string, params *PostNotificationEndpointIDLabelsParams, body PostNotificationEndpointIDLabelsJSONRequestBody) (*http.Response, error) + + // DeleteNotificationEndpointsIDLabelsID request + DeleteNotificationEndpointsIDLabelsID(ctx context.Context, endpointID string, labelID string, params *DeleteNotificationEndpointsIDLabelsIDParams) (*http.Response, error) + + // GetNotificationRules request + GetNotificationRules(ctx context.Context, params *GetNotificationRulesParams) (*http.Response, error) + + // CreateNotificationRule request with any body + CreateNotificationRuleWithBody(ctx context.Context, contentType string, body io.Reader) (*http.Response, error) + + CreateNotificationRule(ctx context.Context, body CreateNotificationRuleJSONRequestBody) (*http.Response, error) + + // DeleteNotificationRulesID request + DeleteNotificationRulesID(ctx context.Context, ruleID string, params *DeleteNotificationRulesIDParams) (*http.Response, error) + + // GetNotificationRulesID request + GetNotificationRulesID(ctx context.Context, ruleID string, params *GetNotificationRulesIDParams) (*http.Response, error) + + // PatchNotificationRulesID request with any body + PatchNotificationRulesIDWithBody(ctx context.Context, ruleID string, params *PatchNotificationRulesIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchNotificationRulesID(ctx context.Context, ruleID string, params *PatchNotificationRulesIDParams, body PatchNotificationRulesIDJSONRequestBody) (*http.Response, error) + + // PutNotificationRulesID request with any body + PutNotificationRulesIDWithBody(ctx context.Context, ruleID string, params *PutNotificationRulesIDParams, contentType string, body io.Reader) (*http.Response, error) + + PutNotificationRulesID(ctx context.Context, ruleID string, params *PutNotificationRulesIDParams, body PutNotificationRulesIDJSONRequestBody) (*http.Response, error) + + // GetNotificationRulesIDLabels request + 
GetNotificationRulesIDLabels(ctx context.Context, ruleID string, params *GetNotificationRulesIDLabelsParams) (*http.Response, error) + + // PostNotificationRuleIDLabels request with any body + PostNotificationRuleIDLabelsWithBody(ctx context.Context, ruleID string, params *PostNotificationRuleIDLabelsParams, contentType string, body io.Reader) (*http.Response, error) + + PostNotificationRuleIDLabels(ctx context.Context, ruleID string, params *PostNotificationRuleIDLabelsParams, body PostNotificationRuleIDLabelsJSONRequestBody) (*http.Response, error) + + // DeleteNotificationRulesIDLabelsID request + DeleteNotificationRulesIDLabelsID(ctx context.Context, ruleID string, labelID string, params *DeleteNotificationRulesIDLabelsIDParams) (*http.Response, error) + + // GetNotificationRulesIDQuery request + GetNotificationRulesIDQuery(ctx context.Context, ruleID string, params *GetNotificationRulesIDQueryParams) (*http.Response, error) + + // GetOrgs request + GetOrgs(ctx context.Context, params *GetOrgsParams) (*http.Response, error) + + // PostOrgs request with any body + PostOrgsWithBody(ctx context.Context, params *PostOrgsParams, contentType string, body io.Reader) (*http.Response, error) + + PostOrgs(ctx context.Context, params *PostOrgsParams, body PostOrgsJSONRequestBody) (*http.Response, error) + + // DeleteOrgsID request + DeleteOrgsID(ctx context.Context, orgID string, params *DeleteOrgsIDParams) (*http.Response, error) + + // GetOrgsID request + GetOrgsID(ctx context.Context, orgID string, params *GetOrgsIDParams) (*http.Response, error) + + // PatchOrgsID request with any body + PatchOrgsIDWithBody(ctx context.Context, orgID string, params *PatchOrgsIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchOrgsID(ctx context.Context, orgID string, params *PatchOrgsIDParams, body PatchOrgsIDJSONRequestBody) (*http.Response, error) + + // GetOrgsIDMembers request + GetOrgsIDMembers(ctx context.Context, orgID string, params *GetOrgsIDMembersParams) (*http.Response, error) + + // PostOrgsIDMembers request with any body + PostOrgsIDMembersWithBody(ctx context.Context, orgID string, params *PostOrgsIDMembersParams, contentType string, body io.Reader) (*http.Response, error) + + PostOrgsIDMembers(ctx context.Context, orgID string, params *PostOrgsIDMembersParams, body PostOrgsIDMembersJSONRequestBody) (*http.Response, error) + + // DeleteOrgsIDMembersID request + DeleteOrgsIDMembersID(ctx context.Context, orgID string, userID string, params *DeleteOrgsIDMembersIDParams) (*http.Response, error) + + // GetOrgsIDOwners request + GetOrgsIDOwners(ctx context.Context, orgID string, params *GetOrgsIDOwnersParams) (*http.Response, error) + + // PostOrgsIDOwners request with any body + PostOrgsIDOwnersWithBody(ctx context.Context, orgID string, params *PostOrgsIDOwnersParams, contentType string, body io.Reader) (*http.Response, error) + + PostOrgsIDOwners(ctx context.Context, orgID string, params *PostOrgsIDOwnersParams, body PostOrgsIDOwnersJSONRequestBody) (*http.Response, error) + + // DeleteOrgsIDOwnersID request + DeleteOrgsIDOwnersID(ctx context.Context, orgID string, userID string, params *DeleteOrgsIDOwnersIDParams) (*http.Response, error) + + // GetOrgsIDSecrets request + GetOrgsIDSecrets(ctx context.Context, orgID string, params *GetOrgsIDSecretsParams) (*http.Response, error) + + // PatchOrgsIDSecrets request with any body + PatchOrgsIDSecretsWithBody(ctx context.Context, orgID string, params *PatchOrgsIDSecretsParams, contentType string, body io.Reader) 
(*http.Response, error) + + PatchOrgsIDSecrets(ctx context.Context, orgID string, params *PatchOrgsIDSecretsParams, body PatchOrgsIDSecretsJSONRequestBody) (*http.Response, error) + + // PostOrgsIDSecrets request with any body + PostOrgsIDSecretsWithBody(ctx context.Context, orgID string, params *PostOrgsIDSecretsParams, contentType string, body io.Reader) (*http.Response, error) + + PostOrgsIDSecrets(ctx context.Context, orgID string, params *PostOrgsIDSecretsParams, body PostOrgsIDSecretsJSONRequestBody) (*http.Response, error) + + // DeleteOrgsIDSecretsID request + DeleteOrgsIDSecretsID(ctx context.Context, orgID string, secretID string, params *DeleteOrgsIDSecretsIDParams) (*http.Response, error) + + // GetPing request + GetPing(ctx context.Context) (*http.Response, error) + + // HeadPing request + HeadPing(ctx context.Context) (*http.Response, error) + + // PostQuery request with any body + PostQueryWithBody(ctx context.Context, params *PostQueryParams, contentType string, body io.Reader) (*http.Response, error) + + PostQuery(ctx context.Context, params *PostQueryParams, body PostQueryJSONRequestBody) (*http.Response, error) + + // PostQueryAnalyze request with any body + PostQueryAnalyzeWithBody(ctx context.Context, params *PostQueryAnalyzeParams, contentType string, body io.Reader) (*http.Response, error) + + PostQueryAnalyze(ctx context.Context, params *PostQueryAnalyzeParams, body PostQueryAnalyzeJSONRequestBody) (*http.Response, error) + + // PostQueryAst request with any body + PostQueryAstWithBody(ctx context.Context, params *PostQueryAstParams, contentType string, body io.Reader) (*http.Response, error) + + PostQueryAst(ctx context.Context, params *PostQueryAstParams, body PostQueryAstJSONRequestBody) (*http.Response, error) + + // GetQuerySuggestions request + GetQuerySuggestions(ctx context.Context, params *GetQuerySuggestionsParams) (*http.Response, error) + + // GetQuerySuggestionsName request + GetQuerySuggestionsName(ctx context.Context, name string, params *GetQuerySuggestionsNameParams) (*http.Response, error) + + // GetReady request + GetReady(ctx context.Context, params *GetReadyParams) (*http.Response, error) + + // GetRemoteConnections request + GetRemoteConnections(ctx context.Context, params *GetRemoteConnectionsParams) (*http.Response, error) + + // PostRemoteConnection request with any body + PostRemoteConnectionWithBody(ctx context.Context, contentType string, body io.Reader) (*http.Response, error) + + PostRemoteConnection(ctx context.Context, body PostRemoteConnectionJSONRequestBody) (*http.Response, error) + + // DeleteRemoteConnectionByID request + DeleteRemoteConnectionByID(ctx context.Context, remoteID string, params *DeleteRemoteConnectionByIDParams) (*http.Response, error) + + // GetRemoteConnectionByID request + GetRemoteConnectionByID(ctx context.Context, remoteID string, params *GetRemoteConnectionByIDParams) (*http.Response, error) + + // PatchRemoteConnectionByID request with any body + PatchRemoteConnectionByIDWithBody(ctx context.Context, remoteID string, params *PatchRemoteConnectionByIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchRemoteConnectionByID(ctx context.Context, remoteID string, params *PatchRemoteConnectionByIDParams, body PatchRemoteConnectionByIDJSONRequestBody) (*http.Response, error) + + // GetReplications request + GetReplications(ctx context.Context, params *GetReplicationsParams) (*http.Response, error) + + // PostReplication request with any body + PostReplicationWithBody(ctx context.Context, 
params *PostReplicationParams, contentType string, body io.Reader) (*http.Response, error) + + PostReplication(ctx context.Context, params *PostReplicationParams, body PostReplicationJSONRequestBody) (*http.Response, error) + + // DeleteReplicationByID request + DeleteReplicationByID(ctx context.Context, replicationID string, params *DeleteReplicationByIDParams) (*http.Response, error) + + // GetReplicationByID request + GetReplicationByID(ctx context.Context, replicationID string, params *GetReplicationByIDParams) (*http.Response, error) + + // PatchReplicationByID request with any body + PatchReplicationByIDWithBody(ctx context.Context, replicationID string, params *PatchReplicationByIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchReplicationByID(ctx context.Context, replicationID string, params *PatchReplicationByIDParams, body PatchReplicationByIDJSONRequestBody) (*http.Response, error) + + // PostValidateReplicationByID request + PostValidateReplicationByID(ctx context.Context, replicationID string, params *PostValidateReplicationByIDParams) (*http.Response, error) + + // GetResources request + GetResources(ctx context.Context, params *GetResourcesParams) (*http.Response, error) + + // PostRestoreBucketID request with any body + PostRestoreBucketIDWithBody(ctx context.Context, bucketID string, params *PostRestoreBucketIDParams, contentType string, body io.Reader) (*http.Response, error) + + // PostRestoreBucketMetadata request with any body + PostRestoreBucketMetadataWithBody(ctx context.Context, params *PostRestoreBucketMetadataParams, contentType string, body io.Reader) (*http.Response, error) + + PostRestoreBucketMetadata(ctx context.Context, params *PostRestoreBucketMetadataParams, body PostRestoreBucketMetadataJSONRequestBody) (*http.Response, error) + + // PostRestoreKV request with any body + PostRestoreKVWithBody(ctx context.Context, params *PostRestoreKVParams, contentType string, body io.Reader) (*http.Response, error) + + // PostRestoreShardId request with any body + PostRestoreShardIdWithBody(ctx context.Context, shardID string, params *PostRestoreShardIdParams, contentType string, body io.Reader) (*http.Response, error) + + // PostRestoreSQL request with any body + PostRestoreSQLWithBody(ctx context.Context, params *PostRestoreSQLParams, contentType string, body io.Reader) (*http.Response, error) + + // GetScrapers request + GetScrapers(ctx context.Context, params *GetScrapersParams) (*http.Response, error) + + // PostScrapers request with any body + PostScrapersWithBody(ctx context.Context, params *PostScrapersParams, contentType string, body io.Reader) (*http.Response, error) + + PostScrapers(ctx context.Context, params *PostScrapersParams, body PostScrapersJSONRequestBody) (*http.Response, error) + + // DeleteScrapersID request + DeleteScrapersID(ctx context.Context, scraperTargetID string, params *DeleteScrapersIDParams) (*http.Response, error) + + // GetScrapersID request + GetScrapersID(ctx context.Context, scraperTargetID string, params *GetScrapersIDParams) (*http.Response, error) + + // PatchScrapersID request with any body + PatchScrapersIDWithBody(ctx context.Context, scraperTargetID string, params *PatchScrapersIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchScrapersID(ctx context.Context, scraperTargetID string, params *PatchScrapersIDParams, body PatchScrapersIDJSONRequestBody) (*http.Response, error) + + // GetScrapersIDLabels request + GetScrapersIDLabels(ctx context.Context, scraperTargetID 
string, params *GetScrapersIDLabelsParams) (*http.Response, error) + + // PostScrapersIDLabels request with any body + PostScrapersIDLabelsWithBody(ctx context.Context, scraperTargetID string, params *PostScrapersIDLabelsParams, contentType string, body io.Reader) (*http.Response, error) + + PostScrapersIDLabels(ctx context.Context, scraperTargetID string, params *PostScrapersIDLabelsParams, body PostScrapersIDLabelsJSONRequestBody) (*http.Response, error) + + // DeleteScrapersIDLabelsID request + DeleteScrapersIDLabelsID(ctx context.Context, scraperTargetID string, labelID string, params *DeleteScrapersIDLabelsIDParams) (*http.Response, error) + + // GetScrapersIDMembers request + GetScrapersIDMembers(ctx context.Context, scraperTargetID string, params *GetScrapersIDMembersParams) (*http.Response, error) + + // PostScrapersIDMembers request with any body + PostScrapersIDMembersWithBody(ctx context.Context, scraperTargetID string, params *PostScrapersIDMembersParams, contentType string, body io.Reader) (*http.Response, error) + + PostScrapersIDMembers(ctx context.Context, scraperTargetID string, params *PostScrapersIDMembersParams, body PostScrapersIDMembersJSONRequestBody) (*http.Response, error) + + // DeleteScrapersIDMembersID request + DeleteScrapersIDMembersID(ctx context.Context, scraperTargetID string, userID string, params *DeleteScrapersIDMembersIDParams) (*http.Response, error) + + // GetScrapersIDOwners request + GetScrapersIDOwners(ctx context.Context, scraperTargetID string, params *GetScrapersIDOwnersParams) (*http.Response, error) + + // PostScrapersIDOwners request with any body + PostScrapersIDOwnersWithBody(ctx context.Context, scraperTargetID string, params *PostScrapersIDOwnersParams, contentType string, body io.Reader) (*http.Response, error) + + PostScrapersIDOwners(ctx context.Context, scraperTargetID string, params *PostScrapersIDOwnersParams, body PostScrapersIDOwnersJSONRequestBody) (*http.Response, error) + + // DeleteScrapersIDOwnersID request + DeleteScrapersIDOwnersID(ctx context.Context, scraperTargetID string, userID string, params *DeleteScrapersIDOwnersIDParams) (*http.Response, error) + + // GetSetup request + GetSetup(ctx context.Context, params *GetSetupParams) (*http.Response, error) + + // PostSetup request with any body + PostSetupWithBody(ctx context.Context, params *PostSetupParams, contentType string, body io.Reader) (*http.Response, error) + + PostSetup(ctx context.Context, params *PostSetupParams, body PostSetupJSONRequestBody) (*http.Response, error) + + // PostSignin request + PostSignin(ctx context.Context, params *PostSigninParams) (*http.Response, error) + + // PostSignout request + PostSignout(ctx context.Context, params *PostSignoutParams) (*http.Response, error) + + // GetSources request + GetSources(ctx context.Context, params *GetSourcesParams) (*http.Response, error) + + // PostSources request with any body + PostSourcesWithBody(ctx context.Context, params *PostSourcesParams, contentType string, body io.Reader) (*http.Response, error) + + PostSources(ctx context.Context, params *PostSourcesParams, body PostSourcesJSONRequestBody) (*http.Response, error) + + // DeleteSourcesID request + DeleteSourcesID(ctx context.Context, sourceID string, params *DeleteSourcesIDParams) (*http.Response, error) + + // GetSourcesID request + GetSourcesID(ctx context.Context, sourceID string, params *GetSourcesIDParams) (*http.Response, error) + + // PatchSourcesID request with any body + PatchSourcesIDWithBody(ctx context.Context, sourceID string, params 
*PatchSourcesIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchSourcesID(ctx context.Context, sourceID string, params *PatchSourcesIDParams, body PatchSourcesIDJSONRequestBody) (*http.Response, error) + + // GetSourcesIDBuckets request + GetSourcesIDBuckets(ctx context.Context, sourceID string, params *GetSourcesIDBucketsParams) (*http.Response, error) + + // GetSourcesIDHealth request + GetSourcesIDHealth(ctx context.Context, sourceID string, params *GetSourcesIDHealthParams) (*http.Response, error) + + // ListStacks request + ListStacks(ctx context.Context, params *ListStacksParams) (*http.Response, error) + + // CreateStack request with any body + CreateStackWithBody(ctx context.Context, contentType string, body io.Reader) (*http.Response, error) + + CreateStack(ctx context.Context, body CreateStackJSONRequestBody) (*http.Response, error) + + // DeleteStack request + DeleteStack(ctx context.Context, stackId string, params *DeleteStackParams) (*http.Response, error) + + // ReadStack request + ReadStack(ctx context.Context, stackId string) (*http.Response, error) + + // UpdateStack request with any body + UpdateStackWithBody(ctx context.Context, stackId string, contentType string, body io.Reader) (*http.Response, error) + + UpdateStack(ctx context.Context, stackId string, body UpdateStackJSONRequestBody) (*http.Response, error) + + // UninstallStack request + UninstallStack(ctx context.Context, stackId string) (*http.Response, error) + + // GetTasks request + GetTasks(ctx context.Context, params *GetTasksParams) (*http.Response, error) + + // PostTasks request with any body + PostTasksWithBody(ctx context.Context, params *PostTasksParams, contentType string, body io.Reader) (*http.Response, error) + + PostTasks(ctx context.Context, params *PostTasksParams, body PostTasksJSONRequestBody) (*http.Response, error) + + // DeleteTasksID request + DeleteTasksID(ctx context.Context, taskID string, params *DeleteTasksIDParams) (*http.Response, error) + + // GetTasksID request + GetTasksID(ctx context.Context, taskID string, params *GetTasksIDParams) (*http.Response, error) + + // PatchTasksID request with any body + PatchTasksIDWithBody(ctx context.Context, taskID string, params *PatchTasksIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchTasksID(ctx context.Context, taskID string, params *PatchTasksIDParams, body PatchTasksIDJSONRequestBody) (*http.Response, error) + + // GetTasksIDLabels request + GetTasksIDLabels(ctx context.Context, taskID string, params *GetTasksIDLabelsParams) (*http.Response, error) + + // PostTasksIDLabels request with any body + PostTasksIDLabelsWithBody(ctx context.Context, taskID string, params *PostTasksIDLabelsParams, contentType string, body io.Reader) (*http.Response, error) + + PostTasksIDLabels(ctx context.Context, taskID string, params *PostTasksIDLabelsParams, body PostTasksIDLabelsJSONRequestBody) (*http.Response, error) + + // DeleteTasksIDLabelsID request + DeleteTasksIDLabelsID(ctx context.Context, taskID string, labelID string, params *DeleteTasksIDLabelsIDParams) (*http.Response, error) + + // GetTasksIDLogs request + GetTasksIDLogs(ctx context.Context, taskID string, params *GetTasksIDLogsParams) (*http.Response, error) + + // GetTasksIDMembers request + GetTasksIDMembers(ctx context.Context, taskID string, params *GetTasksIDMembersParams) (*http.Response, error) + + // PostTasksIDMembers request with any body + PostTasksIDMembersWithBody(ctx context.Context, taskID string, params 
*PostTasksIDMembersParams, contentType string, body io.Reader) (*http.Response, error) + + PostTasksIDMembers(ctx context.Context, taskID string, params *PostTasksIDMembersParams, body PostTasksIDMembersJSONRequestBody) (*http.Response, error) + + // DeleteTasksIDMembersID request + DeleteTasksIDMembersID(ctx context.Context, taskID string, userID string, params *DeleteTasksIDMembersIDParams) (*http.Response, error) + + // GetTasksIDOwners request + GetTasksIDOwners(ctx context.Context, taskID string, params *GetTasksIDOwnersParams) (*http.Response, error) + + // PostTasksIDOwners request with any body + PostTasksIDOwnersWithBody(ctx context.Context, taskID string, params *PostTasksIDOwnersParams, contentType string, body io.Reader) (*http.Response, error) + + PostTasksIDOwners(ctx context.Context, taskID string, params *PostTasksIDOwnersParams, body PostTasksIDOwnersJSONRequestBody) (*http.Response, error) + + // DeleteTasksIDOwnersID request + DeleteTasksIDOwnersID(ctx context.Context, taskID string, userID string, params *DeleteTasksIDOwnersIDParams) (*http.Response, error) + + // GetTasksIDRuns request + GetTasksIDRuns(ctx context.Context, taskID string, params *GetTasksIDRunsParams) (*http.Response, error) + + // PostTasksIDRuns request with any body + PostTasksIDRunsWithBody(ctx context.Context, taskID string, params *PostTasksIDRunsParams, contentType string, body io.Reader) (*http.Response, error) + + PostTasksIDRuns(ctx context.Context, taskID string, params *PostTasksIDRunsParams, body PostTasksIDRunsJSONRequestBody) (*http.Response, error) + + // DeleteTasksIDRunsID request + DeleteTasksIDRunsID(ctx context.Context, taskID string, runID string, params *DeleteTasksIDRunsIDParams) (*http.Response, error) + + // GetTasksIDRunsID request + GetTasksIDRunsID(ctx context.Context, taskID string, runID string, params *GetTasksIDRunsIDParams) (*http.Response, error) + + // GetTasksIDRunsIDLogs request + GetTasksIDRunsIDLogs(ctx context.Context, taskID string, runID string, params *GetTasksIDRunsIDLogsParams) (*http.Response, error) + + // PostTasksIDRunsIDRetry request with any body + PostTasksIDRunsIDRetryWithBody(ctx context.Context, taskID string, runID string, params *PostTasksIDRunsIDRetryParams, contentType string, body io.Reader) (*http.Response, error) + + // GetTelegrafPlugins request + GetTelegrafPlugins(ctx context.Context, params *GetTelegrafPluginsParams) (*http.Response, error) + + // GetTelegrafs request + GetTelegrafs(ctx context.Context, params *GetTelegrafsParams) (*http.Response, error) + + // PostTelegrafs request with any body + PostTelegrafsWithBody(ctx context.Context, params *PostTelegrafsParams, contentType string, body io.Reader) (*http.Response, error) + + PostTelegrafs(ctx context.Context, params *PostTelegrafsParams, body PostTelegrafsJSONRequestBody) (*http.Response, error) + + // DeleteTelegrafsID request + DeleteTelegrafsID(ctx context.Context, telegrafID string, params *DeleteTelegrafsIDParams) (*http.Response, error) + + // GetTelegrafsID request + GetTelegrafsID(ctx context.Context, telegrafID string, params *GetTelegrafsIDParams) (*http.Response, error) + + // PutTelegrafsID request with any body + PutTelegrafsIDWithBody(ctx context.Context, telegrafID string, params *PutTelegrafsIDParams, contentType string, body io.Reader) (*http.Response, error) + + PutTelegrafsID(ctx context.Context, telegrafID string, params *PutTelegrafsIDParams, body PutTelegrafsIDJSONRequestBody) (*http.Response, error) + + // GetTelegrafsIDLabels request + 
GetTelegrafsIDLabels(ctx context.Context, telegrafID string, params *GetTelegrafsIDLabelsParams) (*http.Response, error) + + // PostTelegrafsIDLabels request with any body + PostTelegrafsIDLabelsWithBody(ctx context.Context, telegrafID string, params *PostTelegrafsIDLabelsParams, contentType string, body io.Reader) (*http.Response, error) + + PostTelegrafsIDLabels(ctx context.Context, telegrafID string, params *PostTelegrafsIDLabelsParams, body PostTelegrafsIDLabelsJSONRequestBody) (*http.Response, error) + + // DeleteTelegrafsIDLabelsID request + DeleteTelegrafsIDLabelsID(ctx context.Context, telegrafID string, labelID string, params *DeleteTelegrafsIDLabelsIDParams) (*http.Response, error) + + // GetTelegrafsIDMembers request + GetTelegrafsIDMembers(ctx context.Context, telegrafID string, params *GetTelegrafsIDMembersParams) (*http.Response, error) + + // PostTelegrafsIDMembers request with any body + PostTelegrafsIDMembersWithBody(ctx context.Context, telegrafID string, params *PostTelegrafsIDMembersParams, contentType string, body io.Reader) (*http.Response, error) + + PostTelegrafsIDMembers(ctx context.Context, telegrafID string, params *PostTelegrafsIDMembersParams, body PostTelegrafsIDMembersJSONRequestBody) (*http.Response, error) + + // DeleteTelegrafsIDMembersID request + DeleteTelegrafsIDMembersID(ctx context.Context, telegrafID string, userID string, params *DeleteTelegrafsIDMembersIDParams) (*http.Response, error) + + // GetTelegrafsIDOwners request + GetTelegrafsIDOwners(ctx context.Context, telegrafID string, params *GetTelegrafsIDOwnersParams) (*http.Response, error) + + // PostTelegrafsIDOwners request with any body + PostTelegrafsIDOwnersWithBody(ctx context.Context, telegrafID string, params *PostTelegrafsIDOwnersParams, contentType string, body io.Reader) (*http.Response, error) + + PostTelegrafsIDOwners(ctx context.Context, telegrafID string, params *PostTelegrafsIDOwnersParams, body PostTelegrafsIDOwnersJSONRequestBody) (*http.Response, error) + + // DeleteTelegrafsIDOwnersID request + DeleteTelegrafsIDOwnersID(ctx context.Context, telegrafID string, userID string, params *DeleteTelegrafsIDOwnersIDParams) (*http.Response, error) + + // ApplyTemplate request with any body + ApplyTemplateWithBody(ctx context.Context, contentType string, body io.Reader) (*http.Response, error) + + ApplyTemplate(ctx context.Context, body ApplyTemplateJSONRequestBody) (*http.Response, error) + + // ExportTemplate request with any body + ExportTemplateWithBody(ctx context.Context, contentType string, body io.Reader) (*http.Response, error) + + ExportTemplate(ctx context.Context, body ExportTemplateJSONRequestBody) (*http.Response, error) + + // GetUsers request + GetUsers(ctx context.Context, params *GetUsersParams) (*http.Response, error) + + // PostUsers request with any body + PostUsersWithBody(ctx context.Context, params *PostUsersParams, contentType string, body io.Reader) (*http.Response, error) + + PostUsers(ctx context.Context, params *PostUsersParams, body PostUsersJSONRequestBody) (*http.Response, error) + + // DeleteUsersID request + DeleteUsersID(ctx context.Context, userID string, params *DeleteUsersIDParams) (*http.Response, error) + + // GetUsersID request + GetUsersID(ctx context.Context, userID string, params *GetUsersIDParams) (*http.Response, error) + + // PatchUsersID request with any body + PatchUsersIDWithBody(ctx context.Context, userID string, params *PatchUsersIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchUsersID(ctx context.Context, 
userID string, params *PatchUsersIDParams, body PatchUsersIDJSONRequestBody) (*http.Response, error) + + // PostUsersIDPassword request with any body + PostUsersIDPasswordWithBody(ctx context.Context, userID string, params *PostUsersIDPasswordParams, contentType string, body io.Reader) (*http.Response, error) + + PostUsersIDPassword(ctx context.Context, userID string, params *PostUsersIDPasswordParams, body PostUsersIDPasswordJSONRequestBody) (*http.Response, error) + + // GetVariables request + GetVariables(ctx context.Context, params *GetVariablesParams) (*http.Response, error) + + // PostVariables request with any body + PostVariablesWithBody(ctx context.Context, params *PostVariablesParams, contentType string, body io.Reader) (*http.Response, error) + + PostVariables(ctx context.Context, params *PostVariablesParams, body PostVariablesJSONRequestBody) (*http.Response, error) + + // DeleteVariablesID request + DeleteVariablesID(ctx context.Context, variableID string, params *DeleteVariablesIDParams) (*http.Response, error) + + // GetVariablesID request + GetVariablesID(ctx context.Context, variableID string, params *GetVariablesIDParams) (*http.Response, error) + + // PatchVariablesID request with any body + PatchVariablesIDWithBody(ctx context.Context, variableID string, params *PatchVariablesIDParams, contentType string, body io.Reader) (*http.Response, error) + + PatchVariablesID(ctx context.Context, variableID string, params *PatchVariablesIDParams, body PatchVariablesIDJSONRequestBody) (*http.Response, error) + + // PutVariablesID request with any body + PutVariablesIDWithBody(ctx context.Context, variableID string, params *PutVariablesIDParams, contentType string, body io.Reader) (*http.Response, error) + + PutVariablesID(ctx context.Context, variableID string, params *PutVariablesIDParams, body PutVariablesIDJSONRequestBody) (*http.Response, error) + + // GetVariablesIDLabels request + GetVariablesIDLabels(ctx context.Context, variableID string, params *GetVariablesIDLabelsParams) (*http.Response, error) + + // PostVariablesIDLabels request with any body + PostVariablesIDLabelsWithBody(ctx context.Context, variableID string, params *PostVariablesIDLabelsParams, contentType string, body io.Reader) (*http.Response, error) + + PostVariablesIDLabels(ctx context.Context, variableID string, params *PostVariablesIDLabelsParams, body PostVariablesIDLabelsJSONRequestBody) (*http.Response, error) + + // DeleteVariablesIDLabelsID request + DeleteVariablesIDLabelsID(ctx context.Context, variableID string, labelID string, params *DeleteVariablesIDLabelsIDParams) (*http.Response, error) + + // PostWrite request with any body + PostWriteWithBody(ctx context.Context, params *PostWriteParams, contentType string, body io.Reader) (*http.Response, error) +} + +func (c *Client) GetRoutes(ctx context.Context, params *GetRoutesParams) (*http.Response, error) { + req, err := NewGetRoutesRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetAuthorizations(ctx context.Context, params *GetAuthorizationsParams) (*http.Response, error) { + req, err := NewGetAuthorizationsRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostAuthorizationsWithBody(ctx context.Context, params *PostAuthorizationsParams, contentType string, body io.Reader) 
(*http.Response, error) { + req, err := NewPostAuthorizationsRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostAuthorizations(ctx context.Context, params *PostAuthorizationsParams, body PostAuthorizationsJSONRequestBody) (*http.Response, error) { + req, err := NewPostAuthorizationsRequest(c.service.ServerAPIURL(), params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteAuthorizationsID(ctx context.Context, authID string, params *DeleteAuthorizationsIDParams) (*http.Response, error) { + req, err := NewDeleteAuthorizationsIDRequest(c.service.ServerAPIURL(), authID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetAuthorizationsID(ctx context.Context, authID string, params *GetAuthorizationsIDParams) (*http.Response, error) { + req, err := NewGetAuthorizationsIDRequest(c.service.ServerAPIURL(), authID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchAuthorizationsIDWithBody(ctx context.Context, authID string, params *PatchAuthorizationsIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchAuthorizationsIDRequestWithBody(c.service.ServerAPIURL(), authID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchAuthorizationsID(ctx context.Context, authID string, params *PatchAuthorizationsIDParams, body PatchAuthorizationsIDJSONRequestBody) (*http.Response, error) { + req, err := NewPatchAuthorizationsIDRequest(c.service.ServerAPIURL(), authID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetBackupKV(ctx context.Context, params *GetBackupKVParams) (*http.Response, error) { + req, err := NewGetBackupKVRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetBackupMetadata(ctx context.Context, params *GetBackupMetadataParams) (*http.Response, error) { + req, err := NewGetBackupMetadataRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetBackupShardId(ctx context.Context, shardID int64, params *GetBackupShardIdParams) (*http.Response, error) { + req, err := NewGetBackupShardIdRequest(c.service.ServerAPIURL(), shardID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetBuckets(ctx context.Context, params *GetBucketsParams) (*http.Response, error) { + req, err := NewGetBucketsRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostBucketsWithBody(ctx context.Context, params *PostBucketsParams, contentType string, body 
io.Reader) (*http.Response, error) { + req, err := NewPostBucketsRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostBuckets(ctx context.Context, params *PostBucketsParams, body PostBucketsJSONRequestBody) (*http.Response, error) { + req, err := NewPostBucketsRequest(c.service.ServerAPIURL(), params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteBucketsID(ctx context.Context, bucketID string, params *DeleteBucketsIDParams) (*http.Response, error) { + req, err := NewDeleteBucketsIDRequest(c.service.ServerAPIURL(), bucketID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetBucketsID(ctx context.Context, bucketID string, params *GetBucketsIDParams) (*http.Response, error) { + req, err := NewGetBucketsIDRequest(c.service.ServerAPIURL(), bucketID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchBucketsIDWithBody(ctx context.Context, bucketID string, params *PatchBucketsIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchBucketsIDRequestWithBody(c.service.ServerAPIURL(), bucketID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchBucketsID(ctx context.Context, bucketID string, params *PatchBucketsIDParams, body PatchBucketsIDJSONRequestBody) (*http.Response, error) { + req, err := NewPatchBucketsIDRequest(c.service.ServerAPIURL(), bucketID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetBucketsIDLabels(ctx context.Context, bucketID string, params *GetBucketsIDLabelsParams) (*http.Response, error) { + req, err := NewGetBucketsIDLabelsRequest(c.service.ServerAPIURL(), bucketID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostBucketsIDLabelsWithBody(ctx context.Context, bucketID string, params *PostBucketsIDLabelsParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostBucketsIDLabelsRequestWithBody(c.service.ServerAPIURL(), bucketID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostBucketsIDLabels(ctx context.Context, bucketID string, params *PostBucketsIDLabelsParams, body PostBucketsIDLabelsJSONRequestBody) (*http.Response, error) { + req, err := NewPostBucketsIDLabelsRequest(c.service.ServerAPIURL(), bucketID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteBucketsIDLabelsID(ctx context.Context, bucketID string, labelID string, params *DeleteBucketsIDLabelsIDParams) (*http.Response, error) { + req, err := NewDeleteBucketsIDLabelsIDRequest(c.service.ServerAPIURL(), bucketID, labelID, params) + if err != nil { + return nil, err + } + 
req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetBucketsIDMembers(ctx context.Context, bucketID string, params *GetBucketsIDMembersParams) (*http.Response, error) { + req, err := NewGetBucketsIDMembersRequest(c.service.ServerAPIURL(), bucketID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostBucketsIDMembersWithBody(ctx context.Context, bucketID string, params *PostBucketsIDMembersParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostBucketsIDMembersRequestWithBody(c.service.ServerAPIURL(), bucketID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostBucketsIDMembers(ctx context.Context, bucketID string, params *PostBucketsIDMembersParams, body PostBucketsIDMembersJSONRequestBody) (*http.Response, error) { + req, err := NewPostBucketsIDMembersRequest(c.service.ServerAPIURL(), bucketID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteBucketsIDMembersID(ctx context.Context, bucketID string, userID string, params *DeleteBucketsIDMembersIDParams) (*http.Response, error) { + req, err := NewDeleteBucketsIDMembersIDRequest(c.service.ServerAPIURL(), bucketID, userID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetBucketsIDOwners(ctx context.Context, bucketID string, params *GetBucketsIDOwnersParams) (*http.Response, error) { + req, err := NewGetBucketsIDOwnersRequest(c.service.ServerAPIURL(), bucketID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostBucketsIDOwnersWithBody(ctx context.Context, bucketID string, params *PostBucketsIDOwnersParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostBucketsIDOwnersRequestWithBody(c.service.ServerAPIURL(), bucketID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostBucketsIDOwners(ctx context.Context, bucketID string, params *PostBucketsIDOwnersParams, body PostBucketsIDOwnersJSONRequestBody) (*http.Response, error) { + req, err := NewPostBucketsIDOwnersRequest(c.service.ServerAPIURL(), bucketID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteBucketsIDOwnersID(ctx context.Context, bucketID string, userID string, params *DeleteBucketsIDOwnersIDParams) (*http.Response, error) { + req, err := NewDeleteBucketsIDOwnersIDRequest(c.service.ServerAPIURL(), bucketID, userID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetChecks(ctx context.Context, params *GetChecksParams) (*http.Response, error) { + req, err := NewGetChecksRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) 
CreateCheckWithBody(ctx context.Context, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewCreateCheckRequestWithBody(c.service.ServerAPIURL(), contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) CreateCheck(ctx context.Context, body CreateCheckJSONRequestBody) (*http.Response, error) { + req, err := NewCreateCheckRequest(c.service.ServerAPIURL(), body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteChecksID(ctx context.Context, checkID string, params *DeleteChecksIDParams) (*http.Response, error) { + req, err := NewDeleteChecksIDRequest(c.service.ServerAPIURL(), checkID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetChecksID(ctx context.Context, checkID string, params *GetChecksIDParams) (*http.Response, error) { + req, err := NewGetChecksIDRequest(c.service.ServerAPIURL(), checkID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchChecksIDWithBody(ctx context.Context, checkID string, params *PatchChecksIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchChecksIDRequestWithBody(c.service.ServerAPIURL(), checkID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchChecksID(ctx context.Context, checkID string, params *PatchChecksIDParams, body PatchChecksIDJSONRequestBody) (*http.Response, error) { + req, err := NewPatchChecksIDRequest(c.service.ServerAPIURL(), checkID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PutChecksIDWithBody(ctx context.Context, checkID string, params *PutChecksIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPutChecksIDRequestWithBody(c.service.ServerAPIURL(), checkID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PutChecksID(ctx context.Context, checkID string, params *PutChecksIDParams, body PutChecksIDJSONRequestBody) (*http.Response, error) { + req, err := NewPutChecksIDRequest(c.service.ServerAPIURL(), checkID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetChecksIDLabels(ctx context.Context, checkID string, params *GetChecksIDLabelsParams) (*http.Response, error) { + req, err := NewGetChecksIDLabelsRequest(c.service.ServerAPIURL(), checkID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostChecksIDLabelsWithBody(ctx context.Context, checkID string, params *PostChecksIDLabelsParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostChecksIDLabelsRequestWithBody(c.service.ServerAPIURL(), checkID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return 
c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostChecksIDLabels(ctx context.Context, checkID string, params *PostChecksIDLabelsParams, body PostChecksIDLabelsJSONRequestBody) (*http.Response, error) { + req, err := NewPostChecksIDLabelsRequest(c.service.ServerAPIURL(), checkID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteChecksIDLabelsID(ctx context.Context, checkID string, labelID string, params *DeleteChecksIDLabelsIDParams) (*http.Response, error) { + req, err := NewDeleteChecksIDLabelsIDRequest(c.service.ServerAPIURL(), checkID, labelID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetChecksIDQuery(ctx context.Context, checkID string, params *GetChecksIDQueryParams) (*http.Response, error) { + req, err := NewGetChecksIDQueryRequest(c.service.ServerAPIURL(), checkID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetConfig(ctx context.Context, params *GetConfigParams) (*http.Response, error) { + req, err := NewGetConfigRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetDashboards(ctx context.Context, params *GetDashboardsParams) (*http.Response, error) { + req, err := NewGetDashboardsRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostDashboardsWithBody(ctx context.Context, params *PostDashboardsParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostDashboardsRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostDashboards(ctx context.Context, params *PostDashboardsParams, body PostDashboardsJSONRequestBody) (*http.Response, error) { + req, err := NewPostDashboardsRequest(c.service.ServerAPIURL(), params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteDashboardsID(ctx context.Context, dashboardID string, params *DeleteDashboardsIDParams) (*http.Response, error) { + req, err := NewDeleteDashboardsIDRequest(c.service.ServerAPIURL(), dashboardID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetDashboardsID(ctx context.Context, dashboardID string, params *GetDashboardsIDParams) (*http.Response, error) { + req, err := NewGetDashboardsIDRequest(c.service.ServerAPIURL(), dashboardID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchDashboardsIDWithBody(ctx context.Context, dashboardID string, params *PatchDashboardsIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchDashboardsIDRequestWithBody(c.service.ServerAPIURL(), dashboardID, params, contentType, body) + if err != nil { + return nil, err 
+ } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchDashboardsID(ctx context.Context, dashboardID string, params *PatchDashboardsIDParams, body PatchDashboardsIDJSONRequestBody) (*http.Response, error) { + req, err := NewPatchDashboardsIDRequest(c.service.ServerAPIURL(), dashboardID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostDashboardsIDCellsWithBody(ctx context.Context, dashboardID string, params *PostDashboardsIDCellsParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostDashboardsIDCellsRequestWithBody(c.service.ServerAPIURL(), dashboardID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostDashboardsIDCells(ctx context.Context, dashboardID string, params *PostDashboardsIDCellsParams, body PostDashboardsIDCellsJSONRequestBody) (*http.Response, error) { + req, err := NewPostDashboardsIDCellsRequest(c.service.ServerAPIURL(), dashboardID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PutDashboardsIDCellsWithBody(ctx context.Context, dashboardID string, params *PutDashboardsIDCellsParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPutDashboardsIDCellsRequestWithBody(c.service.ServerAPIURL(), dashboardID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PutDashboardsIDCells(ctx context.Context, dashboardID string, params *PutDashboardsIDCellsParams, body PutDashboardsIDCellsJSONRequestBody) (*http.Response, error) { + req, err := NewPutDashboardsIDCellsRequest(c.service.ServerAPIURL(), dashboardID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteDashboardsIDCellsID(ctx context.Context, dashboardID string, cellID string, params *DeleteDashboardsIDCellsIDParams) (*http.Response, error) { + req, err := NewDeleteDashboardsIDCellsIDRequest(c.service.ServerAPIURL(), dashboardID, cellID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchDashboardsIDCellsIDWithBody(ctx context.Context, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchDashboardsIDCellsIDRequestWithBody(c.service.ServerAPIURL(), dashboardID, cellID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchDashboardsIDCellsID(ctx context.Context, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDParams, body PatchDashboardsIDCellsIDJSONRequestBody) (*http.Response, error) { + req, err := NewPatchDashboardsIDCellsIDRequest(c.service.ServerAPIURL(), dashboardID, cellID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) 
GetDashboardsIDCellsIDView(ctx context.Context, dashboardID string, cellID string, params *GetDashboardsIDCellsIDViewParams) (*http.Response, error) { + req, err := NewGetDashboardsIDCellsIDViewRequest(c.service.ServerAPIURL(), dashboardID, cellID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchDashboardsIDCellsIDViewWithBody(ctx context.Context, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDViewParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchDashboardsIDCellsIDViewRequestWithBody(c.service.ServerAPIURL(), dashboardID, cellID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchDashboardsIDCellsIDView(ctx context.Context, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDViewParams, body PatchDashboardsIDCellsIDViewJSONRequestBody) (*http.Response, error) { + req, err := NewPatchDashboardsIDCellsIDViewRequest(c.service.ServerAPIURL(), dashboardID, cellID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetDashboardsIDLabels(ctx context.Context, dashboardID string, params *GetDashboardsIDLabelsParams) (*http.Response, error) { + req, err := NewGetDashboardsIDLabelsRequest(c.service.ServerAPIURL(), dashboardID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostDashboardsIDLabelsWithBody(ctx context.Context, dashboardID string, params *PostDashboardsIDLabelsParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostDashboardsIDLabelsRequestWithBody(c.service.ServerAPIURL(), dashboardID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostDashboardsIDLabels(ctx context.Context, dashboardID string, params *PostDashboardsIDLabelsParams, body PostDashboardsIDLabelsJSONRequestBody) (*http.Response, error) { + req, err := NewPostDashboardsIDLabelsRequest(c.service.ServerAPIURL(), dashboardID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteDashboardsIDLabelsID(ctx context.Context, dashboardID string, labelID string, params *DeleteDashboardsIDLabelsIDParams) (*http.Response, error) { + req, err := NewDeleteDashboardsIDLabelsIDRequest(c.service.ServerAPIURL(), dashboardID, labelID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetDashboardsIDMembers(ctx context.Context, dashboardID string, params *GetDashboardsIDMembersParams) (*http.Response, error) { + req, err := NewGetDashboardsIDMembersRequest(c.service.ServerAPIURL(), dashboardID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostDashboardsIDMembersWithBody(ctx context.Context, dashboardID string, params *PostDashboardsIDMembersParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := 
NewPostDashboardsIDMembersRequestWithBody(c.service.ServerAPIURL(), dashboardID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostDashboardsIDMembers(ctx context.Context, dashboardID string, params *PostDashboardsIDMembersParams, body PostDashboardsIDMembersJSONRequestBody) (*http.Response, error) { + req, err := NewPostDashboardsIDMembersRequest(c.service.ServerAPIURL(), dashboardID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteDashboardsIDMembersID(ctx context.Context, dashboardID string, userID string, params *DeleteDashboardsIDMembersIDParams) (*http.Response, error) { + req, err := NewDeleteDashboardsIDMembersIDRequest(c.service.ServerAPIURL(), dashboardID, userID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetDashboardsIDOwners(ctx context.Context, dashboardID string, params *GetDashboardsIDOwnersParams) (*http.Response, error) { + req, err := NewGetDashboardsIDOwnersRequest(c.service.ServerAPIURL(), dashboardID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostDashboardsIDOwnersWithBody(ctx context.Context, dashboardID string, params *PostDashboardsIDOwnersParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostDashboardsIDOwnersRequestWithBody(c.service.ServerAPIURL(), dashboardID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostDashboardsIDOwners(ctx context.Context, dashboardID string, params *PostDashboardsIDOwnersParams, body PostDashboardsIDOwnersJSONRequestBody) (*http.Response, error) { + req, err := NewPostDashboardsIDOwnersRequest(c.service.ServerAPIURL(), dashboardID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteDashboardsIDOwnersID(ctx context.Context, dashboardID string, userID string, params *DeleteDashboardsIDOwnersIDParams) (*http.Response, error) { + req, err := NewDeleteDashboardsIDOwnersIDRequest(c.service.ServerAPIURL(), dashboardID, userID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetDBRPs(ctx context.Context, params *GetDBRPsParams) (*http.Response, error) { + req, err := NewGetDBRPsRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostDBRPWithBody(ctx context.Context, params *PostDBRPParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostDBRPRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostDBRP(ctx context.Context, params *PostDBRPParams, body PostDBRPJSONRequestBody) (*http.Response, error) { + req, err := NewPostDBRPRequest(c.service.ServerAPIURL(), params, 
body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteDBRPID(ctx context.Context, dbrpID string, params *DeleteDBRPIDParams) (*http.Response, error) { + req, err := NewDeleteDBRPIDRequest(c.service.ServerAPIURL(), dbrpID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetDBRPsID(ctx context.Context, dbrpID string, params *GetDBRPsIDParams) (*http.Response, error) { + req, err := NewGetDBRPsIDRequest(c.service.ServerAPIURL(), dbrpID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchDBRPIDWithBody(ctx context.Context, dbrpID string, params *PatchDBRPIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchDBRPIDRequestWithBody(c.service.ServerAPIURL(), dbrpID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchDBRPID(ctx context.Context, dbrpID string, params *PatchDBRPIDParams, body PatchDBRPIDJSONRequestBody) (*http.Response, error) { + req, err := NewPatchDBRPIDRequest(c.service.ServerAPIURL(), dbrpID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostDeleteWithBody(ctx context.Context, params *PostDeleteParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostDeleteRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostDelete(ctx context.Context, params *PostDeleteParams, body PostDeleteJSONRequestBody) (*http.Response, error) { + req, err := NewPostDeleteRequest(c.service.ServerAPIURL(), params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetFlags(ctx context.Context, params *GetFlagsParams) (*http.Response, error) { + req, err := NewGetFlagsRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetHealth(ctx context.Context, params *GetHealthParams) (*http.Response, error) { + req, err := NewGetHealthRequest(c.service.ServerURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetLabels(ctx context.Context, params *GetLabelsParams) (*http.Response, error) { + req, err := NewGetLabelsRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostLabelsWithBody(ctx context.Context, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostLabelsRequestWithBody(c.service.ServerAPIURL(), contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostLabels(ctx context.Context, body 
PostLabelsJSONRequestBody) (*http.Response, error) { + req, err := NewPostLabelsRequest(c.service.ServerAPIURL(), body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteLabelsID(ctx context.Context, labelID string, params *DeleteLabelsIDParams) (*http.Response, error) { + req, err := NewDeleteLabelsIDRequest(c.service.ServerAPIURL(), labelID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetLabelsID(ctx context.Context, labelID string, params *GetLabelsIDParams) (*http.Response, error) { + req, err := NewGetLabelsIDRequest(c.service.ServerAPIURL(), labelID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchLabelsIDWithBody(ctx context.Context, labelID string, params *PatchLabelsIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchLabelsIDRequestWithBody(c.service.ServerAPIURL(), labelID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchLabelsID(ctx context.Context, labelID string, params *PatchLabelsIDParams, body PatchLabelsIDJSONRequestBody) (*http.Response, error) { + req, err := NewPatchLabelsIDRequest(c.service.ServerAPIURL(), labelID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetLegacyAuthorizations(ctx context.Context, params *GetLegacyAuthorizationsParams) (*http.Response, error) { + req, err := NewGetLegacyAuthorizationsRequest(c.service.ServerURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostLegacyAuthorizationsWithBody(ctx context.Context, params *PostLegacyAuthorizationsParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostLegacyAuthorizationsRequestWithBody(c.service.ServerURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostLegacyAuthorizations(ctx context.Context, params *PostLegacyAuthorizationsParams, body PostLegacyAuthorizationsJSONRequestBody) (*http.Response, error) { + req, err := NewPostLegacyAuthorizationsRequest(c.service.ServerURL(), params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteLegacyAuthorizationsID(ctx context.Context, authID string, params *DeleteLegacyAuthorizationsIDParams) (*http.Response, error) { + req, err := NewDeleteLegacyAuthorizationsIDRequest(c.service.ServerURL(), authID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetLegacyAuthorizationsID(ctx context.Context, authID string, params *GetLegacyAuthorizationsIDParams) (*http.Response, error) { + req, err := NewGetLegacyAuthorizationsIDRequest(c.service.ServerURL(), authID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return 
c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchLegacyAuthorizationsIDWithBody(ctx context.Context, authID string, params *PatchLegacyAuthorizationsIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchLegacyAuthorizationsIDRequestWithBody(c.service.ServerURL(), authID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchLegacyAuthorizationsID(ctx context.Context, authID string, params *PatchLegacyAuthorizationsIDParams, body PatchLegacyAuthorizationsIDJSONRequestBody) (*http.Response, error) { + req, err := NewPatchLegacyAuthorizationsIDRequest(c.service.ServerURL(), authID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostLegacyAuthorizationsIDPasswordWithBody(ctx context.Context, authID string, params *PostLegacyAuthorizationsIDPasswordParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostLegacyAuthorizationsIDPasswordRequestWithBody(c.service.ServerURL(), authID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostLegacyAuthorizationsIDPassword(ctx context.Context, authID string, params *PostLegacyAuthorizationsIDPasswordParams, body PostLegacyAuthorizationsIDPasswordJSONRequestBody) (*http.Response, error) { + req, err := NewPostLegacyAuthorizationsIDPasswordRequest(c.service.ServerURL(), authID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetMe(ctx context.Context, params *GetMeParams) (*http.Response, error) { + req, err := NewGetMeRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PutMePasswordWithBody(ctx context.Context, params *PutMePasswordParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPutMePasswordRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PutMePassword(ctx context.Context, params *PutMePasswordParams, body PutMePasswordJSONRequestBody) (*http.Response, error) { + req, err := NewPutMePasswordRequest(c.service.ServerAPIURL(), params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetMetrics(ctx context.Context, params *GetMetricsParams) (*http.Response, error) { + req, err := NewGetMetricsRequest(c.service.ServerURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetNotificationEndpoints(ctx context.Context, params *GetNotificationEndpointsParams) (*http.Response, error) { + req, err := NewGetNotificationEndpointsRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) CreateNotificationEndpointWithBody(ctx 
context.Context, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewCreateNotificationEndpointRequestWithBody(c.service.ServerAPIURL(), contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) CreateNotificationEndpoint(ctx context.Context, body CreateNotificationEndpointJSONRequestBody) (*http.Response, error) { + req, err := NewCreateNotificationEndpointRequest(c.service.ServerAPIURL(), body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteNotificationEndpointsID(ctx context.Context, endpointID string, params *DeleteNotificationEndpointsIDParams) (*http.Response, error) { + req, err := NewDeleteNotificationEndpointsIDRequest(c.service.ServerAPIURL(), endpointID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetNotificationEndpointsID(ctx context.Context, endpointID string, params *GetNotificationEndpointsIDParams) (*http.Response, error) { + req, err := NewGetNotificationEndpointsIDRequest(c.service.ServerAPIURL(), endpointID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchNotificationEndpointsIDWithBody(ctx context.Context, endpointID string, params *PatchNotificationEndpointsIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchNotificationEndpointsIDRequestWithBody(c.service.ServerAPIURL(), endpointID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchNotificationEndpointsID(ctx context.Context, endpointID string, params *PatchNotificationEndpointsIDParams, body PatchNotificationEndpointsIDJSONRequestBody) (*http.Response, error) { + req, err := NewPatchNotificationEndpointsIDRequest(c.service.ServerAPIURL(), endpointID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PutNotificationEndpointsIDWithBody(ctx context.Context, endpointID string, params *PutNotificationEndpointsIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPutNotificationEndpointsIDRequestWithBody(c.service.ServerAPIURL(), endpointID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PutNotificationEndpointsID(ctx context.Context, endpointID string, params *PutNotificationEndpointsIDParams, body PutNotificationEndpointsIDJSONRequestBody) (*http.Response, error) { + req, err := NewPutNotificationEndpointsIDRequest(c.service.ServerAPIURL(), endpointID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetNotificationEndpointsIDLabels(ctx context.Context, endpointID string, params *GetNotificationEndpointsIDLabelsParams) (*http.Response, error) { + req, err := NewGetNotificationEndpointsIDLabelsRequest(c.service.ServerAPIURL(), endpointID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + 
return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostNotificationEndpointIDLabelsWithBody(ctx context.Context, endpointID string, params *PostNotificationEndpointIDLabelsParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostNotificationEndpointIDLabelsRequestWithBody(c.service.ServerAPIURL(), endpointID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostNotificationEndpointIDLabels(ctx context.Context, endpointID string, params *PostNotificationEndpointIDLabelsParams, body PostNotificationEndpointIDLabelsJSONRequestBody) (*http.Response, error) { + req, err := NewPostNotificationEndpointIDLabelsRequest(c.service.ServerAPIURL(), endpointID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteNotificationEndpointsIDLabelsID(ctx context.Context, endpointID string, labelID string, params *DeleteNotificationEndpointsIDLabelsIDParams) (*http.Response, error) { + req, err := NewDeleteNotificationEndpointsIDLabelsIDRequest(c.service.ServerAPIURL(), endpointID, labelID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetNotificationRules(ctx context.Context, params *GetNotificationRulesParams) (*http.Response, error) { + req, err := NewGetNotificationRulesRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) CreateNotificationRuleWithBody(ctx context.Context, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewCreateNotificationRuleRequestWithBody(c.service.ServerAPIURL(), contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) CreateNotificationRule(ctx context.Context, body CreateNotificationRuleJSONRequestBody) (*http.Response, error) { + req, err := NewCreateNotificationRuleRequest(c.service.ServerAPIURL(), body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteNotificationRulesID(ctx context.Context, ruleID string, params *DeleteNotificationRulesIDParams) (*http.Response, error) { + req, err := NewDeleteNotificationRulesIDRequest(c.service.ServerAPIURL(), ruleID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetNotificationRulesID(ctx context.Context, ruleID string, params *GetNotificationRulesIDParams) (*http.Response, error) { + req, err := NewGetNotificationRulesIDRequest(c.service.ServerAPIURL(), ruleID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchNotificationRulesIDWithBody(ctx context.Context, ruleID string, params *PatchNotificationRulesIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchNotificationRulesIDRequestWithBody(c.service.ServerAPIURL(), ruleID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) 
+ return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchNotificationRulesID(ctx context.Context, ruleID string, params *PatchNotificationRulesIDParams, body PatchNotificationRulesIDJSONRequestBody) (*http.Response, error) { + req, err := NewPatchNotificationRulesIDRequest(c.service.ServerAPIURL(), ruleID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PutNotificationRulesIDWithBody(ctx context.Context, ruleID string, params *PutNotificationRulesIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPutNotificationRulesIDRequestWithBody(c.service.ServerAPIURL(), ruleID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PutNotificationRulesID(ctx context.Context, ruleID string, params *PutNotificationRulesIDParams, body PutNotificationRulesIDJSONRequestBody) (*http.Response, error) { + req, err := NewPutNotificationRulesIDRequest(c.service.ServerAPIURL(), ruleID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetNotificationRulesIDLabels(ctx context.Context, ruleID string, params *GetNotificationRulesIDLabelsParams) (*http.Response, error) { + req, err := NewGetNotificationRulesIDLabelsRequest(c.service.ServerAPIURL(), ruleID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostNotificationRuleIDLabelsWithBody(ctx context.Context, ruleID string, params *PostNotificationRuleIDLabelsParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostNotificationRuleIDLabelsRequestWithBody(c.service.ServerAPIURL(), ruleID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostNotificationRuleIDLabels(ctx context.Context, ruleID string, params *PostNotificationRuleIDLabelsParams, body PostNotificationRuleIDLabelsJSONRequestBody) (*http.Response, error) { + req, err := NewPostNotificationRuleIDLabelsRequest(c.service.ServerAPIURL(), ruleID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteNotificationRulesIDLabelsID(ctx context.Context, ruleID string, labelID string, params *DeleteNotificationRulesIDLabelsIDParams) (*http.Response, error) { + req, err := NewDeleteNotificationRulesIDLabelsIDRequest(c.service.ServerAPIURL(), ruleID, labelID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetNotificationRulesIDQuery(ctx context.Context, ruleID string, params *GetNotificationRulesIDQueryParams) (*http.Response, error) { + req, err := NewGetNotificationRulesIDQueryRequest(c.service.ServerAPIURL(), ruleID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetOrgs(ctx context.Context, params *GetOrgsParams) (*http.Response, error) { + req, err := NewGetOrgsRequest(c.service.ServerAPIURL(), params) + if err != nil { + return 
nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostOrgsWithBody(ctx context.Context, params *PostOrgsParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostOrgsRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostOrgs(ctx context.Context, params *PostOrgsParams, body PostOrgsJSONRequestBody) (*http.Response, error) { + req, err := NewPostOrgsRequest(c.service.ServerAPIURL(), params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteOrgsID(ctx context.Context, orgID string, params *DeleteOrgsIDParams) (*http.Response, error) { + req, err := NewDeleteOrgsIDRequest(c.service.ServerAPIURL(), orgID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetOrgsID(ctx context.Context, orgID string, params *GetOrgsIDParams) (*http.Response, error) { + req, err := NewGetOrgsIDRequest(c.service.ServerAPIURL(), orgID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchOrgsIDWithBody(ctx context.Context, orgID string, params *PatchOrgsIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchOrgsIDRequestWithBody(c.service.ServerAPIURL(), orgID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchOrgsID(ctx context.Context, orgID string, params *PatchOrgsIDParams, body PatchOrgsIDJSONRequestBody) (*http.Response, error) { + req, err := NewPatchOrgsIDRequest(c.service.ServerAPIURL(), orgID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetOrgsIDMembers(ctx context.Context, orgID string, params *GetOrgsIDMembersParams) (*http.Response, error) { + req, err := NewGetOrgsIDMembersRequest(c.service.ServerAPIURL(), orgID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostOrgsIDMembersWithBody(ctx context.Context, orgID string, params *PostOrgsIDMembersParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostOrgsIDMembersRequestWithBody(c.service.ServerAPIURL(), orgID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostOrgsIDMembers(ctx context.Context, orgID string, params *PostOrgsIDMembersParams, body PostOrgsIDMembersJSONRequestBody) (*http.Response, error) { + req, err := NewPostOrgsIDMembersRequest(c.service.ServerAPIURL(), orgID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteOrgsIDMembersID(ctx context.Context, orgID string, userID string, params *DeleteOrgsIDMembersIDParams) (*http.Response, error) { + req, err := 
NewDeleteOrgsIDMembersIDRequest(c.service.ServerAPIURL(), orgID, userID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetOrgsIDOwners(ctx context.Context, orgID string, params *GetOrgsIDOwnersParams) (*http.Response, error) { + req, err := NewGetOrgsIDOwnersRequest(c.service.ServerAPIURL(), orgID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostOrgsIDOwnersWithBody(ctx context.Context, orgID string, params *PostOrgsIDOwnersParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostOrgsIDOwnersRequestWithBody(c.service.ServerAPIURL(), orgID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostOrgsIDOwners(ctx context.Context, orgID string, params *PostOrgsIDOwnersParams, body PostOrgsIDOwnersJSONRequestBody) (*http.Response, error) { + req, err := NewPostOrgsIDOwnersRequest(c.service.ServerAPIURL(), orgID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteOrgsIDOwnersID(ctx context.Context, orgID string, userID string, params *DeleteOrgsIDOwnersIDParams) (*http.Response, error) { + req, err := NewDeleteOrgsIDOwnersIDRequest(c.service.ServerAPIURL(), orgID, userID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetOrgsIDSecrets(ctx context.Context, orgID string, params *GetOrgsIDSecretsParams) (*http.Response, error) { + req, err := NewGetOrgsIDSecretsRequest(c.service.ServerAPIURL(), orgID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchOrgsIDSecretsWithBody(ctx context.Context, orgID string, params *PatchOrgsIDSecretsParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchOrgsIDSecretsRequestWithBody(c.service.ServerAPIURL(), orgID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchOrgsIDSecrets(ctx context.Context, orgID string, params *PatchOrgsIDSecretsParams, body PatchOrgsIDSecretsJSONRequestBody) (*http.Response, error) { + req, err := NewPatchOrgsIDSecretsRequest(c.service.ServerAPIURL(), orgID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostOrgsIDSecretsWithBody(ctx context.Context, orgID string, params *PostOrgsIDSecretsParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostOrgsIDSecretsRequestWithBody(c.service.ServerAPIURL(), orgID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostOrgsIDSecrets(ctx context.Context, orgID string, params *PostOrgsIDSecretsParams, body PostOrgsIDSecretsJSONRequestBody) (*http.Response, error) { + req, err := NewPostOrgsIDSecretsRequest(c.service.ServerAPIURL(), orgID, params, body) + if err != nil { + 
return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteOrgsIDSecretsID(ctx context.Context, orgID string, secretID string, params *DeleteOrgsIDSecretsIDParams) (*http.Response, error) { + req, err := NewDeleteOrgsIDSecretsIDRequest(c.service.ServerAPIURL(), orgID, secretID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetPing(ctx context.Context) (*http.Response, error) { + req, err := NewGetPingRequest(c.service.ServerURL()) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) HeadPing(ctx context.Context) (*http.Response, error) { + req, err := NewHeadPingRequest(c.service.ServerURL()) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostQueryWithBody(ctx context.Context, params *PostQueryParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostQueryRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostQuery(ctx context.Context, params *PostQueryParams, body PostQueryJSONRequestBody) (*http.Response, error) { + req, err := NewPostQueryRequest(c.service.ServerAPIURL(), params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostQueryAnalyzeWithBody(ctx context.Context, params *PostQueryAnalyzeParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostQueryAnalyzeRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostQueryAnalyze(ctx context.Context, params *PostQueryAnalyzeParams, body PostQueryAnalyzeJSONRequestBody) (*http.Response, error) { + req, err := NewPostQueryAnalyzeRequest(c.service.ServerAPIURL(), params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostQueryAstWithBody(ctx context.Context, params *PostQueryAstParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostQueryAstRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostQueryAst(ctx context.Context, params *PostQueryAstParams, body PostQueryAstJSONRequestBody) (*http.Response, error) { + req, err := NewPostQueryAstRequest(c.service.ServerAPIURL(), params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetQuerySuggestions(ctx context.Context, params *GetQuerySuggestionsParams) (*http.Response, error) { + req, err := NewGetQuerySuggestionsRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetQuerySuggestionsName(ctx 
context.Context, name string, params *GetQuerySuggestionsNameParams) (*http.Response, error) { + req, err := NewGetQuerySuggestionsNameRequest(c.service.ServerAPIURL(), name, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetReady(ctx context.Context, params *GetReadyParams) (*http.Response, error) { + req, err := NewGetReadyRequest(c.service.ServerURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetRemoteConnections(ctx context.Context, params *GetRemoteConnectionsParams) (*http.Response, error) { + req, err := NewGetRemoteConnectionsRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostRemoteConnectionWithBody(ctx context.Context, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostRemoteConnectionRequestWithBody(c.service.ServerAPIURL(), contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostRemoteConnection(ctx context.Context, body PostRemoteConnectionJSONRequestBody) (*http.Response, error) { + req, err := NewPostRemoteConnectionRequest(c.service.ServerAPIURL(), body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteRemoteConnectionByID(ctx context.Context, remoteID string, params *DeleteRemoteConnectionByIDParams) (*http.Response, error) { + req, err := NewDeleteRemoteConnectionByIDRequest(c.service.ServerAPIURL(), remoteID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetRemoteConnectionByID(ctx context.Context, remoteID string, params *GetRemoteConnectionByIDParams) (*http.Response, error) { + req, err := NewGetRemoteConnectionByIDRequest(c.service.ServerAPIURL(), remoteID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchRemoteConnectionByIDWithBody(ctx context.Context, remoteID string, params *PatchRemoteConnectionByIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchRemoteConnectionByIDRequestWithBody(c.service.ServerAPIURL(), remoteID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchRemoteConnectionByID(ctx context.Context, remoteID string, params *PatchRemoteConnectionByIDParams, body PatchRemoteConnectionByIDJSONRequestBody) (*http.Response, error) { + req, err := NewPatchRemoteConnectionByIDRequest(c.service.ServerAPIURL(), remoteID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetReplications(ctx context.Context, params *GetReplicationsParams) (*http.Response, error) { + req, err := NewGetReplicationsRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func 
(c *Client) PostReplicationWithBody(ctx context.Context, params *PostReplicationParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostReplicationRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostReplication(ctx context.Context, params *PostReplicationParams, body PostReplicationJSONRequestBody) (*http.Response, error) { + req, err := NewPostReplicationRequest(c.service.ServerAPIURL(), params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteReplicationByID(ctx context.Context, replicationID string, params *DeleteReplicationByIDParams) (*http.Response, error) { + req, err := NewDeleteReplicationByIDRequest(c.service.ServerAPIURL(), replicationID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetReplicationByID(ctx context.Context, replicationID string, params *GetReplicationByIDParams) (*http.Response, error) { + req, err := NewGetReplicationByIDRequest(c.service.ServerAPIURL(), replicationID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchReplicationByIDWithBody(ctx context.Context, replicationID string, params *PatchReplicationByIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchReplicationByIDRequestWithBody(c.service.ServerAPIURL(), replicationID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchReplicationByID(ctx context.Context, replicationID string, params *PatchReplicationByIDParams, body PatchReplicationByIDJSONRequestBody) (*http.Response, error) { + req, err := NewPatchReplicationByIDRequest(c.service.ServerAPIURL(), replicationID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostValidateReplicationByID(ctx context.Context, replicationID string, params *PostValidateReplicationByIDParams) (*http.Response, error) { + req, err := NewPostValidateReplicationByIDRequest(c.service.ServerAPIURL(), replicationID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetResources(ctx context.Context, params *GetResourcesParams) (*http.Response, error) { + req, err := NewGetResourcesRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostRestoreBucketIDWithBody(ctx context.Context, bucketID string, params *PostRestoreBucketIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostRestoreBucketIDRequestWithBody(c.service.ServerAPIURL(), bucketID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostRestoreBucketMetadataWithBody(ctx context.Context, params *PostRestoreBucketMetadataParams, 
contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostRestoreBucketMetadataRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostRestoreBucketMetadata(ctx context.Context, params *PostRestoreBucketMetadataParams, body PostRestoreBucketMetadataJSONRequestBody) (*http.Response, error) { + req, err := NewPostRestoreBucketMetadataRequest(c.service.ServerAPIURL(), params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostRestoreKVWithBody(ctx context.Context, params *PostRestoreKVParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostRestoreKVRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostRestoreShardIdWithBody(ctx context.Context, shardID string, params *PostRestoreShardIdParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostRestoreShardIdRequestWithBody(c.service.ServerAPIURL(), shardID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostRestoreSQLWithBody(ctx context.Context, params *PostRestoreSQLParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostRestoreSQLRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetScrapers(ctx context.Context, params *GetScrapersParams) (*http.Response, error) { + req, err := NewGetScrapersRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostScrapersWithBody(ctx context.Context, params *PostScrapersParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostScrapersRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostScrapers(ctx context.Context, params *PostScrapersParams, body PostScrapersJSONRequestBody) (*http.Response, error) { + req, err := NewPostScrapersRequest(c.service.ServerAPIURL(), params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteScrapersID(ctx context.Context, scraperTargetID string, params *DeleteScrapersIDParams) (*http.Response, error) { + req, err := NewDeleteScrapersIDRequest(c.service.ServerAPIURL(), scraperTargetID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetScrapersID(ctx context.Context, scraperTargetID string, params *GetScrapersIDParams) (*http.Response, error) { + req, err := NewGetScrapersIDRequest(c.service.ServerAPIURL(), scraperTargetID, params) + if err != nil { + return nil, err + } + req = 
req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchScrapersIDWithBody(ctx context.Context, scraperTargetID string, params *PatchScrapersIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchScrapersIDRequestWithBody(c.service.ServerAPIURL(), scraperTargetID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchScrapersID(ctx context.Context, scraperTargetID string, params *PatchScrapersIDParams, body PatchScrapersIDJSONRequestBody) (*http.Response, error) { + req, err := NewPatchScrapersIDRequest(c.service.ServerAPIURL(), scraperTargetID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetScrapersIDLabels(ctx context.Context, scraperTargetID string, params *GetScrapersIDLabelsParams) (*http.Response, error) { + req, err := NewGetScrapersIDLabelsRequest(c.service.ServerAPIURL(), scraperTargetID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostScrapersIDLabelsWithBody(ctx context.Context, scraperTargetID string, params *PostScrapersIDLabelsParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostScrapersIDLabelsRequestWithBody(c.service.ServerAPIURL(), scraperTargetID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostScrapersIDLabels(ctx context.Context, scraperTargetID string, params *PostScrapersIDLabelsParams, body PostScrapersIDLabelsJSONRequestBody) (*http.Response, error) { + req, err := NewPostScrapersIDLabelsRequest(c.service.ServerAPIURL(), scraperTargetID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteScrapersIDLabelsID(ctx context.Context, scraperTargetID string, labelID string, params *DeleteScrapersIDLabelsIDParams) (*http.Response, error) { + req, err := NewDeleteScrapersIDLabelsIDRequest(c.service.ServerAPIURL(), scraperTargetID, labelID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetScrapersIDMembers(ctx context.Context, scraperTargetID string, params *GetScrapersIDMembersParams) (*http.Response, error) { + req, err := NewGetScrapersIDMembersRequest(c.service.ServerAPIURL(), scraperTargetID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostScrapersIDMembersWithBody(ctx context.Context, scraperTargetID string, params *PostScrapersIDMembersParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostScrapersIDMembersRequestWithBody(c.service.ServerAPIURL(), scraperTargetID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostScrapersIDMembers(ctx context.Context, scraperTargetID string, params *PostScrapersIDMembersParams, body PostScrapersIDMembersJSONRequestBody) (*http.Response, error) { + 
req, err := NewPostScrapersIDMembersRequest(c.service.ServerAPIURL(), scraperTargetID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteScrapersIDMembersID(ctx context.Context, scraperTargetID string, userID string, params *DeleteScrapersIDMembersIDParams) (*http.Response, error) { + req, err := NewDeleteScrapersIDMembersIDRequest(c.service.ServerAPIURL(), scraperTargetID, userID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetScrapersIDOwners(ctx context.Context, scraperTargetID string, params *GetScrapersIDOwnersParams) (*http.Response, error) { + req, err := NewGetScrapersIDOwnersRequest(c.service.ServerAPIURL(), scraperTargetID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostScrapersIDOwnersWithBody(ctx context.Context, scraperTargetID string, params *PostScrapersIDOwnersParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostScrapersIDOwnersRequestWithBody(c.service.ServerAPIURL(), scraperTargetID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostScrapersIDOwners(ctx context.Context, scraperTargetID string, params *PostScrapersIDOwnersParams, body PostScrapersIDOwnersJSONRequestBody) (*http.Response, error) { + req, err := NewPostScrapersIDOwnersRequest(c.service.ServerAPIURL(), scraperTargetID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteScrapersIDOwnersID(ctx context.Context, scraperTargetID string, userID string, params *DeleteScrapersIDOwnersIDParams) (*http.Response, error) { + req, err := NewDeleteScrapersIDOwnersIDRequest(c.service.ServerAPIURL(), scraperTargetID, userID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetSetup(ctx context.Context, params *GetSetupParams) (*http.Response, error) { + req, err := NewGetSetupRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostSetupWithBody(ctx context.Context, params *PostSetupParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostSetupRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostSetup(ctx context.Context, params *PostSetupParams, body PostSetupJSONRequestBody) (*http.Response, error) { + req, err := NewPostSetupRequest(c.service.ServerAPIURL(), params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostSignin(ctx context.Context, params *PostSigninParams) (*http.Response, error) { + req, err := NewPostSigninRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return 
c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostSignout(ctx context.Context, params *PostSignoutParams) (*http.Response, error) { + req, err := NewPostSignoutRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetSources(ctx context.Context, params *GetSourcesParams) (*http.Response, error) { + req, err := NewGetSourcesRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostSourcesWithBody(ctx context.Context, params *PostSourcesParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostSourcesRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostSources(ctx context.Context, params *PostSourcesParams, body PostSourcesJSONRequestBody) (*http.Response, error) { + req, err := NewPostSourcesRequest(c.service.ServerAPIURL(), params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteSourcesID(ctx context.Context, sourceID string, params *DeleteSourcesIDParams) (*http.Response, error) { + req, err := NewDeleteSourcesIDRequest(c.service.ServerAPIURL(), sourceID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetSourcesID(ctx context.Context, sourceID string, params *GetSourcesIDParams) (*http.Response, error) { + req, err := NewGetSourcesIDRequest(c.service.ServerAPIURL(), sourceID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchSourcesIDWithBody(ctx context.Context, sourceID string, params *PatchSourcesIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchSourcesIDRequestWithBody(c.service.ServerAPIURL(), sourceID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchSourcesID(ctx context.Context, sourceID string, params *PatchSourcesIDParams, body PatchSourcesIDJSONRequestBody) (*http.Response, error) { + req, err := NewPatchSourcesIDRequest(c.service.ServerAPIURL(), sourceID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetSourcesIDBuckets(ctx context.Context, sourceID string, params *GetSourcesIDBucketsParams) (*http.Response, error) { + req, err := NewGetSourcesIDBucketsRequest(c.service.ServerAPIURL(), sourceID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetSourcesIDHealth(ctx context.Context, sourceID string, params *GetSourcesIDHealthParams) (*http.Response, error) { + req, err := NewGetSourcesIDHealthRequest(c.service.ServerAPIURL(), sourceID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c 
*Client) ListStacks(ctx context.Context, params *ListStacksParams) (*http.Response, error) { + req, err := NewListStacksRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) CreateStackWithBody(ctx context.Context, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewCreateStackRequestWithBody(c.service.ServerAPIURL(), contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) CreateStack(ctx context.Context, body CreateStackJSONRequestBody) (*http.Response, error) { + req, err := NewCreateStackRequest(c.service.ServerAPIURL(), body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteStack(ctx context.Context, stackId string, params *DeleteStackParams) (*http.Response, error) { + req, err := NewDeleteStackRequest(c.service.ServerAPIURL(), stackId, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) ReadStack(ctx context.Context, stackId string) (*http.Response, error) { + req, err := NewReadStackRequest(c.service.ServerAPIURL(), stackId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) UpdateStackWithBody(ctx context.Context, stackId string, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewUpdateStackRequestWithBody(c.service.ServerAPIURL(), stackId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) UpdateStack(ctx context.Context, stackId string, body UpdateStackJSONRequestBody) (*http.Response, error) { + req, err := NewUpdateStackRequest(c.service.ServerAPIURL(), stackId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) UninstallStack(ctx context.Context, stackId string) (*http.Response, error) { + req, err := NewUninstallStackRequest(c.service.ServerAPIURL(), stackId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetTasks(ctx context.Context, params *GetTasksParams) (*http.Response, error) { + req, err := NewGetTasksRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostTasksWithBody(ctx context.Context, params *PostTasksParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostTasksRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostTasks(ctx context.Context, params *PostTasksParams, body PostTasksJSONRequestBody) (*http.Response, error) { + req, err := NewPostTasksRequest(c.service.ServerAPIURL(), params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c 
*Client) DeleteTasksID(ctx context.Context, taskID string, params *DeleteTasksIDParams) (*http.Response, error) { + req, err := NewDeleteTasksIDRequest(c.service.ServerAPIURL(), taskID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetTasksID(ctx context.Context, taskID string, params *GetTasksIDParams) (*http.Response, error) { + req, err := NewGetTasksIDRequest(c.service.ServerAPIURL(), taskID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchTasksIDWithBody(ctx context.Context, taskID string, params *PatchTasksIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPatchTasksIDRequestWithBody(c.service.ServerAPIURL(), taskID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PatchTasksID(ctx context.Context, taskID string, params *PatchTasksIDParams, body PatchTasksIDJSONRequestBody) (*http.Response, error) { + req, err := NewPatchTasksIDRequest(c.service.ServerAPIURL(), taskID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetTasksIDLabels(ctx context.Context, taskID string, params *GetTasksIDLabelsParams) (*http.Response, error) { + req, err := NewGetTasksIDLabelsRequest(c.service.ServerAPIURL(), taskID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostTasksIDLabelsWithBody(ctx context.Context, taskID string, params *PostTasksIDLabelsParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostTasksIDLabelsRequestWithBody(c.service.ServerAPIURL(), taskID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostTasksIDLabels(ctx context.Context, taskID string, params *PostTasksIDLabelsParams, body PostTasksIDLabelsJSONRequestBody) (*http.Response, error) { + req, err := NewPostTasksIDLabelsRequest(c.service.ServerAPIURL(), taskID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteTasksIDLabelsID(ctx context.Context, taskID string, labelID string, params *DeleteTasksIDLabelsIDParams) (*http.Response, error) { + req, err := NewDeleteTasksIDLabelsIDRequest(c.service.ServerAPIURL(), taskID, labelID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetTasksIDLogs(ctx context.Context, taskID string, params *GetTasksIDLogsParams) (*http.Response, error) { + req, err := NewGetTasksIDLogsRequest(c.service.ServerAPIURL(), taskID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetTasksIDMembers(ctx context.Context, taskID string, params *GetTasksIDMembersParams) (*http.Response, error) { + req, err := NewGetTasksIDMembersRequest(c.service.ServerAPIURL(), taskID, params) + if err != nil { + return nil, err + } + req = 
req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostTasksIDMembersWithBody(ctx context.Context, taskID string, params *PostTasksIDMembersParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostTasksIDMembersRequestWithBody(c.service.ServerAPIURL(), taskID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostTasksIDMembers(ctx context.Context, taskID string, params *PostTasksIDMembersParams, body PostTasksIDMembersJSONRequestBody) (*http.Response, error) { + req, err := NewPostTasksIDMembersRequest(c.service.ServerAPIURL(), taskID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteTasksIDMembersID(ctx context.Context, taskID string, userID string, params *DeleteTasksIDMembersIDParams) (*http.Response, error) { + req, err := NewDeleteTasksIDMembersIDRequest(c.service.ServerAPIURL(), taskID, userID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetTasksIDOwners(ctx context.Context, taskID string, params *GetTasksIDOwnersParams) (*http.Response, error) { + req, err := NewGetTasksIDOwnersRequest(c.service.ServerAPIURL(), taskID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostTasksIDOwnersWithBody(ctx context.Context, taskID string, params *PostTasksIDOwnersParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostTasksIDOwnersRequestWithBody(c.service.ServerAPIURL(), taskID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostTasksIDOwners(ctx context.Context, taskID string, params *PostTasksIDOwnersParams, body PostTasksIDOwnersJSONRequestBody) (*http.Response, error) { + req, err := NewPostTasksIDOwnersRequest(c.service.ServerAPIURL(), taskID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteTasksIDOwnersID(ctx context.Context, taskID string, userID string, params *DeleteTasksIDOwnersIDParams) (*http.Response, error) { + req, err := NewDeleteTasksIDOwnersIDRequest(c.service.ServerAPIURL(), taskID, userID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetTasksIDRuns(ctx context.Context, taskID string, params *GetTasksIDRunsParams) (*http.Response, error) { + req, err := NewGetTasksIDRunsRequest(c.service.ServerAPIURL(), taskID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostTasksIDRunsWithBody(ctx context.Context, taskID string, params *PostTasksIDRunsParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostTasksIDRunsRequestWithBody(c.service.ServerAPIURL(), taskID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c 
*Client) PostTasksIDRuns(ctx context.Context, taskID string, params *PostTasksIDRunsParams, body PostTasksIDRunsJSONRequestBody) (*http.Response, error) { + req, err := NewPostTasksIDRunsRequest(c.service.ServerAPIURL(), taskID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteTasksIDRunsID(ctx context.Context, taskID string, runID string, params *DeleteTasksIDRunsIDParams) (*http.Response, error) { + req, err := NewDeleteTasksIDRunsIDRequest(c.service.ServerAPIURL(), taskID, runID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetTasksIDRunsID(ctx context.Context, taskID string, runID string, params *GetTasksIDRunsIDParams) (*http.Response, error) { + req, err := NewGetTasksIDRunsIDRequest(c.service.ServerAPIURL(), taskID, runID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetTasksIDRunsIDLogs(ctx context.Context, taskID string, runID string, params *GetTasksIDRunsIDLogsParams) (*http.Response, error) { + req, err := NewGetTasksIDRunsIDLogsRequest(c.service.ServerAPIURL(), taskID, runID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostTasksIDRunsIDRetryWithBody(ctx context.Context, taskID string, runID string, params *PostTasksIDRunsIDRetryParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostTasksIDRunsIDRetryRequestWithBody(c.service.ServerAPIURL(), taskID, runID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetTelegrafPlugins(ctx context.Context, params *GetTelegrafPluginsParams) (*http.Response, error) { + req, err := NewGetTelegrafPluginsRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetTelegrafs(ctx context.Context, params *GetTelegrafsParams) (*http.Response, error) { + req, err := NewGetTelegrafsRequest(c.service.ServerAPIURL(), params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostTelegrafsWithBody(ctx context.Context, params *PostTelegrafsParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostTelegrafsRequestWithBody(c.service.ServerAPIURL(), params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostTelegrafs(ctx context.Context, params *PostTelegrafsParams, body PostTelegrafsJSONRequestBody) (*http.Response, error) { + req, err := NewPostTelegrafsRequest(c.service.ServerAPIURL(), params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteTelegrafsID(ctx context.Context, telegrafID string, params *DeleteTelegrafsIDParams) (*http.Response, error) { + req, err := NewDeleteTelegrafsIDRequest(c.service.ServerAPIURL(), telegrafID, params) + if err != nil { + return nil, err + } + 
req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetTelegrafsID(ctx context.Context, telegrafID string, params *GetTelegrafsIDParams) (*http.Response, error) { + req, err := NewGetTelegrafsIDRequest(c.service.ServerAPIURL(), telegrafID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PutTelegrafsIDWithBody(ctx context.Context, telegrafID string, params *PutTelegrafsIDParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPutTelegrafsIDRequestWithBody(c.service.ServerAPIURL(), telegrafID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PutTelegrafsID(ctx context.Context, telegrafID string, params *PutTelegrafsIDParams, body PutTelegrafsIDJSONRequestBody) (*http.Response, error) { + req, err := NewPutTelegrafsIDRequest(c.service.ServerAPIURL(), telegrafID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetTelegrafsIDLabels(ctx context.Context, telegrafID string, params *GetTelegrafsIDLabelsParams) (*http.Response, error) { + req, err := NewGetTelegrafsIDLabelsRequest(c.service.ServerAPIURL(), telegrafID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostTelegrafsIDLabelsWithBody(ctx context.Context, telegrafID string, params *PostTelegrafsIDLabelsParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostTelegrafsIDLabelsRequestWithBody(c.service.ServerAPIURL(), telegrafID, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostTelegrafsIDLabels(ctx context.Context, telegrafID string, params *PostTelegrafsIDLabelsParams, body PostTelegrafsIDLabelsJSONRequestBody) (*http.Response, error) { + req, err := NewPostTelegrafsIDLabelsRequest(c.service.ServerAPIURL(), telegrafID, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) DeleteTelegrafsIDLabelsID(ctx context.Context, telegrafID string, labelID string, params *DeleteTelegrafsIDLabelsIDParams) (*http.Response, error) { + req, err := NewDeleteTelegrafsIDLabelsIDRequest(c.service.ServerAPIURL(), telegrafID, labelID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) GetTelegrafsIDMembers(ctx context.Context, telegrafID string, params *GetTelegrafsIDMembersParams) (*http.Response, error) { + req, err := NewGetTelegrafsIDMembersRequest(c.service.ServerAPIURL(), telegrafID, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + return c.service.DoHTTPRequestWithResponse(req, nil) +} + +func (c *Client) PostTelegrafsIDMembersWithBody(ctx context.Context, telegrafID string, params *PostTelegrafsIDMembersParams, contentType string, body io.Reader) (*http.Response, error) { + req, err := NewPostTelegrafsIDMembersRequestWithBody(c.service.ServerAPIURL(), telegrafID, params, contentType, body) + if err != nil { + return nil, err + } + 
[... remainder of the auto-generated InfluxDB API client pulled in with the github.com/influxdata/influxdb-client-go/v2 dependency, which repeats the same pattern: Client wrapper methods for the Telegraf member/owner, template apply/export, user, user-password, variable, variable-label, and write endpoints, each building its request with the matching New*Request / New*RequestWithBody helper against c.service.ServerAPIURL(), attaching the context, and dispatching through c.service.DoHTTPRequestWithResponse; followed by the request builders themselves (NewGetRoutesRequest, authorizations, backup kv/metadata/shards, buckets with labels/members/owners, checks with labels/query, config, dashboards), each resolving the operation path against the server URL, encoding query, path, and header parameters with runtime.StyleParamWithLocation (including the optional Zap-Trace-Span header), and returning the assembled *http.Request. ...]
+ req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostDashboardsRequest calls the generic PostDashboards builder with application/json body +func NewPostDashboardsRequest(server string, params *PostDashboardsParams, body PostDashboardsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostDashboardsRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPostDashboardsRequestWithBody generates requests for PostDashboards with any type of body +func NewPostDashboardsRequestWithBody(server string, params *PostDashboardsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteDashboardsIDRequest generates requests for DeleteDashboardsID +func NewDeleteDashboardsIDRequest(server string, dashboardID string, params *DeleteDashboardsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetDashboardsIDRequest generates requests for GetDashboardsID +func NewGetDashboardsIDRequest(server string, dashboardID string, params *GetDashboardsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.Include != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "include", runtime.ParamLocationQuery, *params.Include); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPatchDashboardsIDRequest calls the generic PatchDashboardsID builder with application/json body +func NewPatchDashboardsIDRequest(server string, dashboardID string, params *PatchDashboardsIDParams, body PatchDashboardsIDJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchDashboardsIDRequestWithBody(server, dashboardID, params, "application/json", bodyReader) +} + +// NewPatchDashboardsIDRequestWithBody generates requests for PatchDashboardsID with any type of body +func NewPatchDashboardsIDRequestWithBody(server string, dashboardID string, params *PatchDashboardsIDParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostDashboardsIDCellsRequest calls the generic PostDashboardsIDCells builder with application/json body +func NewPostDashboardsIDCellsRequest(server string, dashboardID string, params *PostDashboardsIDCellsParams, body PostDashboardsIDCellsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostDashboardsIDCellsRequestWithBody(server, dashboardID, params, "application/json", bodyReader) +} + +// NewPostDashboardsIDCellsRequestWithBody generates requests for PostDashboardsIDCells with any type of body +func NewPostDashboardsIDCellsRequestWithBody(server string, dashboardID string, params *PostDashboardsIDCellsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s/cells", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPutDashboardsIDCellsRequest calls the generic PutDashboardsIDCells builder with application/json body +func NewPutDashboardsIDCellsRequest(server string, dashboardID string, params *PutDashboardsIDCellsParams, body PutDashboardsIDCellsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutDashboardsIDCellsRequestWithBody(server, dashboardID, params, "application/json", bodyReader) +} + +// NewPutDashboardsIDCellsRequestWithBody generates requests for PutDashboardsIDCells with any type of body +func NewPutDashboardsIDCellsRequestWithBody(server string, dashboardID string, params *PutDashboardsIDCellsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s/cells", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteDashboardsIDCellsIDRequest generates requests for DeleteDashboardsIDCellsID +func NewDeleteDashboardsIDCellsIDRequest(server string, dashboardID string, cellID string, params *DeleteDashboardsIDCellsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "cellID", runtime.ParamLocationPath, cellID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s/cells/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPatchDashboardsIDCellsIDRequest calls the generic PatchDashboardsIDCellsID builder with application/json body +func NewPatchDashboardsIDCellsIDRequest(server string, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDParams, body PatchDashboardsIDCellsIDJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchDashboardsIDCellsIDRequestWithBody(server, dashboardID, cellID, params, "application/json", bodyReader) +} + +// NewPatchDashboardsIDCellsIDRequestWithBody generates requests for PatchDashboardsIDCellsID with any type of body +func NewPatchDashboardsIDCellsIDRequestWithBody(server string, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "cellID", runtime.ParamLocationPath, cellID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s/cells/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetDashboardsIDCellsIDViewRequest generates requests for GetDashboardsIDCellsIDView +func NewGetDashboardsIDCellsIDViewRequest(server string, dashboardID string, cellID string, params *GetDashboardsIDCellsIDViewParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "cellID", runtime.ParamLocationPath, cellID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s/cells/%s/view", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPatchDashboardsIDCellsIDViewRequest calls the generic PatchDashboardsIDCellsIDView builder with application/json body +func NewPatchDashboardsIDCellsIDViewRequest(server string, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDViewParams, body PatchDashboardsIDCellsIDViewJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchDashboardsIDCellsIDViewRequestWithBody(server, dashboardID, cellID, params, "application/json", bodyReader) +} + +// NewPatchDashboardsIDCellsIDViewRequestWithBody generates requests for PatchDashboardsIDCellsIDView with any type of body +func NewPatchDashboardsIDCellsIDViewRequestWithBody(server string, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDViewParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "cellID", runtime.ParamLocationPath, cellID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s/cells/%s/view", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetDashboardsIDLabelsRequest generates requests for GetDashboardsIDLabels +func NewGetDashboardsIDLabelsRequest(server string, dashboardID string, params *GetDashboardsIDLabelsParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s/labels", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostDashboardsIDLabelsRequest calls the generic PostDashboardsIDLabels builder with application/json body +func NewPostDashboardsIDLabelsRequest(server string, dashboardID string, params *PostDashboardsIDLabelsParams, body PostDashboardsIDLabelsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostDashboardsIDLabelsRequestWithBody(server, dashboardID, params, "application/json", bodyReader) +} + +// NewPostDashboardsIDLabelsRequestWithBody generates requests for PostDashboardsIDLabels with any type of body +func NewPostDashboardsIDLabelsRequestWithBody(server string, dashboardID string, params *PostDashboardsIDLabelsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s/labels", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteDashboardsIDLabelsIDRequest generates requests for DeleteDashboardsIDLabelsID +func NewDeleteDashboardsIDLabelsIDRequest(server string, dashboardID string, labelID string, params *DeleteDashboardsIDLabelsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "labelID", runtime.ParamLocationPath, labelID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s/labels/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetDashboardsIDMembersRequest generates requests for GetDashboardsIDMembers +func NewGetDashboardsIDMembersRequest(server string, dashboardID string, params *GetDashboardsIDMembersParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s/members", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostDashboardsIDMembersRequest calls the generic PostDashboardsIDMembers builder with application/json body +func NewPostDashboardsIDMembersRequest(server string, dashboardID string, params *PostDashboardsIDMembersParams, body PostDashboardsIDMembersJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostDashboardsIDMembersRequestWithBody(server, dashboardID, params, "application/json", bodyReader) +} + +// NewPostDashboardsIDMembersRequestWithBody generates requests for PostDashboardsIDMembers with any type of body +func NewPostDashboardsIDMembersRequestWithBody(server string, dashboardID string, params *PostDashboardsIDMembersParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s/members", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteDashboardsIDMembersIDRequest generates requests for DeleteDashboardsIDMembersID +func NewDeleteDashboardsIDMembersIDRequest(server string, dashboardID string, userID string, params *DeleteDashboardsIDMembersIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "userID", runtime.ParamLocationPath, userID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s/members/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetDashboardsIDOwnersRequest generates requests for GetDashboardsIDOwners +func NewGetDashboardsIDOwnersRequest(server string, dashboardID string, params *GetDashboardsIDOwnersParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s/owners", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostDashboardsIDOwnersRequest calls the generic PostDashboardsIDOwners builder with application/json body +func NewPostDashboardsIDOwnersRequest(server string, dashboardID string, params *PostDashboardsIDOwnersParams, body PostDashboardsIDOwnersJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostDashboardsIDOwnersRequestWithBody(server, dashboardID, params, "application/json", bodyReader) +} + +// NewPostDashboardsIDOwnersRequestWithBody generates requests for PostDashboardsIDOwners with any type of body +func NewPostDashboardsIDOwnersRequestWithBody(server string, dashboardID string, params *PostDashboardsIDOwnersParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s/owners", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteDashboardsIDOwnersIDRequest generates requests for DeleteDashboardsIDOwnersID +func NewDeleteDashboardsIDOwnersIDRequest(server string, dashboardID string, userID string, params *DeleteDashboardsIDOwnersIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dashboardID", runtime.ParamLocationPath, dashboardID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "userID", runtime.ParamLocationPath, userID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dashboards/%s/owners/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetDBRPsRequest generates requests for GetDBRPs +func NewGetDBRPsRequest(server string, params *GetDBRPsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dbrps") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.OrgID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "orgID", runtime.ParamLocationQuery, *params.OrgID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Org != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "org", runtime.ParamLocationQuery, *params.Org); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Id != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, *params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.BucketID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "bucketID", runtime.ParamLocationQuery, *params.BucketID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Default != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "default", runtime.ParamLocationQuery, *params.Default); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Db != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "db", runtime.ParamLocationQuery, *params.Db); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Rp != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "rp", runtime.ParamLocationQuery, *params.Rp); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v 
:= range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostDBRPRequest calls the generic PostDBRP builder with application/json body +func NewPostDBRPRequest(server string, params *PostDBRPParams, body PostDBRPJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostDBRPRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPostDBRPRequestWithBody generates requests for PostDBRP with any type of body +func NewPostDBRPRequestWithBody(server string, params *PostDBRPParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dbrps") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteDBRPIDRequest generates requests for DeleteDBRPID +func NewDeleteDBRPIDRequest(server string, dbrpID string, params *DeleteDBRPIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dbrpID", runtime.ParamLocationPath, dbrpID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dbrps/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.OrgID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "orgID", runtime.ParamLocationQuery, *params.OrgID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Org != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "org", runtime.ParamLocationQuery, *params.Org); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetDBRPsIDRequest generates requests for GetDBRPsID +func NewGetDBRPsIDRequest(server string, dbrpID string, params *GetDBRPsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dbrpID", runtime.ParamLocationPath, dbrpID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dbrps/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.OrgID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "orgID", runtime.ParamLocationQuery, *params.OrgID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Org != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "org", runtime.ParamLocationQuery, *params.Org); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPatchDBRPIDRequest calls the generic PatchDBRPID builder with application/json body +func NewPatchDBRPIDRequest(server string, dbrpID string, params *PatchDBRPIDParams, body PatchDBRPIDJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchDBRPIDRequestWithBody(server, dbrpID, params, "application/json", bodyReader) +} + +// NewPatchDBRPIDRequestWithBody generates requests for PatchDBRPID with any type of body +func NewPatchDBRPIDRequestWithBody(server string, dbrpID string, params *PatchDBRPIDParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "dbrpID", runtime.ParamLocationPath, dbrpID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/dbrps/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.OrgID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "orgID", runtime.ParamLocationQuery, *params.OrgID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Org != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "org", runtime.ParamLocationQuery, *params.Org); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostDeleteRequest calls the generic PostDelete builder with application/json body +func NewPostDeleteRequest(server string, params *PostDeleteParams, body PostDeleteJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostDeleteRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPostDeleteRequestWithBody generates requests for PostDelete with any type of body +func NewPostDeleteRequestWithBody(server string, params *PostDeleteParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/delete") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.Org != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "org", runtime.ParamLocationQuery, *params.Org); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Bucket != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "bucket", runtime.ParamLocationQuery, *params.Bucket); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.OrgID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "orgID", runtime.ParamLocationQuery, *params.OrgID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.BucketID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "bucketID", runtime.ParamLocationQuery, *params.BucketID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetFlagsRequest generates requests for GetFlags +func NewGetFlagsRequest(server string, params *GetFlagsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/flags") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetHealthRequest generates requests for GetHealth +func NewGetHealthRequest(server string, params *GetHealthParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/health") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetLabelsRequest generates requests for GetLabels +func NewGetLabelsRequest(server string, params *GetLabelsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/labels") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.OrgID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "orgID", runtime.ParamLocationQuery, *params.OrgID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostLabelsRequest calls the generic PostLabels builder with application/json body +func NewPostLabelsRequest(server string, body PostLabelsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostLabelsRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostLabelsRequestWithBody generates requests for PostLabels with any type of body +func NewPostLabelsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/labels") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteLabelsIDRequest generates requests for DeleteLabelsID +func NewDeleteLabelsIDRequest(server string, labelID string, params *DeleteLabelsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "labelID", runtime.ParamLocationPath, labelID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/labels/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetLabelsIDRequest generates requests for GetLabelsID +func NewGetLabelsIDRequest(server string, labelID string, params *GetLabelsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "labelID", runtime.ParamLocationPath, labelID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/labels/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPatchLabelsIDRequest calls the generic PatchLabelsID builder with application/json body +func NewPatchLabelsIDRequest(server string, labelID string, params *PatchLabelsIDParams, body PatchLabelsIDJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchLabelsIDRequestWithBody(server, labelID, params, "application/json", bodyReader) +} + +// NewPatchLabelsIDRequestWithBody generates requests for PatchLabelsID with any type of body +func NewPatchLabelsIDRequestWithBody(server string, labelID string, params *PatchLabelsIDParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "labelID", runtime.ParamLocationPath, labelID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/labels/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetLegacyAuthorizationsRequest generates requests for GetLegacyAuthorizations +func NewGetLegacyAuthorizationsRequest(server string, params *GetLegacyAuthorizationsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/legacy/authorizations") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.UserID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "userID", runtime.ParamLocationQuery, *params.UserID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.User != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "user", runtime.ParamLocationQuery, *params.User); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.OrgID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "orgID", runtime.ParamLocationQuery, *params.OrgID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Org != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "org", runtime.ParamLocationQuery, *params.Org); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Token != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "token", runtime.ParamLocationQuery, *params.Token); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.AuthID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "authID", runtime.ParamLocationQuery, *params.AuthID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + 
if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostLegacyAuthorizationsRequest calls the generic PostLegacyAuthorizations builder with application/json body +func NewPostLegacyAuthorizationsRequest(server string, params *PostLegacyAuthorizationsParams, body PostLegacyAuthorizationsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostLegacyAuthorizationsRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPostLegacyAuthorizationsRequestWithBody generates requests for PostLegacyAuthorizations with any type of body +func NewPostLegacyAuthorizationsRequestWithBody(server string, params *PostLegacyAuthorizationsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/legacy/authorizations") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteLegacyAuthorizationsIDRequest generates requests for DeleteLegacyAuthorizationsID +func NewDeleteLegacyAuthorizationsIDRequest(server string, authID string, params *DeleteLegacyAuthorizationsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "authID", runtime.ParamLocationPath, authID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/legacy/authorizations/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetLegacyAuthorizationsIDRequest generates requests for GetLegacyAuthorizationsID +func NewGetLegacyAuthorizationsIDRequest(server string, authID string, params *GetLegacyAuthorizationsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "authID", runtime.ParamLocationPath, authID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/legacy/authorizations/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPatchLegacyAuthorizationsIDRequest calls the generic PatchLegacyAuthorizationsID builder with application/json body +func NewPatchLegacyAuthorizationsIDRequest(server string, authID string, params *PatchLegacyAuthorizationsIDParams, body PatchLegacyAuthorizationsIDJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchLegacyAuthorizationsIDRequestWithBody(server, authID, params, "application/json", bodyReader) +} + +// NewPatchLegacyAuthorizationsIDRequestWithBody generates requests for PatchLegacyAuthorizationsID with any type of body +func NewPatchLegacyAuthorizationsIDRequestWithBody(server string, authID string, params *PatchLegacyAuthorizationsIDParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "authID", runtime.ParamLocationPath, authID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/legacy/authorizations/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostLegacyAuthorizationsIDPasswordRequest calls the generic PostLegacyAuthorizationsIDPassword builder with application/json body +func NewPostLegacyAuthorizationsIDPasswordRequest(server string, authID string, params *PostLegacyAuthorizationsIDPasswordParams, body PostLegacyAuthorizationsIDPasswordJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostLegacyAuthorizationsIDPasswordRequestWithBody(server, authID, params, "application/json", bodyReader) +} + +// NewPostLegacyAuthorizationsIDPasswordRequestWithBody generates requests for PostLegacyAuthorizationsIDPassword with any type of body +func NewPostLegacyAuthorizationsIDPasswordRequestWithBody(server string, authID string, params *PostLegacyAuthorizationsIDPasswordParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "authID", runtime.ParamLocationPath, authID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/legacy/authorizations/%s/password", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetMeRequest generates requests for GetMe +func NewGetMeRequest(server string, params *GetMeParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/me") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPutMePasswordRequest calls the generic PutMePassword builder with application/json body +func NewPutMePasswordRequest(server string, params *PutMePasswordParams, body PutMePasswordJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutMePasswordRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPutMePasswordRequestWithBody generates requests for PutMePassword with any type of body +func NewPutMePasswordRequestWithBody(server string, params *PutMePasswordParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/me/password") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetMetricsRequest generates requests for GetMetrics +func NewGetMetricsRequest(server string, params *GetMetricsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/metrics") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetNotificationEndpointsRequest generates requests for GetNotificationEndpoints +func NewGetNotificationEndpointsRequest(server string, params *GetNotificationEndpointsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationEndpoints") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.Offset != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "offset", runtime.ParamLocationQuery, *params.Offset); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Limit != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "limit", runtime.ParamLocationQuery, *params.Limit); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "orgID", runtime.ParamLocationQuery, params.OrgID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewCreateNotificationEndpointRequest calls the generic CreateNotificationEndpoint builder with application/json body +func NewCreateNotificationEndpointRequest(server string, body CreateNotificationEndpointJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateNotificationEndpointRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateNotificationEndpointRequestWithBody generates requests for CreateNotificationEndpoint with any type of body +func NewCreateNotificationEndpointRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationEndpoints") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteNotificationEndpointsIDRequest generates requests for DeleteNotificationEndpointsID +func NewDeleteNotificationEndpointsIDRequest(server string, endpointID string, params *DeleteNotificationEndpointsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "endpointID", runtime.ParamLocationPath, endpointID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationEndpoints/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetNotificationEndpointsIDRequest generates requests for GetNotificationEndpointsID +func NewGetNotificationEndpointsIDRequest(server string, endpointID string, params *GetNotificationEndpointsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "endpointID", runtime.ParamLocationPath, endpointID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationEndpoints/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPatchNotificationEndpointsIDRequest calls the generic PatchNotificationEndpointsID builder with application/json body +func NewPatchNotificationEndpointsIDRequest(server string, endpointID string, params *PatchNotificationEndpointsIDParams, body PatchNotificationEndpointsIDJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchNotificationEndpointsIDRequestWithBody(server, endpointID, params, "application/json", bodyReader) +} + +// NewPatchNotificationEndpointsIDRequestWithBody generates requests for PatchNotificationEndpointsID with any type of body +func NewPatchNotificationEndpointsIDRequestWithBody(server string, endpointID string, params *PatchNotificationEndpointsIDParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "endpointID", runtime.ParamLocationPath, endpointID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationEndpoints/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPutNotificationEndpointsIDRequest calls the generic PutNotificationEndpointsID builder with application/json body +func NewPutNotificationEndpointsIDRequest(server string, endpointID string, params *PutNotificationEndpointsIDParams, body PutNotificationEndpointsIDJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutNotificationEndpointsIDRequestWithBody(server, endpointID, params, "application/json", bodyReader) +} + +// NewPutNotificationEndpointsIDRequestWithBody generates requests for PutNotificationEndpointsID with any type of body +func NewPutNotificationEndpointsIDRequestWithBody(server string, endpointID string, params *PutNotificationEndpointsIDParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "endpointID", runtime.ParamLocationPath, endpointID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationEndpoints/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetNotificationEndpointsIDLabelsRequest generates requests for GetNotificationEndpointsIDLabels +func NewGetNotificationEndpointsIDLabelsRequest(server string, endpointID string, params *GetNotificationEndpointsIDLabelsParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "endpointID", runtime.ParamLocationPath, endpointID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationEndpoints/%s/labels", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostNotificationEndpointIDLabelsRequest calls the generic PostNotificationEndpointIDLabels builder with application/json body +func NewPostNotificationEndpointIDLabelsRequest(server string, endpointID string, params *PostNotificationEndpointIDLabelsParams, body PostNotificationEndpointIDLabelsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostNotificationEndpointIDLabelsRequestWithBody(server, endpointID, params, "application/json", bodyReader) +} + +// NewPostNotificationEndpointIDLabelsRequestWithBody generates requests for PostNotificationEndpointIDLabels with any type of body +func NewPostNotificationEndpointIDLabelsRequestWithBody(server string, endpointID string, params *PostNotificationEndpointIDLabelsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "endpointID", runtime.ParamLocationPath, endpointID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationEndpoints/%s/labels", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteNotificationEndpointsIDLabelsIDRequest generates requests for DeleteNotificationEndpointsIDLabelsID +func NewDeleteNotificationEndpointsIDLabelsIDRequest(server string, endpointID string, labelID string, params *DeleteNotificationEndpointsIDLabelsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "endpointID", runtime.ParamLocationPath, endpointID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "labelID", runtime.ParamLocationPath, labelID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationEndpoints/%s/labels/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetNotificationRulesRequest generates requests for GetNotificationRules +func NewGetNotificationRulesRequest(server string, params *GetNotificationRulesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationRules") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.Offset != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "offset", runtime.ParamLocationQuery, *params.Offset); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Limit != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "limit", runtime.ParamLocationQuery, *params.Limit); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "orgID", runtime.ParamLocationQuery, params.OrgID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if params.CheckID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "checkID", runtime.ParamLocationQuery, *params.CheckID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Tag != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "tag", runtime.ParamLocationQuery, *params.Tag); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewCreateNotificationRuleRequest calls the generic CreateNotificationRule builder with application/json body +func 
NewCreateNotificationRuleRequest(server string, body CreateNotificationRuleJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateNotificationRuleRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateNotificationRuleRequestWithBody generates requests for CreateNotificationRule with any type of body +func NewCreateNotificationRuleRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationRules") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteNotificationRulesIDRequest generates requests for DeleteNotificationRulesID +func NewDeleteNotificationRulesIDRequest(server string, ruleID string, params *DeleteNotificationRulesIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ruleID", runtime.ParamLocationPath, ruleID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationRules/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetNotificationRulesIDRequest generates requests for GetNotificationRulesID +func NewGetNotificationRulesIDRequest(server string, ruleID string, params *GetNotificationRulesIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ruleID", runtime.ParamLocationPath, ruleID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationRules/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPatchNotificationRulesIDRequest calls the generic PatchNotificationRulesID builder with application/json body +func NewPatchNotificationRulesIDRequest(server string, ruleID string, params *PatchNotificationRulesIDParams, body PatchNotificationRulesIDJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchNotificationRulesIDRequestWithBody(server, ruleID, params, "application/json", bodyReader) +} + +// NewPatchNotificationRulesIDRequestWithBody generates requests for PatchNotificationRulesID with any type of body +func NewPatchNotificationRulesIDRequestWithBody(server string, ruleID string, params *PatchNotificationRulesIDParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ruleID", runtime.ParamLocationPath, ruleID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationRules/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPutNotificationRulesIDRequest calls the generic PutNotificationRulesID builder with application/json body +func NewPutNotificationRulesIDRequest(server string, ruleID string, params *PutNotificationRulesIDParams, body PutNotificationRulesIDJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutNotificationRulesIDRequestWithBody(server, ruleID, params, "application/json", bodyReader) +} + +// NewPutNotificationRulesIDRequestWithBody generates requests for PutNotificationRulesID with any type of body +func NewPutNotificationRulesIDRequestWithBody(server string, ruleID string, params *PutNotificationRulesIDParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ruleID", runtime.ParamLocationPath, ruleID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationRules/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetNotificationRulesIDLabelsRequest generates requests for GetNotificationRulesIDLabels +func NewGetNotificationRulesIDLabelsRequest(server string, ruleID string, params *GetNotificationRulesIDLabelsParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ruleID", runtime.ParamLocationPath, ruleID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationRules/%s/labels", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostNotificationRuleIDLabelsRequest calls the generic PostNotificationRuleIDLabels builder with application/json body +func NewPostNotificationRuleIDLabelsRequest(server string, ruleID string, params *PostNotificationRuleIDLabelsParams, body PostNotificationRuleIDLabelsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostNotificationRuleIDLabelsRequestWithBody(server, ruleID, params, "application/json", bodyReader) +} + +// NewPostNotificationRuleIDLabelsRequestWithBody generates requests for PostNotificationRuleIDLabels with any type of body +func NewPostNotificationRuleIDLabelsRequestWithBody(server string, ruleID string, params *PostNotificationRuleIDLabelsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ruleID", runtime.ParamLocationPath, ruleID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationRules/%s/labels", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteNotificationRulesIDLabelsIDRequest generates requests for DeleteNotificationRulesIDLabelsID +func NewDeleteNotificationRulesIDLabelsIDRequest(server string, ruleID string, labelID string, params *DeleteNotificationRulesIDLabelsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ruleID", runtime.ParamLocationPath, ruleID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "labelID", runtime.ParamLocationPath, labelID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationRules/%s/labels/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetNotificationRulesIDQueryRequest generates requests for GetNotificationRulesIDQuery +func NewGetNotificationRulesIDQueryRequest(server string, ruleID string, params *GetNotificationRulesIDQueryParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ruleID", runtime.ParamLocationPath, ruleID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/notificationRules/%s/query", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetOrgsRequest generates requests for GetOrgs +func NewGetOrgsRequest(server string, params *GetOrgsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/orgs") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.Offset != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "offset", runtime.ParamLocationQuery, *params.Offset); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Limit != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "limit", runtime.ParamLocationQuery, *params.Limit); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Descending != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "descending", runtime.ParamLocationQuery, *params.Descending); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Org != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "org", runtime.ParamLocationQuery, *params.Org); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.OrgID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "orgID", runtime.ParamLocationQuery, *params.OrgID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.UserID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "userID", runtime.ParamLocationQuery, *params.UserID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostOrgsRequest calls the generic PostOrgs builder with application/json body +func NewPostOrgsRequest(server string, params *PostOrgsParams, body PostOrgsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostOrgsRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPostOrgsRequestWithBody generates requests for PostOrgs with any type of body +func NewPostOrgsRequestWithBody(server string, params *PostOrgsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + 
+ serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/orgs") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteOrgsIDRequest generates requests for DeleteOrgsID +func NewDeleteOrgsIDRequest(server string, orgID string, params *DeleteOrgsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "orgID", runtime.ParamLocationPath, orgID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/orgs/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetOrgsIDRequest generates requests for GetOrgsID +func NewGetOrgsIDRequest(server string, orgID string, params *GetOrgsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "orgID", runtime.ParamLocationPath, orgID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/orgs/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPatchOrgsIDRequest calls the generic PatchOrgsID builder with application/json body +func NewPatchOrgsIDRequest(server string, orgID string, params *PatchOrgsIDParams, body PatchOrgsIDJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchOrgsIDRequestWithBody(server, orgID, params, "application/json", bodyReader) +} + +// NewPatchOrgsIDRequestWithBody generates requests for PatchOrgsID with any type of body +func NewPatchOrgsIDRequestWithBody(server string, orgID string, params *PatchOrgsIDParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "orgID", runtime.ParamLocationPath, orgID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/orgs/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetOrgsIDMembersRequest generates requests for GetOrgsIDMembers +func NewGetOrgsIDMembersRequest(server string, orgID string, params *GetOrgsIDMembersParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "orgID", runtime.ParamLocationPath, orgID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/orgs/%s/members", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostOrgsIDMembersRequest calls the generic PostOrgsIDMembers builder with application/json body +func NewPostOrgsIDMembersRequest(server string, orgID string, params *PostOrgsIDMembersParams, body PostOrgsIDMembersJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostOrgsIDMembersRequestWithBody(server, orgID, params, "application/json", bodyReader) +} + +// NewPostOrgsIDMembersRequestWithBody generates requests for PostOrgsIDMembers with any type of body +func NewPostOrgsIDMembersRequestWithBody(server string, orgID string, params *PostOrgsIDMembersParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "orgID", runtime.ParamLocationPath, orgID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/orgs/%s/members", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteOrgsIDMembersIDRequest generates requests for DeleteOrgsIDMembersID +func NewDeleteOrgsIDMembersIDRequest(server string, orgID string, userID string, params *DeleteOrgsIDMembersIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "orgID", runtime.ParamLocationPath, orgID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "userID", runtime.ParamLocationPath, userID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/orgs/%s/members/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath
+	}
+
+	queryURL, err := serverURL.Parse(operationPath)
+	if err != nil {
+		return nil, err
+	}
+
+	req, err := http.NewRequest("DELETE", queryURL.String(), nil)
+	if err != nil {
+		return nil, err
+	}
+
+	if params.ZapTraceSpan != nil {
+		var headerParam0 string
+
+		headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan)
+		if err != nil {
+			return nil, err
+		}
+
+		req.Header.Set("Zap-Trace-Span", headerParam0)
+	}
+
+	return req, nil
+}
+
+// NewGetOrgsIDOwnersRequest generates requests for GetOrgsIDOwners
+func NewGetOrgsIDOwnersRequest(server string, orgID string, params *GetOrgsIDOwnersParams) (*http.Request, error) {
+	var err error
+
+	var pathParam0 string
+
+	pathParam0, err = runtime.StyleParamWithLocation("simple", false, "orgID", runtime.ParamLocationPath, orgID)
+	if err != nil {
+		return nil, err
+	}
+
+	serverURL, err := url.Parse(server)
+	if err != nil {
+		return nil, err
+	}
+
+	operationPath := fmt.Sprintf("/orgs/%s/owners", pathParam0)
+	if operationPath[0] == '/' {
+		operationPath = "." + operationPath
+	}
+
+	queryURL, err := serverURL.Parse(operationPath)
+	if err != nil {
+		return nil, err
+	}
+
+	req, err := http.NewRequest("GET", queryURL.String(), nil)
+	if err != nil {
+		return nil, err
+	}
+
+	if params.ZapTraceSpan != nil {
+		var headerParam0 string
+
+		headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan)
+		if err != nil {
+			return nil, err
+		}
+
+		req.Header.Set("Zap-Trace-Span", headerParam0)
+	}
+
+	return req, nil
+}

[... the rest of this machine-generated (oapi-codegen) client, which comes in with the influxdb-client-go v2 dependency, repeats the same request-builder pattern for the remaining InfluxDB v2 API paths: /orgs/{orgID}/owners (POST, DELETE), /orgs/{orgID}/secrets, /ping, /query, /query/analyze, /query/ast, /query/suggestions, /ready, /remotes, /replications (incl. /validate), /resources, /restore/* and /scrapers/* (labels, members, owners); the hunk picks up again below at NewDeleteScrapersIDOwnersIDRequest ...]
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostScrapersIDOwnersRequest calls the generic PostScrapersIDOwners builder with application/json body +func NewPostScrapersIDOwnersRequest(server string, scraperTargetID string, params *PostScrapersIDOwnersParams, body PostScrapersIDOwnersJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostScrapersIDOwnersRequestWithBody(server, scraperTargetID, params, "application/json", bodyReader) +} + +// NewPostScrapersIDOwnersRequestWithBody generates requests for PostScrapersIDOwners with any type of body +func NewPostScrapersIDOwnersRequestWithBody(server string, scraperTargetID string, params *PostScrapersIDOwnersParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "scraperTargetID", runtime.ParamLocationPath, scraperTargetID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/scrapers/%s/owners", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteScrapersIDOwnersIDRequest generates requests for DeleteScrapersIDOwnersID +func NewDeleteScrapersIDOwnersIDRequest(server string, scraperTargetID string, userID string, params *DeleteScrapersIDOwnersIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "scraperTargetID", runtime.ParamLocationPath, scraperTargetID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "userID", runtime.ParamLocationPath, userID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/scrapers/%s/owners/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetSetupRequest generates requests for GetSetup +func NewGetSetupRequest(server string, params *GetSetupParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/setup") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostSetupRequest calls the generic PostSetup builder with application/json body +func NewPostSetupRequest(server string, params *PostSetupParams, body PostSetupJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostSetupRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPostSetupRequestWithBody generates requests for PostSetup with any type of body +func NewPostSetupRequestWithBody(server string, params *PostSetupParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/setup") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostSigninRequest generates requests for PostSignin +func NewPostSigninRequest(server string, params *PostSigninParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/signin") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostSignoutRequest generates requests for PostSignout +func NewPostSignoutRequest(server string, params *PostSignoutParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/signout") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetSourcesRequest generates requests for GetSources +func NewGetSourcesRequest(server string, params *GetSourcesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/sources") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.Org != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "org", runtime.ParamLocationQuery, *params.Org); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostSourcesRequest calls the generic PostSources builder with application/json body +func NewPostSourcesRequest(server string, params *PostSourcesParams, body PostSourcesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostSourcesRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPostSourcesRequestWithBody generates requests for PostSources with any type of body +func NewPostSourcesRequestWithBody(server string, params *PostSourcesParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath 
:= fmt.Sprintf("/sources") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteSourcesIDRequest generates requests for DeleteSourcesID +func NewDeleteSourcesIDRequest(server string, sourceID string, params *DeleteSourcesIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "sourceID", runtime.ParamLocationPath, sourceID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/sources/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetSourcesIDRequest generates requests for GetSourcesID +func NewGetSourcesIDRequest(server string, sourceID string, params *GetSourcesIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "sourceID", runtime.ParamLocationPath, sourceID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/sources/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPatchSourcesIDRequest calls the generic PatchSourcesID builder with application/json body +func NewPatchSourcesIDRequest(server string, sourceID string, params *PatchSourcesIDParams, body PatchSourcesIDJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchSourcesIDRequestWithBody(server, sourceID, params, "application/json", bodyReader) +} + +// NewPatchSourcesIDRequestWithBody generates requests for PatchSourcesID with any type of body +func NewPatchSourcesIDRequestWithBody(server string, sourceID string, params *PatchSourcesIDParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "sourceID", runtime.ParamLocationPath, sourceID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/sources/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetSourcesIDBucketsRequest generates requests for GetSourcesIDBuckets +func NewGetSourcesIDBucketsRequest(server string, sourceID string, params *GetSourcesIDBucketsParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "sourceID", runtime.ParamLocationPath, sourceID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/sources/%s/buckets", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.Org != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "org", runtime.ParamLocationQuery, *params.Org); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetSourcesIDHealthRequest generates requests for GetSourcesIDHealth +func NewGetSourcesIDHealthRequest(server string, sourceID string, params *GetSourcesIDHealthParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "sourceID", runtime.ParamLocationPath, sourceID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/sources/%s/health", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewListStacksRequest generates requests for ListStacks +func NewListStacksRequest(server string, params *ListStacksParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/stacks") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "orgID", runtime.ParamLocationQuery, params.OrgID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if params.Name != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "name", runtime.ParamLocationQuery, *params.Name); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.StackID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "stackID", runtime.ParamLocationQuery, *params.StackID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateStackRequest calls the generic CreateStack builder with application/json body +func NewCreateStackRequest(server string, body CreateStackJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateStackRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateStackRequestWithBody generates requests for CreateStack with any type of body +func NewCreateStackRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/stacks") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteStackRequest generates requests for DeleteStack +func NewDeleteStackRequest(server string, stackId string, params *DeleteStackParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "stack_id", runtime.ParamLocationPath, stackId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/stacks/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "orgID", runtime.ParamLocationQuery, params.OrgID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewReadStackRequest generates requests for ReadStack +func NewReadStackRequest(server string, stackId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "stack_id", runtime.ParamLocationPath, stackId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/stacks/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewUpdateStackRequest calls the generic UpdateStack builder with application/json body +func NewUpdateStackRequest(server string, stackId string, body UpdateStackJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateStackRequestWithBody(server, stackId, "application/json", bodyReader) +} + +// NewUpdateStackRequestWithBody generates requests for UpdateStack with any type of body +func NewUpdateStackRequestWithBody(server string, stackId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "stack_id", runtime.ParamLocationPath, stackId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/stacks/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewUninstallStackRequest generates requests for UninstallStack +func NewUninstallStackRequest(server string, stackId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "stack_id", runtime.ParamLocationPath, stackId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/stacks/%s/uninstall", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetTasksRequest generates requests for GetTasks +func NewGetTasksRequest(server string, params *GetTasksParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.Name != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "name", runtime.ParamLocationQuery, *params.Name); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.After != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "after", runtime.ParamLocationQuery, *params.After); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.User != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "user", runtime.ParamLocationQuery, *params.User); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Org != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "org", runtime.ParamLocationQuery, *params.Org); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.OrgID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "orgID", runtime.ParamLocationQuery, *params.OrgID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Status != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "status", runtime.ParamLocationQuery, *params.Status); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Limit != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "limit", runtime.ParamLocationQuery, *params.Limit); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Type != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "type", runtime.ParamLocationQuery, *params.Type); err != nil { + return nil, err + } else if parsed, err := 
url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostTasksRequest calls the generic PostTasks builder with application/json body +func NewPostTasksRequest(server string, params *PostTasksParams, body PostTasksJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostTasksRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPostTasksRequestWithBody generates requests for PostTasks with any type of body +func NewPostTasksRequestWithBody(server string, params *PostTasksParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteTasksIDRequest generates requests for DeleteTasksID +func NewDeleteTasksIDRequest(server string, taskID string, params *DeleteTasksIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetTasksIDRequest generates requests for GetTasksID +func NewGetTasksIDRequest(server string, taskID string, params *GetTasksIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPatchTasksIDRequest calls the generic PatchTasksID builder with application/json body +func NewPatchTasksIDRequest(server string, taskID string, params *PatchTasksIDParams, body PatchTasksIDJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchTasksIDRequestWithBody(server, taskID, params, "application/json", bodyReader) +} + +// NewPatchTasksIDRequestWithBody generates requests for PatchTasksID with any type of body +func NewPatchTasksIDRequestWithBody(server string, taskID string, params *PatchTasksIDParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetTasksIDLabelsRequest generates requests for GetTasksIDLabels +func NewGetTasksIDLabelsRequest(server string, taskID string, params *GetTasksIDLabelsParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s/labels", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostTasksIDLabelsRequest calls the generic PostTasksIDLabels builder with application/json body +func NewPostTasksIDLabelsRequest(server string, taskID string, params *PostTasksIDLabelsParams, body PostTasksIDLabelsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostTasksIDLabelsRequestWithBody(server, taskID, params, "application/json", bodyReader) +} + +// NewPostTasksIDLabelsRequestWithBody generates requests for PostTasksIDLabels with any type of body +func NewPostTasksIDLabelsRequestWithBody(server string, taskID string, params *PostTasksIDLabelsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s/labels", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteTasksIDLabelsIDRequest generates requests for DeleteTasksIDLabelsID +func NewDeleteTasksIDLabelsIDRequest(server string, taskID string, labelID string, params *DeleteTasksIDLabelsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "labelID", runtime.ParamLocationPath, labelID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s/labels/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetTasksIDLogsRequest generates requests for GetTasksIDLogs +func NewGetTasksIDLogsRequest(server string, taskID string, params *GetTasksIDLogsParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s/logs", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetTasksIDMembersRequest generates requests for GetTasksIDMembers +func NewGetTasksIDMembersRequest(server string, taskID string, params *GetTasksIDMembersParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s/members", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostTasksIDMembersRequest calls the generic PostTasksIDMembers builder with application/json body +func NewPostTasksIDMembersRequest(server string, taskID string, params *PostTasksIDMembersParams, body PostTasksIDMembersJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostTasksIDMembersRequestWithBody(server, taskID, params, "application/json", bodyReader) +} + +// NewPostTasksIDMembersRequestWithBody generates requests for PostTasksIDMembers with any type of body +func NewPostTasksIDMembersRequestWithBody(server string, taskID string, params *PostTasksIDMembersParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s/members", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteTasksIDMembersIDRequest generates requests for DeleteTasksIDMembersID +func NewDeleteTasksIDMembersIDRequest(server string, taskID string, userID string, params *DeleteTasksIDMembersIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "userID", runtime.ParamLocationPath, userID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s/members/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetTasksIDOwnersRequest generates requests for GetTasksIDOwners +func NewGetTasksIDOwnersRequest(server string, taskID string, params *GetTasksIDOwnersParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s/owners", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostTasksIDOwnersRequest calls the generic PostTasksIDOwners builder with application/json body +func NewPostTasksIDOwnersRequest(server string, taskID string, params *PostTasksIDOwnersParams, body PostTasksIDOwnersJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostTasksIDOwnersRequestWithBody(server, taskID, params, "application/json", bodyReader) +} + +// NewPostTasksIDOwnersRequestWithBody generates requests for PostTasksIDOwners with any type of body +func NewPostTasksIDOwnersRequestWithBody(server string, taskID string, params *PostTasksIDOwnersParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s/owners", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteTasksIDOwnersIDRequest generates requests for DeleteTasksIDOwnersID +func NewDeleteTasksIDOwnersIDRequest(server string, taskID string, userID string, params *DeleteTasksIDOwnersIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "userID", runtime.ParamLocationPath, userID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s/owners/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetTasksIDRunsRequest generates requests for GetTasksIDRuns +func NewGetTasksIDRunsRequest(server string, taskID string, params *GetTasksIDRunsParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s/runs", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.After != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "after", runtime.ParamLocationQuery, *params.After); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Limit != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "limit", runtime.ParamLocationQuery, *params.Limit); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.AfterTime != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "afterTime", runtime.ParamLocationQuery, *params.AfterTime); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.BeforeTime != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "beforeTime", runtime.ParamLocationQuery, *params.BeforeTime); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostTasksIDRunsRequest calls the generic PostTasksIDRuns builder with application/json body +func NewPostTasksIDRunsRequest(server string, taskID string, params *PostTasksIDRunsParams, body PostTasksIDRunsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, 
err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostTasksIDRunsRequestWithBody(server, taskID, params, "application/json", bodyReader) +} + +// NewPostTasksIDRunsRequestWithBody generates requests for PostTasksIDRuns with any type of body +func NewPostTasksIDRunsRequestWithBody(server string, taskID string, params *PostTasksIDRunsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s/runs", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteTasksIDRunsIDRequest generates requests for DeleteTasksIDRunsID +func NewDeleteTasksIDRunsIDRequest(server string, taskID string, runID string, params *DeleteTasksIDRunsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "runID", runtime.ParamLocationPath, runID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s/runs/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetTasksIDRunsIDRequest generates requests for GetTasksIDRunsID +func NewGetTasksIDRunsIDRequest(server string, taskID string, runID string, params *GetTasksIDRunsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "runID", runtime.ParamLocationPath, runID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s/runs/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetTasksIDRunsIDLogsRequest generates requests for GetTasksIDRunsIDLogs +func NewGetTasksIDRunsIDLogsRequest(server string, taskID string, runID string, params *GetTasksIDRunsIDLogsParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "runID", runtime.ParamLocationPath, runID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s/runs/%s/logs", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostTasksIDRunsIDRetryRequestWithBody generates requests for PostTasksIDRunsIDRetry with any type of body +func NewPostTasksIDRunsIDRetryRequestWithBody(server string, taskID string, runID string, params *PostTasksIDRunsIDRetryParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskID", runtime.ParamLocationPath, taskID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "runID", runtime.ParamLocationPath, runID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/tasks/%s/runs/%s/retry", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetTelegrafPluginsRequest generates requests for GetTelegrafPlugins +func NewGetTelegrafPluginsRequest(server string, params *GetTelegrafPluginsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/telegraf/plugins") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.Type != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "type", runtime.ParamLocationQuery, *params.Type); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetTelegrafsRequest generates requests for GetTelegrafs +func NewGetTelegrafsRequest(server string, params *GetTelegrafsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/telegrafs") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.OrgID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "orgID", runtime.ParamLocationQuery, *params.OrgID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostTelegrafsRequest calls the generic PostTelegrafs builder with application/json body +func NewPostTelegrafsRequest(server string, params *PostTelegrafsParams, body PostTelegrafsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostTelegrafsRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPostTelegrafsRequestWithBody generates requests for PostTelegrafs with any type of body +func NewPostTelegrafsRequestWithBody(server string, params *PostTelegrafsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/telegrafs") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteTelegrafsIDRequest generates requests for DeleteTelegrafsID +func NewDeleteTelegrafsIDRequest(server string, telegrafID string, params *DeleteTelegrafsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "telegrafID", runtime.ParamLocationPath, telegrafID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/telegrafs/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetTelegrafsIDRequest generates requests for GetTelegrafsID +func NewGetTelegrafsIDRequest(server string, telegrafID string, params *GetTelegrafsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "telegrafID", runtime.ParamLocationPath, telegrafID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/telegrafs/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + if params.Accept != nil { + var headerParam1 string + + headerParam1, err = runtime.StyleParamWithLocation("simple", false, "Accept", runtime.ParamLocationHeader, *params.Accept) + if err != nil { + return nil, err + } + + req.Header.Set("Accept", headerParam1) + } + + return req, nil +} + +// NewPutTelegrafsIDRequest calls the generic PutTelegrafsID builder with application/json body +func NewPutTelegrafsIDRequest(server string, telegrafID string, params *PutTelegrafsIDParams, body PutTelegrafsIDJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutTelegrafsIDRequestWithBody(server, telegrafID, params, "application/json", bodyReader) +} + +// NewPutTelegrafsIDRequestWithBody generates requests for PutTelegrafsID with any type of body +func NewPutTelegrafsIDRequestWithBody(server string, telegrafID string, params *PutTelegrafsIDParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "telegrafID", runtime.ParamLocationPath, telegrafID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/telegrafs/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetTelegrafsIDLabelsRequest generates requests for GetTelegrafsIDLabels +func NewGetTelegrafsIDLabelsRequest(server string, telegrafID string, params *GetTelegrafsIDLabelsParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "telegrafID", runtime.ParamLocationPath, telegrafID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/telegrafs/%s/labels", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostTelegrafsIDLabelsRequest calls the generic PostTelegrafsIDLabels builder with application/json body +func NewPostTelegrafsIDLabelsRequest(server string, telegrafID string, params *PostTelegrafsIDLabelsParams, body PostTelegrafsIDLabelsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostTelegrafsIDLabelsRequestWithBody(server, telegrafID, params, "application/json", bodyReader) +} + +// NewPostTelegrafsIDLabelsRequestWithBody generates requests for PostTelegrafsIDLabels with any type of body +func NewPostTelegrafsIDLabelsRequestWithBody(server string, telegrafID string, params *PostTelegrafsIDLabelsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "telegrafID", runtime.ParamLocationPath, telegrafID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/telegrafs/%s/labels", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteTelegrafsIDLabelsIDRequest generates requests for DeleteTelegrafsIDLabelsID +func NewDeleteTelegrafsIDLabelsIDRequest(server string, telegrafID string, labelID string, params *DeleteTelegrafsIDLabelsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "telegrafID", runtime.ParamLocationPath, telegrafID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "labelID", runtime.ParamLocationPath, labelID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/telegrafs/%s/labels/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetTelegrafsIDMembersRequest generates requests for GetTelegrafsIDMembers +func NewGetTelegrafsIDMembersRequest(server string, telegrafID string, params *GetTelegrafsIDMembersParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "telegrafID", runtime.ParamLocationPath, telegrafID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/telegrafs/%s/members", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostTelegrafsIDMembersRequest calls the generic PostTelegrafsIDMembers builder with application/json body +func NewPostTelegrafsIDMembersRequest(server string, telegrafID string, params *PostTelegrafsIDMembersParams, body PostTelegrafsIDMembersJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostTelegrafsIDMembersRequestWithBody(server, telegrafID, params, "application/json", bodyReader) +} + +// NewPostTelegrafsIDMembersRequestWithBody generates requests for PostTelegrafsIDMembers with any type of body +func NewPostTelegrafsIDMembersRequestWithBody(server string, telegrafID string, params *PostTelegrafsIDMembersParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "telegrafID", runtime.ParamLocationPath, telegrafID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/telegrafs/%s/members", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteTelegrafsIDMembersIDRequest generates requests for DeleteTelegrafsIDMembersID +func NewDeleteTelegrafsIDMembersIDRequest(server string, telegrafID string, userID string, params *DeleteTelegrafsIDMembersIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "telegrafID", runtime.ParamLocationPath, telegrafID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "userID", runtime.ParamLocationPath, userID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/telegrafs/%s/members/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetTelegrafsIDOwnersRequest generates requests for GetTelegrafsIDOwners +func NewGetTelegrafsIDOwnersRequest(server string, telegrafID string, params *GetTelegrafsIDOwnersParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "telegrafID", runtime.ParamLocationPath, telegrafID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/telegrafs/%s/owners", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostTelegrafsIDOwnersRequest calls the generic PostTelegrafsIDOwners builder with application/json body +func NewPostTelegrafsIDOwnersRequest(server string, telegrafID string, params *PostTelegrafsIDOwnersParams, body PostTelegrafsIDOwnersJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostTelegrafsIDOwnersRequestWithBody(server, telegrafID, params, "application/json", bodyReader) +} + +// NewPostTelegrafsIDOwnersRequestWithBody generates requests for PostTelegrafsIDOwners with any type of body +func NewPostTelegrafsIDOwnersRequestWithBody(server string, telegrafID string, params *PostTelegrafsIDOwnersParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "telegrafID", runtime.ParamLocationPath, telegrafID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/telegrafs/%s/owners", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteTelegrafsIDOwnersIDRequest generates requests for DeleteTelegrafsIDOwnersID +func NewDeleteTelegrafsIDOwnersIDRequest(server string, telegrafID string, userID string, params *DeleteTelegrafsIDOwnersIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "telegrafID", runtime.ParamLocationPath, telegrafID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "userID", runtime.ParamLocationPath, userID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/telegrafs/%s/owners/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewApplyTemplateRequest calls the generic ApplyTemplate builder with application/json body +func NewApplyTemplateRequest(server string, body ApplyTemplateJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewApplyTemplateRequestWithBody(server, "application/json", bodyReader) +} + +// NewApplyTemplateRequestWithBody generates requests for ApplyTemplate with any type of body +func NewApplyTemplateRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/templates/apply") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewExportTemplateRequest calls the generic ExportTemplate builder with application/json body +func NewExportTemplateRequest(server string, body ExportTemplateJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewExportTemplateRequestWithBody(server, "application/json", bodyReader) +} + +// NewExportTemplateRequestWithBody generates requests for ExportTemplate with any type of body +func NewExportTemplateRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/templates/export") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetUsersRequest generates requests for GetUsers +func NewGetUsersRequest(server string, params *GetUsersParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/users") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.Offset != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "offset", runtime.ParamLocationQuery, *params.Offset); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Limit != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "limit", runtime.ParamLocationQuery, *params.Limit); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.After != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "after", runtime.ParamLocationQuery, *params.After); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Name != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "name", runtime.ParamLocationQuery, *params.Name); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Id != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, *params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostUsersRequest calls the generic PostUsers builder with application/json body +func NewPostUsersRequest(server string, params *PostUsersParams, body PostUsersJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostUsersRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPostUsersRequestWithBody generates requests for PostUsers with any type of body +func NewPostUsersRequestWithBody(server string, params *PostUsersParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/users") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteUsersIDRequest generates requests for DeleteUsersID +func NewDeleteUsersIDRequest(server string, userID string, params *DeleteUsersIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "userID", runtime.ParamLocationPath, userID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/users/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetUsersIDRequest generates requests for GetUsersID +func NewGetUsersIDRequest(server string, userID string, params *GetUsersIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "userID", runtime.ParamLocationPath, userID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/users/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPatchUsersIDRequest calls the generic PatchUsersID builder with application/json body +func NewPatchUsersIDRequest(server string, userID string, params *PatchUsersIDParams, body PatchUsersIDJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchUsersIDRequestWithBody(server, userID, params, "application/json", bodyReader) +} + +// NewPatchUsersIDRequestWithBody generates requests for PatchUsersID with any type of body +func NewPatchUsersIDRequestWithBody(server string, userID string, params *PatchUsersIDParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "userID", runtime.ParamLocationPath, userID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/users/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostUsersIDPasswordRequest calls the generic PostUsersIDPassword builder with application/json body +func NewPostUsersIDPasswordRequest(server string, userID string, params *PostUsersIDPasswordParams, body PostUsersIDPasswordJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostUsersIDPasswordRequestWithBody(server, userID, params, "application/json", bodyReader) +} + +// NewPostUsersIDPasswordRequestWithBody generates requests for PostUsersIDPassword with any type of body +func NewPostUsersIDPasswordRequestWithBody(server string, userID string, params *PostUsersIDPasswordParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "userID", runtime.ParamLocationPath, userID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/users/%s/password", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetVariablesRequest generates requests for GetVariables +func NewGetVariablesRequest(server string, params *GetVariablesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/variables") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if params.Org != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "org", runtime.ParamLocationQuery, *params.Org); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.OrgID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "orgID", runtime.ParamLocationQuery, *params.OrgID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostVariablesRequest calls the generic PostVariables builder with application/json body +func NewPostVariablesRequest(server string, params *PostVariablesParams, body PostVariablesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostVariablesRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPostVariablesRequestWithBody generates requests for PostVariables with any type of body +func NewPostVariablesRequestWithBody(server string, params *PostVariablesParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/variables") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteVariablesIDRequest generates requests for DeleteVariablesID +func NewDeleteVariablesIDRequest(server string, variableID string, params *DeleteVariablesIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "variableID", runtime.ParamLocationPath, variableID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/variables/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetVariablesIDRequest generates requests for GetVariablesID +func NewGetVariablesIDRequest(server string, variableID string, params *GetVariablesIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "variableID", runtime.ParamLocationPath, variableID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/variables/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPatchVariablesIDRequest calls the generic PatchVariablesID builder with application/json body +func NewPatchVariablesIDRequest(server string, variableID string, params *PatchVariablesIDParams, body PatchVariablesIDJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchVariablesIDRequestWithBody(server, variableID, params, "application/json", bodyReader) +} + +// NewPatchVariablesIDRequestWithBody generates requests for PatchVariablesID with any type of body +func NewPatchVariablesIDRequestWithBody(server string, variableID string, params *PatchVariablesIDParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "variableID", runtime.ParamLocationPath, variableID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/variables/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPutVariablesIDRequest calls the generic PutVariablesID builder with application/json body +func NewPutVariablesIDRequest(server string, variableID string, params *PutVariablesIDParams, body PutVariablesIDJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutVariablesIDRequestWithBody(server, variableID, params, "application/json", bodyReader) +} + +// NewPutVariablesIDRequestWithBody generates requests for PutVariablesID with any type of body +func NewPutVariablesIDRequestWithBody(server string, variableID string, params *PutVariablesIDParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "variableID", runtime.ParamLocationPath, variableID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/variables/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewGetVariablesIDLabelsRequest generates requests for GetVariablesIDLabels +func NewGetVariablesIDLabelsRequest(server string, variableID string, params *GetVariablesIDLabelsParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "variableID", runtime.ParamLocationPath, variableID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/variables/%s/labels", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostVariablesIDLabelsRequest calls the generic PostVariablesIDLabels builder with application/json body +func NewPostVariablesIDLabelsRequest(server string, variableID string, params *PostVariablesIDLabelsParams, body PostVariablesIDLabelsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostVariablesIDLabelsRequestWithBody(server, variableID, params, "application/json", bodyReader) +} + +// NewPostVariablesIDLabelsRequestWithBody generates requests for PostVariablesIDLabels with any type of body +func NewPostVariablesIDLabelsRequestWithBody(server string, variableID string, params *PostVariablesIDLabelsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "variableID", runtime.ParamLocationPath, variableID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/variables/%s/labels", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewDeleteVariablesIDLabelsIDRequest generates requests for DeleteVariablesIDLabelsID +func NewDeleteVariablesIDLabelsIDRequest(server string, variableID string, labelID string, params *DeleteVariablesIDLabelsIDParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "variableID", runtime.ParamLocationPath, variableID) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "labelID", runtime.ParamLocationPath, labelID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/variables/%s/labels/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + return req, nil +} + +// NewPostWriteRequestWithBody generates requests for PostWrite with any type of body +func NewPostWriteRequestWithBody(server string, params *PostWriteParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/write") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "org", runtime.ParamLocationQuery, params.Org); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if params.OrgID != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "orgID", runtime.ParamLocationQuery, *params.OrgID); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "bucket", runtime.ParamLocationQuery, params.Bucket); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if params.Precision != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "precision", runtime.ParamLocationQuery, *params.Precision); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params.ZapTraceSpan != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Zap-Trace-Span", runtime.ParamLocationHeader, *params.ZapTraceSpan) + if err != nil { + return nil, err + } + + req.Header.Set("Zap-Trace-Span", headerParam0) + } + + if params.ContentEncoding != nil { + var headerParam1 string + + headerParam1, err = runtime.StyleParamWithLocation("simple", false, "Content-Encoding", runtime.ParamLocationHeader, *params.ContentEncoding) + if err != nil { + return nil, err + } + + req.Header.Set("Content-Encoding", headerParam1) + } + + if params.ContentType != nil { + var headerParam2 string + + headerParam2, err = runtime.StyleParamWithLocation("simple", false, "Content-Type", runtime.ParamLocationHeader, *params.ContentType) + if err != nil { + return nil, err + } + + req.Header.Set("Content-Type", headerParam2) + } + + if params.ContentLength != nil { + var headerParam3 string + + headerParam3, err = runtime.StyleParamWithLocation("simple", false, "Content-Length", runtime.ParamLocationHeader, *params.ContentLength) + if err != nil { + return nil, err + } + + req.Header.Set("Content-Length", headerParam3) + } + + if params.Accept != nil { + var headerParam4 string + + headerParam4, err = runtime.StyleParamWithLocation("simple", false, "Accept", runtime.ParamLocationHeader, *params.Accept) + if err != nil { + return nil, err + } + + req.Header.Set("Accept", headerParam4) + } + + return req, nil +} + +// ClientWithResponses builds on ClientInterface to offer response payloads +type ClientWithResponses struct { + ClientInterface +} + +// NewClientWithResponses creates a new ClientWithResponses, which wraps +// Client with return type handling +func 
NewClientWithResponses(service ihttp.Service) *ClientWithResponses { + client := NewClient(service) + return &ClientWithResponses{client} +} + +// ClientWithResponsesInterface is the interface specification for the client with responses above. +type ClientWithResponsesInterface interface { + // GetRoutes request + GetRoutesWithResponse(ctx context.Context, params *GetRoutesParams) (*GetRoutesResponse, error) + + // GetAuthorizations request + GetAuthorizationsWithResponse(ctx context.Context, params *GetAuthorizationsParams) (*GetAuthorizationsResponse, error) + + // PostAuthorizations request with any body + PostAuthorizationsWithBodyWithResponse(ctx context.Context, params *PostAuthorizationsParams, contentType string, body io.Reader) (*PostAuthorizationsResponse, error) + + PostAuthorizationsWithResponse(ctx context.Context, params *PostAuthorizationsParams, body PostAuthorizationsJSONRequestBody) (*PostAuthorizationsResponse, error) + + // DeleteAuthorizationsID request + DeleteAuthorizationsIDWithResponse(ctx context.Context, authID string, params *DeleteAuthorizationsIDParams) (*DeleteAuthorizationsIDResponse, error) + + // GetAuthorizationsID request + GetAuthorizationsIDWithResponse(ctx context.Context, authID string, params *GetAuthorizationsIDParams) (*GetAuthorizationsIDResponse, error) + + // PatchAuthorizationsID request with any body + PatchAuthorizationsIDWithBodyWithResponse(ctx context.Context, authID string, params *PatchAuthorizationsIDParams, contentType string, body io.Reader) (*PatchAuthorizationsIDResponse, error) + + PatchAuthorizationsIDWithResponse(ctx context.Context, authID string, params *PatchAuthorizationsIDParams, body PatchAuthorizationsIDJSONRequestBody) (*PatchAuthorizationsIDResponse, error) + + // GetBackupKV request + GetBackupKVWithResponse(ctx context.Context, params *GetBackupKVParams) (*GetBackupKVResponse, error) + + // GetBackupMetadata request + GetBackupMetadataWithResponse(ctx context.Context, params *GetBackupMetadataParams) (*GetBackupMetadataResponse, error) + + // GetBackupShardId request + GetBackupShardIdWithResponse(ctx context.Context, shardID int64, params *GetBackupShardIdParams) (*GetBackupShardIdResponse, error) + + // GetBuckets request + GetBucketsWithResponse(ctx context.Context, params *GetBucketsParams) (*GetBucketsResponse, error) + + // PostBuckets request with any body + PostBucketsWithBodyWithResponse(ctx context.Context, params *PostBucketsParams, contentType string, body io.Reader) (*PostBucketsResponse, error) + + PostBucketsWithResponse(ctx context.Context, params *PostBucketsParams, body PostBucketsJSONRequestBody) (*PostBucketsResponse, error) + + // DeleteBucketsID request + DeleteBucketsIDWithResponse(ctx context.Context, bucketID string, params *DeleteBucketsIDParams) (*DeleteBucketsIDResponse, error) + + // GetBucketsID request + GetBucketsIDWithResponse(ctx context.Context, bucketID string, params *GetBucketsIDParams) (*GetBucketsIDResponse, error) + + // PatchBucketsID request with any body + PatchBucketsIDWithBodyWithResponse(ctx context.Context, bucketID string, params *PatchBucketsIDParams, contentType string, body io.Reader) (*PatchBucketsIDResponse, error) + + PatchBucketsIDWithResponse(ctx context.Context, bucketID string, params *PatchBucketsIDParams, body PatchBucketsIDJSONRequestBody) (*PatchBucketsIDResponse, error) + + // GetBucketsIDLabels request + GetBucketsIDLabelsWithResponse(ctx context.Context, bucketID string, params *GetBucketsIDLabelsParams) (*GetBucketsIDLabelsResponse, error) + + // 
PostBucketsIDLabels request with any body + PostBucketsIDLabelsWithBodyWithResponse(ctx context.Context, bucketID string, params *PostBucketsIDLabelsParams, contentType string, body io.Reader) (*PostBucketsIDLabelsResponse, error) + + PostBucketsIDLabelsWithResponse(ctx context.Context, bucketID string, params *PostBucketsIDLabelsParams, body PostBucketsIDLabelsJSONRequestBody) (*PostBucketsIDLabelsResponse, error) + + // DeleteBucketsIDLabelsID request + DeleteBucketsIDLabelsIDWithResponse(ctx context.Context, bucketID string, labelID string, params *DeleteBucketsIDLabelsIDParams) (*DeleteBucketsIDLabelsIDResponse, error) + + // GetBucketsIDMembers request + GetBucketsIDMembersWithResponse(ctx context.Context, bucketID string, params *GetBucketsIDMembersParams) (*GetBucketsIDMembersResponse, error) + + // PostBucketsIDMembers request with any body + PostBucketsIDMembersWithBodyWithResponse(ctx context.Context, bucketID string, params *PostBucketsIDMembersParams, contentType string, body io.Reader) (*PostBucketsIDMembersResponse, error) + + PostBucketsIDMembersWithResponse(ctx context.Context, bucketID string, params *PostBucketsIDMembersParams, body PostBucketsIDMembersJSONRequestBody) (*PostBucketsIDMembersResponse, error) + + // DeleteBucketsIDMembersID request + DeleteBucketsIDMembersIDWithResponse(ctx context.Context, bucketID string, userID string, params *DeleteBucketsIDMembersIDParams) (*DeleteBucketsIDMembersIDResponse, error) + + // GetBucketsIDOwners request + GetBucketsIDOwnersWithResponse(ctx context.Context, bucketID string, params *GetBucketsIDOwnersParams) (*GetBucketsIDOwnersResponse, error) + + // PostBucketsIDOwners request with any body + PostBucketsIDOwnersWithBodyWithResponse(ctx context.Context, bucketID string, params *PostBucketsIDOwnersParams, contentType string, body io.Reader) (*PostBucketsIDOwnersResponse, error) + + PostBucketsIDOwnersWithResponse(ctx context.Context, bucketID string, params *PostBucketsIDOwnersParams, body PostBucketsIDOwnersJSONRequestBody) (*PostBucketsIDOwnersResponse, error) + + // DeleteBucketsIDOwnersID request + DeleteBucketsIDOwnersIDWithResponse(ctx context.Context, bucketID string, userID string, params *DeleteBucketsIDOwnersIDParams) (*DeleteBucketsIDOwnersIDResponse, error) + + // GetChecks request + GetChecksWithResponse(ctx context.Context, params *GetChecksParams) (*GetChecksResponse, error) + + // CreateCheck request with any body + CreateCheckWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader) (*CreateCheckResponse, error) + + CreateCheckWithResponse(ctx context.Context, body CreateCheckJSONRequestBody) (*CreateCheckResponse, error) + + // DeleteChecksID request + DeleteChecksIDWithResponse(ctx context.Context, checkID string, params *DeleteChecksIDParams) (*DeleteChecksIDResponse, error) + + // GetChecksID request + GetChecksIDWithResponse(ctx context.Context, checkID string, params *GetChecksIDParams) (*GetChecksIDResponse, error) + + // PatchChecksID request with any body + PatchChecksIDWithBodyWithResponse(ctx context.Context, checkID string, params *PatchChecksIDParams, contentType string, body io.Reader) (*PatchChecksIDResponse, error) + + PatchChecksIDWithResponse(ctx context.Context, checkID string, params *PatchChecksIDParams, body PatchChecksIDJSONRequestBody) (*PatchChecksIDResponse, error) + + // PutChecksID request with any body + PutChecksIDWithBodyWithResponse(ctx context.Context, checkID string, params *PutChecksIDParams, contentType string, body io.Reader) (*PutChecksIDResponse, 
error) + + PutChecksIDWithResponse(ctx context.Context, checkID string, params *PutChecksIDParams, body PutChecksIDJSONRequestBody) (*PutChecksIDResponse, error) + + // GetChecksIDLabels request + GetChecksIDLabelsWithResponse(ctx context.Context, checkID string, params *GetChecksIDLabelsParams) (*GetChecksIDLabelsResponse, error) + + // PostChecksIDLabels request with any body + PostChecksIDLabelsWithBodyWithResponse(ctx context.Context, checkID string, params *PostChecksIDLabelsParams, contentType string, body io.Reader) (*PostChecksIDLabelsResponse, error) + + PostChecksIDLabelsWithResponse(ctx context.Context, checkID string, params *PostChecksIDLabelsParams, body PostChecksIDLabelsJSONRequestBody) (*PostChecksIDLabelsResponse, error) + + // DeleteChecksIDLabelsID request + DeleteChecksIDLabelsIDWithResponse(ctx context.Context, checkID string, labelID string, params *DeleteChecksIDLabelsIDParams) (*DeleteChecksIDLabelsIDResponse, error) + + // GetChecksIDQuery request + GetChecksIDQueryWithResponse(ctx context.Context, checkID string, params *GetChecksIDQueryParams) (*GetChecksIDQueryResponse, error) + + // GetConfig request + GetConfigWithResponse(ctx context.Context, params *GetConfigParams) (*GetConfigResponse, error) + + // GetDashboards request + GetDashboardsWithResponse(ctx context.Context, params *GetDashboardsParams) (*GetDashboardsResponse, error) + + // PostDashboards request with any body + PostDashboardsWithBodyWithResponse(ctx context.Context, params *PostDashboardsParams, contentType string, body io.Reader) (*PostDashboardsResponse, error) + + PostDashboardsWithResponse(ctx context.Context, params *PostDashboardsParams, body PostDashboardsJSONRequestBody) (*PostDashboardsResponse, error) + + // DeleteDashboardsID request + DeleteDashboardsIDWithResponse(ctx context.Context, dashboardID string, params *DeleteDashboardsIDParams) (*DeleteDashboardsIDResponse, error) + + // GetDashboardsID request + GetDashboardsIDWithResponse(ctx context.Context, dashboardID string, params *GetDashboardsIDParams) (*GetDashboardsIDResponse, error) + + // PatchDashboardsID request with any body + PatchDashboardsIDWithBodyWithResponse(ctx context.Context, dashboardID string, params *PatchDashboardsIDParams, contentType string, body io.Reader) (*PatchDashboardsIDResponse, error) + + PatchDashboardsIDWithResponse(ctx context.Context, dashboardID string, params *PatchDashboardsIDParams, body PatchDashboardsIDJSONRequestBody) (*PatchDashboardsIDResponse, error) + + // PostDashboardsIDCells request with any body + PostDashboardsIDCellsWithBodyWithResponse(ctx context.Context, dashboardID string, params *PostDashboardsIDCellsParams, contentType string, body io.Reader) (*PostDashboardsIDCellsResponse, error) + + PostDashboardsIDCellsWithResponse(ctx context.Context, dashboardID string, params *PostDashboardsIDCellsParams, body PostDashboardsIDCellsJSONRequestBody) (*PostDashboardsIDCellsResponse, error) + + // PutDashboardsIDCells request with any body + PutDashboardsIDCellsWithBodyWithResponse(ctx context.Context, dashboardID string, params *PutDashboardsIDCellsParams, contentType string, body io.Reader) (*PutDashboardsIDCellsResponse, error) + + PutDashboardsIDCellsWithResponse(ctx context.Context, dashboardID string, params *PutDashboardsIDCellsParams, body PutDashboardsIDCellsJSONRequestBody) (*PutDashboardsIDCellsResponse, error) + + // DeleteDashboardsIDCellsID request + DeleteDashboardsIDCellsIDWithResponse(ctx context.Context, dashboardID string, cellID string, params 
*DeleteDashboardsIDCellsIDParams) (*DeleteDashboardsIDCellsIDResponse, error) + + // PatchDashboardsIDCellsID request with any body + PatchDashboardsIDCellsIDWithBodyWithResponse(ctx context.Context, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDParams, contentType string, body io.Reader) (*PatchDashboardsIDCellsIDResponse, error) + + PatchDashboardsIDCellsIDWithResponse(ctx context.Context, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDParams, body PatchDashboardsIDCellsIDJSONRequestBody) (*PatchDashboardsIDCellsIDResponse, error) + + // GetDashboardsIDCellsIDView request + GetDashboardsIDCellsIDViewWithResponse(ctx context.Context, dashboardID string, cellID string, params *GetDashboardsIDCellsIDViewParams) (*GetDashboardsIDCellsIDViewResponse, error) + + // PatchDashboardsIDCellsIDView request with any body + PatchDashboardsIDCellsIDViewWithBodyWithResponse(ctx context.Context, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDViewParams, contentType string, body io.Reader) (*PatchDashboardsIDCellsIDViewResponse, error) + + PatchDashboardsIDCellsIDViewWithResponse(ctx context.Context, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDViewParams, body PatchDashboardsIDCellsIDViewJSONRequestBody) (*PatchDashboardsIDCellsIDViewResponse, error) + + // GetDashboardsIDLabels request + GetDashboardsIDLabelsWithResponse(ctx context.Context, dashboardID string, params *GetDashboardsIDLabelsParams) (*GetDashboardsIDLabelsResponse, error) + + // PostDashboardsIDLabels request with any body + PostDashboardsIDLabelsWithBodyWithResponse(ctx context.Context, dashboardID string, params *PostDashboardsIDLabelsParams, contentType string, body io.Reader) (*PostDashboardsIDLabelsResponse, error) + + PostDashboardsIDLabelsWithResponse(ctx context.Context, dashboardID string, params *PostDashboardsIDLabelsParams, body PostDashboardsIDLabelsJSONRequestBody) (*PostDashboardsIDLabelsResponse, error) + + // DeleteDashboardsIDLabelsID request + DeleteDashboardsIDLabelsIDWithResponse(ctx context.Context, dashboardID string, labelID string, params *DeleteDashboardsIDLabelsIDParams) (*DeleteDashboardsIDLabelsIDResponse, error) + + // GetDashboardsIDMembers request + GetDashboardsIDMembersWithResponse(ctx context.Context, dashboardID string, params *GetDashboardsIDMembersParams) (*GetDashboardsIDMembersResponse, error) + + // PostDashboardsIDMembers request with any body + PostDashboardsIDMembersWithBodyWithResponse(ctx context.Context, dashboardID string, params *PostDashboardsIDMembersParams, contentType string, body io.Reader) (*PostDashboardsIDMembersResponse, error) + + PostDashboardsIDMembersWithResponse(ctx context.Context, dashboardID string, params *PostDashboardsIDMembersParams, body PostDashboardsIDMembersJSONRequestBody) (*PostDashboardsIDMembersResponse, error) + + // DeleteDashboardsIDMembersID request + DeleteDashboardsIDMembersIDWithResponse(ctx context.Context, dashboardID string, userID string, params *DeleteDashboardsIDMembersIDParams) (*DeleteDashboardsIDMembersIDResponse, error) + + // GetDashboardsIDOwners request + GetDashboardsIDOwnersWithResponse(ctx context.Context, dashboardID string, params *GetDashboardsIDOwnersParams) (*GetDashboardsIDOwnersResponse, error) + + // PostDashboardsIDOwners request with any body + PostDashboardsIDOwnersWithBodyWithResponse(ctx context.Context, dashboardID string, params *PostDashboardsIDOwnersParams, contentType string, body io.Reader) (*PostDashboardsIDOwnersResponse, error) + + 
PostDashboardsIDOwnersWithResponse(ctx context.Context, dashboardID string, params *PostDashboardsIDOwnersParams, body PostDashboardsIDOwnersJSONRequestBody) (*PostDashboardsIDOwnersResponse, error) + + // DeleteDashboardsIDOwnersID request + DeleteDashboardsIDOwnersIDWithResponse(ctx context.Context, dashboardID string, userID string, params *DeleteDashboardsIDOwnersIDParams) (*DeleteDashboardsIDOwnersIDResponse, error) + + // GetDBRPs request + GetDBRPsWithResponse(ctx context.Context, params *GetDBRPsParams) (*GetDBRPsResponse, error) + + // PostDBRP request with any body + PostDBRPWithBodyWithResponse(ctx context.Context, params *PostDBRPParams, contentType string, body io.Reader) (*PostDBRPResponse, error) + + PostDBRPWithResponse(ctx context.Context, params *PostDBRPParams, body PostDBRPJSONRequestBody) (*PostDBRPResponse, error) + + // DeleteDBRPID request + DeleteDBRPIDWithResponse(ctx context.Context, dbrpID string, params *DeleteDBRPIDParams) (*DeleteDBRPIDResponse, error) + + // GetDBRPsID request + GetDBRPsIDWithResponse(ctx context.Context, dbrpID string, params *GetDBRPsIDParams) (*GetDBRPsIDResponse, error) + + // PatchDBRPID request with any body + PatchDBRPIDWithBodyWithResponse(ctx context.Context, dbrpID string, params *PatchDBRPIDParams, contentType string, body io.Reader) (*PatchDBRPIDResponse, error) + + PatchDBRPIDWithResponse(ctx context.Context, dbrpID string, params *PatchDBRPIDParams, body PatchDBRPIDJSONRequestBody) (*PatchDBRPIDResponse, error) + + // PostDelete request with any body + PostDeleteWithBodyWithResponse(ctx context.Context, params *PostDeleteParams, contentType string, body io.Reader) (*PostDeleteResponse, error) + + PostDeleteWithResponse(ctx context.Context, params *PostDeleteParams, body PostDeleteJSONRequestBody) (*PostDeleteResponse, error) + + // GetFlags request + GetFlagsWithResponse(ctx context.Context, params *GetFlagsParams) (*GetFlagsResponse, error) + + // GetHealth request + GetHealthWithResponse(ctx context.Context, params *GetHealthParams) (*GetHealthResponse, error) + + // GetLabels request + GetLabelsWithResponse(ctx context.Context, params *GetLabelsParams) (*GetLabelsResponse, error) + + // PostLabels request with any body + PostLabelsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader) (*PostLabelsResponse, error) + + PostLabelsWithResponse(ctx context.Context, body PostLabelsJSONRequestBody) (*PostLabelsResponse, error) + + // DeleteLabelsID request + DeleteLabelsIDWithResponse(ctx context.Context, labelID string, params *DeleteLabelsIDParams) (*DeleteLabelsIDResponse, error) + + // GetLabelsID request + GetLabelsIDWithResponse(ctx context.Context, labelID string, params *GetLabelsIDParams) (*GetLabelsIDResponse, error) + + // PatchLabelsID request with any body + PatchLabelsIDWithBodyWithResponse(ctx context.Context, labelID string, params *PatchLabelsIDParams, contentType string, body io.Reader) (*PatchLabelsIDResponse, error) + + PatchLabelsIDWithResponse(ctx context.Context, labelID string, params *PatchLabelsIDParams, body PatchLabelsIDJSONRequestBody) (*PatchLabelsIDResponse, error) + + // GetLegacyAuthorizations request + GetLegacyAuthorizationsWithResponse(ctx context.Context, params *GetLegacyAuthorizationsParams) (*GetLegacyAuthorizationsResponse, error) + + // PostLegacyAuthorizations request with any body + PostLegacyAuthorizationsWithBodyWithResponse(ctx context.Context, params *PostLegacyAuthorizationsParams, contentType string, body io.Reader) (*PostLegacyAuthorizationsResponse, error) + 
+ PostLegacyAuthorizationsWithResponse(ctx context.Context, params *PostLegacyAuthorizationsParams, body PostLegacyAuthorizationsJSONRequestBody) (*PostLegacyAuthorizationsResponse, error) + + // DeleteLegacyAuthorizationsID request + DeleteLegacyAuthorizationsIDWithResponse(ctx context.Context, authID string, params *DeleteLegacyAuthorizationsIDParams) (*DeleteLegacyAuthorizationsIDResponse, error) + + // GetLegacyAuthorizationsID request + GetLegacyAuthorizationsIDWithResponse(ctx context.Context, authID string, params *GetLegacyAuthorizationsIDParams) (*GetLegacyAuthorizationsIDResponse, error) + + // PatchLegacyAuthorizationsID request with any body + PatchLegacyAuthorizationsIDWithBodyWithResponse(ctx context.Context, authID string, params *PatchLegacyAuthorizationsIDParams, contentType string, body io.Reader) (*PatchLegacyAuthorizationsIDResponse, error) + + PatchLegacyAuthorizationsIDWithResponse(ctx context.Context, authID string, params *PatchLegacyAuthorizationsIDParams, body PatchLegacyAuthorizationsIDJSONRequestBody) (*PatchLegacyAuthorizationsIDResponse, error) + + // PostLegacyAuthorizationsIDPassword request with any body + PostLegacyAuthorizationsIDPasswordWithBodyWithResponse(ctx context.Context, authID string, params *PostLegacyAuthorizationsIDPasswordParams, contentType string, body io.Reader) (*PostLegacyAuthorizationsIDPasswordResponse, error) + + PostLegacyAuthorizationsIDPasswordWithResponse(ctx context.Context, authID string, params *PostLegacyAuthorizationsIDPasswordParams, body PostLegacyAuthorizationsIDPasswordJSONRequestBody) (*PostLegacyAuthorizationsIDPasswordResponse, error) + + // GetMe request + GetMeWithResponse(ctx context.Context, params *GetMeParams) (*GetMeResponse, error) + + // PutMePassword request with any body + PutMePasswordWithBodyWithResponse(ctx context.Context, params *PutMePasswordParams, contentType string, body io.Reader) (*PutMePasswordResponse, error) + + PutMePasswordWithResponse(ctx context.Context, params *PutMePasswordParams, body PutMePasswordJSONRequestBody) (*PutMePasswordResponse, error) + + // GetMetrics request + GetMetricsWithResponse(ctx context.Context, params *GetMetricsParams) (*GetMetricsResponse, error) + + // GetNotificationEndpoints request + GetNotificationEndpointsWithResponse(ctx context.Context, params *GetNotificationEndpointsParams) (*GetNotificationEndpointsResponse, error) + + // CreateNotificationEndpoint request with any body + CreateNotificationEndpointWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader) (*CreateNotificationEndpointResponse, error) + + CreateNotificationEndpointWithResponse(ctx context.Context, body CreateNotificationEndpointJSONRequestBody) (*CreateNotificationEndpointResponse, error) + + // DeleteNotificationEndpointsID request + DeleteNotificationEndpointsIDWithResponse(ctx context.Context, endpointID string, params *DeleteNotificationEndpointsIDParams) (*DeleteNotificationEndpointsIDResponse, error) + + // GetNotificationEndpointsID request + GetNotificationEndpointsIDWithResponse(ctx context.Context, endpointID string, params *GetNotificationEndpointsIDParams) (*GetNotificationEndpointsIDResponse, error) + + // PatchNotificationEndpointsID request with any body + PatchNotificationEndpointsIDWithBodyWithResponse(ctx context.Context, endpointID string, params *PatchNotificationEndpointsIDParams, contentType string, body io.Reader) (*PatchNotificationEndpointsIDResponse, error) + + PatchNotificationEndpointsIDWithResponse(ctx context.Context, endpointID string, 
params *PatchNotificationEndpointsIDParams, body PatchNotificationEndpointsIDJSONRequestBody) (*PatchNotificationEndpointsIDResponse, error) + + // PutNotificationEndpointsID request with any body + PutNotificationEndpointsIDWithBodyWithResponse(ctx context.Context, endpointID string, params *PutNotificationEndpointsIDParams, contentType string, body io.Reader) (*PutNotificationEndpointsIDResponse, error) + + PutNotificationEndpointsIDWithResponse(ctx context.Context, endpointID string, params *PutNotificationEndpointsIDParams, body PutNotificationEndpointsIDJSONRequestBody) (*PutNotificationEndpointsIDResponse, error) + + // GetNotificationEndpointsIDLabels request + GetNotificationEndpointsIDLabelsWithResponse(ctx context.Context, endpointID string, params *GetNotificationEndpointsIDLabelsParams) (*GetNotificationEndpointsIDLabelsResponse, error) + + // PostNotificationEndpointIDLabels request with any body + PostNotificationEndpointIDLabelsWithBodyWithResponse(ctx context.Context, endpointID string, params *PostNotificationEndpointIDLabelsParams, contentType string, body io.Reader) (*PostNotificationEndpointIDLabelsResponse, error) + + PostNotificationEndpointIDLabelsWithResponse(ctx context.Context, endpointID string, params *PostNotificationEndpointIDLabelsParams, body PostNotificationEndpointIDLabelsJSONRequestBody) (*PostNotificationEndpointIDLabelsResponse, error) + + // DeleteNotificationEndpointsIDLabelsID request + DeleteNotificationEndpointsIDLabelsIDWithResponse(ctx context.Context, endpointID string, labelID string, params *DeleteNotificationEndpointsIDLabelsIDParams) (*DeleteNotificationEndpointsIDLabelsIDResponse, error) + + // GetNotificationRules request + GetNotificationRulesWithResponse(ctx context.Context, params *GetNotificationRulesParams) (*GetNotificationRulesResponse, error) + + // CreateNotificationRule request with any body + CreateNotificationRuleWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader) (*CreateNotificationRuleResponse, error) + + CreateNotificationRuleWithResponse(ctx context.Context, body CreateNotificationRuleJSONRequestBody) (*CreateNotificationRuleResponse, error) + + // DeleteNotificationRulesID request + DeleteNotificationRulesIDWithResponse(ctx context.Context, ruleID string, params *DeleteNotificationRulesIDParams) (*DeleteNotificationRulesIDResponse, error) + + // GetNotificationRulesID request + GetNotificationRulesIDWithResponse(ctx context.Context, ruleID string, params *GetNotificationRulesIDParams) (*GetNotificationRulesIDResponse, error) + + // PatchNotificationRulesID request with any body + PatchNotificationRulesIDWithBodyWithResponse(ctx context.Context, ruleID string, params *PatchNotificationRulesIDParams, contentType string, body io.Reader) (*PatchNotificationRulesIDResponse, error) + + PatchNotificationRulesIDWithResponse(ctx context.Context, ruleID string, params *PatchNotificationRulesIDParams, body PatchNotificationRulesIDJSONRequestBody) (*PatchNotificationRulesIDResponse, error) + + // PutNotificationRulesID request with any body + PutNotificationRulesIDWithBodyWithResponse(ctx context.Context, ruleID string, params *PutNotificationRulesIDParams, contentType string, body io.Reader) (*PutNotificationRulesIDResponse, error) + + PutNotificationRulesIDWithResponse(ctx context.Context, ruleID string, params *PutNotificationRulesIDParams, body PutNotificationRulesIDJSONRequestBody) (*PutNotificationRulesIDResponse, error) + + // GetNotificationRulesIDLabels request + 
GetNotificationRulesIDLabelsWithResponse(ctx context.Context, ruleID string, params *GetNotificationRulesIDLabelsParams) (*GetNotificationRulesIDLabelsResponse, error) + + // PostNotificationRuleIDLabels request with any body + PostNotificationRuleIDLabelsWithBodyWithResponse(ctx context.Context, ruleID string, params *PostNotificationRuleIDLabelsParams, contentType string, body io.Reader) (*PostNotificationRuleIDLabelsResponse, error) + + PostNotificationRuleIDLabelsWithResponse(ctx context.Context, ruleID string, params *PostNotificationRuleIDLabelsParams, body PostNotificationRuleIDLabelsJSONRequestBody) (*PostNotificationRuleIDLabelsResponse, error) + + // DeleteNotificationRulesIDLabelsID request + DeleteNotificationRulesIDLabelsIDWithResponse(ctx context.Context, ruleID string, labelID string, params *DeleteNotificationRulesIDLabelsIDParams) (*DeleteNotificationRulesIDLabelsIDResponse, error) + + // GetNotificationRulesIDQuery request + GetNotificationRulesIDQueryWithResponse(ctx context.Context, ruleID string, params *GetNotificationRulesIDQueryParams) (*GetNotificationRulesIDQueryResponse, error) + + // GetOrgs request + GetOrgsWithResponse(ctx context.Context, params *GetOrgsParams) (*GetOrgsResponse, error) + + // PostOrgs request with any body + PostOrgsWithBodyWithResponse(ctx context.Context, params *PostOrgsParams, contentType string, body io.Reader) (*PostOrgsResponse, error) + + PostOrgsWithResponse(ctx context.Context, params *PostOrgsParams, body PostOrgsJSONRequestBody) (*PostOrgsResponse, error) + + // DeleteOrgsID request + DeleteOrgsIDWithResponse(ctx context.Context, orgID string, params *DeleteOrgsIDParams) (*DeleteOrgsIDResponse, error) + + // GetOrgsID request + GetOrgsIDWithResponse(ctx context.Context, orgID string, params *GetOrgsIDParams) (*GetOrgsIDResponse, error) + + // PatchOrgsID request with any body + PatchOrgsIDWithBodyWithResponse(ctx context.Context, orgID string, params *PatchOrgsIDParams, contentType string, body io.Reader) (*PatchOrgsIDResponse, error) + + PatchOrgsIDWithResponse(ctx context.Context, orgID string, params *PatchOrgsIDParams, body PatchOrgsIDJSONRequestBody) (*PatchOrgsIDResponse, error) + + // GetOrgsIDMembers request + GetOrgsIDMembersWithResponse(ctx context.Context, orgID string, params *GetOrgsIDMembersParams) (*GetOrgsIDMembersResponse, error) + + // PostOrgsIDMembers request with any body + PostOrgsIDMembersWithBodyWithResponse(ctx context.Context, orgID string, params *PostOrgsIDMembersParams, contentType string, body io.Reader) (*PostOrgsIDMembersResponse, error) + + PostOrgsIDMembersWithResponse(ctx context.Context, orgID string, params *PostOrgsIDMembersParams, body PostOrgsIDMembersJSONRequestBody) (*PostOrgsIDMembersResponse, error) + + // DeleteOrgsIDMembersID request + DeleteOrgsIDMembersIDWithResponse(ctx context.Context, orgID string, userID string, params *DeleteOrgsIDMembersIDParams) (*DeleteOrgsIDMembersIDResponse, error) + + // GetOrgsIDOwners request + GetOrgsIDOwnersWithResponse(ctx context.Context, orgID string, params *GetOrgsIDOwnersParams) (*GetOrgsIDOwnersResponse, error) + + // PostOrgsIDOwners request with any body + PostOrgsIDOwnersWithBodyWithResponse(ctx context.Context, orgID string, params *PostOrgsIDOwnersParams, contentType string, body io.Reader) (*PostOrgsIDOwnersResponse, error) + + PostOrgsIDOwnersWithResponse(ctx context.Context, orgID string, params *PostOrgsIDOwnersParams, body PostOrgsIDOwnersJSONRequestBody) (*PostOrgsIDOwnersResponse, error) + + // DeleteOrgsIDOwnersID request + 
DeleteOrgsIDOwnersIDWithResponse(ctx context.Context, orgID string, userID string, params *DeleteOrgsIDOwnersIDParams) (*DeleteOrgsIDOwnersIDResponse, error) + + // GetOrgsIDSecrets request + GetOrgsIDSecretsWithResponse(ctx context.Context, orgID string, params *GetOrgsIDSecretsParams) (*GetOrgsIDSecretsResponse, error) + + // PatchOrgsIDSecrets request with any body + PatchOrgsIDSecretsWithBodyWithResponse(ctx context.Context, orgID string, params *PatchOrgsIDSecretsParams, contentType string, body io.Reader) (*PatchOrgsIDSecretsResponse, error) + + PatchOrgsIDSecretsWithResponse(ctx context.Context, orgID string, params *PatchOrgsIDSecretsParams, body PatchOrgsIDSecretsJSONRequestBody) (*PatchOrgsIDSecretsResponse, error) + + // PostOrgsIDSecrets request with any body + PostOrgsIDSecretsWithBodyWithResponse(ctx context.Context, orgID string, params *PostOrgsIDSecretsParams, contentType string, body io.Reader) (*PostOrgsIDSecretsResponse, error) + + PostOrgsIDSecretsWithResponse(ctx context.Context, orgID string, params *PostOrgsIDSecretsParams, body PostOrgsIDSecretsJSONRequestBody) (*PostOrgsIDSecretsResponse, error) + + // DeleteOrgsIDSecretsID request + DeleteOrgsIDSecretsIDWithResponse(ctx context.Context, orgID string, secretID string, params *DeleteOrgsIDSecretsIDParams) (*DeleteOrgsIDSecretsIDResponse, error) + + // GetPing request + GetPingWithResponse(ctx context.Context) (*GetPingResponse, error) + + // HeadPing request + HeadPingWithResponse(ctx context.Context) (*HeadPingResponse, error) + + // PostQuery request with any body + PostQueryWithBodyWithResponse(ctx context.Context, params *PostQueryParams, contentType string, body io.Reader) (*PostQueryResponse, error) + + PostQueryWithResponse(ctx context.Context, params *PostQueryParams, body PostQueryJSONRequestBody) (*PostQueryResponse, error) + + // PostQueryAnalyze request with any body + PostQueryAnalyzeWithBodyWithResponse(ctx context.Context, params *PostQueryAnalyzeParams, contentType string, body io.Reader) (*PostQueryAnalyzeResponse, error) + + PostQueryAnalyzeWithResponse(ctx context.Context, params *PostQueryAnalyzeParams, body PostQueryAnalyzeJSONRequestBody) (*PostQueryAnalyzeResponse, error) + + // PostQueryAst request with any body + PostQueryAstWithBodyWithResponse(ctx context.Context, params *PostQueryAstParams, contentType string, body io.Reader) (*PostQueryAstResponse, error) + + PostQueryAstWithResponse(ctx context.Context, params *PostQueryAstParams, body PostQueryAstJSONRequestBody) (*PostQueryAstResponse, error) + + // GetQuerySuggestions request + GetQuerySuggestionsWithResponse(ctx context.Context, params *GetQuerySuggestionsParams) (*GetQuerySuggestionsResponse, error) + + // GetQuerySuggestionsName request + GetQuerySuggestionsNameWithResponse(ctx context.Context, name string, params *GetQuerySuggestionsNameParams) (*GetQuerySuggestionsNameResponse, error) + + // GetReady request + GetReadyWithResponse(ctx context.Context, params *GetReadyParams) (*GetReadyResponse, error) + + // GetRemoteConnections request + GetRemoteConnectionsWithResponse(ctx context.Context, params *GetRemoteConnectionsParams) (*GetRemoteConnectionsResponse, error) + + // PostRemoteConnection request with any body + PostRemoteConnectionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader) (*PostRemoteConnectionResponse, error) + + PostRemoteConnectionWithResponse(ctx context.Context, body PostRemoteConnectionJSONRequestBody) (*PostRemoteConnectionResponse, error) + + // DeleteRemoteConnectionByID request 
+ DeleteRemoteConnectionByIDWithResponse(ctx context.Context, remoteID string, params *DeleteRemoteConnectionByIDParams) (*DeleteRemoteConnectionByIDResponse, error) + + // GetRemoteConnectionByID request + GetRemoteConnectionByIDWithResponse(ctx context.Context, remoteID string, params *GetRemoteConnectionByIDParams) (*GetRemoteConnectionByIDResponse, error) + + // PatchRemoteConnectionByID request with any body + PatchRemoteConnectionByIDWithBodyWithResponse(ctx context.Context, remoteID string, params *PatchRemoteConnectionByIDParams, contentType string, body io.Reader) (*PatchRemoteConnectionByIDResponse, error) + + PatchRemoteConnectionByIDWithResponse(ctx context.Context, remoteID string, params *PatchRemoteConnectionByIDParams, body PatchRemoteConnectionByIDJSONRequestBody) (*PatchRemoteConnectionByIDResponse, error) + + // GetReplications request + GetReplicationsWithResponse(ctx context.Context, params *GetReplicationsParams) (*GetReplicationsResponse, error) + + // PostReplication request with any body + PostReplicationWithBodyWithResponse(ctx context.Context, params *PostReplicationParams, contentType string, body io.Reader) (*PostReplicationResponse, error) + + PostReplicationWithResponse(ctx context.Context, params *PostReplicationParams, body PostReplicationJSONRequestBody) (*PostReplicationResponse, error) + + // DeleteReplicationByID request + DeleteReplicationByIDWithResponse(ctx context.Context, replicationID string, params *DeleteReplicationByIDParams) (*DeleteReplicationByIDResponse, error) + + // GetReplicationByID request + GetReplicationByIDWithResponse(ctx context.Context, replicationID string, params *GetReplicationByIDParams) (*GetReplicationByIDResponse, error) + + // PatchReplicationByID request with any body + PatchReplicationByIDWithBodyWithResponse(ctx context.Context, replicationID string, params *PatchReplicationByIDParams, contentType string, body io.Reader) (*PatchReplicationByIDResponse, error) + + PatchReplicationByIDWithResponse(ctx context.Context, replicationID string, params *PatchReplicationByIDParams, body PatchReplicationByIDJSONRequestBody) (*PatchReplicationByIDResponse, error) + + // PostValidateReplicationByID request + PostValidateReplicationByIDWithResponse(ctx context.Context, replicationID string, params *PostValidateReplicationByIDParams) (*PostValidateReplicationByIDResponse, error) + + // GetResources request + GetResourcesWithResponse(ctx context.Context, params *GetResourcesParams) (*GetResourcesResponse, error) + + // PostRestoreBucketID request with any body + PostRestoreBucketIDWithBodyWithResponse(ctx context.Context, bucketID string, params *PostRestoreBucketIDParams, contentType string, body io.Reader) (*PostRestoreBucketIDResponse, error) + + // PostRestoreBucketMetadata request with any body + PostRestoreBucketMetadataWithBodyWithResponse(ctx context.Context, params *PostRestoreBucketMetadataParams, contentType string, body io.Reader) (*PostRestoreBucketMetadataResponse, error) + + PostRestoreBucketMetadataWithResponse(ctx context.Context, params *PostRestoreBucketMetadataParams, body PostRestoreBucketMetadataJSONRequestBody) (*PostRestoreBucketMetadataResponse, error) + + // PostRestoreKV request with any body + PostRestoreKVWithBodyWithResponse(ctx context.Context, params *PostRestoreKVParams, contentType string, body io.Reader) (*PostRestoreKVResponse, error) + + // PostRestoreShardId request with any body + PostRestoreShardIdWithBodyWithResponse(ctx context.Context, shardID string, params *PostRestoreShardIdParams, 
contentType string, body io.Reader) (*PostRestoreShardIdResponse, error) + + // PostRestoreSQL request with any body + PostRestoreSQLWithBodyWithResponse(ctx context.Context, params *PostRestoreSQLParams, contentType string, body io.Reader) (*PostRestoreSQLResponse, error) + + // GetScrapers request + GetScrapersWithResponse(ctx context.Context, params *GetScrapersParams) (*GetScrapersResponse, error) + + // PostScrapers request with any body + PostScrapersWithBodyWithResponse(ctx context.Context, params *PostScrapersParams, contentType string, body io.Reader) (*PostScrapersResponse, error) + + PostScrapersWithResponse(ctx context.Context, params *PostScrapersParams, body PostScrapersJSONRequestBody) (*PostScrapersResponse, error) + + // DeleteScrapersID request + DeleteScrapersIDWithResponse(ctx context.Context, scraperTargetID string, params *DeleteScrapersIDParams) (*DeleteScrapersIDResponse, error) + + // GetScrapersID request + GetScrapersIDWithResponse(ctx context.Context, scraperTargetID string, params *GetScrapersIDParams) (*GetScrapersIDResponse, error) + + // PatchScrapersID request with any body + PatchScrapersIDWithBodyWithResponse(ctx context.Context, scraperTargetID string, params *PatchScrapersIDParams, contentType string, body io.Reader) (*PatchScrapersIDResponse, error) + + PatchScrapersIDWithResponse(ctx context.Context, scraperTargetID string, params *PatchScrapersIDParams, body PatchScrapersIDJSONRequestBody) (*PatchScrapersIDResponse, error) + + // GetScrapersIDLabels request + GetScrapersIDLabelsWithResponse(ctx context.Context, scraperTargetID string, params *GetScrapersIDLabelsParams) (*GetScrapersIDLabelsResponse, error) + + // PostScrapersIDLabels request with any body + PostScrapersIDLabelsWithBodyWithResponse(ctx context.Context, scraperTargetID string, params *PostScrapersIDLabelsParams, contentType string, body io.Reader) (*PostScrapersIDLabelsResponse, error) + + PostScrapersIDLabelsWithResponse(ctx context.Context, scraperTargetID string, params *PostScrapersIDLabelsParams, body PostScrapersIDLabelsJSONRequestBody) (*PostScrapersIDLabelsResponse, error) + + // DeleteScrapersIDLabelsID request + DeleteScrapersIDLabelsIDWithResponse(ctx context.Context, scraperTargetID string, labelID string, params *DeleteScrapersIDLabelsIDParams) (*DeleteScrapersIDLabelsIDResponse, error) + + // GetScrapersIDMembers request + GetScrapersIDMembersWithResponse(ctx context.Context, scraperTargetID string, params *GetScrapersIDMembersParams) (*GetScrapersIDMembersResponse, error) + + // PostScrapersIDMembers request with any body + PostScrapersIDMembersWithBodyWithResponse(ctx context.Context, scraperTargetID string, params *PostScrapersIDMembersParams, contentType string, body io.Reader) (*PostScrapersIDMembersResponse, error) + + PostScrapersIDMembersWithResponse(ctx context.Context, scraperTargetID string, params *PostScrapersIDMembersParams, body PostScrapersIDMembersJSONRequestBody) (*PostScrapersIDMembersResponse, error) + + // DeleteScrapersIDMembersID request + DeleteScrapersIDMembersIDWithResponse(ctx context.Context, scraperTargetID string, userID string, params *DeleteScrapersIDMembersIDParams) (*DeleteScrapersIDMembersIDResponse, error) + + // GetScrapersIDOwners request + GetScrapersIDOwnersWithResponse(ctx context.Context, scraperTargetID string, params *GetScrapersIDOwnersParams) (*GetScrapersIDOwnersResponse, error) + + // PostScrapersIDOwners request with any body + PostScrapersIDOwnersWithBodyWithResponse(ctx context.Context, scraperTargetID string, params 
*PostScrapersIDOwnersParams, contentType string, body io.Reader) (*PostScrapersIDOwnersResponse, error) + + PostScrapersIDOwnersWithResponse(ctx context.Context, scraperTargetID string, params *PostScrapersIDOwnersParams, body PostScrapersIDOwnersJSONRequestBody) (*PostScrapersIDOwnersResponse, error) + + // DeleteScrapersIDOwnersID request + DeleteScrapersIDOwnersIDWithResponse(ctx context.Context, scraperTargetID string, userID string, params *DeleteScrapersIDOwnersIDParams) (*DeleteScrapersIDOwnersIDResponse, error) + + // GetSetup request + GetSetupWithResponse(ctx context.Context, params *GetSetupParams) (*GetSetupResponse, error) + + // PostSetup request with any body + PostSetupWithBodyWithResponse(ctx context.Context, params *PostSetupParams, contentType string, body io.Reader) (*PostSetupResponse, error) + + PostSetupWithResponse(ctx context.Context, params *PostSetupParams, body PostSetupJSONRequestBody) (*PostSetupResponse, error) + + // PostSignin request + PostSigninWithResponse(ctx context.Context, params *PostSigninParams) (*PostSigninResponse, error) + + // PostSignout request + PostSignoutWithResponse(ctx context.Context, params *PostSignoutParams) (*PostSignoutResponse, error) + + // GetSources request + GetSourcesWithResponse(ctx context.Context, params *GetSourcesParams) (*GetSourcesResponse, error) + + // PostSources request with any body + PostSourcesWithBodyWithResponse(ctx context.Context, params *PostSourcesParams, contentType string, body io.Reader) (*PostSourcesResponse, error) + + PostSourcesWithResponse(ctx context.Context, params *PostSourcesParams, body PostSourcesJSONRequestBody) (*PostSourcesResponse, error) + + // DeleteSourcesID request + DeleteSourcesIDWithResponse(ctx context.Context, sourceID string, params *DeleteSourcesIDParams) (*DeleteSourcesIDResponse, error) + + // GetSourcesID request + GetSourcesIDWithResponse(ctx context.Context, sourceID string, params *GetSourcesIDParams) (*GetSourcesIDResponse, error) + + // PatchSourcesID request with any body + PatchSourcesIDWithBodyWithResponse(ctx context.Context, sourceID string, params *PatchSourcesIDParams, contentType string, body io.Reader) (*PatchSourcesIDResponse, error) + + PatchSourcesIDWithResponse(ctx context.Context, sourceID string, params *PatchSourcesIDParams, body PatchSourcesIDJSONRequestBody) (*PatchSourcesIDResponse, error) + + // GetSourcesIDBuckets request + GetSourcesIDBucketsWithResponse(ctx context.Context, sourceID string, params *GetSourcesIDBucketsParams) (*GetSourcesIDBucketsResponse, error) + + // GetSourcesIDHealth request + GetSourcesIDHealthWithResponse(ctx context.Context, sourceID string, params *GetSourcesIDHealthParams) (*GetSourcesIDHealthResponse, error) + + // ListStacks request + ListStacksWithResponse(ctx context.Context, params *ListStacksParams) (*ListStacksResponse, error) + + // CreateStack request with any body + CreateStackWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader) (*CreateStackResponse, error) + + CreateStackWithResponse(ctx context.Context, body CreateStackJSONRequestBody) (*CreateStackResponse, error) + + // DeleteStack request + DeleteStackWithResponse(ctx context.Context, stackId string, params *DeleteStackParams) (*DeleteStackResponse, error) + + // ReadStack request + ReadStackWithResponse(ctx context.Context, stackId string) (*ReadStackResponse, error) + + // UpdateStack request with any body + UpdateStackWithBodyWithResponse(ctx context.Context, stackId string, contentType string, body io.Reader) 
(*UpdateStackResponse, error) + + UpdateStackWithResponse(ctx context.Context, stackId string, body UpdateStackJSONRequestBody) (*UpdateStackResponse, error) + + // UninstallStack request + UninstallStackWithResponse(ctx context.Context, stackId string) (*UninstallStackResponse, error) + + // GetTasks request + GetTasksWithResponse(ctx context.Context, params *GetTasksParams) (*GetTasksResponse, error) + + // PostTasks request with any body + PostTasksWithBodyWithResponse(ctx context.Context, params *PostTasksParams, contentType string, body io.Reader) (*PostTasksResponse, error) + + PostTasksWithResponse(ctx context.Context, params *PostTasksParams, body PostTasksJSONRequestBody) (*PostTasksResponse, error) + + // DeleteTasksID request + DeleteTasksIDWithResponse(ctx context.Context, taskID string, params *DeleteTasksIDParams) (*DeleteTasksIDResponse, error) + + // GetTasksID request + GetTasksIDWithResponse(ctx context.Context, taskID string, params *GetTasksIDParams) (*GetTasksIDResponse, error) + + // PatchTasksID request with any body + PatchTasksIDWithBodyWithResponse(ctx context.Context, taskID string, params *PatchTasksIDParams, contentType string, body io.Reader) (*PatchTasksIDResponse, error) + + PatchTasksIDWithResponse(ctx context.Context, taskID string, params *PatchTasksIDParams, body PatchTasksIDJSONRequestBody) (*PatchTasksIDResponse, error) + + // GetTasksIDLabels request + GetTasksIDLabelsWithResponse(ctx context.Context, taskID string, params *GetTasksIDLabelsParams) (*GetTasksIDLabelsResponse, error) + + // PostTasksIDLabels request with any body + PostTasksIDLabelsWithBodyWithResponse(ctx context.Context, taskID string, params *PostTasksIDLabelsParams, contentType string, body io.Reader) (*PostTasksIDLabelsResponse, error) + + PostTasksIDLabelsWithResponse(ctx context.Context, taskID string, params *PostTasksIDLabelsParams, body PostTasksIDLabelsJSONRequestBody) (*PostTasksIDLabelsResponse, error) + + // DeleteTasksIDLabelsID request + DeleteTasksIDLabelsIDWithResponse(ctx context.Context, taskID string, labelID string, params *DeleteTasksIDLabelsIDParams) (*DeleteTasksIDLabelsIDResponse, error) + + // GetTasksIDLogs request + GetTasksIDLogsWithResponse(ctx context.Context, taskID string, params *GetTasksIDLogsParams) (*GetTasksIDLogsResponse, error) + + // GetTasksIDMembers request + GetTasksIDMembersWithResponse(ctx context.Context, taskID string, params *GetTasksIDMembersParams) (*GetTasksIDMembersResponse, error) + + // PostTasksIDMembers request with any body + PostTasksIDMembersWithBodyWithResponse(ctx context.Context, taskID string, params *PostTasksIDMembersParams, contentType string, body io.Reader) (*PostTasksIDMembersResponse, error) + + PostTasksIDMembersWithResponse(ctx context.Context, taskID string, params *PostTasksIDMembersParams, body PostTasksIDMembersJSONRequestBody) (*PostTasksIDMembersResponse, error) + + // DeleteTasksIDMembersID request + DeleteTasksIDMembersIDWithResponse(ctx context.Context, taskID string, userID string, params *DeleteTasksIDMembersIDParams) (*DeleteTasksIDMembersIDResponse, error) + + // GetTasksIDOwners request + GetTasksIDOwnersWithResponse(ctx context.Context, taskID string, params *GetTasksIDOwnersParams) (*GetTasksIDOwnersResponse, error) + + // PostTasksIDOwners request with any body + PostTasksIDOwnersWithBodyWithResponse(ctx context.Context, taskID string, params *PostTasksIDOwnersParams, contentType string, body io.Reader) (*PostTasksIDOwnersResponse, error) + + PostTasksIDOwnersWithResponse(ctx context.Context, 
taskID string, params *PostTasksIDOwnersParams, body PostTasksIDOwnersJSONRequestBody) (*PostTasksIDOwnersResponse, error) + + // DeleteTasksIDOwnersID request + DeleteTasksIDOwnersIDWithResponse(ctx context.Context, taskID string, userID string, params *DeleteTasksIDOwnersIDParams) (*DeleteTasksIDOwnersIDResponse, error) + + // GetTasksIDRuns request + GetTasksIDRunsWithResponse(ctx context.Context, taskID string, params *GetTasksIDRunsParams) (*GetTasksIDRunsResponse, error) + + // PostTasksIDRuns request with any body + PostTasksIDRunsWithBodyWithResponse(ctx context.Context, taskID string, params *PostTasksIDRunsParams, contentType string, body io.Reader) (*PostTasksIDRunsResponse, error) + + PostTasksIDRunsWithResponse(ctx context.Context, taskID string, params *PostTasksIDRunsParams, body PostTasksIDRunsJSONRequestBody) (*PostTasksIDRunsResponse, error) + + // DeleteTasksIDRunsID request + DeleteTasksIDRunsIDWithResponse(ctx context.Context, taskID string, runID string, params *DeleteTasksIDRunsIDParams) (*DeleteTasksIDRunsIDResponse, error) + + // GetTasksIDRunsID request + GetTasksIDRunsIDWithResponse(ctx context.Context, taskID string, runID string, params *GetTasksIDRunsIDParams) (*GetTasksIDRunsIDResponse, error) + + // GetTasksIDRunsIDLogs request + GetTasksIDRunsIDLogsWithResponse(ctx context.Context, taskID string, runID string, params *GetTasksIDRunsIDLogsParams) (*GetTasksIDRunsIDLogsResponse, error) + + // PostTasksIDRunsIDRetry request with any body + PostTasksIDRunsIDRetryWithBodyWithResponse(ctx context.Context, taskID string, runID string, params *PostTasksIDRunsIDRetryParams, contentType string, body io.Reader) (*PostTasksIDRunsIDRetryResponse, error) + + // GetTelegrafPlugins request + GetTelegrafPluginsWithResponse(ctx context.Context, params *GetTelegrafPluginsParams) (*GetTelegrafPluginsResponse, error) + + // GetTelegrafs request + GetTelegrafsWithResponse(ctx context.Context, params *GetTelegrafsParams) (*GetTelegrafsResponse, error) + + // PostTelegrafs request with any body + PostTelegrafsWithBodyWithResponse(ctx context.Context, params *PostTelegrafsParams, contentType string, body io.Reader) (*PostTelegrafsResponse, error) + + PostTelegrafsWithResponse(ctx context.Context, params *PostTelegrafsParams, body PostTelegrafsJSONRequestBody) (*PostTelegrafsResponse, error) + + // DeleteTelegrafsID request + DeleteTelegrafsIDWithResponse(ctx context.Context, telegrafID string, params *DeleteTelegrafsIDParams) (*DeleteTelegrafsIDResponse, error) + + // GetTelegrafsID request + GetTelegrafsIDWithResponse(ctx context.Context, telegrafID string, params *GetTelegrafsIDParams) (*GetTelegrafsIDResponse, error) + + // PutTelegrafsID request with any body + PutTelegrafsIDWithBodyWithResponse(ctx context.Context, telegrafID string, params *PutTelegrafsIDParams, contentType string, body io.Reader) (*PutTelegrafsIDResponse, error) + + PutTelegrafsIDWithResponse(ctx context.Context, telegrafID string, params *PutTelegrafsIDParams, body PutTelegrafsIDJSONRequestBody) (*PutTelegrafsIDResponse, error) + + // GetTelegrafsIDLabels request + GetTelegrafsIDLabelsWithResponse(ctx context.Context, telegrafID string, params *GetTelegrafsIDLabelsParams) (*GetTelegrafsIDLabelsResponse, error) + + // PostTelegrafsIDLabels request with any body + PostTelegrafsIDLabelsWithBodyWithResponse(ctx context.Context, telegrafID string, params *PostTelegrafsIDLabelsParams, contentType string, body io.Reader) (*PostTelegrafsIDLabelsResponse, error) + + PostTelegrafsIDLabelsWithResponse(ctx 
context.Context, telegrafID string, params *PostTelegrafsIDLabelsParams, body PostTelegrafsIDLabelsJSONRequestBody) (*PostTelegrafsIDLabelsResponse, error) + + // DeleteTelegrafsIDLabelsID request + DeleteTelegrafsIDLabelsIDWithResponse(ctx context.Context, telegrafID string, labelID string, params *DeleteTelegrafsIDLabelsIDParams) (*DeleteTelegrafsIDLabelsIDResponse, error) + + // GetTelegrafsIDMembers request + GetTelegrafsIDMembersWithResponse(ctx context.Context, telegrafID string, params *GetTelegrafsIDMembersParams) (*GetTelegrafsIDMembersResponse, error) + + // PostTelegrafsIDMembers request with any body + PostTelegrafsIDMembersWithBodyWithResponse(ctx context.Context, telegrafID string, params *PostTelegrafsIDMembersParams, contentType string, body io.Reader) (*PostTelegrafsIDMembersResponse, error) + + PostTelegrafsIDMembersWithResponse(ctx context.Context, telegrafID string, params *PostTelegrafsIDMembersParams, body PostTelegrafsIDMembersJSONRequestBody) (*PostTelegrafsIDMembersResponse, error) + + // DeleteTelegrafsIDMembersID request + DeleteTelegrafsIDMembersIDWithResponse(ctx context.Context, telegrafID string, userID string, params *DeleteTelegrafsIDMembersIDParams) (*DeleteTelegrafsIDMembersIDResponse, error) + + // GetTelegrafsIDOwners request + GetTelegrafsIDOwnersWithResponse(ctx context.Context, telegrafID string, params *GetTelegrafsIDOwnersParams) (*GetTelegrafsIDOwnersResponse, error) + + // PostTelegrafsIDOwners request with any body + PostTelegrafsIDOwnersWithBodyWithResponse(ctx context.Context, telegrafID string, params *PostTelegrafsIDOwnersParams, contentType string, body io.Reader) (*PostTelegrafsIDOwnersResponse, error) + + PostTelegrafsIDOwnersWithResponse(ctx context.Context, telegrafID string, params *PostTelegrafsIDOwnersParams, body PostTelegrafsIDOwnersJSONRequestBody) (*PostTelegrafsIDOwnersResponse, error) + + // DeleteTelegrafsIDOwnersID request + DeleteTelegrafsIDOwnersIDWithResponse(ctx context.Context, telegrafID string, userID string, params *DeleteTelegrafsIDOwnersIDParams) (*DeleteTelegrafsIDOwnersIDResponse, error) + + // ApplyTemplate request with any body + ApplyTemplateWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader) (*ApplyTemplateResponse, error) + + ApplyTemplateWithResponse(ctx context.Context, body ApplyTemplateJSONRequestBody) (*ApplyTemplateResponse, error) + + // ExportTemplate request with any body + ExportTemplateWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader) (*ExportTemplateResponse, error) + + ExportTemplateWithResponse(ctx context.Context, body ExportTemplateJSONRequestBody) (*ExportTemplateResponse, error) + + // GetUsers request + GetUsersWithResponse(ctx context.Context, params *GetUsersParams) (*GetUsersResponse, error) + + // PostUsers request with any body + PostUsersWithBodyWithResponse(ctx context.Context, params *PostUsersParams, contentType string, body io.Reader) (*PostUsersResponse, error) + + PostUsersWithResponse(ctx context.Context, params *PostUsersParams, body PostUsersJSONRequestBody) (*PostUsersResponse, error) + + // DeleteUsersID request + DeleteUsersIDWithResponse(ctx context.Context, userID string, params *DeleteUsersIDParams) (*DeleteUsersIDResponse, error) + + // GetUsersID request + GetUsersIDWithResponse(ctx context.Context, userID string, params *GetUsersIDParams) (*GetUsersIDResponse, error) + + // PatchUsersID request with any body + PatchUsersIDWithBodyWithResponse(ctx context.Context, userID string, params *PatchUsersIDParams, 
contentType string, body io.Reader) (*PatchUsersIDResponse, error) + + PatchUsersIDWithResponse(ctx context.Context, userID string, params *PatchUsersIDParams, body PatchUsersIDJSONRequestBody) (*PatchUsersIDResponse, error) + + // PostUsersIDPassword request with any body + PostUsersIDPasswordWithBodyWithResponse(ctx context.Context, userID string, params *PostUsersIDPasswordParams, contentType string, body io.Reader) (*PostUsersIDPasswordResponse, error) + + PostUsersIDPasswordWithResponse(ctx context.Context, userID string, params *PostUsersIDPasswordParams, body PostUsersIDPasswordJSONRequestBody) (*PostUsersIDPasswordResponse, error) + + // GetVariables request + GetVariablesWithResponse(ctx context.Context, params *GetVariablesParams) (*GetVariablesResponse, error) + + // PostVariables request with any body + PostVariablesWithBodyWithResponse(ctx context.Context, params *PostVariablesParams, contentType string, body io.Reader) (*PostVariablesResponse, error) + + PostVariablesWithResponse(ctx context.Context, params *PostVariablesParams, body PostVariablesJSONRequestBody) (*PostVariablesResponse, error) + + // DeleteVariablesID request + DeleteVariablesIDWithResponse(ctx context.Context, variableID string, params *DeleteVariablesIDParams) (*DeleteVariablesIDResponse, error) + + // GetVariablesID request + GetVariablesIDWithResponse(ctx context.Context, variableID string, params *GetVariablesIDParams) (*GetVariablesIDResponse, error) + + // PatchVariablesID request with any body + PatchVariablesIDWithBodyWithResponse(ctx context.Context, variableID string, params *PatchVariablesIDParams, contentType string, body io.Reader) (*PatchVariablesIDResponse, error) + + PatchVariablesIDWithResponse(ctx context.Context, variableID string, params *PatchVariablesIDParams, body PatchVariablesIDJSONRequestBody) (*PatchVariablesIDResponse, error) + + // PutVariablesID request with any body + PutVariablesIDWithBodyWithResponse(ctx context.Context, variableID string, params *PutVariablesIDParams, contentType string, body io.Reader) (*PutVariablesIDResponse, error) + + PutVariablesIDWithResponse(ctx context.Context, variableID string, params *PutVariablesIDParams, body PutVariablesIDJSONRequestBody) (*PutVariablesIDResponse, error) + + // GetVariablesIDLabels request + GetVariablesIDLabelsWithResponse(ctx context.Context, variableID string, params *GetVariablesIDLabelsParams) (*GetVariablesIDLabelsResponse, error) + + // PostVariablesIDLabels request with any body + PostVariablesIDLabelsWithBodyWithResponse(ctx context.Context, variableID string, params *PostVariablesIDLabelsParams, contentType string, body io.Reader) (*PostVariablesIDLabelsResponse, error) + + PostVariablesIDLabelsWithResponse(ctx context.Context, variableID string, params *PostVariablesIDLabelsParams, body PostVariablesIDLabelsJSONRequestBody) (*PostVariablesIDLabelsResponse, error) + + // DeleteVariablesIDLabelsID request + DeleteVariablesIDLabelsIDWithResponse(ctx context.Context, variableID string, labelID string, params *DeleteVariablesIDLabelsIDParams) (*DeleteVariablesIDLabelsIDResponse, error) + + // PostWrite request with any body + PostWriteWithBodyWithResponse(ctx context.Context, params *PostWriteParams, contentType string, body io.Reader) (*PostWriteResponse, error) +} + +type GetRoutesResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Routes +} + +// Status returns HTTPResponse.Status +func (r GetRoutesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + 
return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetRoutesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetAuthorizationsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Authorizations + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetAuthorizationsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetAuthorizationsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostAuthorizationsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *Authorization + JSON400 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostAuthorizationsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostAuthorizationsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteAuthorizationsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteAuthorizationsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteAuthorizationsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetAuthorizationsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Authorization + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetAuthorizationsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetAuthorizationsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchAuthorizationsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Authorization + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchAuthorizationsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchAuthorizationsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetBackupKVResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetBackupKVResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetBackupKVResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetBackupMetadataResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetBackupMetadataResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return 
http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetBackupMetadataResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetBackupShardIdResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetBackupShardIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetBackupShardIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetBucketsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Buckets + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetBucketsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetBucketsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostBucketsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *Bucket + JSON422 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostBucketsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostBucketsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteBucketsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteBucketsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteBucketsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetBucketsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Bucket + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetBucketsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetBucketsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchBucketsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Bucket + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchBucketsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchBucketsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetBucketsIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *LabelsResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetBucketsIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns 
HTTPResponse.StatusCode +func (r GetBucketsIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostBucketsIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *LabelResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostBucketsIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostBucketsIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteBucketsIDLabelsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteBucketsIDLabelsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteBucketsIDLabelsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetBucketsIDMembersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ResourceMembers + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetBucketsIDMembersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetBucketsIDMembersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostBucketsIDMembersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *ResourceMember + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostBucketsIDMembersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostBucketsIDMembersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteBucketsIDMembersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteBucketsIDMembersIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteBucketsIDMembersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetBucketsIDOwnersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ResourceOwners + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetBucketsIDOwnersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetBucketsIDOwnersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostBucketsIDOwnersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *ResourceOwner + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostBucketsIDOwnersResponse) Status() string { + if r.HTTPResponse != nil { + return 
r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostBucketsIDOwnersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteBucketsIDOwnersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteBucketsIDOwnersIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteBucketsIDOwnersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetChecksResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Checks + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetChecksResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetChecksResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateCheckResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *Check + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r CreateCheckResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateCheckResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteChecksIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteChecksIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteChecksIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetChecksIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Check + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetChecksIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetChecksIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchChecksIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Check + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchChecksIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchChecksIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PutChecksIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Check + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PutChecksIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns 
HTTPResponse.StatusCode +func (r PutChecksIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetChecksIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *LabelsResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetChecksIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetChecksIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostChecksIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *LabelResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostChecksIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostChecksIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteChecksIDLabelsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteChecksIDLabelsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteChecksIDLabelsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetChecksIDQueryResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *FluxResponse + JSON400 *Error + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetChecksIDQueryResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetChecksIDQueryResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetConfigResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Config + JSON401 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetConfigResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetConfigResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetDashboardsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Dashboards + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetDashboardsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetDashboardsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostDashboardsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *interface{} + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostDashboardsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) 
+} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostDashboardsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteDashboardsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteDashboardsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteDashboardsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetDashboardsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *interface{} + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetDashboardsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetDashboardsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchDashboardsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Dashboard + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchDashboardsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchDashboardsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostDashboardsIDCellsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *Cell + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostDashboardsIDCellsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostDashboardsIDCellsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PutDashboardsIDCellsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *Dashboard + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PutDashboardsIDCellsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PutDashboardsIDCellsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteDashboardsIDCellsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteDashboardsIDCellsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteDashboardsIDCellsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchDashboardsIDCellsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Cell + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r 
PatchDashboardsIDCellsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchDashboardsIDCellsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetDashboardsIDCellsIDViewResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *View + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetDashboardsIDCellsIDViewResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetDashboardsIDCellsIDViewResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchDashboardsIDCellsIDViewResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *View + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchDashboardsIDCellsIDViewResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchDashboardsIDCellsIDViewResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetDashboardsIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *LabelsResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetDashboardsIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetDashboardsIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostDashboardsIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *LabelResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostDashboardsIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostDashboardsIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteDashboardsIDLabelsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteDashboardsIDLabelsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteDashboardsIDLabelsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetDashboardsIDMembersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ResourceMembers + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetDashboardsIDMembersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetDashboardsIDMembersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + 
+type PostDashboardsIDMembersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *ResourceMember + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostDashboardsIDMembersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostDashboardsIDMembersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteDashboardsIDMembersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteDashboardsIDMembersIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteDashboardsIDMembersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetDashboardsIDOwnersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ResourceOwners + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetDashboardsIDOwnersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetDashboardsIDOwnersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostDashboardsIDOwnersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *ResourceOwner + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostDashboardsIDOwnersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostDashboardsIDOwnersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteDashboardsIDOwnersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteDashboardsIDOwnersIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteDashboardsIDOwnersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetDBRPsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *DBRPs + JSON400 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetDBRPsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetDBRPsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostDBRPResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *DBRP + JSON400 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostDBRPResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostDBRPResponse) StatusCode() int { + if r.HTTPResponse != nil { + return 
r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteDBRPIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON400 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteDBRPIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteDBRPIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetDBRPsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *DBRPGet + JSON400 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetDBRPsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetDBRPsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchDBRPIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *DBRPGet + JSON400 *Error + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchDBRPIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchDBRPIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostDeleteResponse struct { + Body []byte + HTTPResponse *http.Response + JSON400 *Error + JSON403 *Error + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostDeleteResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostDeleteResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFlagsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Flags + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetFlagsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFlagsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetHealthResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *HealthCheck + JSON503 *HealthCheck + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetHealthResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetHealthResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *LabelsResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + 
+type PostLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *LabelResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteLabelsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteLabelsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteLabelsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetLabelsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *LabelResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetLabelsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetLabelsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchLabelsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *LabelResponse + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchLabelsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchLabelsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetLegacyAuthorizationsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Authorizations + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetLegacyAuthorizationsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetLegacyAuthorizationsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostLegacyAuthorizationsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *Authorization + JSON400 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostLegacyAuthorizationsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostLegacyAuthorizationsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteLegacyAuthorizationsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteLegacyAuthorizationsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteLegacyAuthorizationsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return 
r.HTTPResponse.StatusCode + } + return 0 +} + +type GetLegacyAuthorizationsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Authorization + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetLegacyAuthorizationsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetLegacyAuthorizationsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchLegacyAuthorizationsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Authorization + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchLegacyAuthorizationsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchLegacyAuthorizationsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostLegacyAuthorizationsIDPasswordResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostLegacyAuthorizationsIDPasswordResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostLegacyAuthorizationsIDPasswordResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetMeResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *UserResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetMeResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetMeResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PutMePasswordResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PutMePasswordResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PutMePasswordResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetMetricsResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetMetricsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetMetricsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetNotificationEndpointsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *NotificationEndpoints + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetNotificationEndpointsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetNotificationEndpointsResponse) StatusCode() int { + if r.HTTPResponse 
!= nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateNotificationEndpointResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *NotificationEndpoint + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r CreateNotificationEndpointResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateNotificationEndpointResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteNotificationEndpointsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteNotificationEndpointsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteNotificationEndpointsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetNotificationEndpointsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *NotificationEndpoint + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetNotificationEndpointsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetNotificationEndpointsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchNotificationEndpointsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *NotificationEndpoint + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchNotificationEndpointsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchNotificationEndpointsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PutNotificationEndpointsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *NotificationEndpoint + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PutNotificationEndpointsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PutNotificationEndpointsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetNotificationEndpointsIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *LabelsResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetNotificationEndpointsIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetNotificationEndpointsIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostNotificationEndpointIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *LabelResponse + JSONDefault *Error +} + +// Status 
returns HTTPResponse.Status +func (r PostNotificationEndpointIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostNotificationEndpointIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteNotificationEndpointsIDLabelsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteNotificationEndpointsIDLabelsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteNotificationEndpointsIDLabelsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetNotificationRulesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *NotificationRules + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetNotificationRulesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetNotificationRulesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateNotificationRuleResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *NotificationRule + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r CreateNotificationRuleResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateNotificationRuleResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteNotificationRulesIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteNotificationRulesIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteNotificationRulesIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetNotificationRulesIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *NotificationRule + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetNotificationRulesIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetNotificationRulesIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchNotificationRulesIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *NotificationRule + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchNotificationRulesIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchNotificationRulesIDResponse) StatusCode() int { + if 
r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PutNotificationRulesIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *NotificationRule + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PutNotificationRulesIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PutNotificationRulesIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetNotificationRulesIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *LabelsResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetNotificationRulesIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetNotificationRulesIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostNotificationRuleIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *LabelResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostNotificationRuleIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostNotificationRuleIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteNotificationRulesIDLabelsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteNotificationRulesIDLabelsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteNotificationRulesIDLabelsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetNotificationRulesIDQueryResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *FluxResponse + JSON400 *Error + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetNotificationRulesIDQueryResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetNotificationRulesIDQueryResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetOrgsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Organizations + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetOrgsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetOrgsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostOrgsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *Organization + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostOrgsResponse) Status() string { + if 
r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostOrgsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteOrgsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteOrgsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteOrgsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetOrgsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Organization + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetOrgsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetOrgsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchOrgsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Organization + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchOrgsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchOrgsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetOrgsIDMembersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ResourceMembers + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetOrgsIDMembersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetOrgsIDMembersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostOrgsIDMembersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *ResourceMember + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostOrgsIDMembersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostOrgsIDMembersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteOrgsIDMembersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteOrgsIDMembersIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteOrgsIDMembersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetOrgsIDOwnersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ResourceOwners + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetOrgsIDOwnersResponse) Status() string { + if r.HTTPResponse != nil { + 
return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetOrgsIDOwnersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostOrgsIDOwnersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *ResourceOwner + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostOrgsIDOwnersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostOrgsIDOwnersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteOrgsIDOwnersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteOrgsIDOwnersIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteOrgsIDOwnersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetOrgsIDSecretsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecretKeysResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetOrgsIDSecretsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetOrgsIDSecretsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchOrgsIDSecretsResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchOrgsIDSecretsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchOrgsIDSecretsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostOrgsIDSecretsResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostOrgsIDSecretsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostOrgsIDSecretsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteOrgsIDSecretsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteOrgsIDSecretsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteOrgsIDSecretsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetPingResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r GetPingResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode 
+func (r GetPingResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type HeadPingResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r HeadPingResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r HeadPingResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostQueryResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostQueryResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostQueryResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostQueryAnalyzeResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *AnalyzeQueryResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostQueryAnalyzeResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostQueryAnalyzeResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostQueryAstResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ASTResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostQueryAstResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostQueryAstResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetQuerySuggestionsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *FluxSuggestions + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetQuerySuggestionsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetQuerySuggestionsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetQuerySuggestionsNameResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *FluxSuggestion + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetQuerySuggestionsNameResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetQuerySuggestionsNameResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetReadyResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Ready + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetReadyResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetReadyResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } 
+ return 0 +} + +type GetRemoteConnectionsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *RemoteConnections + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetRemoteConnectionsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetRemoteConnectionsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostRemoteConnectionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *RemoteConnection + JSON400 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostRemoteConnectionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostRemoteConnectionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteRemoteConnectionByIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteRemoteConnectionByIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteRemoteConnectionByIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetRemoteConnectionByIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *RemoteConnection + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetRemoteConnectionByIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetRemoteConnectionByIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchRemoteConnectionByIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *RemoteConnection + JSON400 *Error + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchRemoteConnectionByIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchRemoteConnectionByIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetReplicationsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Replications + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetReplicationsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetReplicationsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostReplicationResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *Replication + JSON400 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostReplicationResponse) Status() string { + if r.HTTPResponse != nil { + return 
r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostReplicationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteReplicationByIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteReplicationByIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteReplicationByIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetReplicationByIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Replication + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetReplicationByIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetReplicationByIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchReplicationByIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Replication + JSON400 *Error + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchReplicationByIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchReplicationByIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostValidateReplicationByIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON400 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostValidateReplicationByIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostValidateReplicationByIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetResourcesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]string + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetResourcesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetResourcesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostRestoreBucketIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]byte + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostRestoreBucketIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostRestoreBucketIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostRestoreBucketMetadataResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *RestoredBucketMappings + JSONDefault *Error +} + +// Status returns 
HTTPResponse.Status +func (r PostRestoreBucketMetadataResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostRestoreBucketMetadataResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostRestoreKVResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // token is the root token for the instance after restore (this is overwritten during the restore) + Token *string `json:"token,omitempty"` + } + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostRestoreKVResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostRestoreKVResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostRestoreShardIdResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostRestoreShardIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostRestoreShardIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostRestoreSQLResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostRestoreSQLResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostRestoreSQLResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetScrapersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ScraperTargetResponses +} + +// Status returns HTTPResponse.Status +func (r GetScrapersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetScrapersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostScrapersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *ScraperTargetResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostScrapersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostScrapersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteScrapersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteScrapersIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteScrapersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetScrapersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 
*ScraperTargetResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetScrapersIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetScrapersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchScrapersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ScraperTargetResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchScrapersIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchScrapersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetScrapersIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *LabelsResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetScrapersIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetScrapersIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostScrapersIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *LabelResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostScrapersIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostScrapersIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteScrapersIDLabelsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteScrapersIDLabelsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteScrapersIDLabelsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetScrapersIDMembersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ResourceMembers + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetScrapersIDMembersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetScrapersIDMembersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostScrapersIDMembersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *ResourceMember + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostScrapersIDMembersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostScrapersIDMembersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type 
DeleteScrapersIDMembersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteScrapersIDMembersIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteScrapersIDMembersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetScrapersIDOwnersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ResourceOwners + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetScrapersIDOwnersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetScrapersIDOwnersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostScrapersIDOwnersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *ResourceOwner + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostScrapersIDOwnersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostScrapersIDOwnersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteScrapersIDOwnersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteScrapersIDOwnersIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteScrapersIDOwnersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetSetupResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *IsOnboarding +} + +// Status returns HTTPResponse.Status +func (r GetSetupResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetSetupResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostSetupResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *OnboardingResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostSetupResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostSetupResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostSigninResponse struct { + Body []byte + HTTPResponse *http.Response + JSON401 *Error + JSON403 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostSigninResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostSigninResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostSignoutResponse struct { + Body 
[]byte + HTTPResponse *http.Response + JSON401 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostSignoutResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostSignoutResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetSourcesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Sources + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetSourcesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetSourcesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostSourcesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *Source + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostSourcesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostSourcesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteSourcesIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteSourcesIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteSourcesIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetSourcesIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Source + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetSourcesIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetSourcesIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchSourcesIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Source + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchSourcesIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchSourcesIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetSourcesIDBucketsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Buckets + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetSourcesIDBucketsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetSourcesIDBucketsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetSourcesIDHealthResponse struct { + Body []byte + HTTPResponse *http.Response + 
JSON200 *HealthCheck + JSON503 *HealthCheck + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetSourcesIDHealthResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetSourcesIDHealthResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ListStacksResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Stacks *[]Stack `json:"stacks,omitempty"` + } + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r ListStacksResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ListStacksResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateStackResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *Stack + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r CreateStackResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateStackResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteStackResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteStackResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteStackResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ReadStackResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Stack + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r ReadStackResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ReadStackResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UpdateStackResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Stack + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r UpdateStackResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateStackResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UninstallStackResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Stack + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r UninstallStackResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UninstallStackResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetTasksResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Tasks + JSONDefault *Error +} + +// Status returns 
HTTPResponse.Status +func (r GetTasksResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetTasksResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostTasksResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *Task + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostTasksResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostTasksResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteTasksIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteTasksIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteTasksIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetTasksIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Task + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetTasksIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetTasksIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchTasksIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Task + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchTasksIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchTasksIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetTasksIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *LabelsResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetTasksIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetTasksIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostTasksIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *LabelResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostTasksIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostTasksIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteTasksIDLabelsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteTasksIDLabelsIDResponse) Status() string { + if r.HTTPResponse != nil { + return 
r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteTasksIDLabelsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetTasksIDLogsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Logs + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetTasksIDLogsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetTasksIDLogsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetTasksIDMembersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ResourceMembers + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetTasksIDMembersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetTasksIDMembersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostTasksIDMembersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *ResourceMember + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostTasksIDMembersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostTasksIDMembersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteTasksIDMembersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteTasksIDMembersIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteTasksIDMembersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetTasksIDOwnersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ResourceOwners + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetTasksIDOwnersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetTasksIDOwnersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostTasksIDOwnersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *ResourceOwner + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostTasksIDOwnersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostTasksIDOwnersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteTasksIDOwnersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteTasksIDOwnersIDResponse) Status() string { + if r.HTTPResponse != nil { + return 
r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteTasksIDOwnersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetTasksIDRunsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Runs + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetTasksIDRunsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetTasksIDRunsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostTasksIDRunsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *Run + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostTasksIDRunsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostTasksIDRunsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteTasksIDRunsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteTasksIDRunsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteTasksIDRunsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetTasksIDRunsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Run + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetTasksIDRunsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetTasksIDRunsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetTasksIDRunsIDLogsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Logs + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetTasksIDRunsIDLogsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetTasksIDRunsIDLogsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostTasksIDRunsIDRetryResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Run + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostTasksIDRunsIDRetryResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostTasksIDRunsIDRetryResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetTelegrafPluginsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *TelegrafPlugins + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetTelegrafPluginsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + 
} + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetTelegrafPluginsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetTelegrafsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Telegrafs + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetTelegrafsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetTelegrafsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostTelegrafsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *Telegraf + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostTelegrafsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostTelegrafsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteTelegrafsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteTelegrafsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteTelegrafsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetTelegrafsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Telegraf + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetTelegrafsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetTelegrafsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PutTelegrafsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Telegraf + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PutTelegrafsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PutTelegrafsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetTelegrafsIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *LabelsResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetTelegrafsIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetTelegrafsIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostTelegrafsIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *LabelResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostTelegrafsIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// 
StatusCode returns HTTPResponse.StatusCode +func (r PostTelegrafsIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteTelegrafsIDLabelsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteTelegrafsIDLabelsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteTelegrafsIDLabelsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetTelegrafsIDMembersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ResourceMembers + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetTelegrafsIDMembersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetTelegrafsIDMembersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostTelegrafsIDMembersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *ResourceMember + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostTelegrafsIDMembersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostTelegrafsIDMembersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteTelegrafsIDMembersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteTelegrafsIDMembersIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteTelegrafsIDMembersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetTelegrafsIDOwnersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ResourceOwners + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetTelegrafsIDOwnersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetTelegrafsIDOwnersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostTelegrafsIDOwnersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *ResourceOwner + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostTelegrafsIDOwnersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostTelegrafsIDOwnersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteTelegrafsIDOwnersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteTelegrafsIDOwnersIDResponse) Status() string { + if 
r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteTelegrafsIDOwnersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ApplyTemplateResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *TemplateSummary + JSON201 *TemplateSummary + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r ApplyTemplateResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ApplyTemplateResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ExportTemplateResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Template + YAML200 *Template + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r ExportTemplateResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ExportTemplateResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetUsersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Users + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetUsersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetUsersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostUsersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *UserResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostUsersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostUsersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteUsersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteUsersIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteUsersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetUsersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *UserResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetUsersIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetUsersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchUsersIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *UserResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchUsersIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return 
http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchUsersIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostUsersIDPasswordResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostUsersIDPasswordResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostUsersIDPasswordResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetVariablesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Variables + JSON400 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetVariablesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetVariablesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostVariablesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *Variable + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostVariablesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostVariablesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteVariablesIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteVariablesIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteVariablesIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetVariablesIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Variable + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetVariablesIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetVariablesIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchVariablesIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Variable + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PatchVariablesIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchVariablesIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PutVariablesIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *Variable + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PutVariablesIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns 
HTTPResponse.StatusCode +func (r PutVariablesIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetVariablesIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *LabelsResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r GetVariablesIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetVariablesIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostVariablesIDLabelsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *LabelResponse + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostVariablesIDLabelsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostVariablesIDLabelsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteVariablesIDLabelsIDResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r DeleteVariablesIDLabelsIDResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteVariablesIDLabelsIDResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostWriteResponse struct { + Body []byte + HTTPResponse *http.Response + JSON400 *LineProtocolError + JSON401 *Error + JSON404 *Error + JSON413 *LineProtocolLengthError + JSON500 *Error + JSONDefault *Error +} + +// Status returns HTTPResponse.Status +func (r PostWriteResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostWriteResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +// GetRoutesWithResponse request returning *GetRoutesResponse +func (c *ClientWithResponses) GetRoutesWithResponse(ctx context.Context, params *GetRoutesParams) (*GetRoutesResponse, error) { + rsp, err := c.GetRoutes(ctx, params) + if err != nil { + return nil, err + } + return ParseGetRoutesResponse(rsp) +} + +// GetAuthorizationsWithResponse request returning *GetAuthorizationsResponse +func (c *ClientWithResponses) GetAuthorizationsWithResponse(ctx context.Context, params *GetAuthorizationsParams) (*GetAuthorizationsResponse, error) { + rsp, err := c.GetAuthorizations(ctx, params) + if err != nil { + return nil, err + } + return ParseGetAuthorizationsResponse(rsp) +} + +// PostAuthorizationsWithBodyWithResponse request with arbitrary body returning *PostAuthorizationsResponse +func (c *ClientWithResponses) PostAuthorizationsWithBodyWithResponse(ctx context.Context, params *PostAuthorizationsParams, contentType string, body io.Reader) (*PostAuthorizationsResponse, error) { + rsp, err := c.PostAuthorizationsWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostAuthorizationsResponse(rsp) +} + +func (c *ClientWithResponses) PostAuthorizationsWithResponse(ctx context.Context, 
params *PostAuthorizationsParams, body PostAuthorizationsJSONRequestBody) (*PostAuthorizationsResponse, error) { + rsp, err := c.PostAuthorizations(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostAuthorizationsResponse(rsp) +} + +// DeleteAuthorizationsIDWithResponse request returning *DeleteAuthorizationsIDResponse +func (c *ClientWithResponses) DeleteAuthorizationsIDWithResponse(ctx context.Context, authID string, params *DeleteAuthorizationsIDParams) (*DeleteAuthorizationsIDResponse, error) { + rsp, err := c.DeleteAuthorizationsID(ctx, authID, params) + if err != nil { + return nil, err + } + return ParseDeleteAuthorizationsIDResponse(rsp) +} + +// GetAuthorizationsIDWithResponse request returning *GetAuthorizationsIDResponse +func (c *ClientWithResponses) GetAuthorizationsIDWithResponse(ctx context.Context, authID string, params *GetAuthorizationsIDParams) (*GetAuthorizationsIDResponse, error) { + rsp, err := c.GetAuthorizationsID(ctx, authID, params) + if err != nil { + return nil, err + } + return ParseGetAuthorizationsIDResponse(rsp) +} + +// PatchAuthorizationsIDWithBodyWithResponse request with arbitrary body returning *PatchAuthorizationsIDResponse +func (c *ClientWithResponses) PatchAuthorizationsIDWithBodyWithResponse(ctx context.Context, authID string, params *PatchAuthorizationsIDParams, contentType string, body io.Reader) (*PatchAuthorizationsIDResponse, error) { + rsp, err := c.PatchAuthorizationsIDWithBody(ctx, authID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchAuthorizationsIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchAuthorizationsIDWithResponse(ctx context.Context, authID string, params *PatchAuthorizationsIDParams, body PatchAuthorizationsIDJSONRequestBody) (*PatchAuthorizationsIDResponse, error) { + rsp, err := c.PatchAuthorizationsID(ctx, authID, params, body) + if err != nil { + return nil, err + } + return ParsePatchAuthorizationsIDResponse(rsp) +} + +// GetBackupKVWithResponse request returning *GetBackupKVResponse +func (c *ClientWithResponses) GetBackupKVWithResponse(ctx context.Context, params *GetBackupKVParams) (*GetBackupKVResponse, error) { + rsp, err := c.GetBackupKV(ctx, params) + if err != nil { + return nil, err + } + return ParseGetBackupKVResponse(rsp) +} + +// GetBackupMetadataWithResponse request returning *GetBackupMetadataResponse +func (c *ClientWithResponses) GetBackupMetadataWithResponse(ctx context.Context, params *GetBackupMetadataParams) (*GetBackupMetadataResponse, error) { + rsp, err := c.GetBackupMetadata(ctx, params) + if err != nil { + return nil, err + } + return ParseGetBackupMetadataResponse(rsp) +} + +// GetBackupShardIdWithResponse request returning *GetBackupShardIdResponse +func (c *ClientWithResponses) GetBackupShardIdWithResponse(ctx context.Context, shardID int64, params *GetBackupShardIdParams) (*GetBackupShardIdResponse, error) { + rsp, err := c.GetBackupShardId(ctx, shardID, params) + if err != nil { + return nil, err + } + return ParseGetBackupShardIdResponse(rsp) +} + +// GetBucketsWithResponse request returning *GetBucketsResponse +func (c *ClientWithResponses) GetBucketsWithResponse(ctx context.Context, params *GetBucketsParams) (*GetBucketsResponse, error) { + rsp, err := c.GetBuckets(ctx, params) + if err != nil { + return nil, err + } + return ParseGetBucketsResponse(rsp) +} + +// PostBucketsWithBodyWithResponse request with arbitrary body returning *PostBucketsResponse +func (c *ClientWithResponses) 
PostBucketsWithBodyWithResponse(ctx context.Context, params *PostBucketsParams, contentType string, body io.Reader) (*PostBucketsResponse, error) { + rsp, err := c.PostBucketsWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostBucketsResponse(rsp) +} + +func (c *ClientWithResponses) PostBucketsWithResponse(ctx context.Context, params *PostBucketsParams, body PostBucketsJSONRequestBody) (*PostBucketsResponse, error) { + rsp, err := c.PostBuckets(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostBucketsResponse(rsp) +} + +// DeleteBucketsIDWithResponse request returning *DeleteBucketsIDResponse +func (c *ClientWithResponses) DeleteBucketsIDWithResponse(ctx context.Context, bucketID string, params *DeleteBucketsIDParams) (*DeleteBucketsIDResponse, error) { + rsp, err := c.DeleteBucketsID(ctx, bucketID, params) + if err != nil { + return nil, err + } + return ParseDeleteBucketsIDResponse(rsp) +} + +// GetBucketsIDWithResponse request returning *GetBucketsIDResponse +func (c *ClientWithResponses) GetBucketsIDWithResponse(ctx context.Context, bucketID string, params *GetBucketsIDParams) (*GetBucketsIDResponse, error) { + rsp, err := c.GetBucketsID(ctx, bucketID, params) + if err != nil { + return nil, err + } + return ParseGetBucketsIDResponse(rsp) +} + +// PatchBucketsIDWithBodyWithResponse request with arbitrary body returning *PatchBucketsIDResponse +func (c *ClientWithResponses) PatchBucketsIDWithBodyWithResponse(ctx context.Context, bucketID string, params *PatchBucketsIDParams, contentType string, body io.Reader) (*PatchBucketsIDResponse, error) { + rsp, err := c.PatchBucketsIDWithBody(ctx, bucketID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchBucketsIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchBucketsIDWithResponse(ctx context.Context, bucketID string, params *PatchBucketsIDParams, body PatchBucketsIDJSONRequestBody) (*PatchBucketsIDResponse, error) { + rsp, err := c.PatchBucketsID(ctx, bucketID, params, body) + if err != nil { + return nil, err + } + return ParsePatchBucketsIDResponse(rsp) +} + +// GetBucketsIDLabelsWithResponse request returning *GetBucketsIDLabelsResponse +func (c *ClientWithResponses) GetBucketsIDLabelsWithResponse(ctx context.Context, bucketID string, params *GetBucketsIDLabelsParams) (*GetBucketsIDLabelsResponse, error) { + rsp, err := c.GetBucketsIDLabels(ctx, bucketID, params) + if err != nil { + return nil, err + } + return ParseGetBucketsIDLabelsResponse(rsp) +} + +// PostBucketsIDLabelsWithBodyWithResponse request with arbitrary body returning *PostBucketsIDLabelsResponse +func (c *ClientWithResponses) PostBucketsIDLabelsWithBodyWithResponse(ctx context.Context, bucketID string, params *PostBucketsIDLabelsParams, contentType string, body io.Reader) (*PostBucketsIDLabelsResponse, error) { + rsp, err := c.PostBucketsIDLabelsWithBody(ctx, bucketID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostBucketsIDLabelsResponse(rsp) +} + +func (c *ClientWithResponses) PostBucketsIDLabelsWithResponse(ctx context.Context, bucketID string, params *PostBucketsIDLabelsParams, body PostBucketsIDLabelsJSONRequestBody) (*PostBucketsIDLabelsResponse, error) { + rsp, err := c.PostBucketsIDLabels(ctx, bucketID, params, body) + if err != nil { + return nil, err + } + return ParsePostBucketsIDLabelsResponse(rsp) +} + +// DeleteBucketsIDLabelsIDWithResponse request returning *DeleteBucketsIDLabelsIDResponse +func (c 
*ClientWithResponses) DeleteBucketsIDLabelsIDWithResponse(ctx context.Context, bucketID string, labelID string, params *DeleteBucketsIDLabelsIDParams) (*DeleteBucketsIDLabelsIDResponse, error) { + rsp, err := c.DeleteBucketsIDLabelsID(ctx, bucketID, labelID, params) + if err != nil { + return nil, err + } + return ParseDeleteBucketsIDLabelsIDResponse(rsp) +} + +// GetBucketsIDMembersWithResponse request returning *GetBucketsIDMembersResponse +func (c *ClientWithResponses) GetBucketsIDMembersWithResponse(ctx context.Context, bucketID string, params *GetBucketsIDMembersParams) (*GetBucketsIDMembersResponse, error) { + rsp, err := c.GetBucketsIDMembers(ctx, bucketID, params) + if err != nil { + return nil, err + } + return ParseGetBucketsIDMembersResponse(rsp) +} + +// PostBucketsIDMembersWithBodyWithResponse request with arbitrary body returning *PostBucketsIDMembersResponse +func (c *ClientWithResponses) PostBucketsIDMembersWithBodyWithResponse(ctx context.Context, bucketID string, params *PostBucketsIDMembersParams, contentType string, body io.Reader) (*PostBucketsIDMembersResponse, error) { + rsp, err := c.PostBucketsIDMembersWithBody(ctx, bucketID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostBucketsIDMembersResponse(rsp) +} + +func (c *ClientWithResponses) PostBucketsIDMembersWithResponse(ctx context.Context, bucketID string, params *PostBucketsIDMembersParams, body PostBucketsIDMembersJSONRequestBody) (*PostBucketsIDMembersResponse, error) { + rsp, err := c.PostBucketsIDMembers(ctx, bucketID, params, body) + if err != nil { + return nil, err + } + return ParsePostBucketsIDMembersResponse(rsp) +} + +// DeleteBucketsIDMembersIDWithResponse request returning *DeleteBucketsIDMembersIDResponse +func (c *ClientWithResponses) DeleteBucketsIDMembersIDWithResponse(ctx context.Context, bucketID string, userID string, params *DeleteBucketsIDMembersIDParams) (*DeleteBucketsIDMembersIDResponse, error) { + rsp, err := c.DeleteBucketsIDMembersID(ctx, bucketID, userID, params) + if err != nil { + return nil, err + } + return ParseDeleteBucketsIDMembersIDResponse(rsp) +} + +// GetBucketsIDOwnersWithResponse request returning *GetBucketsIDOwnersResponse +func (c *ClientWithResponses) GetBucketsIDOwnersWithResponse(ctx context.Context, bucketID string, params *GetBucketsIDOwnersParams) (*GetBucketsIDOwnersResponse, error) { + rsp, err := c.GetBucketsIDOwners(ctx, bucketID, params) + if err != nil { + return nil, err + } + return ParseGetBucketsIDOwnersResponse(rsp) +} + +// PostBucketsIDOwnersWithBodyWithResponse request with arbitrary body returning *PostBucketsIDOwnersResponse +func (c *ClientWithResponses) PostBucketsIDOwnersWithBodyWithResponse(ctx context.Context, bucketID string, params *PostBucketsIDOwnersParams, contentType string, body io.Reader) (*PostBucketsIDOwnersResponse, error) { + rsp, err := c.PostBucketsIDOwnersWithBody(ctx, bucketID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostBucketsIDOwnersResponse(rsp) +} + +func (c *ClientWithResponses) PostBucketsIDOwnersWithResponse(ctx context.Context, bucketID string, params *PostBucketsIDOwnersParams, body PostBucketsIDOwnersJSONRequestBody) (*PostBucketsIDOwnersResponse, error) { + rsp, err := c.PostBucketsIDOwners(ctx, bucketID, params, body) + if err != nil { + return nil, err + } + return ParsePostBucketsIDOwnersResponse(rsp) +} + +// DeleteBucketsIDOwnersIDWithResponse request returning *DeleteBucketsIDOwnersIDResponse +func (c *ClientWithResponses) 
DeleteBucketsIDOwnersIDWithResponse(ctx context.Context, bucketID string, userID string, params *DeleteBucketsIDOwnersIDParams) (*DeleteBucketsIDOwnersIDResponse, error) { + rsp, err := c.DeleteBucketsIDOwnersID(ctx, bucketID, userID, params) + if err != nil { + return nil, err + } + return ParseDeleteBucketsIDOwnersIDResponse(rsp) +} + +// GetChecksWithResponse request returning *GetChecksResponse +func (c *ClientWithResponses) GetChecksWithResponse(ctx context.Context, params *GetChecksParams) (*GetChecksResponse, error) { + rsp, err := c.GetChecks(ctx, params) + if err != nil { + return nil, err + } + return ParseGetChecksResponse(rsp) +} + +// CreateCheckWithBodyWithResponse request with arbitrary body returning *CreateCheckResponse +func (c *ClientWithResponses) CreateCheckWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader) (*CreateCheckResponse, error) { + rsp, err := c.CreateCheckWithBody(ctx, contentType, body) + if err != nil { + return nil, err + } + return ParseCreateCheckResponse(rsp) +} + +func (c *ClientWithResponses) CreateCheckWithResponse(ctx context.Context, body CreateCheckJSONRequestBody) (*CreateCheckResponse, error) { + rsp, err := c.CreateCheck(ctx, body) + if err != nil { + return nil, err + } + return ParseCreateCheckResponse(rsp) +} + +// DeleteChecksIDWithResponse request returning *DeleteChecksIDResponse +func (c *ClientWithResponses) DeleteChecksIDWithResponse(ctx context.Context, checkID string, params *DeleteChecksIDParams) (*DeleteChecksIDResponse, error) { + rsp, err := c.DeleteChecksID(ctx, checkID, params) + if err != nil { + return nil, err + } + return ParseDeleteChecksIDResponse(rsp) +} + +// GetChecksIDWithResponse request returning *GetChecksIDResponse +func (c *ClientWithResponses) GetChecksIDWithResponse(ctx context.Context, checkID string, params *GetChecksIDParams) (*GetChecksIDResponse, error) { + rsp, err := c.GetChecksID(ctx, checkID, params) + if err != nil { + return nil, err + } + return ParseGetChecksIDResponse(rsp) +} + +// PatchChecksIDWithBodyWithResponse request with arbitrary body returning *PatchChecksIDResponse +func (c *ClientWithResponses) PatchChecksIDWithBodyWithResponse(ctx context.Context, checkID string, params *PatchChecksIDParams, contentType string, body io.Reader) (*PatchChecksIDResponse, error) { + rsp, err := c.PatchChecksIDWithBody(ctx, checkID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchChecksIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchChecksIDWithResponse(ctx context.Context, checkID string, params *PatchChecksIDParams, body PatchChecksIDJSONRequestBody) (*PatchChecksIDResponse, error) { + rsp, err := c.PatchChecksID(ctx, checkID, params, body) + if err != nil { + return nil, err + } + return ParsePatchChecksIDResponse(rsp) +} + +// PutChecksIDWithBodyWithResponse request with arbitrary body returning *PutChecksIDResponse +func (c *ClientWithResponses) PutChecksIDWithBodyWithResponse(ctx context.Context, checkID string, params *PutChecksIDParams, contentType string, body io.Reader) (*PutChecksIDResponse, error) { + rsp, err := c.PutChecksIDWithBody(ctx, checkID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePutChecksIDResponse(rsp) +} + +func (c *ClientWithResponses) PutChecksIDWithResponse(ctx context.Context, checkID string, params *PutChecksIDParams, body PutChecksIDJSONRequestBody) (*PutChecksIDResponse, error) { + rsp, err := c.PutChecksID(ctx, checkID, params, body) + if err != nil { + return 
nil, err + } + return ParsePutChecksIDResponse(rsp) +} + +// GetChecksIDLabelsWithResponse request returning *GetChecksIDLabelsResponse +func (c *ClientWithResponses) GetChecksIDLabelsWithResponse(ctx context.Context, checkID string, params *GetChecksIDLabelsParams) (*GetChecksIDLabelsResponse, error) { + rsp, err := c.GetChecksIDLabels(ctx, checkID, params) + if err != nil { + return nil, err + } + return ParseGetChecksIDLabelsResponse(rsp) +} + +// PostChecksIDLabelsWithBodyWithResponse request with arbitrary body returning *PostChecksIDLabelsResponse +func (c *ClientWithResponses) PostChecksIDLabelsWithBodyWithResponse(ctx context.Context, checkID string, params *PostChecksIDLabelsParams, contentType string, body io.Reader) (*PostChecksIDLabelsResponse, error) { + rsp, err := c.PostChecksIDLabelsWithBody(ctx, checkID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostChecksIDLabelsResponse(rsp) +} + +func (c *ClientWithResponses) PostChecksIDLabelsWithResponse(ctx context.Context, checkID string, params *PostChecksIDLabelsParams, body PostChecksIDLabelsJSONRequestBody) (*PostChecksIDLabelsResponse, error) { + rsp, err := c.PostChecksIDLabels(ctx, checkID, params, body) + if err != nil { + return nil, err + } + return ParsePostChecksIDLabelsResponse(rsp) +} + +// DeleteChecksIDLabelsIDWithResponse request returning *DeleteChecksIDLabelsIDResponse +func (c *ClientWithResponses) DeleteChecksIDLabelsIDWithResponse(ctx context.Context, checkID string, labelID string, params *DeleteChecksIDLabelsIDParams) (*DeleteChecksIDLabelsIDResponse, error) { + rsp, err := c.DeleteChecksIDLabelsID(ctx, checkID, labelID, params) + if err != nil { + return nil, err + } + return ParseDeleteChecksIDLabelsIDResponse(rsp) +} + +// GetChecksIDQueryWithResponse request returning *GetChecksIDQueryResponse +func (c *ClientWithResponses) GetChecksIDQueryWithResponse(ctx context.Context, checkID string, params *GetChecksIDQueryParams) (*GetChecksIDQueryResponse, error) { + rsp, err := c.GetChecksIDQuery(ctx, checkID, params) + if err != nil { + return nil, err + } + return ParseGetChecksIDQueryResponse(rsp) +} + +// GetConfigWithResponse request returning *GetConfigResponse +func (c *ClientWithResponses) GetConfigWithResponse(ctx context.Context, params *GetConfigParams) (*GetConfigResponse, error) { + rsp, err := c.GetConfig(ctx, params) + if err != nil { + return nil, err + } + return ParseGetConfigResponse(rsp) +} + +// GetDashboardsWithResponse request returning *GetDashboardsResponse +func (c *ClientWithResponses) GetDashboardsWithResponse(ctx context.Context, params *GetDashboardsParams) (*GetDashboardsResponse, error) { + rsp, err := c.GetDashboards(ctx, params) + if err != nil { + return nil, err + } + return ParseGetDashboardsResponse(rsp) +} + +// PostDashboardsWithBodyWithResponse request with arbitrary body returning *PostDashboardsResponse +func (c *ClientWithResponses) PostDashboardsWithBodyWithResponse(ctx context.Context, params *PostDashboardsParams, contentType string, body io.Reader) (*PostDashboardsResponse, error) { + rsp, err := c.PostDashboardsWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostDashboardsResponse(rsp) +} + +func (c *ClientWithResponses) PostDashboardsWithResponse(ctx context.Context, params *PostDashboardsParams, body PostDashboardsJSONRequestBody) (*PostDashboardsResponse, error) { + rsp, err := c.PostDashboards(ctx, params, body) + if err != nil { + return nil, err + } + return 
ParsePostDashboardsResponse(rsp) +} + +// DeleteDashboardsIDWithResponse request returning *DeleteDashboardsIDResponse +func (c *ClientWithResponses) DeleteDashboardsIDWithResponse(ctx context.Context, dashboardID string, params *DeleteDashboardsIDParams) (*DeleteDashboardsIDResponse, error) { + rsp, err := c.DeleteDashboardsID(ctx, dashboardID, params) + if err != nil { + return nil, err + } + return ParseDeleteDashboardsIDResponse(rsp) +} + +// GetDashboardsIDWithResponse request returning *GetDashboardsIDResponse +func (c *ClientWithResponses) GetDashboardsIDWithResponse(ctx context.Context, dashboardID string, params *GetDashboardsIDParams) (*GetDashboardsIDResponse, error) { + rsp, err := c.GetDashboardsID(ctx, dashboardID, params) + if err != nil { + return nil, err + } + return ParseGetDashboardsIDResponse(rsp) +} + +// PatchDashboardsIDWithBodyWithResponse request with arbitrary body returning *PatchDashboardsIDResponse +func (c *ClientWithResponses) PatchDashboardsIDWithBodyWithResponse(ctx context.Context, dashboardID string, params *PatchDashboardsIDParams, contentType string, body io.Reader) (*PatchDashboardsIDResponse, error) { + rsp, err := c.PatchDashboardsIDWithBody(ctx, dashboardID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchDashboardsIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchDashboardsIDWithResponse(ctx context.Context, dashboardID string, params *PatchDashboardsIDParams, body PatchDashboardsIDJSONRequestBody) (*PatchDashboardsIDResponse, error) { + rsp, err := c.PatchDashboardsID(ctx, dashboardID, params, body) + if err != nil { + return nil, err + } + return ParsePatchDashboardsIDResponse(rsp) +} + +// PostDashboardsIDCellsWithBodyWithResponse request with arbitrary body returning *PostDashboardsIDCellsResponse +func (c *ClientWithResponses) PostDashboardsIDCellsWithBodyWithResponse(ctx context.Context, dashboardID string, params *PostDashboardsIDCellsParams, contentType string, body io.Reader) (*PostDashboardsIDCellsResponse, error) { + rsp, err := c.PostDashboardsIDCellsWithBody(ctx, dashboardID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostDashboardsIDCellsResponse(rsp) +} + +func (c *ClientWithResponses) PostDashboardsIDCellsWithResponse(ctx context.Context, dashboardID string, params *PostDashboardsIDCellsParams, body PostDashboardsIDCellsJSONRequestBody) (*PostDashboardsIDCellsResponse, error) { + rsp, err := c.PostDashboardsIDCells(ctx, dashboardID, params, body) + if err != nil { + return nil, err + } + return ParsePostDashboardsIDCellsResponse(rsp) +} + +// PutDashboardsIDCellsWithBodyWithResponse request with arbitrary body returning *PutDashboardsIDCellsResponse +func (c *ClientWithResponses) PutDashboardsIDCellsWithBodyWithResponse(ctx context.Context, dashboardID string, params *PutDashboardsIDCellsParams, contentType string, body io.Reader) (*PutDashboardsIDCellsResponse, error) { + rsp, err := c.PutDashboardsIDCellsWithBody(ctx, dashboardID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePutDashboardsIDCellsResponse(rsp) +} + +func (c *ClientWithResponses) PutDashboardsIDCellsWithResponse(ctx context.Context, dashboardID string, params *PutDashboardsIDCellsParams, body PutDashboardsIDCellsJSONRequestBody) (*PutDashboardsIDCellsResponse, error) { + rsp, err := c.PutDashboardsIDCells(ctx, dashboardID, params, body) + if err != nil { + return nil, err + } + return ParsePutDashboardsIDCellsResponse(rsp) +} + +// 
DeleteDashboardsIDCellsIDWithResponse request returning *DeleteDashboardsIDCellsIDResponse +func (c *ClientWithResponses) DeleteDashboardsIDCellsIDWithResponse(ctx context.Context, dashboardID string, cellID string, params *DeleteDashboardsIDCellsIDParams) (*DeleteDashboardsIDCellsIDResponse, error) { + rsp, err := c.DeleteDashboardsIDCellsID(ctx, dashboardID, cellID, params) + if err != nil { + return nil, err + } + return ParseDeleteDashboardsIDCellsIDResponse(rsp) +} + +// PatchDashboardsIDCellsIDWithBodyWithResponse request with arbitrary body returning *PatchDashboardsIDCellsIDResponse +func (c *ClientWithResponses) PatchDashboardsIDCellsIDWithBodyWithResponse(ctx context.Context, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDParams, contentType string, body io.Reader) (*PatchDashboardsIDCellsIDResponse, error) { + rsp, err := c.PatchDashboardsIDCellsIDWithBody(ctx, dashboardID, cellID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchDashboardsIDCellsIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchDashboardsIDCellsIDWithResponse(ctx context.Context, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDParams, body PatchDashboardsIDCellsIDJSONRequestBody) (*PatchDashboardsIDCellsIDResponse, error) { + rsp, err := c.PatchDashboardsIDCellsID(ctx, dashboardID, cellID, params, body) + if err != nil { + return nil, err + } + return ParsePatchDashboardsIDCellsIDResponse(rsp) +} + +// GetDashboardsIDCellsIDViewWithResponse request returning *GetDashboardsIDCellsIDViewResponse +func (c *ClientWithResponses) GetDashboardsIDCellsIDViewWithResponse(ctx context.Context, dashboardID string, cellID string, params *GetDashboardsIDCellsIDViewParams) (*GetDashboardsIDCellsIDViewResponse, error) { + rsp, err := c.GetDashboardsIDCellsIDView(ctx, dashboardID, cellID, params) + if err != nil { + return nil, err + } + return ParseGetDashboardsIDCellsIDViewResponse(rsp) +} + +// PatchDashboardsIDCellsIDViewWithBodyWithResponse request with arbitrary body returning *PatchDashboardsIDCellsIDViewResponse +func (c *ClientWithResponses) PatchDashboardsIDCellsIDViewWithBodyWithResponse(ctx context.Context, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDViewParams, contentType string, body io.Reader) (*PatchDashboardsIDCellsIDViewResponse, error) { + rsp, err := c.PatchDashboardsIDCellsIDViewWithBody(ctx, dashboardID, cellID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchDashboardsIDCellsIDViewResponse(rsp) +} + +func (c *ClientWithResponses) PatchDashboardsIDCellsIDViewWithResponse(ctx context.Context, dashboardID string, cellID string, params *PatchDashboardsIDCellsIDViewParams, body PatchDashboardsIDCellsIDViewJSONRequestBody) (*PatchDashboardsIDCellsIDViewResponse, error) { + rsp, err := c.PatchDashboardsIDCellsIDView(ctx, dashboardID, cellID, params, body) + if err != nil { + return nil, err + } + return ParsePatchDashboardsIDCellsIDViewResponse(rsp) +} + +// GetDashboardsIDLabelsWithResponse request returning *GetDashboardsIDLabelsResponse +func (c *ClientWithResponses) GetDashboardsIDLabelsWithResponse(ctx context.Context, dashboardID string, params *GetDashboardsIDLabelsParams) (*GetDashboardsIDLabelsResponse, error) { + rsp, err := c.GetDashboardsIDLabels(ctx, dashboardID, params) + if err != nil { + return nil, err + } + return ParseGetDashboardsIDLabelsResponse(rsp) +} + +// PostDashboardsIDLabelsWithBodyWithResponse request with arbitrary body returning 
*PostDashboardsIDLabelsResponse +func (c *ClientWithResponses) PostDashboardsIDLabelsWithBodyWithResponse(ctx context.Context, dashboardID string, params *PostDashboardsIDLabelsParams, contentType string, body io.Reader) (*PostDashboardsIDLabelsResponse, error) { + rsp, err := c.PostDashboardsIDLabelsWithBody(ctx, dashboardID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostDashboardsIDLabelsResponse(rsp) +} + +func (c *ClientWithResponses) PostDashboardsIDLabelsWithResponse(ctx context.Context, dashboardID string, params *PostDashboardsIDLabelsParams, body PostDashboardsIDLabelsJSONRequestBody) (*PostDashboardsIDLabelsResponse, error) { + rsp, err := c.PostDashboardsIDLabels(ctx, dashboardID, params, body) + if err != nil { + return nil, err + } + return ParsePostDashboardsIDLabelsResponse(rsp) +} + +// DeleteDashboardsIDLabelsIDWithResponse request returning *DeleteDashboardsIDLabelsIDResponse +func (c *ClientWithResponses) DeleteDashboardsIDLabelsIDWithResponse(ctx context.Context, dashboardID string, labelID string, params *DeleteDashboardsIDLabelsIDParams) (*DeleteDashboardsIDLabelsIDResponse, error) { + rsp, err := c.DeleteDashboardsIDLabelsID(ctx, dashboardID, labelID, params) + if err != nil { + return nil, err + } + return ParseDeleteDashboardsIDLabelsIDResponse(rsp) +} + +// GetDashboardsIDMembersWithResponse request returning *GetDashboardsIDMembersResponse +func (c *ClientWithResponses) GetDashboardsIDMembersWithResponse(ctx context.Context, dashboardID string, params *GetDashboardsIDMembersParams) (*GetDashboardsIDMembersResponse, error) { + rsp, err := c.GetDashboardsIDMembers(ctx, dashboardID, params) + if err != nil { + return nil, err + } + return ParseGetDashboardsIDMembersResponse(rsp) +} + +// PostDashboardsIDMembersWithBodyWithResponse request with arbitrary body returning *PostDashboardsIDMembersResponse +func (c *ClientWithResponses) PostDashboardsIDMembersWithBodyWithResponse(ctx context.Context, dashboardID string, params *PostDashboardsIDMembersParams, contentType string, body io.Reader) (*PostDashboardsIDMembersResponse, error) { + rsp, err := c.PostDashboardsIDMembersWithBody(ctx, dashboardID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostDashboardsIDMembersResponse(rsp) +} + +func (c *ClientWithResponses) PostDashboardsIDMembersWithResponse(ctx context.Context, dashboardID string, params *PostDashboardsIDMembersParams, body PostDashboardsIDMembersJSONRequestBody) (*PostDashboardsIDMembersResponse, error) { + rsp, err := c.PostDashboardsIDMembers(ctx, dashboardID, params, body) + if err != nil { + return nil, err + } + return ParsePostDashboardsIDMembersResponse(rsp) +} + +// DeleteDashboardsIDMembersIDWithResponse request returning *DeleteDashboardsIDMembersIDResponse +func (c *ClientWithResponses) DeleteDashboardsIDMembersIDWithResponse(ctx context.Context, dashboardID string, userID string, params *DeleteDashboardsIDMembersIDParams) (*DeleteDashboardsIDMembersIDResponse, error) { + rsp, err := c.DeleteDashboardsIDMembersID(ctx, dashboardID, userID, params) + if err != nil { + return nil, err + } + return ParseDeleteDashboardsIDMembersIDResponse(rsp) +} + +// GetDashboardsIDOwnersWithResponse request returning *GetDashboardsIDOwnersResponse +func (c *ClientWithResponses) GetDashboardsIDOwnersWithResponse(ctx context.Context, dashboardID string, params *GetDashboardsIDOwnersParams) (*GetDashboardsIDOwnersResponse, error) { + rsp, err := c.GetDashboardsIDOwners(ctx, dashboardID, params) 
+ if err != nil { + return nil, err + } + return ParseGetDashboardsIDOwnersResponse(rsp) +} + +// PostDashboardsIDOwnersWithBodyWithResponse request with arbitrary body returning *PostDashboardsIDOwnersResponse +func (c *ClientWithResponses) PostDashboardsIDOwnersWithBodyWithResponse(ctx context.Context, dashboardID string, params *PostDashboardsIDOwnersParams, contentType string, body io.Reader) (*PostDashboardsIDOwnersResponse, error) { + rsp, err := c.PostDashboardsIDOwnersWithBody(ctx, dashboardID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostDashboardsIDOwnersResponse(rsp) +} + +func (c *ClientWithResponses) PostDashboardsIDOwnersWithResponse(ctx context.Context, dashboardID string, params *PostDashboardsIDOwnersParams, body PostDashboardsIDOwnersJSONRequestBody) (*PostDashboardsIDOwnersResponse, error) { + rsp, err := c.PostDashboardsIDOwners(ctx, dashboardID, params, body) + if err != nil { + return nil, err + } + return ParsePostDashboardsIDOwnersResponse(rsp) +} + +// DeleteDashboardsIDOwnersIDWithResponse request returning *DeleteDashboardsIDOwnersIDResponse +func (c *ClientWithResponses) DeleteDashboardsIDOwnersIDWithResponse(ctx context.Context, dashboardID string, userID string, params *DeleteDashboardsIDOwnersIDParams) (*DeleteDashboardsIDOwnersIDResponse, error) { + rsp, err := c.DeleteDashboardsIDOwnersID(ctx, dashboardID, userID, params) + if err != nil { + return nil, err + } + return ParseDeleteDashboardsIDOwnersIDResponse(rsp) +} + +// GetDBRPsWithResponse request returning *GetDBRPsResponse +func (c *ClientWithResponses) GetDBRPsWithResponse(ctx context.Context, params *GetDBRPsParams) (*GetDBRPsResponse, error) { + rsp, err := c.GetDBRPs(ctx, params) + if err != nil { + return nil, err + } + return ParseGetDBRPsResponse(rsp) +} + +// PostDBRPWithBodyWithResponse request with arbitrary body returning *PostDBRPResponse +func (c *ClientWithResponses) PostDBRPWithBodyWithResponse(ctx context.Context, params *PostDBRPParams, contentType string, body io.Reader) (*PostDBRPResponse, error) { + rsp, err := c.PostDBRPWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostDBRPResponse(rsp) +} + +func (c *ClientWithResponses) PostDBRPWithResponse(ctx context.Context, params *PostDBRPParams, body PostDBRPJSONRequestBody) (*PostDBRPResponse, error) { + rsp, err := c.PostDBRP(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostDBRPResponse(rsp) +} + +// DeleteDBRPIDWithResponse request returning *DeleteDBRPIDResponse +func (c *ClientWithResponses) DeleteDBRPIDWithResponse(ctx context.Context, dbrpID string, params *DeleteDBRPIDParams) (*DeleteDBRPIDResponse, error) { + rsp, err := c.DeleteDBRPID(ctx, dbrpID, params) + if err != nil { + return nil, err + } + return ParseDeleteDBRPIDResponse(rsp) +} + +// GetDBRPsIDWithResponse request returning *GetDBRPsIDResponse +func (c *ClientWithResponses) GetDBRPsIDWithResponse(ctx context.Context, dbrpID string, params *GetDBRPsIDParams) (*GetDBRPsIDResponse, error) { + rsp, err := c.GetDBRPsID(ctx, dbrpID, params) + if err != nil { + return nil, err + } + return ParseGetDBRPsIDResponse(rsp) +} + +// PatchDBRPIDWithBodyWithResponse request with arbitrary body returning *PatchDBRPIDResponse +func (c *ClientWithResponses) PatchDBRPIDWithBodyWithResponse(ctx context.Context, dbrpID string, params *PatchDBRPIDParams, contentType string, body io.Reader) (*PatchDBRPIDResponse, error) { + rsp, err := c.PatchDBRPIDWithBody(ctx, dbrpID, params, 
contentType, body) + if err != nil { + return nil, err + } + return ParsePatchDBRPIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchDBRPIDWithResponse(ctx context.Context, dbrpID string, params *PatchDBRPIDParams, body PatchDBRPIDJSONRequestBody) (*PatchDBRPIDResponse, error) { + rsp, err := c.PatchDBRPID(ctx, dbrpID, params, body) + if err != nil { + return nil, err + } + return ParsePatchDBRPIDResponse(rsp) +} + +// PostDeleteWithBodyWithResponse request with arbitrary body returning *PostDeleteResponse +func (c *ClientWithResponses) PostDeleteWithBodyWithResponse(ctx context.Context, params *PostDeleteParams, contentType string, body io.Reader) (*PostDeleteResponse, error) { + rsp, err := c.PostDeleteWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostDeleteResponse(rsp) +} + +func (c *ClientWithResponses) PostDeleteWithResponse(ctx context.Context, params *PostDeleteParams, body PostDeleteJSONRequestBody) (*PostDeleteResponse, error) { + rsp, err := c.PostDelete(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostDeleteResponse(rsp) +} + +// GetFlagsWithResponse request returning *GetFlagsResponse +func (c *ClientWithResponses) GetFlagsWithResponse(ctx context.Context, params *GetFlagsParams) (*GetFlagsResponse, error) { + rsp, err := c.GetFlags(ctx, params) + if err != nil { + return nil, err + } + return ParseGetFlagsResponse(rsp) +} + +// GetHealthWithResponse request returning *GetHealthResponse +func (c *ClientWithResponses) GetHealthWithResponse(ctx context.Context, params *GetHealthParams) (*GetHealthResponse, error) { + rsp, err := c.GetHealth(ctx, params) + if err != nil { + return nil, err + } + return ParseGetHealthResponse(rsp) +} + +// GetLabelsWithResponse request returning *GetLabelsResponse +func (c *ClientWithResponses) GetLabelsWithResponse(ctx context.Context, params *GetLabelsParams) (*GetLabelsResponse, error) { + rsp, err := c.GetLabels(ctx, params) + if err != nil { + return nil, err + } + return ParseGetLabelsResponse(rsp) +} + +// PostLabelsWithBodyWithResponse request with arbitrary body returning *PostLabelsResponse +func (c *ClientWithResponses) PostLabelsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader) (*PostLabelsResponse, error) { + rsp, err := c.PostLabelsWithBody(ctx, contentType, body) + if err != nil { + return nil, err + } + return ParsePostLabelsResponse(rsp) +} + +func (c *ClientWithResponses) PostLabelsWithResponse(ctx context.Context, body PostLabelsJSONRequestBody) (*PostLabelsResponse, error) { + rsp, err := c.PostLabels(ctx, body) + if err != nil { + return nil, err + } + return ParsePostLabelsResponse(rsp) +} + +// DeleteLabelsIDWithResponse request returning *DeleteLabelsIDResponse +func (c *ClientWithResponses) DeleteLabelsIDWithResponse(ctx context.Context, labelID string, params *DeleteLabelsIDParams) (*DeleteLabelsIDResponse, error) { + rsp, err := c.DeleteLabelsID(ctx, labelID, params) + if err != nil { + return nil, err + } + return ParseDeleteLabelsIDResponse(rsp) +} + +// GetLabelsIDWithResponse request returning *GetLabelsIDResponse +func (c *ClientWithResponses) GetLabelsIDWithResponse(ctx context.Context, labelID string, params *GetLabelsIDParams) (*GetLabelsIDResponse, error) { + rsp, err := c.GetLabelsID(ctx, labelID, params) + if err != nil { + return nil, err + } + return ParseGetLabelsIDResponse(rsp) +} + +// PatchLabelsIDWithBodyWithResponse request with arbitrary body returning *PatchLabelsIDResponse +func (c 
*ClientWithResponses) PatchLabelsIDWithBodyWithResponse(ctx context.Context, labelID string, params *PatchLabelsIDParams, contentType string, body io.Reader) (*PatchLabelsIDResponse, error) { + rsp, err := c.PatchLabelsIDWithBody(ctx, labelID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchLabelsIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchLabelsIDWithResponse(ctx context.Context, labelID string, params *PatchLabelsIDParams, body PatchLabelsIDJSONRequestBody) (*PatchLabelsIDResponse, error) { + rsp, err := c.PatchLabelsID(ctx, labelID, params, body) + if err != nil { + return nil, err + } + return ParsePatchLabelsIDResponse(rsp) +} + +// GetLegacyAuthorizationsWithResponse request returning *GetLegacyAuthorizationsResponse +func (c *ClientWithResponses) GetLegacyAuthorizationsWithResponse(ctx context.Context, params *GetLegacyAuthorizationsParams) (*GetLegacyAuthorizationsResponse, error) { + rsp, err := c.GetLegacyAuthorizations(ctx, params) + if err != nil { + return nil, err + } + return ParseGetLegacyAuthorizationsResponse(rsp) +} + +// PostLegacyAuthorizationsWithBodyWithResponse request with arbitrary body returning *PostLegacyAuthorizationsResponse +func (c *ClientWithResponses) PostLegacyAuthorizationsWithBodyWithResponse(ctx context.Context, params *PostLegacyAuthorizationsParams, contentType string, body io.Reader) (*PostLegacyAuthorizationsResponse, error) { + rsp, err := c.PostLegacyAuthorizationsWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostLegacyAuthorizationsResponse(rsp) +} + +func (c *ClientWithResponses) PostLegacyAuthorizationsWithResponse(ctx context.Context, params *PostLegacyAuthorizationsParams, body PostLegacyAuthorizationsJSONRequestBody) (*PostLegacyAuthorizationsResponse, error) { + rsp, err := c.PostLegacyAuthorizations(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostLegacyAuthorizationsResponse(rsp) +} + +// DeleteLegacyAuthorizationsIDWithResponse request returning *DeleteLegacyAuthorizationsIDResponse +func (c *ClientWithResponses) DeleteLegacyAuthorizationsIDWithResponse(ctx context.Context, authID string, params *DeleteLegacyAuthorizationsIDParams) (*DeleteLegacyAuthorizationsIDResponse, error) { + rsp, err := c.DeleteLegacyAuthorizationsID(ctx, authID, params) + if err != nil { + return nil, err + } + return ParseDeleteLegacyAuthorizationsIDResponse(rsp) +} + +// GetLegacyAuthorizationsIDWithResponse request returning *GetLegacyAuthorizationsIDResponse +func (c *ClientWithResponses) GetLegacyAuthorizationsIDWithResponse(ctx context.Context, authID string, params *GetLegacyAuthorizationsIDParams) (*GetLegacyAuthorizationsIDResponse, error) { + rsp, err := c.GetLegacyAuthorizationsID(ctx, authID, params) + if err != nil { + return nil, err + } + return ParseGetLegacyAuthorizationsIDResponse(rsp) +} + +// PatchLegacyAuthorizationsIDWithBodyWithResponse request with arbitrary body returning *PatchLegacyAuthorizationsIDResponse +func (c *ClientWithResponses) PatchLegacyAuthorizationsIDWithBodyWithResponse(ctx context.Context, authID string, params *PatchLegacyAuthorizationsIDParams, contentType string, body io.Reader) (*PatchLegacyAuthorizationsIDResponse, error) { + rsp, err := c.PatchLegacyAuthorizationsIDWithBody(ctx, authID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchLegacyAuthorizationsIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchLegacyAuthorizationsIDWithResponse(ctx 
context.Context, authID string, params *PatchLegacyAuthorizationsIDParams, body PatchLegacyAuthorizationsIDJSONRequestBody) (*PatchLegacyAuthorizationsIDResponse, error) { + rsp, err := c.PatchLegacyAuthorizationsID(ctx, authID, params, body) + if err != nil { + return nil, err + } + return ParsePatchLegacyAuthorizationsIDResponse(rsp) +} + +// PostLegacyAuthorizationsIDPasswordWithBodyWithResponse request with arbitrary body returning *PostLegacyAuthorizationsIDPasswordResponse +func (c *ClientWithResponses) PostLegacyAuthorizationsIDPasswordWithBodyWithResponse(ctx context.Context, authID string, params *PostLegacyAuthorizationsIDPasswordParams, contentType string, body io.Reader) (*PostLegacyAuthorizationsIDPasswordResponse, error) { + rsp, err := c.PostLegacyAuthorizationsIDPasswordWithBody(ctx, authID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostLegacyAuthorizationsIDPasswordResponse(rsp) +} + +func (c *ClientWithResponses) PostLegacyAuthorizationsIDPasswordWithResponse(ctx context.Context, authID string, params *PostLegacyAuthorizationsIDPasswordParams, body PostLegacyAuthorizationsIDPasswordJSONRequestBody) (*PostLegacyAuthorizationsIDPasswordResponse, error) { + rsp, err := c.PostLegacyAuthorizationsIDPassword(ctx, authID, params, body) + if err != nil { + return nil, err + } + return ParsePostLegacyAuthorizationsIDPasswordResponse(rsp) +} + +// GetMeWithResponse request returning *GetMeResponse +func (c *ClientWithResponses) GetMeWithResponse(ctx context.Context, params *GetMeParams) (*GetMeResponse, error) { + rsp, err := c.GetMe(ctx, params) + if err != nil { + return nil, err + } + return ParseGetMeResponse(rsp) +} + +// PutMePasswordWithBodyWithResponse request with arbitrary body returning *PutMePasswordResponse +func (c *ClientWithResponses) PutMePasswordWithBodyWithResponse(ctx context.Context, params *PutMePasswordParams, contentType string, body io.Reader) (*PutMePasswordResponse, error) { + rsp, err := c.PutMePasswordWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePutMePasswordResponse(rsp) +} + +func (c *ClientWithResponses) PutMePasswordWithResponse(ctx context.Context, params *PutMePasswordParams, body PutMePasswordJSONRequestBody) (*PutMePasswordResponse, error) { + rsp, err := c.PutMePassword(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePutMePasswordResponse(rsp) +} + +// GetMetricsWithResponse request returning *GetMetricsResponse +func (c *ClientWithResponses) GetMetricsWithResponse(ctx context.Context, params *GetMetricsParams) (*GetMetricsResponse, error) { + rsp, err := c.GetMetrics(ctx, params) + if err != nil { + return nil, err + } + return ParseGetMetricsResponse(rsp) +} + +// GetNotificationEndpointsWithResponse request returning *GetNotificationEndpointsResponse +func (c *ClientWithResponses) GetNotificationEndpointsWithResponse(ctx context.Context, params *GetNotificationEndpointsParams) (*GetNotificationEndpointsResponse, error) { + rsp, err := c.GetNotificationEndpoints(ctx, params) + if err != nil { + return nil, err + } + return ParseGetNotificationEndpointsResponse(rsp) +} + +// CreateNotificationEndpointWithBodyWithResponse request with arbitrary body returning *CreateNotificationEndpointResponse +func (c *ClientWithResponses) CreateNotificationEndpointWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader) (*CreateNotificationEndpointResponse, error) { + rsp, err := c.CreateNotificationEndpointWithBody(ctx, 
contentType, body) + if err != nil { + return nil, err + } + return ParseCreateNotificationEndpointResponse(rsp) +} + +func (c *ClientWithResponses) CreateNotificationEndpointWithResponse(ctx context.Context, body CreateNotificationEndpointJSONRequestBody) (*CreateNotificationEndpointResponse, error) { + rsp, err := c.CreateNotificationEndpoint(ctx, body) + if err != nil { + return nil, err + } + return ParseCreateNotificationEndpointResponse(rsp) +} + +// DeleteNotificationEndpointsIDWithResponse request returning *DeleteNotificationEndpointsIDResponse +func (c *ClientWithResponses) DeleteNotificationEndpointsIDWithResponse(ctx context.Context, endpointID string, params *DeleteNotificationEndpointsIDParams) (*DeleteNotificationEndpointsIDResponse, error) { + rsp, err := c.DeleteNotificationEndpointsID(ctx, endpointID, params) + if err != nil { + return nil, err + } + return ParseDeleteNotificationEndpointsIDResponse(rsp) +} + +// GetNotificationEndpointsIDWithResponse request returning *GetNotificationEndpointsIDResponse +func (c *ClientWithResponses) GetNotificationEndpointsIDWithResponse(ctx context.Context, endpointID string, params *GetNotificationEndpointsIDParams) (*GetNotificationEndpointsIDResponse, error) { + rsp, err := c.GetNotificationEndpointsID(ctx, endpointID, params) + if err != nil { + return nil, err + } + return ParseGetNotificationEndpointsIDResponse(rsp) +} + +// PatchNotificationEndpointsIDWithBodyWithResponse request with arbitrary body returning *PatchNotificationEndpointsIDResponse +func (c *ClientWithResponses) PatchNotificationEndpointsIDWithBodyWithResponse(ctx context.Context, endpointID string, params *PatchNotificationEndpointsIDParams, contentType string, body io.Reader) (*PatchNotificationEndpointsIDResponse, error) { + rsp, err := c.PatchNotificationEndpointsIDWithBody(ctx, endpointID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchNotificationEndpointsIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchNotificationEndpointsIDWithResponse(ctx context.Context, endpointID string, params *PatchNotificationEndpointsIDParams, body PatchNotificationEndpointsIDJSONRequestBody) (*PatchNotificationEndpointsIDResponse, error) { + rsp, err := c.PatchNotificationEndpointsID(ctx, endpointID, params, body) + if err != nil { + return nil, err + } + return ParsePatchNotificationEndpointsIDResponse(rsp) +} + +// PutNotificationEndpointsIDWithBodyWithResponse request with arbitrary body returning *PutNotificationEndpointsIDResponse +func (c *ClientWithResponses) PutNotificationEndpointsIDWithBodyWithResponse(ctx context.Context, endpointID string, params *PutNotificationEndpointsIDParams, contentType string, body io.Reader) (*PutNotificationEndpointsIDResponse, error) { + rsp, err := c.PutNotificationEndpointsIDWithBody(ctx, endpointID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePutNotificationEndpointsIDResponse(rsp) +} + +func (c *ClientWithResponses) PutNotificationEndpointsIDWithResponse(ctx context.Context, endpointID string, params *PutNotificationEndpointsIDParams, body PutNotificationEndpointsIDJSONRequestBody) (*PutNotificationEndpointsIDResponse, error) { + rsp, err := c.PutNotificationEndpointsID(ctx, endpointID, params, body) + if err != nil { + return nil, err + } + return ParsePutNotificationEndpointsIDResponse(rsp) +} + +// GetNotificationEndpointsIDLabelsWithResponse request returning *GetNotificationEndpointsIDLabelsResponse +func (c *ClientWithResponses) 
GetNotificationEndpointsIDLabelsWithResponse(ctx context.Context, endpointID string, params *GetNotificationEndpointsIDLabelsParams) (*GetNotificationEndpointsIDLabelsResponse, error) { + rsp, err := c.GetNotificationEndpointsIDLabels(ctx, endpointID, params) + if err != nil { + return nil, err + } + return ParseGetNotificationEndpointsIDLabelsResponse(rsp) +} + +// PostNotificationEndpointIDLabelsWithBodyWithResponse request with arbitrary body returning *PostNotificationEndpointIDLabelsResponse +func (c *ClientWithResponses) PostNotificationEndpointIDLabelsWithBodyWithResponse(ctx context.Context, endpointID string, params *PostNotificationEndpointIDLabelsParams, contentType string, body io.Reader) (*PostNotificationEndpointIDLabelsResponse, error) { + rsp, err := c.PostNotificationEndpointIDLabelsWithBody(ctx, endpointID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostNotificationEndpointIDLabelsResponse(rsp) +} + +func (c *ClientWithResponses) PostNotificationEndpointIDLabelsWithResponse(ctx context.Context, endpointID string, params *PostNotificationEndpointIDLabelsParams, body PostNotificationEndpointIDLabelsJSONRequestBody) (*PostNotificationEndpointIDLabelsResponse, error) { + rsp, err := c.PostNotificationEndpointIDLabels(ctx, endpointID, params, body) + if err != nil { + return nil, err + } + return ParsePostNotificationEndpointIDLabelsResponse(rsp) +} + +// DeleteNotificationEndpointsIDLabelsIDWithResponse request returning *DeleteNotificationEndpointsIDLabelsIDResponse +func (c *ClientWithResponses) DeleteNotificationEndpointsIDLabelsIDWithResponse(ctx context.Context, endpointID string, labelID string, params *DeleteNotificationEndpointsIDLabelsIDParams) (*DeleteNotificationEndpointsIDLabelsIDResponse, error) { + rsp, err := c.DeleteNotificationEndpointsIDLabelsID(ctx, endpointID, labelID, params) + if err != nil { + return nil, err + } + return ParseDeleteNotificationEndpointsIDLabelsIDResponse(rsp) +} + +// GetNotificationRulesWithResponse request returning *GetNotificationRulesResponse +func (c *ClientWithResponses) GetNotificationRulesWithResponse(ctx context.Context, params *GetNotificationRulesParams) (*GetNotificationRulesResponse, error) { + rsp, err := c.GetNotificationRules(ctx, params) + if err != nil { + return nil, err + } + return ParseGetNotificationRulesResponse(rsp) +} + +// CreateNotificationRuleWithBodyWithResponse request with arbitrary body returning *CreateNotificationRuleResponse +func (c *ClientWithResponses) CreateNotificationRuleWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader) (*CreateNotificationRuleResponse, error) { + rsp, err := c.CreateNotificationRuleWithBody(ctx, contentType, body) + if err != nil { + return nil, err + } + return ParseCreateNotificationRuleResponse(rsp) +} + +func (c *ClientWithResponses) CreateNotificationRuleWithResponse(ctx context.Context, body CreateNotificationRuleJSONRequestBody) (*CreateNotificationRuleResponse, error) { + rsp, err := c.CreateNotificationRule(ctx, body) + if err != nil { + return nil, err + } + return ParseCreateNotificationRuleResponse(rsp) +} + +// DeleteNotificationRulesIDWithResponse request returning *DeleteNotificationRulesIDResponse +func (c *ClientWithResponses) DeleteNotificationRulesIDWithResponse(ctx context.Context, ruleID string, params *DeleteNotificationRulesIDParams) (*DeleteNotificationRulesIDResponse, error) { + rsp, err := c.DeleteNotificationRulesID(ctx, ruleID, params) + if err != nil { + return nil, err + } + return 
ParseDeleteNotificationRulesIDResponse(rsp) +} + +// GetNotificationRulesIDWithResponse request returning *GetNotificationRulesIDResponse +func (c *ClientWithResponses) GetNotificationRulesIDWithResponse(ctx context.Context, ruleID string, params *GetNotificationRulesIDParams) (*GetNotificationRulesIDResponse, error) { + rsp, err := c.GetNotificationRulesID(ctx, ruleID, params) + if err != nil { + return nil, err + } + return ParseGetNotificationRulesIDResponse(rsp) +} + +// PatchNotificationRulesIDWithBodyWithResponse request with arbitrary body returning *PatchNotificationRulesIDResponse +func (c *ClientWithResponses) PatchNotificationRulesIDWithBodyWithResponse(ctx context.Context, ruleID string, params *PatchNotificationRulesIDParams, contentType string, body io.Reader) (*PatchNotificationRulesIDResponse, error) { + rsp, err := c.PatchNotificationRulesIDWithBody(ctx, ruleID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchNotificationRulesIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchNotificationRulesIDWithResponse(ctx context.Context, ruleID string, params *PatchNotificationRulesIDParams, body PatchNotificationRulesIDJSONRequestBody) (*PatchNotificationRulesIDResponse, error) { + rsp, err := c.PatchNotificationRulesID(ctx, ruleID, params, body) + if err != nil { + return nil, err + } + return ParsePatchNotificationRulesIDResponse(rsp) +} + +// PutNotificationRulesIDWithBodyWithResponse request with arbitrary body returning *PutNotificationRulesIDResponse +func (c *ClientWithResponses) PutNotificationRulesIDWithBodyWithResponse(ctx context.Context, ruleID string, params *PutNotificationRulesIDParams, contentType string, body io.Reader) (*PutNotificationRulesIDResponse, error) { + rsp, err := c.PutNotificationRulesIDWithBody(ctx, ruleID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePutNotificationRulesIDResponse(rsp) +} + +func (c *ClientWithResponses) PutNotificationRulesIDWithResponse(ctx context.Context, ruleID string, params *PutNotificationRulesIDParams, body PutNotificationRulesIDJSONRequestBody) (*PutNotificationRulesIDResponse, error) { + rsp, err := c.PutNotificationRulesID(ctx, ruleID, params, body) + if err != nil { + return nil, err + } + return ParsePutNotificationRulesIDResponse(rsp) +} + +// GetNotificationRulesIDLabelsWithResponse request returning *GetNotificationRulesIDLabelsResponse +func (c *ClientWithResponses) GetNotificationRulesIDLabelsWithResponse(ctx context.Context, ruleID string, params *GetNotificationRulesIDLabelsParams) (*GetNotificationRulesIDLabelsResponse, error) { + rsp, err := c.GetNotificationRulesIDLabels(ctx, ruleID, params) + if err != nil { + return nil, err + } + return ParseGetNotificationRulesIDLabelsResponse(rsp) +} + +// PostNotificationRuleIDLabelsWithBodyWithResponse request with arbitrary body returning *PostNotificationRuleIDLabelsResponse +func (c *ClientWithResponses) PostNotificationRuleIDLabelsWithBodyWithResponse(ctx context.Context, ruleID string, params *PostNotificationRuleIDLabelsParams, contentType string, body io.Reader) (*PostNotificationRuleIDLabelsResponse, error) { + rsp, err := c.PostNotificationRuleIDLabelsWithBody(ctx, ruleID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostNotificationRuleIDLabelsResponse(rsp) +} + +func (c *ClientWithResponses) PostNotificationRuleIDLabelsWithResponse(ctx context.Context, ruleID string, params *PostNotificationRuleIDLabelsParams, body 
PostNotificationRuleIDLabelsJSONRequestBody) (*PostNotificationRuleIDLabelsResponse, error) { + rsp, err := c.PostNotificationRuleIDLabels(ctx, ruleID, params, body) + if err != nil { + return nil, err + } + return ParsePostNotificationRuleIDLabelsResponse(rsp) +} + +// DeleteNotificationRulesIDLabelsIDWithResponse request returning *DeleteNotificationRulesIDLabelsIDResponse +func (c *ClientWithResponses) DeleteNotificationRulesIDLabelsIDWithResponse(ctx context.Context, ruleID string, labelID string, params *DeleteNotificationRulesIDLabelsIDParams) (*DeleteNotificationRulesIDLabelsIDResponse, error) { + rsp, err := c.DeleteNotificationRulesIDLabelsID(ctx, ruleID, labelID, params) + if err != nil { + return nil, err + } + return ParseDeleteNotificationRulesIDLabelsIDResponse(rsp) +} + +// GetNotificationRulesIDQueryWithResponse request returning *GetNotificationRulesIDQueryResponse +func (c *ClientWithResponses) GetNotificationRulesIDQueryWithResponse(ctx context.Context, ruleID string, params *GetNotificationRulesIDQueryParams) (*GetNotificationRulesIDQueryResponse, error) { + rsp, err := c.GetNotificationRulesIDQuery(ctx, ruleID, params) + if err != nil { + return nil, err + } + return ParseGetNotificationRulesIDQueryResponse(rsp) +} + +// GetOrgsWithResponse request returning *GetOrgsResponse +func (c *ClientWithResponses) GetOrgsWithResponse(ctx context.Context, params *GetOrgsParams) (*GetOrgsResponse, error) { + rsp, err := c.GetOrgs(ctx, params) + if err != nil { + return nil, err + } + return ParseGetOrgsResponse(rsp) +} + +// PostOrgsWithBodyWithResponse request with arbitrary body returning *PostOrgsResponse +func (c *ClientWithResponses) PostOrgsWithBodyWithResponse(ctx context.Context, params *PostOrgsParams, contentType string, body io.Reader) (*PostOrgsResponse, error) { + rsp, err := c.PostOrgsWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostOrgsResponse(rsp) +} + +func (c *ClientWithResponses) PostOrgsWithResponse(ctx context.Context, params *PostOrgsParams, body PostOrgsJSONRequestBody) (*PostOrgsResponse, error) { + rsp, err := c.PostOrgs(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostOrgsResponse(rsp) +} + +// DeleteOrgsIDWithResponse request returning *DeleteOrgsIDResponse +func (c *ClientWithResponses) DeleteOrgsIDWithResponse(ctx context.Context, orgID string, params *DeleteOrgsIDParams) (*DeleteOrgsIDResponse, error) { + rsp, err := c.DeleteOrgsID(ctx, orgID, params) + if err != nil { + return nil, err + } + return ParseDeleteOrgsIDResponse(rsp) +} + +// GetOrgsIDWithResponse request returning *GetOrgsIDResponse +func (c *ClientWithResponses) GetOrgsIDWithResponse(ctx context.Context, orgID string, params *GetOrgsIDParams) (*GetOrgsIDResponse, error) { + rsp, err := c.GetOrgsID(ctx, orgID, params) + if err != nil { + return nil, err + } + return ParseGetOrgsIDResponse(rsp) +} + +// PatchOrgsIDWithBodyWithResponse request with arbitrary body returning *PatchOrgsIDResponse +func (c *ClientWithResponses) PatchOrgsIDWithBodyWithResponse(ctx context.Context, orgID string, params *PatchOrgsIDParams, contentType string, body io.Reader) (*PatchOrgsIDResponse, error) { + rsp, err := c.PatchOrgsIDWithBody(ctx, orgID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchOrgsIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchOrgsIDWithResponse(ctx context.Context, orgID string, params *PatchOrgsIDParams, body PatchOrgsIDJSONRequestBody) (*PatchOrgsIDResponse, 
error) { + rsp, err := c.PatchOrgsID(ctx, orgID, params, body) + if err != nil { + return nil, err + } + return ParsePatchOrgsIDResponse(rsp) +} + +// GetOrgsIDMembersWithResponse request returning *GetOrgsIDMembersResponse +func (c *ClientWithResponses) GetOrgsIDMembersWithResponse(ctx context.Context, orgID string, params *GetOrgsIDMembersParams) (*GetOrgsIDMembersResponse, error) { + rsp, err := c.GetOrgsIDMembers(ctx, orgID, params) + if err != nil { + return nil, err + } + return ParseGetOrgsIDMembersResponse(rsp) +} + +// PostOrgsIDMembersWithBodyWithResponse request with arbitrary body returning *PostOrgsIDMembersResponse +func (c *ClientWithResponses) PostOrgsIDMembersWithBodyWithResponse(ctx context.Context, orgID string, params *PostOrgsIDMembersParams, contentType string, body io.Reader) (*PostOrgsIDMembersResponse, error) { + rsp, err := c.PostOrgsIDMembersWithBody(ctx, orgID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostOrgsIDMembersResponse(rsp) +} + +func (c *ClientWithResponses) PostOrgsIDMembersWithResponse(ctx context.Context, orgID string, params *PostOrgsIDMembersParams, body PostOrgsIDMembersJSONRequestBody) (*PostOrgsIDMembersResponse, error) { + rsp, err := c.PostOrgsIDMembers(ctx, orgID, params, body) + if err != nil { + return nil, err + } + return ParsePostOrgsIDMembersResponse(rsp) +} + +// DeleteOrgsIDMembersIDWithResponse request returning *DeleteOrgsIDMembersIDResponse +func (c *ClientWithResponses) DeleteOrgsIDMembersIDWithResponse(ctx context.Context, orgID string, userID string, params *DeleteOrgsIDMembersIDParams) (*DeleteOrgsIDMembersIDResponse, error) { + rsp, err := c.DeleteOrgsIDMembersID(ctx, orgID, userID, params) + if err != nil { + return nil, err + } + return ParseDeleteOrgsIDMembersIDResponse(rsp) +} + +// GetOrgsIDOwnersWithResponse request returning *GetOrgsIDOwnersResponse +func (c *ClientWithResponses) GetOrgsIDOwnersWithResponse(ctx context.Context, orgID string, params *GetOrgsIDOwnersParams) (*GetOrgsIDOwnersResponse, error) { + rsp, err := c.GetOrgsIDOwners(ctx, orgID, params) + if err != nil { + return nil, err + } + return ParseGetOrgsIDOwnersResponse(rsp) +} + +// PostOrgsIDOwnersWithBodyWithResponse request with arbitrary body returning *PostOrgsIDOwnersResponse +func (c *ClientWithResponses) PostOrgsIDOwnersWithBodyWithResponse(ctx context.Context, orgID string, params *PostOrgsIDOwnersParams, contentType string, body io.Reader) (*PostOrgsIDOwnersResponse, error) { + rsp, err := c.PostOrgsIDOwnersWithBody(ctx, orgID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostOrgsIDOwnersResponse(rsp) +} + +func (c *ClientWithResponses) PostOrgsIDOwnersWithResponse(ctx context.Context, orgID string, params *PostOrgsIDOwnersParams, body PostOrgsIDOwnersJSONRequestBody) (*PostOrgsIDOwnersResponse, error) { + rsp, err := c.PostOrgsIDOwners(ctx, orgID, params, body) + if err != nil { + return nil, err + } + return ParsePostOrgsIDOwnersResponse(rsp) +} + +// DeleteOrgsIDOwnersIDWithResponse request returning *DeleteOrgsIDOwnersIDResponse +func (c *ClientWithResponses) DeleteOrgsIDOwnersIDWithResponse(ctx context.Context, orgID string, userID string, params *DeleteOrgsIDOwnersIDParams) (*DeleteOrgsIDOwnersIDResponse, error) { + rsp, err := c.DeleteOrgsIDOwnersID(ctx, orgID, userID, params) + if err != nil { + return nil, err + } + return ParseDeleteOrgsIDOwnersIDResponse(rsp) +} + +// GetOrgsIDSecretsWithResponse request returning *GetOrgsIDSecretsResponse +func (c 
*ClientWithResponses) GetOrgsIDSecretsWithResponse(ctx context.Context, orgID string, params *GetOrgsIDSecretsParams) (*GetOrgsIDSecretsResponse, error) { + rsp, err := c.GetOrgsIDSecrets(ctx, orgID, params) + if err != nil { + return nil, err + } + return ParseGetOrgsIDSecretsResponse(rsp) +} + +// PatchOrgsIDSecretsWithBodyWithResponse request with arbitrary body returning *PatchOrgsIDSecretsResponse +func (c *ClientWithResponses) PatchOrgsIDSecretsWithBodyWithResponse(ctx context.Context, orgID string, params *PatchOrgsIDSecretsParams, contentType string, body io.Reader) (*PatchOrgsIDSecretsResponse, error) { + rsp, err := c.PatchOrgsIDSecretsWithBody(ctx, orgID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchOrgsIDSecretsResponse(rsp) +} + +func (c *ClientWithResponses) PatchOrgsIDSecretsWithResponse(ctx context.Context, orgID string, params *PatchOrgsIDSecretsParams, body PatchOrgsIDSecretsJSONRequestBody) (*PatchOrgsIDSecretsResponse, error) { + rsp, err := c.PatchOrgsIDSecrets(ctx, orgID, params, body) + if err != nil { + return nil, err + } + return ParsePatchOrgsIDSecretsResponse(rsp) +} + +// PostOrgsIDSecretsWithBodyWithResponse request with arbitrary body returning *PostOrgsIDSecretsResponse +func (c *ClientWithResponses) PostOrgsIDSecretsWithBodyWithResponse(ctx context.Context, orgID string, params *PostOrgsIDSecretsParams, contentType string, body io.Reader) (*PostOrgsIDSecretsResponse, error) { + rsp, err := c.PostOrgsIDSecretsWithBody(ctx, orgID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostOrgsIDSecretsResponse(rsp) +} + +func (c *ClientWithResponses) PostOrgsIDSecretsWithResponse(ctx context.Context, orgID string, params *PostOrgsIDSecretsParams, body PostOrgsIDSecretsJSONRequestBody) (*PostOrgsIDSecretsResponse, error) { + rsp, err := c.PostOrgsIDSecrets(ctx, orgID, params, body) + if err != nil { + return nil, err + } + return ParsePostOrgsIDSecretsResponse(rsp) +} + +// DeleteOrgsIDSecretsIDWithResponse request returning *DeleteOrgsIDSecretsIDResponse +func (c *ClientWithResponses) DeleteOrgsIDSecretsIDWithResponse(ctx context.Context, orgID string, secretID string, params *DeleteOrgsIDSecretsIDParams) (*DeleteOrgsIDSecretsIDResponse, error) { + rsp, err := c.DeleteOrgsIDSecretsID(ctx, orgID, secretID, params) + if err != nil { + return nil, err + } + return ParseDeleteOrgsIDSecretsIDResponse(rsp) +} + +// GetPingWithResponse request returning *GetPingResponse +func (c *ClientWithResponses) GetPingWithResponse(ctx context.Context) (*GetPingResponse, error) { + rsp, err := c.GetPing(ctx) + if err != nil { + return nil, err + } + return ParseGetPingResponse(rsp) +} + +// HeadPingWithResponse request returning *HeadPingResponse +func (c *ClientWithResponses) HeadPingWithResponse(ctx context.Context) (*HeadPingResponse, error) { + rsp, err := c.HeadPing(ctx) + if err != nil { + return nil, err + } + return ParseHeadPingResponse(rsp) +} + +// PostQueryWithBodyWithResponse request with arbitrary body returning *PostQueryResponse +func (c *ClientWithResponses) PostQueryWithBodyWithResponse(ctx context.Context, params *PostQueryParams, contentType string, body io.Reader) (*PostQueryResponse, error) { + rsp, err := c.PostQueryWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostQueryResponse(rsp) +} + +func (c *ClientWithResponses) PostQueryWithResponse(ctx context.Context, params *PostQueryParams, body PostQueryJSONRequestBody) (*PostQueryResponse, 
error) { + rsp, err := c.PostQuery(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostQueryResponse(rsp) +} + +// PostQueryAnalyzeWithBodyWithResponse request with arbitrary body returning *PostQueryAnalyzeResponse +func (c *ClientWithResponses) PostQueryAnalyzeWithBodyWithResponse(ctx context.Context, params *PostQueryAnalyzeParams, contentType string, body io.Reader) (*PostQueryAnalyzeResponse, error) { + rsp, err := c.PostQueryAnalyzeWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostQueryAnalyzeResponse(rsp) +} + +func (c *ClientWithResponses) PostQueryAnalyzeWithResponse(ctx context.Context, params *PostQueryAnalyzeParams, body PostQueryAnalyzeJSONRequestBody) (*PostQueryAnalyzeResponse, error) { + rsp, err := c.PostQueryAnalyze(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostQueryAnalyzeResponse(rsp) +} + +// PostQueryAstWithBodyWithResponse request with arbitrary body returning *PostQueryAstResponse +func (c *ClientWithResponses) PostQueryAstWithBodyWithResponse(ctx context.Context, params *PostQueryAstParams, contentType string, body io.Reader) (*PostQueryAstResponse, error) { + rsp, err := c.PostQueryAstWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostQueryAstResponse(rsp) +} + +func (c *ClientWithResponses) PostQueryAstWithResponse(ctx context.Context, params *PostQueryAstParams, body PostQueryAstJSONRequestBody) (*PostQueryAstResponse, error) { + rsp, err := c.PostQueryAst(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostQueryAstResponse(rsp) +} + +// GetQuerySuggestionsWithResponse request returning *GetQuerySuggestionsResponse +func (c *ClientWithResponses) GetQuerySuggestionsWithResponse(ctx context.Context, params *GetQuerySuggestionsParams) (*GetQuerySuggestionsResponse, error) { + rsp, err := c.GetQuerySuggestions(ctx, params) + if err != nil { + return nil, err + } + return ParseGetQuerySuggestionsResponse(rsp) +} + +// GetQuerySuggestionsNameWithResponse request returning *GetQuerySuggestionsNameResponse +func (c *ClientWithResponses) GetQuerySuggestionsNameWithResponse(ctx context.Context, name string, params *GetQuerySuggestionsNameParams) (*GetQuerySuggestionsNameResponse, error) { + rsp, err := c.GetQuerySuggestionsName(ctx, name, params) + if err != nil { + return nil, err + } + return ParseGetQuerySuggestionsNameResponse(rsp) +} + +// GetReadyWithResponse request returning *GetReadyResponse +func (c *ClientWithResponses) GetReadyWithResponse(ctx context.Context, params *GetReadyParams) (*GetReadyResponse, error) { + rsp, err := c.GetReady(ctx, params) + if err != nil { + return nil, err + } + return ParseGetReadyResponse(rsp) +} + +// GetRemoteConnectionsWithResponse request returning *GetRemoteConnectionsResponse +func (c *ClientWithResponses) GetRemoteConnectionsWithResponse(ctx context.Context, params *GetRemoteConnectionsParams) (*GetRemoteConnectionsResponse, error) { + rsp, err := c.GetRemoteConnections(ctx, params) + if err != nil { + return nil, err + } + return ParseGetRemoteConnectionsResponse(rsp) +} + +// PostRemoteConnectionWithBodyWithResponse request with arbitrary body returning *PostRemoteConnectionResponse +func (c *ClientWithResponses) PostRemoteConnectionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader) (*PostRemoteConnectionResponse, error) { + rsp, err := c.PostRemoteConnectionWithBody(ctx, contentType, body) + if err != nil { + return 
nil, err + } + return ParsePostRemoteConnectionResponse(rsp) +} + +func (c *ClientWithResponses) PostRemoteConnectionWithResponse(ctx context.Context, body PostRemoteConnectionJSONRequestBody) (*PostRemoteConnectionResponse, error) { + rsp, err := c.PostRemoteConnection(ctx, body) + if err != nil { + return nil, err + } + return ParsePostRemoteConnectionResponse(rsp) +} + +// DeleteRemoteConnectionByIDWithResponse request returning *DeleteRemoteConnectionByIDResponse +func (c *ClientWithResponses) DeleteRemoteConnectionByIDWithResponse(ctx context.Context, remoteID string, params *DeleteRemoteConnectionByIDParams) (*DeleteRemoteConnectionByIDResponse, error) { + rsp, err := c.DeleteRemoteConnectionByID(ctx, remoteID, params) + if err != nil { + return nil, err + } + return ParseDeleteRemoteConnectionByIDResponse(rsp) +} + +// GetRemoteConnectionByIDWithResponse request returning *GetRemoteConnectionByIDResponse +func (c *ClientWithResponses) GetRemoteConnectionByIDWithResponse(ctx context.Context, remoteID string, params *GetRemoteConnectionByIDParams) (*GetRemoteConnectionByIDResponse, error) { + rsp, err := c.GetRemoteConnectionByID(ctx, remoteID, params) + if err != nil { + return nil, err + } + return ParseGetRemoteConnectionByIDResponse(rsp) +} + +// PatchRemoteConnectionByIDWithBodyWithResponse request with arbitrary body returning *PatchRemoteConnectionByIDResponse +func (c *ClientWithResponses) PatchRemoteConnectionByIDWithBodyWithResponse(ctx context.Context, remoteID string, params *PatchRemoteConnectionByIDParams, contentType string, body io.Reader) (*PatchRemoteConnectionByIDResponse, error) { + rsp, err := c.PatchRemoteConnectionByIDWithBody(ctx, remoteID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchRemoteConnectionByIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchRemoteConnectionByIDWithResponse(ctx context.Context, remoteID string, params *PatchRemoteConnectionByIDParams, body PatchRemoteConnectionByIDJSONRequestBody) (*PatchRemoteConnectionByIDResponse, error) { + rsp, err := c.PatchRemoteConnectionByID(ctx, remoteID, params, body) + if err != nil { + return nil, err + } + return ParsePatchRemoteConnectionByIDResponse(rsp) +} + +// GetReplicationsWithResponse request returning *GetReplicationsResponse +func (c *ClientWithResponses) GetReplicationsWithResponse(ctx context.Context, params *GetReplicationsParams) (*GetReplicationsResponse, error) { + rsp, err := c.GetReplications(ctx, params) + if err != nil { + return nil, err + } + return ParseGetReplicationsResponse(rsp) +} + +// PostReplicationWithBodyWithResponse request with arbitrary body returning *PostReplicationResponse +func (c *ClientWithResponses) PostReplicationWithBodyWithResponse(ctx context.Context, params *PostReplicationParams, contentType string, body io.Reader) (*PostReplicationResponse, error) { + rsp, err := c.PostReplicationWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostReplicationResponse(rsp) +} + +func (c *ClientWithResponses) PostReplicationWithResponse(ctx context.Context, params *PostReplicationParams, body PostReplicationJSONRequestBody) (*PostReplicationResponse, error) { + rsp, err := c.PostReplication(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostReplicationResponse(rsp) +} + +// DeleteReplicationByIDWithResponse request returning *DeleteReplicationByIDResponse +func (c *ClientWithResponses) DeleteReplicationByIDWithResponse(ctx context.Context, replicationID 
string, params *DeleteReplicationByIDParams) (*DeleteReplicationByIDResponse, error) { + rsp, err := c.DeleteReplicationByID(ctx, replicationID, params) + if err != nil { + return nil, err + } + return ParseDeleteReplicationByIDResponse(rsp) +} + +// GetReplicationByIDWithResponse request returning *GetReplicationByIDResponse +func (c *ClientWithResponses) GetReplicationByIDWithResponse(ctx context.Context, replicationID string, params *GetReplicationByIDParams) (*GetReplicationByIDResponse, error) { + rsp, err := c.GetReplicationByID(ctx, replicationID, params) + if err != nil { + return nil, err + } + return ParseGetReplicationByIDResponse(rsp) +} + +// PatchReplicationByIDWithBodyWithResponse request with arbitrary body returning *PatchReplicationByIDResponse +func (c *ClientWithResponses) PatchReplicationByIDWithBodyWithResponse(ctx context.Context, replicationID string, params *PatchReplicationByIDParams, contentType string, body io.Reader) (*PatchReplicationByIDResponse, error) { + rsp, err := c.PatchReplicationByIDWithBody(ctx, replicationID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchReplicationByIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchReplicationByIDWithResponse(ctx context.Context, replicationID string, params *PatchReplicationByIDParams, body PatchReplicationByIDJSONRequestBody) (*PatchReplicationByIDResponse, error) { + rsp, err := c.PatchReplicationByID(ctx, replicationID, params, body) + if err != nil { + return nil, err + } + return ParsePatchReplicationByIDResponse(rsp) +} + +// PostValidateReplicationByIDWithResponse request returning *PostValidateReplicationByIDResponse +func (c *ClientWithResponses) PostValidateReplicationByIDWithResponse(ctx context.Context, replicationID string, params *PostValidateReplicationByIDParams) (*PostValidateReplicationByIDResponse, error) { + rsp, err := c.PostValidateReplicationByID(ctx, replicationID, params) + if err != nil { + return nil, err + } + return ParsePostValidateReplicationByIDResponse(rsp) +} + +// GetResourcesWithResponse request returning *GetResourcesResponse +func (c *ClientWithResponses) GetResourcesWithResponse(ctx context.Context, params *GetResourcesParams) (*GetResourcesResponse, error) { + rsp, err := c.GetResources(ctx, params) + if err != nil { + return nil, err + } + return ParseGetResourcesResponse(rsp) +} + +// PostRestoreBucketIDWithBodyWithResponse request with arbitrary body returning *PostRestoreBucketIDResponse +func (c *ClientWithResponses) PostRestoreBucketIDWithBodyWithResponse(ctx context.Context, bucketID string, params *PostRestoreBucketIDParams, contentType string, body io.Reader) (*PostRestoreBucketIDResponse, error) { + rsp, err := c.PostRestoreBucketIDWithBody(ctx, bucketID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostRestoreBucketIDResponse(rsp) +} + +// PostRestoreBucketMetadataWithBodyWithResponse request with arbitrary body returning *PostRestoreBucketMetadataResponse +func (c *ClientWithResponses) PostRestoreBucketMetadataWithBodyWithResponse(ctx context.Context, params *PostRestoreBucketMetadataParams, contentType string, body io.Reader) (*PostRestoreBucketMetadataResponse, error) { + rsp, err := c.PostRestoreBucketMetadataWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostRestoreBucketMetadataResponse(rsp) +} + +func (c *ClientWithResponses) PostRestoreBucketMetadataWithResponse(ctx context.Context, params *PostRestoreBucketMetadataParams, body 
PostRestoreBucketMetadataJSONRequestBody) (*PostRestoreBucketMetadataResponse, error) { + rsp, err := c.PostRestoreBucketMetadata(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostRestoreBucketMetadataResponse(rsp) +} + +// PostRestoreKVWithBodyWithResponse request with arbitrary body returning *PostRestoreKVResponse +func (c *ClientWithResponses) PostRestoreKVWithBodyWithResponse(ctx context.Context, params *PostRestoreKVParams, contentType string, body io.Reader) (*PostRestoreKVResponse, error) { + rsp, err := c.PostRestoreKVWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostRestoreKVResponse(rsp) +} + +// PostRestoreShardIdWithBodyWithResponse request with arbitrary body returning *PostRestoreShardIdResponse +func (c *ClientWithResponses) PostRestoreShardIdWithBodyWithResponse(ctx context.Context, shardID string, params *PostRestoreShardIdParams, contentType string, body io.Reader) (*PostRestoreShardIdResponse, error) { + rsp, err := c.PostRestoreShardIdWithBody(ctx, shardID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostRestoreShardIdResponse(rsp) +} + +// PostRestoreSQLWithBodyWithResponse request with arbitrary body returning *PostRestoreSQLResponse +func (c *ClientWithResponses) PostRestoreSQLWithBodyWithResponse(ctx context.Context, params *PostRestoreSQLParams, contentType string, body io.Reader) (*PostRestoreSQLResponse, error) { + rsp, err := c.PostRestoreSQLWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostRestoreSQLResponse(rsp) +} + +// GetScrapersWithResponse request returning *GetScrapersResponse +func (c *ClientWithResponses) GetScrapersWithResponse(ctx context.Context, params *GetScrapersParams) (*GetScrapersResponse, error) { + rsp, err := c.GetScrapers(ctx, params) + if err != nil { + return nil, err + } + return ParseGetScrapersResponse(rsp) +} + +// PostScrapersWithBodyWithResponse request with arbitrary body returning *PostScrapersResponse +func (c *ClientWithResponses) PostScrapersWithBodyWithResponse(ctx context.Context, params *PostScrapersParams, contentType string, body io.Reader) (*PostScrapersResponse, error) { + rsp, err := c.PostScrapersWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostScrapersResponse(rsp) +} + +func (c *ClientWithResponses) PostScrapersWithResponse(ctx context.Context, params *PostScrapersParams, body PostScrapersJSONRequestBody) (*PostScrapersResponse, error) { + rsp, err := c.PostScrapers(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostScrapersResponse(rsp) +} + +// DeleteScrapersIDWithResponse request returning *DeleteScrapersIDResponse +func (c *ClientWithResponses) DeleteScrapersIDWithResponse(ctx context.Context, scraperTargetID string, params *DeleteScrapersIDParams) (*DeleteScrapersIDResponse, error) { + rsp, err := c.DeleteScrapersID(ctx, scraperTargetID, params) + if err != nil { + return nil, err + } + return ParseDeleteScrapersIDResponse(rsp) +} + +// GetScrapersIDWithResponse request returning *GetScrapersIDResponse +func (c *ClientWithResponses) GetScrapersIDWithResponse(ctx context.Context, scraperTargetID string, params *GetScrapersIDParams) (*GetScrapersIDResponse, error) { + rsp, err := c.GetScrapersID(ctx, scraperTargetID, params) + if err != nil { + return nil, err + } + return ParseGetScrapersIDResponse(rsp) +} + +// PatchScrapersIDWithBodyWithResponse request with arbitrary body 
returning *PatchScrapersIDResponse +func (c *ClientWithResponses) PatchScrapersIDWithBodyWithResponse(ctx context.Context, scraperTargetID string, params *PatchScrapersIDParams, contentType string, body io.Reader) (*PatchScrapersIDResponse, error) { + rsp, err := c.PatchScrapersIDWithBody(ctx, scraperTargetID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchScrapersIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchScrapersIDWithResponse(ctx context.Context, scraperTargetID string, params *PatchScrapersIDParams, body PatchScrapersIDJSONRequestBody) (*PatchScrapersIDResponse, error) { + rsp, err := c.PatchScrapersID(ctx, scraperTargetID, params, body) + if err != nil { + return nil, err + } + return ParsePatchScrapersIDResponse(rsp) +} + +// GetScrapersIDLabelsWithResponse request returning *GetScrapersIDLabelsResponse +func (c *ClientWithResponses) GetScrapersIDLabelsWithResponse(ctx context.Context, scraperTargetID string, params *GetScrapersIDLabelsParams) (*GetScrapersIDLabelsResponse, error) { + rsp, err := c.GetScrapersIDLabels(ctx, scraperTargetID, params) + if err != nil { + return nil, err + } + return ParseGetScrapersIDLabelsResponse(rsp) +} + +// PostScrapersIDLabelsWithBodyWithResponse request with arbitrary body returning *PostScrapersIDLabelsResponse +func (c *ClientWithResponses) PostScrapersIDLabelsWithBodyWithResponse(ctx context.Context, scraperTargetID string, params *PostScrapersIDLabelsParams, contentType string, body io.Reader) (*PostScrapersIDLabelsResponse, error) { + rsp, err := c.PostScrapersIDLabelsWithBody(ctx, scraperTargetID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostScrapersIDLabelsResponse(rsp) +} + +func (c *ClientWithResponses) PostScrapersIDLabelsWithResponse(ctx context.Context, scraperTargetID string, params *PostScrapersIDLabelsParams, body PostScrapersIDLabelsJSONRequestBody) (*PostScrapersIDLabelsResponse, error) { + rsp, err := c.PostScrapersIDLabels(ctx, scraperTargetID, params, body) + if err != nil { + return nil, err + } + return ParsePostScrapersIDLabelsResponse(rsp) +} + +// DeleteScrapersIDLabelsIDWithResponse request returning *DeleteScrapersIDLabelsIDResponse +func (c *ClientWithResponses) DeleteScrapersIDLabelsIDWithResponse(ctx context.Context, scraperTargetID string, labelID string, params *DeleteScrapersIDLabelsIDParams) (*DeleteScrapersIDLabelsIDResponse, error) { + rsp, err := c.DeleteScrapersIDLabelsID(ctx, scraperTargetID, labelID, params) + if err != nil { + return nil, err + } + return ParseDeleteScrapersIDLabelsIDResponse(rsp) +} + +// GetScrapersIDMembersWithResponse request returning *GetScrapersIDMembersResponse +func (c *ClientWithResponses) GetScrapersIDMembersWithResponse(ctx context.Context, scraperTargetID string, params *GetScrapersIDMembersParams) (*GetScrapersIDMembersResponse, error) { + rsp, err := c.GetScrapersIDMembers(ctx, scraperTargetID, params) + if err != nil { + return nil, err + } + return ParseGetScrapersIDMembersResponse(rsp) +} + +// PostScrapersIDMembersWithBodyWithResponse request with arbitrary body returning *PostScrapersIDMembersResponse +func (c *ClientWithResponses) PostScrapersIDMembersWithBodyWithResponse(ctx context.Context, scraperTargetID string, params *PostScrapersIDMembersParams, contentType string, body io.Reader) (*PostScrapersIDMembersResponse, error) { + rsp, err := c.PostScrapersIDMembersWithBody(ctx, scraperTargetID, params, contentType, body) + if err != nil { + return nil, err + } + return 
ParsePostScrapersIDMembersResponse(rsp) +} + +func (c *ClientWithResponses) PostScrapersIDMembersWithResponse(ctx context.Context, scraperTargetID string, params *PostScrapersIDMembersParams, body PostScrapersIDMembersJSONRequestBody) (*PostScrapersIDMembersResponse, error) { + rsp, err := c.PostScrapersIDMembers(ctx, scraperTargetID, params, body) + if err != nil { + return nil, err + } + return ParsePostScrapersIDMembersResponse(rsp) +} + +// DeleteScrapersIDMembersIDWithResponse request returning *DeleteScrapersIDMembersIDResponse +func (c *ClientWithResponses) DeleteScrapersIDMembersIDWithResponse(ctx context.Context, scraperTargetID string, userID string, params *DeleteScrapersIDMembersIDParams) (*DeleteScrapersIDMembersIDResponse, error) { + rsp, err := c.DeleteScrapersIDMembersID(ctx, scraperTargetID, userID, params) + if err != nil { + return nil, err + } + return ParseDeleteScrapersIDMembersIDResponse(rsp) +} + +// GetScrapersIDOwnersWithResponse request returning *GetScrapersIDOwnersResponse +func (c *ClientWithResponses) GetScrapersIDOwnersWithResponse(ctx context.Context, scraperTargetID string, params *GetScrapersIDOwnersParams) (*GetScrapersIDOwnersResponse, error) { + rsp, err := c.GetScrapersIDOwners(ctx, scraperTargetID, params) + if err != nil { + return nil, err + } + return ParseGetScrapersIDOwnersResponse(rsp) +} + +// PostScrapersIDOwnersWithBodyWithResponse request with arbitrary body returning *PostScrapersIDOwnersResponse +func (c *ClientWithResponses) PostScrapersIDOwnersWithBodyWithResponse(ctx context.Context, scraperTargetID string, params *PostScrapersIDOwnersParams, contentType string, body io.Reader) (*PostScrapersIDOwnersResponse, error) { + rsp, err := c.PostScrapersIDOwnersWithBody(ctx, scraperTargetID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostScrapersIDOwnersResponse(rsp) +} + +func (c *ClientWithResponses) PostScrapersIDOwnersWithResponse(ctx context.Context, scraperTargetID string, params *PostScrapersIDOwnersParams, body PostScrapersIDOwnersJSONRequestBody) (*PostScrapersIDOwnersResponse, error) { + rsp, err := c.PostScrapersIDOwners(ctx, scraperTargetID, params, body) + if err != nil { + return nil, err + } + return ParsePostScrapersIDOwnersResponse(rsp) +} + +// DeleteScrapersIDOwnersIDWithResponse request returning *DeleteScrapersIDOwnersIDResponse +func (c *ClientWithResponses) DeleteScrapersIDOwnersIDWithResponse(ctx context.Context, scraperTargetID string, userID string, params *DeleteScrapersIDOwnersIDParams) (*DeleteScrapersIDOwnersIDResponse, error) { + rsp, err := c.DeleteScrapersIDOwnersID(ctx, scraperTargetID, userID, params) + if err != nil { + return nil, err + } + return ParseDeleteScrapersIDOwnersIDResponse(rsp) +} + +// GetSetupWithResponse request returning *GetSetupResponse +func (c *ClientWithResponses) GetSetupWithResponse(ctx context.Context, params *GetSetupParams) (*GetSetupResponse, error) { + rsp, err := c.GetSetup(ctx, params) + if err != nil { + return nil, err + } + return ParseGetSetupResponse(rsp) +} + +// PostSetupWithBodyWithResponse request with arbitrary body returning *PostSetupResponse +func (c *ClientWithResponses) PostSetupWithBodyWithResponse(ctx context.Context, params *PostSetupParams, contentType string, body io.Reader) (*PostSetupResponse, error) { + rsp, err := c.PostSetupWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostSetupResponse(rsp) +} + +func (c *ClientWithResponses) PostSetupWithResponse(ctx 
context.Context, params *PostSetupParams, body PostSetupJSONRequestBody) (*PostSetupResponse, error) { + rsp, err := c.PostSetup(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostSetupResponse(rsp) +} + +// PostSigninWithResponse request returning *PostSigninResponse +func (c *ClientWithResponses) PostSigninWithResponse(ctx context.Context, params *PostSigninParams) (*PostSigninResponse, error) { + rsp, err := c.PostSignin(ctx, params) + if err != nil { + return nil, err + } + return ParsePostSigninResponse(rsp) +} + +// PostSignoutWithResponse request returning *PostSignoutResponse +func (c *ClientWithResponses) PostSignoutWithResponse(ctx context.Context, params *PostSignoutParams) (*PostSignoutResponse, error) { + rsp, err := c.PostSignout(ctx, params) + if err != nil { + return nil, err + } + return ParsePostSignoutResponse(rsp) +} + +// GetSourcesWithResponse request returning *GetSourcesResponse +func (c *ClientWithResponses) GetSourcesWithResponse(ctx context.Context, params *GetSourcesParams) (*GetSourcesResponse, error) { + rsp, err := c.GetSources(ctx, params) + if err != nil { + return nil, err + } + return ParseGetSourcesResponse(rsp) +} + +// PostSourcesWithBodyWithResponse request with arbitrary body returning *PostSourcesResponse +func (c *ClientWithResponses) PostSourcesWithBodyWithResponse(ctx context.Context, params *PostSourcesParams, contentType string, body io.Reader) (*PostSourcesResponse, error) { + rsp, err := c.PostSourcesWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostSourcesResponse(rsp) +} + +func (c *ClientWithResponses) PostSourcesWithResponse(ctx context.Context, params *PostSourcesParams, body PostSourcesJSONRequestBody) (*PostSourcesResponse, error) { + rsp, err := c.PostSources(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostSourcesResponse(rsp) +} + +// DeleteSourcesIDWithResponse request returning *DeleteSourcesIDResponse +func (c *ClientWithResponses) DeleteSourcesIDWithResponse(ctx context.Context, sourceID string, params *DeleteSourcesIDParams) (*DeleteSourcesIDResponse, error) { + rsp, err := c.DeleteSourcesID(ctx, sourceID, params) + if err != nil { + return nil, err + } + return ParseDeleteSourcesIDResponse(rsp) +} + +// GetSourcesIDWithResponse request returning *GetSourcesIDResponse +func (c *ClientWithResponses) GetSourcesIDWithResponse(ctx context.Context, sourceID string, params *GetSourcesIDParams) (*GetSourcesIDResponse, error) { + rsp, err := c.GetSourcesID(ctx, sourceID, params) + if err != nil { + return nil, err + } + return ParseGetSourcesIDResponse(rsp) +} + +// PatchSourcesIDWithBodyWithResponse request with arbitrary body returning *PatchSourcesIDResponse +func (c *ClientWithResponses) PatchSourcesIDWithBodyWithResponse(ctx context.Context, sourceID string, params *PatchSourcesIDParams, contentType string, body io.Reader) (*PatchSourcesIDResponse, error) { + rsp, err := c.PatchSourcesIDWithBody(ctx, sourceID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchSourcesIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchSourcesIDWithResponse(ctx context.Context, sourceID string, params *PatchSourcesIDParams, body PatchSourcesIDJSONRequestBody) (*PatchSourcesIDResponse, error) { + rsp, err := c.PatchSourcesID(ctx, sourceID, params, body) + if err != nil { + return nil, err + } + return ParsePatchSourcesIDResponse(rsp) +} + +// GetSourcesIDBucketsWithResponse request returning 
*GetSourcesIDBucketsResponse +func (c *ClientWithResponses) GetSourcesIDBucketsWithResponse(ctx context.Context, sourceID string, params *GetSourcesIDBucketsParams) (*GetSourcesIDBucketsResponse, error) { + rsp, err := c.GetSourcesIDBuckets(ctx, sourceID, params) + if err != nil { + return nil, err + } + return ParseGetSourcesIDBucketsResponse(rsp) +} + +// GetSourcesIDHealthWithResponse request returning *GetSourcesIDHealthResponse +func (c *ClientWithResponses) GetSourcesIDHealthWithResponse(ctx context.Context, sourceID string, params *GetSourcesIDHealthParams) (*GetSourcesIDHealthResponse, error) { + rsp, err := c.GetSourcesIDHealth(ctx, sourceID, params) + if err != nil { + return nil, err + } + return ParseGetSourcesIDHealthResponse(rsp) +} + +// ListStacksWithResponse request returning *ListStacksResponse +func (c *ClientWithResponses) ListStacksWithResponse(ctx context.Context, params *ListStacksParams) (*ListStacksResponse, error) { + rsp, err := c.ListStacks(ctx, params) + if err != nil { + return nil, err + } + return ParseListStacksResponse(rsp) +} + +// CreateStackWithBodyWithResponse request with arbitrary body returning *CreateStackResponse +func (c *ClientWithResponses) CreateStackWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader) (*CreateStackResponse, error) { + rsp, err := c.CreateStackWithBody(ctx, contentType, body) + if err != nil { + return nil, err + } + return ParseCreateStackResponse(rsp) +} + +func (c *ClientWithResponses) CreateStackWithResponse(ctx context.Context, body CreateStackJSONRequestBody) (*CreateStackResponse, error) { + rsp, err := c.CreateStack(ctx, body) + if err != nil { + return nil, err + } + return ParseCreateStackResponse(rsp) +} + +// DeleteStackWithResponse request returning *DeleteStackResponse +func (c *ClientWithResponses) DeleteStackWithResponse(ctx context.Context, stackId string, params *DeleteStackParams) (*DeleteStackResponse, error) { + rsp, err := c.DeleteStack(ctx, stackId, params) + if err != nil { + return nil, err + } + return ParseDeleteStackResponse(rsp) +} + +// ReadStackWithResponse request returning *ReadStackResponse +func (c *ClientWithResponses) ReadStackWithResponse(ctx context.Context, stackId string) (*ReadStackResponse, error) { + rsp, err := c.ReadStack(ctx, stackId) + if err != nil { + return nil, err + } + return ParseReadStackResponse(rsp) +} + +// UpdateStackWithBodyWithResponse request with arbitrary body returning *UpdateStackResponse +func (c *ClientWithResponses) UpdateStackWithBodyWithResponse(ctx context.Context, stackId string, contentType string, body io.Reader) (*UpdateStackResponse, error) { + rsp, err := c.UpdateStackWithBody(ctx, stackId, contentType, body) + if err != nil { + return nil, err + } + return ParseUpdateStackResponse(rsp) +} + +func (c *ClientWithResponses) UpdateStackWithResponse(ctx context.Context, stackId string, body UpdateStackJSONRequestBody) (*UpdateStackResponse, error) { + rsp, err := c.UpdateStack(ctx, stackId, body) + if err != nil { + return nil, err + } + return ParseUpdateStackResponse(rsp) +} + +// UninstallStackWithResponse request returning *UninstallStackResponse +func (c *ClientWithResponses) UninstallStackWithResponse(ctx context.Context, stackId string) (*UninstallStackResponse, error) { + rsp, err := c.UninstallStack(ctx, stackId) + if err != nil { + return nil, err + } + return ParseUninstallStackResponse(rsp) +} + +// GetTasksWithResponse request returning *GetTasksResponse +func (c *ClientWithResponses) GetTasksWithResponse(ctx 
context.Context, params *GetTasksParams) (*GetTasksResponse, error) { + rsp, err := c.GetTasks(ctx, params) + if err != nil { + return nil, err + } + return ParseGetTasksResponse(rsp) +} + +// PostTasksWithBodyWithResponse request with arbitrary body returning *PostTasksResponse +func (c *ClientWithResponses) PostTasksWithBodyWithResponse(ctx context.Context, params *PostTasksParams, contentType string, body io.Reader) (*PostTasksResponse, error) { + rsp, err := c.PostTasksWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostTasksResponse(rsp) +} + +func (c *ClientWithResponses) PostTasksWithResponse(ctx context.Context, params *PostTasksParams, body PostTasksJSONRequestBody) (*PostTasksResponse, error) { + rsp, err := c.PostTasks(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostTasksResponse(rsp) +} + +// DeleteTasksIDWithResponse request returning *DeleteTasksIDResponse +func (c *ClientWithResponses) DeleteTasksIDWithResponse(ctx context.Context, taskID string, params *DeleteTasksIDParams) (*DeleteTasksIDResponse, error) { + rsp, err := c.DeleteTasksID(ctx, taskID, params) + if err != nil { + return nil, err + } + return ParseDeleteTasksIDResponse(rsp) +} + +// GetTasksIDWithResponse request returning *GetTasksIDResponse +func (c *ClientWithResponses) GetTasksIDWithResponse(ctx context.Context, taskID string, params *GetTasksIDParams) (*GetTasksIDResponse, error) { + rsp, err := c.GetTasksID(ctx, taskID, params) + if err != nil { + return nil, err + } + return ParseGetTasksIDResponse(rsp) +} + +// PatchTasksIDWithBodyWithResponse request with arbitrary body returning *PatchTasksIDResponse +func (c *ClientWithResponses) PatchTasksIDWithBodyWithResponse(ctx context.Context, taskID string, params *PatchTasksIDParams, contentType string, body io.Reader) (*PatchTasksIDResponse, error) { + rsp, err := c.PatchTasksIDWithBody(ctx, taskID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchTasksIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchTasksIDWithResponse(ctx context.Context, taskID string, params *PatchTasksIDParams, body PatchTasksIDJSONRequestBody) (*PatchTasksIDResponse, error) { + rsp, err := c.PatchTasksID(ctx, taskID, params, body) + if err != nil { + return nil, err + } + return ParsePatchTasksIDResponse(rsp) +} + +// GetTasksIDLabelsWithResponse request returning *GetTasksIDLabelsResponse +func (c *ClientWithResponses) GetTasksIDLabelsWithResponse(ctx context.Context, taskID string, params *GetTasksIDLabelsParams) (*GetTasksIDLabelsResponse, error) { + rsp, err := c.GetTasksIDLabels(ctx, taskID, params) + if err != nil { + return nil, err + } + return ParseGetTasksIDLabelsResponse(rsp) +} + +// PostTasksIDLabelsWithBodyWithResponse request with arbitrary body returning *PostTasksIDLabelsResponse +func (c *ClientWithResponses) PostTasksIDLabelsWithBodyWithResponse(ctx context.Context, taskID string, params *PostTasksIDLabelsParams, contentType string, body io.Reader) (*PostTasksIDLabelsResponse, error) { + rsp, err := c.PostTasksIDLabelsWithBody(ctx, taskID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostTasksIDLabelsResponse(rsp) +} + +func (c *ClientWithResponses) PostTasksIDLabelsWithResponse(ctx context.Context, taskID string, params *PostTasksIDLabelsParams, body PostTasksIDLabelsJSONRequestBody) (*PostTasksIDLabelsResponse, error) { + rsp, err := c.PostTasksIDLabels(ctx, taskID, params, body) + if err != nil { + return nil, 
err + } + return ParsePostTasksIDLabelsResponse(rsp) +} + +// DeleteTasksIDLabelsIDWithResponse request returning *DeleteTasksIDLabelsIDResponse +func (c *ClientWithResponses) DeleteTasksIDLabelsIDWithResponse(ctx context.Context, taskID string, labelID string, params *DeleteTasksIDLabelsIDParams) (*DeleteTasksIDLabelsIDResponse, error) { + rsp, err := c.DeleteTasksIDLabelsID(ctx, taskID, labelID, params) + if err != nil { + return nil, err + } + return ParseDeleteTasksIDLabelsIDResponse(rsp) +} + +// GetTasksIDLogsWithResponse request returning *GetTasksIDLogsResponse +func (c *ClientWithResponses) GetTasksIDLogsWithResponse(ctx context.Context, taskID string, params *GetTasksIDLogsParams) (*GetTasksIDLogsResponse, error) { + rsp, err := c.GetTasksIDLogs(ctx, taskID, params) + if err != nil { + return nil, err + } + return ParseGetTasksIDLogsResponse(rsp) +} + +// GetTasksIDMembersWithResponse request returning *GetTasksIDMembersResponse +func (c *ClientWithResponses) GetTasksIDMembersWithResponse(ctx context.Context, taskID string, params *GetTasksIDMembersParams) (*GetTasksIDMembersResponse, error) { + rsp, err := c.GetTasksIDMembers(ctx, taskID, params) + if err != nil { + return nil, err + } + return ParseGetTasksIDMembersResponse(rsp) +} + +// PostTasksIDMembersWithBodyWithResponse request with arbitrary body returning *PostTasksIDMembersResponse +func (c *ClientWithResponses) PostTasksIDMembersWithBodyWithResponse(ctx context.Context, taskID string, params *PostTasksIDMembersParams, contentType string, body io.Reader) (*PostTasksIDMembersResponse, error) { + rsp, err := c.PostTasksIDMembersWithBody(ctx, taskID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostTasksIDMembersResponse(rsp) +} + +func (c *ClientWithResponses) PostTasksIDMembersWithResponse(ctx context.Context, taskID string, params *PostTasksIDMembersParams, body PostTasksIDMembersJSONRequestBody) (*PostTasksIDMembersResponse, error) { + rsp, err := c.PostTasksIDMembers(ctx, taskID, params, body) + if err != nil { + return nil, err + } + return ParsePostTasksIDMembersResponse(rsp) +} + +// DeleteTasksIDMembersIDWithResponse request returning *DeleteTasksIDMembersIDResponse +func (c *ClientWithResponses) DeleteTasksIDMembersIDWithResponse(ctx context.Context, taskID string, userID string, params *DeleteTasksIDMembersIDParams) (*DeleteTasksIDMembersIDResponse, error) { + rsp, err := c.DeleteTasksIDMembersID(ctx, taskID, userID, params) + if err != nil { + return nil, err + } + return ParseDeleteTasksIDMembersIDResponse(rsp) +} + +// GetTasksIDOwnersWithResponse request returning *GetTasksIDOwnersResponse +func (c *ClientWithResponses) GetTasksIDOwnersWithResponse(ctx context.Context, taskID string, params *GetTasksIDOwnersParams) (*GetTasksIDOwnersResponse, error) { + rsp, err := c.GetTasksIDOwners(ctx, taskID, params) + if err != nil { + return nil, err + } + return ParseGetTasksIDOwnersResponse(rsp) +} + +// PostTasksIDOwnersWithBodyWithResponse request with arbitrary body returning *PostTasksIDOwnersResponse +func (c *ClientWithResponses) PostTasksIDOwnersWithBodyWithResponse(ctx context.Context, taskID string, params *PostTasksIDOwnersParams, contentType string, body io.Reader) (*PostTasksIDOwnersResponse, error) { + rsp, err := c.PostTasksIDOwnersWithBody(ctx, taskID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostTasksIDOwnersResponse(rsp) +} + +func (c *ClientWithResponses) PostTasksIDOwnersWithResponse(ctx context.Context, taskID string, 
params *PostTasksIDOwnersParams, body PostTasksIDOwnersJSONRequestBody) (*PostTasksIDOwnersResponse, error) { + rsp, err := c.PostTasksIDOwners(ctx, taskID, params, body) + if err != nil { + return nil, err + } + return ParsePostTasksIDOwnersResponse(rsp) +} + +// DeleteTasksIDOwnersIDWithResponse request returning *DeleteTasksIDOwnersIDResponse +func (c *ClientWithResponses) DeleteTasksIDOwnersIDWithResponse(ctx context.Context, taskID string, userID string, params *DeleteTasksIDOwnersIDParams) (*DeleteTasksIDOwnersIDResponse, error) { + rsp, err := c.DeleteTasksIDOwnersID(ctx, taskID, userID, params) + if err != nil { + return nil, err + } + return ParseDeleteTasksIDOwnersIDResponse(rsp) +} + +// GetTasksIDRunsWithResponse request returning *GetTasksIDRunsResponse +func (c *ClientWithResponses) GetTasksIDRunsWithResponse(ctx context.Context, taskID string, params *GetTasksIDRunsParams) (*GetTasksIDRunsResponse, error) { + rsp, err := c.GetTasksIDRuns(ctx, taskID, params) + if err != nil { + return nil, err + } + return ParseGetTasksIDRunsResponse(rsp) +} + +// PostTasksIDRunsWithBodyWithResponse request with arbitrary body returning *PostTasksIDRunsResponse +func (c *ClientWithResponses) PostTasksIDRunsWithBodyWithResponse(ctx context.Context, taskID string, params *PostTasksIDRunsParams, contentType string, body io.Reader) (*PostTasksIDRunsResponse, error) { + rsp, err := c.PostTasksIDRunsWithBody(ctx, taskID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostTasksIDRunsResponse(rsp) +} + +func (c *ClientWithResponses) PostTasksIDRunsWithResponse(ctx context.Context, taskID string, params *PostTasksIDRunsParams, body PostTasksIDRunsJSONRequestBody) (*PostTasksIDRunsResponse, error) { + rsp, err := c.PostTasksIDRuns(ctx, taskID, params, body) + if err != nil { + return nil, err + } + return ParsePostTasksIDRunsResponse(rsp) +} + +// DeleteTasksIDRunsIDWithResponse request returning *DeleteTasksIDRunsIDResponse +func (c *ClientWithResponses) DeleteTasksIDRunsIDWithResponse(ctx context.Context, taskID string, runID string, params *DeleteTasksIDRunsIDParams) (*DeleteTasksIDRunsIDResponse, error) { + rsp, err := c.DeleteTasksIDRunsID(ctx, taskID, runID, params) + if err != nil { + return nil, err + } + return ParseDeleteTasksIDRunsIDResponse(rsp) +} + +// GetTasksIDRunsIDWithResponse request returning *GetTasksIDRunsIDResponse +func (c *ClientWithResponses) GetTasksIDRunsIDWithResponse(ctx context.Context, taskID string, runID string, params *GetTasksIDRunsIDParams) (*GetTasksIDRunsIDResponse, error) { + rsp, err := c.GetTasksIDRunsID(ctx, taskID, runID, params) + if err != nil { + return nil, err + } + return ParseGetTasksIDRunsIDResponse(rsp) +} + +// GetTasksIDRunsIDLogsWithResponse request returning *GetTasksIDRunsIDLogsResponse +func (c *ClientWithResponses) GetTasksIDRunsIDLogsWithResponse(ctx context.Context, taskID string, runID string, params *GetTasksIDRunsIDLogsParams) (*GetTasksIDRunsIDLogsResponse, error) { + rsp, err := c.GetTasksIDRunsIDLogs(ctx, taskID, runID, params) + if err != nil { + return nil, err + } + return ParseGetTasksIDRunsIDLogsResponse(rsp) +} + +// PostTasksIDRunsIDRetryWithBodyWithResponse request with arbitrary body returning *PostTasksIDRunsIDRetryResponse +func (c *ClientWithResponses) PostTasksIDRunsIDRetryWithBodyWithResponse(ctx context.Context, taskID string, runID string, params *PostTasksIDRunsIDRetryParams, contentType string, body io.Reader) (*PostTasksIDRunsIDRetryResponse, error) { + rsp, err := 
c.PostTasksIDRunsIDRetryWithBody(ctx, taskID, runID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostTasksIDRunsIDRetryResponse(rsp) +} + +// GetTelegrafPluginsWithResponse request returning *GetTelegrafPluginsResponse +func (c *ClientWithResponses) GetTelegrafPluginsWithResponse(ctx context.Context, params *GetTelegrafPluginsParams) (*GetTelegrafPluginsResponse, error) { + rsp, err := c.GetTelegrafPlugins(ctx, params) + if err != nil { + return nil, err + } + return ParseGetTelegrafPluginsResponse(rsp) +} + +// GetTelegrafsWithResponse request returning *GetTelegrafsResponse +func (c *ClientWithResponses) GetTelegrafsWithResponse(ctx context.Context, params *GetTelegrafsParams) (*GetTelegrafsResponse, error) { + rsp, err := c.GetTelegrafs(ctx, params) + if err != nil { + return nil, err + } + return ParseGetTelegrafsResponse(rsp) +} + +// PostTelegrafsWithBodyWithResponse request with arbitrary body returning *PostTelegrafsResponse +func (c *ClientWithResponses) PostTelegrafsWithBodyWithResponse(ctx context.Context, params *PostTelegrafsParams, contentType string, body io.Reader) (*PostTelegrafsResponse, error) { + rsp, err := c.PostTelegrafsWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostTelegrafsResponse(rsp) +} + +func (c *ClientWithResponses) PostTelegrafsWithResponse(ctx context.Context, params *PostTelegrafsParams, body PostTelegrafsJSONRequestBody) (*PostTelegrafsResponse, error) { + rsp, err := c.PostTelegrafs(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostTelegrafsResponse(rsp) +} + +// DeleteTelegrafsIDWithResponse request returning *DeleteTelegrafsIDResponse +func (c *ClientWithResponses) DeleteTelegrafsIDWithResponse(ctx context.Context, telegrafID string, params *DeleteTelegrafsIDParams) (*DeleteTelegrafsIDResponse, error) { + rsp, err := c.DeleteTelegrafsID(ctx, telegrafID, params) + if err != nil { + return nil, err + } + return ParseDeleteTelegrafsIDResponse(rsp) +} + +// GetTelegrafsIDWithResponse request returning *GetTelegrafsIDResponse +func (c *ClientWithResponses) GetTelegrafsIDWithResponse(ctx context.Context, telegrafID string, params *GetTelegrafsIDParams) (*GetTelegrafsIDResponse, error) { + rsp, err := c.GetTelegrafsID(ctx, telegrafID, params) + if err != nil { + return nil, err + } + return ParseGetTelegrafsIDResponse(rsp) +} + +// PutTelegrafsIDWithBodyWithResponse request with arbitrary body returning *PutTelegrafsIDResponse +func (c *ClientWithResponses) PutTelegrafsIDWithBodyWithResponse(ctx context.Context, telegrafID string, params *PutTelegrafsIDParams, contentType string, body io.Reader) (*PutTelegrafsIDResponse, error) { + rsp, err := c.PutTelegrafsIDWithBody(ctx, telegrafID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePutTelegrafsIDResponse(rsp) +} + +func (c *ClientWithResponses) PutTelegrafsIDWithResponse(ctx context.Context, telegrafID string, params *PutTelegrafsIDParams, body PutTelegrafsIDJSONRequestBody) (*PutTelegrafsIDResponse, error) { + rsp, err := c.PutTelegrafsID(ctx, telegrafID, params, body) + if err != nil { + return nil, err + } + return ParsePutTelegrafsIDResponse(rsp) +} + +// GetTelegrafsIDLabelsWithResponse request returning *GetTelegrafsIDLabelsResponse +func (c *ClientWithResponses) GetTelegrafsIDLabelsWithResponse(ctx context.Context, telegrafID string, params *GetTelegrafsIDLabelsParams) (*GetTelegrafsIDLabelsResponse, error) { + rsp, err := c.GetTelegrafsIDLabels(ctx, 
telegrafID, params) + if err != nil { + return nil, err + } + return ParseGetTelegrafsIDLabelsResponse(rsp) +} + +// PostTelegrafsIDLabelsWithBodyWithResponse request with arbitrary body returning *PostTelegrafsIDLabelsResponse +func (c *ClientWithResponses) PostTelegrafsIDLabelsWithBodyWithResponse(ctx context.Context, telegrafID string, params *PostTelegrafsIDLabelsParams, contentType string, body io.Reader) (*PostTelegrafsIDLabelsResponse, error) { + rsp, err := c.PostTelegrafsIDLabelsWithBody(ctx, telegrafID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostTelegrafsIDLabelsResponse(rsp) +} + +func (c *ClientWithResponses) PostTelegrafsIDLabelsWithResponse(ctx context.Context, telegrafID string, params *PostTelegrafsIDLabelsParams, body PostTelegrafsIDLabelsJSONRequestBody) (*PostTelegrafsIDLabelsResponse, error) { + rsp, err := c.PostTelegrafsIDLabels(ctx, telegrafID, params, body) + if err != nil { + return nil, err + } + return ParsePostTelegrafsIDLabelsResponse(rsp) +} + +// DeleteTelegrafsIDLabelsIDWithResponse request returning *DeleteTelegrafsIDLabelsIDResponse +func (c *ClientWithResponses) DeleteTelegrafsIDLabelsIDWithResponse(ctx context.Context, telegrafID string, labelID string, params *DeleteTelegrafsIDLabelsIDParams) (*DeleteTelegrafsIDLabelsIDResponse, error) { + rsp, err := c.DeleteTelegrafsIDLabelsID(ctx, telegrafID, labelID, params) + if err != nil { + return nil, err + } + return ParseDeleteTelegrafsIDLabelsIDResponse(rsp) +} + +// GetTelegrafsIDMembersWithResponse request returning *GetTelegrafsIDMembersResponse +func (c *ClientWithResponses) GetTelegrafsIDMembersWithResponse(ctx context.Context, telegrafID string, params *GetTelegrafsIDMembersParams) (*GetTelegrafsIDMembersResponse, error) { + rsp, err := c.GetTelegrafsIDMembers(ctx, telegrafID, params) + if err != nil { + return nil, err + } + return ParseGetTelegrafsIDMembersResponse(rsp) +} + +// PostTelegrafsIDMembersWithBodyWithResponse request with arbitrary body returning *PostTelegrafsIDMembersResponse +func (c *ClientWithResponses) PostTelegrafsIDMembersWithBodyWithResponse(ctx context.Context, telegrafID string, params *PostTelegrafsIDMembersParams, contentType string, body io.Reader) (*PostTelegrafsIDMembersResponse, error) { + rsp, err := c.PostTelegrafsIDMembersWithBody(ctx, telegrafID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostTelegrafsIDMembersResponse(rsp) +} + +func (c *ClientWithResponses) PostTelegrafsIDMembersWithResponse(ctx context.Context, telegrafID string, params *PostTelegrafsIDMembersParams, body PostTelegrafsIDMembersJSONRequestBody) (*PostTelegrafsIDMembersResponse, error) { + rsp, err := c.PostTelegrafsIDMembers(ctx, telegrafID, params, body) + if err != nil { + return nil, err + } + return ParsePostTelegrafsIDMembersResponse(rsp) +} + +// DeleteTelegrafsIDMembersIDWithResponse request returning *DeleteTelegrafsIDMembersIDResponse +func (c *ClientWithResponses) DeleteTelegrafsIDMembersIDWithResponse(ctx context.Context, telegrafID string, userID string, params *DeleteTelegrafsIDMembersIDParams) (*DeleteTelegrafsIDMembersIDResponse, error) { + rsp, err := c.DeleteTelegrafsIDMembersID(ctx, telegrafID, userID, params) + if err != nil { + return nil, err + } + return ParseDeleteTelegrafsIDMembersIDResponse(rsp) +} + +// GetTelegrafsIDOwnersWithResponse request returning *GetTelegrafsIDOwnersResponse +func (c *ClientWithResponses) GetTelegrafsIDOwnersWithResponse(ctx context.Context, telegrafID string, params 
*GetTelegrafsIDOwnersParams) (*GetTelegrafsIDOwnersResponse, error) { + rsp, err := c.GetTelegrafsIDOwners(ctx, telegrafID, params) + if err != nil { + return nil, err + } + return ParseGetTelegrafsIDOwnersResponse(rsp) +} + +// PostTelegrafsIDOwnersWithBodyWithResponse request with arbitrary body returning *PostTelegrafsIDOwnersResponse +func (c *ClientWithResponses) PostTelegrafsIDOwnersWithBodyWithResponse(ctx context.Context, telegrafID string, params *PostTelegrafsIDOwnersParams, contentType string, body io.Reader) (*PostTelegrafsIDOwnersResponse, error) { + rsp, err := c.PostTelegrafsIDOwnersWithBody(ctx, telegrafID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostTelegrafsIDOwnersResponse(rsp) +} + +func (c *ClientWithResponses) PostTelegrafsIDOwnersWithResponse(ctx context.Context, telegrafID string, params *PostTelegrafsIDOwnersParams, body PostTelegrafsIDOwnersJSONRequestBody) (*PostTelegrafsIDOwnersResponse, error) { + rsp, err := c.PostTelegrafsIDOwners(ctx, telegrafID, params, body) + if err != nil { + return nil, err + } + return ParsePostTelegrafsIDOwnersResponse(rsp) +} + +// DeleteTelegrafsIDOwnersIDWithResponse request returning *DeleteTelegrafsIDOwnersIDResponse +func (c *ClientWithResponses) DeleteTelegrafsIDOwnersIDWithResponse(ctx context.Context, telegrafID string, userID string, params *DeleteTelegrafsIDOwnersIDParams) (*DeleteTelegrafsIDOwnersIDResponse, error) { + rsp, err := c.DeleteTelegrafsIDOwnersID(ctx, telegrafID, userID, params) + if err != nil { + return nil, err + } + return ParseDeleteTelegrafsIDOwnersIDResponse(rsp) +} + +// ApplyTemplateWithBodyWithResponse request with arbitrary body returning *ApplyTemplateResponse +func (c *ClientWithResponses) ApplyTemplateWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader) (*ApplyTemplateResponse, error) { + rsp, err := c.ApplyTemplateWithBody(ctx, contentType, body) + if err != nil { + return nil, err + } + return ParseApplyTemplateResponse(rsp) +} + +func (c *ClientWithResponses) ApplyTemplateWithResponse(ctx context.Context, body ApplyTemplateJSONRequestBody) (*ApplyTemplateResponse, error) { + rsp, err := c.ApplyTemplate(ctx, body) + if err != nil { + return nil, err + } + return ParseApplyTemplateResponse(rsp) +} + +// ExportTemplateWithBodyWithResponse request with arbitrary body returning *ExportTemplateResponse +func (c *ClientWithResponses) ExportTemplateWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader) (*ExportTemplateResponse, error) { + rsp, err := c.ExportTemplateWithBody(ctx, contentType, body) + if err != nil { + return nil, err + } + return ParseExportTemplateResponse(rsp) +} + +func (c *ClientWithResponses) ExportTemplateWithResponse(ctx context.Context, body ExportTemplateJSONRequestBody) (*ExportTemplateResponse, error) { + rsp, err := c.ExportTemplate(ctx, body) + if err != nil { + return nil, err + } + return ParseExportTemplateResponse(rsp) +} + +// GetUsersWithResponse request returning *GetUsersResponse +func (c *ClientWithResponses) GetUsersWithResponse(ctx context.Context, params *GetUsersParams) (*GetUsersResponse, error) { + rsp, err := c.GetUsers(ctx, params) + if err != nil { + return nil, err + } + return ParseGetUsersResponse(rsp) +} + +// PostUsersWithBodyWithResponse request with arbitrary body returning *PostUsersResponse +func (c *ClientWithResponses) PostUsersWithBodyWithResponse(ctx context.Context, params *PostUsersParams, contentType string, body io.Reader) (*PostUsersResponse, error) 
{ + rsp, err := c.PostUsersWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostUsersResponse(rsp) +} + +func (c *ClientWithResponses) PostUsersWithResponse(ctx context.Context, params *PostUsersParams, body PostUsersJSONRequestBody) (*PostUsersResponse, error) { + rsp, err := c.PostUsers(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostUsersResponse(rsp) +} + +// DeleteUsersIDWithResponse request returning *DeleteUsersIDResponse +func (c *ClientWithResponses) DeleteUsersIDWithResponse(ctx context.Context, userID string, params *DeleteUsersIDParams) (*DeleteUsersIDResponse, error) { + rsp, err := c.DeleteUsersID(ctx, userID, params) + if err != nil { + return nil, err + } + return ParseDeleteUsersIDResponse(rsp) +} + +// GetUsersIDWithResponse request returning *GetUsersIDResponse +func (c *ClientWithResponses) GetUsersIDWithResponse(ctx context.Context, userID string, params *GetUsersIDParams) (*GetUsersIDResponse, error) { + rsp, err := c.GetUsersID(ctx, userID, params) + if err != nil { + return nil, err + } + return ParseGetUsersIDResponse(rsp) +} + +// PatchUsersIDWithBodyWithResponse request with arbitrary body returning *PatchUsersIDResponse +func (c *ClientWithResponses) PatchUsersIDWithBodyWithResponse(ctx context.Context, userID string, params *PatchUsersIDParams, contentType string, body io.Reader) (*PatchUsersIDResponse, error) { + rsp, err := c.PatchUsersIDWithBody(ctx, userID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchUsersIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchUsersIDWithResponse(ctx context.Context, userID string, params *PatchUsersIDParams, body PatchUsersIDJSONRequestBody) (*PatchUsersIDResponse, error) { + rsp, err := c.PatchUsersID(ctx, userID, params, body) + if err != nil { + return nil, err + } + return ParsePatchUsersIDResponse(rsp) +} + +// PostUsersIDPasswordWithBodyWithResponse request with arbitrary body returning *PostUsersIDPasswordResponse +func (c *ClientWithResponses) PostUsersIDPasswordWithBodyWithResponse(ctx context.Context, userID string, params *PostUsersIDPasswordParams, contentType string, body io.Reader) (*PostUsersIDPasswordResponse, error) { + rsp, err := c.PostUsersIDPasswordWithBody(ctx, userID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePostUsersIDPasswordResponse(rsp) +} + +func (c *ClientWithResponses) PostUsersIDPasswordWithResponse(ctx context.Context, userID string, params *PostUsersIDPasswordParams, body PostUsersIDPasswordJSONRequestBody) (*PostUsersIDPasswordResponse, error) { + rsp, err := c.PostUsersIDPassword(ctx, userID, params, body) + if err != nil { + return nil, err + } + return ParsePostUsersIDPasswordResponse(rsp) +} + +// GetVariablesWithResponse request returning *GetVariablesResponse +func (c *ClientWithResponses) GetVariablesWithResponse(ctx context.Context, params *GetVariablesParams) (*GetVariablesResponse, error) { + rsp, err := c.GetVariables(ctx, params) + if err != nil { + return nil, err + } + return ParseGetVariablesResponse(rsp) +} + +// PostVariablesWithBodyWithResponse request with arbitrary body returning *PostVariablesResponse +func (c *ClientWithResponses) PostVariablesWithBodyWithResponse(ctx context.Context, params *PostVariablesParams, contentType string, body io.Reader) (*PostVariablesResponse, error) { + rsp, err := c.PostVariablesWithBody(ctx, params, contentType, body) + if err != nil { + return nil, err + } + return 
ParsePostVariablesResponse(rsp) +} + +func (c *ClientWithResponses) PostVariablesWithResponse(ctx context.Context, params *PostVariablesParams, body PostVariablesJSONRequestBody) (*PostVariablesResponse, error) { + rsp, err := c.PostVariables(ctx, params, body) + if err != nil { + return nil, err + } + return ParsePostVariablesResponse(rsp) +} + +// DeleteVariablesIDWithResponse request returning *DeleteVariablesIDResponse +func (c *ClientWithResponses) DeleteVariablesIDWithResponse(ctx context.Context, variableID string, params *DeleteVariablesIDParams) (*DeleteVariablesIDResponse, error) { + rsp, err := c.DeleteVariablesID(ctx, variableID, params) + if err != nil { + return nil, err + } + return ParseDeleteVariablesIDResponse(rsp) +} + +// GetVariablesIDWithResponse request returning *GetVariablesIDResponse +func (c *ClientWithResponses) GetVariablesIDWithResponse(ctx context.Context, variableID string, params *GetVariablesIDParams) (*GetVariablesIDResponse, error) { + rsp, err := c.GetVariablesID(ctx, variableID, params) + if err != nil { + return nil, err + } + return ParseGetVariablesIDResponse(rsp) +} + +// PatchVariablesIDWithBodyWithResponse request with arbitrary body returning *PatchVariablesIDResponse +func (c *ClientWithResponses) PatchVariablesIDWithBodyWithResponse(ctx context.Context, variableID string, params *PatchVariablesIDParams, contentType string, body io.Reader) (*PatchVariablesIDResponse, error) { + rsp, err := c.PatchVariablesIDWithBody(ctx, variableID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePatchVariablesIDResponse(rsp) +} + +func (c *ClientWithResponses) PatchVariablesIDWithResponse(ctx context.Context, variableID string, params *PatchVariablesIDParams, body PatchVariablesIDJSONRequestBody) (*PatchVariablesIDResponse, error) { + rsp, err := c.PatchVariablesID(ctx, variableID, params, body) + if err != nil { + return nil, err + } + return ParsePatchVariablesIDResponse(rsp) +} + +// PutVariablesIDWithBodyWithResponse request with arbitrary body returning *PutVariablesIDResponse +func (c *ClientWithResponses) PutVariablesIDWithBodyWithResponse(ctx context.Context, variableID string, params *PutVariablesIDParams, contentType string, body io.Reader) (*PutVariablesIDResponse, error) { + rsp, err := c.PutVariablesIDWithBody(ctx, variableID, params, contentType, body) + if err != nil { + return nil, err + } + return ParsePutVariablesIDResponse(rsp) +} + +func (c *ClientWithResponses) PutVariablesIDWithResponse(ctx context.Context, variableID string, params *PutVariablesIDParams, body PutVariablesIDJSONRequestBody) (*PutVariablesIDResponse, error) { + rsp, err := c.PutVariablesID(ctx, variableID, params, body) + if err != nil { + return nil, err + } + return ParsePutVariablesIDResponse(rsp) +} + +// GetVariablesIDLabelsWithResponse request returning *GetVariablesIDLabelsResponse +func (c *ClientWithResponses) GetVariablesIDLabelsWithResponse(ctx context.Context, variableID string, params *GetVariablesIDLabelsParams) (*GetVariablesIDLabelsResponse, error) { + rsp, err := c.GetVariablesIDLabels(ctx, variableID, params) + if err != nil { + return nil, err + } + return ParseGetVariablesIDLabelsResponse(rsp) +} + +// PostVariablesIDLabelsWithBodyWithResponse request with arbitrary body returning *PostVariablesIDLabelsResponse +func (c *ClientWithResponses) PostVariablesIDLabelsWithBodyWithResponse(ctx context.Context, variableID string, params *PostVariablesIDLabelsParams, contentType string, body io.Reader) 
(*PostVariablesIDLabelsResponse, error) {
+	rsp, err := c.PostVariablesIDLabelsWithBody(ctx, variableID, params, contentType, body)
+	if err != nil {
+		return nil, err
+	}
+	return ParsePostVariablesIDLabelsResponse(rsp)
+}
+
+func (c *ClientWithResponses) PostVariablesIDLabelsWithResponse(ctx context.Context, variableID string, params *PostVariablesIDLabelsParams, body PostVariablesIDLabelsJSONRequestBody) (*PostVariablesIDLabelsResponse, error) {
+	rsp, err := c.PostVariablesIDLabels(ctx, variableID, params, body)
+	if err != nil {
+		return nil, err
+	}
+	return ParsePostVariablesIDLabelsResponse(rsp)
+}
+
+// DeleteVariablesIDLabelsIDWithResponse request returning *DeleteVariablesIDLabelsIDResponse
+func (c *ClientWithResponses) DeleteVariablesIDLabelsIDWithResponse(ctx context.Context, variableID string, labelID string, params *DeleteVariablesIDLabelsIDParams) (*DeleteVariablesIDLabelsIDResponse, error) {
+	rsp, err := c.DeleteVariablesIDLabelsID(ctx, variableID, labelID, params)
+	if err != nil {
+		return nil, err
+	}
+	return ParseDeleteVariablesIDLabelsIDResponse(rsp)
+}
+
+// PostWriteWithBodyWithResponse request with arbitrary body returning *PostWriteResponse
+func (c *ClientWithResponses) PostWriteWithBodyWithResponse(ctx context.Context, params *PostWriteParams, contentType string, body io.Reader) (*PostWriteResponse, error) {
+	rsp, err := c.PostWriteWithBody(ctx, params, contentType, body)
+	if err != nil {
+		return nil, err
+	}
+	return ParsePostWriteResponse(rsp)
+}
+
+// ParseGetRoutesResponse parses an HTTP response from a GetRoutesWithResponse call
+func ParseGetRoutesResponse(rsp *http.Response) (*GetRoutesResponse, error) {
+	bodyBytes, err := ioutil.ReadAll(rsp.Body)
+	defer rsp.Body.Close()
+	if err != nil {
+		return nil, err
+	}
+
+	response := &GetRoutesResponse{
+		Body:         bodyBytes,
+		HTTPResponse: rsp,
+	}
+
+	switch {
+	case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true:
+		var dest Routes
+		if err := json.Unmarshal(bodyBytes, &dest); err != nil {
+			return nil, err
+		}
+		response.JSONDefault = &dest
+
+	// Fallback for unexpected error
+	default:
+		if rsp.StatusCode > 299 {
+			return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status}
+		}
+	}
+
+	return response, nil
+}
+
+// ParseGetAuthorizationsResponse parses an HTTP response from a GetAuthorizationsWithResponse call
+func ParseGetAuthorizationsResponse(rsp *http.Response) (*GetAuthorizationsResponse, error) {
+	bodyBytes, err := ioutil.ReadAll(rsp.Body)
+	defer rsp.Body.Close()
+	if err != nil {
+		return nil, err
+	}
+
+	response := &GetAuthorizationsResponse{
+		Body:         bodyBytes,
+		HTTPResponse: rsp,
+	}
+
+	switch {
+	case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200:
+		var dest Authorizations
+		if err := json.Unmarshal(bodyBytes, &dest); err != nil {
+			return nil, err
+		}
+		response.JSON200 = &dest
+
+	case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true:
+		var dest Error
+		if err := json.Unmarshal(bodyBytes, &dest); err != nil {
+			return nil, err
+		}
+		response.JSONDefault = &dest
+
+	// Fallback for unexpected error
+	default:
+		if rsp.StatusCode > 299 {
+			return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status}
+		}
+	}
+
+	return response, nil
+}
+
+// ParsePostAuthorizationsResponse parses an HTTP response from a PostAuthorizationsWithResponse call
+func ParsePostAuthorizationsResponse(rsp *http.Response) (*PostAuthorizationsResponse, error) {
+	bodyBytes, err := ioutil.ReadAll(rsp.Body)
+	defer 
rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostAuthorizationsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest Authorization + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteAuthorizationsIDResponse parses an HTTP response from a DeleteAuthorizationsIDWithResponse call +func ParseDeleteAuthorizationsIDResponse(rsp *http.Response) (*DeleteAuthorizationsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteAuthorizationsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetAuthorizationsIDResponse parses an HTTP response from a GetAuthorizationsIDWithResponse call +func ParseGetAuthorizationsIDResponse(rsp *http.Response) (*GetAuthorizationsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetAuthorizationsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Authorization + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchAuthorizationsIDResponse parses an HTTP response from a PatchAuthorizationsIDWithResponse call +func ParsePatchAuthorizationsIDResponse(rsp *http.Response) (*PatchAuthorizationsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchAuthorizationsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Authorization + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case 
strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetBackupKVResponse parses an HTTP response from a GetBackupKVWithResponse call +func ParseGetBackupKVResponse(rsp *http.Response) (*GetBackupKVResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetBackupKVResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetBackupMetadataResponse parses an HTTP response from a GetBackupMetadataWithResponse call +func ParseGetBackupMetadataResponse(rsp *http.Response) (*GetBackupMetadataResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetBackupMetadataResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetBackupShardIdResponse parses an HTTP response from a GetBackupShardIdWithResponse call +func ParseGetBackupShardIdResponse(rsp *http.Response) (*GetBackupShardIdResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetBackupShardIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetBucketsResponse parses an HTTP response from a GetBucketsWithResponse call +func ParseGetBucketsResponse(rsp *http.Response) (*GetBucketsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetBucketsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Buckets + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + 
+ case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostBucketsResponse parses an HTTP response from a PostBucketsWithResponse call +func ParsePostBucketsResponse(rsp *http.Response) (*PostBucketsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostBucketsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest Bucket + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 422: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON422 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteBucketsIDResponse parses an HTTP response from a DeleteBucketsIDWithResponse call +func ParseDeleteBucketsIDResponse(rsp *http.Response) (*DeleteBucketsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteBucketsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetBucketsIDResponse parses an HTTP response from a GetBucketsIDWithResponse call +func ParseGetBucketsIDResponse(rsp *http.Response) (*GetBucketsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetBucketsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Bucket + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: 
rsp.Status} + } + } + + return response, nil +} + +// ParsePatchBucketsIDResponse parses an HTTP response from a PatchBucketsIDWithResponse call +func ParsePatchBucketsIDResponse(rsp *http.Response) (*PatchBucketsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchBucketsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Bucket + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetBucketsIDLabelsResponse parses an HTTP response from a GetBucketsIDLabelsWithResponse call +func ParseGetBucketsIDLabelsResponse(rsp *http.Response) (*GetBucketsIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetBucketsIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest LabelsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostBucketsIDLabelsResponse parses an HTTP response from a PostBucketsIDLabelsWithResponse call +func ParsePostBucketsIDLabelsResponse(rsp *http.Response) (*PostBucketsIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostBucketsIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest LabelResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteBucketsIDLabelsIDResponse parses an HTTP response from a DeleteBucketsIDLabelsIDWithResponse call +func ParseDeleteBucketsIDLabelsIDResponse(rsp *http.Response) (*DeleteBucketsIDLabelsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteBucketsIDLabelsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + 
case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetBucketsIDMembersResponse parses an HTTP response from a GetBucketsIDMembersWithResponse call +func ParseGetBucketsIDMembersResponse(rsp *http.Response) (*GetBucketsIDMembersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetBucketsIDMembersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ResourceMembers + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostBucketsIDMembersResponse parses an HTTP response from a PostBucketsIDMembersWithResponse call +func ParsePostBucketsIDMembersResponse(rsp *http.Response) (*PostBucketsIDMembersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostBucketsIDMembersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest ResourceMember + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteBucketsIDMembersIDResponse parses an HTTP response from a DeleteBucketsIDMembersIDWithResponse call +func ParseDeleteBucketsIDMembersIDResponse(rsp *http.Response) (*DeleteBucketsIDMembersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteBucketsIDMembersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetBucketsIDOwnersResponse 
parses an HTTP response from a GetBucketsIDOwnersWithResponse call +func ParseGetBucketsIDOwnersResponse(rsp *http.Response) (*GetBucketsIDOwnersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetBucketsIDOwnersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ResourceOwners + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostBucketsIDOwnersResponse parses an HTTP response from a PostBucketsIDOwnersWithResponse call +func ParsePostBucketsIDOwnersResponse(rsp *http.Response) (*PostBucketsIDOwnersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostBucketsIDOwnersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest ResourceOwner + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteBucketsIDOwnersIDResponse parses an HTTP response from a DeleteBucketsIDOwnersIDWithResponse call +func ParseDeleteBucketsIDOwnersIDResponse(rsp *http.Response) (*DeleteBucketsIDOwnersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteBucketsIDOwnersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetChecksResponse parses an HTTP response from a GetChecksWithResponse call +func ParseGetChecksResponse(rsp *http.Response) (*GetChecksResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetChecksResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Checks + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := 
json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseCreateCheckResponse parses an HTTP response from a CreateCheckWithResponse call +func ParseCreateCheckResponse(rsp *http.Response) (*CreateCheckResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &CreateCheckResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest Check + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteChecksIDResponse parses an HTTP response from a DeleteChecksIDWithResponse call +func ParseDeleteChecksIDResponse(rsp *http.Response) (*DeleteChecksIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteChecksIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetChecksIDResponse parses an HTTP response from a GetChecksIDWithResponse call +func ParseGetChecksIDResponse(rsp *http.Response) (*GetChecksIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetChecksIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Check + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchChecksIDResponse parses an HTTP response from a PatchChecksIDWithResponse call +func ParsePatchChecksIDResponse(rsp *http.Response) (*PatchChecksIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + 
+ response := &PatchChecksIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Check + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePutChecksIDResponse parses an HTTP response from a PutChecksIDWithResponse call +func ParsePutChecksIDResponse(rsp *http.Response) (*PutChecksIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PutChecksIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Check + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetChecksIDLabelsResponse parses an HTTP response from a GetChecksIDLabelsWithResponse call +func ParseGetChecksIDLabelsResponse(rsp *http.Response) (*GetChecksIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetChecksIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest LabelsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostChecksIDLabelsResponse parses an HTTP response from a PostChecksIDLabelsWithResponse call +func ParsePostChecksIDLabelsResponse(rsp *http.Response) (*PostChecksIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostChecksIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + 
switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest LabelResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteChecksIDLabelsIDResponse parses an HTTP response from a DeleteChecksIDLabelsIDWithResponse call +func ParseDeleteChecksIDLabelsIDResponse(rsp *http.Response) (*DeleteChecksIDLabelsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteChecksIDLabelsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetChecksIDQueryResponse parses an HTTP response from a GetChecksIDQueryWithResponse call +func ParseGetChecksIDQueryResponse(rsp *http.Response) (*GetChecksIDQueryResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetChecksIDQueryResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest FluxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetConfigResponse parses an HTTP response from a GetConfigWithResponse call +func ParseGetConfigResponse(rsp *http.Response) (*GetConfigResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetConfigResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && 
rsp.StatusCode == 200: + var dest Config + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetDashboardsResponse parses an HTTP response from a GetDashboardsWithResponse call +func ParseGetDashboardsResponse(rsp *http.Response) (*GetDashboardsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetDashboardsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Dashboards + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostDashboardsResponse parses an HTTP response from a PostDashboardsWithResponse call +func ParsePostDashboardsResponse(rsp *http.Response) (*PostDashboardsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostDashboardsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteDashboardsIDResponse parses an HTTP response from a DeleteDashboardsIDWithResponse call +func ParseDeleteDashboardsIDResponse(rsp *http.Response) (*DeleteDashboardsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteDashboardsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return 
nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetDashboardsIDResponse parses an HTTP response from a GetDashboardsIDWithResponse call +func ParseGetDashboardsIDResponse(rsp *http.Response) (*GetDashboardsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetDashboardsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchDashboardsIDResponse parses an HTTP response from a PatchDashboardsIDWithResponse call +func ParsePatchDashboardsIDResponse(rsp *http.Response) (*PatchDashboardsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchDashboardsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Dashboard + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostDashboardsIDCellsResponse parses an HTTP response from a PostDashboardsIDCellsWithResponse call +func ParsePostDashboardsIDCellsResponse(rsp *http.Response) (*PostDashboardsIDCellsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostDashboardsIDCellsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest Cell + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = 
&dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePutDashboardsIDCellsResponse parses an HTTP response from a PutDashboardsIDCellsWithResponse call +func ParsePutDashboardsIDCellsResponse(rsp *http.Response) (*PutDashboardsIDCellsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PutDashboardsIDCellsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest Dashboard + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteDashboardsIDCellsIDResponse parses an HTTP response from a DeleteDashboardsIDCellsIDWithResponse call +func ParseDeleteDashboardsIDCellsIDResponse(rsp *http.Response) (*DeleteDashboardsIDCellsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteDashboardsIDCellsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchDashboardsIDCellsIDResponse parses an HTTP response from a PatchDashboardsIDCellsIDWithResponse call +func ParsePatchDashboardsIDCellsIDResponse(rsp *http.Response) (*PatchDashboardsIDCellsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchDashboardsIDCellsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Cell + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } 
+ response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetDashboardsIDCellsIDViewResponse parses an HTTP response from a GetDashboardsIDCellsIDViewWithResponse call +func ParseGetDashboardsIDCellsIDViewResponse(rsp *http.Response) (*GetDashboardsIDCellsIDViewResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetDashboardsIDCellsIDViewResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest View + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchDashboardsIDCellsIDViewResponse parses an HTTP response from a PatchDashboardsIDCellsIDViewWithResponse call +func ParsePatchDashboardsIDCellsIDViewResponse(rsp *http.Response) (*PatchDashboardsIDCellsIDViewResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchDashboardsIDCellsIDViewResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest View + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetDashboardsIDLabelsResponse parses an HTTP response from a GetDashboardsIDLabelsWithResponse call +func ParseGetDashboardsIDLabelsResponse(rsp *http.Response) (*GetDashboardsIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetDashboardsIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest LabelsResponse + if err := 
json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostDashboardsIDLabelsResponse parses an HTTP response from a PostDashboardsIDLabelsWithResponse call +func ParsePostDashboardsIDLabelsResponse(rsp *http.Response) (*PostDashboardsIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostDashboardsIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest LabelResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteDashboardsIDLabelsIDResponse parses an HTTP response from a DeleteDashboardsIDLabelsIDWithResponse call +func ParseDeleteDashboardsIDLabelsIDResponse(rsp *http.Response) (*DeleteDashboardsIDLabelsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteDashboardsIDLabelsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetDashboardsIDMembersResponse parses an HTTP response from a GetDashboardsIDMembersWithResponse call +func ParseGetDashboardsIDMembersResponse(rsp *http.Response) (*GetDashboardsIDMembersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetDashboardsIDMembersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ResourceMembers + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, 
&ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostDashboardsIDMembersResponse parses an HTTP response from a PostDashboardsIDMembersWithResponse call +func ParsePostDashboardsIDMembersResponse(rsp *http.Response) (*PostDashboardsIDMembersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostDashboardsIDMembersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest ResourceMember + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteDashboardsIDMembersIDResponse parses an HTTP response from a DeleteDashboardsIDMembersIDWithResponse call +func ParseDeleteDashboardsIDMembersIDResponse(rsp *http.Response) (*DeleteDashboardsIDMembersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteDashboardsIDMembersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetDashboardsIDOwnersResponse parses an HTTP response from a GetDashboardsIDOwnersWithResponse call +func ParseGetDashboardsIDOwnersResponse(rsp *http.Response) (*GetDashboardsIDOwnersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetDashboardsIDOwnersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ResourceOwners + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostDashboardsIDOwnersResponse parses an HTTP response from a PostDashboardsIDOwnersWithResponse call +func ParsePostDashboardsIDOwnersResponse(rsp *http.Response) (*PostDashboardsIDOwnersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostDashboardsIDOwnersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && 
rsp.StatusCode == 201: + var dest ResourceOwner + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteDashboardsIDOwnersIDResponse parses an HTTP response from a DeleteDashboardsIDOwnersIDWithResponse call +func ParseDeleteDashboardsIDOwnersIDResponse(rsp *http.Response) (*DeleteDashboardsIDOwnersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteDashboardsIDOwnersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetDBRPsResponse parses an HTTP response from a GetDBRPsWithResponse call +func ParseGetDBRPsResponse(rsp *http.Response) (*GetDBRPsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetDBRPsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest DBRPs + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostDBRPResponse parses an HTTP response from a PostDBRPWithResponse call +func ParsePostDBRPResponse(rsp *http.Response) (*PostDBRPResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostDBRPResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest DBRP + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + 
response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteDBRPIDResponse parses an HTTP response from a DeleteDBRPIDWithResponse call +func ParseDeleteDBRPIDResponse(rsp *http.Response) (*DeleteDBRPIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteDBRPIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetDBRPsIDResponse parses an HTTP response from a GetDBRPsIDWithResponse call +func ParseGetDBRPsIDResponse(rsp *http.Response) (*GetDBRPsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetDBRPsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest DBRPGet + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchDBRPIDResponse parses an HTTP response from a PatchDBRPIDWithResponse call +func ParsePatchDBRPIDResponse(rsp *http.Response) (*PatchDBRPIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchDBRPIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest DBRPGet + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := 
json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostDeleteResponse parses an HTTP response from a PostDeleteWithResponse call +func ParsePostDeleteResponse(rsp *http.Response) (*PostDeleteResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostDeleteResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetFlagsResponse parses an HTTP response from a GetFlagsWithResponse call +func ParseGetFlagsResponse(rsp *http.Response) (*GetFlagsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetFlagsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Flags + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetHealthResponse parses an HTTP response from a GetHealthWithResponse call +func ParseGetHealthResponse(rsp *http.Response) (*GetHealthResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetHealthResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest HealthCheck + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 503: + var dest HealthCheck + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON503 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") 
&& true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetLabelsResponse parses an HTTP response from a GetLabelsWithResponse call +func ParseGetLabelsResponse(rsp *http.Response) (*GetLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest LabelsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostLabelsResponse parses an HTTP response from a PostLabelsWithResponse call +func ParsePostLabelsResponse(rsp *http.Response) (*PostLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest LabelResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteLabelsIDResponse parses an HTTP response from a DeleteLabelsIDWithResponse call +func ParseDeleteLabelsIDResponse(rsp *http.Response) (*DeleteLabelsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteLabelsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetLabelsIDResponse parses an HTTP response from a GetLabelsIDWithResponse call +func ParseGetLabelsIDResponse(rsp *http.Response) (*GetLabelsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err 
!= nil { + return nil, err + } + + response := &GetLabelsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest LabelResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchLabelsIDResponse parses an HTTP response from a PatchLabelsIDWithResponse call +func ParsePatchLabelsIDResponse(rsp *http.Response) (*PatchLabelsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchLabelsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest LabelResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetLegacyAuthorizationsResponse parses an HTTP response from a GetLegacyAuthorizationsWithResponse call +func ParseGetLegacyAuthorizationsResponse(rsp *http.Response) (*GetLegacyAuthorizationsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetLegacyAuthorizationsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Authorizations + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostLegacyAuthorizationsResponse parses an HTTP response from a PostLegacyAuthorizationsWithResponse call +func ParsePostLegacyAuthorizationsResponse(rsp *http.Response) (*PostLegacyAuthorizationsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostLegacyAuthorizationsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + 
var dest Authorization + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteLegacyAuthorizationsIDResponse parses an HTTP response from a DeleteLegacyAuthorizationsIDWithResponse call +func ParseDeleteLegacyAuthorizationsIDResponse(rsp *http.Response) (*DeleteLegacyAuthorizationsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteLegacyAuthorizationsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetLegacyAuthorizationsIDResponse parses an HTTP response from a GetLegacyAuthorizationsIDWithResponse call +func ParseGetLegacyAuthorizationsIDResponse(rsp *http.Response) (*GetLegacyAuthorizationsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetLegacyAuthorizationsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Authorization + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchLegacyAuthorizationsIDResponse parses an HTTP response from a PatchLegacyAuthorizationsIDWithResponse call +func ParsePatchLegacyAuthorizationsIDResponse(rsp *http.Response) (*PatchLegacyAuthorizationsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchLegacyAuthorizationsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Authorization + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // 
Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostLegacyAuthorizationsIDPasswordResponse parses an HTTP response from a PostLegacyAuthorizationsIDPasswordWithResponse call +func ParsePostLegacyAuthorizationsIDPasswordResponse(rsp *http.Response) (*PostLegacyAuthorizationsIDPasswordResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostLegacyAuthorizationsIDPasswordResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetMeResponse parses an HTTP response from a GetMeWithResponse call +func ParseGetMeResponse(rsp *http.Response) (*GetMeResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetMeResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest UserResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePutMePasswordResponse parses an HTTP response from a PutMePasswordWithResponse call +func ParsePutMePasswordResponse(rsp *http.Response) (*PutMePasswordResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PutMePasswordResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetMetricsResponse parses an HTTP response from a GetMetricsWithResponse call +func ParseGetMetricsResponse(rsp *http.Response) (*GetMetricsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetMetricsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return 
response, nil +} + +// ParseGetNotificationEndpointsResponse parses an HTTP response from a GetNotificationEndpointsWithResponse call +func ParseGetNotificationEndpointsResponse(rsp *http.Response) (*GetNotificationEndpointsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetNotificationEndpointsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest NotificationEndpoints + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseCreateNotificationEndpointResponse parses an HTTP response from a CreateNotificationEndpointWithResponse call +func ParseCreateNotificationEndpointResponse(rsp *http.Response) (*CreateNotificationEndpointResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &CreateNotificationEndpointResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest NotificationEndpoint + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteNotificationEndpointsIDResponse parses an HTTP response from a DeleteNotificationEndpointsIDWithResponse call +func ParseDeleteNotificationEndpointsIDResponse(rsp *http.Response) (*DeleteNotificationEndpointsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteNotificationEndpointsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetNotificationEndpointsIDResponse parses an HTTP response from a GetNotificationEndpointsIDWithResponse call +func ParseGetNotificationEndpointsIDResponse(rsp *http.Response) (*GetNotificationEndpointsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != 
nil { + return nil, err + } + + response := &GetNotificationEndpointsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest NotificationEndpoint + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchNotificationEndpointsIDResponse parses an HTTP response from a PatchNotificationEndpointsIDWithResponse call +func ParsePatchNotificationEndpointsIDResponse(rsp *http.Response) (*PatchNotificationEndpointsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchNotificationEndpointsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest NotificationEndpoint + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePutNotificationEndpointsIDResponse parses an HTTP response from a PutNotificationEndpointsIDWithResponse call +func ParsePutNotificationEndpointsIDResponse(rsp *http.Response) (*PutNotificationEndpointsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PutNotificationEndpointsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest NotificationEndpoint + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetNotificationEndpointsIDLabelsResponse parses an HTTP response from a GetNotificationEndpointsIDLabelsWithResponse call +func ParseGetNotificationEndpointsIDLabelsResponse(rsp 
*http.Response) (*GetNotificationEndpointsIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetNotificationEndpointsIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest LabelsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostNotificationEndpointIDLabelsResponse parses an HTTP response from a PostNotificationEndpointIDLabelsWithResponse call +func ParsePostNotificationEndpointIDLabelsResponse(rsp *http.Response) (*PostNotificationEndpointIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostNotificationEndpointIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest LabelResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteNotificationEndpointsIDLabelsIDResponse parses an HTTP response from a DeleteNotificationEndpointsIDLabelsIDWithResponse call +func ParseDeleteNotificationEndpointsIDLabelsIDResponse(rsp *http.Response) (*DeleteNotificationEndpointsIDLabelsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteNotificationEndpointsIDLabelsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetNotificationRulesResponse parses an HTTP response from a GetNotificationRulesWithResponse call +func ParseGetNotificationRulesResponse(rsp *http.Response) (*GetNotificationRulesResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetNotificationRulesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + 
case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest NotificationRules + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseCreateNotificationRuleResponse parses an HTTP response from a CreateNotificationRuleWithResponse call +func ParseCreateNotificationRuleResponse(rsp *http.Response) (*CreateNotificationRuleResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &CreateNotificationRuleResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest NotificationRule + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteNotificationRulesIDResponse parses an HTTP response from a DeleteNotificationRulesIDWithResponse call +func ParseDeleteNotificationRulesIDResponse(rsp *http.Response) (*DeleteNotificationRulesIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteNotificationRulesIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetNotificationRulesIDResponse parses an HTTP response from a GetNotificationRulesIDWithResponse call +func ParseGetNotificationRulesIDResponse(rsp *http.Response) (*GetNotificationRulesIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetNotificationRulesIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest NotificationRule + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err 
+ } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchNotificationRulesIDResponse parses an HTTP response from a PatchNotificationRulesIDWithResponse call +func ParsePatchNotificationRulesIDResponse(rsp *http.Response) (*PatchNotificationRulesIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchNotificationRulesIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest NotificationRule + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePutNotificationRulesIDResponse parses an HTTP response from a PutNotificationRulesIDWithResponse call +func ParsePutNotificationRulesIDResponse(rsp *http.Response) (*PutNotificationRulesIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PutNotificationRulesIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest NotificationRule + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetNotificationRulesIDLabelsResponse parses an HTTP response from a GetNotificationRulesIDLabelsWithResponse call +func ParseGetNotificationRulesIDLabelsResponse(rsp *http.Response) (*GetNotificationRulesIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetNotificationRulesIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest LabelsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + 
if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostNotificationRuleIDLabelsResponse parses an HTTP response from a PostNotificationRuleIDLabelsWithResponse call +func ParsePostNotificationRuleIDLabelsResponse(rsp *http.Response) (*PostNotificationRuleIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostNotificationRuleIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest LabelResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteNotificationRulesIDLabelsIDResponse parses an HTTP response from a DeleteNotificationRulesIDLabelsIDWithResponse call +func ParseDeleteNotificationRulesIDLabelsIDResponse(rsp *http.Response) (*DeleteNotificationRulesIDLabelsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteNotificationRulesIDLabelsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetNotificationRulesIDQueryResponse parses an HTTP response from a GetNotificationRulesIDQueryWithResponse call +func ParseGetNotificationRulesIDQueryResponse(rsp *http.Response) (*GetNotificationRulesIDQueryResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetNotificationRulesIDQueryResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest FluxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, 
err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetOrgsResponse parses an HTTP response from a GetOrgsWithResponse call +func ParseGetOrgsResponse(rsp *http.Response) (*GetOrgsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetOrgsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Organizations + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostOrgsResponse parses an HTTP response from a PostOrgsWithResponse call +func ParsePostOrgsResponse(rsp *http.Response) (*PostOrgsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostOrgsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest Organization + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteOrgsIDResponse parses an HTTP response from a DeleteOrgsIDWithResponse call +func ParseDeleteOrgsIDResponse(rsp *http.Response) (*DeleteOrgsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteOrgsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetOrgsIDResponse parses an HTTP response from a GetOrgsIDWithResponse call +func ParseGetOrgsIDResponse(rsp *http.Response) (*GetOrgsIDResponse, error) { + bodyBytes, err 
:= ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetOrgsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Organization + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchOrgsIDResponse parses an HTTP response from a PatchOrgsIDWithResponse call +func ParsePatchOrgsIDResponse(rsp *http.Response) (*PatchOrgsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchOrgsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Organization + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetOrgsIDMembersResponse parses an HTTP response from a GetOrgsIDMembersWithResponse call +func ParseGetOrgsIDMembersResponse(rsp *http.Response) (*GetOrgsIDMembersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetOrgsIDMembersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ResourceMembers + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostOrgsIDMembersResponse parses an HTTP response from a PostOrgsIDMembersWithResponse call +func ParsePostOrgsIDMembersResponse(rsp *http.Response) (*PostOrgsIDMembersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostOrgsIDMembersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest 
ResourceMember + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteOrgsIDMembersIDResponse parses an HTTP response from a DeleteOrgsIDMembersIDWithResponse call +func ParseDeleteOrgsIDMembersIDResponse(rsp *http.Response) (*DeleteOrgsIDMembersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteOrgsIDMembersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetOrgsIDOwnersResponse parses an HTTP response from a GetOrgsIDOwnersWithResponse call +func ParseGetOrgsIDOwnersResponse(rsp *http.Response) (*GetOrgsIDOwnersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetOrgsIDOwnersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ResourceOwners + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostOrgsIDOwnersResponse parses an HTTP response from a PostOrgsIDOwnersWithResponse call +func ParsePostOrgsIDOwnersResponse(rsp *http.Response) (*PostOrgsIDOwnersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostOrgsIDOwnersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest ResourceOwner + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + 
} + + return response, nil +} + +// ParseDeleteOrgsIDOwnersIDResponse parses an HTTP response from a DeleteOrgsIDOwnersIDWithResponse call +func ParseDeleteOrgsIDOwnersIDResponse(rsp *http.Response) (*DeleteOrgsIDOwnersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteOrgsIDOwnersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetOrgsIDSecretsResponse parses an HTTP response from a GetOrgsIDSecretsWithResponse call +func ParseGetOrgsIDSecretsResponse(rsp *http.Response) (*GetOrgsIDSecretsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetOrgsIDSecretsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecretKeysResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchOrgsIDSecretsResponse parses an HTTP response from a PatchOrgsIDSecretsWithResponse call +func ParsePatchOrgsIDSecretsResponse(rsp *http.Response) (*PatchOrgsIDSecretsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchOrgsIDSecretsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostOrgsIDSecretsResponse parses an HTTP response from a PostOrgsIDSecretsWithResponse call +func ParsePostOrgsIDSecretsResponse(rsp *http.Response) (*PostOrgsIDSecretsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostOrgsIDSecretsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteOrgsIDSecretsIDResponse parses an HTTP response from a 
DeleteOrgsIDSecretsIDWithResponse call +func ParseDeleteOrgsIDSecretsIDResponse(rsp *http.Response) (*DeleteOrgsIDSecretsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteOrgsIDSecretsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetPingResponse parses an HTTP response from a GetPingWithResponse call +func ParseGetPingResponse(rsp *http.Response) (*GetPingResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetPingResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseHeadPingResponse parses an HTTP response from a HeadPingWithResponse call +func ParseHeadPingResponse(rsp *http.Response) (*HeadPingResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &HeadPingResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostQueryResponse parses an HTTP response from a PostQueryWithResponse call +func ParsePostQueryResponse(rsp *http.Response) (*PostQueryResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostQueryResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostQueryAnalyzeResponse parses an HTTP response from a PostQueryAnalyzeWithResponse call +func ParsePostQueryAnalyzeResponse(rsp *http.Response) (*PostQueryAnalyzeResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostQueryAnalyzeResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest AnalyzeQueryResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, 
&ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostQueryAstResponse parses an HTTP response from a PostQueryAstWithResponse call +func ParsePostQueryAstResponse(rsp *http.Response) (*PostQueryAstResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostQueryAstResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ASTResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetQuerySuggestionsResponse parses an HTTP response from a GetQuerySuggestionsWithResponse call +func ParseGetQuerySuggestionsResponse(rsp *http.Response) (*GetQuerySuggestionsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetQuerySuggestionsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest FluxSuggestions + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetQuerySuggestionsNameResponse parses an HTTP response from a GetQuerySuggestionsNameWithResponse call +func ParseGetQuerySuggestionsNameResponse(rsp *http.Response) (*GetQuerySuggestionsNameResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetQuerySuggestionsNameResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest FluxSuggestion + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetReadyResponse parses an HTTP response from a GetReadyWithResponse call +func ParseGetReadyResponse(rsp *http.Response) (*GetReadyResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetReadyResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + 
case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Ready + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetRemoteConnectionsResponse parses an HTTP response from a GetRemoteConnectionsWithResponse call +func ParseGetRemoteConnectionsResponse(rsp *http.Response) (*GetRemoteConnectionsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetRemoteConnectionsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest RemoteConnections + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostRemoteConnectionResponse parses an HTTP response from a PostRemoteConnectionWithResponse call +func ParsePostRemoteConnectionResponse(rsp *http.Response) (*PostRemoteConnectionResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostRemoteConnectionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest RemoteConnection + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteRemoteConnectionByIDResponse parses an HTTP response from a DeleteRemoteConnectionByIDWithResponse call +func ParseDeleteRemoteConnectionByIDResponse(rsp *http.Response) (*DeleteRemoteConnectionByIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteRemoteConnectionByIDResponse{ + Body: bodyBytes, + 
HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetRemoteConnectionByIDResponse parses an HTTP response from a GetRemoteConnectionByIDWithResponse call +func ParseGetRemoteConnectionByIDResponse(rsp *http.Response) (*GetRemoteConnectionByIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetRemoteConnectionByIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest RemoteConnection + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchRemoteConnectionByIDResponse parses an HTTP response from a PatchRemoteConnectionByIDWithResponse call +func ParsePatchRemoteConnectionByIDResponse(rsp *http.Response) (*PatchRemoteConnectionByIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchRemoteConnectionByIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest RemoteConnection + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetReplicationsResponse parses an HTTP response from a GetReplicationsWithResponse call +func 
ParseGetReplicationsResponse(rsp *http.Response) (*GetReplicationsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetReplicationsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Replications + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostReplicationResponse parses an HTTP response from a PostReplicationWithResponse call +func ParsePostReplicationResponse(rsp *http.Response) (*PostReplicationResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostReplicationResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest Replication + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteReplicationByIDResponse parses an HTTP response from a DeleteReplicationByIDWithResponse call +func ParseDeleteReplicationByIDResponse(rsp *http.Response) (*DeleteReplicationByIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteReplicationByIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetReplicationByIDResponse parses an HTTP response from a GetReplicationByIDWithResponse call +func ParseGetReplicationByIDResponse(rsp *http.Response) 
(*GetReplicationByIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetReplicationByIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Replication + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchReplicationByIDResponse parses an HTTP response from a PatchReplicationByIDWithResponse call +func ParsePatchReplicationByIDResponse(rsp *http.Response) (*PatchReplicationByIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchReplicationByIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Replication + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostValidateReplicationByIDResponse parses an HTTP response from a PostValidateReplicationByIDWithResponse call +func ParsePostValidateReplicationByIDResponse(rsp *http.Response) (*PostValidateReplicationByIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostValidateReplicationByIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: 
rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetResourcesResponse parses an HTTP response from a GetResourcesWithResponse call +func ParseGetResourcesResponse(rsp *http.Response) (*GetResourcesResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetResourcesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []string + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostRestoreBucketIDResponse parses an HTTP response from a PostRestoreBucketIDWithResponse call +func ParsePostRestoreBucketIDResponse(rsp *http.Response) (*PostRestoreBucketIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostRestoreBucketIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []byte + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostRestoreBucketMetadataResponse parses an HTTP response from a PostRestoreBucketMetadataWithResponse call +func ParsePostRestoreBucketMetadataResponse(rsp *http.Response) (*PostRestoreBucketMetadataResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostRestoreBucketMetadataResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest RestoredBucketMappings + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostRestoreKVResponse parses an HTTP response from a PostRestoreKVWithResponse call +func ParsePostRestoreKVResponse(rsp *http.Response) (*PostRestoreKVResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostRestoreKVResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch 
{ + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // token is the root token for the instance after restore (this is overwritten during the restore) + Token *string `json:"token,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostRestoreShardIdResponse parses an HTTP response from a PostRestoreShardIdWithResponse call +func ParsePostRestoreShardIdResponse(rsp *http.Response) (*PostRestoreShardIdResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostRestoreShardIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostRestoreSQLResponse parses an HTTP response from a PostRestoreSQLWithResponse call +func ParsePostRestoreSQLResponse(rsp *http.Response) (*PostRestoreSQLResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostRestoreSQLResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetScrapersResponse parses an HTTP response from a GetScrapersWithResponse call +func ParseGetScrapersResponse(rsp *http.Response) (*GetScrapersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetScrapersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ScraperTargetResponses + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostScrapersResponse parses an HTTP response from a PostScrapersWithResponse call +func ParsePostScrapersResponse(rsp *http.Response) (*PostScrapersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostScrapersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case 
strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest ScraperTargetResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteScrapersIDResponse parses an HTTP response from a DeleteScrapersIDWithResponse call +func ParseDeleteScrapersIDResponse(rsp *http.Response) (*DeleteScrapersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteScrapersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetScrapersIDResponse parses an HTTP response from a GetScrapersIDWithResponse call +func ParseGetScrapersIDResponse(rsp *http.Response) (*GetScrapersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetScrapersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ScraperTargetResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchScrapersIDResponse parses an HTTP response from a PatchScrapersIDWithResponse call +func ParsePatchScrapersIDResponse(rsp *http.Response) (*PatchScrapersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchScrapersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ScraperTargetResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetScrapersIDLabelsResponse parses an HTTP response from a GetScrapersIDLabelsWithResponse call 
+func ParseGetScrapersIDLabelsResponse(rsp *http.Response) (*GetScrapersIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetScrapersIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest LabelsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostScrapersIDLabelsResponse parses an HTTP response from a PostScrapersIDLabelsWithResponse call +func ParsePostScrapersIDLabelsResponse(rsp *http.Response) (*PostScrapersIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostScrapersIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest LabelResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteScrapersIDLabelsIDResponse parses an HTTP response from a DeleteScrapersIDLabelsIDWithResponse call +func ParseDeleteScrapersIDLabelsIDResponse(rsp *http.Response) (*DeleteScrapersIDLabelsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteScrapersIDLabelsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetScrapersIDMembersResponse parses an HTTP response from a GetScrapersIDMembersWithResponse call +func ParseGetScrapersIDMembersResponse(rsp *http.Response) (*GetScrapersIDMembersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetScrapersIDMembersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest 
ResourceMembers + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostScrapersIDMembersResponse parses an HTTP response from a PostScrapersIDMembersWithResponse call +func ParsePostScrapersIDMembersResponse(rsp *http.Response) (*PostScrapersIDMembersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostScrapersIDMembersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest ResourceMember + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteScrapersIDMembersIDResponse parses an HTTP response from a DeleteScrapersIDMembersIDWithResponse call +func ParseDeleteScrapersIDMembersIDResponse(rsp *http.Response) (*DeleteScrapersIDMembersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteScrapersIDMembersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetScrapersIDOwnersResponse parses an HTTP response from a GetScrapersIDOwnersWithResponse call +func ParseGetScrapersIDOwnersResponse(rsp *http.Response) (*GetScrapersIDOwnersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetScrapersIDOwnersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ResourceOwners + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostScrapersIDOwnersResponse parses an HTTP response from a PostScrapersIDOwnersWithResponse call +func 
ParsePostScrapersIDOwnersResponse(rsp *http.Response) (*PostScrapersIDOwnersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostScrapersIDOwnersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest ResourceOwner + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteScrapersIDOwnersIDResponse parses an HTTP response from a DeleteScrapersIDOwnersIDWithResponse call +func ParseDeleteScrapersIDOwnersIDResponse(rsp *http.Response) (*DeleteScrapersIDOwnersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteScrapersIDOwnersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetSetupResponse parses an HTTP response from a GetSetupWithResponse call +func ParseGetSetupResponse(rsp *http.Response) (*GetSetupResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetSetupResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest IsOnboarding + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostSetupResponse parses an HTTP response from a PostSetupWithResponse call +func ParsePostSetupResponse(rsp *http.Response) (*PostSetupResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostSetupResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest OnboardingResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostSigninResponse 
parses an HTTP response from a PostSigninWithResponse call +func ParsePostSigninResponse(rsp *http.Response) (*PostSigninResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostSigninResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostSignoutResponse parses an HTTP response from a PostSignoutWithResponse call +func ParsePostSignoutResponse(rsp *http.Response) (*PostSignoutResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostSignoutResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetSourcesResponse parses an HTTP response from a GetSourcesWithResponse call +func ParseGetSourcesResponse(rsp *http.Response) (*GetSourcesResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetSourcesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Sources + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostSourcesResponse parses an HTTP response from a PostSourcesWithResponse call +func ParsePostSourcesResponse(rsp *http.Response) (*PostSourcesResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostSourcesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case 
strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest Source + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteSourcesIDResponse parses an HTTP response from a DeleteSourcesIDWithResponse call +func ParseDeleteSourcesIDResponse(rsp *http.Response) (*DeleteSourcesIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteSourcesIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetSourcesIDResponse parses an HTTP response from a GetSourcesIDWithResponse call +func ParseGetSourcesIDResponse(rsp *http.Response) (*GetSourcesIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetSourcesIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Source + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchSourcesIDResponse parses an HTTP response from a PatchSourcesIDWithResponse call +func ParsePatchSourcesIDResponse(rsp *http.Response) (*PatchSourcesIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchSourcesIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Source + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := 
json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetSourcesIDBucketsResponse parses an HTTP response from a GetSourcesIDBucketsWithResponse call +func ParseGetSourcesIDBucketsResponse(rsp *http.Response) (*GetSourcesIDBucketsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetSourcesIDBucketsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Buckets + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetSourcesIDHealthResponse parses an HTTP response from a GetSourcesIDHealthWithResponse call +func ParseGetSourcesIDHealthResponse(rsp *http.Response) (*GetSourcesIDHealthResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetSourcesIDHealthResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest HealthCheck + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 503: + var dest HealthCheck + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON503 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseListStacksResponse parses an HTTP response from a ListStacksWithResponse call +func ParseListStacksResponse(rsp *http.Response) (*ListStacksResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &ListStacksResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Stacks *[]Stack `json:"stacks,omitempty"` + } + if err := json.Unmarshal(bodyBytes, 
&dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseCreateStackResponse parses an HTTP response from a CreateStackWithResponse call +func ParseCreateStackResponse(rsp *http.Response) (*CreateStackResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &CreateStackResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest Stack + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteStackResponse parses an HTTP response from a DeleteStackWithResponse call +func ParseDeleteStackResponse(rsp *http.Response) (*DeleteStackResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteStackResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseReadStackResponse parses an HTTP response from a ReadStackWithResponse call +func ParseReadStackResponse(rsp *http.Response) (*ReadStackResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &ReadStackResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Stack + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseUpdateStackResponse parses an HTTP response from a UpdateStackWithResponse call +func ParseUpdateStackResponse(rsp *http.Response) (*UpdateStackResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &UpdateStackResponse{ + Body: bodyBytes, + HTTPResponse: rsp, 
+ } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Stack + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseUninstallStackResponse parses an HTTP response from a UninstallStackWithResponse call +func ParseUninstallStackResponse(rsp *http.Response) (*UninstallStackResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &UninstallStackResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Stack + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetTasksResponse parses an HTTP response from a GetTasksWithResponse call +func ParseGetTasksResponse(rsp *http.Response) (*GetTasksResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetTasksResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Tasks + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostTasksResponse parses an HTTP response from a PostTasksWithResponse call +func ParsePostTasksResponse(rsp *http.Response) (*PostTasksResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostTasksResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest Task + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return 
response, nil +} + +// ParseDeleteTasksIDResponse parses an HTTP response from a DeleteTasksIDWithResponse call +func ParseDeleteTasksIDResponse(rsp *http.Response) (*DeleteTasksIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteTasksIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetTasksIDResponse parses an HTTP response from a GetTasksIDWithResponse call +func ParseGetTasksIDResponse(rsp *http.Response) (*GetTasksIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetTasksIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Task + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchTasksIDResponse parses an HTTP response from a PatchTasksIDWithResponse call +func ParsePatchTasksIDResponse(rsp *http.Response) (*PatchTasksIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchTasksIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Task + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetTasksIDLabelsResponse parses an HTTP response from a GetTasksIDLabelsWithResponse call +func ParseGetTasksIDLabelsResponse(rsp *http.Response) (*GetTasksIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetTasksIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest LabelsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return 
nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostTasksIDLabelsResponse parses an HTTP response from a PostTasksIDLabelsWithResponse call +func ParsePostTasksIDLabelsResponse(rsp *http.Response) (*PostTasksIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostTasksIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest LabelResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteTasksIDLabelsIDResponse parses an HTTP response from a DeleteTasksIDLabelsIDWithResponse call +func ParseDeleteTasksIDLabelsIDResponse(rsp *http.Response) (*DeleteTasksIDLabelsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteTasksIDLabelsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetTasksIDLogsResponse parses an HTTP response from a GetTasksIDLogsWithResponse call +func ParseGetTasksIDLogsResponse(rsp *http.Response) (*GetTasksIDLogsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetTasksIDLogsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Logs + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetTasksIDMembersResponse parses an HTTP response from a GetTasksIDMembersWithResponse call +func ParseGetTasksIDMembersResponse(rsp *http.Response) (*GetTasksIDMembersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer 
rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetTasksIDMembersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ResourceMembers + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostTasksIDMembersResponse parses an HTTP response from a PostTasksIDMembersWithResponse call +func ParsePostTasksIDMembersResponse(rsp *http.Response) (*PostTasksIDMembersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostTasksIDMembersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest ResourceMember + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteTasksIDMembersIDResponse parses an HTTP response from a DeleteTasksIDMembersIDWithResponse call +func ParseDeleteTasksIDMembersIDResponse(rsp *http.Response) (*DeleteTasksIDMembersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteTasksIDMembersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetTasksIDOwnersResponse parses an HTTP response from a GetTasksIDOwnersWithResponse call +func ParseGetTasksIDOwnersResponse(rsp *http.Response) (*GetTasksIDOwnersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetTasksIDOwnersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ResourceOwners + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, 
&ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostTasksIDOwnersResponse parses an HTTP response from a PostTasksIDOwnersWithResponse call +func ParsePostTasksIDOwnersResponse(rsp *http.Response) (*PostTasksIDOwnersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostTasksIDOwnersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest ResourceOwner + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteTasksIDOwnersIDResponse parses an HTTP response from a DeleteTasksIDOwnersIDWithResponse call +func ParseDeleteTasksIDOwnersIDResponse(rsp *http.Response) (*DeleteTasksIDOwnersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteTasksIDOwnersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetTasksIDRunsResponse parses an HTTP response from a GetTasksIDRunsWithResponse call +func ParseGetTasksIDRunsResponse(rsp *http.Response) (*GetTasksIDRunsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetTasksIDRunsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Runs + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostTasksIDRunsResponse parses an HTTP response from a PostTasksIDRunsWithResponse call +func ParsePostTasksIDRunsResponse(rsp *http.Response) (*PostTasksIDRunsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostTasksIDRunsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest Run + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + 
+ case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteTasksIDRunsIDResponse parses an HTTP response from a DeleteTasksIDRunsIDWithResponse call +func ParseDeleteTasksIDRunsIDResponse(rsp *http.Response) (*DeleteTasksIDRunsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteTasksIDRunsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetTasksIDRunsIDResponse parses an HTTP response from a GetTasksIDRunsIDWithResponse call +func ParseGetTasksIDRunsIDResponse(rsp *http.Response) (*GetTasksIDRunsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetTasksIDRunsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Run + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetTasksIDRunsIDLogsResponse parses an HTTP response from a GetTasksIDRunsIDLogsWithResponse call +func ParseGetTasksIDRunsIDLogsResponse(rsp *http.Response) (*GetTasksIDRunsIDLogsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetTasksIDRunsIDLogsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Logs + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostTasksIDRunsIDRetryResponse parses an HTTP response from a PostTasksIDRunsIDRetryWithResponse call +func ParsePostTasksIDRunsIDRetryResponse(rsp *http.Response) (*PostTasksIDRunsIDRetryResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return 
nil, err + } + + response := &PostTasksIDRunsIDRetryResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Run + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetTelegrafPluginsResponse parses an HTTP response from a GetTelegrafPluginsWithResponse call +func ParseGetTelegrafPluginsResponse(rsp *http.Response) (*GetTelegrafPluginsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetTelegrafPluginsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest TelegrafPlugins + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetTelegrafsResponse parses an HTTP response from a GetTelegrafsWithResponse call +func ParseGetTelegrafsResponse(rsp *http.Response) (*GetTelegrafsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetTelegrafsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Telegrafs + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostTelegrafsResponse parses an HTTP response from a PostTelegrafsWithResponse call +func ParsePostTelegrafsResponse(rsp *http.Response) (*PostTelegrafsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostTelegrafsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest Telegraf + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = 
&dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteTelegrafsIDResponse parses an HTTP response from a DeleteTelegrafsIDWithResponse call +func ParseDeleteTelegrafsIDResponse(rsp *http.Response) (*DeleteTelegrafsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteTelegrafsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetTelegrafsIDResponse parses an HTTP response from a GetTelegrafsIDWithResponse call +func ParseGetTelegrafsIDResponse(rsp *http.Response) (*GetTelegrafsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetTelegrafsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Telegraf + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + case rsp.StatusCode == 200: + // Content-type (application/toml) unsupported + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePutTelegrafsIDResponse parses an HTTP response from a PutTelegrafsIDWithResponse call +func ParsePutTelegrafsIDResponse(rsp *http.Response) (*PutTelegrafsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PutTelegrafsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Telegraf + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetTelegrafsIDLabelsResponse parses an HTTP response from a GetTelegrafsIDLabelsWithResponse call +func ParseGetTelegrafsIDLabelsResponse(rsp *http.Response) (*GetTelegrafsIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetTelegrafsIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case 
strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest LabelsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostTelegrafsIDLabelsResponse parses an HTTP response from a PostTelegrafsIDLabelsWithResponse call +func ParsePostTelegrafsIDLabelsResponse(rsp *http.Response) (*PostTelegrafsIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostTelegrafsIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest LabelResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteTelegrafsIDLabelsIDResponse parses an HTTP response from a DeleteTelegrafsIDLabelsIDWithResponse call +func ParseDeleteTelegrafsIDLabelsIDResponse(rsp *http.Response) (*DeleteTelegrafsIDLabelsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteTelegrafsIDLabelsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetTelegrafsIDMembersResponse parses an HTTP response from a GetTelegrafsIDMembersWithResponse call +func ParseGetTelegrafsIDMembersResponse(rsp *http.Response) (*GetTelegrafsIDMembersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetTelegrafsIDMembersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ResourceMembers + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + 
response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostTelegrafsIDMembersResponse parses an HTTP response from a PostTelegrafsIDMembersWithResponse call +func ParsePostTelegrafsIDMembersResponse(rsp *http.Response) (*PostTelegrafsIDMembersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostTelegrafsIDMembersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest ResourceMember + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteTelegrafsIDMembersIDResponse parses an HTTP response from a DeleteTelegrafsIDMembersIDWithResponse call +func ParseDeleteTelegrafsIDMembersIDResponse(rsp *http.Response) (*DeleteTelegrafsIDMembersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteTelegrafsIDMembersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetTelegrafsIDOwnersResponse parses an HTTP response from a GetTelegrafsIDOwnersWithResponse call +func ParseGetTelegrafsIDOwnersResponse(rsp *http.Response) (*GetTelegrafsIDOwnersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetTelegrafsIDOwnersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ResourceOwners + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostTelegrafsIDOwnersResponse parses an HTTP response from a PostTelegrafsIDOwnersWithResponse call +func ParsePostTelegrafsIDOwnersResponse(rsp *http.Response) (*PostTelegrafsIDOwnersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostTelegrafsIDOwnersResponse{ + Body: bodyBytes, + 
HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest ResourceOwner + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteTelegrafsIDOwnersIDResponse parses an HTTP response from a DeleteTelegrafsIDOwnersIDWithResponse call +func ParseDeleteTelegrafsIDOwnersIDResponse(rsp *http.Response) (*DeleteTelegrafsIDOwnersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteTelegrafsIDOwnersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseApplyTemplateResponse parses an HTTP response from a ApplyTemplateWithResponse call +func ParseApplyTemplateResponse(rsp *http.Response) (*ApplyTemplateResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &ApplyTemplateResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest TemplateSummary + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest TemplateSummary + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseExportTemplateResponse parses an HTTP response from a ExportTemplateWithResponse call +func ParseExportTemplateResponse(rsp *http.Response) (*ExportTemplateResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &ExportTemplateResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Template + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + case 
strings.Contains(rsp.Header.Get("Content-Type"), "yaml") && rsp.StatusCode == 200: + var dest Template + if err := yaml.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.YAML200 = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetUsersResponse parses an HTTP response from a GetUsersWithResponse call +func ParseGetUsersResponse(rsp *http.Response) (*GetUsersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetUsersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Users + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostUsersResponse parses an HTTP response from a PostUsersWithResponse call +func ParsePostUsersResponse(rsp *http.Response) (*PostUsersResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostUsersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest UserResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteUsersIDResponse parses an HTTP response from a DeleteUsersIDWithResponse call +func ParseDeleteUsersIDResponse(rsp *http.Response) (*DeleteUsersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteUsersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetUsersIDResponse parses an HTTP response from a GetUsersIDWithResponse call +func ParseGetUsersIDResponse(rsp *http.Response) (*GetUsersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetUsersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case 
strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest UserResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchUsersIDResponse parses an HTTP response from a PatchUsersIDWithResponse call +func ParsePatchUsersIDResponse(rsp *http.Response) (*PatchUsersIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchUsersIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest UserResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostUsersIDPasswordResponse parses an HTTP response from a PostUsersIDPasswordWithResponse call +func ParsePostUsersIDPasswordResponse(rsp *http.Response) (*PostUsersIDPasswordResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostUsersIDPasswordResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetVariablesResponse parses an HTTP response from a GetVariablesWithResponse call +func ParseGetVariablesResponse(rsp *http.Response) (*GetVariablesResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetVariablesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Variables + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: 
rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostVariablesResponse parses an HTTP response from a PostVariablesWithResponse call +func ParsePostVariablesResponse(rsp *http.Response) (*PostVariablesResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostVariablesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest Variable + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteVariablesIDResponse parses an HTTP response from a DeleteVariablesIDWithResponse call +func ParseDeleteVariablesIDResponse(rsp *http.Response) (*DeleteVariablesIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteVariablesIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetVariablesIDResponse parses an HTTP response from a GetVariablesIDWithResponse call +func ParseGetVariablesIDResponse(rsp *http.Response) (*GetVariablesIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetVariablesIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Variable + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePatchVariablesIDResponse parses an HTTP response from a PatchVariablesIDWithResponse call +func ParsePatchVariablesIDResponse(rsp *http.Response) (*PatchVariablesIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PatchVariablesIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), 
"json") && rsp.StatusCode == 200: + var dest Variable + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePutVariablesIDResponse parses an HTTP response from a PutVariablesIDWithResponse call +func ParsePutVariablesIDResponse(rsp *http.Response) (*PutVariablesIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PutVariablesIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest Variable + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseGetVariablesIDLabelsResponse parses an HTTP response from a GetVariablesIDLabelsWithResponse call +func ParseGetVariablesIDLabelsResponse(rsp *http.Response) (*GetVariablesIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &GetVariablesIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest LabelsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostVariablesIDLabelsResponse parses an HTTP response from a PostVariablesIDLabelsWithResponse call +func ParsePostVariablesIDLabelsResponse(rsp *http.Response) (*PostVariablesIDLabelsResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostVariablesIDLabelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest LabelResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, 
&ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParseDeleteVariablesIDLabelsIDResponse parses an HTTP response from a DeleteVariablesIDLabelsIDWithResponse call +func ParseDeleteVariablesIDLabelsIDResponse(rsp *http.Response) (*DeleteVariablesIDLabelsIDResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &DeleteVariablesIDLabelsIDResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} + +// ParsePostWriteResponse parses an HTTP response from a PostWriteWithResponse call +func ParsePostWriteResponse(rsp *http.Response) (*PostWriteResponse, error) { + bodyBytes, err := ioutil.ReadAll(rsp.Body) + defer rsp.Body.Close() + if err != nil { + return nil, err + } + + response := &PostWriteResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest LineProtocolError + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 413: + var dest LineProtocolLengthError + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON413 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest Error + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + + case rsp.StatusCode == 413: + // Content-type (text/html) unsupported + + // Fallback for unexpected error + default: + if rsp.StatusCode > 299 { + return nil, &ihttp.Error{StatusCode: rsp.StatusCode, Message: rsp.Status} + } + } + + return response, nil +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/domain/oss.yml b/vendor/github.com/influxdata/influxdb-client-go/v2/domain/oss.yml new file mode 100644 index 0000000..eb891a1 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/domain/oss.yml @@ -0,0 +1,13288 @@ +openapi: 3.0.0 +info: + title: InfluxDB OSS API Service + version: 2.0.0 + description: | + The InfluxDB v2 API provides a programmatic interface for all 
interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. +servers: + - url: /api/v2 +tags: + - name: Authentication + description: | + The InfluxDB `/api/v2` API requires authentication for all requests. + Use InfluxDB API tokens to authenticate requests to the `/api/v2` API. + + + For examples and more information, see + [Token authentication](#section/Authentication/TokenAuthentication) + + + x-traitTag: true + - name: Quick start + x-traitTag: true + description: | + See the [**API Quick Start**](https://docs.influxdata.com/influxdb/v2.1/api-guide/api_intro/) to get up and running authenticating with tokens, writing to buckets, and querying data. + + [**InfluxDB API client libraries**](https://docs.influxdata.com/influxdb/v2.1/api-guide/client-libraries/) are available for popular languages and ready to import into your application. + - name: Response codes + x-traitTag: true + description: | + The InfluxDB API uses standard HTTP status codes for success and failure responses. + The response body may include additional details. For details about a specific operation's response, see **Responses** and **Response Samples** for that operation. + + API operations may return the following HTTP status codes: + + |  Code  | Status | Description | + |:-----------:|:------------------------ |:--------------------- | + | `200` | Success | | + | `204` | No content | For a `POST` request, `204` indicates that InfluxDB accepted the request and request data is valid. Asynchronous operations, such as `write`, might not have completed yet. | + | `400` | Bad request | `Authorization` header is missing or malformed or the API token does not have permission for the operation. | + | `401` | Unauthorized | May indicate one of the following:
  • `Authorization: Token` header is missing or malformed
  • API token value is missing from the header
  • API token does not have permission. For more information about token types and permissions, see [Manage API tokens](https://docs.influxdata.com/influxdb/v2.1/security/tokens/)
  • | + | `404` | Not found | Requested resource was not found. `message` in the response body provides details about the requested resource. | + | `413` | Request entity too large | Request payload exceeds the size limit. | + | `422` | Unprocessible entity | Request data is invalid. `code` and `message` in the response body provide details about the problem. | + | `429` | Too many requests | API token is temporarily over the request quota. The `Retry-After` header describes when to try the request again. | + | `500` | Internal server error | | + | `503` | Service unavailable | Server is temporarily unavailable to process the request. The `Retry-After` header describes when to try the request again. | + - name: Query + description: | + Retrieve data, analyze queries, and get query suggestions. + - name: Write + description: | + Write time series data to buckets. + - name: Authorizations + description: | + Create and manage API tokens. An **authorization** associates a list of permissions to an **organization** and provides a token for API access. Optionally, you can restrict an authorization and its token to a specific user. + + For more information and examples, see the following: + - [Authorize API requests](https://docs.influxdata.com/influxdb/v2.1/api-guide/api_intro/#authentication). + - [Manage API tokens](https://docs.influxdata.com/influxdb/v2.1/security/tokens/). + - [Assign a token to a specific user](https://docs.influxdata.com/influxdb/v2.1/security/tokens/create-token/). +x-tagGroups: + - name: Overview + tags: + - Quick start + - Authentication + - Response codes + - name: Data I/O endpoints + tags: + - Write + - Query + - name: Resource endpoints + tags: + - Buckets + - Dashboards + - Tasks + - Resources + - name: Security and access endpoints + tags: + - Authorizations + - Organizations + - Users + - name: System information endpoints + tags: + - Health + - Metrics + - Ping + - Ready + - Routes + - name: All endpoints + tags: + - Authorizations + - Backup + - Buckets + - Cells + - Checks + - Config + - DBRPs + - Dashboards + - Delete + - Health + - Metrics + - Labels + - Legacy Authorizations + - NotificationEndpoints + - NotificationRules + - Organizations + - Ping + - Query + - Ready + - RemoteConnections + - Replications + - Resources + - Restore + - Routes + - Rules + - Scraper Targets + - Secrets + - Setup + - Signin + - Signout + - Sources + - Tasks + - Telegraf Plugins + - Telegrafs + - Templates + - Users + - Variables + - Write +paths: + /signin: + post: + operationId: PostSignin + summary: Create a user session. + description: 'Authenticates ***Basic Auth*** credentials for a user. If successful, creates a new UI session for the user.' + tags: + - Signin + security: + - BasicAuthentication: [] + parameters: + - $ref: '#/components/parameters/TraceSpan' + responses: + '204': + description: Success. User authenticated. + '401': + description: Unauthorized access. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '403': + description: User account is disabled. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unsuccessful authentication. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /signout: + post: + operationId: PostSignout + summary: Expire the current UI session + tags: + - Signout + description: Expires the current UI session for the user. 
+ parameters: + - $ref: '#/components/parameters/TraceSpan' + responses: + '204': + description: Session successfully expired + '401': + description: Unauthorized access + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unsuccessful session expiry + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /ping: + get: + operationId: GetPing + summary: Checks the status of InfluxDB instance and version of InfluxDB. + servers: + - url: '' + tags: + - Ping + responses: + '204': + description: OK + headers: + X-Influxdb-Build: + schema: + type: string + description: The type of InfluxDB build. + X-Influxdb-Version: + schema: + type: integer + description: The version of InfluxDB. + head: + operationId: HeadPing + summary: Checks the status of InfluxDB instance and version of InfluxDB. + servers: + - url: '' + tags: + - Ping + responses: + '204': + description: OK + headers: + X-Influxdb-Build: + schema: + type: string + description: The type of InfluxDB build. + X-Influxdb-Version: + schema: + type: integer + description: The version of InfluxDB. + /: + get: + operationId: GetRoutes + summary: List all top level routes + tags: + - Routes + parameters: + - $ref: '#/components/parameters/TraceSpan' + responses: + default: + description: All routes + content: + application/json: + schema: + $ref: '#/components/schemas/Routes' + /dbrps: + get: + operationId: GetDBRPs + tags: + - DBRPs + summary: List database retention policy mappings + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: query + name: orgID + description: Specifies the organization ID to filter on + schema: + type: string + - in: query + name: org + description: Specifies the organization name to filter on + schema: + type: string + - in: query + name: id + description: Specifies the mapping ID to filter on + schema: + type: string + - in: query + name: bucketID + description: Specifies the bucket ID to filter on + schema: + type: string + - in: query + name: default + description: Specifies filtering on default + schema: + type: boolean + - in: query + name: db + description: Specifies the database to filter on + schema: + type: string + - in: query + name: rp + description: Specifies the retention policy to filter on + schema: + type: string + responses: + '200': + description: Success. Returns a list of database retention policy mappings. + content: + application/json: + schema: + $ref: '#/components/schemas/DBRPs' + '400': + description: Bad request. The request has one or more invalid parameters. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostDBRP + tags: + - DBRPs + summary: Add a database retention policy mapping + parameters: + - $ref: '#/components/parameters/TraceSpan' + requestBody: + description: The database retention policy mapping to add + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DBRPCreate' + responses: + '201': + description: Created. Returns the created database retention policy mapping. + content: + application/json: + schema: + $ref: '#/components/schemas/DBRP' + '400': + description: Bad request. The mapping in the request has one or more invalid IDs. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/dbrps/{dbrpID}': + get: + operationId: GetDBRPsID + tags: + - DBRPs + summary: Retrieve a database retention policy mapping + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: query + name: orgID + description: Specifies the organization ID of the mapping + schema: + type: string + - in: query + name: org + description: Specifies the organization name of the mapping + schema: + type: string + - in: path + name: dbrpID + schema: + type: string + required: true + description: The database retention policy mapping ID + responses: + '200': + description: The database retention policy requested + content: + application/json: + schema: + $ref: '#/components/schemas/DBRPGet' + '400': + description: if any of the IDs passed is invalid + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + patch: + operationId: PatchDBRPID + tags: + - DBRPs + summary: Update a database retention policy mapping + requestBody: + description: Database retention policy update to apply + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DBRPUpdate' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: query + name: orgID + description: Specifies the organization ID of the mapping + schema: + type: string + - in: query + name: org + description: Specifies the organization name of the mapping + schema: + type: string + - in: path + name: dbrpID + schema: + type: string + required: true + description: The database retention policy mapping. + responses: + '200': + description: An updated mapping + content: + application/json: + schema: + $ref: '#/components/schemas/DBRPGet' + '400': + description: if any of the IDs passed is invalid + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '404': + description: The mapping was not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + delete: + operationId: DeleteDBRPID + tags: + - DBRPs + summary: Delete a database retention policy + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: query + name: orgID + description: Specifies the organization ID of the mapping + schema: + type: string + - in: query + name: org + description: Specifies the organization name of the mapping + schema: + type: string + - in: path + name: dbrpID + schema: + type: string + required: true + description: The database retention policy mapping + responses: + '204': + description: Delete has been accepted + '400': + description: if any of the IDs passed is invalid + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /telegraf/plugins: + get: + operationId: GetTelegrafPlugins + tags: + - Telegraf Plugins + summary: List all Telegraf plugins + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: query + name: type + description: The type of plugin desired. + schema: + type: string + responses: + '200': + description: A list of Telegraf plugins. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/TelegrafPlugins' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /telegrafs: + get: + operationId: GetTelegrafs + tags: + - Telegrafs + summary: List all Telegraf configurations + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: query + name: orgID + description: The organization ID the Telegraf config belongs to. + schema: + type: string + responses: + '200': + description: A list of Telegraf configurations + content: + application/json: + schema: + $ref: '#/components/schemas/Telegrafs' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostTelegrafs + tags: + - Telegrafs + summary: Create a Telegraf configuration + parameters: + - $ref: '#/components/parameters/TraceSpan' + requestBody: + description: Telegraf configuration to create + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/TelegrafPluginRequest' + responses: + '201': + description: Telegraf configuration created + content: + application/json: + schema: + $ref: '#/components/schemas/Telegraf' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/telegrafs/{telegrafID}': + get: + operationId: GetTelegrafsID + tags: + - Telegrafs + summary: Retrieve a Telegraf configuration + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: telegrafID + schema: + type: string + required: true + description: The Telegraf configuration ID. + - in: header + name: Accept + required: false + schema: + type: string + default: application/toml + enum: + - application/toml + - application/json + - application/octet-stream + responses: + '200': + description: Telegraf configuration details + content: + application/toml: + example: |- + [agent] + interval = "10s" + schema: + type: string + application/json: + schema: + $ref: '#/components/schemas/Telegraf' + application/octet-stream: + example: |- + [agent] + interval = "10s" + schema: + type: string + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + put: + operationId: PutTelegrafsID + tags: + - Telegrafs + summary: Update a Telegraf configuration + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: telegrafID + schema: + type: string + required: true + description: The Telegraf config ID. + requestBody: + description: Telegraf configuration update to apply + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/TelegrafPluginRequest' + responses: + '200': + description: An updated Telegraf configurations + content: + application/json: + schema: + $ref: '#/components/schemas/Telegraf' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + delete: + operationId: DeleteTelegrafsID + tags: + - Telegrafs + summary: Delete a Telegraf configuration + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: telegrafID + schema: + type: string + required: true + description: The Telegraf configuration ID. 
+ responses: + '204': + description: Delete has been accepted + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/telegrafs/{telegrafID}/labels': + get: + operationId: GetTelegrafsIDLabels + tags: + - Telegrafs + summary: List all labels for a Telegraf config + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: telegrafID + schema: + type: string + required: true + description: The Telegraf config ID. + responses: + '200': + description: A list of all labels for a Telegraf config + content: + application/json: + schema: + $ref: '#/components/schemas/LabelsResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostTelegrafsIDLabels + tags: + - Telegrafs + summary: Add a label to a Telegraf config + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: telegrafID + schema: + type: string + required: true + description: The Telegraf config ID. + requestBody: + description: Label to add + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/LabelMapping' + responses: + '201': + description: The label added to the Telegraf config + content: + application/json: + schema: + $ref: '#/components/schemas/LabelResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/telegrafs/{telegrafID}/labels/{labelID}': + delete: + operationId: DeleteTelegrafsIDLabelsID + tags: + - Telegrafs + summary: Delete a label from a Telegraf config + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: telegrafID + schema: + type: string + required: true + description: The Telegraf config ID. + - in: path + name: labelID + schema: + type: string + required: true + description: The label ID. + responses: + '204': + description: Delete has been accepted + '404': + description: Telegraf config not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/telegrafs/{telegrafID}/members': + get: + operationId: GetTelegrafsIDMembers + tags: + - Telegrafs + summary: List all users with member privileges for a Telegraf config + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: telegrafID + schema: + type: string + required: true + description: The Telegraf config ID. + responses: + '200': + description: A list of Telegraf config members + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceMembers' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostTelegrafsIDMembers + tags: + - Telegrafs + summary: Add a member to a Telegraf config + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: telegrafID + schema: + type: string + required: true + description: The Telegraf config ID. 
+ requestBody: + description: User to add as member + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/AddResourceMemberRequestBody' + responses: + '201': + description: Member added to Telegraf config + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceMember' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/telegrafs/{telegrafID}/members/{userID}': + delete: + operationId: DeleteTelegrafsIDMembersID + tags: + - Telegrafs + summary: Remove a member from a Telegraf config + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: userID + schema: + type: string + required: true + description: The ID of the member to remove. + - in: path + name: telegrafID + schema: + type: string + required: true + description: The Telegraf config ID. + responses: + '204': + description: Member removed + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/telegrafs/{telegrafID}/owners': + get: + operationId: GetTelegrafsIDOwners + tags: + - Telegrafs + summary: List all owners of a Telegraf configuration + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: telegrafID + schema: + type: string + required: true + description: The Telegraf configuration ID. + responses: + '200': + description: Returns Telegraf configuration owners as a ResourceOwners list + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceOwners' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostTelegrafsIDOwners + tags: + - Telegrafs + summary: Add an owner to a Telegraf configuration + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: telegrafID + schema: + type: string + required: true + description: The Telegraf configuration ID. + requestBody: + description: User to add as owner + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/AddResourceMemberRequestBody' + responses: + '201': + description: Telegraf configuration owner was added. Returns a ResourceOwner that references the User. + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceOwner' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/telegrafs/{telegrafID}/owners/{userID}': + delete: + operationId: DeleteTelegrafsIDOwnersID + tags: + - Telegrafs + summary: Remove an owner from a Telegraf config + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: userID + schema: + type: string + required: true + description: The ID of the owner to remove. + - in: path + name: telegrafID + schema: + type: string + required: true + description: The Telegraf config ID. + responses: + '204': + description: Owner removed + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/variables/{variableID}/labels': + get: + operationId: GetVariablesIDLabels + tags: + - Variables + summary: List all labels for a variable + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: variableID + schema: + type: string + required: true + description: The variable ID. 
+ responses: + '200': + description: A list of all labels for a variable + content: + application/json: + schema: + $ref: '#/components/schemas/LabelsResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostVariablesIDLabels + tags: + - Variables + summary: Add a label to a variable + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: variableID + schema: + type: string + required: true + description: The variable ID. + requestBody: + description: Label to add + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/LabelMapping' + responses: + '201': + description: The newly added label + content: + application/json: + schema: + $ref: '#/components/schemas/LabelResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/variables/{variableID}/labels/{labelID}': + delete: + operationId: DeleteVariablesIDLabelsID + tags: + - Variables + summary: Delete a label from a variable + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: variableID + schema: + type: string + required: true + description: The variable ID. + - in: path + name: labelID + schema: + type: string + required: true + description: The label ID to delete. + responses: + '204': + description: Delete has been accepted + '404': + description: Variable not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /write: + post: + operationId: PostWrite + tags: + - Write + summary: Write data + description: | + Writes data to a bucket. + + To write data into InfluxDB, you need the following: + + - **organization name or ID** – _See [View organizations](https://docs.influxdata.com/influxdb/v2.1/organizations/view-orgs/#view-your-organization-id) for instructions on viewing your organization ID._ + - **bucket** – _See [View buckets](https://docs.influxdata.com/influxdb/v2.1/organizations/buckets/view-buckets/) for + instructions on viewing your bucket ID._ + - **API token** – _See [View tokens](https://docs.influxdata.com/influxdb/v2.1/security/tokens/view-tokens/) + for instructions on viewing your API token._ + - **InfluxDB URL** – _See [InfluxDB URLs](https://docs.influxdata.com/influxdb/v2.1/reference/urls/)_. + - data in [line protocol](https://docs.influxdata.com/influxdb/v2.1/reference/syntax/line-protocol) format. + + InfluxDB Cloud enforces rate and size limits different from InfluxDB OSS. For details, see Responses. + + For more information and examples, see the following: + - [Write data with the InfluxDB API](https://docs.influxdata.com/influxdb/v2.1/write-data/developer-tools/api). + - [Optimize writes to InfluxDB](https://docs.influxdata.com/influxdb/v2.1/write-data/best-practices/optimize-writes/). + requestBody: + description: Data in line protocol format. + required: true + content: + text/plain: + schema: + type: string + format: byte + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: header + name: Content-Encoding + description: | + The value tells InfluxDB what compression is applied to the line protocol in the request payload. + To make an API request with a GZIP payload, send `Content-Encoding: gzip` as a request header. + schema: + type: string + description: 'The content coding. 
Use `gzip` for compressed data or `identity` for unmodified, uncompressed data.' + default: identity + enum: + - gzip + - identity + - in: header + name: Content-Type + description: The header value indicates the format of the data in the request body. + schema: + type: string + description: | + `text/plain` specifies line protocol. `UTF-8` is the default character set. + default: text/plain; charset=utf-8 + enum: + - text/plain + - text/plain; charset=utf-8 + - application/vnd.influx.arrow + - in: header + name: Content-Length + description: 'The header value indicates the size of the entity-body, in bytes, sent to the database. If the length is greater than the database''s `max body` configuration option, the server responds with status code `413`.' + schema: + type: integer + description: The length in decimal number of octets. + - in: header + name: Accept + description: The header value specifies the response format. + schema: + type: string + description: The response format for errors. + default: application/json + enum: + - application/json + - in: query + name: org + description: 'The parameter value specifies the destination organization for writes. The database writes all points in the batch to this organization. If you provide both `orgID` and `org` parameters, `org` takes precedence.' + required: true + schema: + type: string + description: Organization name or ID. + - in: query + name: orgID + description: 'The parameter value specifies the ID of the destination organization for writes. If both `orgID` and `org` are specified, `org` takes precedence.' + schema: + type: string + - in: query + name: bucket + description: The destination bucket for writes. + required: true + schema: + type: string + description: All points within batch are written to this bucket. + - in: query + name: precision + description: The precision for the unix timestamps within the body line-protocol. + schema: + $ref: '#/components/schemas/WritePrecision' + responses: + '204': + description: 'InfluxDB validated the request data format and accepted the data for writing to the bucket. `204` doesn''t indicate a successful write operation since writes are asynchronous. See [how to check for write errors](https://docs.influxdata.com/influxdb/v2.1/write-data/troubleshoot/).' + '400': + description: Bad request. The line protocol data in the request is malformed. The response body contains the first malformed line in the data. InfluxDB rejected the batch and did not write any data. + content: + application/json: + schema: + $ref: '#/components/schemas/LineProtocolError' + examples: + measurementSchemaFieldTypeConflict: + summary: Field type conflict thrown by an explicit bucket schema + value: + code: invalid + message: 'partial write error (2 written): unable to parse ''air_sensor,service=S1,sensor=L1 temperature="90.5",humidity=70.0 1632850122'': schema: field type for field "temperature" not permitted by schema; got String but expected Float' + '401': + description: | + Unauthorized. The error may indicate one of the following: + * The `Authorization: Token` header is missing or malformed. + * The API token value is missing from the header. + * The token does not have sufficient permissions to write to this organization and bucket. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tokenNotAuthorized: + summary: Token is not authorized to access the organization or resource + value: + code: unauthorized + message: unauthorized access + '404': + description: 'Not found. A requested resource was not found. The response body contains the requested resource type, e.g. `organization name` or `bucket`, and name.' + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + resource-not-found: + summary: Not found error + value: + code: not found + message: bucket "air_sensor" not found + '413': + description: | + The request payload is too large. InfluxDB rejected the batch and did not write any data. + #### InfluxDB Cloud: + - returns this error if the payload exceeds the 50MB size limit. + - returns `Content-Type: text/html` for this error. + + #### InfluxDB OSS: + - returns this error only if the [Go (golang) `ioutil.ReadAll()`](https://pkg.go.dev/io/ioutil#ReadAll) function raises an error. + - returns `Content-Type: application/json` for this error. + content: + application/json: + schema: + $ref: '#/components/schemas/LineProtocolLengthError' + examples: + dataExceedsSizeLimitOSS: + summary: InfluxDB OSS response + value: | + {"code":"request too large","message":"unable to read data: points batch is too large"} + text/html: + schema: + type: string + examples: + dataExceedsSizeLimit: + summary: InfluxDB Cloud response + value: | + + 413 Request Entity Too Large + +

+                    <center><h1>413 Request Entity Too Large</h1></center>
+                    <hr>
+                    <center>nginx</center>
+                    </body>
+                    </html>
    + + + '429': + description: InfluxDB Cloud only. The token is temporarily over quota. The Retry-After header describes when to try the write again. + headers: + Retry-After: + description: A non-negative decimal integer indicating the seconds to delay after the response is received. + schema: + type: integer + format: int32 + '500': + description: Internal server error. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + internalError: + summary: Internal error example + value: + code: internal error + '503': + description: The server is temporarily unavailable to accept writes. The `Retry-After` header describes when to try the write again. + headers: + Retry-After: + description: A non-negative decimal integer indicating the seconds to delay after the response is received. + schema: + type: integer + format: int32 + default: + $ref: '#/components/responses/ServerError' + /delete: + post: + operationId: PostDelete + tags: + - Delete + summary: Delete data + requestBody: + description: Deletes data from an InfluxDB bucket. + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DeletePredicateRequest' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: query + name: org + description: Specifies the organization to delete data from. + schema: + type: string + description: Only points from this organization are deleted. + - in: query + name: bucket + description: Specifies the bucket to delete data from. + schema: + type: string + description: Only points from this bucket are deleted. + - in: query + name: orgID + description: Specifies the organization ID of the resource. + schema: + type: string + - in: query + name: bucketID + description: Specifies the bucket ID to delete data from. + schema: + type: string + description: Only points from this bucket ID are deleted. + responses: + '204': + description: delete has been accepted + '400': + description: Invalid request. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '403': + description: no token was sent or does not have sufficient permissions. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '404': + description: the bucket or organization is not found. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /labels: + post: + operationId: PostLabels + tags: + - Labels + summary: Create a label + requestBody: + description: Label to create + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/LabelCreateRequest' + responses: + '201': + description: Added label + content: + application/json: + schema: + $ref: '#/components/schemas/LabelResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + get: + operationId: GetLabels + tags: + - Labels + summary: List all labels + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: query + name: orgID + description: The organization ID. 
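+  # The /write and /delete operations above are the data-plane calls most clients use. A minimal
+  # Go sketch (not part of the spec), using the influxdb-client-go/v2 client; the URL, token,
+  # org, bucket, and line-protocol values are placeholders, and the imports (context, time,
+  # influxdb2) are assumed:
+  #
+  #   client := influxdb2.NewClient("http://localhost:8086", "my-token")
+  #   defer client.Close()
+  #   // POST /api/v2/write: one line-protocol record
+  #   writeAPI := client.WriteAPIBlocking("my-org", "my-bucket")
+  #   _ = writeAPI.WriteRecord(context.Background(), "temperature,color=RED temp=68")
+  #   // POST /api/v2/delete: drop matching points from the last 24 hours
+  #   _ = client.DeleteAPI().DeleteWithName(context.Background(), "my-org", "my-bucket",
+  #       time.Now().AddDate(0, 0, -1), time.Now(), `_measurement="temperature"`)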
+ schema: + type: string + responses: + '200': + description: A list of labels + content: + application/json: + schema: + $ref: '#/components/schemas/LabelsResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/labels/{labelID}': + get: + operationId: GetLabelsID + tags: + - Labels + summary: Retrieve a label + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: labelID + schema: + type: string + required: true + description: The ID of the label to update. + responses: + '200': + description: A label + content: + application/json: + schema: + $ref: '#/components/schemas/LabelResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + patch: + operationId: PatchLabelsID + tags: + - Labels + summary: Update a label + requestBody: + description: Label update + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/LabelUpdate' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: labelID + schema: + type: string + required: true + description: The ID of the label to update. + responses: + '200': + description: Updated label + content: + application/json: + schema: + $ref: '#/components/schemas/LabelResponse' + '404': + description: Label not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + delete: + operationId: DeleteLabelsID + tags: + - Labels + summary: Delete a label + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: labelID + schema: + type: string + required: true + description: The ID of the label to delete. + responses: + '204': + description: Delete has been accepted + '404': + description: Label not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/dashboards/{dashboardID}': + get: + operationId: GetDashboardsID + tags: + - Dashboards + summary: Retrieve a Dashboard + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: dashboardID + schema: + type: string + required: true + description: The ID of the dashboard to update. 
+ - in: query + name: include + required: false + schema: + type: string + enum: + - properties + description: Includes the cell view properties in the response if set to `properties` + responses: + '200': + description: Retrieve a single dashboard + content: + application/json: + schema: + oneOf: + - $ref: '#/components/schemas/Dashboard' + - $ref: '#/components/schemas/DashboardWithViewProperties' + '404': + description: Dashboard not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + patch: + operationId: PatchDashboardsID + tags: + - Dashboards + summary: Update a dashboard + requestBody: + description: Patching of a dashboard + required: true + content: + application/json: + schema: + type: object + title: PatchDashboardRequest + properties: + name: + description: 'optional, when provided will replace the name' + type: string + description: + description: 'optional, when provided will replace the description' + type: string + cells: + description: 'optional, when provided will replace all existing cells with the cells provided' + $ref: '#/components/schemas/CellWithViewProperties' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: dashboardID + schema: + type: string + required: true + description: The ID of the dashboard to update. + responses: + '200': + description: Updated dashboard + content: + application/json: + schema: + $ref: '#/components/schemas/Dashboard' + '404': + description: Dashboard not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + delete: + operationId: DeleteDashboardsID + tags: + - Dashboards + summary: Delete a dashboard + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: dashboardID + schema: + type: string + required: true + description: The ID of the dashboard to update. + responses: + '204': + description: Delete has been accepted + '404': + description: Dashboard not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/dashboards/{dashboardID}/cells': + put: + operationId: PutDashboardsIDCells + tags: + - Cells + - Dashboards + summary: Replace cells in a dashboard + description: Replaces all cells in a dashboard. This is used primarily to update the positional information of all cells. + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/Cells' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: dashboardID + schema: + type: string + required: true + description: The ID of the dashboard to update. 
+ responses: + '201': + description: Replaced dashboard cells + content: + application/json: + schema: + $ref: '#/components/schemas/Dashboard' + '404': + description: Dashboard not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostDashboardsIDCells + tags: + - Cells + - Dashboards + summary: Create a dashboard cell + requestBody: + description: Cell that will be added + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/CreateCell' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: dashboardID + schema: + type: string + required: true + description: The ID of the dashboard to update. + responses: + '201': + description: Cell successfully added + content: + application/json: + schema: + $ref: '#/components/schemas/Cell' + '404': + description: Dashboard not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/dashboards/{dashboardID}/cells/{cellID}': + patch: + operationId: PatchDashboardsIDCellsID + tags: + - Cells + - Dashboards + summary: Update the non-positional information related to a cell + description: Updates the non positional information related to a cell. Updates to a single cell's positional data could cause grid conflicts. + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/CellUpdate' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: dashboardID + schema: + type: string + required: true + description: The ID of the dashboard to update. + - in: path + name: cellID + schema: + type: string + required: true + description: The ID of the cell to update. + responses: + '200': + description: Updated dashboard cell + content: + application/json: + schema: + $ref: '#/components/schemas/Cell' + '404': + description: Cell or dashboard not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + delete: + operationId: DeleteDashboardsIDCellsID + tags: + - Cells + - Dashboards + summary: Delete a dashboard cell + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: dashboardID + schema: + type: string + required: true + description: The ID of the dashboard to delete. + - in: path + name: cellID + schema: + type: string + required: true + description: The ID of the cell to delete. + responses: + '204': + description: Cell successfully deleted + '404': + description: Cell or dashboard not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/dashboards/{dashboardID}/cells/{cellID}/view': + get: + operationId: GetDashboardsIDCellsIDView + tags: + - Cells + - Dashboards + - Views + summary: Retrieve the view for a cell + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: dashboardID + schema: + type: string + required: true + description: The dashboard ID. + - in: path + name: cellID + schema: + type: string + required: true + description: The cell ID. 
+ responses: + '200': + description: A dashboard cells view + content: + application/json: + schema: + $ref: '#/components/schemas/View' + '404': + description: Cell or dashboard not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + patch: + operationId: PatchDashboardsIDCellsIDView + tags: + - Cells + - Dashboards + - Views + summary: Update the view for a cell + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/View' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: dashboardID + schema: + type: string + required: true + description: The ID of the dashboard to update. + - in: path + name: cellID + schema: + type: string + required: true + description: The ID of the cell to update. + responses: + '200': + description: Updated cell view + content: + application/json: + schema: + $ref: '#/components/schemas/View' + '404': + description: Cell or dashboard not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/dashboards/{dashboardID}/labels': + get: + operationId: GetDashboardsIDLabels + tags: + - Dashboards + summary: List all labels for a dashboard + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: dashboardID + schema: + type: string + required: true + description: The dashboard ID. + responses: + '200': + description: A list of all labels for a dashboard + content: + application/json: + schema: + $ref: '#/components/schemas/LabelsResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostDashboardsIDLabels + tags: + - Dashboards + summary: Add a label to a dashboard + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: dashboardID + schema: + type: string + required: true + description: The dashboard ID. + requestBody: + description: Label to add + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/LabelMapping' + responses: + '201': + description: The label added to the dashboard + content: + application/json: + schema: + $ref: '#/components/schemas/LabelResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/dashboards/{dashboardID}/labels/{labelID}': + delete: + operationId: DeleteDashboardsIDLabelsID + tags: + - Dashboards + summary: Delete a label from a dashboard + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: dashboardID + schema: + type: string + required: true + description: The dashboard ID. + - in: path + name: labelID + schema: + type: string + required: true + description: The ID of the label to delete. 
+ responses: + '204': + description: Delete has been accepted + '404': + description: Dashboard not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/dashboards/{dashboardID}/members': + get: + operationId: GetDashboardsIDMembers + tags: + - Dashboards + summary: List all dashboard members + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: dashboardID + schema: + type: string + required: true + description: The dashboard ID. + responses: + '200': + description: A list of users who have member privileges for a dashboard + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceMembers' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostDashboardsIDMembers + tags: + - Dashboards + summary: Add a member to a dashboard + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: dashboardID + schema: + type: string + required: true + description: The dashboard ID. + requestBody: + description: User to add as member + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/AddResourceMemberRequestBody' + responses: + '201': + description: Added to dashboard members + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceMember' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/dashboards/{dashboardID}/members/{userID}': + delete: + operationId: DeleteDashboardsIDMembersID + tags: + - Dashboards + summary: Remove a member from a dashboard + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: userID + schema: + type: string + required: true + description: The ID of the member to remove. + - in: path + name: dashboardID + schema: + type: string + required: true + description: The dashboard ID. + responses: + '204': + description: Member removed + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/dashboards/{dashboardID}/owners': + get: + operationId: GetDashboardsIDOwners + tags: + - Dashboards + summary: List all dashboard owners + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: dashboardID + schema: + type: string + required: true + description: The dashboard ID. + responses: + '200': + description: A list of users who have owner privileges for a dashboard + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceOwners' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostDashboardsIDOwners + tags: + - Dashboards + summary: Add an owner to a dashboard + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: dashboardID + schema: + type: string + required: true + description: The dashboard ID. 
+ requestBody: + description: User to add as owner + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/AddResourceMemberRequestBody' + responses: + '201': + description: Added to dashboard owners + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceOwner' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/dashboards/{dashboardID}/owners/{userID}': + delete: + operationId: DeleteDashboardsIDOwnersID + tags: + - Dashboards + summary: Remove an owner from a dashboard + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: userID + schema: + type: string + required: true + description: The ID of the owner to remove. + - in: path + name: dashboardID + schema: + type: string + required: true + description: The dashboard ID. + responses: + '204': + description: Owner removed + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /query/ast: + post: + operationId: PostQueryAst + description: Analyzes flux query and generates a query specification. + tags: + - Query + summary: Generate an Abstract Syntax Tree (AST) from a query + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: header + name: Content-Type + schema: + type: string + enum: + - application/json + requestBody: + description: Analyzed Flux query to generate abstract syntax tree. + content: + application/json: + schema: + $ref: '#/components/schemas/LanguageRequest' + responses: + '200': + description: Abstract syntax tree of the flux query. + content: + application/json: + schema: + $ref: '#/components/schemas/ASTResponse' + default: + description: Any response other than 200 is an internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /query/suggestions: + get: + operationId: GetQuerySuggestions + tags: + - Query + summary: Retrieve query suggestions + parameters: + - $ref: '#/components/parameters/TraceSpan' + responses: + '200': + description: Suggestions for next functions in call chain + content: + application/json: + schema: + $ref: '#/components/schemas/FluxSuggestions' + default: + description: Any response other than 200 is an internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/query/suggestions/{name}': + get: + operationId: GetQuerySuggestionsName + tags: + - Query + summary: Retrieve query suggestions for a branching suggestion + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: name + schema: + type: string + required: true + description: The name of the branching suggestion. + responses: + '200': + description: Suggestions for next functions in call chain + content: + application/json: + schema: + $ref: '#/components/schemas/FluxSuggestion' + default: + description: Any response other than 200 is an internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /query/analyze: + post: + operationId: PostQueryAnalyze + tags: + - Query + summary: Analyze a Flux query + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: header + name: Content-Type + schema: + type: string + enum: + - application/json + requestBody: + description: Flux query to analyze + content: + application/json: + schema: + $ref: '#/components/schemas/Query' + responses: + '200': + description: Query analyze results. 
Errors will be empty if the query is valid. + content: + application/json: + schema: + $ref: '#/components/schemas/AnalyzeQueryResponse' + default: + description: Internal server error + headers: + X-Influx-Error: + description: Error string describing the problem + schema: + type: string + X-Influx-Reference: + description: Reference code unique to the error type + schema: + type: integer + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /query: + post: + operationId: PostQuery + tags: + - Query + summary: Query data + description: | + Retrieves data from InfluxDB buckets. + + To query data, you need the following: + - **organization** – _See [View organizations](https://docs.influxdata.com/influxdb/v2.1/organizations/view-orgs/#view-your-organization-id) for instructions on viewing your organization ID._ + - **API token** – _See [View tokens](https://docs.influxdata.com/influxdb/v2.1/security/tokens/view-tokens/) + for instructions on viewing your API token._ + - **InfluxDB URL** – _See [InfluxDB URLs](https://docs.influxdata.com/influxdb/v2.1/reference/urls/)_. + - **Flux query** – _See [Flux](https://docs.influxdata.com/flux/v0.x/)._ + + For more information and examples, see [Query with the InfluxDB API](https://docs.influxdata.com/influxdb/v2.1/query-data/execute-queries/influx-api/). + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: header + name: Accept-Encoding + description: Indicates the content encoding (usually a compression algorithm) that the client can understand. + schema: + type: string + description: 'The content coding. Use `gzip` for compressed data or `identity` for unmodified, uncompressed data.' + default: identity + enum: + - gzip + - identity + - in: header + name: Content-Type + schema: + type: string + enum: + - application/json + - application/vnd.flux + - in: query + name: org + description: 'Specifies the name of the organization executing the query. Takes either the ID or Name. If both `orgID` and `org` are specified, `org` takes precedence.' + schema: + type: string + - in: query + name: orgID + description: 'Specifies the ID of the organization executing the query. If both `orgID` and `org` are specified, `org` takes precedence.' + schema: + type: string + requestBody: + description: Flux query or specification to execute + content: + application/json: + schema: + $ref: '#/components/schemas/Query' + application/vnd.flux: + schema: + type: string + example: | + from(bucket: "example-bucket") + |> range(start: -5m) + |> filter(fn: (r) => r._measurement == "example-measurement") + responses: + '200': + description: Success. Returns query results. + headers: + Content-Encoding: + description: Lists any encodings (usually compression algorithms) that have been applied to the response payload. + schema: + type: string + description: | + The content coding: `gzip` for compressed data or `identity` for unmodified, uncompressed data. + default: identity + enum: + - gzip + - identity + Trace-Id: + description: 'The Trace-Id header reports the request''s trace ID, if one was generated.' + schema: + type: string + description: Specifies the request's trace ID. 
+ content: + text/csv: + schema: + type: string + example: | + result,table,_start,_stop,_time,region,host,_value mean,0,2018-05-08T20:50:00Z,2018-05-08T20:51:00Z,2018-05-08T20:50:00Z,east,A,15.43 mean,0,2018-05-08T20:50:00Z,2018-05-08T20:51:00Z,2018-05-08T20:50:20Z,east,B,59.25 mean,0,2018-05-08T20:50:00Z,2018-05-08T20:51:00Z,2018-05-08T20:50:40Z,east,C,52.62 + application/vnd.influx.arrow: + schema: + type: string + format: binary + '429': + description: Token is temporarily over quota. The Retry-After header describes when to try the read again. + headers: + Retry-After: + description: A non-negative decimal integer indicating the seconds to delay after the response is received. + schema: + type: integer + format: int32 + default: + description: Error processing query + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /buckets: + get: + operationId: GetBuckets + tags: + - Buckets + summary: List all buckets + parameters: + - $ref: '#/components/parameters/TraceSpan' + - $ref: '#/components/parameters/Offset' + - $ref: '#/components/parameters/Limit' + - $ref: '#/components/parameters/After' + - in: query + name: org + description: The name of the organization. + schema: + type: string + - in: query + name: orgID + description: The organization ID. + schema: + type: string + - in: query + name: name + description: Only returns buckets with a specific name. + schema: + type: string + - in: query + name: id + description: Only returns buckets with a specific ID. + schema: + type: string + responses: + '200': + description: A list of buckets + content: + application/json: + schema: + $ref: '#/components/schemas/Buckets' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostBuckets + tags: + - Buckets + summary: Create a bucket + parameters: + - $ref: '#/components/parameters/TraceSpan' + requestBody: + description: Bucket to create + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PostBucketRequest' + responses: + '201': + description: Bucket created + content: + application/json: + schema: + $ref: '#/components/schemas/Bucket' + '422': + description: Request body failed validation + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/buckets/{bucketID}': + get: + operationId: GetBucketsID + tags: + - Buckets + summary: Retrieve a bucket + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: bucketID + schema: + type: string + required: true + description: The bucket ID. + responses: + '200': + description: Bucket details + content: + application/json: + schema: + $ref: '#/components/schemas/Bucket' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + patch: + operationId: PatchBucketsID + tags: + - Buckets + summary: Update a bucket + requestBody: + description: Bucket update to apply + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PatchBucketRequest' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: bucketID + schema: + type: string + required: true + description: The bucket ID. 
+ responses: + '200': + description: An updated bucket + content: + application/json: + schema: + $ref: '#/components/schemas/Bucket' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + delete: + operationId: DeleteBucketsID + tags: + - Buckets + summary: Delete a bucket + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: bucketID + schema: + type: string + required: true + description: The ID of the bucket to delete. + responses: + '204': + description: Delete has been accepted + '404': + description: Bucket not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/buckets/{bucketID}/labels': + get: + operationId: GetBucketsIDLabels + tags: + - Buckets + summary: List all labels for a bucket + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: bucketID + schema: + type: string + required: true + description: The bucket ID. + responses: + '200': + description: A list of all labels for a bucket + content: + application/json: + schema: + $ref: '#/components/schemas/LabelsResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostBucketsIDLabels + tags: + - Buckets + summary: Add a label to a bucket + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: bucketID + schema: + type: string + required: true + description: The bucket ID. + requestBody: + description: Label to add + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/LabelMapping' + responses: + '201': + description: The newly added label + content: + application/json: + schema: + $ref: '#/components/schemas/LabelResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/buckets/{bucketID}/labels/{labelID}': + delete: + operationId: DeleteBucketsIDLabelsID + tags: + - Buckets + summary: Delete a label from a bucket + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: bucketID + schema: + type: string + required: true + description: The bucket ID. + - in: path + name: labelID + schema: + type: string + required: true + description: The ID of the label to delete. + responses: + '204': + description: Delete has been accepted + '404': + description: Bucket not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/buckets/{bucketID}/members': + get: + operationId: GetBucketsIDMembers + tags: + - Buckets + summary: List all users with member privileges for a bucket + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: bucketID + schema: + type: string + required: true + description: The bucket ID. 
+ responses: + '200': + description: A list of bucket members + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceMembers' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostBucketsIDMembers + tags: + - Buckets + summary: Add a member to a bucket + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: bucketID + schema: + type: string + required: true + description: The bucket ID. + requestBody: + description: User to add as member + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/AddResourceMemberRequestBody' + responses: + '201': + description: Member added to bucket + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceMember' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/buckets/{bucketID}/members/{userID}': + delete: + operationId: DeleteBucketsIDMembersID + tags: + - Buckets + summary: Remove a member from a bucket + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: userID + schema: + type: string + required: true + description: The ID of the member to remove. + - in: path + name: bucketID + schema: + type: string + required: true + description: The bucket ID. + responses: + '204': + description: Member removed + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/buckets/{bucketID}/owners': + get: + operationId: GetBucketsIDOwners + tags: + - Buckets + summary: List all owners of a bucket + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: bucketID + schema: + type: string + required: true + description: The bucket ID. + responses: + '200': + description: A list of bucket owners + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceOwners' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostBucketsIDOwners + tags: + - Buckets + summary: Add an owner to a bucket + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: bucketID + schema: + type: string + required: true + description: The bucket ID. + requestBody: + description: User to add as owner + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/AddResourceMemberRequestBody' + responses: + '201': + description: Bucket owner added + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceOwner' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/buckets/{bucketID}/owners/{userID}': + delete: + operationId: DeleteBucketsIDOwnersID + tags: + - Buckets + summary: Remove an owner from a bucket + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: userID + schema: + type: string + required: true + description: The ID of the owner to remove. + - in: path + name: bucketID + schema: + type: string + required: true + description: The bucket ID. 
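+  # A minimal Go sketch (not part of the spec) for the bucket lookups above; FindBucketByName is
+  # assumed to be available on the influxdb-client-go/v2 BucketsAPI, and the bucket name is a
+  # placeholder:
+  #
+  #   bucket, err := client.BucketsAPI().FindBucketByName(context.Background(), "tilty")
+  #   if err == nil && bucket.Id != nil {
+  #       fmt.Println(*bucket.Id, bucket.Name)
+  #   }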
+ responses: + '204': + description: Owner removed + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /orgs: + get: + operationId: GetOrgs + tags: + - Organizations + summary: List all organizations + parameters: + - $ref: '#/components/parameters/TraceSpan' + - $ref: '#/components/parameters/Offset' + - $ref: '#/components/parameters/Limit' + - $ref: '#/components/parameters/Descending' + - in: query + name: org + schema: + type: string + description: Filter organizations to a specific organization name. + - in: query + name: orgID + schema: + type: string + description: Filter organizations to a specific organization ID. + - in: query + name: userID + schema: + type: string + description: Filter organizations to a specific user ID. + responses: + '200': + description: A list of organizations + content: + application/json: + schema: + $ref: '#/components/schemas/Organizations' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostOrgs + tags: + - Organizations + summary: Create an organization + parameters: + - $ref: '#/components/parameters/TraceSpan' + requestBody: + description: Organization to create + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PostOrganizationRequest' + responses: + '201': + description: Organization created + content: + application/json: + schema: + $ref: '#/components/schemas/Organization' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/orgs/{orgID}': + get: + operationId: GetOrgsID + tags: + - Organizations + summary: Retrieve an organization + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: orgID + schema: + type: string + required: true + description: The ID of the organization to get. + responses: + '200': + description: Organization details + content: + application/json: + schema: + $ref: '#/components/schemas/Organization' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + patch: + operationId: PatchOrgsID + tags: + - Organizations + summary: Update an organization + requestBody: + description: Organization update to apply + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PatchOrganizationRequest' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: orgID + schema: + type: string + required: true + description: The ID of the organization to get. + responses: + '200': + description: Organization updated + content: + application/json: + schema: + $ref: '#/components/schemas/Organization' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + delete: + operationId: DeleteOrgsID + tags: + - Organizations + summary: Delete an organization + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: orgID + schema: + type: string + required: true + description: The ID of the organization to delete. 
+ responses: + '204': + description: Delete has been accepted + '404': + description: Organization not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/orgs/{orgID}/secrets': + get: + operationId: GetOrgsIDSecrets + tags: + - Secrets + summary: List all secret keys for an organization + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: orgID + schema: + type: string + required: true + description: The organization ID. + responses: + '200': + description: A list of all secret keys + content: + application/json: + schema: + $ref: '#/components/schemas/SecretKeysResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + patch: + operationId: PatchOrgsIDSecrets + tags: + - Secrets + summary: Update secrets in an organization + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: orgID + schema: + type: string + required: true + description: The organization ID. + requestBody: + description: Secret key value pairs to update/add + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/Secrets' + responses: + '204': + description: Keys successfully patched + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/orgs/{orgID}/members': + get: + operationId: GetOrgsIDMembers + tags: + - Organizations + summary: List all members of an organization + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: orgID + schema: + type: string + required: true + description: The organization ID. + responses: + '200': + description: A list of organization members + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceMembers' + '404': + description: Organization not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostOrgsIDMembers + tags: + - Organizations + summary: Add a member to an organization + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: orgID + schema: + type: string + required: true + description: The organization ID. + requestBody: + description: User to add as member + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/AddResourceMemberRequestBody' + responses: + '201': + description: Added to organization created + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceMember' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/orgs/{orgID}/members/{userID}': + delete: + operationId: DeleteOrgsIDMembersID + tags: + - Organizations + summary: Remove a member from an organization + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: userID + schema: + type: string + required: true + description: The ID of the member to remove. + - in: path + name: orgID + schema: + type: string + required: true + description: The organization ID. 
+ responses: + '204': + description: Member removed + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/orgs/{orgID}/owners': + get: + operationId: GetOrgsIDOwners + tags: + - Organizations + summary: List all owners of an organization + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: orgID + schema: + type: string + required: true + description: The organization ID. + responses: + '200': + description: A list of organization owners + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceOwners' + '404': + description: Organization not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostOrgsIDOwners + tags: + - Organizations + summary: Add an owner to an organization + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: orgID + schema: + type: string + required: true + description: The organization ID. + requestBody: + description: User to add as owner + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/AddResourceMemberRequestBody' + responses: + '201': + description: Organization owner added + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceOwner' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/orgs/{orgID}/owners/{userID}': + delete: + operationId: DeleteOrgsIDOwnersID + tags: + - Organizations + summary: Remove an owner from an organization + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: userID + schema: + type: string + required: true + description: The ID of the owner to remove. + - in: path + name: orgID + schema: + type: string + required: true + description: The organization ID. + responses: + '204': + description: Owner removed + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/orgs/{orgID}/secrets/delete': + post: + deprecated: true + operationId: PostOrgsIDSecrets + tags: + - Secrets + summary: Delete secrets from an organization + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: orgID + schema: + type: string + required: true + description: The organization ID. + requestBody: + description: Secret key to delete + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/SecretKeys' + responses: + '204': + description: Keys successfully patched + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/orgs/{orgID}/secrets/{secretID}': + delete: + operationId: DeleteOrgsIDSecretsID + tags: + - Secrets + summary: Delete a secret from an organization + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: orgID + schema: + type: string + required: true + description: The organization ID. + - in: path + name: secretID + schema: + type: string + required: true + description: The secret ID. 
+ responses: + '204': + description: Keys successfully deleted + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + /resources: + get: + operationId: GetResources + tags: + - Resources + summary: List all known resources + parameters: + - $ref: '#/components/parameters/TraceSpan' + responses: + '200': + description: All resources targets + content: + application/json: + schema: + type: array + items: + type: string + default: + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /stacks: + get: + operationId: ListStacks + tags: + - Templates + summary: List installed templates + parameters: + - in: query + name: orgID + required: true + schema: + type: string + description: The organization ID of the stacks + - in: query + name: name + schema: + type: string + description: A collection of names to filter the list by. + - in: query + name: stackID + schema: + type: string + description: A collection of stackIDs to filter the list by. + responses: + '200': + description: Success. Returns the list of stacks. + content: + application/json: + schema: + type: object + properties: + stacks: + type: array + items: + $ref: '#/components/schemas/Stack' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: CreateStack + tags: + - Templates + summary: Create a new stack + requestBody: + description: The stack to create. + required: true + content: + application/json: + schema: + type: object + title: PostStackRequest + properties: + orgID: + type: string + name: + type: string + description: + type: string + urls: + type: array + items: + type: string + responses: + '201': + description: Success. Returns the newly created stack. + content: + application/json: + schema: + $ref: '#/components/schemas/Stack' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/stacks/{stack_id}': + get: + operationId: ReadStack + tags: + - Templates + summary: Retrieve a stack + parameters: + - in: path + name: stack_id + required: true + schema: + type: string + description: The identifier of the stack. + responses: + '200': + description: Returns the stack. + content: + application/json: + schema: + $ref: '#/components/schemas/Stack' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + patch: + operationId: UpdateStack + tags: + - Templates + summary: Update a stack + parameters: + - in: path + name: stack_id + required: true + schema: + type: string + description: The identifier of the stack. + requestBody: + description: The stack to update. + required: true + content: + application/json: + schema: + type: object + title: PatchStackRequest + properties: + name: + type: string + nullable: true + description: + type: string + nullable: true + templateURLs: + type: array + items: + type: string + nullable: true + additionalResources: + type: array + items: + type: object + properties: + resourceID: + type: string + kind: + type: string + templateMetaName: + type: string + required: + - kind + - resourceID + responses: + '200': + description: Returns the updated stack. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/Stack' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + delete: + operationId: DeleteStack + tags: + - Templates + summary: Delete a stack and associated resources + parameters: + - in: path + name: stack_id + required: true + schema: + type: string + description: The identifier of the stack. + - in: query + name: orgID + required: true + schema: + type: string + description: The identifier of the organization. + responses: + '204': + description: The stack and its associated resources were deleted. + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/stacks/{stack_id}/uninstall': + post: + operationId: UninstallStack + tags: + - Templates + summary: Uninstall a stack + parameters: + - in: path + name: stack_id + required: true + schema: + type: string + description: The identifier of the stack. + responses: + '200': + description: Returns the uninstalled stack. + content: + application/json: + schema: + $ref: '#/components/schemas/Stack' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /templates/apply: + post: + operationId: ApplyTemplate + tags: + - Templates + summary: Apply or dry-run a template + description: Applies or performs a dry-run of a template in an organization. + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateApply' + application/x-jsonnet: + schema: + $ref: '#/components/schemas/TemplateApply' + text/yml: + schema: + $ref: '#/components/schemas/TemplateApply' + responses: + '200': + description: | + Success. The package dry-run succeeded. No new resources were created. Returns a diff and summary of the dry-run. The diff and summary won't contain IDs for resources that didn't exist at the time of the dry-run. This corresponds to `"dryRun": true`. + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateSummary' + '201': + description: | + Success. The package applied successfully. Returns a diff and summary of the run. The summary contains newly created resources. The diff compares the initial state to the state after the package applied. This corresponds to `"dryRun": false`. + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateSummary' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /templates/export: + post: + operationId: ExportTemplate + tags: + - Templates + summary: Export a new template + requestBody: + description: Export resources as an InfluxDB template. + required: false + content: + application/json: + schema: + oneOf: + - $ref: '#/components/schemas/TemplateExportByID' + - $ref: '#/components/schemas/TemplateExportByName' + responses: + '200': + description: The template was created successfully. Returns the newly created template.
+ content: + application/json: + schema: + $ref: '#/components/schemas/Template' + application/x-yaml: + schema: + $ref: '#/components/schemas/Template' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/tasks/{taskID}': + get: + operationId: GetTasksID + tags: + - Tasks + summary: Retrieve a task + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: taskID + schema: + type: string + required: true + description: The task ID. + responses: + '200': + description: Task details + content: + application/json: + schema: + $ref: '#/components/schemas/Task' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + patch: + operationId: PatchTasksID + tags: + - Tasks + summary: Update a task + description: Update a task. This will cancel all queued runs. + requestBody: + description: Task update to apply + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/TaskUpdateRequest' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: taskID + schema: + type: string + required: true + description: The task ID. + responses: + '200': + description: Task updated + content: + application/json: + schema: + $ref: '#/components/schemas/Task' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + delete: + operationId: DeleteTasksID + tags: + - Tasks + summary: Delete a task + description: Deletes a task and all associated records + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: taskID + schema: + type: string + required: true + description: The ID of the task to delete. + responses: + '204': + description: Task deleted + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/tasks/{taskID}/runs': + get: + operationId: GetTasksIDRuns + tags: + - Tasks + summary: List runs for a task + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: taskID + schema: + type: string + required: true + description: The ID of the task to get runs for. + - in: query + name: after + schema: + type: string + description: Returns runs after a specific ID. 
+ - in: query + name: limit + schema: + type: integer + minimum: 1 + maximum: 500 + default: 100 + description: The number of runs to return + - in: query + name: afterTime + schema: + type: string + format: date-time + description: 'Filter runs to those scheduled after this time, RFC3339' + - in: query + name: beforeTime + schema: + type: string + format: date-time + description: 'Filter runs to those scheduled before this time, RFC3339' + responses: + '200': + description: A list of task runs + content: + application/json: + schema: + $ref: '#/components/schemas/Runs' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostTasksIDRuns + tags: + - Tasks + summary: 'Manually start a task run, overriding the current schedule' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: taskID + schema: + type: string + required: true + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/RunManually' + responses: + '201': + description: Run scheduled to start + content: + application/json: + schema: + $ref: '#/components/schemas/Run' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/tasks/{taskID}/runs/{runID}': + get: + operationId: GetTasksIDRunsID + tags: + - Tasks + summary: Retrieve a single run for a task + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: taskID + schema: + type: string + required: true + description: The task ID. + - in: path + name: runID + schema: + type: string + required: true + description: The run ID. + responses: + '200': + description: The run record + content: + application/json: + schema: + $ref: '#/components/schemas/Run' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + delete: + operationId: DeleteTasksIDRunsID + tags: + - Tasks + summary: Cancel a running task + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: taskID + schema: + type: string + required: true + description: The task ID. + - in: path + name: runID + schema: + type: string + required: true + description: The run ID. + responses: + '204': + description: Delete has been accepted + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/tasks/{taskID}/runs/{runID}/retry': + post: + operationId: PostTasksIDRunsIDRetry + tags: + - Tasks + summary: Retry a task run + requestBody: + content: + application/json; charset=utf-8: + schema: + type: object + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: taskID + schema: + type: string + required: true + description: The task ID. + - in: path + name: runID + schema: + type: string + required: true + description: The run ID. + responses: + '200': + description: Run that has been queued + content: + application/json: + schema: + $ref: '#/components/schemas/Run' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/tasks/{taskID}/logs': + get: + operationId: GetTasksIDLogs + tags: + - Tasks + summary: Retrieve all logs for a task + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: taskID + schema: + type: string + required: true + description: The task ID. 
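+        # Illustrative example (placeholder IDs and token): manually start a run for
+        # a task outside its regular schedule; an empty JSON body is sufficient here.
+        #   curl -s -X POST "http://localhost:8086/api/v2/tasks/09a776832f381000/runs" \
+        #     -H "Authorization: Token $INFLUX_TOKEN" \
+        #     -H "Content-Type: application/json" -d '{}'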
+ responses: + '200': + description: All logs for a task + content: + application/json: + schema: + $ref: '#/components/schemas/Logs' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/tasks/{taskID}/runs/{runID}/logs': + get: + operationId: GetTasksIDRunsIDLogs + tags: + - Tasks + summary: Retrieve all logs for a run + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: taskID + schema: + type: string + required: true + description: ID of task to get logs for. + - in: path + name: runID + schema: + type: string + required: true + description: ID of run to get logs for. + responses: + '200': + description: All logs for a run + content: + application/json: + schema: + $ref: '#/components/schemas/Logs' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/tasks/{taskID}/labels': + get: + operationId: GetTasksIDLabels + tags: + - Tasks + summary: List all labels for a task + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: taskID + schema: + type: string + required: true + description: The task ID. + responses: + '200': + description: A list of all labels for a task + content: + application/json: + schema: + $ref: '#/components/schemas/LabelsResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostTasksIDLabels + tags: + - Tasks + summary: Add a label to a task + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: taskID + schema: + type: string + required: true + description: The task ID. + requestBody: + description: Label to add + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/LabelMapping' + responses: + '201': + description: A list of all labels for a task + content: + application/json: + schema: + $ref: '#/components/schemas/LabelResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/tasks/{taskID}/labels/{labelID}': + delete: + operationId: DeleteTasksIDLabelsID + tags: + - Tasks + summary: Delete a label from a task + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: taskID + schema: + type: string + required: true + description: The task ID. + - in: path + name: labelID + schema: + type: string + required: true + description: The label ID. 
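+        # Illustrative example (placeholder IDs and token): attach an existing label
+        # to a task by posting a LabelMapping body.
+        #   curl -s -X POST "http://localhost:8086/api/v2/tasks/09a776832f381000/labels" \
+        #     -H "Authorization: Token $INFLUX_TOKEN" \
+        #     -H "Content-Type: application/json" \
+        #     -d '{"labelID": "0a1b2c3d4e5f6071"}'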
+ responses: + '204': + description: Delete has been accepted + '404': + description: Task not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /flags: + get: + operationId: GetFlags + tags: + - Users + summary: Return the feature flags for the currently authenticated user + parameters: + - $ref: '#/components/parameters/TraceSpan' + responses: + '200': + description: Feature flags for the currently authenticated user + content: + application/json: + schema: + $ref: '#/components/schemas/Flags' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /me: + get: + operationId: GetMe + tags: + - Users + summary: Retrieve the currently authenticated user + parameters: + - $ref: '#/components/parameters/TraceSpan' + responses: + '200': + description: The currently authenticated user. + content: + application/json: + schema: + $ref: '#/components/schemas/UserResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /me/password: + put: + operationId: PutMePassword + tags: + - Users + summary: Update a password + security: + - BasicAuthentication: [] + parameters: + - $ref: '#/components/parameters/TraceSpan' + requestBody: + description: New password + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PasswordResetBody' + responses: + '204': + description: Password successfully updated + default: + description: Unsuccessful authentication + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/tasks/{taskID}/members': + get: + operationId: GetTasksIDMembers + tags: + - Tasks + summary: List all task members + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: taskID + schema: + type: string + required: true + description: The task ID. + responses: + '200': + description: A list of users who have member privileges for a task + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceMembers' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostTasksIDMembers + tags: + - Tasks + summary: Add a member to a task + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: taskID + schema: + type: string + required: true + description: The task ID. + requestBody: + description: User to add as member + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/AddResourceMemberRequestBody' + responses: + '201': + description: Added to task members + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceMember' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/tasks/{taskID}/members/{userID}': + delete: + operationId: DeleteTasksIDMembersID + tags: + - Tasks + summary: Remove a member from a task + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: userID + schema: + type: string + required: true + description: The ID of the member to remove. + - in: path + name: taskID + schema: + type: string + required: true + description: The task ID. 
+ responses: + '204': + description: Member removed + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/tasks/{taskID}/owners': + get: + operationId: GetTasksIDOwners + tags: + - Tasks + summary: List all owners of a task + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: taskID + schema: + type: string + required: true + description: The task ID. + responses: + '200': + description: A list of users who have owner privileges for a task + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceOwners' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostTasksIDOwners + tags: + - Tasks + summary: Add an owner to a task + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: taskID + schema: + type: string + required: true + description: The task ID. + requestBody: + description: User to add as owner + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/AddResourceMemberRequestBody' + responses: + '201': + description: Added to task owners + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceOwner' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/tasks/{taskID}/owners/{userID}': + delete: + operationId: DeleteTasksIDOwnersID + tags: + - Tasks + summary: Remove an owner from a task + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: userID + schema: + type: string + required: true + description: The ID of the owner to remove. + - in: path + name: taskID + schema: + type: string + required: true + description: The task ID. + responses: + '204': + description: Owner removed + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/users/{userID}/password': + post: + operationId: PostUsersIDPassword + tags: + - Users + summary: Update a password + security: + - BasicAuthentication: [] + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: userID + schema: + type: string + required: true + description: The user ID. + requestBody: + description: New password + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PasswordResetBody' + responses: + '204': + description: Password successfully updated + default: + description: Unsuccessful authentication + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /checks: + get: + operationId: GetChecks + tags: + - Checks + summary: List all checks + parameters: + - $ref: '#/components/parameters/TraceSpan' + - $ref: '#/components/parameters/Offset' + - $ref: '#/components/parameters/Limit' + - in: query + name: orgID + required: true + description: Only show checks that belong to a specific organization ID. 
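+          # Illustrative example (placeholder org ID and token): list the checks that
+          # belong to one organization.
+          #   curl -s "http://localhost:8086/api/v2/checks?orgID=48c88459ee424a04" \
+          #     -H "Authorization: Token $INFLUX_TOKEN"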
+ schema: + type: string + responses: + '200': + description: A list of checks + content: + application/json: + schema: + $ref: '#/components/schemas/Checks' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: CreateCheck + tags: + - Checks + summary: Add new check + requestBody: + description: Check to create + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PostCheck' + responses: + '201': + description: Check created + content: + application/json: + schema: + $ref: '#/components/schemas/Check' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/checks/{checkID}': + get: + operationId: GetChecksID + tags: + - Checks + summary: Retrieve a check + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: checkID + schema: + type: string + required: true + description: The check ID. + responses: + '200': + description: The check requested + content: + application/json: + schema: + $ref: '#/components/schemas/Check' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + put: + operationId: PutChecksID + tags: + - Checks + summary: Update a check + requestBody: + description: Check update to apply + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/Check' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: checkID + schema: + type: string + required: true + description: The check ID. + responses: + '200': + description: An updated check + content: + application/json: + schema: + $ref: '#/components/schemas/Check' + '404': + description: The check was not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + patch: + operationId: PatchChecksID + tags: + - Checks + summary: Update a check + requestBody: + description: Check update to apply + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/CheckPatch' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: checkID + schema: + type: string + required: true + description: The check ID. + responses: + '200': + description: An updated check + content: + application/json: + schema: + $ref: '#/components/schemas/Check' + '404': + description: The check was not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + delete: + operationId: DeleteChecksID + tags: + - Checks + summary: Delete a check + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: checkID + schema: + type: string + required: true + description: The check ID. 
+ responses: + '204': + description: Delete has been accepted + '404': + description: The check was not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/checks/{checkID}/labels': + get: + operationId: GetChecksIDLabels + tags: + - Checks + summary: List all labels for a check + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: checkID + schema: + type: string + required: true + description: The check ID. + responses: + '200': + description: A list of all labels for a check + content: + application/json: + schema: + $ref: '#/components/schemas/LabelsResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostChecksIDLabels + tags: + - Checks + summary: Add a label to a check + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: checkID + schema: + type: string + required: true + description: The check ID. + requestBody: + description: Label to add + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/LabelMapping' + responses: + '201': + description: The label was added to the check + content: + application/json: + schema: + $ref: '#/components/schemas/LabelResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/checks/{checkID}/labels/{labelID}': + delete: + operationId: DeleteChecksIDLabelsID + tags: + - Checks + summary: Delete label from a check + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: checkID + schema: + type: string + required: true + description: The check ID. + - in: path + name: labelID + schema: + type: string + required: true + description: The ID of the label to delete. + responses: + '204': + description: Delete has been accepted + '404': + description: Check or label not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /notificationRules: + get: + operationId: GetNotificationRules + tags: + - NotificationRules + summary: List all notification rules + parameters: + - $ref: '#/components/parameters/TraceSpan' + - $ref: '#/components/parameters/Offset' + - $ref: '#/components/parameters/Limit' + - in: query + name: orgID + required: true + description: Only show notification rules that belong to a specific organization ID. + schema: + type: string + - in: query + name: checkID + description: Only show notifications that belong to the specific check ID. + schema: + type: string + - in: query + name: tag + description: Only return notification rules that "would match" statuses which contain the tag key value pairs provided. 
+ schema: + type: string + pattern: '^[a-zA-Z0-9_]+:[a-zA-Z0-9_]+$' + example: 'env:prod' + responses: + '200': + description: A list of notification rules + content: + application/json: + schema: + $ref: '#/components/schemas/NotificationRules' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: CreateNotificationRule + tags: + - NotificationRules + summary: Add a notification rule + requestBody: + description: Notification rule to create + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PostNotificationRule' + responses: + '201': + description: Notification rule created + content: + application/json: + schema: + $ref: '#/components/schemas/NotificationRule' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/checks/{checkID}/query': + get: + operationId: GetChecksIDQuery + tags: + - Checks + summary: Retrieve a check query + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: checkID + schema: + type: string + required: true + description: The check ID. + responses: + '200': + description: The check query requested + content: + application/json: + schema: + $ref: '#/components/schemas/FluxResponse' + '400': + description: Invalid request + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '404': + description: Check not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/notificationRules/{ruleID}': + get: + operationId: GetNotificationRulesID + tags: + - NotificationRules + summary: Retrieve a notification rule + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: ruleID + schema: + type: string + required: true + description: The notification rule ID. + responses: + '200': + description: The notification rule requested + content: + application/json: + schema: + $ref: '#/components/schemas/NotificationRule' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + put: + operationId: PutNotificationRulesID + tags: + - NotificationRules + summary: Update a notification rule + requestBody: + description: Notification rule update to apply + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/NotificationRule' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: ruleID + schema: + type: string + required: true + description: The notification rule ID. 
+ responses: + '200': + description: An updated notification rule + content: + application/json: + schema: + $ref: '#/components/schemas/NotificationRule' + '404': + description: The notification rule was not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + patch: + operationId: PatchNotificationRulesID + tags: + - NotificationRules + summary: Update a notification rule + requestBody: + description: Notification rule update to apply + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/NotificationRuleUpdate' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: ruleID + schema: + type: string + required: true + description: The notification rule ID. + responses: + '200': + description: An updated notification rule + content: + application/json: + schema: + $ref: '#/components/schemas/NotificationRule' + '404': + description: The notification rule was not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + delete: + operationId: DeleteNotificationRulesID + tags: + - NotificationRules + summary: Delete a notification rule + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: ruleID + schema: + type: string + required: true + description: The notification rule ID. + responses: + '204': + description: Delete has been accepted + '404': + description: The check was not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/notificationRules/{ruleID}/labels': + get: + operationId: GetNotificationRulesIDLabels + tags: + - NotificationRules + summary: List all labels for a notification rule + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: ruleID + schema: + type: string + required: true + description: The notification rule ID. + responses: + '200': + description: A list of all labels for a notification rule + content: + application/json: + schema: + $ref: '#/components/schemas/LabelsResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostNotificationRuleIDLabels + tags: + - NotificationRules + summary: Add a label to a notification rule + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: ruleID + schema: + type: string + required: true + description: The notification rule ID. 
+ requestBody: + description: Label to add + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/LabelMapping' + responses: + '201': + description: The label was added to the notification rule + content: + application/json: + schema: + $ref: '#/components/schemas/LabelResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/notificationRules/{ruleID}/labels/{labelID}': + delete: + operationId: DeleteNotificationRulesIDLabelsID + tags: + - NotificationRules + summary: Delete label from a notification rule + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: ruleID + schema: + type: string + required: true + description: The notification rule ID. + - in: path + name: labelID + schema: + type: string + required: true + description: The ID of the label to delete. + responses: + '204': + description: Delete has been accepted + '404': + description: Rule or label not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/notificationRules/{ruleID}/query': + get: + operationId: GetNotificationRulesIDQuery + tags: + - Rules + summary: Retrieve a notification rule query + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: ruleID + schema: + type: string + required: true + description: The notification rule ID. + responses: + '200': + description: The notification rule query requested + content: + application/json: + schema: + $ref: '#/components/schemas/FluxResponse' + '400': + description: Invalid request + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '404': + description: Notification rule not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /notificationEndpoints: + get: + operationId: GetNotificationEndpoints + tags: + - NotificationEndpoints + summary: List all notification endpoints + parameters: + - $ref: '#/components/parameters/TraceSpan' + - $ref: '#/components/parameters/Offset' + - $ref: '#/components/parameters/Limit' + - in: query + name: orgID + required: true + description: Only show notification endpoints that belong to specific organization ID. 
+ schema: + type: string + responses: + '200': + description: A list of notification endpoints + content: + application/json: + schema: + $ref: '#/components/schemas/NotificationEndpoints' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: CreateNotificationEndpoint + tags: + - NotificationEndpoints + summary: Add a notification endpoint + requestBody: + description: Notification endpoint to create + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PostNotificationEndpoint' + responses: + '201': + description: Notification endpoint created + content: + application/json: + schema: + $ref: '#/components/schemas/NotificationEndpoint' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/notificationEndpoints/{endpointID}': + get: + operationId: GetNotificationEndpointsID + tags: + - NotificationEndpoints + summary: Retrieve a notification endpoint + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: endpointID + schema: + type: string + required: true + description: The notification endpoint ID. + responses: + '200': + description: The notification endpoint requested + content: + application/json: + schema: + $ref: '#/components/schemas/NotificationEndpoint' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + put: + operationId: PutNotificationEndpointsID + tags: + - NotificationEndpoints + summary: Update a notification endpoint + requestBody: + description: A new notification endpoint to replace the existing endpoint with + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/NotificationEndpoint' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: endpointID + schema: + type: string + required: true + description: The notification endpoint ID. + responses: + '200': + description: An updated notification endpoint + content: + application/json: + schema: + $ref: '#/components/schemas/NotificationEndpoint' + '404': + description: The notification endpoint was not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + patch: + operationId: PatchNotificationEndpointsID + tags: + - NotificationEndpoints + summary: Update a notification endpoint + requestBody: + description: Check update to apply + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/NotificationEndpointUpdate' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: endpointID + schema: + type: string + required: true + description: The notification endpoint ID. 
+ responses: + '200': + description: An updated notification endpoint + content: + application/json: + schema: + $ref: '#/components/schemas/NotificationEndpoint' + '404': + description: The notification endpoint was not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + delete: + operationId: DeleteNotificationEndpointsID + tags: + - NotificationEndpoints + summary: Delete a notification endpoint + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: endpointID + schema: + type: string + required: true + description: The notification endpoint ID. + responses: + '204': + description: Delete has been accepted + '404': + description: The endpoint was not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/notificationEndpoints/{endpointID}/labels': + get: + operationId: GetNotificationEndpointsIDLabels + tags: + - NotificationEndpoints + summary: List all labels for a notification endpoint + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: endpointID + schema: + type: string + required: true + description: The notification endpoint ID. + responses: + '200': + description: A list of all labels for a notification endpoint + content: + application/json: + schema: + $ref: '#/components/schemas/LabelsResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostNotificationEndpointIDLabels + tags: + - NotificationEndpoints + summary: Add a label to a notification endpoint + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: endpointID + schema: + type: string + required: true + description: The notification endpoint ID. + requestBody: + description: Label to add + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/LabelMapping' + responses: + '201': + description: The label was added to the notification endpoint + content: + application/json: + schema: + $ref: '#/components/schemas/LabelResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/notificationEndpoints/{endpointID}/labels/{labelID}': + delete: + operationId: DeleteNotificationEndpointsIDLabelsID + tags: + - NotificationEndpoints + summary: Delete a label from a notification endpoint + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: endpointID + schema: + type: string + required: true + description: The notification endpoint ID. + - in: path + name: labelID + schema: + type: string + required: true + description: The ID of the label to delete. + responses: + '204': + description: Delete has been accepted + '404': + description: Endpoint or label not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /health: + get: + operationId: GetHealth + tags: + - Health + summary: Get the health of an instance + servers: + - url: '' + parameters: + - $ref: '#/components/parameters/TraceSpan' + responses: + '200': + description: The instance is healthy. 
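+          # Illustrative example (placeholder host): per the servers entry above, the
+          # health endpoint is served from the instance root rather than under /api/v2.
+          #   curl -s "http://localhost:8086/health"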
+ content: + application/json: + schema: + $ref: '#/components/schemas/HealthCheck' + '503': + description: The instance is unhealthy. + content: + application/json: + schema: + $ref: '#/components/schemas/HealthCheck' + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + /metrics: + get: + operationId: GetMetrics + tags: + - Metrics + summary: Get metrics of an instance + servers: + - url: '' + parameters: + - $ref: '#/components/parameters/TraceSpan' + responses: + '200': + description: | + Payload body contains metrics about the InfluxDB instance. + + Metrics are formatted in the + Prometheus [plain-text exposition format](https://prometheus.io/docs/instrumenting/exposition_formats). + Each metric is identified by its name and a set of optional key-value pairs. + + The following descriptors precede each metric: + + - *`HELP`*: description of the metric + - *`TYPE`*: type of the metric (e.g. `counter`, `gauge`, `histogram`, or `summary`) + content: + text/plain: + schema: + type: string + format: Prometheus text-based exposition + externalDocs: + description: Prometheus exposition formats + url: 'https://prometheus.io/docs/instrumenting/exposition_formats' + examples: + expositionResponse: + summary: Metrics in plain text + value: | + # HELP go_threads Number of OS threads created. + # TYPE go_threads gauge + go_threads 19 + # HELP http_api_request_duration_seconds Time taken to respond to HTTP request + # TYPE http_api_request_duration_seconds histogram + http_api_request_duration_seconds_bucket{handler="platform",method="GET",path="/:fallback_path",response_code="200",status="2XX",user_agent="curl",le="0.005"} 4 + http_api_request_duration_seconds_bucket{handler="platform",method="GET",path="/:fallback_path",response_code="200",status="2XX",user_agent="curl",le="0.01"} 4 + http_api_request_duration_seconds_bucket{handler="platform",method="GET",path="/:fallback_path",response_code="200",status="2XX",user_agent="curl",le="0.025"} 5 + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + /ready: + get: + operationId: GetReady + tags: + - Ready + summary: Get the readiness of an instance at startup + servers: + - url: '' + parameters: + - $ref: '#/components/parameters/TraceSpan' + responses: + '200': + description: The instance is ready + content: + application/json: + schema: + $ref: '#/components/schemas/Ready' + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + /users: + get: + operationId: GetUsers + tags: + - Users + summary: List all users + parameters: + - $ref: '#/components/parameters/TraceSpan' + - $ref: '#/components/parameters/Offset' + - $ref: '#/components/parameters/Limit' + - $ref: '#/components/parameters/After' + - in: query + name: name + schema: + type: string + - in: query + name: id + schema: + type: string + responses: + '200': + description: A list of users + content: + application/json: + schema: + $ref: '#/components/schemas/Users' + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + post: + operationId: PostUsers + tags: + - Users + summary: Create a user + parameters: + - $ref: '#/components/parameters/TraceSpan' + requestBody: + description: User to create + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/User' + responses: + '201': + description: User created + content: + application/json: + schema: + $ref: '#/components/schemas/UserResponse' + default: + description: Unexpected 
error + $ref: '#/components/responses/ServerError' + '/users/{userID}': + get: + operationId: GetUsersID + tags: + - Users + summary: Retrieve a user + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: userID + schema: + type: string + required: true + description: The user ID. + responses: + '200': + description: User details + content: + application/json: + schema: + $ref: '#/components/schemas/UserResponse' + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + patch: + operationId: PatchUsersID + tags: + - Users + summary: Update a user + requestBody: + description: User update to apply + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/User' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: userID + schema: + type: string + required: true + description: The ID of the user to update. + responses: + '200': + description: User updated + content: + application/json: + schema: + $ref: '#/components/schemas/UserResponse' + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + delete: + operationId: DeleteUsersID + tags: + - Users + summary: Delete a user + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: userID + schema: + type: string + required: true + description: The ID of the user to delete. + responses: + '204': + description: User deleted + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + /setup: + get: + operationId: GetSetup + tags: + - Setup + summary: 'Check if database has default user, org, bucket' + description: 'Returns `true` if no default user, organization, or bucket has been created.' + parameters: + - $ref: '#/components/parameters/TraceSpan' + responses: + '200': + description: allowed true or false + content: + application/json: + schema: + $ref: '#/components/schemas/IsOnboarding' + post: + operationId: PostSetup + tags: + - Setup + summary: 'Set up initial user, org and bucket' + description: 'Post an onboarding request to set up initial user, org and bucket.' + parameters: + - $ref: '#/components/parameters/TraceSpan' + requestBody: + description: Source to create + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/OnboardingRequest' + responses: + '201': + description: 'Created default user, bucket, org' + content: + application/json: + schema: + $ref: '#/components/schemas/OnboardingResponse' + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + /authorizations: + get: + operationId: GetAuthorizations + tags: + - Authorizations + summary: List all authorizations + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: query + name: userID + schema: + type: string + description: Only show authorizations that belong to a user ID. + - in: query + name: user + schema: + type: string + description: Only show authorizations that belong to a user name. + - in: query + name: orgID + schema: + type: string + description: Only show authorizations that belong to an organization ID. + - in: query + name: org + schema: + type: string + description: Only show authorizations that belong to a organization name. 
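+        # Illustrative example (placeholder org ID and token): create an authorization
+        # that grants read access to buckets in one organization.
+        #   curl -s -X POST "http://localhost:8086/api/v2/authorizations" \
+        #     -H "Authorization: Token $INFLUX_TOKEN" \
+        #     -H "Content-Type: application/json" \
+        #     -d '{"orgID": "48c88459ee424a04", "description": "read-only token",
+        #          "permissions": [{"action": "read", "resource": {"type": "buckets"}}]}'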
+ responses: + '200': + description: A list of authorizations + content: + application/json: + schema: + $ref: '#/components/schemas/Authorizations' + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + post: + operationId: PostAuthorizations + tags: + - Authorizations + summary: Create an authorization + parameters: + - $ref: '#/components/parameters/TraceSpan' + requestBody: + description: Authorization to create + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/AuthorizationPostRequest' + responses: + '201': + description: Authorization created + content: + application/json: + schema: + $ref: '#/components/schemas/Authorization' + '400': + description: Invalid request + $ref: '#/components/responses/ServerError' + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + '/authorizations/{authID}': + get: + operationId: GetAuthorizationsID + tags: + - Authorizations + summary: Retrieve an authorization + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: authID + schema: + type: string + required: true + description: The ID of the authorization to get. + responses: + '200': + description: Authorization details + content: + application/json: + schema: + $ref: '#/components/schemas/Authorization' + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + patch: + operationId: PatchAuthorizationsID + tags: + - Authorizations + summary: Update an authorization to be active or inactive + requestBody: + description: Authorization to update + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/AuthorizationUpdateRequest' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: authID + schema: + type: string + required: true + description: The ID of the authorization to update. + responses: + '200': + description: The active or inactive authorization + content: + application/json: + schema: + $ref: '#/components/schemas/Authorization' + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + delete: + operationId: DeleteAuthorizationsID + tags: + - Authorizations + summary: Delete an authorization + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: authID + schema: + type: string + required: true + description: The ID of the authorization to delete. + responses: + '204': + description: Authorization deleted + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + /legacy/authorizations: + servers: + - url: /private + get: + operationId: GetLegacyAuthorizations + tags: + - Legacy Authorizations + summary: List all legacy authorizations + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: query + name: userID + schema: + type: string + description: Only show legacy authorizations that belong to a user ID. + - in: query + name: user + schema: + type: string + description: Only show legacy authorizations that belong to a user name. + - in: query + name: orgID + schema: + type: string + description: Only show legacy authorizations that belong to an organization ID. + - in: query + name: org + schema: + type: string + description: Only show legacy authorizations that belong to a organization name. + - in: query + name: token + schema: + type: string + description: Only show legacy authorizations with a specified token (auth name). 
+ - in: query + name: authID + schema: + type: string + description: Only show legacy authorizations with a specified auth ID. + responses: + '200': + description: A list of legacy authorizations + content: + application/json: + schema: + $ref: '#/components/schemas/Authorizations' + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + post: + operationId: PostLegacyAuthorizations + tags: + - Legacy Authorizations + summary: Create a legacy authorization + parameters: + - $ref: '#/components/parameters/TraceSpan' + requestBody: + description: Legacy authorization to create + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/LegacyAuthorizationPostRequest' + responses: + '201': + description: Legacy authorization created + content: + application/json: + schema: + $ref: '#/components/schemas/Authorization' + '400': + description: Invalid request + $ref: '#/components/responses/ServerError' + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + '/legacy/authorizations/{authID}': + servers: + - url: /private + get: + operationId: GetLegacyAuthorizationsID + tags: + - Legacy Authorizations + summary: Retrieve a legacy authorization + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: authID + schema: + type: string + required: true + description: The ID of the legacy authorization to get. + responses: + '200': + description: Legacy authorization details + content: + application/json: + schema: + $ref: '#/components/schemas/Authorization' + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + patch: + operationId: PatchLegacyAuthorizationsID + tags: + - Legacy Authorizations + summary: Update a legacy authorization to be active or inactive + requestBody: + description: Legacy authorization to update + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/AuthorizationUpdateRequest' + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: authID + schema: + type: string + required: true + description: The ID of the legacy authorization to update. + responses: + '200': + description: The active or inactive legacy authorization + content: + application/json: + schema: + $ref: '#/components/schemas/Authorization' + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + delete: + operationId: DeleteLegacyAuthorizationsID + tags: + - Legacy Authorizations + summary: Delete a legacy authorization + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: authID + schema: + type: string + required: true + description: The ID of the legacy authorization to delete. + responses: + '204': + description: Legacy authorization deleted + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + '/legacy/authorizations/{authID}/password': + servers: + - url: /private + post: + operationId: PostLegacyAuthorizationsIDPassword + tags: + - Legacy Authorizations + summary: Set a legacy authorization password + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: authID + schema: + type: string + required: true + description: The ID of the legacy authorization to update. 
+ requestBody: + description: New password + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PasswordResetBody' + responses: + '204': + description: Legacy authorization password set + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + /variables: + get: + operationId: GetVariables + tags: + - Variables + summary: List all variables + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: query + name: org + description: The name of the organization. + schema: + type: string + - in: query + name: orgID + description: The organization ID. + schema: + type: string + responses: + '200': + description: A list of variables for an organization + content: + application/json: + schema: + $ref: '#/components/schemas/Variables' + '400': + description: Invalid request + $ref: '#/components/responses/ServerError' + default: + description: Internal server error + $ref: '#/components/responses/ServerError' + post: + operationId: PostVariables + summary: Create a variable + tags: + - Variables + parameters: + - $ref: '#/components/parameters/TraceSpan' + requestBody: + description: Variable to create + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/Variable' + responses: + '201': + description: Variable created + content: + application/json: + schema: + $ref: '#/components/schemas/Variable' + default: + description: Internal server error + $ref: '#/components/responses/ServerError' + '/variables/{variableID}': + get: + operationId: GetVariablesID + tags: + - Variables + summary: Retrieve a variable + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: variableID + required: true + schema: + type: string + description: The variable ID. + responses: + '200': + description: Variable found + content: + application/json: + schema: + $ref: '#/components/schemas/Variable' + '404': + description: Variable not found + $ref: '#/components/responses/ServerError' + default: + description: Internal server error + $ref: '#/components/responses/ServerError' + delete: + operationId: DeleteVariablesID + tags: + - Variables + summary: Delete a variable + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: variableID + required: true + schema: + type: string + description: The variable ID. + responses: + '204': + description: Variable deleted + default: + description: Internal server error + $ref: '#/components/responses/ServerError' + patch: + operationId: PatchVariablesID + summary: Update a variable + tags: + - Variables + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: variableID + required: true + schema: + type: string + description: The variable ID. + requestBody: + description: Variable update to apply + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/Variable' + responses: + '200': + description: Variable updated + content: + application/json: + schema: + $ref: '#/components/schemas/Variable' + default: + description: Internal server error + $ref: '#/components/responses/ServerError' + put: + operationId: PutVariablesID + summary: Replace a variable + tags: + - Variables + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: variableID + required: true + schema: + type: string + description: The variable ID. 
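+        # Illustrative sketch (assumed request shape, placeholder IDs and token):
+        # create a constant-type variable; the Variable body is assumed to take a
+        # name, an orgID, and an arguments object listing its values.
+        #   curl -s -X POST "http://localhost:8086/api/v2/variables" \
+        #     -H "Authorization: Token $INFLUX_TOKEN" \
+        #     -H "Content-Type: application/json" \
+        #     -d '{"name": "example_var", "orgID": "48c88459ee424a04",
+        #          "arguments": {"type": "constant", "values": ["example"]}}'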
+ requestBody: + description: Variable to replace + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/Variable' + responses: + '200': + description: Variable updated + content: + application/json: + schema: + $ref: '#/components/schemas/Variable' + default: + description: Internal server error + $ref: '#/components/responses/ServerError' + /sources: + post: + operationId: PostSources + tags: + - Sources + summary: Create a source + parameters: + - $ref: '#/components/parameters/TraceSpan' + requestBody: + description: Source to create + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/Source' + responses: + '201': + description: Created Source + content: + application/json: + schema: + $ref: '#/components/schemas/Source' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + get: + operationId: GetSources + tags: + - Sources + summary: List all sources + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: query + name: org + description: The name of the organization. + schema: + type: string + responses: + '200': + description: A list of sources + content: + application/json: + schema: + $ref: '#/components/schemas/Sources' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/sources/{sourceID}': + delete: + operationId: DeleteSourcesID + tags: + - Sources + summary: Delete a source + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: sourceID + schema: + type: string + required: true + description: The source ID. + responses: + '204': + description: Delete has been accepted + '404': + description: View not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + patch: + operationId: PatchSourcesID + tags: + - Sources + summary: Update a Source + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: sourceID + schema: + type: string + required: true + description: The source ID. + requestBody: + description: Source update + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/Source' + responses: + '200': + description: Created Source + content: + application/json: + schema: + $ref: '#/components/schemas/Source' + '404': + description: Source not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + get: + operationId: GetSourcesID + tags: + - Sources + summary: Retrieve a source + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: sourceID + schema: + type: string + required: true + description: The source ID. 
+ responses: + '200': + description: A source + content: + application/json: + schema: + $ref: '#/components/schemas/Source' + '404': + description: Source not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/sources/{sourceID}/health': + get: + operationId: GetSourcesIDHealth + tags: + - Sources + summary: Get the health of a source + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: sourceID + schema: + type: string + required: true + description: The source ID. + responses: + '200': + description: The source is healthy + content: + application/json: + schema: + $ref: '#/components/schemas/HealthCheck' + '503': + description: The source is not healthy + content: + application/json: + schema: + $ref: '#/components/schemas/HealthCheck' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/sources/{sourceID}/buckets': + get: + operationId: GetSourcesIDBuckets + tags: + - Sources + - Buckets + summary: Get buckets in a source + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: sourceID + schema: + type: string + required: true + description: The source ID. + - in: query + name: org + description: The name of the organization. + schema: + type: string + responses: + '200': + description: A source + content: + application/json: + schema: + $ref: '#/components/schemas/Buckets' + '404': + description: Source not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /scrapers: + get: + operationId: GetScrapers + tags: + - Scraper Targets + summary: List all scraper targets + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: query + name: name + description: Specifies the name of the scraper target. + schema: + type: string + - in: query + name: id + description: 'List of scraper target IDs to return. If both `id` and `owner` are specified, only `id` is used.' + schema: + type: array + items: + type: string + - in: query + name: orgID + description: Specifies the organization ID of the scraper target. + schema: + type: string + - in: query + name: org + description: Specifies the organization name of the scraper target. 
+ schema: + type: string + responses: + '200': + description: All scraper targets + content: + application/json: + schema: + $ref: '#/components/schemas/ScraperTargetResponses' + post: + operationId: PostScrapers + summary: Create a scraper target + tags: + - Scraper Targets + parameters: + - $ref: '#/components/parameters/TraceSpan' + requestBody: + description: Scraper target to create + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ScraperTargetRequest' + responses: + '201': + description: Scraper target created + content: + application/json: + schema: + $ref: '#/components/schemas/ScraperTargetResponse' + default: + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/scrapers/{scraperTargetID}': + get: + operationId: GetScrapersID + tags: + - Scraper Targets + summary: Retrieve a scraper target + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: scraperTargetID + required: true + schema: + type: string + description: The identifier of the scraper target. + responses: + '200': + description: The scraper target + content: + application/json: + schema: + $ref: '#/components/schemas/ScraperTargetResponse' + default: + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + delete: + operationId: DeleteScrapersID + tags: + - Scraper Targets + summary: Delete a scraper target + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: scraperTargetID + required: true + schema: + type: string + description: The identifier of the scraper target. + responses: + '204': + description: Scraper target deleted + default: + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + patch: + operationId: PatchScrapersID + summary: Update a scraper target + tags: + - Scraper Targets + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: scraperTargetID + required: true + schema: + type: string + description: The identifier of the scraper target. + requestBody: + description: Scraper target update to apply + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ScraperTargetRequest' + responses: + '200': + description: Scraper target updated + content: + application/json: + schema: + $ref: '#/components/schemas/ScraperTargetResponse' + default: + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/scrapers/{scraperTargetID}/labels': + get: + operationId: GetScrapersIDLabels + tags: + - Scraper Targets + summary: List all labels for a scraper target + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: scraperTargetID + schema: + type: string + required: true + description: The scraper target ID. + responses: + '200': + description: A list of labels for a scraper target. + content: + application/json: + schema: + $ref: '#/components/schemas/LabelsResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostScrapersIDLabels + tags: + - Scraper Targets + summary: Add a label to a scraper target + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: scraperTargetID + schema: + type: string + required: true + description: The scraper target ID. 
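+        # Illustrative sketch (assumed request shape, placeholder IDs and token):
+        # register a Prometheus scraper target; the body is assumed to carry a name,
+        # type, url, orgID, and destination bucketID.
+        #   curl -s -X POST "http://localhost:8086/api/v2/scrapers" \
+        #     -H "Authorization: Token $INFLUX_TOKEN" \
+        #     -H "Content-Type: application/json" \
+        #     -d '{"name": "node metrics", "type": "prometheus",
+        #          "url": "http://localhost:9100/metrics",
+        #          "orgID": "48c88459ee424a04", "bucketID": "7c3f5a2e9d8b6041"}'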
+ requestBody: + description: Label to add + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/LabelMapping' + responses: + '201': + description: The newly added label + content: + application/json: + schema: + $ref: '#/components/schemas/LabelResponse' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/scrapers/{scraperTargetID}/labels/{labelID}': + delete: + operationId: DeleteScrapersIDLabelsID + tags: + - Scraper Targets + summary: Delete a label from a scraper target + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: scraperTargetID + schema: + type: string + required: true + description: The scraper target ID. + - in: path + name: labelID + schema: + type: string + required: true + description: The label ID. + responses: + '204': + description: Delete has been accepted + '404': + description: Scraper target not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/scrapers/{scraperTargetID}/members': + get: + operationId: GetScrapersIDMembers + tags: + - Scraper Targets + summary: List all users with member privileges for a scraper target + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: scraperTargetID + schema: + type: string + required: true + description: The scraper target ID. + responses: + '200': + description: A list of scraper target members + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceMembers' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostScrapersIDMembers + tags: + - Scraper Targets + summary: Add a member to a scraper target + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: scraperTargetID + schema: + type: string + required: true + description: The scraper target ID. + requestBody: + description: User to add as member + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/AddResourceMemberRequestBody' + responses: + '201': + description: Member added to scraper targets + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceMember' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/scrapers/{scraperTargetID}/members/{userID}': + delete: + operationId: DeleteScrapersIDMembersID + tags: + - Scraper Targets + summary: Remove a member from a scraper target + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: userID + schema: + type: string + required: true + description: The ID of member to remove. + - in: path + name: scraperTargetID + schema: + type: string + required: true + description: The scraper target ID. + responses: + '204': + description: Member removed + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/scrapers/{scraperTargetID}/owners': + get: + operationId: GetScrapersIDOwners + tags: + - Scraper Targets + summary: List all owners of a scraper target + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: scraperTargetID + schema: + type: string + required: true + description: The scraper target ID. 
+ responses: + '200': + description: A list of scraper target owners + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceOwners' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + operationId: PostScrapersIDOwners + tags: + - Scraper Targets + summary: Add an owner to a scraper target + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: scraperTargetID + schema: + type: string + required: true + description: The scraper target ID. + requestBody: + description: User to add as owner + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/AddResourceMemberRequestBody' + responses: + '201': + description: Scraper target owner added + content: + application/json: + schema: + $ref: '#/components/schemas/ResourceOwner' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '/scrapers/{scraperTargetID}/owners/{userID}': + delete: + operationId: DeleteScrapersIDOwnersID + tags: + - Scraper Targets + summary: Remove an owner from a scraper target + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: userID + schema: + type: string + required: true + description: The ID of owner to remove. + - in: path + name: scraperTargetID + schema: + type: string + required: true + description: The scraper target ID. + responses: + '204': + description: Owner removed + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /backup/kv: + get: + operationId: GetBackupKV + tags: + - Backup + summary: 'Download snapshot of metadata stored in the server''s embedded KV store. Should not be used in versions greater than 2.1.x, as it doesn''t include metadata stored in embedded SQL.' + deprecated: true + parameters: + - $ref: '#/components/parameters/TraceSpan' + responses: + '200': + description: Snapshot of KV metadata + content: + application/octet-stream: + schema: + type: string + format: binary + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + /backup/metadata: + get: + operationId: GetBackupMetadata + tags: + - Backup + summary: Download snapshot of all metadata in the server + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: header + name: Accept-Encoding + description: Indicates the content encoding (usually a compression algorithm) that the client can understand. + schema: + type: string + description: 'The content coding. Use `gzip` for compressed data or `identity` for unmodified, uncompressed data.' + default: identity + enum: + - gzip + - identity + responses: + '200': + description: Snapshot of metadata + headers: + Content-Encoding: + description: Lists any encodings (usually compression algorithms) that have been applied to the response payload. + schema: + type: string + description: | + The content coding: `gzip` for compressed data or `identity` for unmodified, uncompressed data. 
+ default: identity + enum: + - gzip + - identity + content: + multipart/mixed: + schema: + $ref: '#/components/schemas/MetadataBackup' + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + '/backup/shards/{shardID}': + get: + operationId: GetBackupShardId + tags: + - Backup + summary: Download snapshot of all TSM data in a shard + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: header + name: Accept-Encoding + description: Indicates the content encoding (usually a compression algorithm) that the client can understand. + schema: + type: string + description: 'The content coding. Use `gzip` for compressed data or `identity` for unmodified, uncompressed data.' + default: identity + enum: + - gzip + - identity + - in: path + name: shardID + schema: + type: integer + format: int64 + required: true + description: The shard ID. + - in: query + name: since + description: Earliest time to include in the snapshot. RFC3339 format. + schema: + type: string + format: date-time + responses: + '200': + description: TSM snapshot. + headers: + Content-Encoding: + description: Lists any encodings (usually compression algorithms) that have been applied to the response payload. + schema: + type: string + description: | + The content coding: `gzip` for compressed data or `identity` for unmodified, uncompressed data. + default: identity + enum: + - gzip + - identity + content: + application/octet-stream: + schema: + type: string + format: binary + '404': + description: Shard not found. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + /restore/kv: + post: + operationId: PostRestoreKV + tags: + - Restore + summary: Overwrite the embedded KV store on the server with a backed-up snapshot. + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: header + name: Content-Encoding + description: | + The value tells InfluxDB what compression is applied to the line protocol in the request payload. + To make an API request with a GZIP payload, send `Content-Encoding: gzip` as a request header. + schema: + type: string + description: 'The content coding. Use `gzip` for compressed data or `identity` for unmodified, uncompressed data.' + default: identity + enum: + - gzip + - identity + - in: header + name: Content-Type + schema: + type: string + default: application/octet-stream + enum: + - application/octet-stream + requestBody: + description: Full KV snapshot. + required: true + content: + text/plain: + schema: + type: string + format: binary + responses: + '200': + description: KV store successfully overwritten. + content: + application/json: + schema: + type: object + properties: + token: + description: token is the root token for the instance after restore (this is overwritten during the restore) + type: string + '204': + description: KV store successfully overwritten. + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + /restore/sql: + post: + operationId: PostRestoreSQL + tags: + - Restore + summary: Overwrite the embedded SQL store on the server with a backed-up snapshot. + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: header + name: Content-Encoding + description: | + The value tells InfluxDB what compression is applied to the line protocol in the request payload. + To make an API request with a GZIP payload, send `Content-Encoding: gzip` as a request header. 
+ schema: + type: string + description: Specifies that the line protocol in the body is encoded with gzip or not encoded with identity. + default: identity + enum: + - gzip + - identity + - in: header + name: Content-Type + schema: + type: string + default: application/octet-stream + enum: + - application/octet-stream + requestBody: + description: Full SQL snapshot. + required: true + content: + text/plain: + schema: + type: string + format: binary + responses: + '204': + description: SQL store successfully overwritten. + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + '/restore/bucket/{bucketID}': + post: + operationId: PostRestoreBucketID + tags: + - Restore + summary: Overwrite storage metadata for a bucket with shard info from a backup. + deprecated: true + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: bucketID + schema: + type: string + required: true + description: The bucket ID. + - in: header + name: Content-Type + schema: + type: string + default: application/octet-stream + enum: + - application/octet-stream + requestBody: + description: Database info serialized as protobuf. + required: true + content: + text/plain: + schema: + type: string + format: byte + responses: + '200': + description: ID mappings for shards in bucket. + content: + application/json: + schema: + type: string + format: byte + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + /restore/bucketMetadata: + post: + operationId: PostRestoreBucketMetadata + tags: + - Restore + summary: Create a new bucket pre-seeded with shard info from a backup. + parameters: + - $ref: '#/components/parameters/TraceSpan' + requestBody: + description: Metadata manifest for a bucket. + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/BucketMetadataManifest' + responses: + '201': + description: ID mappings for shards in new bucket. + content: + application/json: + schema: + $ref: '#/components/schemas/RestoredBucketMappings' + default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + '/restore/shards/{shardID}': + post: + operationId: PostRestoreShardId + tags: + - Restore + summary: Restore a TSM snapshot into a shard. + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: header + name: Content-Encoding + description: | + The value tells InfluxDB what compression is applied to the line protocol in the request payload. + To make an API request with a GZIP payload, send `Content-Encoding: gzip` as a request header. + schema: + type: string + description: Specifies that the line protocol in the body is encoded with gzip or not encoded with identity. + default: identity + enum: + - gzip + - identity + - in: header + name: Content-Type + schema: + type: string + default: application/octet-stream + enum: + - application/octet-stream + - in: path + name: shardID + schema: + type: string + required: true + description: The shard ID. + requestBody: + description: TSM snapshot. + required: true + content: + text/plain: + schema: + type: string + format: binary + responses: + '204': + description: TSM snapshot successfully restored. 
+ default: + description: Unexpected error + $ref: '#/components/responses/ServerError' + /config: + get: + operationId: GetConfig + tags: + - Config + summary: Get the run-time configuration of the instance + parameters: + - $ref: '#/components/parameters/TraceSpan' + responses: + '200': + description: Payload body contains the run-time configuration of the InfluxDB instance. + content: + application/json: + schema: + $ref: '#/components/schemas/Config' + '401': + $ref: '#/components/responses/ServerError' + default: + $ref: '#/components/responses/ServerError' + /remotes: + get: + operationId: GetRemoteConnections + tags: + - RemoteConnections + summary: List all remote connections + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: query + name: orgID + description: The organization ID. + required: true + schema: + type: string + - in: query + name: name + schema: + type: string + - in: query + name: remoteURL + schema: + type: string + format: uri + responses: + '200': + description: List of remote connections + content: + application/json: + schema: + $ref: '#/components/schemas/RemoteConnections' + '404': + $ref: '#/components/responses/ServerError' + default: + $ref: '#/components/responses/ServerError' + post: + operationId: PostRemoteConnection + tags: + - RemoteConnections + summary: Register a new remote connection + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/RemoteConnectionCreationRequest' + responses: + '201': + description: Remote connection saved + content: + application/json: + schema: + $ref: '#/components/schemas/RemoteConnection' + '400': + $ref: '#/components/responses/ServerError' + default: + $ref: '#/components/responses/ServerError' + '/remotes/{remoteID}': + get: + operationId: GetRemoteConnectionByID + tags: + - RemoteConnections + summary: Retrieve a remote connection + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: remoteID + schema: + type: string + required: true + responses: + '200': + description: Remote connection + content: + application/json: + schema: + $ref: '#/components/schemas/RemoteConnection' + '404': + $ref: '#/components/responses/ServerError' + default: + $ref: '#/components/responses/ServerError' + patch: + operationId: PatchRemoteConnectionByID + tags: + - RemoteConnections + summary: Update a remote connection + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: remoteID + schema: + type: string + required: true + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/RemoteConnectionUpdateRequest' + responses: + '200': + description: Updated information saved + content: + application/json: + schema: + $ref: '#/components/schemas/RemoteConnection' + '400': + $ref: '#/components/responses/ServerError' + '404': + $ref: '#/components/responses/ServerError' + default: + $ref: '#/components/responses/ServerError' + delete: + operationId: DeleteRemoteConnectionByID + tags: + - RemoteConnections + summary: Delete a remote connection + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: remoteID + schema: + type: string + required: true + responses: + '204': + description: Remote connection info deleted. 
+ '404': + $ref: '#/components/responses/ServerError' + default: + $ref: '#/components/responses/ServerError' + /replications: + get: + operationId: GetReplications + tags: + - Replications + summary: List all replications + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: query + name: orgID + description: The organization ID. + required: true + schema: + type: string + - in: query + name: name + schema: + type: string + - in: query + name: remoteID + schema: + type: string + - in: query + name: localBucketID + schema: + type: string + responses: + '200': + description: List of replications + content: + application/json: + schema: + $ref: '#/components/schemas/Replications' + '404': + $ref: '#/components/responses/ServerError' + default: + $ref: '#/components/responses/ServerError' + post: + operationId: PostReplication + tags: + - Replications + summary: Register a new replication + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: query + name: validate + description: 'If true, validate the replication, but don''t save it.' + schema: + type: boolean + default: false + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ReplicationCreationRequest' + responses: + '201': + description: Replication saved + content: + application/json: + schema: + $ref: '#/components/schemas/Replication' + '204': + description: 'Replication validated, but not saved' + '400': + $ref: '#/components/responses/ServerError' + default: + $ref: '#/components/responses/ServerError' + '/replications/{replicationID}': + get: + operationId: GetReplicationByID + tags: + - Replications + summary: Retrieve a replication + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: replicationID + schema: + type: string + required: true + responses: + '200': + description: Replication + content: + application/json: + schema: + $ref: '#/components/schemas/Replication' + '404': + $ref: '#/components/responses/ServerError' + default: + $ref: '#/components/responses/ServerError' + patch: + operationId: PatchReplicationByID + tags: + - Replications + summary: Update a replication + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: replicationID + schema: + type: string + required: true + - in: query + name: validate + description: 'If true, validate the updated information, but don''t save it.' + schema: + type: boolean + default: false + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ReplicationUpdateRequest' + responses: + '200': + description: Updated information saved + content: + application/json: + schema: + $ref: '#/components/schemas/Replication' + '204': + description: 'Updated replication validated, but not saved' + '400': + $ref: '#/components/responses/ServerError' + '404': + $ref: '#/components/responses/ServerError' + default: + $ref: '#/components/responses/ServerError' + delete: + operationId: DeleteReplicationByID + tags: + - Replications + summary: Delete a replication + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: replicationID + schema: + type: string + required: true + responses: + '204': + description: Replication deleted. 
+ '404': + $ref: '#/components/responses/ServerError' + default: + $ref: '#/components/responses/ServerError' + '/replications/{replicationID}/validate': + post: + operationId: PostValidateReplicationByID + tags: + - Replications + summary: Validate a replication + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: replicationID + schema: + type: string + required: true + responses: + '204': + description: Replication is valid + '400': + description: Replication failed validation + $ref: '#/components/responses/ServerError' + default: + $ref: '#/components/responses/ServerError' + /dashboards: + post: + operationId: PostDashboards + tags: + - Dashboards + summary: Create a dashboard + parameters: + - $ref: '#/components/parameters/TraceSpan' + requestBody: + description: Dashboard to create + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/CreateDashboardRequest' + responses: + '201': + description: Added dashboard + content: + application/json: + schema: + oneOf: + - $ref: '#/components/schemas/Dashboard' + - $ref: '#/components/schemas/DashboardWithViewProperties' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + get: + operationId: GetDashboards + tags: + - Dashboards + summary: List all dashboards + parameters: + - $ref: '#/components/parameters/TraceSpan' + - $ref: '#/components/parameters/Offset' + - $ref: '#/components/parameters/Limit' + - $ref: '#/components/parameters/Descending' + - in: query + name: owner + description: A user identifier. Returns only dashboards where this user has the `owner` role. + schema: + type: string + - in: query + name: sortBy + description: The column to sort by. + schema: + type: string + enum: + - ID + - CreatedAt + - UpdatedAt + - in: query + name: id + description: 'A list of dashboard identifiers. Returns only the listed dashboards. If both `id` and `owner` are specified, only `id` is used.' + schema: + type: array + items: + type: string + - in: query + name: orgID + description: The identifier of the organization. + schema: + type: string + - in: query + name: org + description: The name of the organization. + schema: + type: string + responses: + '200': + description: All dashboards + content: + application/json: + schema: + $ref: '#/components/schemas/Dashboards' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /tasks: + get: + operationId: GetTasks + tags: + - Tasks + summary: List all tasks + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: query + name: name + description: Returns task with a specific name. + schema: + type: string + - in: query + name: after + schema: + type: string + description: Return tasks after a specified ID. + - in: query + name: user + schema: + type: string + description: Filter tasks to a specific user ID. + - in: query + name: org + schema: + type: string + description: Filter tasks to a specific organization name. + - in: query + name: orgID + schema: + type: string + description: Filter tasks to a specific organization ID. + - in: query + name: status + schema: + type: string + enum: + - active + - inactive + description: Filter tasks by a status--"inactive" or "active". 
+ - in: query
+ name: limit
+ schema:
+ type: integer
+ minimum: 1
+ maximum: 500
+ default: 100
+ description: The number of tasks to return
+ - in: query
+ name: type
+ description: 'Type of task, unset by default.'
+ required: false
+ schema:
+ default: ''
+ type: string
+ enum:
+ - basic
+ - system
+ responses:
+ '200':
+ description: A list of tasks
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Tasks'
+ default:
+ description: Unexpected error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Error'
+ post:
+ operationId: PostTasks
+ tags:
+ - Tasks
+ summary: Create a new task
+ parameters:
+ - $ref: '#/components/parameters/TraceSpan'
+ requestBody:
+ description: Task to create
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/TaskCreateRequest'
+ responses:
+ '201':
+ description: Task created
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Task'
+ default:
+ description: Unexpected error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Error'
+components:
+ parameters:
+ TraceSpan:
+ in: header
+ name: Zap-Trace-Span
+ description: OpenTracing span context
+ example:
+ trace_id: '1'
+ span_id: '1'
+ baggage:
+ key: value
+ required: false
+ schema:
+ type: string
+ Offset:
+ in: query
+ name: offset
+ required: false
+ schema:
+ type: integer
+ minimum: 0
+ Limit:
+ in: query
+ name: limit
+ required: false
+ schema:
+ type: integer
+ minimum: 1
+ maximum: 100
+ default: 20
+ Descending:
+ in: query
+ name: descending
+ required: false
+ schema:
+ type: boolean
+ default: false
+ SortBy:
+ in: query
+ name: sortBy
+ required: false
+ schema:
+ type: string
+ After:
+ in: query
+ name: after
+ required: false
+ schema:
+ type: string
+ description: |
+ Resource ID to seek from. Results are not inclusive of this ID. Use `after` instead of `offset`.
+ schemas:
+ LanguageRequest:
+ description: Flux query to be analyzed.
+ type: object
+ required:
+ - query
+ properties:
+ query:
+ description: Flux query script to be analyzed
+ type: string
+ Query:
+ description: Query influx using the Flux language
+ type: object
+ required:
+ - query
+ properties:
+ extern:
+ $ref: '#/components/schemas/File'
+ query:
+ description: Query script to execute.
+ type: string
+ type:
+ description: The type of query. Must be "flux".
+ type: string
+ enum:
+ - flux
+ params:
+ type: object
+ additionalProperties: true
+ description: |
+ Enumeration of key/value pairs that represent parameters to be injected into query (can only specify either this field or extern and not both)
+ dialect:
+ $ref: '#/components/schemas/Dialect'
+ now:
+ description: Specifies the time that should be reported as "now" in the query. Default is the server's now time.
+ type: string
+ format: date-time
+ Package:
+ description: Represents a complete package source tree.
+ type: object
+ properties:
+ type:
+ $ref: '#/components/schemas/NodeType'
+ path:
+ description: Package import path
+ type: string
+ package:
+ description: Package name
+ type: string
+ files:
+ description: Package files
+ type: array
+ items:
+ $ref: '#/components/schemas/File'
+ File:
+ description: Represents a source from a single file
+ type: object
+ properties:
+ type:
+ $ref: '#/components/schemas/NodeType'
+ name:
+ description: The name of the file.
+ type: string + package: + $ref: '#/components/schemas/PackageClause' + imports: + description: A list of package imports + type: array + items: + $ref: '#/components/schemas/ImportDeclaration' + body: + description: List of Flux statements + type: array + items: + $ref: '#/components/schemas/Statement' + PackageClause: + description: Defines a package identifier + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + name: + $ref: '#/components/schemas/Identifier' + ImportDeclaration: + description: Declares a package import + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + as: + $ref: '#/components/schemas/Identifier' + path: + $ref: '#/components/schemas/StringLiteral' + DeletePredicateRequest: + description: The delete predicate request. + type: object + required: + - start + - stop + properties: + start: + description: RFC3339Nano + type: string + format: date-time + stop: + description: RFC3339Nano + type: string + format: date-time + predicate: + description: InfluxQL-like delete statement + example: tag1="value1" and (tag2="value2" and tag3!="value3") + type: string + Node: + oneOf: + - $ref: '#/components/schemas/Expression' + - $ref: '#/components/schemas/Block' + NodeType: + description: Type of AST node + type: string + Block: + description: A set of statements + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + body: + description: Block body + type: array + items: + $ref: '#/components/schemas/Statement' + Statement: + oneOf: + - $ref: '#/components/schemas/BadStatement' + - $ref: '#/components/schemas/VariableAssignment' + - $ref: '#/components/schemas/MemberAssignment' + - $ref: '#/components/schemas/ExpressionStatement' + - $ref: '#/components/schemas/ReturnStatement' + - $ref: '#/components/schemas/OptionStatement' + - $ref: '#/components/schemas/BuiltinStatement' + - $ref: '#/components/schemas/TestStatement' + BadStatement: + description: A placeholder for statements for which no correct statement nodes can be created + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + text: + description: Raw source text + type: string + VariableAssignment: + description: Represents the declaration of a variable + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + id: + $ref: '#/components/schemas/Identifier' + init: + $ref: '#/components/schemas/Expression' + MemberAssignment: + description: Object property assignment + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + member: + $ref: '#/components/schemas/MemberExpression' + init: + $ref: '#/components/schemas/Expression' + ExpressionStatement: + description: May consist of an expression that does not return a value and is executed solely for its side-effects + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + expression: + $ref: '#/components/schemas/Expression' + ReturnStatement: + description: Defines an expression to return + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + argument: + $ref: '#/components/schemas/Expression' + OptionStatement: + description: A single variable declaration + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + assignment: + oneOf: + - $ref: '#/components/schemas/VariableAssignment' + - $ref: '#/components/schemas/MemberAssignment' + BuiltinStatement: + description: Declares a builtin identifier and its type + type: object + properties: + type: + $ref: 
'#/components/schemas/NodeType' + id: + $ref: '#/components/schemas/Identifier' + TestStatement: + description: Declares a Flux test case + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + assignment: + $ref: '#/components/schemas/VariableAssignment' + Expression: + oneOf: + - $ref: '#/components/schemas/ArrayExpression' + - $ref: '#/components/schemas/DictExpression' + - $ref: '#/components/schemas/FunctionExpression' + - $ref: '#/components/schemas/BinaryExpression' + - $ref: '#/components/schemas/CallExpression' + - $ref: '#/components/schemas/ConditionalExpression' + - $ref: '#/components/schemas/LogicalExpression' + - $ref: '#/components/schemas/MemberExpression' + - $ref: '#/components/schemas/IndexExpression' + - $ref: '#/components/schemas/ObjectExpression' + - $ref: '#/components/schemas/ParenExpression' + - $ref: '#/components/schemas/PipeExpression' + - $ref: '#/components/schemas/UnaryExpression' + - $ref: '#/components/schemas/BooleanLiteral' + - $ref: '#/components/schemas/DateTimeLiteral' + - $ref: '#/components/schemas/DurationLiteral' + - $ref: '#/components/schemas/FloatLiteral' + - $ref: '#/components/schemas/IntegerLiteral' + - $ref: '#/components/schemas/PipeLiteral' + - $ref: '#/components/schemas/RegexpLiteral' + - $ref: '#/components/schemas/StringLiteral' + - $ref: '#/components/schemas/UnsignedIntegerLiteral' + - $ref: '#/components/schemas/Identifier' + ArrayExpression: + description: Used to create and directly specify the elements of an array object + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + elements: + description: Elements of the array + type: array + items: + $ref: '#/components/schemas/Expression' + DictExpression: + description: Used to create and directly specify the elements of a dictionary + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + elements: + description: Elements of the dictionary + type: array + items: + $ref: '#/components/schemas/DictItem' + DictItem: + description: A key/value pair in a dictionary + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + key: + $ref: '#/components/schemas/Expression' + val: + $ref: '#/components/schemas/Expression' + FunctionExpression: + description: Function expression + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + params: + description: Function parameters + type: array + items: + $ref: '#/components/schemas/Property' + body: + $ref: '#/components/schemas/Node' + BinaryExpression: + description: uses binary operators to act on two operands in an expression + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + operator: + type: string + left: + $ref: '#/components/schemas/Expression' + right: + $ref: '#/components/schemas/Expression' + CallExpression: + description: Represents a function call + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + callee: + $ref: '#/components/schemas/Expression' + arguments: + description: Function arguments + type: array + items: + $ref: '#/components/schemas/Expression' + ConditionalExpression: + description: 'Selects one of two expressions, `Alternate` or `Consequent`, depending on a third boolean expression, `Test`' + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + test: + $ref: '#/components/schemas/Expression' + alternate: + $ref: '#/components/schemas/Expression' + consequent: + $ref: '#/components/schemas/Expression' + LogicalExpression: + 
description: Represents the rule conditions that collectively evaluate to either true or false + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + operator: + type: string + left: + $ref: '#/components/schemas/Expression' + right: + $ref: '#/components/schemas/Expression' + MemberExpression: + description: Represents accessing a property of an object + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + object: + $ref: '#/components/schemas/Expression' + property: + $ref: '#/components/schemas/PropertyKey' + IndexExpression: + description: Represents indexing into an array + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + array: + $ref: '#/components/schemas/Expression' + index: + $ref: '#/components/schemas/Expression' + ObjectExpression: + description: Allows the declaration of an anonymous object within a declaration + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + properties: + description: Object properties + type: array + items: + $ref: '#/components/schemas/Property' + ParenExpression: + description: Represents an expression wrapped in parenthesis + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + expression: + $ref: '#/components/schemas/Expression' + PipeExpression: + description: Call expression with pipe argument + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + argument: + $ref: '#/components/schemas/Expression' + call: + $ref: '#/components/schemas/CallExpression' + UnaryExpression: + description: Uses operators to act on a single operand in an expression + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + operator: + type: string + argument: + $ref: '#/components/schemas/Expression' + BooleanLiteral: + description: Represents boolean values + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + value: + type: boolean + DateTimeLiteral: + description: Represents an instant in time with nanosecond precision using the syntax of golang's RFC3339 Nanosecond variant + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + value: + type: string + format: date-time + DurationLiteral: + description: Represents the elapsed time between two instants as an int64 nanosecond count with syntax of golang's time.Duration + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + values: + description: Duration values + type: array + items: + $ref: '#/components/schemas/Duration' + FloatLiteral: + description: Represents floating point numbers according to the double representations defined by the IEEE-754-1985 + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + value: + type: number + IntegerLiteral: + description: Represents integer numbers + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + value: + type: string + PipeLiteral: + description: 'Represents a specialized literal value, indicating the left hand value of a pipe expression' + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + RegexpLiteral: + description: Expressions begin and end with `/` and are regular expressions with syntax accepted by RE2 + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + value: + type: string + StringLiteral: + description: Expressions begin and end with double quote marks + type: object + properties: + type: + $ref: 
'#/components/schemas/NodeType' + value: + type: string + UnsignedIntegerLiteral: + description: Represents integer numbers + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + value: + type: string + Duration: + description: A pair consisting of length of time and the unit of time measured. It is the atomic unit from which all duration literals are composed. + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + magnitude: + type: integer + unit: + type: string + Property: + description: The value associated with a key + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + key: + $ref: '#/components/schemas/PropertyKey' + value: + $ref: '#/components/schemas/Expression' + PropertyKey: + oneOf: + - $ref: '#/components/schemas/Identifier' + - $ref: '#/components/schemas/StringLiteral' + Identifier: + description: A valid Flux identifier + type: object + properties: + type: + $ref: '#/components/schemas/NodeType' + name: + type: string + Dialect: + description: 'Dialect are options to change the default CSV output format; https://www.w3.org/TR/2015/REC-tabular-metadata-20151217/#dialect-descriptions' + type: object + properties: + header: + description: 'If true, the results will contain a header row' + type: boolean + default: true + delimiter: + description: 'Separator between cells; the default is ,' + type: string + default: ',' + maxLength: 1 + minLength: 1 + annotations: + description: 'https://www.w3.org/TR/2015/REC-tabular-data-model-20151217/#columns' + type: array + uniqueItems: true + items: + type: string + enum: + - group + - datatype + - default + commentPrefix: + description: Character prefixed to comment strings + type: string + default: '#' + maxLength: 1 + minLength: 0 + dateTimeFormat: + description: Format of timestamps + type: string + default: RFC3339 + enum: + - RFC3339 + - RFC3339Nano + AuthorizationUpdateRequest: + properties: + status: + description: 'Status of the token. If `inactive`, requests using the token will be rejected.' + default: active + type: string + enum: + - active + - inactive + description: + type: string + description: A description of the token. + PostBucketRequest: + properties: + orgID: + type: string + name: + type: string + description: + type: string + rp: + type: string + retentionRules: + $ref: '#/components/schemas/RetentionRules' + schemaType: + $ref: '#/components/schemas/SchemaType' + default: implicit + required: + - orgID + - name + - retentionRules + Bucket: + properties: + links: + type: object + readOnly: true + example: + labels: /api/v2/buckets/1/labels + members: /api/v2/buckets/1/members + org: /api/v2/orgs/2 + owners: /api/v2/buckets/1/owners + self: /api/v2/buckets/1 + write: /api/v2/write?org=2&bucket=1 + properties: + labels: + description: URL to retrieve labels for this bucket. + $ref: '#/components/schemas/Link' + members: + description: URL to retrieve members that can read this bucket. + $ref: '#/components/schemas/Link' + org: + description: URL to retrieve parent organization for this bucket. + $ref: '#/components/schemas/Link' + owners: + description: URL to retrieve owners that can read and write to this bucket. + $ref: '#/components/schemas/Link' + self: + description: URL for this bucket. + $ref: '#/components/schemas/Link' + write: + description: URL to write line protocol to this bucket. 
+ $ref: '#/components/schemas/Link' + id: + readOnly: true + type: string + type: + readOnly: true + type: string + default: user + enum: + - user + - system + name: + type: string + description: + type: string + orgID: + type: string + rp: + type: string + schemaType: + $ref: '#/components/schemas/SchemaType' + default: implicit + createdAt: + type: string + format: date-time + readOnly: true + updatedAt: + type: string + format: date-time + readOnly: true + retentionRules: + $ref: '#/components/schemas/RetentionRules' + labels: + $ref: '#/components/schemas/Labels' + required: + - name + - retentionRules + Buckets: + type: object + properties: + links: + readOnly: true + $ref: '#/components/schemas/Links' + buckets: + type: array + items: + $ref: '#/components/schemas/Bucket' + RetentionRules: + type: array + description: Rules to expire or retain data. No rules means data never expires. + items: + $ref: '#/components/schemas/RetentionRule' + PatchBucketRequest: + type: object + description: Updates to an existing bucket resource. + properties: + name: + type: string + description: + type: string + retentionRules: + $ref: '#/components/schemas/PatchRetentionRules' + PatchRetentionRules: + type: array + description: Updates to rules to expire or retain data. No rules means no updates. + items: + $ref: '#/components/schemas/PatchRetentionRule' + PatchRetentionRule: + type: object + description: Updates to a rule to expire or retain data. + properties: + type: + type: string + default: expire + enum: + - expire + everySeconds: + type: integer + format: int64 + description: Duration in seconds for how long data will be kept in the database. 0 means infinite. + example: 86400 + minimum: 0 + shardGroupDurationSeconds: + type: integer + format: int64 + description: Shard duration measured in seconds. + required: + - type + RetentionRule: + type: object + properties: + type: + type: string + default: expire + enum: + - expire + everySeconds: + type: integer + format: int64 + description: Duration in seconds for how long data will be kept in the database. 0 means infinite. + example: 86400 + minimum: 0 + shardGroupDurationSeconds: + type: integer + format: int64 + description: Shard duration measured in seconds. + required: + - type + - everySeconds + Link: + type: string + format: uri + readOnly: true + description: URI of resource. + Links: + type: object + properties: + next: + $ref: '#/components/schemas/Link' + self: + $ref: '#/components/schemas/Link' + prev: + $ref: '#/components/schemas/Link' + required: + - self + Logs: + type: object + properties: + events: + readOnly: true + type: array + items: + $ref: '#/components/schemas/LogEvent' + LogEvent: + type: object + properties: + time: + readOnly: true + description: 'Time event occurred, RFC3339Nano.' + type: string + format: date-time + message: + readOnly: true + description: A description of the event that occurred. 
+ type: string + example: Halt and catch fire + runID: + readOnly: true + description: the ID of the task that logged + type: string + Organization: + properties: + links: + type: object + readOnly: true + example: + self: /api/v2/orgs/1 + members: /api/v2/orgs/1/members + owners: /api/v2/orgs/1/owners + labels: /api/v2/orgs/1/labels + secrets: /api/v2/orgs/1/secrets + buckets: /api/v2/buckets?org=myorg + tasks: /api/v2/tasks?org=myorg + dashboards: /api/v2/dashboards?org=myorg + properties: + self: + $ref: '#/components/schemas/Link' + members: + $ref: '#/components/schemas/Link' + owners: + $ref: '#/components/schemas/Link' + labels: + $ref: '#/components/schemas/Link' + secrets: + $ref: '#/components/schemas/Link' + buckets: + $ref: '#/components/schemas/Link' + tasks: + $ref: '#/components/schemas/Link' + dashboards: + $ref: '#/components/schemas/Link' + id: + readOnly: true + type: string + name: + type: string + description: + type: string + createdAt: + type: string + format: date-time + readOnly: true + updatedAt: + type: string + format: date-time + readOnly: true + status: + description: If inactive the organization is inactive. + default: active + type: string + enum: + - active + - inactive + required: + - name + Organizations: + type: object + properties: + links: + $ref: '#/components/schemas/Links' + orgs: + type: array + items: + $ref: '#/components/schemas/Organization' + PostOrganizationRequest: + type: object + properties: + name: + type: string + description: + type: string + required: + - name + PatchOrganizationRequest: + type: object + properties: + name: + type: string + description: New name to set on the organization + description: + type: string + description: New description to set on the organization + TemplateApply: + type: object + properties: + dryRun: + type: boolean + orgID: + type: string + stackID: + type: string + template: + type: object + properties: + contentType: + type: string + sources: + type: array + items: + type: string + contents: + $ref: '#/components/schemas/Template' + templates: + type: array + items: + type: object + properties: + contentType: + type: string + sources: + type: array + items: + type: string + contents: + $ref: '#/components/schemas/Template' + envRefs: + type: object + additionalProperties: + oneOf: + - type: string + - type: integer + - type: number + - type: boolean + secrets: + type: object + additionalProperties: + type: string + remotes: + type: array + items: + type: object + properties: + url: + type: string + contentType: + type: string + required: + - url + actions: + type: array + items: + oneOf: + - type: object + properties: + action: + type: string + enum: + - skipKind + properties: + type: object + properties: + kind: + $ref: '#/components/schemas/TemplateKind' + required: + - kind + - type: object + properties: + action: + type: string + enum: + - skipResource + properties: + type: object + properties: + kind: + $ref: '#/components/schemas/TemplateKind' + resourceTemplateName: + type: string + required: + - kind + - resourceTemplateName + TemplateKind: + type: string + enum: + - Bucket + - Check + - CheckDeadman + - CheckThreshold + - Dashboard + - Label + - NotificationEndpoint + - NotificationEndpointHTTP + - NotificationEndpointPagerDuty + - NotificationEndpointSlack + - NotificationRule + - Task + - Telegraf + - Variable + TemplateExportByID: + type: object + properties: + stackID: + type: string + orgIDs: + type: array + items: + type: object + properties: + orgID: + type: string + resourceFilters: + 
type: object
+ properties:
+ byLabel:
+ type: array
+ items:
+ type: string
+ byResourceKind:
+ type: array
+ items:
+ $ref: '#/components/schemas/TemplateKind'
+ resources:
+ type: array
+ items:
+ type: object
+ properties:
+ id:
+ type: string
+ kind:
+ $ref: '#/components/schemas/TemplateKind'
+ name:
+ type: string
+ description: 'if defined with id, name is used for resource exported by id. if defined independently, resources strictly matching name are exported'
+ required:
+ - id
+ - kind
+ TemplateExportByName:
+ type: object
+ properties:
+ stackID:
+ type: string
+ orgIDs:
+ type: array
+ items:
+ type: object
+ properties:
+ orgID:
+ type: string
+ resourceFilters:
+ type: object
+ properties:
+ byLabel:
+ type: array
+ items:
+ type: string
+ byResourceKind:
+ type: array
+ items:
+ $ref: '#/components/schemas/TemplateKind'
+ resources:
+ type: array
+ items:
+ type: object
+ properties:
+ kind:
+ $ref: '#/components/schemas/TemplateKind'
+ name:
+ type: string
+ required:
+ - name
+ - kind
+ Template:
+ type: array
+ items:
+ type: object
+ properties:
+ apiVersion:
+ type: string
+ kind:
+ $ref: '#/components/schemas/TemplateKind'
+ meta:
+ type: object
+ properties:
+ name:
+ type: string
+ spec:
+ type: object
+ TemplateEnvReferences:
+ type: array
+ items:
+ type: object
+ properties:
+ resourceField:
+ type: string
+ description: Field the environment reference corresponds to
+ envRefKey:
+ type: string
+ description: Key identified as environment reference and is the key identified in the template
+ value:
+ description: Value provided to fulfill reference
+ nullable: true
+ oneOf:
+ - type: string
+ - type: integer
+ - type: number
+ - type: boolean
+ defaultValue:
+ description: Default value that will be provided for the reference when no value is provided
+ nullable: true
+ oneOf:
+ - type: string
+ - type: integer
+ - type: number
+ - type: boolean
+ required:
+ - resourceField
+ - envRefKey
+ TemplateSummary:
+ type: object
+ properties:
+ sources:
+ type: array
+ items:
+ type: string
+ stackID:
+ type: string
+ summary:
+ type: object
+ properties:
+ buckets:
+ type: array
+ items:
+ type: object
+ properties:
+ id:
+ type: string
+ orgID:
+ type: string
+ kind:
+ $ref: '#/components/schemas/TemplateKind'
+ templateMetaName:
+ type: string
+ name:
+ type: string
+ description:
+ type: string
+ retentionPeriod:
+ type: integer
+ labelAssociations:
+ type: array
+ items:
+ $ref: '#/components/schemas/TemplateSummaryLabel'
+ envReferences:
+ $ref: '#/components/schemas/TemplateEnvReferences'
+ checks:
+ type: array
+ items:
+ allOf:
+ - $ref: '#/components/schemas/CheckDiscriminator'
+ - type: object
+ properties:
+ kind:
+ $ref: '#/components/schemas/TemplateKind'
+ templateMetaName:
+ type: string
+ labelAssociations:
+ type: array
+ items:
+ $ref: '#/components/schemas/TemplateSummaryLabel'
+ envReferences:
+ $ref: '#/components/schemas/TemplateEnvReferences'
+ dashboards:
+ type: array
+ items:
+ type: object
+ properties:
+ id:
+ type: string
+ orgID:
+ type: string
+ kind:
+ $ref: '#/components/schemas/TemplateKind'
+ templateMetaName:
+ type: string
+ name:
+ type: string
+ description:
+ type: string
+ labelAssociations:
+ type: array
+ items:
+ $ref: '#/components/schemas/TemplateSummaryLabel'
+ charts:
+ type: array
+ items:
+ $ref: '#/components/schemas/TemplateChart'
+ envReferences:
+ $ref: '#/components/schemas/TemplateEnvReferences'
+ labels:
+ type: array
+ items:
+ $ref: '#/components/schemas/TemplateSummaryLabel'
+ labelMappings:
+ type: array
+
items: + type: object + properties: + status: + type: string + resourceTemplateMetaName: + type: string + resourceName: + type: string + resourceID: + type: string + resourceType: + type: string + labelTemplateMetaName: + type: string + labelName: + type: string + labelID: + type: string + missingEnvRefs: + type: array + items: + type: string + missingSecrets: + type: array + items: + type: string + notificationEndpoints: + type: array + items: + allOf: + - $ref: '#/components/schemas/NotificationEndpointDiscriminator' + - type: object + properties: + kind: + $ref: '#/components/schemas/TemplateKind' + templateMetaName: + type: string + labelAssociations: + type: array + items: + $ref: '#/components/schemas/TemplateSummaryLabel' + envReferences: + $ref: '#/components/schemas/TemplateEnvReferences' + notificationRules: + type: array + items: + type: object + properties: + kind: + $ref: '#/components/schemas/TemplateKind' + templateMetaName: + type: string + name: + type: string + description: + type: string + endpointTemplateMetaName: + type: string + endpointID: + type: string + endpointType: + type: string + every: + type: string + offset: + type: string + messageTemplate: + type: string + status: + type: string + statusRules: + type: array + items: + type: object + properties: + currentLevel: + type: string + previousLevel: + type: string + tagRules: + type: array + items: + type: object + properties: + key: + type: string + value: + type: string + operator: + type: string + labelAssociations: + type: array + items: + $ref: '#/components/schemas/TemplateSummaryLabel' + envReferences: + $ref: '#/components/schemas/TemplateEnvReferences' + tasks: + type: array + items: + type: object + properties: + kind: + $ref: '#/components/schemas/TemplateKind' + templateMetaName: + type: string + id: + type: string + name: + type: string + cron: + type: string + description: + type: string + every: + type: string + offset: + type: string + query: + type: string + status: + type: string + envReferences: + $ref: '#/components/schemas/TemplateEnvReferences' + telegrafConfigs: + type: array + items: + allOf: + - $ref: '#/components/schemas/TelegrafRequest' + - type: object + properties: + kind: + $ref: '#/components/schemas/TemplateKind' + templateMetaName: + type: string + labelAssociations: + type: array + items: + $ref: '#/components/schemas/TemplateSummaryLabel' + envReferences: + $ref: '#/components/schemas/TemplateEnvReferences' + variables: + type: array + items: + type: object + properties: + kind: + $ref: '#/components/schemas/TemplateKind' + templateMetaName: + type: string + id: + type: string + orgID: + type: string + name: + type: string + description: + type: string + arguments: + $ref: '#/components/schemas/VariableProperties' + labelAssociations: + type: array + items: + $ref: '#/components/schemas/TemplateSummaryLabel' + envReferences: + $ref: '#/components/schemas/TemplateEnvReferences' + diff: + type: object + properties: + buckets: + type: array + items: + type: object + properties: + kind: + $ref: '#/components/schemas/TemplateKind' + stateStatus: + type: string + id: + type: string + templateMetaName: + type: string + new: + type: object + properties: + name: + type: string + description: + type: string + retentionRules: + $ref: '#/components/schemas/RetentionRules' + old: + type: object + properties: + name: + type: string + description: + type: string + retentionRules: + $ref: '#/components/schemas/RetentionRules' + checks: + type: array + items: + type: object + properties: + 
kind: + $ref: '#/components/schemas/TemplateKind' + stateStatus: + type: string + id: + type: string + templateMetaName: + type: string + new: + $ref: '#/components/schemas/CheckDiscriminator' + old: + $ref: '#/components/schemas/CheckDiscriminator' + dashboards: + type: array + items: + type: object + properties: + stateStatus: + type: string + id: + type: string + kind: + $ref: '#/components/schemas/TemplateKind' + templateMetaName: + type: string + new: + type: object + properties: + name: + type: string + description: + type: string + charts: + type: array + items: + $ref: '#/components/schemas/TemplateChart' + old: + type: object + properties: + name: + type: string + description: + type: string + charts: + type: array + items: + $ref: '#/components/schemas/TemplateChart' + labels: + type: array + items: + type: object + properties: + stateStatus: + type: string + kind: + $ref: '#/components/schemas/TemplateKind' + id: + type: string + templateMetaName: + type: string + new: + type: object + properties: + name: + type: string + color: + type: string + description: + type: string + old: + type: object + properties: + name: + type: string + color: + type: string + description: + type: string + labelMappings: + type: array + items: + type: object + properties: + status: + type: string + resourceType: + type: string + resourceID: + type: string + resourceTemplateMetaName: + type: string + resourceName: + type: string + labelID: + type: string + labelTemplateMetaName: + type: string + labelName: + type: string + notificationEndpoints: + type: array + items: + type: object + properties: + kind: + $ref: '#/components/schemas/TemplateKind' + stateStatus: + type: string + id: + type: string + templateMetaName: + type: string + new: + $ref: '#/components/schemas/NotificationEndpointDiscriminator' + old: + $ref: '#/components/schemas/NotificationEndpointDiscriminator' + notificationRules: + type: array + items: + type: object + properties: + kind: + $ref: '#/components/schemas/TemplateKind' + stateStatus: + type: string + id: + type: string + templateMetaName: + type: string + new: + type: object + properties: + name: + type: string + description: + type: string + endpointName: + type: string + endpointID: + type: string + endpointType: + type: string + every: + type: string + offset: + type: string + messageTemplate: + type: string + status: + type: string + statusRules: + type: array + items: + type: object + properties: + currentLevel: + type: string + previousLevel: + type: string + tagRules: + type: array + items: + type: object + properties: + key: + type: string + value: + type: string + operator: + type: string + old: + type: object + properties: + name: + type: string + description: + type: string + endpointName: + type: string + endpointID: + type: string + endpointType: + type: string + every: + type: string + offset: + type: string + messageTemplate: + type: string + status: + type: string + statusRules: + type: array + items: + type: object + properties: + currentLevel: + type: string + previousLevel: + type: string + tagRules: + type: array + items: + type: object + properties: + key: + type: string + value: + type: string + operator: + type: string + tasks: + type: array + items: + type: object + properties: + kind: + $ref: '#/components/schemas/TemplateKind' + stateStatus: + type: string + id: + type: string + templateMetaName: + type: string + new: + type: object + properties: + name: + type: string + cron: + type: string + description: + type: string + every: + type: string + 
offset: + type: string + query: + type: string + status: + type: string + old: + type: object + properties: + name: + type: string + cron: + type: string + description: + type: string + every: + type: string + offset: + type: string + query: + type: string + status: + type: string + telegrafConfigs: + type: array + items: + type: object + properties: + kind: + $ref: '#/components/schemas/TemplateKind' + stateStatus: + type: string + id: + type: string + templateMetaName: + type: string + new: + $ref: '#/components/schemas/TelegrafRequest' + old: + $ref: '#/components/schemas/TelegrafRequest' + variables: + type: array + items: + type: object + properties: + kind: + $ref: '#/components/schemas/TemplateKind' + stateStatus: + type: string + id: + type: string + templateMetaName: + type: string + new: + type: object + properties: + name: + type: string + description: + type: string + args: + $ref: '#/components/schemas/VariableProperties' + old: + type: object + properties: + name: + type: string + description: + type: string + args: + $ref: '#/components/schemas/VariableProperties' + errors: + type: array + items: + type: object + properties: + kind: + $ref: '#/components/schemas/TemplateKind' + reason: + type: string + fields: + type: array + items: + type: string + indexes: + type: array + items: + type: integer + TemplateSummaryLabel: + type: object + properties: + id: + type: string + orgID: + type: string + kind: + $ref: '#/components/schemas/TemplateKind' + templateMetaName: + type: string + name: + type: string + properties: + type: object + properties: + color: + type: string + description: + type: string + envReferences: + $ref: '#/components/schemas/TemplateEnvReferences' + TemplateChart: + type: object + properties: + xPos: + type: integer + yPos: + type: integer + height: + type: integer + width: + type: integer + properties: + $ref: '#/components/schemas/ViewProperties' + Stack: + type: object + properties: + id: + type: string + orgID: + type: string + createdAt: + type: string + format: date-time + readOnly: true + events: + type: array + items: + type: object + properties: + eventType: + type: string + name: + type: string + description: + type: string + sources: + type: array + items: + type: string + resources: + type: array + items: + type: object + properties: + apiVersion: + type: string + resourceID: + type: string + kind: + $ref: '#/components/schemas/TemplateKind' + templateMetaName: + type: string + associations: + type: array + items: + type: object + properties: + kind: + $ref: '#/components/schemas/TemplateKind' + metaName: + type: string + links: + type: object + properties: + self: + type: string + urls: + type: array + items: + type: string + updatedAt: + type: string + format: date-time + readOnly: true + Runs: + type: object + properties: + links: + $ref: '#/components/schemas/Links' + runs: + type: array + items: + $ref: '#/components/schemas/Run' + Run: + properties: + id: + readOnly: true + type: string + taskID: + readOnly: true + type: string + status: + readOnly: true + type: string + enum: + - scheduled + - started + - failed + - success + - canceled + scheduledFor: + description: 'Time used for run''s "now" option, RFC3339.' + type: string + format: date-time + log: + description: An array of logs associated with the run. + type: array + readOnly: true + items: + $ref: '#/components/schemas/LogEvent' + startedAt: + readOnly: true + description: 'Time run started executing, RFC3339Nano.' 
+ type: string + format: date-time + finishedAt: + readOnly: true + description: 'Time run finished executing, RFC3339Nano.' + type: string + format: date-time + requestedAt: + readOnly: true + description: 'Time run was manually requested, RFC3339Nano.' + type: string + format: date-time + links: + type: object + readOnly: true + example: + self: /api/v2/tasks/1/runs/1 + task: /api/v2/tasks/1 + retry: /api/v2/tasks/1/runs/1/retry + properties: + self: + type: string + format: uri + task: + type: string + format: uri + retry: + type: string + format: uri + RunManually: + properties: + scheduledFor: + nullable: true + description: 'Time used for run''s "now" option, RFC3339. Default is the server''s now time.' + type: string + format: date-time + Tasks: + type: object + properties: + links: + readOnly: true + $ref: '#/components/schemas/Links' + tasks: + type: array + items: + $ref: '#/components/schemas/Task' + Task: + type: object + properties: + id: + readOnly: true + type: string + type: + description: 'Type of the task, useful for filtering a task list.' + type: string + orgID: + description: ID of the organization that owns the task. + type: string + org: + description: Name of the organization that owns the task. + type: string + name: + description: Name of the task. + type: string + ownerID: + description: ID of the user who owns this Task. + type: string + description: + description: Description of the task. + type: string + status: + $ref: '#/components/schemas/TaskStatusType' + labels: + $ref: '#/components/schemas/Labels' + authorizationID: + description: ID of the authorization used when the task communicates with the query engine. + type: string + flux: + description: Flux script to run for this task. + type: string + every: + description: |- + Interval at which the task runs. `every` also determines when the task first runs, depending on the specified time. + Value is a [duration literal](https://docs.influxdata.com/flux/v0.x/spec/lexical-elements/#duration-literals)). + type: string + format: duration + cron: + description: |- + [Cron expression](https://en.wikipedia.org/wiki/Cron#Overview) that defines the schedule on which the task runs. Cron scheduling is based on system time. + Value is a [Cron expression](https://en.wikipedia.org/wiki/Cron#Overview). + type: string + offset: + description: |- + [Duration](https://docs.influxdata.com/flux/v0.x/spec/lexical-elements/#duration-literals) to delay execution of the task after the scheduled time has elapsed. `0` removes the offset. + The value is a [duration literal](https://docs.influxdata.com/flux/v0.x/spec/lexical-elements/#duration-literals). + type: string + format: duration + latestCompleted: + description: |- + Timestamp of the latest scheduled and completed run. + Value is a timestamp in [RFC3339 date/time format](https://docs.influxdata.com/flux/v0.x/data-types/basic/time/#time-syntax). 
+ type: string + format: date-time + readOnly: true + lastRunStatus: + readOnly: true + type: string + enum: + - failed + - success + - canceled + lastRunError: + readOnly: true + type: string + createdAt: + type: string + format: date-time + readOnly: true + updatedAt: + type: string + format: date-time + readOnly: true + links: + type: object + readOnly: true + example: + self: /api/v2/tasks/1 + owners: /api/v2/tasks/1/owners + members: /api/v2/tasks/1/members + labels: /api/v2/tasks/1/labels + runs: /api/v2/tasks/1/runs + logs: /api/v2/tasks/1/logs + properties: + self: + $ref: '#/components/schemas/Link' + owners: + $ref: '#/components/schemas/Link' + members: + $ref: '#/components/schemas/Link' + runs: + $ref: '#/components/schemas/Link' + logs: + $ref: '#/components/schemas/Link' + labels: + $ref: '#/components/schemas/Link' + required: + - id + - name + - orgID + - flux + TaskStatusType: + type: string + enum: + - active + - inactive + UserResponse: + properties: + id: + readOnly: true + type: string + oauthID: + type: string + name: + type: string + status: + description: If inactive the user is inactive. + default: active + type: string + enum: + - active + - inactive + links: + type: object + readOnly: true + example: + self: /api/v2/users/1 + properties: + self: + type: string + format: uri + required: + - name + Flags: + type: object + additionalProperties: true + ResourceMember: + allOf: + - $ref: '#/components/schemas/UserResponse' + - type: object + properties: + role: + type: string + default: member + enum: + - member + ResourceMembers: + type: object + properties: + links: + type: object + properties: + self: + type: string + format: uri + users: + type: array + items: + $ref: '#/components/schemas/ResourceMember' + ResourceOwner: + allOf: + - $ref: '#/components/schemas/UserResponse' + - type: object + properties: + role: + type: string + default: owner + enum: + - owner + ResourceOwners: + type: object + properties: + links: + type: object + properties: + self: + type: string + format: uri + users: + type: array + items: + $ref: '#/components/schemas/ResourceOwner' + FluxSuggestions: + type: object + properties: + funcs: + type: array + items: + $ref: '#/components/schemas/FluxSuggestion' + FluxSuggestion: + type: object + properties: + name: + type: string + params: + type: object + additionalProperties: + type: string + Routes: + properties: + authorizations: + type: string + format: uri + buckets: + type: string + format: uri + dashboards: + type: string + format: uri + external: + type: object + properties: + statusFeed: + type: string + format: uri + variables: + type: string + format: uri + me: + type: string + format: uri + flags: + type: string + format: uri + orgs: + type: string + format: uri + query: + type: object + properties: + self: + type: string + format: uri + ast: + type: string + format: uri + analyze: + type: string + format: uri + suggestions: + type: string + format: uri + setup: + type: string + format: uri + signin: + type: string + format: uri + signout: + type: string + format: uri + sources: + type: string + format: uri + system: + type: object + properties: + metrics: + type: string + format: uri + debug: + type: string + format: uri + health: + type: string + format: uri + tasks: + type: string + format: uri + telegrafs: + type: string + format: uri + users: + type: string + format: uri + write: + type: string + format: uri + Error: + properties: + code: + description: code is the machine-readable error code. 
+ readOnly: true + type: string + enum: + - internal error + - not found + - conflict + - invalid + - unprocessable entity + - empty value + - unavailable + - forbidden + - too many requests + - unauthorized + - method not allowed + - request too large + - unsupported media type + message: + readOnly: true + description: Human-readable message. + type: string + op: + readOnly: true + description: Describes the logical code operation when the error occurred. Useful for debugging. + type: string + err: + readOnly: true + description: Stack of errors that occurred during processing of the request. Useful for debugging. + type: string + required: + - code + LineProtocolError: + properties: + code: + description: Code is the machine-readable error code. + readOnly: true + type: string + enum: + - internal error + - not found + - conflict + - invalid + - empty value + - unavailable + message: + readOnly: true + description: Human-readable message. + type: string + op: + readOnly: true + description: Describes the logical code operation when the error occurred. Useful for debugging. + type: string + err: + readOnly: true + description: Stack of errors that occurred during processing of the request. Useful for debugging. + type: string + line: + readOnly: true + description: First line in the request body that contains malformed data. + type: integer + format: int32 + required: + - code + LineProtocolLengthError: + properties: + code: + description: Code is the machine-readable error code. + readOnly: true + type: string + enum: + - invalid + message: + readOnly: true + description: Human-readable message. + type: string + required: + - code + - message + Field: + type: object + properties: + value: + description: value is the value of the field. Meaning of the value is implied by the `type` key + type: string + type: + description: '`type` describes the field type. `func` is a function. `field` is a field reference.' + type: string + enum: + - func + - field + - integer + - number + - regex + - wildcard + alias: + description: Alias overrides the field name in the returned response. Applies only if type is `func` + type: string + args: + description: Args are the arguments to the function + type: array + items: + $ref: '#/components/schemas/Field' + BuilderConfig: + type: object + properties: + buckets: + type: array + items: + type: string + tags: + type: array + items: + $ref: '#/components/schemas/BuilderTagsType' + functions: + type: array + items: + $ref: '#/components/schemas/BuilderFunctionsType' + aggregateWindow: + type: object + properties: + period: + type: string + fillValues: + type: boolean + BuilderTagsType: + type: object + properties: + key: + type: string + values: + type: array + items: + type: string + aggregateFunctionType: + $ref: '#/components/schemas/BuilderAggregateFunctionType' + BuilderAggregateFunctionType: + type: string + enum: + - filter + - group + BuilderFunctionsType: + type: object + properties: + name: + type: string + DashboardQuery: + type: object + properties: + text: + type: string + description: The text of the Flux query. + editMode: + $ref: '#/components/schemas/QueryEditMode' + name: + type: string + builderConfig: + $ref: '#/components/schemas/BuilderConfig' + QueryEditMode: + type: string + enum: + - builder + - advanced + Axis: + type: object + description: Axis used in a visualization. + properties: + bounds: + type: array + minItems: 0 + maxItems: 2 + description: 'The extents of the axis in the form [lower, upper]. 
Clients determine whether bounds are inclusive or exclusive of their limits.' + items: + type: string + label: + description: Description of the axis. + type: string + prefix: + description: Label prefix for formatting axis values. + type: string + suffix: + description: Label suffix for formatting axis values. + type: string + base: + description: Radix for formatting axis values. + type: string + enum: + - '' + - '2' + - '10' + scale: + $ref: '#/components/schemas/AxisScale' + AxisScale: + description: 'Scale is the axis formatting scale. Supported: "log", "linear"' + type: string + enum: + - log + - linear + DashboardColor: + type: object + description: Defines an encoding of data value into color space. + required: + - id + - type + - hex + - name + - value + properties: + id: + description: The unique ID of the view color. + type: string + type: + description: Type is how the color is used. + type: string + enum: + - min + - max + - threshold + - scale + - text + - background + hex: + description: The hex number of the color + type: string + maxLength: 7 + minLength: 7 + name: + description: The user-facing name of the hex color. + type: string + value: + description: The data value mapped to this color. + type: number + format: float + RenamableField: + description: Describes a field that can be renamed and made visible or invisible. + type: object + properties: + internalName: + description: The calculated name of a field. + readOnly: true + type: string + displayName: + description: The name that a field is renamed to by the user. + type: string + visible: + description: Indicates whether this field should be visible on the table. + type: boolean + XYViewProperties: + type: object + required: + - type + - geom + - queries + - shape + - axes + - colors + - note + - showNoteWhenEmpty + - position + properties: + timeFormat: + type: string + type: + type: string + enum: + - xy + queries: + type: array + items: + $ref: '#/components/schemas/DashboardQuery' + colors: + description: Colors define color encoding of data into a visualization + type: array + items: + $ref: '#/components/schemas/DashboardColor' + colorMapping: + description: An object that contains information about the color mapping + $ref: '#/components/schemas/ColorMapping' + shape: + type: string + enum: + - chronograf-v2 + note: + type: string + showNoteWhenEmpty: + description: 'If true, will display note when empty' + type: boolean + axes: + $ref: '#/components/schemas/Axes' + staticLegend: + $ref: '#/components/schemas/StaticLegend' + xColumn: + type: string + generateXAxisTicks: + type: array + items: + type: string + xTotalTicks: + type: integer + xTickStart: + type: number + format: float + xTickStep: + type: number + format: float + yColumn: + type: string + generateYAxisTicks: + type: array + items: + type: string + yTotalTicks: + type: integer + yTickStart: + type: number + format: float + yTickStep: + type: number + format: float + shadeBelow: + type: boolean + hoverDimension: + type: string + enum: + - auto + - x + - 'y' + - xy + position: + type: string + enum: + - overlaid + - stacked + geom: + $ref: '#/components/schemas/XYGeom' + legendColorizeRows: + type: boolean + legendHide: + type: boolean + legendOpacity: + type: number + format: float + legendOrientationThreshold: + type: integer + XYGeom: + type: string + enum: + - line + - step + - stacked + - bar + - monotoneX + BandViewProperties: + type: object + required: + - type + - geom + - queries + - shape + - axes + - colors + - note + - 
showNoteWhenEmpty + properties: + timeFormat: + type: string + type: + type: string + enum: + - band + queries: + type: array + items: + $ref: '#/components/schemas/DashboardQuery' + colors: + description: Colors define color encoding of data into a visualization + type: array + items: + $ref: '#/components/schemas/DashboardColor' + shape: + type: string + enum: + - chronograf-v2 + note: + type: string + showNoteWhenEmpty: + description: 'If true, will display note when empty' + type: boolean + axes: + $ref: '#/components/schemas/Axes' + staticLegend: + $ref: '#/components/schemas/StaticLegend' + xColumn: + type: string + generateXAxisTicks: + type: array + items: + type: string + xTotalTicks: + type: integer + xTickStart: + type: number + format: float + xTickStep: + type: number + format: float + yColumn: + type: string + generateYAxisTicks: + type: array + items: + type: string + yTotalTicks: + type: integer + yTickStart: + type: number + format: float + yTickStep: + type: number + format: float + upperColumn: + type: string + mainColumn: + type: string + lowerColumn: + type: string + hoverDimension: + type: string + enum: + - auto + - x + - 'y' + - xy + geom: + $ref: '#/components/schemas/XYGeom' + legendColorizeRows: + type: boolean + legendHide: + type: boolean + legendOpacity: + type: number + format: float + legendOrientationThreshold: + type: integer + LinePlusSingleStatProperties: + type: object + required: + - type + - queries + - shape + - axes + - colors + - note + - showNoteWhenEmpty + - prefix + - suffix + - decimalPlaces + - position + properties: + timeFormat: + type: string + type: + type: string + enum: + - line-plus-single-stat + queries: + type: array + items: + $ref: '#/components/schemas/DashboardQuery' + colors: + description: Colors define color encoding of data into a visualization + type: array + items: + $ref: '#/components/schemas/DashboardColor' + shape: + type: string + enum: + - chronograf-v2 + note: + type: string + showNoteWhenEmpty: + description: 'If true, will display note when empty' + type: boolean + axes: + $ref: '#/components/schemas/Axes' + staticLegend: + $ref: '#/components/schemas/StaticLegend' + xColumn: + type: string + generateXAxisTicks: + type: array + items: + type: string + xTotalTicks: + type: integer + xTickStart: + type: number + format: float + xTickStep: + type: number + format: float + yColumn: + type: string + generateYAxisTicks: + type: array + items: + type: string + yTotalTicks: + type: integer + yTickStart: + type: number + format: float + yTickStep: + type: number + format: float + shadeBelow: + type: boolean + hoverDimension: + type: string + enum: + - auto + - x + - 'y' + - xy + position: + type: string + enum: + - overlaid + - stacked + prefix: + type: string + suffix: + type: string + decimalPlaces: + $ref: '#/components/schemas/DecimalPlaces' + legendColorizeRows: + type: boolean + legendHide: + type: boolean + legendOpacity: + type: number + format: float + legendOrientationThreshold: + type: integer + MosaicViewProperties: + type: object + required: + - type + - queries + - colors + - shape + - note + - showNoteWhenEmpty + - xColumn + - ySeriesColumns + - fillColumns + - xDomain + - yDomain + - xAxisLabel + - yAxisLabel + - xPrefix + - yPrefix + - xSuffix + - ySuffix + properties: + timeFormat: + type: string + type: + type: string + enum: + - mosaic + queries: + type: array + items: + $ref: '#/components/schemas/DashboardQuery' + colors: + description: Colors define color encoding of data into a visualization + type: 
array + items: + type: string + shape: + type: string + enum: + - chronograf-v2 + note: + type: string + showNoteWhenEmpty: + description: 'If true, will display note when empty' + type: boolean + xColumn: + type: string + generateXAxisTicks: + type: array + items: + type: string + xTotalTicks: + type: integer + xTickStart: + type: number + format: float + xTickStep: + type: number + format: float + yLabelColumnSeparator: + type: string + yLabelColumns: + type: array + items: + type: string + ySeriesColumns: + type: array + items: + type: string + fillColumns: + type: array + items: + type: string + xDomain: + type: array + items: + type: number + maxItems: 2 + yDomain: + type: array + items: + type: number + maxItems: 2 + xAxisLabel: + type: string + yAxisLabel: + type: string + xPrefix: + type: string + xSuffix: + type: string + yPrefix: + type: string + ySuffix: + type: string + hoverDimension: + type: string + enum: + - auto + - x + - 'y' + - xy + legendColorizeRows: + type: boolean + legendHide: + type: boolean + legendOpacity: + type: number + format: float + legendOrientationThreshold: + type: integer + ScatterViewProperties: + type: object + required: + - type + - queries + - colors + - shape + - note + - showNoteWhenEmpty + - xColumn + - yColumn + - fillColumns + - symbolColumns + - xDomain + - yDomain + - xAxisLabel + - yAxisLabel + - xPrefix + - yPrefix + - xSuffix + - ySuffix + properties: + timeFormat: + type: string + type: + type: string + enum: + - scatter + queries: + type: array + items: + $ref: '#/components/schemas/DashboardQuery' + colors: + description: Colors define color encoding of data into a visualization + type: array + items: + type: string + shape: + type: string + enum: + - chronograf-v2 + note: + type: string + showNoteWhenEmpty: + description: 'If true, will display note when empty' + type: boolean + xColumn: + type: string + generateXAxisTicks: + type: array + items: + type: string + xTotalTicks: + type: integer + xTickStart: + type: number + format: float + xTickStep: + type: number + format: float + yColumn: + type: string + generateYAxisTicks: + type: array + items: + type: string + yTotalTicks: + type: integer + yTickStart: + type: number + format: float + yTickStep: + type: number + format: float + fillColumns: + type: array + items: + type: string + symbolColumns: + type: array + items: + type: string + xDomain: + type: array + items: + type: number + maxItems: 2 + yDomain: + type: array + items: + type: number + maxItems: 2 + xAxisLabel: + type: string + yAxisLabel: + type: string + xPrefix: + type: string + xSuffix: + type: string + yPrefix: + type: string + ySuffix: + type: string + legendColorizeRows: + type: boolean + legendHide: + type: boolean + legendOpacity: + type: number + format: float + legendOrientationThreshold: + type: integer + HeatmapViewProperties: + type: object + required: + - type + - queries + - colors + - shape + - note + - showNoteWhenEmpty + - xColumn + - yColumn + - xDomain + - yDomain + - xAxisLabel + - yAxisLabel + - xPrefix + - yPrefix + - xSuffix + - ySuffix + - binSize + properties: + timeFormat: + type: string + type: + type: string + enum: + - heatmap + queries: + type: array + items: + $ref: '#/components/schemas/DashboardQuery' + colors: + description: Colors define color encoding of data into a visualization + type: array + items: + type: string + shape: + type: string + enum: + - chronograf-v2 + note: + type: string + showNoteWhenEmpty: + description: 'If true, will display note when empty' + type: boolean + 
xColumn: + type: string + generateXAxisTicks: + type: array + items: + type: string + xTotalTicks: + type: integer + xTickStart: + type: number + format: float + xTickStep: + type: number + format: float + yColumn: + type: string + generateYAxisTicks: + type: array + items: + type: string + yTotalTicks: + type: integer + yTickStart: + type: number + format: float + yTickStep: + type: number + format: float + xDomain: + type: array + items: + type: number + maxItems: 2 + yDomain: + type: array + items: + type: number + maxItems: 2 + xAxisLabel: + type: string + yAxisLabel: + type: string + xPrefix: + type: string + xSuffix: + type: string + yPrefix: + type: string + ySuffix: + type: string + binSize: + type: number + legendColorizeRows: + type: boolean + legendHide: + type: boolean + legendOpacity: + type: number + format: float + legendOrientationThreshold: + type: integer + SingleStatViewProperties: + type: object + required: + - type + - queries + - colors + - shape + - note + - showNoteWhenEmpty + - prefix + - tickPrefix + - suffix + - tickSuffix + - decimalPlaces + properties: + type: + type: string + enum: + - single-stat + queries: + type: array + items: + $ref: '#/components/schemas/DashboardQuery' + colors: + description: Colors define color encoding of data into a visualization + type: array + items: + $ref: '#/components/schemas/DashboardColor' + shape: + type: string + enum: + - chronograf-v2 + note: + type: string + showNoteWhenEmpty: + description: 'If true, will display note when empty' + type: boolean + prefix: + type: string + tickPrefix: + type: string + suffix: + type: string + tickSuffix: + type: string + staticLegend: + $ref: '#/components/schemas/StaticLegend' + decimalPlaces: + $ref: '#/components/schemas/DecimalPlaces' + HistogramViewProperties: + type: object + required: + - type + - queries + - colors + - shape + - note + - showNoteWhenEmpty + - xColumn + - fillColumns + - xDomain + - xAxisLabel + - position + - binCount + properties: + type: + type: string + enum: + - histogram + queries: + type: array + items: + $ref: '#/components/schemas/DashboardQuery' + colors: + description: Colors define color encoding of data into a visualization + type: array + items: + $ref: '#/components/schemas/DashboardColor' + shape: + type: string + enum: + - chronograf-v2 + note: + type: string + showNoteWhenEmpty: + description: 'If true, will display note when empty' + type: boolean + xColumn: + type: string + fillColumns: + type: array + items: + type: string + xDomain: + type: array + items: + type: number + format: float + xAxisLabel: + type: string + position: + type: string + enum: + - overlaid + - stacked + binCount: + type: integer + legendColorizeRows: + type: boolean + legendHide: + type: boolean + legendOpacity: + type: number + format: float + legendOrientationThreshold: + type: integer + GaugeViewProperties: + type: object + required: + - type + - queries + - colors + - shape + - note + - showNoteWhenEmpty + - prefix + - tickPrefix + - suffix + - tickSuffix + - decimalPlaces + properties: + type: + type: string + enum: + - gauge + queries: + type: array + items: + $ref: '#/components/schemas/DashboardQuery' + colors: + description: Colors define color encoding of data into a visualization + type: array + items: + $ref: '#/components/schemas/DashboardColor' + shape: + type: string + enum: + - chronograf-v2 + note: + type: string + showNoteWhenEmpty: + description: 'If true, will display note when empty' + type: boolean + prefix: + type: string + tickPrefix: + type: 
string + suffix: + type: string + tickSuffix: + type: string + decimalPlaces: + $ref: '#/components/schemas/DecimalPlaces' + TableViewProperties: + type: object + required: + - type + - queries + - colors + - shape + - note + - showNoteWhenEmpty + - tableOptions + - fieldOptions + - timeFormat + - decimalPlaces + properties: + type: + type: string + enum: + - table + queries: + type: array + items: + $ref: '#/components/schemas/DashboardQuery' + colors: + description: Colors define color encoding of data into a visualization + type: array + items: + $ref: '#/components/schemas/DashboardColor' + shape: + type: string + enum: + - chronograf-v2 + note: + type: string + showNoteWhenEmpty: + description: 'If true, will display note when empty' + type: boolean + tableOptions: + type: object + properties: + verticalTimeAxis: + description: verticalTimeAxis describes the orientation of the table by indicating whether the time axis will be displayed vertically + type: boolean + sortBy: + $ref: '#/components/schemas/RenamableField' + wrapping: + description: Wrapping describes the text wrapping style to be used in table views + type: string + enum: + - truncate + - wrap + - single-line + fixFirstColumn: + description: fixFirstColumn indicates whether the first column of the table should be locked + type: boolean + fieldOptions: + description: fieldOptions represent the fields retrieved by the query with customization options + type: array + items: + $ref: '#/components/schemas/RenamableField' + timeFormat: + description: timeFormat describes the display format for time values according to moment.js date formatting + type: string + decimalPlaces: + $ref: '#/components/schemas/DecimalPlaces' + SimpleTableViewProperties: + type: object + required: + - type + - showAll + - queries + - shape + - note + - showNoteWhenEmpty + properties: + type: + type: string + enum: + - simple-table + showAll: + type: boolean + queries: + type: array + items: + $ref: '#/components/schemas/DashboardQuery' + shape: + type: string + enum: + - chronograf-v2 + note: + type: string + showNoteWhenEmpty: + description: 'If true, will display note when empty' + type: boolean + MarkdownViewProperties: + type: object + required: + - type + - shape + - note + properties: + type: + type: string + enum: + - markdown + shape: + type: string + enum: + - chronograf-v2 + note: + type: string + CheckViewProperties: + type: object + required: + - type + - shape + - checkID + - queries + - colors + properties: + type: + type: string + enum: + - check + shape: + type: string + enum: + - chronograf-v2 + checkID: + type: string + check: + $ref: '#/components/schemas/Check' + queries: + type: array + items: + $ref: '#/components/schemas/DashboardQuery' + colors: + description: Colors define color encoding of data into a visualization + type: array + items: + $ref: '#/components/schemas/DashboardColor' + legendColorizeRows: + type: boolean + legendHide: + type: boolean + legendOpacity: + type: number + format: float + legendOrientationThreshold: + type: integer + GeoViewLayer: + type: object + oneOf: + - $ref: '#/components/schemas/GeoCircleViewLayer' + - $ref: '#/components/schemas/GeoHeatMapViewLayer' + - $ref: '#/components/schemas/GeoPointMapViewLayer' + - $ref: '#/components/schemas/GeoTrackMapViewLayer' + GeoViewLayerProperties: + type: object + required: + - type + properties: + type: + type: string + enum: + - heatmap + - circleMap + - pointMap + - trackMap + GeoCircleViewLayer: + allOf: + - $ref: 
'#/components/schemas/GeoViewLayerProperties' + - type: object + required: + - radiusField + - radiusDimension + - colorField + - colorDimension + - colors + properties: + radiusField: + type: string + description: Radius field + radiusDimension: + $ref: '#/components/schemas/Axis' + colorField: + type: string + description: Circle color field + colorDimension: + $ref: '#/components/schemas/Axis' + colors: + description: Colors define color encoding of data into a visualization + type: array + items: + $ref: '#/components/schemas/DashboardColor' + radius: + description: Maximum radius size in pixels + type: integer + interpolateColors: + description: Interpolate circle color based on displayed value + type: boolean + GeoPointMapViewLayer: + allOf: + - $ref: '#/components/schemas/GeoViewLayerProperties' + - type: object + required: + - colorField + - colorDimension + - colors + properties: + colorField: + type: string + description: Marker color field + colorDimension: + $ref: '#/components/schemas/Axis' + colors: + description: Colors define color encoding of data into a visualization + type: array + items: + $ref: '#/components/schemas/DashboardColor' + isClustered: + description: Cluster close markers together + type: boolean + tooltipColumns: + description: An array for which columns to display in tooltip + type: array + items: + type: string + GeoTrackMapViewLayer: + allOf: + - $ref: '#/components/schemas/GeoViewLayerProperties' + - type: object + required: + - trackWidth + - speed + - randomColors + - trackPointVisualization + properties: + trackWidth: + description: Width of the track + type: integer + speed: + description: Speed of the track animation + type: integer + randomColors: + description: Assign different colors to different tracks + type: boolean + colors: + description: Colors define color encoding of data into a visualization + type: array + items: + $ref: '#/components/schemas/DashboardColor' + GeoHeatMapViewLayer: + allOf: + - $ref: '#/components/schemas/GeoViewLayerProperties' + - type: object + required: + - intensityField + - intensityDimension + - radius + - blur + - colors + properties: + intensityField: + type: string + description: Intensity field + intensityDimension: + $ref: '#/components/schemas/Axis' + radius: + description: Radius size in pixels + type: integer + blur: + description: Blur for heatmap points + type: integer + colors: + description: Colors define color encoding of data into a visualization + type: array + items: + $ref: '#/components/schemas/DashboardColor' + GeoViewProperties: + type: object + required: + - type + - shape + - queries + - note + - showNoteWhenEmpty + - center + - zoom + - allowPanAndZoom + - detectCoordinateFields + - layers + properties: + type: + type: string + enum: + - geo + queries: + type: array + items: + $ref: '#/components/schemas/DashboardQuery' + shape: + type: string + enum: + - chronograf-v2 + center: + description: Coordinates of the center of the map + type: object + required: + - lat + - lon + properties: + lat: + description: Latitude of the center of the map + type: number + format: double + lon: + description: Longitude of the center of the map + type: number + format: double + zoom: + description: Zoom level used for initial display of the map + type: number + format: double + minimum: 1 + maximum: 28 + allowPanAndZoom: + description: 'If true, map zoom and pan controls are enabled on the dashboard view' + type: boolean + default: true + detectCoordinateFields: + description: 'If true, search results get 
automatically regroupped so that lon,lat and value are treated as columns' + type: boolean + default: true + useS2CellID: + description: 'If true, S2 column is used to calculate lat/lon' + type: boolean + s2Column: + description: String to define the column + type: string + latLonColumns: + $ref: '#/components/schemas/LatLonColumns' + mapStyle: + description: 'Define map type - regular, satellite etc.' + type: string + note: + type: string + showNoteWhenEmpty: + description: 'If true, will display note when empty' + type: boolean + colors: + description: Colors define color encoding of data into a visualization + type: array + items: + $ref: '#/components/schemas/DashboardColor' + layers: + description: List of individual layers shown in the map + type: array + items: + $ref: '#/components/schemas/GeoViewLayer' + LatLonColumns: + description: Object type to define lat/lon columns + type: object + required: + - lat + - lon + properties: + lat: + $ref: '#/components/schemas/LatLonColumn' + lon: + $ref: '#/components/schemas/LatLonColumn' + LatLonColumn: + description: Object type for key and column definitions + type: object + required: + - key + - column + properties: + key: + description: Key to determine whether the column is tag/field + type: string + column: + description: Column to look up Lat/Lon + type: string + Axes: + description: The viewport for a View's visualizations + type: object + required: + - x + - 'y' + properties: + x: + $ref: '#/components/schemas/Axis' + 'y': + $ref: '#/components/schemas/Axis' + StaticLegend: + description: StaticLegend represents the options specific to the static legend + type: object + properties: + colorizeRows: + type: boolean + heightRatio: + type: number + format: float + show: + type: boolean + opacity: + type: number + format: float + orientationThreshold: + type: integer + valueAxis: + type: string + widthRatio: + type: number + format: float + DecimalPlaces: + description: 'Indicates whether decimal places should be enforced, and how many digits it should show.' 
+ type: object + properties: + isEnforced: + description: Indicates whether decimal point setting should be enforced + type: boolean + digits: + description: The number of digits after decimal to display + type: integer + format: int32 + ConstantVariableProperties: + properties: + type: + type: string + enum: + - constant + values: + type: array + items: + type: string + MapVariableProperties: + properties: + type: + type: string + enum: + - map + values: + type: object + additionalProperties: + type: string + QueryVariableProperties: + properties: + type: + type: string + enum: + - query + values: + type: object + properties: + query: + type: string + language: + type: string + VariableProperties: + type: object + oneOf: + - $ref: '#/components/schemas/QueryVariableProperties' + - $ref: '#/components/schemas/ConstantVariableProperties' + - $ref: '#/components/schemas/MapVariableProperties' + ViewProperties: + oneOf: + - $ref: '#/components/schemas/LinePlusSingleStatProperties' + - $ref: '#/components/schemas/XYViewProperties' + - $ref: '#/components/schemas/SingleStatViewProperties' + - $ref: '#/components/schemas/HistogramViewProperties' + - $ref: '#/components/schemas/GaugeViewProperties' + - $ref: '#/components/schemas/TableViewProperties' + - $ref: '#/components/schemas/SimpleTableViewProperties' + - $ref: '#/components/schemas/MarkdownViewProperties' + - $ref: '#/components/schemas/CheckViewProperties' + - $ref: '#/components/schemas/ScatterViewProperties' + - $ref: '#/components/schemas/HeatmapViewProperties' + - $ref: '#/components/schemas/MosaicViewProperties' + - $ref: '#/components/schemas/BandViewProperties' + - $ref: '#/components/schemas/GeoViewProperties' + View: + required: + - name + - properties + properties: + links: + type: object + readOnly: true + properties: + self: + type: string + id: + readOnly: true + type: string + name: + type: string + properties: + $ref: '#/components/schemas/ViewProperties' + Views: + type: object + properties: + links: + type: object + properties: + self: + type: string + views: + type: array + items: + $ref: '#/components/schemas/View' + CellUpdate: + type: object + properties: + x: + type: integer + format: int32 + 'y': + type: integer + format: int32 + w: + type: integer + format: int32 + h: + type: integer + format: int32 + CreateCell: + type: object + properties: + name: + type: string + x: + type: integer + format: int32 + 'y': + type: integer + format: int32 + w: + type: integer + format: int32 + h: + type: integer + format: int32 + usingView: + type: string + description: Makes a copy of the provided view. + AnalyzeQueryResponse: + type: object + properties: + errors: + type: array + items: + type: object + properties: + line: + type: integer + column: + type: integer + character: + type: integer + message: + type: string + CellWithViewProperties: + type: object + allOf: + - $ref: '#/components/schemas/Cell' + - type: object + properties: + name: + type: string + properties: + $ref: '#/components/schemas/ViewProperties' + Cell: + type: object + properties: + id: + type: string + links: + type: object + properties: + self: + type: string + view: + type: string + x: + type: integer + format: int32 + 'y': + type: integer + format: int32 + w: + type: integer + format: int32 + h: + type: integer + format: int32 + viewID: + type: string + description: The reference to a view from the views API. 
+ CellsWithViewProperties: + type: array + items: + $ref: '#/components/schemas/CellWithViewProperties' + Cells: + type: array + items: + $ref: '#/components/schemas/Cell' + Secrets: + additionalProperties: + type: string + example: + apikey: abc123xyz + SecretKeys: + type: object + properties: + secrets: + type: array + items: + type: string + SecretKeysResponse: + allOf: + - $ref: '#/components/schemas/SecretKeys' + - type: object + properties: + links: + readOnly: true + type: object + properties: + self: + type: string + org: + type: string + CreateDashboardRequest: + properties: + orgID: + type: string + description: The ID of the organization that owns the dashboard. + name: + type: string + description: The user-facing name of the dashboard. + description: + type: string + description: The user-facing description of the dashboard. + required: + - orgID + - name + DashboardWithViewProperties: + type: object + allOf: + - $ref: '#/components/schemas/CreateDashboardRequest' + - type: object + properties: + links: + type: object + example: + self: /api/v2/dashboards/1 + cells: /api/v2/dashboards/1/cells + owners: /api/v2/dashboards/1/owners + members: /api/v2/dashboards/1/members + labels: /api/v2/dashboards/1/labels + org: /api/v2/labels/1 + properties: + self: + $ref: '#/components/schemas/Link' + cells: + $ref: '#/components/schemas/Link' + members: + $ref: '#/components/schemas/Link' + owners: + $ref: '#/components/schemas/Link' + labels: + $ref: '#/components/schemas/Link' + org: + $ref: '#/components/schemas/Link' + id: + readOnly: true + type: string + meta: + type: object + properties: + createdAt: + type: string + format: date-time + updatedAt: + type: string + format: date-time + cells: + $ref: '#/components/schemas/CellsWithViewProperties' + labels: + $ref: '#/components/schemas/Labels' + Dashboard: + type: object + allOf: + - $ref: '#/components/schemas/CreateDashboardRequest' + - type: object + properties: + links: + type: object + example: + self: /api/v2/dashboards/1 + cells: /api/v2/dashboards/1/cells + owners: /api/v2/dashboards/1/owners + members: /api/v2/dashboards/1/members + labels: /api/v2/dashboards/1/labels + org: /api/v2/labels/1 + properties: + self: + $ref: '#/components/schemas/Link' + cells: + $ref: '#/components/schemas/Link' + members: + $ref: '#/components/schemas/Link' + owners: + $ref: '#/components/schemas/Link' + labels: + $ref: '#/components/schemas/Link' + org: + $ref: '#/components/schemas/Link' + id: + readOnly: true + type: string + meta: + type: object + properties: + createdAt: + type: string + format: date-time + updatedAt: + type: string + format: date-time + cells: + $ref: '#/components/schemas/Cells' + labels: + $ref: '#/components/schemas/Labels' + Dashboards: + type: object + properties: + links: + $ref: '#/components/schemas/Links' + dashboards: + type: array + items: + $ref: '#/components/schemas/Dashboard' + TelegrafRequest: + type: object + properties: + name: + type: string + description: + type: string + metadata: + type: object + properties: + buckets: + type: array + items: + type: string + config: + type: string + orgID: + type: string + TelegrafPluginRequest: + type: object + properties: + name: + type: string + description: + type: string + plugins: + type: array + items: + type: object + properties: + type: + type: string + name: + type: string + alias: + type: string + description: + type: string + config: + type: string + metadata: + type: object + properties: + buckets: + type: array + items: + type: string + config: + type: 
string + orgID: + type: string + Telegraf: + type: object + allOf: + - $ref: '#/components/schemas/TelegrafRequest' + - type: object + properties: + id: + type: string + readOnly: true + links: + type: object + readOnly: true + example: + self: /api/v2/telegrafs/1 + lables: /api/v2/telegrafs/1/labels + owners: /api/v2/telegrafs/1/owners + members: /api/v2/telegrafs/1/members + properties: + self: + $ref: '#/components/schemas/Link' + labels: + $ref: '#/components/schemas/Link' + members: + $ref: '#/components/schemas/Link' + owners: + $ref: '#/components/schemas/Link' + labels: + readOnly: true + $ref: '#/components/schemas/Labels' + Telegrafs: + type: object + properties: + configurations: + type: array + items: + $ref: '#/components/schemas/Telegraf' + TelegrafPlugin: + type: object + properties: + type: + type: string + name: + type: string + description: + type: string + config: + type: string + TelegrafPlugins: + type: object + properties: + version: + type: string + os: + type: string + plugins: + type: array + items: + $ref: '#/components/schemas/TelegrafPlugin' + IsOnboarding: + type: object + properties: + allowed: + description: True means that the influxdb instance has NOT had initial setup; false means that the database has been setup. + type: boolean + PasswordResetBody: + properties: + password: + type: string + required: + - password + AddResourceMemberRequestBody: + type: object + properties: + id: + type: string + name: + type: string + required: + - id + Ready: + type: object + properties: + status: + type: string + enum: + - ready + started: + type: string + format: date-time + example: '2019-03-13T10:09:33.891196-04:00' + up: + type: string + example: 14m45.911966424s + HealthCheck: + type: object + required: + - name + - status + properties: + name: + type: string + message: + type: string + checks: + type: array + items: + $ref: '#/components/schemas/HealthCheck' + status: + type: string + enum: + - pass + - fail + version: + type: string + commit: + type: string + Labels: + type: array + items: + $ref: '#/components/schemas/Label' + Label: + type: object + properties: + id: + readOnly: true + type: string + orgID: + readOnly: true + type: string + name: + type: string + properties: + type: object + additionalProperties: + type: string + description: Key/Value pairs associated with this label. Keys can be removed by sending an update with an empty value. + example: + color: ffb3b3 + description: this is a description + LabelCreateRequest: + type: object + required: + - orgID + - name + properties: + orgID: + type: string + name: + type: string + properties: + type: object + additionalProperties: + type: string + description: Key/Value pairs associated with this label. Keys can be removed by sending an update with an empty value. + example: + color: ffb3b3 + description: this is a description + LabelUpdate: + type: object + properties: + name: + type: string + properties: + type: object + additionalProperties: + type: string + description: Key/Value pairs associated with this label. Keys can be removed by sending an update with an empty value. 
+ example: + color: ffb3b3 + description: this is a description + LabelMapping: + type: object + properties: + labelID: + type: string + LabelsResponse: + type: object + properties: + labels: + $ref: '#/components/schemas/Labels' + links: + $ref: '#/components/schemas/Links' + LabelResponse: + type: object + properties: + label: + $ref: '#/components/schemas/Label' + links: + $ref: '#/components/schemas/Links' + ASTResponse: + description: Contains the AST for the supplied Flux query + type: object + properties: + ast: + $ref: '#/components/schemas/Package' + WritePrecision: + type: string + enum: + - ms + - s + - us + - ns + TaskCreateRequest: + type: object + properties: + orgID: + description: The ID of the organization that owns this Task. + type: string + org: + description: The name of the organization that owns this Task. + type: string + status: + $ref: '#/components/schemas/TaskStatusType' + flux: + description: The Flux script to run for this task. + type: string + description: + description: An optional description of the task. + type: string + required: + - flux + TaskUpdateRequest: + type: object + properties: + status: + $ref: '#/components/schemas/TaskStatusType' + flux: + description: The Flux script to run for this task. + type: string + name: + description: Override the 'name' option in the flux script. + type: string + every: + description: Override the 'every' option in the flux script. + type: string + cron: + description: Override the 'cron' option in the flux script. + type: string + offset: + description: Override the 'offset' option in the flux script. + type: string + description: + description: An optional description of the task. + type: string + FluxResponse: + description: Rendered flux that backs the check or notification. + properties: + flux: + type: string + CheckPatch: + type: object + properties: + name: + type: string + description: + type: string + status: + type: string + enum: + - active + - inactive + CheckDiscriminator: + oneOf: + - $ref: '#/components/schemas/DeadmanCheck' + - $ref: '#/components/schemas/ThresholdCheck' + - $ref: '#/components/schemas/CustomCheck' + discriminator: + propertyName: type + mapping: + deadman: '#/components/schemas/DeadmanCheck' + threshold: '#/components/schemas/ThresholdCheck' + custom: '#/components/schemas/CustomCheck' + Check: + allOf: + - $ref: '#/components/schemas/CheckDiscriminator' + PostCheck: + allOf: + - $ref: '#/components/schemas/CheckDiscriminator' + Checks: + properties: + checks: + type: array + items: + $ref: '#/components/schemas/Check' + links: + $ref: '#/components/schemas/Links' + CheckBase: + properties: + id: + readOnly: true + type: string + name: + type: string + orgID: + description: The ID of the organization that owns this check. + type: string + taskID: + description: The ID of the task associated with this check. + type: string + ownerID: + description: The ID of creator used to create this check. + type: string + readOnly: true + createdAt: + type: string + format: date-time + readOnly: true + updatedAt: + type: string + format: date-time + readOnly: true + query: + $ref: '#/components/schemas/DashboardQuery' + status: + $ref: '#/components/schemas/TaskStatusType' + description: + description: An optional description of the check. + type: string + latestCompleted: + description: 'Timestamp (in RFC3339 date/time format](https://datatracker.ietf.org/doc/html/rfc3339)) of the latest scheduled and completed run.' 
+ format: date-time + readOnly: true + lastRunStatus: + readOnly: true + type: string + enum: + - failed + - success + - canceled + lastRunError: + readOnly: true + type: string + labels: + $ref: '#/components/schemas/Labels' + links: + type: object + readOnly: true + example: + self: /api/v2/checks/1 + labels: /api/v2/checks/1/labels + members: /api/v2/checks/1/members + owners: /api/v2/checks/1/owners + query: /api/v2/checks/1/query + properties: + self: + description: URL for this check + $ref: '#/components/schemas/Link' + labels: + description: URL to retrieve labels for this check + $ref: '#/components/schemas/Link' + members: + description: URL to retrieve members for this check + $ref: '#/components/schemas/Link' + owners: + description: URL to retrieve owners for this check + $ref: '#/components/schemas/Link' + query: + description: URL to retrieve flux script for this check + $ref: '#/components/schemas/Link' + required: + - name + - orgID + - query + ThresholdCheck: + allOf: + - $ref: '#/components/schemas/CheckBase' + - type: object + required: + - type + properties: + type: + type: string + enum: + - threshold + thresholds: + type: array + items: + $ref: '#/components/schemas/Threshold' + every: + description: Check repetition interval. + type: string + offset: + description: 'Duration to delay after the schedule, before executing check.' + type: string + tags: + description: List of tags to write to each status. + type: array + items: + type: object + properties: + key: + type: string + value: + type: string + statusMessageTemplate: + description: The template used to generate and write a status message. + type: string + Threshold: + oneOf: + - $ref: '#/components/schemas/GreaterThreshold' + - $ref: '#/components/schemas/LesserThreshold' + - $ref: '#/components/schemas/RangeThreshold' + discriminator: + propertyName: type + mapping: + greater: '#/components/schemas/GreaterThreshold' + lesser: '#/components/schemas/LesserThreshold' + range: '#/components/schemas/RangeThreshold' + DeadmanCheck: + allOf: + - $ref: '#/components/schemas/CheckBase' + - type: object + required: + - type + properties: + type: + type: string + enum: + - deadman + timeSince: + description: String duration before deadman triggers. + type: string + staleTime: + description: String duration for time that a series is considered stale and should not trigger deadman. + type: string + reportZero: + description: 'If only zero values reported since time, trigger an alert' + type: boolean + level: + $ref: '#/components/schemas/CheckStatusLevel' + every: + description: Check repetition interval. + type: string + offset: + description: 'Duration to delay after the schedule, before executing check.' + type: string + tags: + description: List of tags to write to each status. + type: array + items: + type: object + properties: + key: + type: string + value: + type: string + statusMessageTemplate: + description: The template used to generate and write a status message. + type: string + CustomCheck: + allOf: + - $ref: '#/components/schemas/CheckBase' + - type: object + properties: + type: + type: string + enum: + - custom + required: + - type + ThresholdBase: + properties: + level: + $ref: '#/components/schemas/CheckStatusLevel' + allValues: + description: 'If true, only alert if all values meet threshold.' 
+ type: boolean + GreaterThreshold: + allOf: + - $ref: '#/components/schemas/ThresholdBase' + - type: object + required: + - type + - value + properties: + type: + type: string + enum: + - greater + value: + type: number + format: float + LesserThreshold: + allOf: + - $ref: '#/components/schemas/ThresholdBase' + - type: object + required: + - type + - value + properties: + type: + type: string + enum: + - lesser + value: + type: number + format: float + RangeThreshold: + allOf: + - $ref: '#/components/schemas/ThresholdBase' + - type: object + required: + - type + - min + - max + - within + properties: + type: + type: string + enum: + - range + min: + type: number + format: float + max: + type: number + format: float + within: + type: boolean + CheckStatusLevel: + description: The state to record if check matches a criteria. + type: string + enum: + - UNKNOWN + - OK + - INFO + - CRIT + - WARN + RuleStatusLevel: + description: The state to record if check matches a criteria. + type: string + enum: + - UNKNOWN + - OK + - INFO + - CRIT + - WARN + - ANY + NotificationRuleUpdate: + type: object + properties: + name: + type: string + description: + type: string + status: + type: string + enum: + - active + - inactive + NotificationRuleDiscriminator: + oneOf: + - $ref: '#/components/schemas/SlackNotificationRule' + - $ref: '#/components/schemas/SMTPNotificationRule' + - $ref: '#/components/schemas/PagerDutyNotificationRule' + - $ref: '#/components/schemas/HTTPNotificationRule' + - $ref: '#/components/schemas/TelegramNotificationRule' + discriminator: + propertyName: type + mapping: + slack: '#/components/schemas/SlackNotificationRule' + smtp: '#/components/schemas/SMTPNotificationRule' + pagerduty: '#/components/schemas/PagerDutyNotificationRule' + http: '#/components/schemas/HTTPNotificationRule' + telegram: '#/components/schemas/TelegramNotificationRule' + NotificationRule: + allOf: + - $ref: '#/components/schemas/NotificationRuleDiscriminator' + PostNotificationRule: + allOf: + - $ref: '#/components/schemas/NotificationRuleDiscriminator' + NotificationRules: + properties: + notificationRules: + type: array + items: + $ref: '#/components/schemas/NotificationRule' + links: + $ref: '#/components/schemas/Links' + NotificationRuleBase: + type: object + required: + - orgID + - status + - name + - statusRules + - endpointID + properties: + latestCompleted: + description: 'Timestamp (in RFC3339 date/time format](https://datatracker.ietf.org/doc/html/rfc3339)) of the latest scheduled and completed run.' + type: string + format: date-time + readOnly: true + lastRunStatus: + readOnly: true + type: string + enum: + - failed + - success + - canceled + lastRunError: + readOnly: true + type: string + id: + readOnly: true + type: string + endpointID: + type: string + orgID: + description: The ID of the organization that owns this notification rule. + type: string + taskID: + description: The ID of the task associated with this notification rule. + type: string + ownerID: + description: The ID of creator used to create this notification rule. + type: string + readOnly: true + createdAt: + type: string + format: date-time + readOnly: true + updatedAt: + type: string + format: date-time + readOnly: true + status: + $ref: '#/components/schemas/TaskStatusType' + name: + description: Human-readable name describing the notification rule. + type: string + sleepUntil: + type: string + every: + description: The notification repetition interval. 
+ type: string + offset: + description: 'Duration to delay after the schedule, before executing check.' + type: string + runbookLink: + type: string + limitEvery: + description: 'Don''t notify me more than times every seconds. If set, limit cannot be empty.' + type: integer + limit: + description: 'Don''t notify me more than times every seconds. If set, limitEvery cannot be empty.' + type: integer + tagRules: + description: List of tag rules the notification rule attempts to match. + type: array + items: + $ref: '#/components/schemas/TagRule' + description: + description: An optional description of the notification rule. + type: string + statusRules: + description: List of status rules the notification rule attempts to match. + type: array + minItems: 1 + items: + $ref: '#/components/schemas/StatusRule' + labels: + $ref: '#/components/schemas/Labels' + links: + type: object + readOnly: true + example: + self: /api/v2/notificationRules/1 + labels: /api/v2/notificationRules/1/labels + members: /api/v2/notificationRules/1/members + owners: /api/v2/notificationRules/1/owners + query: /api/v2/notificationRules/1/query + properties: + self: + description: URL for this endpoint. + $ref: '#/components/schemas/Link' + labels: + description: URL to retrieve labels for this notification rule. + $ref: '#/components/schemas/Link' + members: + description: URL to retrieve members for this notification rule. + $ref: '#/components/schemas/Link' + owners: + description: URL to retrieve owners for this notification rule. + $ref: '#/components/schemas/Link' + query: + description: URL to retrieve flux script for this notification rule. + $ref: '#/components/schemas/Link' + TagRule: + type: object + properties: + key: + type: string + value: + type: string + operator: + type: string + enum: + - equal + - notequal + - equalregex + - notequalregex + StatusRule: + type: object + properties: + currentLevel: + $ref: '#/components/schemas/RuleStatusLevel' + previousLevel: + $ref: '#/components/schemas/RuleStatusLevel' + count: + type: integer + period: + type: string + HTTPNotificationRuleBase: + type: object + required: + - type + properties: + type: + type: string + enum: + - http + url: + type: string + HTTPNotificationRule: + allOf: + - $ref: '#/components/schemas/NotificationRuleBase' + - $ref: '#/components/schemas/HTTPNotificationRuleBase' + SlackNotificationRuleBase: + type: object + required: + - type + - messageTemplate + properties: + type: + type: string + enum: + - slack + channel: + type: string + messageTemplate: + type: string + SlackNotificationRule: + allOf: + - $ref: '#/components/schemas/NotificationRuleBase' + - $ref: '#/components/schemas/SlackNotificationRuleBase' + SMTPNotificationRule: + allOf: + - $ref: '#/components/schemas/NotificationRuleBase' + - $ref: '#/components/schemas/SMTPNotificationRuleBase' + SMTPNotificationRuleBase: + type: object + required: + - type + - subjectTemplate + - to + properties: + type: + type: string + enum: + - smtp + subjectTemplate: + type: string + bodyTemplate: + type: string + to: + type: string + PagerDutyNotificationRule: + allOf: + - $ref: '#/components/schemas/NotificationRuleBase' + - $ref: '#/components/schemas/PagerDutyNotificationRuleBase' + PagerDutyNotificationRuleBase: + type: object + required: + - type + - messageTemplate + properties: + type: + type: string + enum: + - pagerduty + messageTemplate: + type: string + TelegramNotificationRule: + allOf: + - $ref: '#/components/schemas/NotificationRuleBase' + - $ref: 
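NotificationRuleBase plus one of the per-type bases (Slack in this sketch) describes the body for creating a notification rule. A minimal, hedged example with placeholder org and endpoint IDs:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Sketch of a Slack notification rule body per the schemas above.
	rule := map[string]interface{}{
		"name":            "notify on CRIT",
		"orgID":           "0000000000000000", // hypothetical org ID
		"endpointID":      "1111111111111111", // hypothetical notification endpoint ID
		"status":          "active",
		"type":            "slack",
		"every":           "10m",
		"offset":          "0s",
		"messageTemplate": "${ r._check_name }: ${ r._message }",
		// statusRules is required and must contain at least one entry (minItems: 1).
		"statusRules": []map[string]string{{"currentLevel": "CRIT"}},
		// Optional tag rules narrow which statuses the rule matches.
		"tagRules": []map[string]string{{"key": "color", "value": "BLACK", "operator": "equal"}},
	}
	body, _ := json.MarshalIndent(rule, "", "  ")
	fmt.Println(string(body))
}
```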
'#/components/schemas/TelegramNotificationRuleBase' + TelegramNotificationRuleBase: + type: object + required: + - type + - messageTemplate + - channel + properties: + type: + description: The discriminator between other types of notification rules is "telegram". + type: string + enum: + - telegram + messageTemplate: + description: The message template as a flux interpolated string. + type: string + parseMode: + description: 'Parse mode of the message text per https://core.telegram.org/bots/api#formatting-options . Defaults to "MarkdownV2" .' + type: string + enum: + - MarkdownV2 + - HTML + - Markdown + disableWebPagePreview: + description: Disables preview of web links in the sent messages when "true". Defaults to "false" . + type: boolean + NotificationEndpointUpdate: + type: object + properties: + name: + type: string + description: + type: string + status: + type: string + enum: + - active + - inactive + NotificationEndpointDiscriminator: + oneOf: + - $ref: '#/components/schemas/SlackNotificationEndpoint' + - $ref: '#/components/schemas/PagerDutyNotificationEndpoint' + - $ref: '#/components/schemas/HTTPNotificationEndpoint' + - $ref: '#/components/schemas/TelegramNotificationEndpoint' + discriminator: + propertyName: type + mapping: + slack: '#/components/schemas/SlackNotificationEndpoint' + pagerduty: '#/components/schemas/PagerDutyNotificationEndpoint' + http: '#/components/schemas/HTTPNotificationEndpoint' + telegram: '#/components/schemas/TelegramNotificationEndpoint' + NotificationEndpoint: + allOf: + - $ref: '#/components/schemas/NotificationEndpointDiscriminator' + PostNotificationEndpoint: + allOf: + - $ref: '#/components/schemas/NotificationEndpointDiscriminator' + NotificationEndpoints: + properties: + notificationEndpoints: + type: array + items: + $ref: '#/components/schemas/NotificationEndpoint' + links: + $ref: '#/components/schemas/Links' + NotificationEndpointBase: + type: object + required: + - type + - name + properties: + id: + type: string + orgID: + type: string + userID: + type: string + createdAt: + type: string + format: date-time + readOnly: true + updatedAt: + type: string + format: date-time + readOnly: true + description: + description: An optional description of the notification endpoint. + type: string + name: + type: string + status: + description: The status of the endpoint. + default: active + type: string + enum: + - active + - inactive + labels: + $ref: '#/components/schemas/Labels' + links: + type: object + readOnly: true + example: + self: /api/v2/notificationEndpoints/1 + labels: /api/v2/notificationEndpoints/1/labels + members: /api/v2/notificationEndpoints/1/members + owners: /api/v2/notificationEndpoints/1/owners + properties: + self: + description: URL for this endpoint. + $ref: '#/components/schemas/Link' + labels: + description: URL to retrieve labels for this endpoint. + $ref: '#/components/schemas/Link' + members: + description: URL to retrieve members for this endpoint. + $ref: '#/components/schemas/Link' + owners: + description: URL to retrieve owners for this endpoint. + $ref: '#/components/schemas/Link' + type: + $ref: '#/components/schemas/NotificationEndpointType' + SlackNotificationEndpoint: + type: object + allOf: + - $ref: '#/components/schemas/NotificationEndpointBase' + - type: object + properties: + url: + description: Specifies the URL of the Slack endpoint. Specify either `URL` or `Token`. + type: string + token: + description: Specifies the API token string. Specify either `URL` or `Token`. 
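For the endpoint such a rule points at, SlackNotificationEndpoint layers a `url`/`token` pair on top of NotificationEndpointBase, and the schema above says to supply one or the other. A hypothetical request body (the webhook URL and IDs are made up):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Sketch of a SlackNotificationEndpoint body; url and token are omitempty
// because only one of them should be set.
type slackEndpoint struct {
	Name   string `json:"name"`
	OrgID  string `json:"orgID,omitempty"`
	Type   string `json:"type"`   // "slack"
	Status string `json:"status"` // defaults to "active" server-side
	URL    string `json:"url,omitempty"`
	Token  string `json:"token,omitempty"`
}

func main() {
	ep := slackEndpoint{
		Name:   "brew-alerts",
		OrgID:  "0000000000000000", // hypothetical
		Type:   "slack",
		Status: "active",
		URL:    "https://hooks.slack.com/services/T000/B000/XXXX", // placeholder webhook
	}
	body, _ := json.MarshalIndent(ep, "", "  ")
	fmt.Println(string(body))
}
```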
+ type: string + PagerDutyNotificationEndpoint: + type: object + allOf: + - $ref: '#/components/schemas/NotificationEndpointBase' + - type: object + required: + - routingKey + properties: + clientURL: + type: string + routingKey: + type: string + HTTPNotificationEndpoint: + type: object + allOf: + - $ref: '#/components/schemas/NotificationEndpointBase' + - type: object + required: + - url + - authMethod + - method + properties: + url: + type: string + username: + type: string + password: + type: string + token: + type: string + method: + type: string + enum: + - POST + - GET + - PUT + authMethod: + type: string + enum: + - none + - basic + - bearer + contentTemplate: + type: string + headers: + type: object + description: Customized headers. + additionalProperties: + type: string + TelegramNotificationEndpoint: + type: object + allOf: + - $ref: '#/components/schemas/NotificationEndpointBase' + - type: object + required: + - token + - channel + properties: + token: + description: 'Specifies the Telegram bot token. See https://core.telegram.org/bots#creating-a-new-bot .' + type: string + channel: + description: 'ID of the telegram channel, a chat_id in https://core.telegram.org/bots/api#sendmessage .' + type: string + NotificationEndpointType: + type: string + enum: + - slack + - pagerduty + - http + - telegram + DBRP: + type: object + properties: + id: + type: string + description: ID of the DBRP mapping. + readOnly: true + orgID: + type: string + description: ID of the organization that owns this mapping. + bucketID: + type: string + description: ID of the bucket used as the target for the translation. + database: + type: string + description: InfluxDB v1 database + retention_policy: + type: string + description: InfluxDB v1 retention policy + default: + type: boolean + description: Mapping represents the default retention policy for the database specified. + links: + $ref: '#/components/schemas/Links' + required: + - id + - orgID + - bucketID + - database + - retention_policy + - default + DBRPs: + properties: + content: + type: array + items: + $ref: '#/components/schemas/DBRP' + DBRPUpdate: + properties: + retention_policy: + type: string + description: InfluxDB v1 retention policy + default: + type: boolean + DBRPCreate: + type: object + properties: + orgID: + type: string + description: ID of the organization that owns this mapping. + org: + type: string + description: Name of the organization that owns this mapping. + bucketID: + type: string + description: ID of the bucket used as the target for the translation. + database: + type: string + description: InfluxDB v1 database + retention_policy: + type: string + description: InfluxDB v1 retention policy + default: + type: boolean + description: Mapping represents the default retention policy for the database specified. + required: + - bucketID + - database + - retention_policy + DBRPGet: + type: object + properties: + content: + $ref: '#/components/schemas/DBRP' + required: true + SchemaType: + type: string + enum: + - implicit + - explicit + ColorMapping: + type: object + description: A color mapping is an object that maps time series data to a UI color scheme to allow the UI to render graphs consistent colors across reloads. 
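The DBRP schemas map an InfluxDB v1 database and retention policy onto a v2 bucket so the v1 compatibility endpoints can address the same data. A sketch of a DBRPCreate body, with placeholder names and bucket ID:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Sketch of a DBRPCreate body: v1 database/retention policy -> v2 bucket.
	mapping := map[string]interface{}{
		"org":              "my-org",           // placeholder org name
		"bucketID":         "2222222222222222", // placeholder bucket ID
		"database":         "tilty",
		"retention_policy": "autogen",
		"default":          true,
	}
	body, _ := json.MarshalIndent(mapping, "", "  ")
	fmt.Println(string(body))
}
```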
+ additionalProperties: + type: string + example: + series_id_1: '#edf529' + series_id_2: '#edf529' + measurement_birdmigration_europe: '#663cd0' + configcat_deployments-autopromotionblocker: '#663cd0' + Authorization: + required: + - orgID + - permissions + allOf: + - $ref: '#/components/schemas/AuthorizationUpdateRequest' + - type: object + properties: + createdAt: + type: string + format: date-time + readOnly: true + updatedAt: + type: string + format: date-time + readOnly: true + orgID: + type: string + description: ID of the organization that the authorization is scoped to. + permissions: + type: array + minItems: 1 + description: List of permissions for an authorization. An authorization must have at least one permission. + items: + $ref: '#/components/schemas/Permission' + id: + readOnly: true + type: string + token: + readOnly: true + type: string + description: Token used to authenticate API requests. + userID: + readOnly: true + type: string + description: ID of the user that created and owns the token. + user: + readOnly: true + type: string + description: Name of the user that created and owns the token. + org: + readOnly: true + type: string + description: Name of the organization that the token is scoped to. + links: + type: object + readOnly: true + example: + self: /api/v2/authorizations/1 + user: /api/v2/users/12 + properties: + self: + readOnly: true + $ref: '#/components/schemas/Link' + user: + readOnly: true + $ref: '#/components/schemas/Link' + AuthorizationPostRequest: + required: + - orgID + - permissions + allOf: + - $ref: '#/components/schemas/AuthorizationUpdateRequest' + - type: object + properties: + orgID: + type: string + description: ID of org that authorization is scoped to. + userID: + type: string + description: ID of user that authorization is scoped to. + permissions: + type: array + minItems: 1 + description: List of permissions for an auth. An auth must have at least one Permission. + items: + $ref: '#/components/schemas/Permission' + LegacyAuthorizationPostRequest: + required: + - orgID + - permissions + allOf: + - $ref: '#/components/schemas/AuthorizationUpdateRequest' + - type: object + properties: + orgID: + type: string + description: ID of org that authorization is scoped to. + userID: + type: string + description: ID of user that authorization is scoped to. + token: + type: string + description: Token (name) of the authorization + permissions: + type: array + minItems: 1 + description: List of permissions for an auth. An auth must have at least one Permission. + items: + $ref: '#/components/schemas/Permission' + Authorizations: + type: object + properties: + links: + readOnly: true + $ref: '#/components/schemas/Links' + authorizations: + type: array + items: + $ref: '#/components/schemas/Authorization' + Permission: + required: + - action + - resource + properties: + action: + type: string + enum: + - read + - write + resource: + $ref: '#/components/schemas/Resource' + Resource: + type: object + required: + - type + properties: + type: + type: string + enum: + - authorizations + - buckets + - dashboards + - orgs + - sources + - tasks + - telegrafs + - users + - variables + - scrapers + - secrets + - labels + - views + - documents + - notificationRules + - notificationEndpoints + - checks + - dbrp + - notebooks + - annotations + - remotes + - replications + id: + type: string + description: If ID is set that is a permission for a specific resource. if it is not set it is a permission for all resources of that resource type. 
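Authorization, Permission, and Resource together describe API tokens and what they may touch. A sketch of an AuthorizationPostRequest that grants read on an org's buckets and write on a single bucket; all IDs are placeholders, and per the schema at least one permission is required:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Sketch of an AuthorizationPostRequest body.
	auth := map[string]interface{}{
		"orgID":       "0000000000000000", // hypothetical org ID
		"description": "write-only token for a hydrometer emitter",
		"permissions": []map[string]interface{}{
			// Read all buckets owned by the org.
			{"action": "read", "resource": map[string]string{"type": "buckets", "orgID": "0000000000000000"}},
			// Write a single bucket, addressed by ID.
			{"action": "write", "resource": map[string]string{"type": "buckets", "id": "2222222222222222"}},
		},
	}
	body, _ := json.MarshalIndent(auth, "", "  ")
	fmt.Println(string(body))
}
```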
+ name: + type: string + description: Optional name of the resource if the resource has a name field. + orgID: + type: string + description: If orgID is set that is a permission for all resources owned my that org. if it is not set it is a permission for all resources of that resource type. + org: + type: string + description: Optional name of the organization of the organization with orgID. + User: + properties: + id: + readOnly: true + type: string + oauthID: + type: string + name: + type: string + status: + description: If inactive the user is inactive. + default: active + type: string + enum: + - active + - inactive + required: + - name + Users: + type: object + properties: + links: + type: object + properties: + self: + type: string + format: uri + users: + type: array + items: + $ref: '#/components/schemas/UserResponse' + OnboardingRequest: + type: object + properties: + username: + type: string + password: + type: string + org: + type: string + bucket: + type: string + retentionPeriodSeconds: + type: integer + format: int64 + retentionPeriodHrs: + type: integer + deprecated: true + description: | + Retention period *in nanoseconds* for the new bucket. This key's name has been misleading since OSS 2.0 GA, please transition to use `retentionPeriodSeconds` + token: + type: string + description: | + Authentication token to set on the initial user. If not specified, the server will generate a token. + required: + - username + - org + - bucket + OnboardingResponse: + type: object + properties: + user: + $ref: '#/components/schemas/UserResponse' + org: + $ref: '#/components/schemas/Organization' + bucket: + $ref: '#/components/schemas/Bucket' + auth: + $ref: '#/components/schemas/Authorization' + Variable: + type: object + required: + - name + - orgID + - arguments + properties: + links: + type: object + readOnly: true + properties: + self: + type: string + format: uri + org: + type: string + format: uri + labels: + type: string + format: uri + id: + readOnly: true + type: string + orgID: + type: string + name: + type: string + description: + type: string + selected: + type: array + items: + type: string + labels: + $ref: '#/components/schemas/Labels' + arguments: + $ref: '#/components/schemas/VariableProperties' + createdAt: + type: string + format: date-time + updatedAt: + type: string + format: date-time + Variables: + type: object + example: + variables: + - id: '1221432' + name: ':ok:' + selected: + - hello + arguments: + type: constant + values: + - howdy + - hello + - hi + - yo + - oy + - id: '1221432' + name: ':ok:' + selected: + - c + arguments: + type: map + values: + a: fdjaklfdjkldsfjlkjdsa + b: dfaksjfkljekfajekdljfas + c: fdjksajfdkfeawfeea + - id: '1221432' + name: ':ok:' + selected: + - host + arguments: + type: query + query: 'from(bucket: "foo") |> showMeasurements()' + language: flux + properties: + variables: + type: array + items: + $ref: '#/components/schemas/Variable' + Source: + type: object + properties: + links: + type: object + properties: + self: + type: string + query: + type: string + health: + type: string + buckets: + type: string + id: + type: string + orgID: + type: string + default: + type: boolean + name: + type: string + type: + type: string + enum: + - v1 + - v2 + - self + url: + type: string + format: uri + insecureSkipVerify: + type: boolean + telegraf: + type: string + token: + type: string + username: + type: string + password: + type: string + sharedSecret: + type: string + metaUrl: + type: string + format: uri + defaultRP: + type: string + 
languages: + type: array + readOnly: true + items: + type: string + enum: + - flux + - influxql + Sources: + type: object + properties: + links: + type: object + properties: + self: + type: string + format: uri + sources: + type: array + items: + $ref: '#/components/schemas/Source' + ScraperTargetRequest: + type: object + properties: + name: + type: string + description: The name of the scraper target. + type: + type: string + description: The type of the metrics to be parsed. + enum: + - prometheus + url: + type: string + description: The URL of the metrics endpoint. + example: 'http://localhost:9090/metrics' + orgID: + type: string + description: The organization ID. + bucketID: + type: string + description: The ID of the bucket to write to. + allowInsecure: + type: boolean + description: Skip TLS verification on endpoint. + default: false + ScraperTargetResponse: + type: object + allOf: + - $ref: '#/components/schemas/ScraperTargetRequest' + - type: object + properties: + id: + type: string + readOnly: true + org: + type: string + description: The name of the organization. + bucket: + type: string + description: The bucket name. + links: + type: object + readOnly: true + example: + self: /api/v2/scrapers/1 + owners: /api/v2/scrapers/1/owners + members: /api/v2/scrapers/1/members + bucket: /api/v2/buckets/1 + organization: /api/v2/orgs/1 + properties: + self: + $ref: '#/components/schemas/Link' + members: + $ref: '#/components/schemas/Link' + owners: + $ref: '#/components/schemas/Link' + bucket: + $ref: '#/components/schemas/Link' + organization: + $ref: '#/components/schemas/Link' + ScraperTargetResponses: + type: object + properties: + configurations: + type: array + items: + $ref: '#/components/schemas/ScraperTargetResponse' + MetadataBackup: + type: object + properties: + kv: + type: string + format: binary + sql: + type: string + format: binary + buckets: + $ref: '#/components/schemas/BucketMetadataManifests' + required: + - kv + - sql + - buckets + BucketMetadataManifests: + type: array + items: + $ref: '#/components/schemas/BucketMetadataManifest' + BucketMetadataManifest: + type: object + properties: + organizationID: + type: string + organizationName: + type: string + bucketID: + type: string + bucketName: + type: string + description: + type: string + defaultRetentionPolicy: + type: string + retentionPolicies: + $ref: '#/components/schemas/RetentionPolicyManifests' + required: + - organizationID + - organizationName + - bucketID + - bucketName + - defaultRetentionPolicy + - retentionPolicies + RetentionPolicyManifests: + type: array + items: + $ref: '#/components/schemas/RetentionPolicyManifest' + RetentionPolicyManifest: + type: object + properties: + name: + type: string + replicaN: + type: integer + duration: + type: integer + format: int64 + shardGroupDuration: + type: integer + format: int64 + shardGroups: + $ref: '#/components/schemas/ShardGroupManifests' + subscriptions: + $ref: '#/components/schemas/SubscriptionManifests' + required: + - name + - replicaN + - duration + - shardGroupDuration + - shardGroups + - subscriptions + ShardGroupManifests: + type: array + items: + $ref: '#/components/schemas/ShardGroupManifest' + ShardGroupManifest: + type: object + properties: + id: + type: integer + format: int64 + startTime: + type: string + format: date-time + endTime: + type: string + format: date-time + deletedAt: + type: string + format: date-time + truncatedAt: + type: string + format: date-time + shards: + $ref: '#/components/schemas/ShardManifests' + required: + - id + 
- startTime + - endTime + - shards + ShardManifests: + type: array + items: + $ref: '#/components/schemas/ShardManifest' + ShardManifest: + type: object + properties: + id: + type: integer + format: int64 + shardOwners: + $ref: '#/components/schemas/ShardOwners' + required: + - id + - shardOwners + ShardOwners: + type: array + items: + $ref: '#/components/schemas/ShardOwner' + ShardOwner: + type: object + properties: + nodeID: + type: integer + format: int64 + description: ID of the node that owns a shard. + required: + - nodeID + SubscriptionManifests: + type: array + items: + $ref: '#/components/schemas/SubscriptionManifest' + SubscriptionManifest: + type: object + properties: + name: + type: string + mode: + type: string + destinations: + type: array + items: + type: string + required: + - name + - mode + - destinations + RestoredBucketMappings: + type: object + properties: + id: + description: New ID of the restored bucket + type: string + name: + type: string + shardMappings: + $ref: '#/components/schemas/BucketShardMappings' + required: + - id + - name + - shardMappings + BucketShardMappings: + type: array + items: + $ref: '#/components/schemas/BucketShardMapping' + BucketShardMapping: + type: object + properties: + oldId: + type: integer + format: int64 + newId: + type: integer + format: int64 + required: + - oldId + - newId + Config: + type: object + properties: + config: + type: object + RemoteConnection: + type: object + properties: + id: + type: string + name: + type: string + orgID: + type: string + description: + type: string + remoteURL: + type: string + format: uri + remoteOrgID: + type: string + allowInsecureTLS: + type: boolean + default: false + required: + - id + - name + - orgID + - remoteURL + - remoteOrgID + - allowInsecureTLS + RemoteConnections: + type: object + properties: + remotes: + type: array + items: + $ref: '#/components/schemas/RemoteConnection' + RemoteConnectionCreationRequest: + type: object + properties: + name: + type: string + description: + type: string + orgID: + type: string + remoteURL: + type: string + format: uri + remoteAPIToken: + type: string + remoteOrgID: + type: string + allowInsecureTLS: + type: boolean + default: false + required: + - name + - orgID + - remoteURL + - remoteAPIToken + - remoteOrgID + - allowInsecureTLS + RemoteConnectionUpdateRequest: + type: object + properties: + name: + type: string + description: + type: string + remoteURL: + type: string + format: uri + remoteAPIToken: + type: string + remoteOrgID: + type: string + allowInsecureTLS: + type: boolean + default: false + Replication: + type: object + properties: + id: + type: string + name: + type: string + description: + type: string + orgID: + type: string + remoteID: + type: string + localBucketID: + type: string + remoteBucketID: + type: string + maxQueueSizeBytes: + type: integer + format: int64 + currentQueueSizeBytes: + type: integer + format: int64 + latestResponseCode: + type: integer + latestErrorMessage: + type: string + dropNonRetryableData: + type: boolean + required: + - id + - name + - remoteID + - orgID + - localBucketID + - remoteBucketID + - maxQueueSizeBytes + - currentQueueSizeBytes + Replications: + type: object + properties: + replications: + type: array + items: + $ref: '#/components/schemas/Replication' + ReplicationCreationRequest: + type: object + properties: + name: + type: string + description: + type: string + orgID: + type: string + remoteID: + type: string + localBucketID: + type: string + remoteBucketID: + type: string + maxQueueSizeBytes: 
+ type: integer + format: int64 + minimum: 33554430 + default: 67108860 + dropNonRetryableData: + type: boolean + default: false + required: + - name + - orgID + - remoteID + - localBucketID + - remoteBucketID + - maxQueueSizeBytes + ReplicationUpdateRequest: + type: object + properties: + name: + type: string + description: + type: string + remoteID: + type: string + remoteBucketID: + type: string + maxQueueSizeBytes: + type: integer + format: int64 + minimum: 33554430 + dropNonRetryableData: + type: boolean + responses: + ServerError: + description: Non 2XX error response from server. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + securitySchemes: + TokenAuthentication: + type: apiKey + name: Authorization + in: header + description: | + Use the [Token authentication](#section/Authentication/TokenAuthentication) + scheme to authenticate to the InfluxDB API. + + + In your API requests, send an `Authorization` header. + For the header value, provide the word `Token` followed by a space and an InfluxDB API token. + The word `Token` is case-sensitive. + + + ### Syntax + + `Authorization: Token YOUR_INFLUX_TOKEN` + + + For more information and examples, see the following: + - [`/authorizations`](#tag/Authorizations) endpoint. + - [Authorize API requests](https://docs.influxdata.com/influxdb/v2.1/api-guide/api_intro/#authentication). + - [Manage API tokens](https://docs.influxdata.com/influxdb/v2.1/security/tokens/). + BasicAuthentication: + type: http + scheme: basic + description: | + Use the HTTP Basic authentication scheme for InfluxDB `/api/v2` API operations that support it. + + Username and password schemes require the following credentials: + - **username** + - **password** +security: + - TokenAuthentication: [] diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/domain/types.gen.go b/vendor/github.com/influxdata/influxdb-client-go/v2/domain/types.gen.go new file mode 100644 index 0000000..e279fe6 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/domain/types.gen.go @@ -0,0 +1,7252 @@ +// Package domain provides primitives to interact with the openapi HTTP API. +// +// Code generated by github.com/deepmap/oapi-codegen version (devel) DO NOT EDIT. +package domain + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/pkg/errors" +) + +const ( + BasicAuthenticationScopes = "BasicAuthentication.Scopes" + TokenAuthenticationScopes = "TokenAuthentication.Scopes" +) + +// Defines values for AuthorizationUpdateRequestStatus. +const ( + AuthorizationUpdateRequestStatusActive AuthorizationUpdateRequestStatus = "active" + + AuthorizationUpdateRequestStatusInactive AuthorizationUpdateRequestStatus = "inactive" +) + +// Defines values for AxisBase. +const ( + AxisBase10 AxisBase = "10" + + AxisBase2 AxisBase = "2" + + AxisBaseEmpty AxisBase = "" +) + +// Defines values for AxisScale. +const ( + AxisScaleLinear AxisScale = "linear" + + AxisScaleLog AxisScale = "log" +) + +// Defines values for BandViewPropertiesHoverDimension. +const ( + BandViewPropertiesHoverDimensionAuto BandViewPropertiesHoverDimension = "auto" + + BandViewPropertiesHoverDimensionX BandViewPropertiesHoverDimension = "x" + + BandViewPropertiesHoverDimensionXy BandViewPropertiesHoverDimension = "xy" + + BandViewPropertiesHoverDimensionY BandViewPropertiesHoverDimension = "y" +) + +// Defines values for BandViewPropertiesShape. 
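The TokenAuthentication scheme described above is simply an `Authorization: Token …` header on every request. A standard-library sketch with a placeholder host and token (against the InfluxDB 1.8 compatibility API the token is typically `username:password`):

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// The word "Token" is case-sensitive: Authorization: Token YOUR_INFLUX_TOKEN
	req, err := http.NewRequest(http.MethodGet, "http://localhost:8086/api/v2/buckets", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Token MY_INFLUX_TOKEN") // placeholder token

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println(string(body))
}
```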
+const ( + BandViewPropertiesShapeChronografV2 BandViewPropertiesShape = "chronograf-v2" +) + +// Defines values for BandViewPropertiesType. +const ( + BandViewPropertiesTypeBand BandViewPropertiesType = "band" +) + +// Defines values for BucketType. +const ( + BucketTypeSystem BucketType = "system" + + BucketTypeUser BucketType = "user" +) + +// Defines values for BuilderAggregateFunctionType. +const ( + BuilderAggregateFunctionTypeFilter BuilderAggregateFunctionType = "filter" + + BuilderAggregateFunctionTypeGroup BuilderAggregateFunctionType = "group" +) + +// Defines values for CheckBaseLastRunStatus. +const ( + CheckBaseLastRunStatusCanceled CheckBaseLastRunStatus = "canceled" + + CheckBaseLastRunStatusFailed CheckBaseLastRunStatus = "failed" + + CheckBaseLastRunStatusSuccess CheckBaseLastRunStatus = "success" +) + +// Defines values for CheckPatchStatus. +const ( + CheckPatchStatusActive CheckPatchStatus = "active" + + CheckPatchStatusInactive CheckPatchStatus = "inactive" +) + +// Defines values for CheckStatusLevel. +const ( + CheckStatusLevelCRIT CheckStatusLevel = "CRIT" + + CheckStatusLevelINFO CheckStatusLevel = "INFO" + + CheckStatusLevelOK CheckStatusLevel = "OK" + + CheckStatusLevelUNKNOWN CheckStatusLevel = "UNKNOWN" + + CheckStatusLevelWARN CheckStatusLevel = "WARN" +) + +// Defines values for CheckViewPropertiesShape. +const ( + CheckViewPropertiesShapeChronografV2 CheckViewPropertiesShape = "chronograf-v2" +) + +// Defines values for CheckViewPropertiesType. +const ( + CheckViewPropertiesTypeCheck CheckViewPropertiesType = "check" +) + +// Defines values for ConstantVariablePropertiesType. +const ( + ConstantVariablePropertiesTypeConstant ConstantVariablePropertiesType = "constant" +) + +// Defines values for CustomCheckType. +const ( + CustomCheckTypeCustom CustomCheckType = "custom" +) + +// Defines values for DashboardColorType. +const ( + DashboardColorTypeBackground DashboardColorType = "background" + + DashboardColorTypeMax DashboardColorType = "max" + + DashboardColorTypeMin DashboardColorType = "min" + + DashboardColorTypeScale DashboardColorType = "scale" + + DashboardColorTypeText DashboardColorType = "text" + + DashboardColorTypeThreshold DashboardColorType = "threshold" +) + +// Defines values for DeadmanCheckType. +const ( + DeadmanCheckTypeDeadman DeadmanCheckType = "deadman" +) + +// Defines values for DialectAnnotations. +const ( + DialectAnnotationsDatatype DialectAnnotations = "datatype" + + DialectAnnotationsDefault DialectAnnotations = "default" + + DialectAnnotationsGroup DialectAnnotations = "group" +) + +// Defines values for DialectDateTimeFormat. +const ( + DialectDateTimeFormatRFC3339 DialectDateTimeFormat = "RFC3339" + + DialectDateTimeFormatRFC3339Nano DialectDateTimeFormat = "RFC3339Nano" +) + +// Defines values for ErrorCode. 
+const ( + ErrorCodeConflict ErrorCode = "conflict" + + ErrorCodeEmptyValue ErrorCode = "empty value" + + ErrorCodeForbidden ErrorCode = "forbidden" + + ErrorCodeInternalError ErrorCode = "internal error" + + ErrorCodeInvalid ErrorCode = "invalid" + + ErrorCodeMethodNotAllowed ErrorCode = "method not allowed" + + ErrorCodeNotFound ErrorCode = "not found" + + ErrorCodeRequestTooLarge ErrorCode = "request too large" + + ErrorCodeTooManyRequests ErrorCode = "too many requests" + + ErrorCodeUnauthorized ErrorCode = "unauthorized" + + ErrorCodeUnavailable ErrorCode = "unavailable" + + ErrorCodeUnprocessableEntity ErrorCode = "unprocessable entity" + + ErrorCodeUnsupportedMediaType ErrorCode = "unsupported media type" +) + +// Defines values for FieldType. +const ( + FieldTypeField FieldType = "field" + + FieldTypeFunc FieldType = "func" + + FieldTypeInteger FieldType = "integer" + + FieldTypeNumber FieldType = "number" + + FieldTypeRegex FieldType = "regex" + + FieldTypeWildcard FieldType = "wildcard" +) + +// Defines values for GaugeViewPropertiesShape. +const ( + GaugeViewPropertiesShapeChronografV2 GaugeViewPropertiesShape = "chronograf-v2" +) + +// Defines values for GaugeViewPropertiesType. +const ( + GaugeViewPropertiesTypeGauge GaugeViewPropertiesType = "gauge" +) + +// Defines values for GeoViewLayerPropertiesType. +const ( + GeoViewLayerPropertiesTypeCircleMap GeoViewLayerPropertiesType = "circleMap" + + GeoViewLayerPropertiesTypeHeatmap GeoViewLayerPropertiesType = "heatmap" + + GeoViewLayerPropertiesTypePointMap GeoViewLayerPropertiesType = "pointMap" + + GeoViewLayerPropertiesTypeTrackMap GeoViewLayerPropertiesType = "trackMap" +) + +// Defines values for GeoViewPropertiesShape. +const ( + GeoViewPropertiesShapeChronografV2 GeoViewPropertiesShape = "chronograf-v2" +) + +// Defines values for GeoViewPropertiesType. +const ( + GeoViewPropertiesTypeGeo GeoViewPropertiesType = "geo" +) + +// Defines values for GreaterThresholdType. +const ( + GreaterThresholdTypeGreater GreaterThresholdType = "greater" +) + +// Defines values for HTTPNotificationEndpointAuthMethod. +const ( + HTTPNotificationEndpointAuthMethodBasic HTTPNotificationEndpointAuthMethod = "basic" + + HTTPNotificationEndpointAuthMethodBearer HTTPNotificationEndpointAuthMethod = "bearer" + + HTTPNotificationEndpointAuthMethodNone HTTPNotificationEndpointAuthMethod = "none" +) + +// Defines values for HTTPNotificationEndpointMethod. +const ( + HTTPNotificationEndpointMethodGET HTTPNotificationEndpointMethod = "GET" + + HTTPNotificationEndpointMethodPOST HTTPNotificationEndpointMethod = "POST" + + HTTPNotificationEndpointMethodPUT HTTPNotificationEndpointMethod = "PUT" +) + +// Defines values for HTTPNotificationRuleBaseType. +const ( + HTTPNotificationRuleBaseTypeHttp HTTPNotificationRuleBaseType = "http" +) + +// Defines values for HealthCheckStatus. +const ( + HealthCheckStatusFail HealthCheckStatus = "fail" + + HealthCheckStatusPass HealthCheckStatus = "pass" +) + +// Defines values for HeatmapViewPropertiesShape. +const ( + HeatmapViewPropertiesShapeChronografV2 HeatmapViewPropertiesShape = "chronograf-v2" +) + +// Defines values for HeatmapViewPropertiesType. +const ( + HeatmapViewPropertiesTypeHeatmap HeatmapViewPropertiesType = "heatmap" +) + +// Defines values for HistogramViewPropertiesPosition. 
+const ( + HistogramViewPropertiesPositionOverlaid HistogramViewPropertiesPosition = "overlaid" + + HistogramViewPropertiesPositionStacked HistogramViewPropertiesPosition = "stacked" +) + +// Defines values for HistogramViewPropertiesShape. +const ( + HistogramViewPropertiesShapeChronografV2 HistogramViewPropertiesShape = "chronograf-v2" +) + +// Defines values for HistogramViewPropertiesType. +const ( + HistogramViewPropertiesTypeHistogram HistogramViewPropertiesType = "histogram" +) + +// Defines values for LesserThresholdType. +const ( + LesserThresholdTypeLesser LesserThresholdType = "lesser" +) + +// Defines values for LinePlusSingleStatPropertiesHoverDimension. +const ( + LinePlusSingleStatPropertiesHoverDimensionAuto LinePlusSingleStatPropertiesHoverDimension = "auto" + + LinePlusSingleStatPropertiesHoverDimensionX LinePlusSingleStatPropertiesHoverDimension = "x" + + LinePlusSingleStatPropertiesHoverDimensionXy LinePlusSingleStatPropertiesHoverDimension = "xy" + + LinePlusSingleStatPropertiesHoverDimensionY LinePlusSingleStatPropertiesHoverDimension = "y" +) + +// Defines values for LinePlusSingleStatPropertiesPosition. +const ( + LinePlusSingleStatPropertiesPositionOverlaid LinePlusSingleStatPropertiesPosition = "overlaid" + + LinePlusSingleStatPropertiesPositionStacked LinePlusSingleStatPropertiesPosition = "stacked" +) + +// Defines values for LinePlusSingleStatPropertiesShape. +const ( + LinePlusSingleStatPropertiesShapeChronografV2 LinePlusSingleStatPropertiesShape = "chronograf-v2" +) + +// Defines values for LinePlusSingleStatPropertiesType. +const ( + LinePlusSingleStatPropertiesTypeLinePlusSingleStat LinePlusSingleStatPropertiesType = "line-plus-single-stat" +) + +// Defines values for LineProtocolErrorCode. +const ( + LineProtocolErrorCodeConflict LineProtocolErrorCode = "conflict" + + LineProtocolErrorCodeEmptyValue LineProtocolErrorCode = "empty value" + + LineProtocolErrorCodeInternalError LineProtocolErrorCode = "internal error" + + LineProtocolErrorCodeInvalid LineProtocolErrorCode = "invalid" + + LineProtocolErrorCodeNotFound LineProtocolErrorCode = "not found" + + LineProtocolErrorCodeUnavailable LineProtocolErrorCode = "unavailable" +) + +// Defines values for LineProtocolLengthErrorCode. +const ( + LineProtocolLengthErrorCodeInvalid LineProtocolLengthErrorCode = "invalid" +) + +// Defines values for MapVariablePropertiesType. +const ( + MapVariablePropertiesTypeMap MapVariablePropertiesType = "map" +) + +// Defines values for MarkdownViewPropertiesShape. +const ( + MarkdownViewPropertiesShapeChronografV2 MarkdownViewPropertiesShape = "chronograf-v2" +) + +// Defines values for MarkdownViewPropertiesType. +const ( + MarkdownViewPropertiesTypeMarkdown MarkdownViewPropertiesType = "markdown" +) + +// Defines values for MosaicViewPropertiesHoverDimension. +const ( + MosaicViewPropertiesHoverDimensionAuto MosaicViewPropertiesHoverDimension = "auto" + + MosaicViewPropertiesHoverDimensionX MosaicViewPropertiesHoverDimension = "x" + + MosaicViewPropertiesHoverDimensionXy MosaicViewPropertiesHoverDimension = "xy" + + MosaicViewPropertiesHoverDimensionY MosaicViewPropertiesHoverDimension = "y" +) + +// Defines values for MosaicViewPropertiesShape. +const ( + MosaicViewPropertiesShapeChronografV2 MosaicViewPropertiesShape = "chronograf-v2" +) + +// Defines values for MosaicViewPropertiesType. +const ( + MosaicViewPropertiesTypeMosaic MosaicViewPropertiesType = "mosaic" +) + +// Defines values for NotificationEndpointBaseStatus. 
+const ( + NotificationEndpointBaseStatusActive NotificationEndpointBaseStatus = "active" + + NotificationEndpointBaseStatusInactive NotificationEndpointBaseStatus = "inactive" +) + +// Defines values for NotificationEndpointType. +const ( + NotificationEndpointTypeHttp NotificationEndpointType = "http" + + NotificationEndpointTypePagerduty NotificationEndpointType = "pagerduty" + + NotificationEndpointTypeSlack NotificationEndpointType = "slack" + + NotificationEndpointTypeTelegram NotificationEndpointType = "telegram" +) + +// Defines values for NotificationEndpointUpdateStatus. +const ( + NotificationEndpointUpdateStatusActive NotificationEndpointUpdateStatus = "active" + + NotificationEndpointUpdateStatusInactive NotificationEndpointUpdateStatus = "inactive" +) + +// Defines values for NotificationRuleBaseLastRunStatus. +const ( + NotificationRuleBaseLastRunStatusCanceled NotificationRuleBaseLastRunStatus = "canceled" + + NotificationRuleBaseLastRunStatusFailed NotificationRuleBaseLastRunStatus = "failed" + + NotificationRuleBaseLastRunStatusSuccess NotificationRuleBaseLastRunStatus = "success" +) + +// Defines values for NotificationRuleUpdateStatus. +const ( + NotificationRuleUpdateStatusActive NotificationRuleUpdateStatus = "active" + + NotificationRuleUpdateStatusInactive NotificationRuleUpdateStatus = "inactive" +) + +// Defines values for OrganizationStatus. +const ( + OrganizationStatusActive OrganizationStatus = "active" + + OrganizationStatusInactive OrganizationStatus = "inactive" +) + +// Defines values for PagerDutyNotificationRuleBaseType. +const ( + PagerDutyNotificationRuleBaseTypePagerduty PagerDutyNotificationRuleBaseType = "pagerduty" +) + +// Defines values for PatchRetentionRuleType. +const ( + PatchRetentionRuleTypeExpire PatchRetentionRuleType = "expire" +) + +// Defines values for PermissionAction. +const ( + PermissionActionRead PermissionAction = "read" + + PermissionActionWrite PermissionAction = "write" +) + +// Defines values for QueryType. +const ( + QueryTypeFlux QueryType = "flux" +) + +// Defines values for QueryEditMode. +const ( + QueryEditModeAdvanced QueryEditMode = "advanced" + + QueryEditModeBuilder QueryEditMode = "builder" +) + +// Defines values for QueryVariablePropertiesType. +const ( + QueryVariablePropertiesTypeQuery QueryVariablePropertiesType = "query" +) + +// Defines values for RangeThresholdType. +const ( + RangeThresholdTypeRange RangeThresholdType = "range" +) + +// Defines values for ReadyStatus. +const ( + ReadyStatusReady ReadyStatus = "ready" +) + +// Defines values for ResourceType. 
+const ( + ResourceTypeAnnotations ResourceType = "annotations" + + ResourceTypeAuthorizations ResourceType = "authorizations" + + ResourceTypeBuckets ResourceType = "buckets" + + ResourceTypeChecks ResourceType = "checks" + + ResourceTypeDashboards ResourceType = "dashboards" + + ResourceTypeDbrp ResourceType = "dbrp" + + ResourceTypeDocuments ResourceType = "documents" + + ResourceTypeLabels ResourceType = "labels" + + ResourceTypeNotebooks ResourceType = "notebooks" + + ResourceTypeNotificationEndpoints ResourceType = "notificationEndpoints" + + ResourceTypeNotificationRules ResourceType = "notificationRules" + + ResourceTypeOrgs ResourceType = "orgs" + + ResourceTypeRemotes ResourceType = "remotes" + + ResourceTypeReplications ResourceType = "replications" + + ResourceTypeScrapers ResourceType = "scrapers" + + ResourceTypeSecrets ResourceType = "secrets" + + ResourceTypeSources ResourceType = "sources" + + ResourceTypeTasks ResourceType = "tasks" + + ResourceTypeTelegrafs ResourceType = "telegrafs" + + ResourceTypeUsers ResourceType = "users" + + ResourceTypeVariables ResourceType = "variables" + + ResourceTypeViews ResourceType = "views" +) + +// Defines values for ResourceMemberRole. +const ( + ResourceMemberRoleMember ResourceMemberRole = "member" +) + +// Defines values for ResourceOwnerRole. +const ( + ResourceOwnerRoleOwner ResourceOwnerRole = "owner" +) + +// Defines values for RetentionRuleType. +const ( + RetentionRuleTypeExpire RetentionRuleType = "expire" +) + +// Defines values for RuleStatusLevel. +const ( + RuleStatusLevelANY RuleStatusLevel = "ANY" + + RuleStatusLevelCRIT RuleStatusLevel = "CRIT" + + RuleStatusLevelINFO RuleStatusLevel = "INFO" + + RuleStatusLevelOK RuleStatusLevel = "OK" + + RuleStatusLevelUNKNOWN RuleStatusLevel = "UNKNOWN" + + RuleStatusLevelWARN RuleStatusLevel = "WARN" +) + +// Defines values for RunStatus. +const ( + RunStatusCanceled RunStatus = "canceled" + + RunStatusFailed RunStatus = "failed" + + RunStatusScheduled RunStatus = "scheduled" + + RunStatusStarted RunStatus = "started" + + RunStatusSuccess RunStatus = "success" +) + +// Defines values for SMTPNotificationRuleBaseType. +const ( + SMTPNotificationRuleBaseTypeSmtp SMTPNotificationRuleBaseType = "smtp" +) + +// Defines values for ScatterViewPropertiesShape. +const ( + ScatterViewPropertiesShapeChronografV2 ScatterViewPropertiesShape = "chronograf-v2" +) + +// Defines values for ScatterViewPropertiesType. +const ( + ScatterViewPropertiesTypeScatter ScatterViewPropertiesType = "scatter" +) + +// Defines values for SchemaType. +const ( + SchemaTypeExplicit SchemaType = "explicit" + + SchemaTypeImplicit SchemaType = "implicit" +) + +// Defines values for ScraperTargetRequestType. +const ( + ScraperTargetRequestTypePrometheus ScraperTargetRequestType = "prometheus" +) + +// Defines values for SimpleTableViewPropertiesShape. +const ( + SimpleTableViewPropertiesShapeChronografV2 SimpleTableViewPropertiesShape = "chronograf-v2" +) + +// Defines values for SimpleTableViewPropertiesType. +const ( + SimpleTableViewPropertiesTypeSimpleTable SimpleTableViewPropertiesType = "simple-table" +) + +// Defines values for SingleStatViewPropertiesShape. +const ( + SingleStatViewPropertiesShapeChronografV2 SingleStatViewPropertiesShape = "chronograf-v2" +) + +// Defines values for SingleStatViewPropertiesType. +const ( + SingleStatViewPropertiesTypeSingleStat SingleStatViewPropertiesType = "single-stat" +) + +// Defines values for SlackNotificationRuleBaseType. 
+const ( + SlackNotificationRuleBaseTypeSlack SlackNotificationRuleBaseType = "slack" +) + +// Defines values for SourceLanguages. +const ( + SourceLanguagesFlux SourceLanguages = "flux" + + SourceLanguagesInfluxql SourceLanguages = "influxql" +) + +// Defines values for SourceType. +const ( + SourceTypeSelf SourceType = "self" + + SourceTypeV1 SourceType = "v1" + + SourceTypeV2 SourceType = "v2" +) + +// Defines values for TableViewPropertiesShape. +const ( + TableViewPropertiesShapeChronografV2 TableViewPropertiesShape = "chronograf-v2" +) + +// Defines values for TableViewPropertiesTableOptionsWrapping. +const ( + TableViewPropertiesTableOptionsWrappingSingleLine TableViewPropertiesTableOptionsWrapping = "single-line" + + TableViewPropertiesTableOptionsWrappingTruncate TableViewPropertiesTableOptionsWrapping = "truncate" + + TableViewPropertiesTableOptionsWrappingWrap TableViewPropertiesTableOptionsWrapping = "wrap" +) + +// Defines values for TableViewPropertiesType. +const ( + TableViewPropertiesTypeTable TableViewPropertiesType = "table" +) + +// Defines values for TagRuleOperator. +const ( + TagRuleOperatorEqual TagRuleOperator = "equal" + + TagRuleOperatorEqualregex TagRuleOperator = "equalregex" + + TagRuleOperatorNotequal TagRuleOperator = "notequal" + + TagRuleOperatorNotequalregex TagRuleOperator = "notequalregex" +) + +// Defines values for TaskLastRunStatus. +const ( + TaskLastRunStatusCanceled TaskLastRunStatus = "canceled" + + TaskLastRunStatusFailed TaskLastRunStatus = "failed" + + TaskLastRunStatusSuccess TaskLastRunStatus = "success" +) + +// Defines values for TaskStatusType. +const ( + TaskStatusTypeActive TaskStatusType = "active" + + TaskStatusTypeInactive TaskStatusType = "inactive" +) + +// Defines values for TelegramNotificationRuleBaseParseMode. +const ( + TelegramNotificationRuleBaseParseModeHTML TelegramNotificationRuleBaseParseMode = "HTML" + + TelegramNotificationRuleBaseParseModeMarkdown TelegramNotificationRuleBaseParseMode = "Markdown" + + TelegramNotificationRuleBaseParseModeMarkdownV2 TelegramNotificationRuleBaseParseMode = "MarkdownV2" +) + +// Defines values for TelegramNotificationRuleBaseType. +const ( + TelegramNotificationRuleBaseTypeTelegram TelegramNotificationRuleBaseType = "telegram" +) + +// Defines values for TemplateKind. +const ( + TemplateKindBucket TemplateKind = "Bucket" + + TemplateKindCheck TemplateKind = "Check" + + TemplateKindCheckDeadman TemplateKind = "CheckDeadman" + + TemplateKindCheckThreshold TemplateKind = "CheckThreshold" + + TemplateKindDashboard TemplateKind = "Dashboard" + + TemplateKindLabel TemplateKind = "Label" + + TemplateKindNotificationEndpoint TemplateKind = "NotificationEndpoint" + + TemplateKindNotificationEndpointHTTP TemplateKind = "NotificationEndpointHTTP" + + TemplateKindNotificationEndpointPagerDuty TemplateKind = "NotificationEndpointPagerDuty" + + TemplateKindNotificationEndpointSlack TemplateKind = "NotificationEndpointSlack" + + TemplateKindNotificationRule TemplateKind = "NotificationRule" + + TemplateKindTask TemplateKind = "Task" + + TemplateKindTelegraf TemplateKind = "Telegraf" + + TemplateKindVariable TemplateKind = "Variable" +) + +// Defines values for ThresholdCheckType. +const ( + ThresholdCheckTypeThreshold ThresholdCheckType = "threshold" +) + +// Defines values for UserStatus. +const ( + UserStatusActive UserStatus = "active" + + UserStatusInactive UserStatus = "inactive" +) + +// Defines values for UserResponseStatus. 
+const ( + UserResponseStatusActive UserResponseStatus = "active" + + UserResponseStatusInactive UserResponseStatus = "inactive" +) + +// Defines values for WritePrecision. +const ( + WritePrecisionMs WritePrecision = "ms" + + WritePrecisionNs WritePrecision = "ns" + + WritePrecisionS WritePrecision = "s" + + WritePrecisionUs WritePrecision = "us" +) + +// Defines values for XYGeom. +const ( + XYGeomBar XYGeom = "bar" + + XYGeomLine XYGeom = "line" + + XYGeomMonotoneX XYGeom = "monotoneX" + + XYGeomStacked XYGeom = "stacked" + + XYGeomStep XYGeom = "step" +) + +// Defines values for XYViewPropertiesHoverDimension. +const ( + XYViewPropertiesHoverDimensionAuto XYViewPropertiesHoverDimension = "auto" + + XYViewPropertiesHoverDimensionX XYViewPropertiesHoverDimension = "x" + + XYViewPropertiesHoverDimensionXy XYViewPropertiesHoverDimension = "xy" + + XYViewPropertiesHoverDimensionY XYViewPropertiesHoverDimension = "y" +) + +// Defines values for XYViewPropertiesPosition. +const ( + XYViewPropertiesPositionOverlaid XYViewPropertiesPosition = "overlaid" + + XYViewPropertiesPositionStacked XYViewPropertiesPosition = "stacked" +) + +// Defines values for XYViewPropertiesShape. +const ( + XYViewPropertiesShapeChronografV2 XYViewPropertiesShape = "chronograf-v2" +) + +// Defines values for XYViewPropertiesType. +const ( + XYViewPropertiesTypeXy XYViewPropertiesType = "xy" +) + +// Contains the AST for the supplied Flux query +type ASTResponse struct { + // Represents a complete package source tree. + Ast *Package `json:"ast,omitempty"` +} + +// AddResourceMemberRequestBody defines model for AddResourceMemberRequestBody. +type AddResourceMemberRequestBody struct { + Id string `json:"id"` + Name *string `json:"name,omitempty"` +} + +// AnalyzeQueryResponse defines model for AnalyzeQueryResponse. +type AnalyzeQueryResponse struct { + Errors *[]struct { + Character *int `json:"character,omitempty"` + Column *int `json:"column,omitempty"` + Line *int `json:"line,omitempty"` + Message *string `json:"message,omitempty"` + } `json:"errors,omitempty"` +} + +// Used to create and directly specify the elements of an array object +type ArrayExpression struct { + // Elements of the array + Elements *[]Expression `json:"elements,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Authorization defines model for Authorization. +type Authorization struct { + // Embedded struct due to allOf(#/components/schemas/AuthorizationUpdateRequest) + AuthorizationUpdateRequest `yaml:",inline"` + // Embedded fields due to inline allOf schema + CreatedAt *time.Time `json:"createdAt,omitempty"` + Id *string `json:"id,omitempty"` + Links *struct { + // URI of resource. + Self *Link `json:"self,omitempty"` + + // URI of resource. + User *Link `json:"user,omitempty"` + } `json:"links,omitempty"` + + // Name of the organization that the token is scoped to. + Org *string `json:"org,omitempty"` + + // ID of the organization that the authorization is scoped to. + OrgID *string `json:"orgID,omitempty"` + + // List of permissions for an authorization. An authorization must have at least one permission. + Permissions *[]Permission `json:"permissions,omitempty"` + + // Token used to authenticate API requests. + Token *string `json:"token,omitempty"` + UpdatedAt *time.Time `json:"updatedAt,omitempty"` + + // Name of the user that created and owns the token. + User *string `json:"user,omitempty"` + + // ID of the user that created and owns the token. 
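The WritePrecision values above (`ns`, `us`, `ms`, `s`) control how line-protocol timestamps are interpreted on write. A standard-library sketch of a raw write at second precision against the `/api/v2/write` endpoint (defined elsewhere in this spec); the server URL, org, bucket, token, and measurement are placeholders:

```go
package main

import (
	"fmt"
	"net/http"
	"strings"
)

func main() {
	// One line-protocol record; with no trailing timestamp the server assigns one.
	line := "temperature,sensor=tilt value=20.1"

	// precision=s corresponds to WritePrecisionS above.
	url := "http://localhost:8086/api/v2/write?org=my-org&bucket=tilty&precision=s"
	req, err := http.NewRequest(http.MethodPost, url, strings.NewReader(line))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Token MY_INFLUX_TOKEN") // placeholder token
	req.Header.Set("Content-Type", "text/plain; charset=utf-8")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status) // 204 No Content on success
}
```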
+ UserID *string `json:"userID,omitempty"` +} + +// AuthorizationPostRequest defines model for AuthorizationPostRequest. +type AuthorizationPostRequest struct { + // Embedded struct due to allOf(#/components/schemas/AuthorizationUpdateRequest) + AuthorizationUpdateRequest `yaml:",inline"` + // Embedded fields due to inline allOf schema + // ID of org that authorization is scoped to. + OrgID *string `json:"orgID,omitempty"` + + // List of permissions for an auth. An auth must have at least one Permission. + Permissions *[]Permission `json:"permissions,omitempty"` + + // ID of user that authorization is scoped to. + UserID *string `json:"userID,omitempty"` +} + +// AuthorizationUpdateRequest defines model for AuthorizationUpdateRequest. +type AuthorizationUpdateRequest struct { + // A description of the token. + Description *string `json:"description,omitempty"` + + // Status of the token. If `inactive`, requests using the token will be rejected. + Status *AuthorizationUpdateRequestStatus `json:"status,omitempty"` +} + +// Status of the token. If `inactive`, requests using the token will be rejected. +type AuthorizationUpdateRequestStatus string + +// Authorizations defines model for Authorizations. +type Authorizations struct { + Authorizations *[]Authorization `json:"authorizations,omitempty"` + Links *Links `json:"links,omitempty"` +} + +// The viewport for a View's visualizations +type Axes struct { + // Axis used in a visualization. + X Axis `json:"x"` + + // Axis used in a visualization. + Y Axis `json:"y"` +} + +// Axis used in a visualization. +type Axis struct { + // Radix for formatting axis values. + Base *AxisBase `json:"base,omitempty"` + + // The extents of the axis in the form [lower, upper]. Clients determine whether bounds are inclusive or exclusive of their limits. + Bounds *[]string `json:"bounds,omitempty"` + + // Description of the axis. + Label *string `json:"label,omitempty"` + + // Label prefix for formatting axis values. + Prefix *string `json:"prefix,omitempty"` + + // Scale is the axis formatting scale. Supported: "log", "linear" + Scale *AxisScale `json:"scale,omitempty"` + + // Label suffix for formatting axis values. + Suffix *string `json:"suffix,omitempty"` +} + +// Radix for formatting axis values. +type AxisBase string + +// Scale is the axis formatting scale. Supported: "log", "linear" +type AxisScale string + +// A placeholder for statements for which no correct statement nodes can be created +type BadStatement struct { + // Raw source text + Text *string `json:"text,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// BandViewProperties defines model for BandViewProperties. 
+type BandViewProperties struct { + // The viewport for a View's visualizations + Axes Axes `json:"axes"` + + // Colors define color encoding of data into a visualization + Colors []DashboardColor `json:"colors"` + GenerateXAxisTicks *[]string `json:"generateXAxisTicks,omitempty"` + GenerateYAxisTicks *[]string `json:"generateYAxisTicks,omitempty"` + Geom XYGeom `json:"geom"` + HoverDimension *BandViewPropertiesHoverDimension `json:"hoverDimension,omitempty"` + LegendColorizeRows *bool `json:"legendColorizeRows,omitempty"` + LegendHide *bool `json:"legendHide,omitempty"` + LegendOpacity *float32 `json:"legendOpacity,omitempty"` + LegendOrientationThreshold *int `json:"legendOrientationThreshold,omitempty"` + LowerColumn *string `json:"lowerColumn,omitempty"` + MainColumn *string `json:"mainColumn,omitempty"` + Note string `json:"note"` + Queries []DashboardQuery `json:"queries"` + Shape BandViewPropertiesShape `json:"shape"` + + // If true, will display note when empty + ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` + + // StaticLegend represents the options specific to the static legend + StaticLegend *StaticLegend `json:"staticLegend,omitempty"` + TimeFormat *string `json:"timeFormat,omitempty"` + Type BandViewPropertiesType `json:"type"` + UpperColumn *string `json:"upperColumn,omitempty"` + XColumn *string `json:"xColumn,omitempty"` + XTickStart *float32 `json:"xTickStart,omitempty"` + XTickStep *float32 `json:"xTickStep,omitempty"` + XTotalTicks *int `json:"xTotalTicks,omitempty"` + YColumn *string `json:"yColumn,omitempty"` + YTickStart *float32 `json:"yTickStart,omitempty"` + YTickStep *float32 `json:"yTickStep,omitempty"` + YTotalTicks *int `json:"yTotalTicks,omitempty"` +} + +// BandViewPropertiesHoverDimension defines model for BandViewProperties.HoverDimension. +type BandViewPropertiesHoverDimension string + +// BandViewPropertiesShape defines model for BandViewProperties.Shape. +type BandViewPropertiesShape string + +// BandViewPropertiesType defines model for BandViewProperties.Type. +type BandViewPropertiesType string + +// uses binary operators to act on two operands in an expression +type BinaryExpression struct { + Left *Expression `json:"left,omitempty"` + Operator *string `json:"operator,omitempty"` + Right *Expression `json:"right,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// A set of statements +type Block struct { + // Block body + Body *[]Statement `json:"body,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Represents boolean values +type BooleanLiteral struct { + // Type of AST node + Type *NodeType `json:"type,omitempty"` + Value *bool `json:"value,omitempty"` +} + +// Bucket defines model for Bucket. +type Bucket struct { + CreatedAt *time.Time `json:"createdAt,omitempty"` + Description *string `json:"description,omitempty"` + Id *string `json:"id,omitempty"` + Labels *Labels `json:"labels,omitempty"` + Links *struct { + // URI of resource. + Labels *Link `json:"labels,omitempty"` + + // URI of resource. + Members *Link `json:"members,omitempty"` + + // URI of resource. + Org *Link `json:"org,omitempty"` + + // URI of resource. + Owners *Link `json:"owners,omitempty"` + + // URI of resource. + Self *Link `json:"self,omitempty"` + + // URI of resource. + Write *Link `json:"write,omitempty"` + } `json:"links,omitempty"` + Name string `json:"name"` + OrgID *string `json:"orgID,omitempty"` + + // Rules to expire or retain data. No rules means data never expires. 
+ RetentionRules RetentionRules `json:"retentionRules"` + Rp *string `json:"rp,omitempty"` + SchemaType *SchemaType `json:"schemaType,omitempty"` + Type *BucketType `json:"type,omitempty"` + UpdatedAt *time.Time `json:"updatedAt,omitempty"` +} + +// BucketType defines model for Bucket.Type. +type BucketType string + +// BucketMetadataManifest defines model for BucketMetadataManifest. +type BucketMetadataManifest struct { + BucketID string `json:"bucketID"` + BucketName string `json:"bucketName"` + DefaultRetentionPolicy string `json:"defaultRetentionPolicy"` + Description *string `json:"description,omitempty"` + OrganizationID string `json:"organizationID"` + OrganizationName string `json:"organizationName"` + RetentionPolicies RetentionPolicyManifests `json:"retentionPolicies"` +} + +// BucketMetadataManifests defines model for BucketMetadataManifests. +type BucketMetadataManifests []BucketMetadataManifest + +// BucketShardMapping defines model for BucketShardMapping. +type BucketShardMapping struct { + NewId int64 `json:"newId"` + OldId int64 `json:"oldId"` +} + +// BucketShardMappings defines model for BucketShardMappings. +type BucketShardMappings []BucketShardMapping + +// Buckets defines model for Buckets. +type Buckets struct { + Buckets *[]Bucket `json:"buckets,omitempty"` + Links *Links `json:"links,omitempty"` +} + +// BuilderAggregateFunctionType defines model for BuilderAggregateFunctionType. +type BuilderAggregateFunctionType string + +// BuilderConfig defines model for BuilderConfig. +type BuilderConfig struct { + AggregateWindow *struct { + FillValues *bool `json:"fillValues,omitempty"` + Period *string `json:"period,omitempty"` + } `json:"aggregateWindow,omitempty"` + Buckets *[]string `json:"buckets,omitempty"` + Functions *[]BuilderFunctionsType `json:"functions,omitempty"` + Tags *[]BuilderTagsType `json:"tags,omitempty"` +} + +// BuilderFunctionsType defines model for BuilderFunctionsType. +type BuilderFunctionsType struct { + Name *string `json:"name,omitempty"` +} + +// BuilderTagsType defines model for BuilderTagsType. +type BuilderTagsType struct { + AggregateFunctionType *BuilderAggregateFunctionType `json:"aggregateFunctionType,omitempty"` + Key *string `json:"key,omitempty"` + Values *[]string `json:"values,omitempty"` +} + +// Declares a builtin identifier and its type +type BuiltinStatement struct { + // A valid Flux identifier + Id *Identifier `json:"id,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Represents a function call +type CallExpression struct { + // Function arguments + Arguments *[]Expression `json:"arguments,omitempty"` + Callee *Expression `json:"callee,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Cell defines model for Cell. +type Cell struct { + H *int32 `json:"h,omitempty"` + Id *string `json:"id,omitempty"` + Links *struct { + Self *string `json:"self,omitempty"` + View *string `json:"view,omitempty"` + } `json:"links,omitempty"` + + // The reference to a view from the views API. + ViewID *string `json:"viewID,omitempty"` + W *int32 `json:"w,omitempty"` + X *int32 `json:"x,omitempty"` + Y *int32 `json:"y,omitempty"` +} + +// CellUpdate defines model for CellUpdate. +type CellUpdate struct { + H *int32 `json:"h,omitempty"` + W *int32 `json:"w,omitempty"` + X *int32 `json:"x,omitempty"` + Y *int32 `json:"y,omitempty"` +} + +// CellWithViewProperties defines model for CellWithViewProperties. 
+type CellWithViewProperties struct {
+ // Embedded struct due to allOf(#/components/schemas/Cell)
+ Cell `yaml:",inline"`
+ // Embedded fields due to inline allOf schema
+ Name *string `json:"name,omitempty"`
+ Properties *ViewProperties `json:"properties,omitempty"`
+}
+
+// Cells defines model for Cells.
+type Cells []Cell
+
+// CellsWithViewProperties defines model for CellsWithViewProperties.
+type CellsWithViewProperties []CellWithViewProperties
+
+// Check defines model for Check.
+type Check struct {
+ // Embedded struct due to allOf(#/components/schemas/CheckDiscriminator)
+ CheckDiscriminator `yaml:",inline"`
+}
+
+// CheckBase defines model for CheckBase.
+type CheckBase struct {
+ CreatedAt *time.Time `json:"createdAt,omitempty"`
+
+ // An optional description of the check.
+ Description *string `json:"description,omitempty"`
+ Id *string `json:"id,omitempty"`
+ Labels *Labels `json:"labels,omitempty"`
+ LastRunError *string `json:"lastRunError,omitempty"`
+ LastRunStatus *CheckBaseLastRunStatus `json:"lastRunStatus,omitempty"`
+
+ // Timestamp (in [RFC3339 date/time format](https://datatracker.ietf.org/doc/html/rfc3339)) of the latest scheduled and completed run.
+ LatestCompleted *interface{} `json:"latestCompleted,omitempty"`
+ Links *struct {
+ // URI of resource.
+ Labels *Link `json:"labels,omitempty"`
+
+ // URI of resource.
+ Members *Link `json:"members,omitempty"`
+
+ // URI of resource.
+ Owners *Link `json:"owners,omitempty"`
+
+ // URI of resource.
+ Query *Link `json:"query,omitempty"`
+
+ // URI of resource.
+ Self *Link `json:"self,omitempty"`
+ } `json:"links,omitempty"`
+ Name string `json:"name"`
+
+ // The ID of the organization that owns this check.
+ OrgID string `json:"orgID"`
+
+ // The ID of creator used to create this check.
+ OwnerID *string `json:"ownerID,omitempty"`
+ Query DashboardQuery `json:"query"`
+ Status *TaskStatusType `json:"status,omitempty"`
+
+ // The ID of the task associated with this check.
+ TaskID *string `json:"taskID,omitempty"`
+ UpdatedAt *time.Time `json:"updatedAt,omitempty"`
+}
+
+// CheckBaseLastRunStatus defines model for CheckBase.LastRunStatus.
+type CheckBaseLastRunStatus string
+
+// CheckDiscriminator defines model for CheckDiscriminator.
+type CheckDiscriminator interface{}
+
+// CheckPatch defines model for CheckPatch.
+type CheckPatch struct {
+ Description *string `json:"description,omitempty"`
+ Name *string `json:"name,omitempty"`
+ Status *CheckPatchStatus `json:"status,omitempty"`
+}
+
+// CheckPatchStatus defines model for CheckPatch.Status.
+type CheckPatchStatus string
+
+// The state to record if check matches a criteria.
+type CheckStatusLevel string
+
+// CheckViewProperties defines model for CheckViewProperties.
+type CheckViewProperties struct {
+ Check *Check `json:"check,omitempty"`
+ CheckID string `json:"checkID"`
+
+ // Colors define color encoding of data into a visualization
+ Colors []DashboardColor `json:"colors"`
+ LegendColorizeRows *bool `json:"legendColorizeRows,omitempty"`
+ LegendHide *bool `json:"legendHide,omitempty"`
+ LegendOpacity *float32 `json:"legendOpacity,omitempty"`
+ LegendOrientationThreshold *int `json:"legendOrientationThreshold,omitempty"`
+ Queries []DashboardQuery `json:"queries"`
+ Shape CheckViewPropertiesShape `json:"shape"`
+ Type CheckViewPropertiesType `json:"type"`
+}
+
+// CheckViewPropertiesShape defines model for CheckViewProperties.Shape.
+type CheckViewPropertiesShape string
+
+// CheckViewPropertiesType defines model for CheckViewProperties.Type.
+type CheckViewPropertiesType string + +// Checks defines model for Checks. +type Checks struct { + Checks *[]Check `json:"checks,omitempty"` + Links *Links `json:"links,omitempty"` +} + +// A color mapping is an object that maps time series data to a UI color scheme to allow the UI to render graphs consistent colors across reloads. +type ColorMapping struct { + AdditionalProperties map[string]string `json:"-"` +} + +// Selects one of two expressions, `Alternate` or `Consequent`, depending on a third boolean expression, `Test` +type ConditionalExpression struct { + Alternate *Expression `json:"alternate,omitempty"` + Consequent *Expression `json:"consequent,omitempty"` + Test *Expression `json:"test,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Config defines model for Config. +type Config struct { + Config *map[string]interface{} `json:"config,omitempty"` +} + +// ConstantVariableProperties defines model for ConstantVariableProperties. +type ConstantVariableProperties struct { + Type *ConstantVariablePropertiesType `json:"type,omitempty"` + Values *[]string `json:"values,omitempty"` +} + +// ConstantVariablePropertiesType defines model for ConstantVariableProperties.Type. +type ConstantVariablePropertiesType string + +// CreateCell defines model for CreateCell. +type CreateCell struct { + H *int32 `json:"h,omitempty"` + Name *string `json:"name,omitempty"` + + // Makes a copy of the provided view. + UsingView *string `json:"usingView,omitempty"` + W *int32 `json:"w,omitempty"` + X *int32 `json:"x,omitempty"` + Y *int32 `json:"y,omitempty"` +} + +// CreateDashboardRequest defines model for CreateDashboardRequest. +type CreateDashboardRequest struct { + // The user-facing description of the dashboard. + Description *string `json:"description,omitempty"` + + // The user-facing name of the dashboard. + Name string `json:"name"` + + // The ID of the organization that owns the dashboard. + OrgID string `json:"orgID"` +} + +// CustomCheck defines model for CustomCheck. +type CustomCheck struct { + // Embedded struct due to allOf(#/components/schemas/CheckBase) + CheckBase `yaml:",inline"` + // Embedded fields due to inline allOf schema + Type CustomCheckType `json:"type"` +} + +// CustomCheckType defines model for CustomCheck.Type. +type CustomCheckType string + +// DBRP defines model for DBRP. +type DBRP struct { + // ID of the bucket used as the target for the translation. + BucketID string `json:"bucketID"` + + // InfluxDB v1 database + Database string `json:"database"` + + // Mapping represents the default retention policy for the database specified. + Default bool `json:"default"` + + // ID of the DBRP mapping. + Id string `json:"id"` + Links *Links `json:"links,omitempty"` + + // ID of the organization that owns this mapping. + OrgID string `json:"orgID"` + + // InfluxDB v1 retention policy + RetentionPolicy string `json:"retention_policy"` +} + +// DBRPCreate defines model for DBRPCreate. +type DBRPCreate struct { + // ID of the bucket used as the target for the translation. + BucketID string `json:"bucketID"` + + // InfluxDB v1 database + Database string `json:"database"` + + // Mapping represents the default retention policy for the database specified. + Default *bool `json:"default,omitempty"` + + // Name of the organization that owns this mapping. + Org *string `json:"org,omitempty"` + + // ID of the organization that owns this mapping. 
+ OrgID *string `json:"orgID,omitempty"` + + // InfluxDB v1 retention policy + RetentionPolicy string `json:"retention_policy"` +} + +// DBRPGet defines model for DBRPGet. +type DBRPGet struct { + Content *DBRP `json:"content,omitempty"` +} + +// DBRPUpdate defines model for DBRPUpdate. +type DBRPUpdate struct { + Default *bool `json:"default,omitempty"` + + // InfluxDB v1 retention policy + RetentionPolicy *string `json:"retention_policy,omitempty"` +} + +// DBRPs defines model for DBRPs. +type DBRPs struct { + Content *[]DBRP `json:"content,omitempty"` +} + +// Dashboard defines model for Dashboard. +type Dashboard struct { + // Embedded struct due to allOf(#/components/schemas/CreateDashboardRequest) + CreateDashboardRequest `yaml:",inline"` + // Embedded fields due to inline allOf schema + Cells *Cells `json:"cells,omitempty"` + Id *string `json:"id,omitempty"` + Labels *Labels `json:"labels,omitempty"` + Links *struct { + // URI of resource. + Cells *Link `json:"cells,omitempty"` + + // URI of resource. + Labels *Link `json:"labels,omitempty"` + + // URI of resource. + Members *Link `json:"members,omitempty"` + + // URI of resource. + Org *Link `json:"org,omitempty"` + + // URI of resource. + Owners *Link `json:"owners,omitempty"` + + // URI of resource. + Self *Link `json:"self,omitempty"` + } `json:"links,omitempty"` + Meta *struct { + CreatedAt *time.Time `json:"createdAt,omitempty"` + UpdatedAt *time.Time `json:"updatedAt,omitempty"` + } `json:"meta,omitempty"` +} + +// Defines an encoding of data value into color space. +type DashboardColor struct { + // The hex number of the color + Hex string `json:"hex"` + + // The unique ID of the view color. + Id string `json:"id"` + + // The user-facing name of the hex color. + Name string `json:"name"` + + // Type is how the color is used. + Type DashboardColorType `json:"type"` + + // The data value mapped to this color. + Value float32 `json:"value"` +} + +// Type is how the color is used. +type DashboardColorType string + +// DashboardQuery defines model for DashboardQuery. +type DashboardQuery struct { + BuilderConfig *BuilderConfig `json:"builderConfig,omitempty"` + EditMode *QueryEditMode `json:"editMode,omitempty"` + Name *string `json:"name,omitempty"` + + // The text of the Flux query. + Text *string `json:"text,omitempty"` +} + +// DashboardWithViewProperties defines model for DashboardWithViewProperties. +type DashboardWithViewProperties struct { + // Embedded struct due to allOf(#/components/schemas/CreateDashboardRequest) + CreateDashboardRequest `yaml:",inline"` + // Embedded fields due to inline allOf schema + Cells *CellsWithViewProperties `json:"cells,omitempty"` + Id *string `json:"id,omitempty"` + Labels *Labels `json:"labels,omitempty"` + Links *struct { + // URI of resource. + Cells *Link `json:"cells,omitempty"` + + // URI of resource. + Labels *Link `json:"labels,omitempty"` + + // URI of resource. + Members *Link `json:"members,omitempty"` + + // URI of resource. + Org *Link `json:"org,omitempty"` + + // URI of resource. + Owners *Link `json:"owners,omitempty"` + + // URI of resource. + Self *Link `json:"self,omitempty"` + } `json:"links,omitempty"` + Meta *struct { + CreatedAt *time.Time `json:"createdAt,omitempty"` + UpdatedAt *time.Time `json:"updatedAt,omitempty"` + } `json:"meta,omitempty"` +} + +// Dashboards defines model for Dashboards. 
+type Dashboards struct { + Dashboards *[]Dashboard `json:"dashboards,omitempty"` + Links *Links `json:"links,omitempty"` +} + +// Represents an instant in time with nanosecond precision using the syntax of golang's RFC3339 Nanosecond variant +type DateTimeLiteral struct { + // Type of AST node + Type *NodeType `json:"type,omitempty"` + Value *time.Time `json:"value,omitempty"` +} + +// DeadmanCheck defines model for DeadmanCheck. +type DeadmanCheck struct { + // Embedded struct due to allOf(#/components/schemas/CheckBase) + CheckBase `yaml:",inline"` + // Embedded fields due to inline allOf schema + // Check repetition interval. + Every *string `json:"every,omitempty"` + + // The state to record if check matches a criteria. + Level *CheckStatusLevel `json:"level,omitempty"` + + // Duration to delay after the schedule, before executing check. + Offset *string `json:"offset,omitempty"` + + // If only zero values reported since time, trigger an alert + ReportZero *bool `json:"reportZero,omitempty"` + + // String duration for time that a series is considered stale and should not trigger deadman. + StaleTime *string `json:"staleTime,omitempty"` + + // The template used to generate and write a status message. + StatusMessageTemplate *string `json:"statusMessageTemplate,omitempty"` + + // List of tags to write to each status. + Tags *[]struct { + Key *string `json:"key,omitempty"` + Value *string `json:"value,omitempty"` + } `json:"tags,omitempty"` + + // String duration before deadman triggers. + TimeSince *string `json:"timeSince,omitempty"` + Type DeadmanCheckType `json:"type"` +} + +// DeadmanCheckType defines model for DeadmanCheck.Type. +type DeadmanCheckType string + +// Indicates whether decimal places should be enforced, and how many digits it should show. +type DecimalPlaces struct { + // The number of digits after decimal to display + Digits *int32 `json:"digits,omitempty"` + + // Indicates whether decimal point setting should be enforced + IsEnforced *bool `json:"isEnforced,omitempty"` +} + +// The delete predicate request. +type DeletePredicateRequest struct { + // InfluxQL-like delete statement + Predicate *string `json:"predicate,omitempty"` + + // RFC3339Nano + Start time.Time `json:"start"` + + // RFC3339Nano + Stop time.Time `json:"stop"` +} + +// Dialect are options to change the default CSV output format; https://www.w3.org/TR/2015/REC-tabular-metadata-20151217/#dialect-descriptions +type Dialect struct { + // https://www.w3.org/TR/2015/REC-tabular-data-model-20151217/#columns + Annotations *[]DialectAnnotations `json:"annotations,omitempty"` + + // Character prefixed to comment strings + CommentPrefix *string `json:"commentPrefix,omitempty"` + + // Format of timestamps + DateTimeFormat *DialectDateTimeFormat `json:"dateTimeFormat,omitempty"` + + // Separator between cells; the default is , + Delimiter *string `json:"delimiter,omitempty"` + + // If true, the results will contain a header row + Header *bool `json:"header,omitempty"` +} + +// DialectAnnotations defines model for Dialect.Annotations. 
+type DialectAnnotations string + +// Format of timestamps +type DialectDateTimeFormat string + +// Used to create and directly specify the elements of a dictionary +type DictExpression struct { + // Elements of the dictionary + Elements *[]DictItem `json:"elements,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// A key/value pair in a dictionary +type DictItem struct { + Key *Expression `json:"key,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` + Val *Expression `json:"val,omitempty"` +} + +// A pair consisting of length of time and the unit of time measured. It is the atomic unit from which all duration literals are composed. +type Duration struct { + Magnitude *int `json:"magnitude,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` + Unit *string `json:"unit,omitempty"` +} + +// Represents the elapsed time between two instants as an int64 nanosecond count with syntax of golang's time.Duration +type DurationLiteral struct { + // Type of AST node + Type *NodeType `json:"type,omitempty"` + + // Duration values + Values *[]Duration `json:"values,omitempty"` +} + +// Error defines model for Error. +type Error struct { + // code is the machine-readable error code. + Code ErrorCode `json:"code"` + + // Stack of errors that occurred during processing of the request. Useful for debugging. + Err *string `json:"err,omitempty"` + + // Human-readable message. + Message *string `json:"message,omitempty"` + + // Describes the logical code operation when the error occurred. Useful for debugging. + Op *string `json:"op,omitempty"` +} + +// code is the machine-readable error code. +type ErrorCode string + +// Expression defines model for Expression. +type Expression interface{} + +// May consist of an expression that does not return a value and is executed solely for its side-effects +type ExpressionStatement struct { + Expression *Expression `json:"expression,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Field defines model for Field. +type Field struct { + // Alias overrides the field name in the returned response. Applies only if type is `func` + Alias *string `json:"alias,omitempty"` + + // Args are the arguments to the function + Args *[]Field `json:"args,omitempty"` + + // `type` describes the field type. `func` is a function. `field` is a field reference. + Type *FieldType `json:"type,omitempty"` + + // value is the value of the field. Meaning of the value is implied by the `type` key + Value *string `json:"value,omitempty"` +} + +// `type` describes the field type. `func` is a function. `field` is a field reference. +type FieldType string + +// Represents a source from a single file +type File struct { + // List of Flux statements + Body *[]Statement `json:"body,omitempty"` + + // A list of package imports + Imports *[]ImportDeclaration `json:"imports,omitempty"` + + // The name of the file. + Name *string `json:"name,omitempty"` + + // Defines a package identifier + Package *PackageClause `json:"package,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Flags defines model for Flags. 
+type Flags struct { + AdditionalProperties map[string]interface{} `json:"-"` +} + +// Represents floating point numbers according to the double representations defined by the IEEE-754-1985 +type FloatLiteral struct { + // Type of AST node + Type *NodeType `json:"type,omitempty"` + Value *float32 `json:"value,omitempty"` +} + +// Rendered flux that backs the check or notification. +type FluxResponse struct { + Flux *string `json:"flux,omitempty"` +} + +// FluxSuggestion defines model for FluxSuggestion. +type FluxSuggestion struct { + Name *string `json:"name,omitempty"` + Params *FluxSuggestion_Params `json:"params,omitempty"` +} + +// FluxSuggestion_Params defines model for FluxSuggestion.Params. +type FluxSuggestion_Params struct { + AdditionalProperties map[string]string `json:"-"` +} + +// FluxSuggestions defines model for FluxSuggestions. +type FluxSuggestions struct { + Funcs *[]FluxSuggestion `json:"funcs,omitempty"` +} + +// Function expression +type FunctionExpression struct { + Body *Node `json:"body,omitempty"` + + // Function parameters + Params *[]Property `json:"params,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// GaugeViewProperties defines model for GaugeViewProperties. +type GaugeViewProperties struct { + // Colors define color encoding of data into a visualization + Colors []DashboardColor `json:"colors"` + + // Indicates whether decimal places should be enforced, and how many digits it should show. + DecimalPlaces DecimalPlaces `json:"decimalPlaces"` + Note string `json:"note"` + Prefix string `json:"prefix"` + Queries []DashboardQuery `json:"queries"` + Shape GaugeViewPropertiesShape `json:"shape"` + + // If true, will display note when empty + ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` + Suffix string `json:"suffix"` + TickPrefix string `json:"tickPrefix"` + TickSuffix string `json:"tickSuffix"` + Type GaugeViewPropertiesType `json:"type"` +} + +// GaugeViewPropertiesShape defines model for GaugeViewProperties.Shape. +type GaugeViewPropertiesShape string + +// GaugeViewPropertiesType defines model for GaugeViewProperties.Type. +type GaugeViewPropertiesType string + +// GeoCircleViewLayer defines model for GeoCircleViewLayer. +type GeoCircleViewLayer struct { + // Embedded struct due to allOf(#/components/schemas/GeoViewLayerProperties) + GeoViewLayerProperties `yaml:",inline"` + // Embedded fields due to inline allOf schema + // Axis used in a visualization. + ColorDimension Axis `json:"colorDimension"` + + // Circle color field + ColorField string `json:"colorField"` + + // Colors define color encoding of data into a visualization + Colors []DashboardColor `json:"colors"` + + // Interpolate circle color based on displayed value + InterpolateColors *bool `json:"interpolateColors,omitempty"` + + // Maximum radius size in pixels + Radius *int `json:"radius,omitempty"` + + // Axis used in a visualization. + RadiusDimension Axis `json:"radiusDimension"` + + // Radius field + RadiusField string `json:"radiusField"` +} + +// GeoHeatMapViewLayer defines model for GeoHeatMapViewLayer. +type GeoHeatMapViewLayer struct { + // Embedded struct due to allOf(#/components/schemas/GeoViewLayerProperties) + GeoViewLayerProperties `yaml:",inline"` + // Embedded fields due to inline allOf schema + // Blur for heatmap points + Blur int `json:"blur"` + + // Colors define color encoding of data into a visualization + Colors []DashboardColor `json:"colors"` + + // Axis used in a visualization. 
+ IntensityDimension Axis `json:"intensityDimension"` + + // Intensity field + IntensityField string `json:"intensityField"` + + // Radius size in pixels + Radius int `json:"radius"` +} + +// GeoPointMapViewLayer defines model for GeoPointMapViewLayer. +type GeoPointMapViewLayer struct { + // Embedded struct due to allOf(#/components/schemas/GeoViewLayerProperties) + GeoViewLayerProperties `yaml:",inline"` + // Embedded fields due to inline allOf schema + // Axis used in a visualization. + ColorDimension Axis `json:"colorDimension"` + + // Marker color field + ColorField string `json:"colorField"` + + // Colors define color encoding of data into a visualization + Colors []DashboardColor `json:"colors"` + + // Cluster close markers together + IsClustered *bool `json:"isClustered,omitempty"` + + // An array for which columns to display in tooltip + TooltipColumns *[]string `json:"tooltipColumns,omitempty"` +} + +// GeoTrackMapViewLayer defines model for GeoTrackMapViewLayer. +type GeoTrackMapViewLayer struct { + // Embedded struct due to allOf(#/components/schemas/GeoViewLayerProperties) + GeoViewLayerProperties `yaml:",inline"` + // Embedded fields due to inline allOf schema +} + +// GeoViewLayer defines model for GeoViewLayer. +type GeoViewLayer interface{} + +// GeoViewLayerProperties defines model for GeoViewLayerProperties. +type GeoViewLayerProperties struct { + Type GeoViewLayerPropertiesType `json:"type"` +} + +// GeoViewLayerPropertiesType defines model for GeoViewLayerProperties.Type. +type GeoViewLayerPropertiesType string + +// GeoViewProperties defines model for GeoViewProperties. +type GeoViewProperties struct { + // If true, map zoom and pan controls are enabled on the dashboard view + AllowPanAndZoom bool `json:"allowPanAndZoom"` + + // Coordinates of the center of the map + Center struct { + // Latitude of the center of the map + Lat float64 `json:"lat"` + + // Longitude of the center of the map + Lon float64 `json:"lon"` + } `json:"center"` + + // Colors define color encoding of data into a visualization + Colors *[]DashboardColor `json:"colors,omitempty"` + + // If true, search results get automatically regroupped so that lon,lat and value are treated as columns + DetectCoordinateFields bool `json:"detectCoordinateFields"` + + // Object type to define lat/lon columns + LatLonColumns *LatLonColumns `json:"latLonColumns,omitempty"` + + // List of individual layers shown in the map + Layers []GeoViewLayer `json:"layers"` + + // Define map type - regular, satellite etc. + MapStyle *string `json:"mapStyle,omitempty"` + Note string `json:"note"` + Queries []DashboardQuery `json:"queries"` + + // String to define the column + S2Column *string `json:"s2Column,omitempty"` + Shape GeoViewPropertiesShape `json:"shape"` + + // If true, will display note when empty + ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` + Type GeoViewPropertiesType `json:"type"` + + // If true, S2 column is used to calculate lat/lon + UseS2CellID *bool `json:"useS2CellID,omitempty"` + + // Zoom level used for initial display of the map + Zoom float64 `json:"zoom"` +} + +// GeoViewPropertiesShape defines model for GeoViewProperties.Shape. +type GeoViewPropertiesShape string + +// GeoViewPropertiesType defines model for GeoViewProperties.Type. +type GeoViewPropertiesType string + +// GreaterThreshold defines model for GreaterThreshold. 
+type GreaterThreshold struct { + // Embedded struct due to allOf(#/components/schemas/ThresholdBase) + ThresholdBase `yaml:",inline"` + // Embedded fields due to inline allOf schema + Type GreaterThresholdType `json:"type"` + Value float32 `json:"value"` +} + +// GreaterThresholdType defines model for GreaterThreshold.Type. +type GreaterThresholdType string + +// HTTPNotificationEndpoint defines model for HTTPNotificationEndpoint. +type HTTPNotificationEndpoint struct { + // Embedded struct due to allOf(#/components/schemas/NotificationEndpointBase) + NotificationEndpointBase `yaml:",inline"` + // Embedded fields due to inline allOf schema + AuthMethod HTTPNotificationEndpointAuthMethod `json:"authMethod"` + ContentTemplate *string `json:"contentTemplate,omitempty"` + + // Customized headers. + Headers *HTTPNotificationEndpoint_Headers `json:"headers,omitempty"` + Method HTTPNotificationEndpointMethod `json:"method"` + Password *string `json:"password,omitempty"` + Token *string `json:"token,omitempty"` + Url string `json:"url"` + Username *string `json:"username,omitempty"` +} + +// HTTPNotificationEndpointAuthMethod defines model for HTTPNotificationEndpoint.AuthMethod. +type HTTPNotificationEndpointAuthMethod string + +// Customized headers. +type HTTPNotificationEndpoint_Headers struct { + AdditionalProperties map[string]string `json:"-"` +} + +// HTTPNotificationEndpointMethod defines model for HTTPNotificationEndpoint.Method. +type HTTPNotificationEndpointMethod string + +// HTTPNotificationRule defines model for HTTPNotificationRule. +type HTTPNotificationRule struct { + // Embedded struct due to allOf(#/components/schemas/NotificationRuleBase) + NotificationRuleBase `yaml:",inline"` + // Embedded struct due to allOf(#/components/schemas/HTTPNotificationRuleBase) + HTTPNotificationRuleBase `yaml:",inline"` +} + +// HTTPNotificationRuleBase defines model for HTTPNotificationRuleBase. +type HTTPNotificationRuleBase struct { + Type HTTPNotificationRuleBaseType `json:"type"` + Url *string `json:"url,omitempty"` +} + +// HTTPNotificationRuleBaseType defines model for HTTPNotificationRuleBase.Type. +type HTTPNotificationRuleBaseType string + +// HealthCheck defines model for HealthCheck. +type HealthCheck struct { + Checks *[]HealthCheck `json:"checks,omitempty"` + Commit *string `json:"commit,omitempty"` + Message *string `json:"message,omitempty"` + Name string `json:"name"` + Status HealthCheckStatus `json:"status"` + Version *string `json:"version,omitempty"` +} + +// HealthCheckStatus defines model for HealthCheck.Status. +type HealthCheckStatus string + +// HeatmapViewProperties defines model for HeatmapViewProperties. 
+type HeatmapViewProperties struct { + BinSize float32 `json:"binSize"` + + // Colors define color encoding of data into a visualization + Colors []string `json:"colors"` + GenerateXAxisTicks *[]string `json:"generateXAxisTicks,omitempty"` + GenerateYAxisTicks *[]string `json:"generateYAxisTicks,omitempty"` + LegendColorizeRows *bool `json:"legendColorizeRows,omitempty"` + LegendHide *bool `json:"legendHide,omitempty"` + LegendOpacity *float32 `json:"legendOpacity,omitempty"` + LegendOrientationThreshold *int `json:"legendOrientationThreshold,omitempty"` + Note string `json:"note"` + Queries []DashboardQuery `json:"queries"` + Shape HeatmapViewPropertiesShape `json:"shape"` + + // If true, will display note when empty + ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` + TimeFormat *string `json:"timeFormat,omitempty"` + Type HeatmapViewPropertiesType `json:"type"` + XAxisLabel string `json:"xAxisLabel"` + XColumn string `json:"xColumn"` + XDomain []float32 `json:"xDomain"` + XPrefix string `json:"xPrefix"` + XSuffix string `json:"xSuffix"` + XTickStart *float32 `json:"xTickStart,omitempty"` + XTickStep *float32 `json:"xTickStep,omitempty"` + XTotalTicks *int `json:"xTotalTicks,omitempty"` + YAxisLabel string `json:"yAxisLabel"` + YColumn string `json:"yColumn"` + YDomain []float32 `json:"yDomain"` + YPrefix string `json:"yPrefix"` + YSuffix string `json:"ySuffix"` + YTickStart *float32 `json:"yTickStart,omitempty"` + YTickStep *float32 `json:"yTickStep,omitempty"` + YTotalTicks *int `json:"yTotalTicks,omitempty"` +} + +// HeatmapViewPropertiesShape defines model for HeatmapViewProperties.Shape. +type HeatmapViewPropertiesShape string + +// HeatmapViewPropertiesType defines model for HeatmapViewProperties.Type. +type HeatmapViewPropertiesType string + +// HistogramViewProperties defines model for HistogramViewProperties. +type HistogramViewProperties struct { + BinCount int `json:"binCount"` + + // Colors define color encoding of data into a visualization + Colors []DashboardColor `json:"colors"` + FillColumns []string `json:"fillColumns"` + LegendColorizeRows *bool `json:"legendColorizeRows,omitempty"` + LegendHide *bool `json:"legendHide,omitempty"` + LegendOpacity *float32 `json:"legendOpacity,omitempty"` + LegendOrientationThreshold *int `json:"legendOrientationThreshold,omitempty"` + Note string `json:"note"` + Position HistogramViewPropertiesPosition `json:"position"` + Queries []DashboardQuery `json:"queries"` + Shape HistogramViewPropertiesShape `json:"shape"` + + // If true, will display note when empty + ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` + Type HistogramViewPropertiesType `json:"type"` + XAxisLabel string `json:"xAxisLabel"` + XColumn string `json:"xColumn"` + XDomain []float32 `json:"xDomain"` +} + +// HistogramViewPropertiesPosition defines model for HistogramViewProperties.Position. +type HistogramViewPropertiesPosition string + +// HistogramViewPropertiesShape defines model for HistogramViewProperties.Shape. +type HistogramViewPropertiesShape string + +// HistogramViewPropertiesType defines model for HistogramViewProperties.Type. 
+type HistogramViewPropertiesType string + +// A valid Flux identifier +type Identifier struct { + Name *string `json:"name,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Declares a package import +type ImportDeclaration struct { + // A valid Flux identifier + As *Identifier `json:"as,omitempty"` + + // Expressions begin and end with double quote marks + Path *StringLiteral `json:"path,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Represents indexing into an array +type IndexExpression struct { + Array *Expression `json:"array,omitempty"` + Index *Expression `json:"index,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Represents integer numbers +type IntegerLiteral struct { + // Type of AST node + Type *NodeType `json:"type,omitempty"` + Value *string `json:"value,omitempty"` +} + +// IsOnboarding defines model for IsOnboarding. +type IsOnboarding struct { + // True means that the influxdb instance has NOT had initial setup; false means that the database has been setup. + Allowed *bool `json:"allowed,omitempty"` +} + +// Label defines model for Label. +type Label struct { + Id *string `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + OrgID *string `json:"orgID,omitempty"` + + // Key/Value pairs associated with this label. Keys can be removed by sending an update with an empty value. + Properties *Label_Properties `json:"properties,omitempty"` +} + +// Key/Value pairs associated with this label. Keys can be removed by sending an update with an empty value. +type Label_Properties struct { + AdditionalProperties map[string]string `json:"-"` +} + +// LabelCreateRequest defines model for LabelCreateRequest. +type LabelCreateRequest struct { + Name string `json:"name"` + OrgID string `json:"orgID"` + + // Key/Value pairs associated with this label. Keys can be removed by sending an update with an empty value. + Properties *LabelCreateRequest_Properties `json:"properties,omitempty"` +} + +// Key/Value pairs associated with this label. Keys can be removed by sending an update with an empty value. +type LabelCreateRequest_Properties struct { + AdditionalProperties map[string]string `json:"-"` +} + +// LabelMapping defines model for LabelMapping. +type LabelMapping struct { + LabelID *string `json:"labelID,omitempty"` +} + +// LabelResponse defines model for LabelResponse. +type LabelResponse struct { + Label *Label `json:"label,omitempty"` + Links *Links `json:"links,omitempty"` +} + +// LabelUpdate defines model for LabelUpdate. +type LabelUpdate struct { + Name *string `json:"name,omitempty"` + + // Key/Value pairs associated with this label. Keys can be removed by sending an update with an empty value. + Properties *LabelUpdate_Properties `json:"properties,omitempty"` +} + +// Key/Value pairs associated with this label. Keys can be removed by sending an update with an empty value. +type LabelUpdate_Properties struct { + AdditionalProperties map[string]string `json:"-"` +} + +// Labels defines model for Labels. +type Labels []Label + +// LabelsResponse defines model for LabelsResponse. +type LabelsResponse struct { + Labels *Labels `json:"labels,omitempty"` + Links *Links `json:"links,omitempty"` +} + +// Flux query to be analyzed. 
+type LanguageRequest struct { + // Flux query script to be analyzed + Query string `json:"query"` +} + +// Object type for key and column definitions +type LatLonColumn struct { + // Column to look up Lat/Lon + Column string `json:"column"` + + // Key to determine whether the column is tag/field + Key string `json:"key"` +} + +// Object type to define lat/lon columns +type LatLonColumns struct { + // Object type for key and column definitions + Lat LatLonColumn `json:"lat"` + + // Object type for key and column definitions + Lon LatLonColumn `json:"lon"` +} + +// LegacyAuthorizationPostRequest defines model for LegacyAuthorizationPostRequest. +type LegacyAuthorizationPostRequest struct { + // Embedded struct due to allOf(#/components/schemas/AuthorizationUpdateRequest) + AuthorizationUpdateRequest `yaml:",inline"` + // Embedded fields due to inline allOf schema + // ID of org that authorization is scoped to. + OrgID *string `json:"orgID,omitempty"` + + // List of permissions for an auth. An auth must have at least one Permission. + Permissions *[]Permission `json:"permissions,omitempty"` + + // Token (name) of the authorization + Token *string `json:"token,omitempty"` + + // ID of user that authorization is scoped to. + UserID *string `json:"userID,omitempty"` +} + +// LesserThreshold defines model for LesserThreshold. +type LesserThreshold struct { + // Embedded struct due to allOf(#/components/schemas/ThresholdBase) + ThresholdBase `yaml:",inline"` + // Embedded fields due to inline allOf schema + Type LesserThresholdType `json:"type"` + Value float32 `json:"value"` +} + +// LesserThresholdType defines model for LesserThreshold.Type. +type LesserThresholdType string + +// LinePlusSingleStatProperties defines model for LinePlusSingleStatProperties. +type LinePlusSingleStatProperties struct { + // The viewport for a View's visualizations + Axes Axes `json:"axes"` + + // Colors define color encoding of data into a visualization + Colors []DashboardColor `json:"colors"` + + // Indicates whether decimal places should be enforced, and how many digits it should show. 
+ DecimalPlaces DecimalPlaces `json:"decimalPlaces"` + GenerateXAxisTicks *[]string `json:"generateXAxisTicks,omitempty"` + GenerateYAxisTicks *[]string `json:"generateYAxisTicks,omitempty"` + HoverDimension *LinePlusSingleStatPropertiesHoverDimension `json:"hoverDimension,omitempty"` + LegendColorizeRows *bool `json:"legendColorizeRows,omitempty"` + LegendHide *bool `json:"legendHide,omitempty"` + LegendOpacity *float32 `json:"legendOpacity,omitempty"` + LegendOrientationThreshold *int `json:"legendOrientationThreshold,omitempty"` + Note string `json:"note"` + Position LinePlusSingleStatPropertiesPosition `json:"position"` + Prefix string `json:"prefix"` + Queries []DashboardQuery `json:"queries"` + ShadeBelow *bool `json:"shadeBelow,omitempty"` + Shape LinePlusSingleStatPropertiesShape `json:"shape"` + + // If true, will display note when empty + ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` + + // StaticLegend represents the options specific to the static legend + StaticLegend *StaticLegend `json:"staticLegend,omitempty"` + Suffix string `json:"suffix"` + TimeFormat *string `json:"timeFormat,omitempty"` + Type LinePlusSingleStatPropertiesType `json:"type"` + XColumn *string `json:"xColumn,omitempty"` + XTickStart *float32 `json:"xTickStart,omitempty"` + XTickStep *float32 `json:"xTickStep,omitempty"` + XTotalTicks *int `json:"xTotalTicks,omitempty"` + YColumn *string `json:"yColumn,omitempty"` + YTickStart *float32 `json:"yTickStart,omitempty"` + YTickStep *float32 `json:"yTickStep,omitempty"` + YTotalTicks *int `json:"yTotalTicks,omitempty"` +} + +// LinePlusSingleStatPropertiesHoverDimension defines model for LinePlusSingleStatProperties.HoverDimension. +type LinePlusSingleStatPropertiesHoverDimension string + +// LinePlusSingleStatPropertiesPosition defines model for LinePlusSingleStatProperties.Position. +type LinePlusSingleStatPropertiesPosition string + +// LinePlusSingleStatPropertiesShape defines model for LinePlusSingleStatProperties.Shape. +type LinePlusSingleStatPropertiesShape string + +// LinePlusSingleStatPropertiesType defines model for LinePlusSingleStatProperties.Type. +type LinePlusSingleStatPropertiesType string + +// LineProtocolError defines model for LineProtocolError. +type LineProtocolError struct { + // Code is the machine-readable error code. + Code LineProtocolErrorCode `json:"code"` + + // Stack of errors that occurred during processing of the request. Useful for debugging. + Err *string `json:"err,omitempty"` + + // First line in the request body that contains malformed data. + Line *int32 `json:"line,omitempty"` + + // Human-readable message. + Message *string `json:"message,omitempty"` + + // Describes the logical code operation when the error occurred. Useful for debugging. + Op *string `json:"op,omitempty"` +} + +// Code is the machine-readable error code. +type LineProtocolErrorCode string + +// LineProtocolLengthError defines model for LineProtocolLengthError. +type LineProtocolLengthError struct { + // Code is the machine-readable error code. + Code LineProtocolLengthErrorCode `json:"code"` + + // Human-readable message. + Message string `json:"message"` +} + +// Code is the machine-readable error code. +type LineProtocolLengthErrorCode string + +// URI of resource. +type Link string + +// Links defines model for Links. +type Links struct { + // URI of resource. + Next *Link `json:"next,omitempty"` + + // URI of resource. + Prev *Link `json:"prev,omitempty"` + + // URI of resource. 
+ Self Link `json:"self"` +} + +// LogEvent defines model for LogEvent. +type LogEvent struct { + // A description of the event that occurred. + Message *string `json:"message,omitempty"` + + // the ID of the task that logged + RunID *string `json:"runID,omitempty"` + + // Time event occurred, RFC3339Nano. + Time *time.Time `json:"time,omitempty"` +} + +// Represents the rule conditions that collectively evaluate to either true or false +type LogicalExpression struct { + Left *Expression `json:"left,omitempty"` + Operator *string `json:"operator,omitempty"` + Right *Expression `json:"right,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Logs defines model for Logs. +type Logs struct { + Events *[]LogEvent `json:"events,omitempty"` +} + +// MapVariableProperties defines model for MapVariableProperties. +type MapVariableProperties struct { + Type *MapVariablePropertiesType `json:"type,omitempty"` + Values *MapVariableProperties_Values `json:"values,omitempty"` +} + +// MapVariablePropertiesType defines model for MapVariableProperties.Type. +type MapVariablePropertiesType string + +// MapVariableProperties_Values defines model for MapVariableProperties.Values. +type MapVariableProperties_Values struct { + AdditionalProperties map[string]string `json:"-"` +} + +// MarkdownViewProperties defines model for MarkdownViewProperties. +type MarkdownViewProperties struct { + Note string `json:"note"` + Shape MarkdownViewPropertiesShape `json:"shape"` + Type MarkdownViewPropertiesType `json:"type"` +} + +// MarkdownViewPropertiesShape defines model for MarkdownViewProperties.Shape. +type MarkdownViewPropertiesShape string + +// MarkdownViewPropertiesType defines model for MarkdownViewProperties.Type. +type MarkdownViewPropertiesType string + +// Object property assignment +type MemberAssignment struct { + Init *Expression `json:"init,omitempty"` + + // Represents accessing a property of an object + Member *MemberExpression `json:"member,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Represents accessing a property of an object +type MemberExpression struct { + Object *Expression `json:"object,omitempty"` + Property *PropertyKey `json:"property,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// MetadataBackup defines model for MetadataBackup. +type MetadataBackup struct { + Buckets BucketMetadataManifests `json:"buckets"` + Kv string `json:"kv"` + Sql string `json:"sql"` +} + +// MosaicViewProperties defines model for MosaicViewProperties. 
+type MosaicViewProperties struct { + // Colors define color encoding of data into a visualization + Colors []string `json:"colors"` + FillColumns []string `json:"fillColumns"` + GenerateXAxisTicks *[]string `json:"generateXAxisTicks,omitempty"` + HoverDimension *MosaicViewPropertiesHoverDimension `json:"hoverDimension,omitempty"` + LegendColorizeRows *bool `json:"legendColorizeRows,omitempty"` + LegendHide *bool `json:"legendHide,omitempty"` + LegendOpacity *float32 `json:"legendOpacity,omitempty"` + LegendOrientationThreshold *int `json:"legendOrientationThreshold,omitempty"` + Note string `json:"note"` + Queries []DashboardQuery `json:"queries"` + Shape MosaicViewPropertiesShape `json:"shape"` + + // If true, will display note when empty + ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` + TimeFormat *string `json:"timeFormat,omitempty"` + Type MosaicViewPropertiesType `json:"type"` + XAxisLabel string `json:"xAxisLabel"` + XColumn string `json:"xColumn"` + XDomain []float32 `json:"xDomain"` + XPrefix string `json:"xPrefix"` + XSuffix string `json:"xSuffix"` + XTickStart *float32 `json:"xTickStart,omitempty"` + XTickStep *float32 `json:"xTickStep,omitempty"` + XTotalTicks *int `json:"xTotalTicks,omitempty"` + YAxisLabel string `json:"yAxisLabel"` + YDomain []float32 `json:"yDomain"` + YLabelColumnSeparator *string `json:"yLabelColumnSeparator,omitempty"` + YLabelColumns *[]string `json:"yLabelColumns,omitempty"` + YPrefix string `json:"yPrefix"` + YSeriesColumns []string `json:"ySeriesColumns"` + YSuffix string `json:"ySuffix"` +} + +// MosaicViewPropertiesHoverDimension defines model for MosaicViewProperties.HoverDimension. +type MosaicViewPropertiesHoverDimension string + +// MosaicViewPropertiesShape defines model for MosaicViewProperties.Shape. +type MosaicViewPropertiesShape string + +// MosaicViewPropertiesType defines model for MosaicViewProperties.Type. +type MosaicViewPropertiesType string + +// Node defines model for Node. +type Node interface{} + +// Type of AST node +type NodeType string + +// NotificationEndpoint defines model for NotificationEndpoint. +type NotificationEndpoint struct { + // Embedded struct due to allOf(#/components/schemas/NotificationEndpointDiscriminator) + NotificationEndpointDiscriminator `yaml:",inline"` +} + +// NotificationEndpointBase defines model for NotificationEndpointBase. +type NotificationEndpointBase struct { + CreatedAt *time.Time `json:"createdAt,omitempty"` + + // An optional description of the notification endpoint. + Description *string `json:"description,omitempty"` + Id *string `json:"id,omitempty"` + Labels *Labels `json:"labels,omitempty"` + Links *struct { + // URI of resource. + Labels *Link `json:"labels,omitempty"` + + // URI of resource. + Members *Link `json:"members,omitempty"` + + // URI of resource. + Owners *Link `json:"owners,omitempty"` + + // URI of resource. + Self *Link `json:"self,omitempty"` + } `json:"links,omitempty"` + Name string `json:"name"` + OrgID *string `json:"orgID,omitempty"` + + // The status of the endpoint. + Status *NotificationEndpointBaseStatus `json:"status,omitempty"` + Type NotificationEndpointType `json:"type"` + UpdatedAt *time.Time `json:"updatedAt,omitempty"` + UserID *string `json:"userID,omitempty"` +} + +// The status of the endpoint. +type NotificationEndpointBaseStatus string + +// NotificationEndpointDiscriminator defines model for NotificationEndpointDiscriminator. 
+type NotificationEndpointDiscriminator interface{}
+
+// NotificationEndpointType defines model for NotificationEndpointType.
+type NotificationEndpointType string
+
+// NotificationEndpointUpdate defines model for NotificationEndpointUpdate.
+type NotificationEndpointUpdate struct {
+ Description *string `json:"description,omitempty"`
+ Name *string `json:"name,omitempty"`
+ Status *NotificationEndpointUpdateStatus `json:"status,omitempty"`
+}
+
+// NotificationEndpointUpdateStatus defines model for NotificationEndpointUpdate.Status.
+type NotificationEndpointUpdateStatus string
+
+// NotificationEndpoints defines model for NotificationEndpoints.
+type NotificationEndpoints struct {
+ Links *Links `json:"links,omitempty"`
+ NotificationEndpoints *[]NotificationEndpoint `json:"notificationEndpoints,omitempty"`
+}
+
+// NotificationRule defines model for NotificationRule.
+type NotificationRule struct {
+ // Embedded struct due to allOf(#/components/schemas/NotificationRuleDiscriminator)
+ NotificationRuleDiscriminator `yaml:",inline"`
+}
+
+// NotificationRuleBase defines model for NotificationRuleBase.
+type NotificationRuleBase struct {
+ CreatedAt *time.Time `json:"createdAt,omitempty"`
+
+ // An optional description of the notification rule.
+ Description *string `json:"description,omitempty"`
+ EndpointID string `json:"endpointID"`
+
+ // The notification repetition interval.
+ Every *string `json:"every,omitempty"`
+ Id *string `json:"id,omitempty"`
+ Labels *Labels `json:"labels,omitempty"`
+ LastRunError *string `json:"lastRunError,omitempty"`
+ LastRunStatus *NotificationRuleBaseLastRunStatus `json:"lastRunStatus,omitempty"`
+
+ // Timestamp (in [RFC3339 date/time format](https://datatracker.ietf.org/doc/html/rfc3339)) of the latest scheduled and completed run.
+ LatestCompleted *time.Time `json:"latestCompleted,omitempty"`
+
+ // Don't notify me more than <limit> times every <limitEvery> seconds. If set, limitEvery cannot be empty.
+ Limit *int `json:"limit,omitempty"`
+
+ // Don't notify me more than <limit> times every <limitEvery> seconds. If set, limit cannot be empty.
+ LimitEvery *int `json:"limitEvery,omitempty"`
+ Links *struct {
+ // URI of resource.
+ Labels *Link `json:"labels,omitempty"`
+
+ // URI of resource.
+ Members *Link `json:"members,omitempty"`
+
+ // URI of resource.
+ Owners *Link `json:"owners,omitempty"`
+
+ // URI of resource.
+ Query *Link `json:"query,omitempty"`
+
+ // URI of resource.
+ Self *Link `json:"self,omitempty"`
+ } `json:"links,omitempty"`
+
+ // Human-readable name describing the notification rule.
+ Name string `json:"name"`
+
+ // Duration to delay after the schedule, before executing check.
+ Offset *string `json:"offset,omitempty"`
+
+ // The ID of the organization that owns this notification rule.
+ OrgID string `json:"orgID"`
+
+ // The ID of creator used to create this notification rule.
+ OwnerID *string `json:"ownerID,omitempty"`
+ RunbookLink *string `json:"runbookLink,omitempty"`
+ SleepUntil *string `json:"sleepUntil,omitempty"`
+ Status TaskStatusType `json:"status"`
+
+ // List of status rules the notification rule attempts to match.
+ StatusRules []StatusRule `json:"statusRules"`
+
+ // List of tag rules the notification rule attempts to match.
+ TagRules *[]TagRule `json:"tagRules,omitempty"`
+
+ // The ID of the task associated with this notification rule.
+ TaskID *string `json:"taskID,omitempty"`
+ UpdatedAt *time.Time `json:"updatedAt,omitempty"`
+}
+
+// NotificationRuleBaseLastRunStatus defines model for NotificationRuleBase.LastRunStatus.
+type NotificationRuleBaseLastRunStatus string + +// NotificationRuleDiscriminator defines model for NotificationRuleDiscriminator. +type NotificationRuleDiscriminator interface{} + +// NotificationRuleUpdate defines model for NotificationRuleUpdate. +type NotificationRuleUpdate struct { + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + Status *NotificationRuleUpdateStatus `json:"status,omitempty"` +} + +// NotificationRuleUpdateStatus defines model for NotificationRuleUpdate.Status. +type NotificationRuleUpdateStatus string + +// NotificationRules defines model for NotificationRules. +type NotificationRules struct { + Links *Links `json:"links,omitempty"` + NotificationRules *[]NotificationRule `json:"notificationRules,omitempty"` +} + +// Allows the declaration of an anonymous object within a declaration +type ObjectExpression struct { + // Object properties + Properties *[]Property `json:"properties,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// OnboardingRequest defines model for OnboardingRequest. +type OnboardingRequest struct { + Bucket string `json:"bucket"` + Org string `json:"org"` + Password *string `json:"password,omitempty"` + + // Retention period *in nanoseconds* for the new bucket. This key's name has been misleading since OSS 2.0 GA, please transition to use `retentionPeriodSeconds` + RetentionPeriodHrs *int `json:"retentionPeriodHrs,omitempty"` + RetentionPeriodSeconds *int64 `json:"retentionPeriodSeconds,omitempty"` + + // Authentication token to set on the initial user. If not specified, the server will generate a token. + Token *string `json:"token,omitempty"` + Username string `json:"username"` +} + +// OnboardingResponse defines model for OnboardingResponse. +type OnboardingResponse struct { + Auth *Authorization `json:"auth,omitempty"` + Bucket *Bucket `json:"bucket,omitempty"` + Org *Organization `json:"org,omitempty"` + User *UserResponse `json:"user,omitempty"` +} + +// A single variable declaration +type OptionStatement struct { + Assignment *interface{} `json:"assignment,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Organization defines model for Organization. +type Organization struct { + CreatedAt *time.Time `json:"createdAt,omitempty"` + Description *string `json:"description,omitempty"` + Id *string `json:"id,omitempty"` + Links *struct { + // URI of resource. + Buckets *Link `json:"buckets,omitempty"` + + // URI of resource. + Dashboards *Link `json:"dashboards,omitempty"` + + // URI of resource. + Labels *Link `json:"labels,omitempty"` + + // URI of resource. + Members *Link `json:"members,omitempty"` + + // URI of resource. + Owners *Link `json:"owners,omitempty"` + + // URI of resource. + Secrets *Link `json:"secrets,omitempty"` + + // URI of resource. + Self *Link `json:"self,omitempty"` + + // URI of resource. + Tasks *Link `json:"tasks,omitempty"` + } `json:"links,omitempty"` + Name string `json:"name"` + + // If inactive the organization is inactive. + Status *OrganizationStatus `json:"status,omitempty"` + UpdatedAt *time.Time `json:"updatedAt,omitempty"` +} + +// If inactive the organization is inactive. +type OrganizationStatus string + +// Organizations defines model for Organizations. +type Organizations struct { + Links *Links `json:"links,omitempty"` + Orgs *[]Organization `json:"orgs,omitempty"` +} + +// Represents a complete package source tree. 
+type Package struct { + // Package files + Files *[]File `json:"files,omitempty"` + + // Package name + Package *string `json:"package,omitempty"` + + // Package import path + Path *string `json:"path,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Defines a package identifier +type PackageClause struct { + // A valid Flux identifier + Name *Identifier `json:"name,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// PagerDutyNotificationEndpoint defines model for PagerDutyNotificationEndpoint. +type PagerDutyNotificationEndpoint struct { + // Embedded struct due to allOf(#/components/schemas/NotificationEndpointBase) + NotificationEndpointBase `yaml:",inline"` + // Embedded fields due to inline allOf schema + ClientURL *string `json:"clientURL,omitempty"` + RoutingKey string `json:"routingKey"` +} + +// PagerDutyNotificationRule defines model for PagerDutyNotificationRule. +type PagerDutyNotificationRule struct { + // Embedded struct due to allOf(#/components/schemas/NotificationRuleBase) + NotificationRuleBase `yaml:",inline"` + // Embedded struct due to allOf(#/components/schemas/PagerDutyNotificationRuleBase) + PagerDutyNotificationRuleBase `yaml:",inline"` +} + +// PagerDutyNotificationRuleBase defines model for PagerDutyNotificationRuleBase. +type PagerDutyNotificationRuleBase struct { + MessageTemplate string `json:"messageTemplate"` + Type PagerDutyNotificationRuleBaseType `json:"type"` +} + +// PagerDutyNotificationRuleBaseType defines model for PagerDutyNotificationRuleBase.Type. +type PagerDutyNotificationRuleBaseType string + +// Represents an expression wrapped in parenthesis +type ParenExpression struct { + Expression *Expression `json:"expression,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// PasswordResetBody defines model for PasswordResetBody. +type PasswordResetBody struct { + Password string `json:"password"` +} + +// Updates to an existing bucket resource. +type PatchBucketRequest struct { + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + + // Updates to rules to expire or retain data. No rules means no updates. + RetentionRules *PatchRetentionRules `json:"retentionRules,omitempty"` +} + +// PatchOrganizationRequest defines model for PatchOrganizationRequest. +type PatchOrganizationRequest struct { + // New description to set on the organization + Description *string `json:"description,omitempty"` + + // New name to set on the organization + Name *string `json:"name,omitempty"` +} + +// Updates to a rule to expire or retain data. +type PatchRetentionRule struct { + // Duration in seconds for how long data will be kept in the database. 0 means infinite. + EverySeconds *int64 `json:"everySeconds,omitempty"` + + // Shard duration measured in seconds. + ShardGroupDurationSeconds *int64 `json:"shardGroupDurationSeconds,omitempty"` + Type PatchRetentionRuleType `json:"type"` +} + +// PatchRetentionRuleType defines model for PatchRetentionRule.Type. +type PatchRetentionRuleType string + +// Updates to rules to expire or retain data. No rules means no updates. +type PatchRetentionRules []PatchRetentionRule + +// Permission defines model for Permission. +type Permission struct { + Action PermissionAction `json:"action"` + Resource Resource `json:"resource"` +} + +// PermissionAction defines model for Permission.Action. 
+type PermissionAction string + +// Call expression with pipe argument +type PipeExpression struct { + Argument *Expression `json:"argument,omitempty"` + + // Represents a function call + Call *CallExpression `json:"call,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Represents a specialized literal value, indicating the left hand value of a pipe expression +type PipeLiteral struct { + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// PostBucketRequest defines model for PostBucketRequest. +type PostBucketRequest struct { + Description *string `json:"description,omitempty"` + Name string `json:"name"` + OrgID string `json:"orgID"` + + // Rules to expire or retain data. No rules means data never expires. + RetentionRules RetentionRules `json:"retentionRules"` + Rp *string `json:"rp,omitempty"` + SchemaType *SchemaType `json:"schemaType,omitempty"` +} + +// PostCheck defines model for PostCheck. +type PostCheck struct { + // Embedded struct due to allOf(#/components/schemas/CheckDiscriminator) + CheckDiscriminator `yaml:",inline"` +} + +// PostNotificationEndpoint defines model for PostNotificationEndpoint. +type PostNotificationEndpoint struct { + // Embedded struct due to allOf(#/components/schemas/NotificationEndpointDiscriminator) + NotificationEndpointDiscriminator `yaml:",inline"` +} + +// PostNotificationRule defines model for PostNotificationRule. +type PostNotificationRule struct { + // Embedded struct due to allOf(#/components/schemas/NotificationRuleDiscriminator) + NotificationRuleDiscriminator `yaml:",inline"` +} + +// PostOrganizationRequest defines model for PostOrganizationRequest. +type PostOrganizationRequest struct { + Description *string `json:"description,omitempty"` + Name string `json:"name"` +} + +// The value associated with a key +type Property struct { + Key *PropertyKey `json:"key,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` + Value *Expression `json:"value,omitempty"` +} + +// PropertyKey defines model for PropertyKey. +type PropertyKey interface{} + +// Query influx using the Flux language +type Query struct { + // Dialect are options to change the default CSV output format; https://www.w3.org/TR/2015/REC-tabular-metadata-20151217/#dialect-descriptions + Dialect *Dialect `json:"dialect,omitempty"` + + // Represents a source from a single file + Extern *File `json:"extern,omitempty"` + + // Specifies the time that should be reported as "now" in the query. Default is the server's now time. + Now *time.Time `json:"now,omitempty"` + + // Enumeration of key/value pairs that respresent parameters to be injected into query (can only specify either this field or extern and not both) + Params *Query_Params `json:"params,omitempty"` + + // Query script to execute. + Query string `json:"query"` + + // The type of query. Must be "flux". + Type *QueryType `json:"type,omitempty"` +} + +// Enumeration of key/value pairs that respresent parameters to be injected into query (can only specify either this field or extern and not both) +type Query_Params struct { + AdditionalProperties map[string]interface{} `json:"-"` +} + +// The type of query. Must be "flux". +type QueryType string + +// QueryEditMode defines model for QueryEditMode. +type QueryEditMode string + +// QueryVariableProperties defines model for QueryVariableProperties. 
+type QueryVariableProperties struct { + Type *QueryVariablePropertiesType `json:"type,omitempty"` + Values *struct { + Language *string `json:"language,omitempty"` + Query *string `json:"query,omitempty"` + } `json:"values,omitempty"` +} + +// QueryVariablePropertiesType defines model for QueryVariableProperties.Type. +type QueryVariablePropertiesType string + +// RangeThreshold defines model for RangeThreshold. +type RangeThreshold struct { + // Embedded struct due to allOf(#/components/schemas/ThresholdBase) + ThresholdBase `yaml:",inline"` + // Embedded fields due to inline allOf schema + Max float32 `json:"max"` + Min float32 `json:"min"` + Type RangeThresholdType `json:"type"` + Within bool `json:"within"` +} + +// RangeThresholdType defines model for RangeThreshold.Type. +type RangeThresholdType string + +// Ready defines model for Ready. +type Ready struct { + Started *time.Time `json:"started,omitempty"` + Status *ReadyStatus `json:"status,omitempty"` + Up *string `json:"up,omitempty"` +} + +// ReadyStatus defines model for Ready.Status. +type ReadyStatus string + +// Expressions begin and end with `/` and are regular expressions with syntax accepted by RE2 +type RegexpLiteral struct { + // Type of AST node + Type *NodeType `json:"type,omitempty"` + Value *string `json:"value,omitempty"` +} + +// RemoteConnection defines model for RemoteConnection. +type RemoteConnection struct { + AllowInsecureTLS bool `json:"allowInsecureTLS"` + Description *string `json:"description,omitempty"` + Id string `json:"id"` + Name string `json:"name"` + OrgID string `json:"orgID"` + RemoteOrgID string `json:"remoteOrgID"` + RemoteURL string `json:"remoteURL"` +} + +// RemoteConnectionCreationRequest defines model for RemoteConnectionCreationRequest. +type RemoteConnectionCreationRequest struct { + AllowInsecureTLS bool `json:"allowInsecureTLS"` + Description *string `json:"description,omitempty"` + Name string `json:"name"` + OrgID string `json:"orgID"` + RemoteAPIToken string `json:"remoteAPIToken"` + RemoteOrgID string `json:"remoteOrgID"` + RemoteURL string `json:"remoteURL"` +} + +// RemoteConnectionUpdateRequest defines model for RemoteConnectionUpdateRequest. +type RemoteConnectionUpdateRequest struct { + AllowInsecureTLS *bool `json:"allowInsecureTLS,omitempty"` + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + RemoteAPIToken *string `json:"remoteAPIToken,omitempty"` + RemoteOrgID *string `json:"remoteOrgID,omitempty"` + RemoteURL *string `json:"remoteURL,omitempty"` +} + +// RemoteConnections defines model for RemoteConnections. +type RemoteConnections struct { + Remotes *[]RemoteConnection `json:"remotes,omitempty"` +} + +// Describes a field that can be renamed and made visible or invisible. +type RenamableField struct { + // The name that a field is renamed to by the user. + DisplayName *string `json:"displayName,omitempty"` + + // The calculated name of a field. + InternalName *string `json:"internalName,omitempty"` + + // Indicates whether this field should be visible on the table. + Visible *bool `json:"visible,omitempty"` +} + +// Replication defines model for Replication. 
+type Replication struct { + CurrentQueueSizeBytes int64 `json:"currentQueueSizeBytes"` + Description *string `json:"description,omitempty"` + DropNonRetryableData *bool `json:"dropNonRetryableData,omitempty"` + Id string `json:"id"` + LatestErrorMessage *string `json:"latestErrorMessage,omitempty"` + LatestResponseCode *int `json:"latestResponseCode,omitempty"` + LocalBucketID string `json:"localBucketID"` + MaxQueueSizeBytes int64 `json:"maxQueueSizeBytes"` + Name string `json:"name"` + OrgID string `json:"orgID"` + RemoteBucketID string `json:"remoteBucketID"` + RemoteID string `json:"remoteID"` +} + +// ReplicationCreationRequest defines model for ReplicationCreationRequest. +type ReplicationCreationRequest struct { + Description *string `json:"description,omitempty"` + DropNonRetryableData *bool `json:"dropNonRetryableData,omitempty"` + LocalBucketID string `json:"localBucketID"` + MaxQueueSizeBytes int64 `json:"maxQueueSizeBytes"` + Name string `json:"name"` + OrgID string `json:"orgID"` + RemoteBucketID string `json:"remoteBucketID"` + RemoteID string `json:"remoteID"` +} + +// ReplicationUpdateRequest defines model for ReplicationUpdateRequest. +type ReplicationUpdateRequest struct { + Description *string `json:"description,omitempty"` + DropNonRetryableData *bool `json:"dropNonRetryableData,omitempty"` + MaxQueueSizeBytes *int64 `json:"maxQueueSizeBytes,omitempty"` + Name *string `json:"name,omitempty"` + RemoteBucketID *string `json:"remoteBucketID,omitempty"` + RemoteID *string `json:"remoteID,omitempty"` +} + +// Replications defines model for Replications. +type Replications struct { + Replications *[]Replication `json:"replications,omitempty"` +} + +// Resource defines model for Resource. +type Resource struct { + // If ID is set that is a permission for a specific resource. if it is not set it is a permission for all resources of that resource type. + Id *string `json:"id,omitempty"` + + // Optional name of the resource if the resource has a name field. + Name *string `json:"name,omitempty"` + + // Optional name of the organization of the organization with orgID. + Org *string `json:"org,omitempty"` + + // If orgID is set that is a permission for all resources owned my that org. if it is not set it is a permission for all resources of that resource type. + OrgID *string `json:"orgID,omitempty"` + Type ResourceType `json:"type"` +} + +// ResourceType defines model for Resource.Type. +type ResourceType string + +// ResourceMember defines model for ResourceMember. +type ResourceMember struct { + // Embedded struct due to allOf(#/components/schemas/UserResponse) + UserResponse `yaml:",inline"` + // Embedded fields due to inline allOf schema + Role *ResourceMemberRole `json:"role,omitempty"` +} + +// ResourceMemberRole defines model for ResourceMember.Role. +type ResourceMemberRole string + +// ResourceMembers defines model for ResourceMembers. +type ResourceMembers struct { + Links *struct { + Self *string `json:"self,omitempty"` + } `json:"links,omitempty"` + Users *[]ResourceMember `json:"users,omitempty"` +} + +// ResourceOwner defines model for ResourceOwner. +type ResourceOwner struct { + // Embedded struct due to allOf(#/components/schemas/UserResponse) + UserResponse `yaml:",inline"` + // Embedded fields due to inline allOf schema + Role *ResourceOwnerRole `json:"role,omitempty"` +} + +// ResourceOwnerRole defines model for ResourceOwner.Role. +type ResourceOwnerRole string + +// ResourceOwners defines model for ResourceOwners. 
+type ResourceOwners struct { + Links *struct { + Self *string `json:"self,omitempty"` + } `json:"links,omitempty"` + Users *[]ResourceOwner `json:"users,omitempty"` +} + +// RestoredBucketMappings defines model for RestoredBucketMappings. +type RestoredBucketMappings struct { + // New ID of the restored bucket + Id string `json:"id"` + Name string `json:"name"` + ShardMappings BucketShardMappings `json:"shardMappings"` +} + +// RetentionPolicyManifest defines model for RetentionPolicyManifest. +type RetentionPolicyManifest struct { + Duration int64 `json:"duration"` + Name string `json:"name"` + ReplicaN int `json:"replicaN"` + ShardGroupDuration int64 `json:"shardGroupDuration"` + ShardGroups ShardGroupManifests `json:"shardGroups"` + Subscriptions SubscriptionManifests `json:"subscriptions"` +} + +// RetentionPolicyManifests defines model for RetentionPolicyManifests. +type RetentionPolicyManifests []RetentionPolicyManifest + +// RetentionRule defines model for RetentionRule. +type RetentionRule struct { + // Duration in seconds for how long data will be kept in the database. 0 means infinite. + EverySeconds int64 `json:"everySeconds"` + + // Shard duration measured in seconds. + ShardGroupDurationSeconds *int64 `json:"shardGroupDurationSeconds,omitempty"` + Type RetentionRuleType `json:"type"` +} + +// RetentionRuleType defines model for RetentionRule.Type. +type RetentionRuleType string + +// Rules to expire or retain data. No rules means data never expires. +type RetentionRules []RetentionRule + +// Defines an expression to return +type ReturnStatement struct { + Argument *Expression `json:"argument,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Routes defines model for Routes. +type Routes struct { + Authorizations *string `json:"authorizations,omitempty"` + Buckets *string `json:"buckets,omitempty"` + Dashboards *string `json:"dashboards,omitempty"` + External *struct { + StatusFeed *string `json:"statusFeed,omitempty"` + } `json:"external,omitempty"` + Flags *string `json:"flags,omitempty"` + Me *string `json:"me,omitempty"` + Orgs *string `json:"orgs,omitempty"` + Query *struct { + Analyze *string `json:"analyze,omitempty"` + Ast *string `json:"ast,omitempty"` + Self *string `json:"self,omitempty"` + Suggestions *string `json:"suggestions,omitempty"` + } `json:"query,omitempty"` + Setup *string `json:"setup,omitempty"` + Signin *string `json:"signin,omitempty"` + Signout *string `json:"signout,omitempty"` + Sources *string `json:"sources,omitempty"` + System *struct { + Debug *string `json:"debug,omitempty"` + Health *string `json:"health,omitempty"` + Metrics *string `json:"metrics,omitempty"` + } `json:"system,omitempty"` + Tasks *string `json:"tasks,omitempty"` + Telegrafs *string `json:"telegrafs,omitempty"` + Users *string `json:"users,omitempty"` + Variables *string `json:"variables,omitempty"` + Write *string `json:"write,omitempty"` +} + +// The state to record if check matches a criteria. +type RuleStatusLevel string + +// Run defines model for Run. +type Run struct { + // Time run finished executing, RFC3339Nano. + FinishedAt *time.Time `json:"finishedAt,omitempty"` + Id *string `json:"id,omitempty"` + Links *struct { + Retry *string `json:"retry,omitempty"` + Self *string `json:"self,omitempty"` + Task *string `json:"task,omitempty"` + } `json:"links,omitempty"` + + // An array of logs associated with the run. + Log *[]LogEvent `json:"log,omitempty"` + + // Time run was manually requested, RFC3339Nano. 
+ RequestedAt *time.Time `json:"requestedAt,omitempty"` + + // Time used for run's "now" option, RFC3339. + ScheduledFor *time.Time `json:"scheduledFor,omitempty"` + + // Time run started executing, RFC3339Nano. + StartedAt *time.Time `json:"startedAt,omitempty"` + Status *RunStatus `json:"status,omitempty"` + TaskID *string `json:"taskID,omitempty"` +} + +// RunStatus defines model for Run.Status. +type RunStatus string + +// RunManually defines model for RunManually. +type RunManually struct { + // Time used for run's "now" option, RFC3339. Default is the server's now time. + ScheduledFor *time.Time `json:"scheduledFor"` +} + +// Runs defines model for Runs. +type Runs struct { + Links *Links `json:"links,omitempty"` + Runs *[]Run `json:"runs,omitempty"` +} + +// SMTPNotificationRule defines model for SMTPNotificationRule. +type SMTPNotificationRule struct { + // Embedded struct due to allOf(#/components/schemas/NotificationRuleBase) + NotificationRuleBase `yaml:",inline"` + // Embedded struct due to allOf(#/components/schemas/SMTPNotificationRuleBase) + SMTPNotificationRuleBase `yaml:",inline"` +} + +// SMTPNotificationRuleBase defines model for SMTPNotificationRuleBase. +type SMTPNotificationRuleBase struct { + BodyTemplate *string `json:"bodyTemplate,omitempty"` + SubjectTemplate string `json:"subjectTemplate"` + To string `json:"to"` + Type SMTPNotificationRuleBaseType `json:"type"` +} + +// SMTPNotificationRuleBaseType defines model for SMTPNotificationRuleBase.Type. +type SMTPNotificationRuleBaseType string + +// ScatterViewProperties defines model for ScatterViewProperties. +type ScatterViewProperties struct { + // Colors define color encoding of data into a visualization + Colors []string `json:"colors"` + FillColumns []string `json:"fillColumns"` + GenerateXAxisTicks *[]string `json:"generateXAxisTicks,omitempty"` + GenerateYAxisTicks *[]string `json:"generateYAxisTicks,omitempty"` + LegendColorizeRows *bool `json:"legendColorizeRows,omitempty"` + LegendHide *bool `json:"legendHide,omitempty"` + LegendOpacity *float32 `json:"legendOpacity,omitempty"` + LegendOrientationThreshold *int `json:"legendOrientationThreshold,omitempty"` + Note string `json:"note"` + Queries []DashboardQuery `json:"queries"` + Shape ScatterViewPropertiesShape `json:"shape"` + + // If true, will display note when empty + ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` + SymbolColumns []string `json:"symbolColumns"` + TimeFormat *string `json:"timeFormat,omitempty"` + Type ScatterViewPropertiesType `json:"type"` + XAxisLabel string `json:"xAxisLabel"` + XColumn string `json:"xColumn"` + XDomain []float32 `json:"xDomain"` + XPrefix string `json:"xPrefix"` + XSuffix string `json:"xSuffix"` + XTickStart *float32 `json:"xTickStart,omitempty"` + XTickStep *float32 `json:"xTickStep,omitempty"` + XTotalTicks *int `json:"xTotalTicks,omitempty"` + YAxisLabel string `json:"yAxisLabel"` + YColumn string `json:"yColumn"` + YDomain []float32 `json:"yDomain"` + YPrefix string `json:"yPrefix"` + YSuffix string `json:"ySuffix"` + YTickStart *float32 `json:"yTickStart,omitempty"` + YTickStep *float32 `json:"yTickStep,omitempty"` + YTotalTicks *int `json:"yTotalTicks,omitempty"` +} + +// ScatterViewPropertiesShape defines model for ScatterViewProperties.Shape. +type ScatterViewPropertiesShape string + +// ScatterViewPropertiesType defines model for ScatterViewProperties.Type. +type ScatterViewPropertiesType string + +// SchemaType defines model for SchemaType. 
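+// Known values are "implicit" (the default, with no enforced measurement
+// schema) and "explicit". For example:
+//
+//	schemaType := SchemaType("implicit")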
+type SchemaType string + +// ScraperTargetRequest defines model for ScraperTargetRequest. +type ScraperTargetRequest struct { + // Skip TLS verification on endpoint. + AllowInsecure *bool `json:"allowInsecure,omitempty"` + + // The ID of the bucket to write to. + BucketID *string `json:"bucketID,omitempty"` + + // The name of the scraper target. + Name *string `json:"name,omitempty"` + + // The organization ID. + OrgID *string `json:"orgID,omitempty"` + + // The type of the metrics to be parsed. + Type *ScraperTargetRequestType `json:"type,omitempty"` + + // The URL of the metrics endpoint. + Url *string `json:"url,omitempty"` +} + +// The type of the metrics to be parsed. +type ScraperTargetRequestType string + +// ScraperTargetResponse defines model for ScraperTargetResponse. +type ScraperTargetResponse struct { + // Embedded struct due to allOf(#/components/schemas/ScraperTargetRequest) + ScraperTargetRequest `yaml:",inline"` + // Embedded fields due to inline allOf schema + // The bucket name. + Bucket *string `json:"bucket,omitempty"` + Id *string `json:"id,omitempty"` + Links *struct { + // URI of resource. + Bucket *Link `json:"bucket,omitempty"` + + // URI of resource. + Members *Link `json:"members,omitempty"` + + // URI of resource. + Organization *Link `json:"organization,omitempty"` + + // URI of resource. + Owners *Link `json:"owners,omitempty"` + + // URI of resource. + Self *Link `json:"self,omitempty"` + } `json:"links,omitempty"` + + // The name of the organization. + Org *string `json:"org,omitempty"` +} + +// ScraperTargetResponses defines model for ScraperTargetResponses. +type ScraperTargetResponses struct { + Configurations *[]ScraperTargetResponse `json:"configurations,omitempty"` +} + +// SecretKeys defines model for SecretKeys. +type SecretKeys struct { + Secrets *[]string `json:"secrets,omitempty"` +} + +// SecretKeysResponse defines model for SecretKeysResponse. +type SecretKeysResponse struct { + // Embedded struct due to allOf(#/components/schemas/SecretKeys) + SecretKeys `yaml:",inline"` + // Embedded fields due to inline allOf schema + Links *struct { + Org *string `json:"org,omitempty"` + Self *string `json:"self,omitempty"` + } `json:"links,omitempty"` +} + +// Secrets defines model for Secrets. +type Secrets struct { + AdditionalProperties map[string]string `json:"-"` +} + +// ShardGroupManifest defines model for ShardGroupManifest. +type ShardGroupManifest struct { + DeletedAt *time.Time `json:"deletedAt,omitempty"` + EndTime time.Time `json:"endTime"` + Id int64 `json:"id"` + Shards ShardManifests `json:"shards"` + StartTime time.Time `json:"startTime"` + TruncatedAt *time.Time `json:"truncatedAt,omitempty"` +} + +// ShardGroupManifests defines model for ShardGroupManifests. +type ShardGroupManifests []ShardGroupManifest + +// ShardManifest defines model for ShardManifest. +type ShardManifest struct { + Id int64 `json:"id"` + ShardOwners ShardOwners `json:"shardOwners"` +} + +// ShardManifests defines model for ShardManifests. +type ShardManifests []ShardManifest + +// ShardOwner defines model for ShardOwner. +type ShardOwner struct { + // ID of the node that owns a shard. + NodeID int64 `json:"nodeID"` +} + +// ShardOwners defines model for ShardOwners. +type ShardOwners []ShardOwner + +// SimpleTableViewProperties defines model for SimpleTableViewProperties. 
+type SimpleTableViewProperties struct { + Note string `json:"note"` + Queries []DashboardQuery `json:"queries"` + Shape SimpleTableViewPropertiesShape `json:"shape"` + ShowAll bool `json:"showAll"` + + // If true, will display note when empty + ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` + Type SimpleTableViewPropertiesType `json:"type"` +} + +// SimpleTableViewPropertiesShape defines model for SimpleTableViewProperties.Shape. +type SimpleTableViewPropertiesShape string + +// SimpleTableViewPropertiesType defines model for SimpleTableViewProperties.Type. +type SimpleTableViewPropertiesType string + +// SingleStatViewProperties defines model for SingleStatViewProperties. +type SingleStatViewProperties struct { + // Colors define color encoding of data into a visualization + Colors []DashboardColor `json:"colors"` + + // Indicates whether decimal places should be enforced, and how many digits it should show. + DecimalPlaces DecimalPlaces `json:"decimalPlaces"` + Note string `json:"note"` + Prefix string `json:"prefix"` + Queries []DashboardQuery `json:"queries"` + Shape SingleStatViewPropertiesShape `json:"shape"` + + // If true, will display note when empty + ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` + + // StaticLegend represents the options specific to the static legend + StaticLegend *StaticLegend `json:"staticLegend,omitempty"` + Suffix string `json:"suffix"` + TickPrefix string `json:"tickPrefix"` + TickSuffix string `json:"tickSuffix"` + Type SingleStatViewPropertiesType `json:"type"` +} + +// SingleStatViewPropertiesShape defines model for SingleStatViewProperties.Shape. +type SingleStatViewPropertiesShape string + +// SingleStatViewPropertiesType defines model for SingleStatViewProperties.Type. +type SingleStatViewPropertiesType string + +// SlackNotificationEndpoint defines model for SlackNotificationEndpoint. +type SlackNotificationEndpoint struct { + // Embedded struct due to allOf(#/components/schemas/NotificationEndpointBase) + NotificationEndpointBase `yaml:",inline"` + // Embedded fields due to inline allOf schema + // Specifies the API token string. Specify either `URL` or `Token`. + Token *string `json:"token,omitempty"` + + // Specifies the URL of the Slack endpoint. Specify either `URL` or `Token`. + Url *string `json:"url,omitempty"` +} + +// SlackNotificationRule defines model for SlackNotificationRule. +type SlackNotificationRule struct { + // Embedded struct due to allOf(#/components/schemas/NotificationRuleBase) + NotificationRuleBase `yaml:",inline"` + // Embedded struct due to allOf(#/components/schemas/SlackNotificationRuleBase) + SlackNotificationRuleBase `yaml:",inline"` +} + +// SlackNotificationRuleBase defines model for SlackNotificationRuleBase. +type SlackNotificationRuleBase struct { + Channel *string `json:"channel,omitempty"` + MessageTemplate string `json:"messageTemplate"` + Type SlackNotificationRuleBaseType `json:"type"` +} + +// SlackNotificationRuleBaseType defines model for SlackNotificationRuleBase.Type. +type SlackNotificationRuleBaseType string + +// Source defines model for Source. 
+type Source struct { + Default *bool `json:"default,omitempty"` + DefaultRP *string `json:"defaultRP,omitempty"` + Id *string `json:"id,omitempty"` + InsecureSkipVerify *bool `json:"insecureSkipVerify,omitempty"` + Languages *[]SourceLanguages `json:"languages,omitempty"` + Links *struct { + Buckets *string `json:"buckets,omitempty"` + Health *string `json:"health,omitempty"` + Query *string `json:"query,omitempty"` + Self *string `json:"self,omitempty"` + } `json:"links,omitempty"` + MetaUrl *string `json:"metaUrl,omitempty"` + Name *string `json:"name,omitempty"` + OrgID *string `json:"orgID,omitempty"` + Password *string `json:"password,omitempty"` + SharedSecret *string `json:"sharedSecret,omitempty"` + Telegraf *string `json:"telegraf,omitempty"` + Token *string `json:"token,omitempty"` + Type *SourceType `json:"type,omitempty"` + Url *string `json:"url,omitempty"` + Username *string `json:"username,omitempty"` +} + +// SourceLanguages defines model for Source.Languages. +type SourceLanguages string + +// SourceType defines model for Source.Type. +type SourceType string + +// Sources defines model for Sources. +type Sources struct { + Links *struct { + Self *string `json:"self,omitempty"` + } `json:"links,omitempty"` + Sources *[]Source `json:"sources,omitempty"` +} + +// Stack defines model for Stack. +type Stack struct { + CreatedAt *time.Time `json:"createdAt,omitempty"` + Events *[]struct { + Description *string `json:"description,omitempty"` + EventType *string `json:"eventType,omitempty"` + Name *string `json:"name,omitempty"` + Resources *[]struct { + ApiVersion *string `json:"apiVersion,omitempty"` + Associations *[]struct { + Kind *TemplateKind `json:"kind,omitempty"` + MetaName *string `json:"metaName,omitempty"` + } `json:"associations,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + Links *struct { + Self *string `json:"self,omitempty"` + } `json:"links,omitempty"` + ResourceID *string `json:"resourceID,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"resources,omitempty"` + Sources *[]string `json:"sources,omitempty"` + UpdatedAt *time.Time `json:"updatedAt,omitempty"` + Urls *[]string `json:"urls,omitempty"` + } `json:"events,omitempty"` + Id *string `json:"id,omitempty"` + OrgID *string `json:"orgID,omitempty"` +} + +// Statement defines model for Statement. +type Statement interface{} + +// StaticLegend represents the options specific to the static legend +type StaticLegend struct { + ColorizeRows *bool `json:"colorizeRows,omitempty"` + HeightRatio *float32 `json:"heightRatio,omitempty"` + Opacity *float32 `json:"opacity,omitempty"` + OrientationThreshold *int `json:"orientationThreshold,omitempty"` + Show *bool `json:"show,omitempty"` + ValueAxis *string `json:"valueAxis,omitempty"` + WidthRatio *float32 `json:"widthRatio,omitempty"` +} + +// StatusRule defines model for StatusRule. +type StatusRule struct { + Count *int `json:"count,omitempty"` + + // The state to record if check matches a criteria. + CurrentLevel *RuleStatusLevel `json:"currentLevel,omitempty"` + Period *string `json:"period,omitempty"` + + // The state to record if check matches a criteria. + PreviousLevel *RuleStatusLevel `json:"previousLevel,omitempty"` +} + +// Expressions begin and end with double quote marks +type StringLiteral struct { + // Type of AST node + Type *NodeType `json:"type,omitempty"` + Value *string `json:"value,omitempty"` +} + +// SubscriptionManifest defines model for SubscriptionManifest. 
+type SubscriptionManifest struct { + Destinations []string `json:"destinations"` + Mode string `json:"mode"` + Name string `json:"name"` +} + +// SubscriptionManifests defines model for SubscriptionManifests. +type SubscriptionManifests []SubscriptionManifest + +// TableViewProperties defines model for TableViewProperties. +type TableViewProperties struct { + // Colors define color encoding of data into a visualization + Colors []DashboardColor `json:"colors"` + + // Indicates whether decimal places should be enforced, and how many digits it should show. + DecimalPlaces DecimalPlaces `json:"decimalPlaces"` + + // fieldOptions represent the fields retrieved by the query with customization options + FieldOptions []RenamableField `json:"fieldOptions"` + Note string `json:"note"` + Queries []DashboardQuery `json:"queries"` + Shape TableViewPropertiesShape `json:"shape"` + + // If true, will display note when empty + ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` + TableOptions struct { + // fixFirstColumn indicates whether the first column of the table should be locked + FixFirstColumn *bool `json:"fixFirstColumn,omitempty"` + + // Describes a field that can be renamed and made visible or invisible. + SortBy *RenamableField `json:"sortBy,omitempty"` + + // verticalTimeAxis describes the orientation of the table by indicating whether the time axis will be displayed vertically + VerticalTimeAxis *bool `json:"verticalTimeAxis,omitempty"` + + // Wrapping describes the text wrapping style to be used in table views + Wrapping *TableViewPropertiesTableOptionsWrapping `json:"wrapping,omitempty"` + } `json:"tableOptions"` + + // timeFormat describes the display format for time values according to moment.js date formatting + TimeFormat string `json:"timeFormat"` + Type TableViewPropertiesType `json:"type"` +} + +// TableViewPropertiesShape defines model for TableViewProperties.Shape. +type TableViewPropertiesShape string + +// Wrapping describes the text wrapping style to be used in table views +type TableViewPropertiesTableOptionsWrapping string + +// TableViewPropertiesType defines model for TableViewProperties.Type. +type TableViewPropertiesType string + +// TagRule defines model for TagRule. +type TagRule struct { + Key *string `json:"key,omitempty"` + Operator *TagRuleOperator `json:"operator,omitempty"` + Value *string `json:"value,omitempty"` +} + +// TagRuleOperator defines model for TagRule.Operator. +type TagRuleOperator string + +// Task defines model for Task. +type Task struct { + // ID of the authorization used when the task communicates with the query engine. + AuthorizationID *string `json:"authorizationID,omitempty"` + CreatedAt *time.Time `json:"createdAt,omitempty"` + + // [Cron expression](https://en.wikipedia.org/wiki/Cron#Overview) that defines the schedule on which the task runs. Cron scheduling is based on system time. + // Value is a [Cron expression](https://en.wikipedia.org/wiki/Cron#Overview). + Cron *string `json:"cron,omitempty"` + + // Description of the task. + Description *string `json:"description,omitempty"` + + // Interval at which the task runs. `every` also determines when the task first runs, depending on the specified time. + // Value is a [duration literal](https://docs.influxdata.com/flux/v0.x/spec/lexical-elements/#duration-literals)). + Every *string `json:"every,omitempty"` + + // Flux script to run for this task. 
+ Flux string `json:"flux"` + Id string `json:"id"` + Labels *Labels `json:"labels,omitempty"` + LastRunError *string `json:"lastRunError,omitempty"` + LastRunStatus *TaskLastRunStatus `json:"lastRunStatus,omitempty"` + + // Timestamp of the latest scheduled and completed run. + // Value is a timestamp in [RFC3339 date/time format](https://docs.influxdata.com/flux/v0.x/data-types/basic/time/#time-syntax). + LatestCompleted *time.Time `json:"latestCompleted,omitempty"` + Links *struct { + // URI of resource. + Labels *Link `json:"labels,omitempty"` + + // URI of resource. + Logs *Link `json:"logs,omitempty"` + + // URI of resource. + Members *Link `json:"members,omitempty"` + + // URI of resource. + Owners *Link `json:"owners,omitempty"` + + // URI of resource. + Runs *Link `json:"runs,omitempty"` + + // URI of resource. + Self *Link `json:"self,omitempty"` + } `json:"links,omitempty"` + + // Name of the task. + Name string `json:"name"` + + // [Duration](https://docs.influxdata.com/flux/v0.x/spec/lexical-elements/#duration-literals) to delay execution of the task after the scheduled time has elapsed. `0` removes the offset. + // The value is a [duration literal](https://docs.influxdata.com/flux/v0.x/spec/lexical-elements/#duration-literals). + Offset *string `json:"offset,omitempty"` + + // Name of the organization that owns the task. + Org *string `json:"org,omitempty"` + + // ID of the organization that owns the task. + OrgID string `json:"orgID"` + + // ID of the user who owns this Task. + OwnerID *string `json:"ownerID,omitempty"` + Status *TaskStatusType `json:"status,omitempty"` + + // Type of the task, useful for filtering a task list. + Type *string `json:"type,omitempty"` + UpdatedAt *time.Time `json:"updatedAt,omitempty"` +} + +// TaskLastRunStatus defines model for Task.LastRunStatus. +type TaskLastRunStatus string + +// TaskCreateRequest defines model for TaskCreateRequest. +type TaskCreateRequest struct { + // An optional description of the task. + Description *string `json:"description,omitempty"` + + // The Flux script to run for this task. + Flux string `json:"flux"` + + // The name of the organization that owns this Task. + Org *string `json:"org,omitempty"` + + // The ID of the organization that owns this Task. + OrgID *string `json:"orgID,omitempty"` + Status *TaskStatusType `json:"status,omitempty"` +} + +// TaskStatusType defines model for TaskStatusType. +type TaskStatusType string + +// TaskUpdateRequest defines model for TaskUpdateRequest. +type TaskUpdateRequest struct { + // Override the 'cron' option in the flux script. + Cron *string `json:"cron,omitempty"` + + // An optional description of the task. + Description *string `json:"description,omitempty"` + + // Override the 'every' option in the flux script. + Every *string `json:"every,omitempty"` + + // The Flux script to run for this task. + Flux *string `json:"flux,omitempty"` + + // Override the 'name' option in the flux script. + Name *string `json:"name,omitempty"` + + // Override the 'offset' option in the flux script. + Offset *string `json:"offset,omitempty"` + Status *TaskStatusType `json:"status,omitempty"` +} + +// Tasks defines model for Tasks. +type Tasks struct { + Links *Links `json:"links,omitempty"` + Tasks *[]Task `json:"tasks,omitempty"` +} + +// Telegraf defines model for Telegraf. 
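+// The TaskCreateRequest model above maps directly onto a Flux task. As a
+// hedged sketch (the task name and the "tilty-downsampled" destination bucket
+// are assumptions, not part of this changeset), a task that downsamples the
+// gravity measurement Tilty writes could look like:
+//
+//	org := "Mine"
+//	flux := `option task = {name: "tilty-gravity-mean", every: 1h}
+//	from(bucket: "tilty")
+//	    |> range(start: -task.every)
+//	    |> filter(fn: (r) => r._measurement == "gravity")
+//	    |> aggregateWindow(every: 10m, fn: mean)
+//	    |> to(bucket: "tilty-downsampled")`
+//	req := TaskCreateRequest{Flux: flux, Org: &org}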
+type Telegraf struct { + // Embedded struct due to allOf(#/components/schemas/TelegrafRequest) + TelegrafRequest `yaml:",inline"` + // Embedded fields due to inline allOf schema + Id *string `json:"id,omitempty"` + Labels *Labels `json:"labels,omitempty"` + Links *struct { + // URI of resource. + Labels *Link `json:"labels,omitempty"` + + // URI of resource. + Members *Link `json:"members,omitempty"` + + // URI of resource. + Owners *Link `json:"owners,omitempty"` + + // URI of resource. + Self *Link `json:"self,omitempty"` + } `json:"links,omitempty"` +} + +// TelegrafPlugin defines model for TelegrafPlugin. +type TelegrafPlugin struct { + Config *string `json:"config,omitempty"` + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + Type *string `json:"type,omitempty"` +} + +// TelegrafPluginRequest defines model for TelegrafPluginRequest. +type TelegrafPluginRequest struct { + Config *string `json:"config,omitempty"` + Description *string `json:"description,omitempty"` + Metadata *struct { + Buckets *[]string `json:"buckets,omitempty"` + } `json:"metadata,omitempty"` + Name *string `json:"name,omitempty"` + OrgID *string `json:"orgID,omitempty"` + Plugins *[]struct { + Alias *string `json:"alias,omitempty"` + Config *string `json:"config,omitempty"` + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + Type *string `json:"type,omitempty"` + } `json:"plugins,omitempty"` +} + +// TelegrafPlugins defines model for TelegrafPlugins. +type TelegrafPlugins struct { + Os *string `json:"os,omitempty"` + Plugins *[]TelegrafPlugin `json:"plugins,omitempty"` + Version *string `json:"version,omitempty"` +} + +// TelegrafRequest defines model for TelegrafRequest. +type TelegrafRequest struct { + Config *string `json:"config,omitempty"` + Description *string `json:"description,omitempty"` + Metadata *struct { + Buckets *[]string `json:"buckets,omitempty"` + } `json:"metadata,omitempty"` + Name *string `json:"name,omitempty"` + OrgID *string `json:"orgID,omitempty"` +} + +// Telegrafs defines model for Telegrafs. +type Telegrafs struct { + Configurations *[]Telegraf `json:"configurations,omitempty"` +} + +// TelegramNotificationEndpoint defines model for TelegramNotificationEndpoint. +type TelegramNotificationEndpoint struct { + // Embedded struct due to allOf(#/components/schemas/NotificationEndpointBase) + NotificationEndpointBase `yaml:",inline"` + // Embedded fields due to inline allOf schema + // ID of the telegram channel, a chat_id in https://core.telegram.org/bots/api#sendmessage . + Channel string `json:"channel"` + + // Specifies the Telegram bot token. See https://core.telegram.org/bots#creating-a-new-bot . + Token string `json:"token"` +} + +// TelegramNotificationRule defines model for TelegramNotificationRule. +type TelegramNotificationRule struct { + // Embedded struct due to allOf(#/components/schemas/NotificationRuleBase) + NotificationRuleBase `yaml:",inline"` + // Embedded struct due to allOf(#/components/schemas/TelegramNotificationRuleBase) + TelegramNotificationRuleBase `yaml:",inline"` +} + +// TelegramNotificationRuleBase defines model for TelegramNotificationRuleBase. +type TelegramNotificationRuleBase struct { + // Disables preview of web links in the sent messages when "true". Defaults to "false" . + DisableWebPagePreview *bool `json:"disableWebPagePreview,omitempty"` + + // The message template as a flux interpolated string. 
+ MessageTemplate string `json:"messageTemplate"` + + // Parse mode of the message text per https://core.telegram.org/bots/api#formatting-options . Defaults to "MarkdownV2" . + ParseMode *TelegramNotificationRuleBaseParseMode `json:"parseMode,omitempty"` + + // The discriminator between other types of notification rules is "telegram". + Type TelegramNotificationRuleBaseType `json:"type"` +} + +// Parse mode of the message text per https://core.telegram.org/bots/api#formatting-options . Defaults to "MarkdownV2" . +type TelegramNotificationRuleBaseParseMode string + +// The discriminator between other types of notification rules is "telegram". +type TelegramNotificationRuleBaseType string + +// Template defines model for Template. +type Template []struct { + ApiVersion *string `json:"apiVersion,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + Meta *struct { + Name *string `json:"name,omitempty"` + } `json:"meta,omitempty"` + Spec *map[string]interface{} `json:"spec,omitempty"` +} + +// TemplateApply defines model for TemplateApply. +type TemplateApply struct { + Actions *[]interface{} `json:"actions,omitempty"` + DryRun *bool `json:"dryRun,omitempty"` + EnvRefs *TemplateApply_EnvRefs `json:"envRefs,omitempty"` + OrgID *string `json:"orgID,omitempty"` + Remotes *[]struct { + ContentType *string `json:"contentType,omitempty"` + Url string `json:"url"` + } `json:"remotes,omitempty"` + Secrets *TemplateApply_Secrets `json:"secrets,omitempty"` + StackID *string `json:"stackID,omitempty"` + Template *struct { + ContentType *string `json:"contentType,omitempty"` + Contents *Template `json:"contents,omitempty"` + Sources *[]string `json:"sources,omitempty"` + } `json:"template,omitempty"` + Templates *[]struct { + ContentType *string `json:"contentType,omitempty"` + Contents *Template `json:"contents,omitempty"` + Sources *[]string `json:"sources,omitempty"` + } `json:"templates,omitempty"` +} + +// TemplateApply_EnvRefs defines model for TemplateApply.EnvRefs. +type TemplateApply_EnvRefs struct { + AdditionalProperties map[string]interface{} `json:"-"` +} + +// TemplateApply_Secrets defines model for TemplateApply.Secrets. +type TemplateApply_Secrets struct { + AdditionalProperties map[string]string `json:"-"` +} + +// TemplateChart defines model for TemplateChart. +type TemplateChart struct { + Height *int `json:"height,omitempty"` + Properties *ViewProperties `json:"properties,omitempty"` + Width *int `json:"width,omitempty"` + XPos *int `json:"xPos,omitempty"` + YPos *int `json:"yPos,omitempty"` +} + +// TemplateEnvReferences defines model for TemplateEnvReferences. +type TemplateEnvReferences []struct { + // Default value that will be provided for the reference when no value is provided + DefaultValue *interface{} `json:"defaultValue"` + + // Key identified as environment reference and is the key identified in the template + EnvRefKey string `json:"envRefKey"` + + // Field the environment reference corresponds too + ResourceField string `json:"resourceField"` + + // Value provided to fulfill reference + Value *interface{} `json:"value"` +} + +// TemplateExportByID defines model for TemplateExportByID. 
+type TemplateExportByID struct { + OrgIDs *[]struct { + OrgID *string `json:"orgID,omitempty"` + ResourceFilters *struct { + ByLabel *[]string `json:"byLabel,omitempty"` + ByResourceKind *[]TemplateKind `json:"byResourceKind,omitempty"` + } `json:"resourceFilters,omitempty"` + } `json:"orgIDs,omitempty"` + Resources *[]struct { + Id string `json:"id"` + Kind TemplateKind `json:"kind"` + + // if defined with id, name is used for resource exported by id. if defined independently, resources strictly matching name are exported + Name *string `json:"name,omitempty"` + } `json:"resources,omitempty"` + StackID *string `json:"stackID,omitempty"` +} + +// TemplateExportByName defines model for TemplateExportByName. +type TemplateExportByName struct { + OrgIDs *[]struct { + OrgID *string `json:"orgID,omitempty"` + ResourceFilters *struct { + ByLabel *[]string `json:"byLabel,omitempty"` + ByResourceKind *[]TemplateKind `json:"byResourceKind,omitempty"` + } `json:"resourceFilters,omitempty"` + } `json:"orgIDs,omitempty"` + Resources *[]struct { + Kind TemplateKind `json:"kind"` + Name string `json:"name"` + } `json:"resources,omitempty"` + StackID *string `json:"stackID,omitempty"` +} + +// TemplateKind defines model for TemplateKind. +type TemplateKind string + +// TemplateSummary defines model for TemplateSummary. +type TemplateSummary struct { + Diff *struct { + Buckets *[]struct { + Id *string `json:"id,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + New *struct { + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + + // Rules to expire or retain data. No rules means data never expires. + RetentionRules *RetentionRules `json:"retentionRules,omitempty"` + } `json:"new,omitempty"` + Old *struct { + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + + // Rules to expire or retain data. No rules means data never expires. 
+ RetentionRules *RetentionRules `json:"retentionRules,omitempty"` + } `json:"old,omitempty"` + StateStatus *string `json:"stateStatus,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"buckets,omitempty"` + Checks *[]struct { + Id *string `json:"id,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + New *CheckDiscriminator `json:"new,omitempty"` + Old *CheckDiscriminator `json:"old,omitempty"` + StateStatus *string `json:"stateStatus,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"checks,omitempty"` + Dashboards *[]struct { + Id *string `json:"id,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + New *struct { + Charts *[]TemplateChart `json:"charts,omitempty"` + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + } `json:"new,omitempty"` + Old *struct { + Charts *[]TemplateChart `json:"charts,omitempty"` + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + } `json:"old,omitempty"` + StateStatus *string `json:"stateStatus,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"dashboards,omitempty"` + LabelMappings *[]struct { + LabelID *string `json:"labelID,omitempty"` + LabelName *string `json:"labelName,omitempty"` + LabelTemplateMetaName *string `json:"labelTemplateMetaName,omitempty"` + ResourceID *string `json:"resourceID,omitempty"` + ResourceName *string `json:"resourceName,omitempty"` + ResourceTemplateMetaName *string `json:"resourceTemplateMetaName,omitempty"` + ResourceType *string `json:"resourceType,omitempty"` + Status *string `json:"status,omitempty"` + } `json:"labelMappings,omitempty"` + Labels *[]struct { + Id *string `json:"id,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + New *struct { + Color *string `json:"color,omitempty"` + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + } `json:"new,omitempty"` + Old *struct { + Color *string `json:"color,omitempty"` + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + } `json:"old,omitempty"` + StateStatus *string `json:"stateStatus,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"labels,omitempty"` + NotificationEndpoints *[]struct { + Id *string `json:"id,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + New *NotificationEndpointDiscriminator `json:"new,omitempty"` + Old *NotificationEndpointDiscriminator `json:"old,omitempty"` + StateStatus *string `json:"stateStatus,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"notificationEndpoints,omitempty"` + NotificationRules *[]struct { + Id *string `json:"id,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + New *struct { + Description *string `json:"description,omitempty"` + EndpointID *string `json:"endpointID,omitempty"` + EndpointName *string `json:"endpointName,omitempty"` + EndpointType *string `json:"endpointType,omitempty"` + Every *string `json:"every,omitempty"` + MessageTemplate *string `json:"messageTemplate,omitempty"` + Name *string `json:"name,omitempty"` + Offset *string `json:"offset,omitempty"` + Status *string `json:"status,omitempty"` + StatusRules *[]struct { + CurrentLevel *string `json:"currentLevel,omitempty"` + PreviousLevel *string `json:"previousLevel,omitempty"` + } `json:"statusRules,omitempty"` + TagRules *[]struct { + Key *string `json:"key,omitempty"` + Operator *string 
`json:"operator,omitempty"` + Value *string `json:"value,omitempty"` + } `json:"tagRules,omitempty"` + } `json:"new,omitempty"` + Old *struct { + Description *string `json:"description,omitempty"` + EndpointID *string `json:"endpointID,omitempty"` + EndpointName *string `json:"endpointName,omitempty"` + EndpointType *string `json:"endpointType,omitempty"` + Every *string `json:"every,omitempty"` + MessageTemplate *string `json:"messageTemplate,omitempty"` + Name *string `json:"name,omitempty"` + Offset *string `json:"offset,omitempty"` + Status *string `json:"status,omitempty"` + StatusRules *[]struct { + CurrentLevel *string `json:"currentLevel,omitempty"` + PreviousLevel *string `json:"previousLevel,omitempty"` + } `json:"statusRules,omitempty"` + TagRules *[]struct { + Key *string `json:"key,omitempty"` + Operator *string `json:"operator,omitempty"` + Value *string `json:"value,omitempty"` + } `json:"tagRules,omitempty"` + } `json:"old,omitempty"` + StateStatus *string `json:"stateStatus,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"notificationRules,omitempty"` + Tasks *[]struct { + Id *string `json:"id,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + New *struct { + Cron *string `json:"cron,omitempty"` + Description *string `json:"description,omitempty"` + Every *string `json:"every,omitempty"` + Name *string `json:"name,omitempty"` + Offset *string `json:"offset,omitempty"` + Query *string `json:"query,omitempty"` + Status *string `json:"status,omitempty"` + } `json:"new,omitempty"` + Old *struct { + Cron *string `json:"cron,omitempty"` + Description *string `json:"description,omitempty"` + Every *string `json:"every,omitempty"` + Name *string `json:"name,omitempty"` + Offset *string `json:"offset,omitempty"` + Query *string `json:"query,omitempty"` + Status *string `json:"status,omitempty"` + } `json:"old,omitempty"` + StateStatus *string `json:"stateStatus,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"tasks,omitempty"` + TelegrafConfigs *[]struct { + Id *string `json:"id,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + New *TelegrafRequest `json:"new,omitempty"` + Old *TelegrafRequest `json:"old,omitempty"` + StateStatus *string `json:"stateStatus,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"telegrafConfigs,omitempty"` + Variables *[]struct { + Id *string `json:"id,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + New *struct { + Args *VariableProperties `json:"args,omitempty"` + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + } `json:"new,omitempty"` + Old *struct { + Args *VariableProperties `json:"args,omitempty"` + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + } `json:"old,omitempty"` + StateStatus *string `json:"stateStatus,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"variables,omitempty"` + } `json:"diff,omitempty"` + Errors *[]struct { + Fields *[]string `json:"fields,omitempty"` + Indexes *[]int `json:"indexes,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + Reason *string `json:"reason,omitempty"` + } `json:"errors,omitempty"` + Sources *[]string `json:"sources,omitempty"` + StackID *string `json:"stackID,omitempty"` + Summary *struct { + Buckets *[]struct { + Description *string `json:"description,omitempty"` + EnvReferences *TemplateEnvReferences `json:"envReferences,omitempty"` + Id 
*string `json:"id,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + LabelAssociations *[]TemplateSummaryLabel `json:"labelAssociations,omitempty"` + Name *string `json:"name,omitempty"` + OrgID *string `json:"orgID,omitempty"` + RetentionPeriod *int `json:"retentionPeriod,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"buckets,omitempty"` + Checks *[]struct { + // Embedded struct due to allOf(#/components/schemas/CheckDiscriminator) + CheckDiscriminator `yaml:",inline"` + // Embedded fields due to inline allOf schema + EnvReferences *TemplateEnvReferences `json:"envReferences,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + LabelAssociations *[]TemplateSummaryLabel `json:"labelAssociations,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"checks,omitempty"` + Dashboards *[]struct { + Charts *[]TemplateChart `json:"charts,omitempty"` + Description *string `json:"description,omitempty"` + EnvReferences *TemplateEnvReferences `json:"envReferences,omitempty"` + Id *string `json:"id,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + LabelAssociations *[]TemplateSummaryLabel `json:"labelAssociations,omitempty"` + Name *string `json:"name,omitempty"` + OrgID *string `json:"orgID,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"dashboards,omitempty"` + LabelMappings *[]struct { + LabelID *string `json:"labelID,omitempty"` + LabelName *string `json:"labelName,omitempty"` + LabelTemplateMetaName *string `json:"labelTemplateMetaName,omitempty"` + ResourceID *string `json:"resourceID,omitempty"` + ResourceName *string `json:"resourceName,omitempty"` + ResourceTemplateMetaName *string `json:"resourceTemplateMetaName,omitempty"` + ResourceType *string `json:"resourceType,omitempty"` + Status *string `json:"status,omitempty"` + } `json:"labelMappings,omitempty"` + Labels *[]TemplateSummaryLabel `json:"labels,omitempty"` + MissingEnvRefs *[]string `json:"missingEnvRefs,omitempty"` + MissingSecrets *[]string `json:"missingSecrets,omitempty"` + NotificationEndpoints *[]struct { + // Embedded struct due to allOf(#/components/schemas/NotificationEndpointDiscriminator) + NotificationEndpointDiscriminator `yaml:",inline"` + // Embedded fields due to inline allOf schema + EnvReferences *TemplateEnvReferences `json:"envReferences,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + LabelAssociations *[]TemplateSummaryLabel `json:"labelAssociations,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"notificationEndpoints,omitempty"` + NotificationRules *[]struct { + Description *string `json:"description,omitempty"` + EndpointID *string `json:"endpointID,omitempty"` + EndpointTemplateMetaName *string `json:"endpointTemplateMetaName,omitempty"` + EndpointType *string `json:"endpointType,omitempty"` + EnvReferences *TemplateEnvReferences `json:"envReferences,omitempty"` + Every *string `json:"every,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + LabelAssociations *[]TemplateSummaryLabel `json:"labelAssociations,omitempty"` + MessageTemplate *string `json:"messageTemplate,omitempty"` + Name *string `json:"name,omitempty"` + Offset *string `json:"offset,omitempty"` + Status *string `json:"status,omitempty"` + StatusRules *[]struct { + CurrentLevel *string `json:"currentLevel,omitempty"` + PreviousLevel *string `json:"previousLevel,omitempty"` + } `json:"statusRules,omitempty"` + TagRules *[]struct { + Key *string 
`json:"key,omitempty"` + Operator *string `json:"operator,omitempty"` + Value *string `json:"value,omitempty"` + } `json:"tagRules,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"notificationRules,omitempty"` + Tasks *[]struct { + Cron *string `json:"cron,omitempty"` + Description *string `json:"description,omitempty"` + EnvReferences *TemplateEnvReferences `json:"envReferences,omitempty"` + Every *string `json:"every,omitempty"` + Id *string `json:"id,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + Name *string `json:"name,omitempty"` + Offset *string `json:"offset,omitempty"` + Query *string `json:"query,omitempty"` + Status *string `json:"status,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"tasks,omitempty"` + TelegrafConfigs *[]struct { + // Embedded struct due to allOf(#/components/schemas/TelegrafRequest) + TelegrafRequest `yaml:",inline"` + // Embedded fields due to inline allOf schema + EnvReferences *TemplateEnvReferences `json:"envReferences,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + LabelAssociations *[]TemplateSummaryLabel `json:"labelAssociations,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"telegrafConfigs,omitempty"` + Variables *[]struct { + Arguments *VariableProperties `json:"arguments,omitempty"` + Description *string `json:"description,omitempty"` + EnvReferences *TemplateEnvReferences `json:"envReferences,omitempty"` + Id *string `json:"id,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + LabelAssociations *[]TemplateSummaryLabel `json:"labelAssociations,omitempty"` + Name *string `json:"name,omitempty"` + OrgID *string `json:"orgID,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"variables,omitempty"` + } `json:"summary,omitempty"` +} + +// TemplateSummaryLabel defines model for TemplateSummaryLabel. +type TemplateSummaryLabel struct { + EnvReferences *TemplateEnvReferences `json:"envReferences,omitempty"` + Id *string `json:"id,omitempty"` + Kind *TemplateKind `json:"kind,omitempty"` + Name *string `json:"name,omitempty"` + OrgID *string `json:"orgID,omitempty"` + Properties *struct { + Color *string `json:"color,omitempty"` + Description *string `json:"description,omitempty"` + } `json:"properties,omitempty"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` +} + +// Declares a Flux test case +type TestStatement struct { + // Represents the declaration of a variable + Assignment *VariableAssignment `json:"assignment,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Threshold defines model for Threshold. +type Threshold interface{} + +// ThresholdBase defines model for ThresholdBase. +type ThresholdBase struct { + // If true, only alert if all values meet threshold. + AllValues *bool `json:"allValues,omitempty"` + + // The state to record if check matches a criteria. + Level *CheckStatusLevel `json:"level,omitempty"` +} + +// ThresholdCheck defines model for ThresholdCheck. +type ThresholdCheck struct { + // Embedded struct due to allOf(#/components/schemas/CheckBase) + CheckBase `yaml:",inline"` + // Embedded fields due to inline allOf schema + // Check repetition interval. + Every *string `json:"every,omitempty"` + + // Duration to delay after the schedule, before executing check. + Offset *string `json:"offset,omitempty"` + + // The template used to generate and write a status message. 
+ StatusMessageTemplate *string `json:"statusMessageTemplate,omitempty"` + + // List of tags to write to each status. + Tags *[]struct { + Key *string `json:"key,omitempty"` + Value *string `json:"value,omitempty"` + } `json:"tags,omitempty"` + Thresholds *[]Threshold `json:"thresholds,omitempty"` + Type ThresholdCheckType `json:"type"` +} + +// ThresholdCheckType defines model for ThresholdCheck.Type. +type ThresholdCheckType string + +// Uses operators to act on a single operand in an expression +type UnaryExpression struct { + Argument *Expression `json:"argument,omitempty"` + Operator *string `json:"operator,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// Represents integer numbers +type UnsignedIntegerLiteral struct { + // Type of AST node + Type *NodeType `json:"type,omitempty"` + Value *string `json:"value,omitempty"` +} + +// User defines model for User. +type User struct { + Id *string `json:"id,omitempty"` + Name string `json:"name"` + OauthID *string `json:"oauthID,omitempty"` + + // If inactive the user is inactive. + Status *UserStatus `json:"status,omitempty"` +} + +// If inactive the user is inactive. +type UserStatus string + +// UserResponse defines model for UserResponse. +type UserResponse struct { + Id *string `json:"id,omitempty"` + Links *struct { + Self *string `json:"self,omitempty"` + } `json:"links,omitempty"` + Name string `json:"name"` + OauthID *string `json:"oauthID,omitempty"` + + // If inactive the user is inactive. + Status *UserResponseStatus `json:"status,omitempty"` +} + +// If inactive the user is inactive. +type UserResponseStatus string + +// Users defines model for Users. +type Users struct { + Links *struct { + Self *string `json:"self,omitempty"` + } `json:"links,omitempty"` + Users *[]UserResponse `json:"users,omitempty"` +} + +// Variable defines model for Variable. +type Variable struct { + Arguments VariableProperties `json:"arguments"` + CreatedAt *time.Time `json:"createdAt,omitempty"` + Description *string `json:"description,omitempty"` + Id *string `json:"id,omitempty"` + Labels *Labels `json:"labels,omitempty"` + Links *struct { + Labels *string `json:"labels,omitempty"` + Org *string `json:"org,omitempty"` + Self *string `json:"self,omitempty"` + } `json:"links,omitempty"` + Name string `json:"name"` + OrgID string `json:"orgID"` + Selected *[]string `json:"selected,omitempty"` + UpdatedAt *time.Time `json:"updatedAt,omitempty"` +} + +// Represents the declaration of a variable +type VariableAssignment struct { + // A valid Flux identifier + Id *Identifier `json:"id,omitempty"` + Init *Expression `json:"init,omitempty"` + + // Type of AST node + Type *NodeType `json:"type,omitempty"` +} + +// VariableProperties defines model for VariableProperties. +type VariableProperties interface{} + +// Variables defines model for Variables. +type Variables struct { + Variables *[]Variable `json:"variables,omitempty"` +} + +// View defines model for View. +type View struct { + Id *string `json:"id,omitempty"` + Links *struct { + Self *string `json:"self,omitempty"` + } `json:"links,omitempty"` + Name string `json:"name"` + Properties ViewProperties `json:"properties"` +} + +// ViewProperties defines model for ViewProperties. +type ViewProperties interface{} + +// WritePrecision defines model for WritePrecision. +type WritePrecision string + +// XYGeom defines model for XYGeom. +type XYGeom string + +// XYViewProperties defines model for XYViewProperties. 
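+// As a rough sketch (the enum values "xy", "chronograf-v2", "line", and
+// "overlaid" are assumptions based on the API's usual values, not taken from
+// this changeset), a dashboard cell plotting Tilty's specific-gravity series
+// over time would populate the required fields roughly like this, leaving the
+// remaining fields at their zero values:
+//
+//	view := XYViewProperties{
+//		Type:     XYViewPropertiesType("xy"),
+//		Shape:    XYViewPropertiesShape("chronograf-v2"),
+//		Geom:     XYGeom("line"),
+//		Position: XYViewPropertiesPosition("overlaid"),
+//		Queries:  []DashboardQuery{}, // Flux query against the "tilty" bucket
+//		Colors:   []DashboardColor{},
+//		Axes:     Axes{},
+//	}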
+type XYViewProperties struct { + // The viewport for a View's visualizations + Axes Axes `json:"axes"` + + // A color mapping is an object that maps time series data to a UI color scheme to allow the UI to render graphs consistent colors across reloads. + ColorMapping *ColorMapping `json:"colorMapping,omitempty"` + + // Colors define color encoding of data into a visualization + Colors []DashboardColor `json:"colors"` + GenerateXAxisTicks *[]string `json:"generateXAxisTicks,omitempty"` + GenerateYAxisTicks *[]string `json:"generateYAxisTicks,omitempty"` + Geom XYGeom `json:"geom"` + HoverDimension *XYViewPropertiesHoverDimension `json:"hoverDimension,omitempty"` + LegendColorizeRows *bool `json:"legendColorizeRows,omitempty"` + LegendHide *bool `json:"legendHide,omitempty"` + LegendOpacity *float32 `json:"legendOpacity,omitempty"` + LegendOrientationThreshold *int `json:"legendOrientationThreshold,omitempty"` + Note string `json:"note"` + Position XYViewPropertiesPosition `json:"position"` + Queries []DashboardQuery `json:"queries"` + ShadeBelow *bool `json:"shadeBelow,omitempty"` + Shape XYViewPropertiesShape `json:"shape"` + + // If true, will display note when empty + ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` + + // StaticLegend represents the options specific to the static legend + StaticLegend *StaticLegend `json:"staticLegend,omitempty"` + TimeFormat *string `json:"timeFormat,omitempty"` + Type XYViewPropertiesType `json:"type"` + XColumn *string `json:"xColumn,omitempty"` + XTickStart *float32 `json:"xTickStart,omitempty"` + XTickStep *float32 `json:"xTickStep,omitempty"` + XTotalTicks *int `json:"xTotalTicks,omitempty"` + YColumn *string `json:"yColumn,omitempty"` + YTickStart *float32 `json:"yTickStart,omitempty"` + YTickStep *float32 `json:"yTickStep,omitempty"` + YTotalTicks *int `json:"yTotalTicks,omitempty"` +} + +// XYViewPropertiesHoverDimension defines model for XYViewProperties.HoverDimension. +type XYViewPropertiesHoverDimension string + +// XYViewPropertiesPosition defines model for XYViewProperties.Position. +type XYViewPropertiesPosition string + +// XYViewPropertiesShape defines model for XYViewProperties.Shape. +type XYViewPropertiesShape string + +// XYViewPropertiesType defines model for XYViewProperties.Type. +type XYViewPropertiesType string + +// After defines model for After. +type After string + +// Descending defines model for Descending. +type Descending bool + +// Limit defines model for Limit. +type Limit int + +// Offset defines model for Offset. +type Offset int + +// TraceSpan defines model for TraceSpan. +type TraceSpan string + +// ServerError defines model for ServerError. +type ServerError Error + +// GetRoutesParams defines parameters for GetRoutes. +type GetRoutesParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetAuthorizationsParams defines parameters for GetAuthorizations. +type GetAuthorizationsParams struct { + // Only show authorizations that belong to a user ID. + UserID *string `json:"userID,omitempty"` + + // Only show authorizations that belong to a user name. + User *string `json:"user,omitempty"` + + // Only show authorizations that belong to an organization ID. + OrgID *string `json:"orgID,omitempty"` + + // Only show authorizations that belong to a organization name. 
+ Org *string `json:"org,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostAuthorizationsJSONBody defines parameters for PostAuthorizations. +type PostAuthorizationsJSONBody AuthorizationPostRequest + +// PostAuthorizationsParams defines parameters for PostAuthorizations. +type PostAuthorizationsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteAuthorizationsIDParams defines parameters for DeleteAuthorizationsID. +type DeleteAuthorizationsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetAuthorizationsIDParams defines parameters for GetAuthorizationsID. +type GetAuthorizationsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchAuthorizationsIDJSONBody defines parameters for PatchAuthorizationsID. +type PatchAuthorizationsIDJSONBody AuthorizationUpdateRequest + +// PatchAuthorizationsIDParams defines parameters for PatchAuthorizationsID. +type PatchAuthorizationsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetBackupKVParams defines parameters for GetBackupKV. +type GetBackupKVParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetBackupMetadataParams defines parameters for GetBackupMetadata. +type GetBackupMetadataParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` + + // Indicates the content encoding (usually a compression algorithm) that the client can understand. + AcceptEncoding *GetBackupMetadataParamsAcceptEncoding `json:"Accept-Encoding,omitempty"` +} + +// GetBackupMetadataParamsAcceptEncoding defines parameters for GetBackupMetadata. +type GetBackupMetadataParamsAcceptEncoding string + +// GetBackupShardIdParams defines parameters for GetBackupShardId. +type GetBackupShardIdParams struct { + // Earliest time to include in the snapshot. RFC3339 format. + Since *time.Time `json:"since,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` + + // Indicates the content encoding (usually a compression algorithm) that the client can understand. + AcceptEncoding *GetBackupShardIdParamsAcceptEncoding `json:"Accept-Encoding,omitempty"` +} + +// GetBackupShardIdParamsAcceptEncoding defines parameters for GetBackupShardId. +type GetBackupShardIdParamsAcceptEncoding string + +// GetBucketsParams defines parameters for GetBuckets. +type GetBucketsParams struct { + Offset *Offset `json:"offset,omitempty"` + Limit *Limit `json:"limit,omitempty"` + + // Resource ID to seek from. Results are not inclusive of this ID. Use `after` instead of `offset`. + After *After `json:"after,omitempty"` + + // The name of the organization. + Org *string `json:"org,omitempty"` + + // The organization ID. + OrgID *string `json:"orgID,omitempty"` + + // Only returns buckets with a specific name. + Name *string `json:"name,omitempty"` + + // Only returns buckets with a specific ID. + Id *string `json:"id,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostBucketsJSONBody defines parameters for PostBuckets. +type PostBucketsJSONBody PostBucketRequest + +// PostBucketsParams defines parameters for PostBuckets. 
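+// Illustrative sketch (hypothetical values): as with the other *Params types
+// in this file, the optional OpenTracing header is attached via a pointer.
+//
+//	span := TraceSpan("0af7651916cd43dd8448eb211c80319c")
+//	params := PostBucketsParams{ZapTraceSpan: &span}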
+type PostBucketsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteBucketsIDParams defines parameters for DeleteBucketsID. +type DeleteBucketsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetBucketsIDParams defines parameters for GetBucketsID. +type GetBucketsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchBucketsIDJSONBody defines parameters for PatchBucketsID. +type PatchBucketsIDJSONBody PatchBucketRequest + +// PatchBucketsIDParams defines parameters for PatchBucketsID. +type PatchBucketsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetBucketsIDLabelsParams defines parameters for GetBucketsIDLabels. +type GetBucketsIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostBucketsIDLabelsJSONBody defines parameters for PostBucketsIDLabels. +type PostBucketsIDLabelsJSONBody LabelMapping + +// PostBucketsIDLabelsParams defines parameters for PostBucketsIDLabels. +type PostBucketsIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteBucketsIDLabelsIDParams defines parameters for DeleteBucketsIDLabelsID. +type DeleteBucketsIDLabelsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetBucketsIDMembersParams defines parameters for GetBucketsIDMembers. +type GetBucketsIDMembersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostBucketsIDMembersJSONBody defines parameters for PostBucketsIDMembers. +type PostBucketsIDMembersJSONBody AddResourceMemberRequestBody + +// PostBucketsIDMembersParams defines parameters for PostBucketsIDMembers. +type PostBucketsIDMembersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteBucketsIDMembersIDParams defines parameters for DeleteBucketsIDMembersID. +type DeleteBucketsIDMembersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetBucketsIDOwnersParams defines parameters for GetBucketsIDOwners. +type GetBucketsIDOwnersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostBucketsIDOwnersJSONBody defines parameters for PostBucketsIDOwners. +type PostBucketsIDOwnersJSONBody AddResourceMemberRequestBody + +// PostBucketsIDOwnersParams defines parameters for PostBucketsIDOwners. +type PostBucketsIDOwnersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteBucketsIDOwnersIDParams defines parameters for DeleteBucketsIDOwnersID. +type DeleteBucketsIDOwnersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetChecksParams defines parameters for GetChecks. +type GetChecksParams struct { + Offset *Offset `json:"offset,omitempty"` + Limit *Limit `json:"limit,omitempty"` + + // Only show checks that belong to a specific organization ID. 
+ OrgID string `json:"orgID"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// CreateCheckJSONBody defines parameters for CreateCheck. +type CreateCheckJSONBody PostCheck + +// DeleteChecksIDParams defines parameters for DeleteChecksID. +type DeleteChecksIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetChecksIDParams defines parameters for GetChecksID. +type GetChecksIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchChecksIDJSONBody defines parameters for PatchChecksID. +type PatchChecksIDJSONBody CheckPatch + +// PatchChecksIDParams defines parameters for PatchChecksID. +type PatchChecksIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PutChecksIDJSONBody defines parameters for PutChecksID. +type PutChecksIDJSONBody Check + +// PutChecksIDParams defines parameters for PutChecksID. +type PutChecksIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetChecksIDLabelsParams defines parameters for GetChecksIDLabels. +type GetChecksIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostChecksIDLabelsJSONBody defines parameters for PostChecksIDLabels. +type PostChecksIDLabelsJSONBody LabelMapping + +// PostChecksIDLabelsParams defines parameters for PostChecksIDLabels. +type PostChecksIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteChecksIDLabelsIDParams defines parameters for DeleteChecksIDLabelsID. +type DeleteChecksIDLabelsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetChecksIDQueryParams defines parameters for GetChecksIDQuery. +type GetChecksIDQueryParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetConfigParams defines parameters for GetConfig. +type GetConfigParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetDashboardsParams defines parameters for GetDashboards. +type GetDashboardsParams struct { + Offset *Offset `json:"offset,omitempty"` + Limit *Limit `json:"limit,omitempty"` + Descending *Descending `json:"descending,omitempty"` + + // A user identifier. Returns only dashboards where this user has the `owner` role. + Owner *string `json:"owner,omitempty"` + + // The column to sort by. + SortBy *GetDashboardsParamsSortBy `json:"sortBy,omitempty"` + + // A list of dashboard identifiers. Returns only the listed dashboards. If both `id` and `owner` are specified, only `id` is used. + Id *[]string `json:"id,omitempty"` + + // The identifier of the organization. + OrgID *string `json:"orgID,omitempty"` + + // The name of the organization. + Org *string `json:"org,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetDashboardsParamsSortBy defines parameters for GetDashboards. +type GetDashboardsParamsSortBy string + +// PostDashboardsJSONBody defines parameters for PostDashboards. +type PostDashboardsJSONBody CreateDashboardRequest + +// PostDashboardsParams defines parameters for PostDashboards. 
+type PostDashboardsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteDashboardsIDParams defines parameters for DeleteDashboardsID. +type DeleteDashboardsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetDashboardsIDParams defines parameters for GetDashboardsID. +type GetDashboardsIDParams struct { + // Includes the cell view properties in the response if set to `properties` + Include *GetDashboardsIDParamsInclude `json:"include,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetDashboardsIDParamsInclude defines parameters for GetDashboardsID. +type GetDashboardsIDParamsInclude string + +// PatchDashboardsIDJSONBody defines parameters for PatchDashboardsID. +type PatchDashboardsIDJSONBody struct { + Cells *CellWithViewProperties `json:"cells,omitempty"` + + // optional, when provided will replace the description + Description *string `json:"description,omitempty"` + + // optional, when provided will replace the name + Name *string `json:"name,omitempty"` +} + +// PatchDashboardsIDParams defines parameters for PatchDashboardsID. +type PatchDashboardsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostDashboardsIDCellsJSONBody defines parameters for PostDashboardsIDCells. +type PostDashboardsIDCellsJSONBody CreateCell + +// PostDashboardsIDCellsParams defines parameters for PostDashboardsIDCells. +type PostDashboardsIDCellsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PutDashboardsIDCellsJSONBody defines parameters for PutDashboardsIDCells. +type PutDashboardsIDCellsJSONBody Cells + +// PutDashboardsIDCellsParams defines parameters for PutDashboardsIDCells. +type PutDashboardsIDCellsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteDashboardsIDCellsIDParams defines parameters for DeleteDashboardsIDCellsID. +type DeleteDashboardsIDCellsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchDashboardsIDCellsIDJSONBody defines parameters for PatchDashboardsIDCellsID. +type PatchDashboardsIDCellsIDJSONBody CellUpdate + +// PatchDashboardsIDCellsIDParams defines parameters for PatchDashboardsIDCellsID. +type PatchDashboardsIDCellsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetDashboardsIDCellsIDViewParams defines parameters for GetDashboardsIDCellsIDView. +type GetDashboardsIDCellsIDViewParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchDashboardsIDCellsIDViewJSONBody defines parameters for PatchDashboardsIDCellsIDView. +type PatchDashboardsIDCellsIDViewJSONBody View + +// PatchDashboardsIDCellsIDViewParams defines parameters for PatchDashboardsIDCellsIDView. +type PatchDashboardsIDCellsIDViewParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetDashboardsIDLabelsParams defines parameters for GetDashboardsIDLabels. 
+type GetDashboardsIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostDashboardsIDLabelsJSONBody defines parameters for PostDashboardsIDLabels. +type PostDashboardsIDLabelsJSONBody LabelMapping + +// PostDashboardsIDLabelsParams defines parameters for PostDashboardsIDLabels. +type PostDashboardsIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteDashboardsIDLabelsIDParams defines parameters for DeleteDashboardsIDLabelsID. +type DeleteDashboardsIDLabelsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetDashboardsIDMembersParams defines parameters for GetDashboardsIDMembers. +type GetDashboardsIDMembersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostDashboardsIDMembersJSONBody defines parameters for PostDashboardsIDMembers. +type PostDashboardsIDMembersJSONBody AddResourceMemberRequestBody + +// PostDashboardsIDMembersParams defines parameters for PostDashboardsIDMembers. +type PostDashboardsIDMembersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteDashboardsIDMembersIDParams defines parameters for DeleteDashboardsIDMembersID. +type DeleteDashboardsIDMembersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetDashboardsIDOwnersParams defines parameters for GetDashboardsIDOwners. +type GetDashboardsIDOwnersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostDashboardsIDOwnersJSONBody defines parameters for PostDashboardsIDOwners. +type PostDashboardsIDOwnersJSONBody AddResourceMemberRequestBody + +// PostDashboardsIDOwnersParams defines parameters for PostDashboardsIDOwners. +type PostDashboardsIDOwnersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteDashboardsIDOwnersIDParams defines parameters for DeleteDashboardsIDOwnersID. +type DeleteDashboardsIDOwnersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetDBRPsParams defines parameters for GetDBRPs. +type GetDBRPsParams struct { + // Specifies the organization ID to filter on + OrgID *string `json:"orgID,omitempty"` + + // Specifies the organization name to filter on + Org *string `json:"org,omitempty"` + + // Specifies the mapping ID to filter on + Id *string `json:"id,omitempty"` + + // Specifies the bucket ID to filter on + BucketID *string `json:"bucketID,omitempty"` + + // Specifies filtering on default + Default *bool `json:"default,omitempty"` + + // Specifies the database to filter on + Db *string `json:"db,omitempty"` + + // Specifies the retention policy to filter on + Rp *string `json:"rp,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostDBRPJSONBody defines parameters for PostDBRP. +type PostDBRPJSONBody DBRPCreate + +// PostDBRPParams defines parameters for PostDBRP. +type PostDBRPParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteDBRPIDParams defines parameters for DeleteDBRPID. 
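+// Illustrative sketch (hypothetical values): the mapping's organization may be
+// given either by ID or by name; both fields are optional pointers.
+//
+//	org := "my-org"
+//	params := DeleteDBRPIDParams{Org: &org}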
+type DeleteDBRPIDParams struct { + // Specifies the organization ID of the mapping + OrgID *string `json:"orgID,omitempty"` + + // Specifies the organization name of the mapping + Org *string `json:"org,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetDBRPsIDParams defines parameters for GetDBRPsID. +type GetDBRPsIDParams struct { + // Specifies the organization ID of the mapping + OrgID *string `json:"orgID,omitempty"` + + // Specifies the organization name of the mapping + Org *string `json:"org,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchDBRPIDJSONBody defines parameters for PatchDBRPID. +type PatchDBRPIDJSONBody DBRPUpdate + +// PatchDBRPIDParams defines parameters for PatchDBRPID. +type PatchDBRPIDParams struct { + // Specifies the organization ID of the mapping + OrgID *string `json:"orgID,omitempty"` + + // Specifies the organization name of the mapping + Org *string `json:"org,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostDeleteJSONBody defines parameters for PostDelete. +type PostDeleteJSONBody DeletePredicateRequest + +// PostDeleteParams defines parameters for PostDelete. +type PostDeleteParams struct { + // Specifies the organization to delete data from. + Org *string `json:"org,omitempty"` + + // Specifies the bucket to delete data from. + Bucket *string `json:"bucket,omitempty"` + + // Specifies the organization ID of the resource. + OrgID *string `json:"orgID,omitempty"` + + // Specifies the bucket ID to delete data from. + BucketID *string `json:"bucketID,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetFlagsParams defines parameters for GetFlags. +type GetFlagsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetHealthParams defines parameters for GetHealth. +type GetHealthParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetLabelsParams defines parameters for GetLabels. +type GetLabelsParams struct { + // The organization ID. + OrgID *string `json:"orgID,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostLabelsJSONBody defines parameters for PostLabels. +type PostLabelsJSONBody LabelCreateRequest + +// DeleteLabelsIDParams defines parameters for DeleteLabelsID. +type DeleteLabelsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetLabelsIDParams defines parameters for GetLabelsID. +type GetLabelsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchLabelsIDJSONBody defines parameters for PatchLabelsID. +type PatchLabelsIDJSONBody LabelUpdate + +// PatchLabelsIDParams defines parameters for PatchLabelsID. +type PatchLabelsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetLegacyAuthorizationsParams defines parameters for GetLegacyAuthorizations. +type GetLegacyAuthorizationsParams struct { + // Only show legacy authorizations that belong to a user ID. + UserID *string `json:"userID,omitempty"` + + // Only show legacy authorizations that belong to a user name. 
+ User *string `json:"user,omitempty"` + + // Only show legacy authorizations that belong to an organization ID. + OrgID *string `json:"orgID,omitempty"` + + // Only show legacy authorizations that belong to a organization name. + Org *string `json:"org,omitempty"` + + // Only show legacy authorizations with a specified token (auth name). + Token *string `json:"token,omitempty"` + + // Only show legacy authorizations with a specified auth ID. + AuthID *string `json:"authID,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostLegacyAuthorizationsJSONBody defines parameters for PostLegacyAuthorizations. +type PostLegacyAuthorizationsJSONBody LegacyAuthorizationPostRequest + +// PostLegacyAuthorizationsParams defines parameters for PostLegacyAuthorizations. +type PostLegacyAuthorizationsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteLegacyAuthorizationsIDParams defines parameters for DeleteLegacyAuthorizationsID. +type DeleteLegacyAuthorizationsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetLegacyAuthorizationsIDParams defines parameters for GetLegacyAuthorizationsID. +type GetLegacyAuthorizationsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchLegacyAuthorizationsIDJSONBody defines parameters for PatchLegacyAuthorizationsID. +type PatchLegacyAuthorizationsIDJSONBody AuthorizationUpdateRequest + +// PatchLegacyAuthorizationsIDParams defines parameters for PatchLegacyAuthorizationsID. +type PatchLegacyAuthorizationsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostLegacyAuthorizationsIDPasswordJSONBody defines parameters for PostLegacyAuthorizationsIDPassword. +type PostLegacyAuthorizationsIDPasswordJSONBody PasswordResetBody + +// PostLegacyAuthorizationsIDPasswordParams defines parameters for PostLegacyAuthorizationsIDPassword. +type PostLegacyAuthorizationsIDPasswordParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetMeParams defines parameters for GetMe. +type GetMeParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PutMePasswordJSONBody defines parameters for PutMePassword. +type PutMePasswordJSONBody PasswordResetBody + +// PutMePasswordParams defines parameters for PutMePassword. +type PutMePasswordParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetMetricsParams defines parameters for GetMetrics. +type GetMetricsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetNotificationEndpointsParams defines parameters for GetNotificationEndpoints. +type GetNotificationEndpointsParams struct { + Offset *Offset `json:"offset,omitempty"` + Limit *Limit `json:"limit,omitempty"` + + // Only show notification endpoints that belong to specific organization ID. + OrgID string `json:"orgID"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// CreateNotificationEndpointJSONBody defines parameters for CreateNotificationEndpoint. 
+type CreateNotificationEndpointJSONBody PostNotificationEndpoint + +// DeleteNotificationEndpointsIDParams defines parameters for DeleteNotificationEndpointsID. +type DeleteNotificationEndpointsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetNotificationEndpointsIDParams defines parameters for GetNotificationEndpointsID. +type GetNotificationEndpointsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchNotificationEndpointsIDJSONBody defines parameters for PatchNotificationEndpointsID. +type PatchNotificationEndpointsIDJSONBody NotificationEndpointUpdate + +// PatchNotificationEndpointsIDParams defines parameters for PatchNotificationEndpointsID. +type PatchNotificationEndpointsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PutNotificationEndpointsIDJSONBody defines parameters for PutNotificationEndpointsID. +type PutNotificationEndpointsIDJSONBody NotificationEndpoint + +// PutNotificationEndpointsIDParams defines parameters for PutNotificationEndpointsID. +type PutNotificationEndpointsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetNotificationEndpointsIDLabelsParams defines parameters for GetNotificationEndpointsIDLabels. +type GetNotificationEndpointsIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostNotificationEndpointIDLabelsJSONBody defines parameters for PostNotificationEndpointIDLabels. +type PostNotificationEndpointIDLabelsJSONBody LabelMapping + +// PostNotificationEndpointIDLabelsParams defines parameters for PostNotificationEndpointIDLabels. +type PostNotificationEndpointIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteNotificationEndpointsIDLabelsIDParams defines parameters for DeleteNotificationEndpointsIDLabelsID. +type DeleteNotificationEndpointsIDLabelsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetNotificationRulesParams defines parameters for GetNotificationRules. +type GetNotificationRulesParams struct { + Offset *Offset `json:"offset,omitempty"` + Limit *Limit `json:"limit,omitempty"` + + // Only show notification rules that belong to a specific organization ID. + OrgID string `json:"orgID"` + + // Only show notifications that belong to the specific check ID. + CheckID *string `json:"checkID,omitempty"` + + // Only return notification rules that "would match" statuses which contain the tag key value pairs provided. + Tag *string `json:"tag,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// CreateNotificationRuleJSONBody defines parameters for CreateNotificationRule. +type CreateNotificationRuleJSONBody PostNotificationRule + +// DeleteNotificationRulesIDParams defines parameters for DeleteNotificationRulesID. +type DeleteNotificationRulesIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetNotificationRulesIDParams defines parameters for GetNotificationRulesID. 
+type GetNotificationRulesIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchNotificationRulesIDJSONBody defines parameters for PatchNotificationRulesID. +type PatchNotificationRulesIDJSONBody NotificationRuleUpdate + +// PatchNotificationRulesIDParams defines parameters for PatchNotificationRulesID. +type PatchNotificationRulesIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PutNotificationRulesIDJSONBody defines parameters for PutNotificationRulesID. +type PutNotificationRulesIDJSONBody NotificationRule + +// PutNotificationRulesIDParams defines parameters for PutNotificationRulesID. +type PutNotificationRulesIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetNotificationRulesIDLabelsParams defines parameters for GetNotificationRulesIDLabels. +type GetNotificationRulesIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostNotificationRuleIDLabelsJSONBody defines parameters for PostNotificationRuleIDLabels. +type PostNotificationRuleIDLabelsJSONBody LabelMapping + +// PostNotificationRuleIDLabelsParams defines parameters for PostNotificationRuleIDLabels. +type PostNotificationRuleIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteNotificationRulesIDLabelsIDParams defines parameters for DeleteNotificationRulesIDLabelsID. +type DeleteNotificationRulesIDLabelsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetNotificationRulesIDQueryParams defines parameters for GetNotificationRulesIDQuery. +type GetNotificationRulesIDQueryParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetOrgsParams defines parameters for GetOrgs. +type GetOrgsParams struct { + Offset *Offset `json:"offset,omitempty"` + Limit *Limit `json:"limit,omitempty"` + Descending *Descending `json:"descending,omitempty"` + + // Filter organizations to a specific organization name. + Org *string `json:"org,omitempty"` + + // Filter organizations to a specific organization ID. + OrgID *string `json:"orgID,omitempty"` + + // Filter organizations to a specific user ID. + UserID *string `json:"userID,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostOrgsJSONBody defines parameters for PostOrgs. +type PostOrgsJSONBody PostOrganizationRequest + +// PostOrgsParams defines parameters for PostOrgs. +type PostOrgsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteOrgsIDParams defines parameters for DeleteOrgsID. +type DeleteOrgsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetOrgsIDParams defines parameters for GetOrgsID. +type GetOrgsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchOrgsIDJSONBody defines parameters for PatchOrgsID. +type PatchOrgsIDJSONBody PatchOrganizationRequest + +// PatchOrgsIDParams defines parameters for PatchOrgsID. 
+type PatchOrgsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetOrgsIDMembersParams defines parameters for GetOrgsIDMembers. +type GetOrgsIDMembersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostOrgsIDMembersJSONBody defines parameters for PostOrgsIDMembers. +type PostOrgsIDMembersJSONBody AddResourceMemberRequestBody + +// PostOrgsIDMembersParams defines parameters for PostOrgsIDMembers. +type PostOrgsIDMembersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteOrgsIDMembersIDParams defines parameters for DeleteOrgsIDMembersID. +type DeleteOrgsIDMembersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetOrgsIDOwnersParams defines parameters for GetOrgsIDOwners. +type GetOrgsIDOwnersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostOrgsIDOwnersJSONBody defines parameters for PostOrgsIDOwners. +type PostOrgsIDOwnersJSONBody AddResourceMemberRequestBody + +// PostOrgsIDOwnersParams defines parameters for PostOrgsIDOwners. +type PostOrgsIDOwnersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteOrgsIDOwnersIDParams defines parameters for DeleteOrgsIDOwnersID. +type DeleteOrgsIDOwnersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetOrgsIDSecretsParams defines parameters for GetOrgsIDSecrets. +type GetOrgsIDSecretsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchOrgsIDSecretsJSONBody defines parameters for PatchOrgsIDSecrets. +type PatchOrgsIDSecretsJSONBody Secrets + +// PatchOrgsIDSecretsParams defines parameters for PatchOrgsIDSecrets. +type PatchOrgsIDSecretsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostOrgsIDSecretsJSONBody defines parameters for PostOrgsIDSecrets. +type PostOrgsIDSecretsJSONBody SecretKeys + +// PostOrgsIDSecretsParams defines parameters for PostOrgsIDSecrets. +type PostOrgsIDSecretsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteOrgsIDSecretsIDParams defines parameters for DeleteOrgsIDSecretsID. +type DeleteOrgsIDSecretsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostQueryJSONBody defines parameters for PostQuery. +type PostQueryJSONBody Query + +// PostQueryParams defines parameters for PostQuery. +type PostQueryParams struct { + // Specifies the name of the organization executing the query. Takes either the ID or Name. If both `orgID` and `org` are specified, `org` takes precedence. + Org *string `json:"org,omitempty"` + + // Specifies the ID of the organization executing the query. If both `orgID` and `org` are specified, `org` takes precedence. + OrgID *string `json:"orgID,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` + + // Indicates the content encoding (usually a compression algorithm) that the client can understand. 
+ AcceptEncoding *PostQueryParamsAcceptEncoding `json:"Accept-Encoding,omitempty"` + ContentType *PostQueryParamsContentType `json:"Content-Type,omitempty"` +} + +// PostQueryParamsAcceptEncoding defines parameters for PostQuery. +type PostQueryParamsAcceptEncoding string + +// PostQueryParamsContentType defines parameters for PostQuery. +type PostQueryParamsContentType string + +// PostQueryAnalyzeJSONBody defines parameters for PostQueryAnalyze. +type PostQueryAnalyzeJSONBody Query + +// PostQueryAnalyzeParams defines parameters for PostQueryAnalyze. +type PostQueryAnalyzeParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` + ContentType *PostQueryAnalyzeParamsContentType `json:"Content-Type,omitempty"` +} + +// PostQueryAnalyzeParamsContentType defines parameters for PostQueryAnalyze. +type PostQueryAnalyzeParamsContentType string + +// PostQueryAstJSONBody defines parameters for PostQueryAst. +type PostQueryAstJSONBody LanguageRequest + +// PostQueryAstParams defines parameters for PostQueryAst. +type PostQueryAstParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` + ContentType *PostQueryAstParamsContentType `json:"Content-Type,omitempty"` +} + +// PostQueryAstParamsContentType defines parameters for PostQueryAst. +type PostQueryAstParamsContentType string + +// GetQuerySuggestionsParams defines parameters for GetQuerySuggestions. +type GetQuerySuggestionsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetQuerySuggestionsNameParams defines parameters for GetQuerySuggestionsName. +type GetQuerySuggestionsNameParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetReadyParams defines parameters for GetReady. +type GetReadyParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetRemoteConnectionsParams defines parameters for GetRemoteConnections. +type GetRemoteConnectionsParams struct { + // The organization ID. + OrgID string `json:"orgID"` + Name *string `json:"name,omitempty"` + RemoteURL *string `json:"remoteURL,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostRemoteConnectionJSONBody defines parameters for PostRemoteConnection. +type PostRemoteConnectionJSONBody RemoteConnectionCreationRequest + +// DeleteRemoteConnectionByIDParams defines parameters for DeleteRemoteConnectionByID. +type DeleteRemoteConnectionByIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetRemoteConnectionByIDParams defines parameters for GetRemoteConnectionByID. +type GetRemoteConnectionByIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchRemoteConnectionByIDJSONBody defines parameters for PatchRemoteConnectionByID. +type PatchRemoteConnectionByIDJSONBody RemoteConnectionUpdateRequest + +// PatchRemoteConnectionByIDParams defines parameters for PatchRemoteConnectionByID. +type PatchRemoteConnectionByIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetReplicationsParams defines parameters for GetReplications. +type GetReplicationsParams struct { + // The organization ID. 
+ OrgID string `json:"orgID"` + Name *string `json:"name,omitempty"` + RemoteID *string `json:"remoteID,omitempty"` + LocalBucketID *string `json:"localBucketID,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostReplicationJSONBody defines parameters for PostReplication. +type PostReplicationJSONBody ReplicationCreationRequest + +// PostReplicationParams defines parameters for PostReplication. +type PostReplicationParams struct { + // If true, validate the replication, but don't save it. + Validate *bool `json:"validate,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteReplicationByIDParams defines parameters for DeleteReplicationByID. +type DeleteReplicationByIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetReplicationByIDParams defines parameters for GetReplicationByID. +type GetReplicationByIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchReplicationByIDJSONBody defines parameters for PatchReplicationByID. +type PatchReplicationByIDJSONBody ReplicationUpdateRequest + +// PatchReplicationByIDParams defines parameters for PatchReplicationByID. +type PatchReplicationByIDParams struct { + // If true, validate the updated information, but don't save it. + Validate *bool `json:"validate,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostValidateReplicationByIDParams defines parameters for PostValidateReplicationByID. +type PostValidateReplicationByIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetResourcesParams defines parameters for GetResources. +type GetResourcesParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostRestoreBucketIDParams defines parameters for PostRestoreBucketID. +type PostRestoreBucketIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` + ContentType *PostRestoreBucketIDParamsContentType `json:"Content-Type,omitempty"` +} + +// PostRestoreBucketIDParamsContentType defines parameters for PostRestoreBucketID. +type PostRestoreBucketIDParamsContentType string + +// PostRestoreBucketMetadataJSONBody defines parameters for PostRestoreBucketMetadata. +type PostRestoreBucketMetadataJSONBody BucketMetadataManifest + +// PostRestoreBucketMetadataParams defines parameters for PostRestoreBucketMetadata. +type PostRestoreBucketMetadataParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostRestoreKVParams defines parameters for PostRestoreKV. +type PostRestoreKVParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` + + // The value tells InfluxDB what compression is applied to the line protocol in the request payload. + // To make an API request with a GZIP payload, send `Content-Encoding: gzip` as a request header. + ContentEncoding *PostRestoreKVParamsContentEncoding `json:"Content-Encoding,omitempty"` + ContentType *PostRestoreKVParamsContentType `json:"Content-Type,omitempty"` +} + +// PostRestoreKVParamsContentEncoding defines parameters for PostRestoreKV. 
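+// As the field comment above notes, "gzip" is the typical value; "identity"
+// would indicate an uncompressed payload (illustrative, not exhaustive).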
+type PostRestoreKVParamsContentEncoding string + +// PostRestoreKVParamsContentType defines parameters for PostRestoreKV. +type PostRestoreKVParamsContentType string + +// PostRestoreShardIdParams defines parameters for PostRestoreShardId. +type PostRestoreShardIdParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` + + // The value tells InfluxDB what compression is applied to the line protocol in the request payload. + // To make an API request with a GZIP payload, send `Content-Encoding: gzip` as a request header. + ContentEncoding *PostRestoreShardIdParamsContentEncoding `json:"Content-Encoding,omitempty"` + ContentType *PostRestoreShardIdParamsContentType `json:"Content-Type,omitempty"` +} + +// PostRestoreShardIdParamsContentEncoding defines parameters for PostRestoreShardId. +type PostRestoreShardIdParamsContentEncoding string + +// PostRestoreShardIdParamsContentType defines parameters for PostRestoreShardId. +type PostRestoreShardIdParamsContentType string + +// PostRestoreSQLParams defines parameters for PostRestoreSQL. +type PostRestoreSQLParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` + + // The value tells InfluxDB what compression is applied to the line protocol in the request payload. + // To make an API request with a GZIP payload, send `Content-Encoding: gzip` as a request header. + ContentEncoding *PostRestoreSQLParamsContentEncoding `json:"Content-Encoding,omitempty"` + ContentType *PostRestoreSQLParamsContentType `json:"Content-Type,omitempty"` +} + +// PostRestoreSQLParamsContentEncoding defines parameters for PostRestoreSQL. +type PostRestoreSQLParamsContentEncoding string + +// PostRestoreSQLParamsContentType defines parameters for PostRestoreSQL. +type PostRestoreSQLParamsContentType string + +// GetScrapersParams defines parameters for GetScrapers. +type GetScrapersParams struct { + // Specifies the name of the scraper target. + Name *string `json:"name,omitempty"` + + // List of scraper target IDs to return. If both `id` and `owner` are specified, only `id` is used. + Id *[]string `json:"id,omitempty"` + + // Specifies the organization ID of the scraper target. + OrgID *string `json:"orgID,omitempty"` + + // Specifies the organization name of the scraper target. + Org *string `json:"org,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostScrapersJSONBody defines parameters for PostScrapers. +type PostScrapersJSONBody ScraperTargetRequest + +// PostScrapersParams defines parameters for PostScrapers. +type PostScrapersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteScrapersIDParams defines parameters for DeleteScrapersID. +type DeleteScrapersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetScrapersIDParams defines parameters for GetScrapersID. +type GetScrapersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchScrapersIDJSONBody defines parameters for PatchScrapersID. +type PatchScrapersIDJSONBody ScraperTargetRequest + +// PatchScrapersIDParams defines parameters for PatchScrapersID. 
+type PatchScrapersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetScrapersIDLabelsParams defines parameters for GetScrapersIDLabels. +type GetScrapersIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostScrapersIDLabelsJSONBody defines parameters for PostScrapersIDLabels. +type PostScrapersIDLabelsJSONBody LabelMapping + +// PostScrapersIDLabelsParams defines parameters for PostScrapersIDLabels. +type PostScrapersIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteScrapersIDLabelsIDParams defines parameters for DeleteScrapersIDLabelsID. +type DeleteScrapersIDLabelsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetScrapersIDMembersParams defines parameters for GetScrapersIDMembers. +type GetScrapersIDMembersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostScrapersIDMembersJSONBody defines parameters for PostScrapersIDMembers. +type PostScrapersIDMembersJSONBody AddResourceMemberRequestBody + +// PostScrapersIDMembersParams defines parameters for PostScrapersIDMembers. +type PostScrapersIDMembersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteScrapersIDMembersIDParams defines parameters for DeleteScrapersIDMembersID. +type DeleteScrapersIDMembersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetScrapersIDOwnersParams defines parameters for GetScrapersIDOwners. +type GetScrapersIDOwnersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostScrapersIDOwnersJSONBody defines parameters for PostScrapersIDOwners. +type PostScrapersIDOwnersJSONBody AddResourceMemberRequestBody + +// PostScrapersIDOwnersParams defines parameters for PostScrapersIDOwners. +type PostScrapersIDOwnersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteScrapersIDOwnersIDParams defines parameters for DeleteScrapersIDOwnersID. +type DeleteScrapersIDOwnersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetSetupParams defines parameters for GetSetup. +type GetSetupParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostSetupJSONBody defines parameters for PostSetup. +type PostSetupJSONBody OnboardingRequest + +// PostSetupParams defines parameters for PostSetup. +type PostSetupParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostSigninParams defines parameters for PostSignin. +type PostSigninParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostSignoutParams defines parameters for PostSignout. +type PostSignoutParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetSourcesParams defines parameters for GetSources. +type GetSourcesParams struct { + // The name of the organization. 
+ Org *string `json:"org,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostSourcesJSONBody defines parameters for PostSources. +type PostSourcesJSONBody Source + +// PostSourcesParams defines parameters for PostSources. +type PostSourcesParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteSourcesIDParams defines parameters for DeleteSourcesID. +type DeleteSourcesIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetSourcesIDParams defines parameters for GetSourcesID. +type GetSourcesIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchSourcesIDJSONBody defines parameters for PatchSourcesID. +type PatchSourcesIDJSONBody Source + +// PatchSourcesIDParams defines parameters for PatchSourcesID. +type PatchSourcesIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetSourcesIDBucketsParams defines parameters for GetSourcesIDBuckets. +type GetSourcesIDBucketsParams struct { + // The name of the organization. + Org *string `json:"org,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetSourcesIDHealthParams defines parameters for GetSourcesIDHealth. +type GetSourcesIDHealthParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// ListStacksParams defines parameters for ListStacks. +type ListStacksParams struct { + // The organization ID of the stacks + OrgID string `json:"orgID"` + + // A collection of names to filter the list by. + Name *string `json:"name,omitempty"` + + // A collection of stackIDs to filter the list by. + StackID *string `json:"stackID,omitempty"` +} + +// CreateStackJSONBody defines parameters for CreateStack. +type CreateStackJSONBody struct { + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + OrgID *string `json:"orgID,omitempty"` + Urls *[]string `json:"urls,omitempty"` +} + +// DeleteStackParams defines parameters for DeleteStack. +type DeleteStackParams struct { + // The identifier of the organization. + OrgID string `json:"orgID"` +} + +// UpdateStackJSONBody defines parameters for UpdateStack. +type UpdateStackJSONBody struct { + AdditionalResources *[]struct { + Kind string `json:"kind"` + ResourceID string `json:"resourceID"` + TemplateMetaName *string `json:"templateMetaName,omitempty"` + } `json:"additionalResources,omitempty"` + Description *string `json:"description"` + Name *string `json:"name"` + TemplateURLs *[]string `json:"templateURLs"` +} + +// GetTasksParams defines parameters for GetTasks. +type GetTasksParams struct { + // Returns task with a specific name. + Name *string `json:"name,omitempty"` + + // Return tasks after a specified ID. + After *string `json:"after,omitempty"` + + // Filter tasks to a specific user ID. + User *string `json:"user,omitempty"` + + // Filter tasks to a specific organization name. + Org *string `json:"org,omitempty"` + + // Filter tasks to a specific organization ID. + OrgID *string `json:"orgID,omitempty"` + + // Filter tasks by a status--"inactive" or "active". + Status *GetTasksParamsStatus `json:"status,omitempty"` + + // The number of tasks to return + Limit *int `json:"limit,omitempty"` + + // Type of task, unset by default. 
+ Type *GetTasksParamsType `json:"type,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetTasksParamsStatus defines parameters for GetTasks. +type GetTasksParamsStatus string + +// GetTasksParamsType defines parameters for GetTasks. +type GetTasksParamsType string + +// PostTasksJSONBody defines parameters for PostTasks. +type PostTasksJSONBody TaskCreateRequest + +// PostTasksParams defines parameters for PostTasks. +type PostTasksParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteTasksIDParams defines parameters for DeleteTasksID. +type DeleteTasksIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetTasksIDParams defines parameters for GetTasksID. +type GetTasksIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchTasksIDJSONBody defines parameters for PatchTasksID. +type PatchTasksIDJSONBody TaskUpdateRequest + +// PatchTasksIDParams defines parameters for PatchTasksID. +type PatchTasksIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetTasksIDLabelsParams defines parameters for GetTasksIDLabels. +type GetTasksIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostTasksIDLabelsJSONBody defines parameters for PostTasksIDLabels. +type PostTasksIDLabelsJSONBody LabelMapping + +// PostTasksIDLabelsParams defines parameters for PostTasksIDLabels. +type PostTasksIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteTasksIDLabelsIDParams defines parameters for DeleteTasksIDLabelsID. +type DeleteTasksIDLabelsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetTasksIDLogsParams defines parameters for GetTasksIDLogs. +type GetTasksIDLogsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetTasksIDMembersParams defines parameters for GetTasksIDMembers. +type GetTasksIDMembersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostTasksIDMembersJSONBody defines parameters for PostTasksIDMembers. +type PostTasksIDMembersJSONBody AddResourceMemberRequestBody + +// PostTasksIDMembersParams defines parameters for PostTasksIDMembers. +type PostTasksIDMembersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteTasksIDMembersIDParams defines parameters for DeleteTasksIDMembersID. +type DeleteTasksIDMembersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetTasksIDOwnersParams defines parameters for GetTasksIDOwners. +type GetTasksIDOwnersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostTasksIDOwnersJSONBody defines parameters for PostTasksIDOwners. +type PostTasksIDOwnersJSONBody AddResourceMemberRequestBody + +// PostTasksIDOwnersParams defines parameters for PostTasksIDOwners. 
+type PostTasksIDOwnersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteTasksIDOwnersIDParams defines parameters for DeleteTasksIDOwnersID. +type DeleteTasksIDOwnersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetTasksIDRunsParams defines parameters for GetTasksIDRuns. +type GetTasksIDRunsParams struct { + // Returns runs after a specific ID. + After *string `json:"after,omitempty"` + + // The number of runs to return + Limit *int `json:"limit,omitempty"` + + // Filter runs to those scheduled after this time, RFC3339 + AfterTime *time.Time `json:"afterTime,omitempty"` + + // Filter runs to those scheduled before this time, RFC3339 + BeforeTime *time.Time `json:"beforeTime,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostTasksIDRunsJSONBody defines parameters for PostTasksIDRuns. +type PostTasksIDRunsJSONBody RunManually + +// PostTasksIDRunsParams defines parameters for PostTasksIDRuns. +type PostTasksIDRunsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteTasksIDRunsIDParams defines parameters for DeleteTasksIDRunsID. +type DeleteTasksIDRunsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetTasksIDRunsIDParams defines parameters for GetTasksIDRunsID. +type GetTasksIDRunsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetTasksIDRunsIDLogsParams defines parameters for GetTasksIDRunsIDLogs. +type GetTasksIDRunsIDLogsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostTasksIDRunsIDRetryParams defines parameters for PostTasksIDRunsIDRetry. +type PostTasksIDRunsIDRetryParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetTelegrafPluginsParams defines parameters for GetTelegrafPlugins. +type GetTelegrafPluginsParams struct { + // The type of plugin desired. + Type *string `json:"type,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetTelegrafsParams defines parameters for GetTelegrafs. +type GetTelegrafsParams struct { + // The organization ID the Telegraf config belongs to. + OrgID *string `json:"orgID,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostTelegrafsJSONBody defines parameters for PostTelegrafs. +type PostTelegrafsJSONBody TelegrafPluginRequest + +// PostTelegrafsParams defines parameters for PostTelegrafs. +type PostTelegrafsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteTelegrafsIDParams defines parameters for DeleteTelegrafsID. +type DeleteTelegrafsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetTelegrafsIDParams defines parameters for GetTelegrafsID. +type GetTelegrafsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` + Accept *GetTelegrafsIDParamsAccept `json:"Accept,omitempty"` +} + +// GetTelegrafsIDParamsAccept defines parameters for GetTelegrafsID. 
+type GetTelegrafsIDParamsAccept string + +// PutTelegrafsIDJSONBody defines parameters for PutTelegrafsID. +type PutTelegrafsIDJSONBody TelegrafPluginRequest + +// PutTelegrafsIDParams defines parameters for PutTelegrafsID. +type PutTelegrafsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetTelegrafsIDLabelsParams defines parameters for GetTelegrafsIDLabels. +type GetTelegrafsIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostTelegrafsIDLabelsJSONBody defines parameters for PostTelegrafsIDLabels. +type PostTelegrafsIDLabelsJSONBody LabelMapping + +// PostTelegrafsIDLabelsParams defines parameters for PostTelegrafsIDLabels. +type PostTelegrafsIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteTelegrafsIDLabelsIDParams defines parameters for DeleteTelegrafsIDLabelsID. +type DeleteTelegrafsIDLabelsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetTelegrafsIDMembersParams defines parameters for GetTelegrafsIDMembers. +type GetTelegrafsIDMembersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostTelegrafsIDMembersJSONBody defines parameters for PostTelegrafsIDMembers. +type PostTelegrafsIDMembersJSONBody AddResourceMemberRequestBody + +// PostTelegrafsIDMembersParams defines parameters for PostTelegrafsIDMembers. +type PostTelegrafsIDMembersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteTelegrafsIDMembersIDParams defines parameters for DeleteTelegrafsIDMembersID. +type DeleteTelegrafsIDMembersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetTelegrafsIDOwnersParams defines parameters for GetTelegrafsIDOwners. +type GetTelegrafsIDOwnersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostTelegrafsIDOwnersJSONBody defines parameters for PostTelegrafsIDOwners. +type PostTelegrafsIDOwnersJSONBody AddResourceMemberRequestBody + +// PostTelegrafsIDOwnersParams defines parameters for PostTelegrafsIDOwners. +type PostTelegrafsIDOwnersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteTelegrafsIDOwnersIDParams defines parameters for DeleteTelegrafsIDOwnersID. +type DeleteTelegrafsIDOwnersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// ApplyTemplateJSONBody defines parameters for ApplyTemplate. +type ApplyTemplateJSONBody TemplateApply + +// ExportTemplateJSONBody defines parameters for ExportTemplate. +type ExportTemplateJSONBody interface{} + +// GetUsersParams defines parameters for GetUsers. +type GetUsersParams struct { + Offset *Offset `json:"offset,omitempty"` + Limit *Limit `json:"limit,omitempty"` + + // Resource ID to seek from. Results are not inclusive of this ID. Use `after` instead of `offset`. + After *After `json:"after,omitempty"` + Name *string `json:"name,omitempty"` + Id *string `json:"id,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostUsersJSONBody defines parameters for PostUsers. 
+type PostUsersJSONBody User + +// PostUsersParams defines parameters for PostUsers. +type PostUsersParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteUsersIDParams defines parameters for DeleteUsersID. +type DeleteUsersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetUsersIDParams defines parameters for GetUsersID. +type GetUsersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchUsersIDJSONBody defines parameters for PatchUsersID. +type PatchUsersIDJSONBody User + +// PatchUsersIDParams defines parameters for PatchUsersID. +type PatchUsersIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostUsersIDPasswordJSONBody defines parameters for PostUsersIDPassword. +type PostUsersIDPasswordJSONBody PasswordResetBody + +// PostUsersIDPasswordParams defines parameters for PostUsersIDPassword. +type PostUsersIDPasswordParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetVariablesParams defines parameters for GetVariables. +type GetVariablesParams struct { + // The name of the organization. + Org *string `json:"org,omitempty"` + + // The organization ID. + OrgID *string `json:"orgID,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostVariablesJSONBody defines parameters for PostVariables. +type PostVariablesJSONBody Variable + +// PostVariablesParams defines parameters for PostVariables. +type PostVariablesParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteVariablesIDParams defines parameters for DeleteVariablesID. +type DeleteVariablesIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetVariablesIDParams defines parameters for GetVariablesID. +type GetVariablesIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PatchVariablesIDJSONBody defines parameters for PatchVariablesID. +type PatchVariablesIDJSONBody Variable + +// PatchVariablesIDParams defines parameters for PatchVariablesID. +type PatchVariablesIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PutVariablesIDJSONBody defines parameters for PutVariablesID. +type PutVariablesIDJSONBody Variable + +// PutVariablesIDParams defines parameters for PutVariablesID. +type PutVariablesIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// GetVariablesIDLabelsParams defines parameters for GetVariablesIDLabels. +type GetVariablesIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostVariablesIDLabelsJSONBody defines parameters for PostVariablesIDLabels. +type PostVariablesIDLabelsJSONBody LabelMapping + +// PostVariablesIDLabelsParams defines parameters for PostVariablesIDLabels. +type PostVariablesIDLabelsParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// DeleteVariablesIDLabelsIDParams defines parameters for DeleteVariablesIDLabelsID. 
+type DeleteVariablesIDLabelsIDParams struct { + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` +} + +// PostWriteParams defines parameters for PostWrite. +type PostWriteParams struct { + // The parameter value specifies the destination organization for writes. The database writes all points in the batch to this organization. If you provide both `orgID` and `org` parameters, `org` takes precedence. + Org string `json:"org"` + + // The parameter value specifies the ID of the destination organization for writes. If both `orgID` and `org` are specified, `org` takes precedence. + OrgID *string `json:"orgID,omitempty"` + + // The destination bucket for writes. + Bucket string `json:"bucket"` + + // The precision for the unix timestamps within the body line-protocol. + Precision *WritePrecision `json:"precision,omitempty"` + + // OpenTracing span context + ZapTraceSpan *TraceSpan `json:"Zap-Trace-Span,omitempty"` + + // The value tells InfluxDB what compression is applied to the line protocol in the request payload. + // To make an API request with a GZIP payload, send `Content-Encoding: gzip` as a request header. + ContentEncoding *PostWriteParamsContentEncoding `json:"Content-Encoding,omitempty"` + + // The header value indicates the format of the data in the request body. + ContentType *PostWriteParamsContentType `json:"Content-Type,omitempty"` + + // The header value indicates the size of the entity-body, in bytes, sent to the database. If the length is greater than the database's `max body` configuration option, the server responds with status code `413`. + ContentLength *int `json:"Content-Length,omitempty"` + + // The header value specifies the response format. + Accept *PostWriteParamsAccept `json:"Accept,omitempty"` +} + +// PostWriteParamsContentEncoding defines parameters for PostWrite. +type PostWriteParamsContentEncoding string + +// PostWriteParamsContentType defines parameters for PostWrite. +type PostWriteParamsContentType string + +// PostWriteParamsAccept defines parameters for PostWrite. +type PostWriteParamsAccept string + +// PostAuthorizationsJSONRequestBody defines body for PostAuthorizations for application/json ContentType. +type PostAuthorizationsJSONRequestBody PostAuthorizationsJSONBody + +// PatchAuthorizationsIDJSONRequestBody defines body for PatchAuthorizationsID for application/json ContentType. +type PatchAuthorizationsIDJSONRequestBody PatchAuthorizationsIDJSONBody + +// PostBucketsJSONRequestBody defines body for PostBuckets for application/json ContentType. +type PostBucketsJSONRequestBody PostBucketsJSONBody + +// PatchBucketsIDJSONRequestBody defines body for PatchBucketsID for application/json ContentType. +type PatchBucketsIDJSONRequestBody PatchBucketsIDJSONBody + +// PostBucketsIDLabelsJSONRequestBody defines body for PostBucketsIDLabels for application/json ContentType. +type PostBucketsIDLabelsJSONRequestBody PostBucketsIDLabelsJSONBody + +// PostBucketsIDMembersJSONRequestBody defines body for PostBucketsIDMembers for application/json ContentType. +type PostBucketsIDMembersJSONRequestBody PostBucketsIDMembersJSONBody + +// PostBucketsIDOwnersJSONRequestBody defines body for PostBucketsIDOwners for application/json ContentType. +type PostBucketsIDOwnersJSONRequestBody PostBucketsIDOwnersJSONBody + +// CreateCheckJSONRequestBody defines body for CreateCheck for application/json ContentType. 
+type CreateCheckJSONRequestBody CreateCheckJSONBody + +// PatchChecksIDJSONRequestBody defines body for PatchChecksID for application/json ContentType. +type PatchChecksIDJSONRequestBody PatchChecksIDJSONBody + +// PutChecksIDJSONRequestBody defines body for PutChecksID for application/json ContentType. +type PutChecksIDJSONRequestBody PutChecksIDJSONBody + +// PostChecksIDLabelsJSONRequestBody defines body for PostChecksIDLabels for application/json ContentType. +type PostChecksIDLabelsJSONRequestBody PostChecksIDLabelsJSONBody + +// PostDashboardsJSONRequestBody defines body for PostDashboards for application/json ContentType. +type PostDashboardsJSONRequestBody PostDashboardsJSONBody + +// PatchDashboardsIDJSONRequestBody defines body for PatchDashboardsID for application/json ContentType. +type PatchDashboardsIDJSONRequestBody PatchDashboardsIDJSONBody + +// PostDashboardsIDCellsJSONRequestBody defines body for PostDashboardsIDCells for application/json ContentType. +type PostDashboardsIDCellsJSONRequestBody PostDashboardsIDCellsJSONBody + +// PutDashboardsIDCellsJSONRequestBody defines body for PutDashboardsIDCells for application/json ContentType. +type PutDashboardsIDCellsJSONRequestBody PutDashboardsIDCellsJSONBody + +// PatchDashboardsIDCellsIDJSONRequestBody defines body for PatchDashboardsIDCellsID for application/json ContentType. +type PatchDashboardsIDCellsIDJSONRequestBody PatchDashboardsIDCellsIDJSONBody + +// PatchDashboardsIDCellsIDViewJSONRequestBody defines body for PatchDashboardsIDCellsIDView for application/json ContentType. +type PatchDashboardsIDCellsIDViewJSONRequestBody PatchDashboardsIDCellsIDViewJSONBody + +// PostDashboardsIDLabelsJSONRequestBody defines body for PostDashboardsIDLabels for application/json ContentType. +type PostDashboardsIDLabelsJSONRequestBody PostDashboardsIDLabelsJSONBody + +// PostDashboardsIDMembersJSONRequestBody defines body for PostDashboardsIDMembers for application/json ContentType. +type PostDashboardsIDMembersJSONRequestBody PostDashboardsIDMembersJSONBody + +// PostDashboardsIDOwnersJSONRequestBody defines body for PostDashboardsIDOwners for application/json ContentType. +type PostDashboardsIDOwnersJSONRequestBody PostDashboardsIDOwnersJSONBody + +// PostDBRPJSONRequestBody defines body for PostDBRP for application/json ContentType. +type PostDBRPJSONRequestBody PostDBRPJSONBody + +// PatchDBRPIDJSONRequestBody defines body for PatchDBRPID for application/json ContentType. +type PatchDBRPIDJSONRequestBody PatchDBRPIDJSONBody + +// PostDeleteJSONRequestBody defines body for PostDelete for application/json ContentType. +type PostDeleteJSONRequestBody PostDeleteJSONBody + +// PostLabelsJSONRequestBody defines body for PostLabels for application/json ContentType. +type PostLabelsJSONRequestBody PostLabelsJSONBody + +// PatchLabelsIDJSONRequestBody defines body for PatchLabelsID for application/json ContentType. +type PatchLabelsIDJSONRequestBody PatchLabelsIDJSONBody + +// PostLegacyAuthorizationsJSONRequestBody defines body for PostLegacyAuthorizations for application/json ContentType. +type PostLegacyAuthorizationsJSONRequestBody PostLegacyAuthorizationsJSONBody + +// PatchLegacyAuthorizationsIDJSONRequestBody defines body for PatchLegacyAuthorizationsID for application/json ContentType. +type PatchLegacyAuthorizationsIDJSONRequestBody PatchLegacyAuthorizationsIDJSONBody + +// PostLegacyAuthorizationsIDPasswordJSONRequestBody defines body for PostLegacyAuthorizationsIDPassword for application/json ContentType. 
+type PostLegacyAuthorizationsIDPasswordJSONRequestBody PostLegacyAuthorizationsIDPasswordJSONBody + +// PutMePasswordJSONRequestBody defines body for PutMePassword for application/json ContentType. +type PutMePasswordJSONRequestBody PutMePasswordJSONBody + +// CreateNotificationEndpointJSONRequestBody defines body for CreateNotificationEndpoint for application/json ContentType. +type CreateNotificationEndpointJSONRequestBody CreateNotificationEndpointJSONBody + +// PatchNotificationEndpointsIDJSONRequestBody defines body for PatchNotificationEndpointsID for application/json ContentType. +type PatchNotificationEndpointsIDJSONRequestBody PatchNotificationEndpointsIDJSONBody + +// PutNotificationEndpointsIDJSONRequestBody defines body for PutNotificationEndpointsID for application/json ContentType. +type PutNotificationEndpointsIDJSONRequestBody PutNotificationEndpointsIDJSONBody + +// PostNotificationEndpointIDLabelsJSONRequestBody defines body for PostNotificationEndpointIDLabels for application/json ContentType. +type PostNotificationEndpointIDLabelsJSONRequestBody PostNotificationEndpointIDLabelsJSONBody + +// CreateNotificationRuleJSONRequestBody defines body for CreateNotificationRule for application/json ContentType. +type CreateNotificationRuleJSONRequestBody CreateNotificationRuleJSONBody + +// PatchNotificationRulesIDJSONRequestBody defines body for PatchNotificationRulesID for application/json ContentType. +type PatchNotificationRulesIDJSONRequestBody PatchNotificationRulesIDJSONBody + +// PutNotificationRulesIDJSONRequestBody defines body for PutNotificationRulesID for application/json ContentType. +type PutNotificationRulesIDJSONRequestBody PutNotificationRulesIDJSONBody + +// PostNotificationRuleIDLabelsJSONRequestBody defines body for PostNotificationRuleIDLabels for application/json ContentType. +type PostNotificationRuleIDLabelsJSONRequestBody PostNotificationRuleIDLabelsJSONBody + +// PostOrgsJSONRequestBody defines body for PostOrgs for application/json ContentType. +type PostOrgsJSONRequestBody PostOrgsJSONBody + +// PatchOrgsIDJSONRequestBody defines body for PatchOrgsID for application/json ContentType. +type PatchOrgsIDJSONRequestBody PatchOrgsIDJSONBody + +// PostOrgsIDMembersJSONRequestBody defines body for PostOrgsIDMembers for application/json ContentType. +type PostOrgsIDMembersJSONRequestBody PostOrgsIDMembersJSONBody + +// PostOrgsIDOwnersJSONRequestBody defines body for PostOrgsIDOwners for application/json ContentType. +type PostOrgsIDOwnersJSONRequestBody PostOrgsIDOwnersJSONBody + +// PatchOrgsIDSecretsJSONRequestBody defines body for PatchOrgsIDSecrets for application/json ContentType. +type PatchOrgsIDSecretsJSONRequestBody PatchOrgsIDSecretsJSONBody + +// PostOrgsIDSecretsJSONRequestBody defines body for PostOrgsIDSecrets for application/json ContentType. +type PostOrgsIDSecretsJSONRequestBody PostOrgsIDSecretsJSONBody + +// PostQueryJSONRequestBody defines body for PostQuery for application/json ContentType. +type PostQueryJSONRequestBody PostQueryJSONBody + +// PostQueryAnalyzeJSONRequestBody defines body for PostQueryAnalyze for application/json ContentType. +type PostQueryAnalyzeJSONRequestBody PostQueryAnalyzeJSONBody + +// PostQueryAstJSONRequestBody defines body for PostQueryAst for application/json ContentType. +type PostQueryAstJSONRequestBody PostQueryAstJSONBody + +// PostRemoteConnectionJSONRequestBody defines body for PostRemoteConnection for application/json ContentType. 
+type PostRemoteConnectionJSONRequestBody PostRemoteConnectionJSONBody + +// PatchRemoteConnectionByIDJSONRequestBody defines body for PatchRemoteConnectionByID for application/json ContentType. +type PatchRemoteConnectionByIDJSONRequestBody PatchRemoteConnectionByIDJSONBody + +// PostReplicationJSONRequestBody defines body for PostReplication for application/json ContentType. +type PostReplicationJSONRequestBody PostReplicationJSONBody + +// PatchReplicationByIDJSONRequestBody defines body for PatchReplicationByID for application/json ContentType. +type PatchReplicationByIDJSONRequestBody PatchReplicationByIDJSONBody + +// PostRestoreBucketMetadataJSONRequestBody defines body for PostRestoreBucketMetadata for application/json ContentType. +type PostRestoreBucketMetadataJSONRequestBody PostRestoreBucketMetadataJSONBody + +// PostScrapersJSONRequestBody defines body for PostScrapers for application/json ContentType. +type PostScrapersJSONRequestBody PostScrapersJSONBody + +// PatchScrapersIDJSONRequestBody defines body for PatchScrapersID for application/json ContentType. +type PatchScrapersIDJSONRequestBody PatchScrapersIDJSONBody + +// PostScrapersIDLabelsJSONRequestBody defines body for PostScrapersIDLabels for application/json ContentType. +type PostScrapersIDLabelsJSONRequestBody PostScrapersIDLabelsJSONBody + +// PostScrapersIDMembersJSONRequestBody defines body for PostScrapersIDMembers for application/json ContentType. +type PostScrapersIDMembersJSONRequestBody PostScrapersIDMembersJSONBody + +// PostScrapersIDOwnersJSONRequestBody defines body for PostScrapersIDOwners for application/json ContentType. +type PostScrapersIDOwnersJSONRequestBody PostScrapersIDOwnersJSONBody + +// PostSetupJSONRequestBody defines body for PostSetup for application/json ContentType. +type PostSetupJSONRequestBody PostSetupJSONBody + +// PostSourcesJSONRequestBody defines body for PostSources for application/json ContentType. +type PostSourcesJSONRequestBody PostSourcesJSONBody + +// PatchSourcesIDJSONRequestBody defines body for PatchSourcesID for application/json ContentType. +type PatchSourcesIDJSONRequestBody PatchSourcesIDJSONBody + +// CreateStackJSONRequestBody defines body for CreateStack for application/json ContentType. +type CreateStackJSONRequestBody CreateStackJSONBody + +// UpdateStackJSONRequestBody defines body for UpdateStack for application/json ContentType. +type UpdateStackJSONRequestBody UpdateStackJSONBody + +// PostTasksJSONRequestBody defines body for PostTasks for application/json ContentType. +type PostTasksJSONRequestBody PostTasksJSONBody + +// PatchTasksIDJSONRequestBody defines body for PatchTasksID for application/json ContentType. +type PatchTasksIDJSONRequestBody PatchTasksIDJSONBody + +// PostTasksIDLabelsJSONRequestBody defines body for PostTasksIDLabels for application/json ContentType. +type PostTasksIDLabelsJSONRequestBody PostTasksIDLabelsJSONBody + +// PostTasksIDMembersJSONRequestBody defines body for PostTasksIDMembers for application/json ContentType. +type PostTasksIDMembersJSONRequestBody PostTasksIDMembersJSONBody + +// PostTasksIDOwnersJSONRequestBody defines body for PostTasksIDOwners for application/json ContentType. +type PostTasksIDOwnersJSONRequestBody PostTasksIDOwnersJSONBody + +// PostTasksIDRunsJSONRequestBody defines body for PostTasksIDRuns for application/json ContentType. +type PostTasksIDRunsJSONRequestBody PostTasksIDRunsJSONBody + +// PostTelegrafsJSONRequestBody defines body for PostTelegrafs for application/json ContentType. 
+type PostTelegrafsJSONRequestBody PostTelegrafsJSONBody + +// PutTelegrafsIDJSONRequestBody defines body for PutTelegrafsID for application/json ContentType. +type PutTelegrafsIDJSONRequestBody PutTelegrafsIDJSONBody + +// PostTelegrafsIDLabelsJSONRequestBody defines body for PostTelegrafsIDLabels for application/json ContentType. +type PostTelegrafsIDLabelsJSONRequestBody PostTelegrafsIDLabelsJSONBody + +// PostTelegrafsIDMembersJSONRequestBody defines body for PostTelegrafsIDMembers for application/json ContentType. +type PostTelegrafsIDMembersJSONRequestBody PostTelegrafsIDMembersJSONBody + +// PostTelegrafsIDOwnersJSONRequestBody defines body for PostTelegrafsIDOwners for application/json ContentType. +type PostTelegrafsIDOwnersJSONRequestBody PostTelegrafsIDOwnersJSONBody + +// ApplyTemplateJSONRequestBody defines body for ApplyTemplate for application/json ContentType. +type ApplyTemplateJSONRequestBody ApplyTemplateJSONBody + +// ExportTemplateJSONRequestBody defines body for ExportTemplate for application/json ContentType. +type ExportTemplateJSONRequestBody ExportTemplateJSONBody + +// PostUsersJSONRequestBody defines body for PostUsers for application/json ContentType. +type PostUsersJSONRequestBody PostUsersJSONBody + +// PatchUsersIDJSONRequestBody defines body for PatchUsersID for application/json ContentType. +type PatchUsersIDJSONRequestBody PatchUsersIDJSONBody + +// PostUsersIDPasswordJSONRequestBody defines body for PostUsersIDPassword for application/json ContentType. +type PostUsersIDPasswordJSONRequestBody PostUsersIDPasswordJSONBody + +// PostVariablesJSONRequestBody defines body for PostVariables for application/json ContentType. +type PostVariablesJSONRequestBody PostVariablesJSONBody + +// PatchVariablesIDJSONRequestBody defines body for PatchVariablesID for application/json ContentType. +type PatchVariablesIDJSONRequestBody PatchVariablesIDJSONBody + +// PutVariablesIDJSONRequestBody defines body for PutVariablesID for application/json ContentType. +type PutVariablesIDJSONRequestBody PutVariablesIDJSONBody + +// PostVariablesIDLabelsJSONRequestBody defines body for PostVariablesIDLabels for application/json ContentType. +type PostVariablesIDLabelsJSONRequestBody PostVariablesIDLabelsJSONBody + +// Getter for additional properties for ColorMapping. 
Returns the specified +// element and whether it was found +func (a ColorMapping) Get(fieldName string) (value string, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for ColorMapping +func (a *ColorMapping) Set(fieldName string, value string) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]string) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for ColorMapping to handle AdditionalProperties +func (a *ColorMapping) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]string) + for fieldName, fieldBuf := range object { + var fieldVal string + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return errors.Wrap(err, fmt.Sprintf("error unmarshaling field %s", fieldName)) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for ColorMapping to handle AdditionalProperties +func (a ColorMapping) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("error marshaling '%s'", fieldName)) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for Flags. Returns the specified +// element and whether it was found +func (a Flags) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for Flags +func (a *Flags) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for Flags to handle AdditionalProperties +func (a *Flags) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return errors.Wrap(err, fmt.Sprintf("error unmarshaling field %s", fieldName)) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for Flags to handle AdditionalProperties +func (a Flags) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("error marshaling '%s'", fieldName)) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for FluxSuggestion_Params. 
Returns the specified +// element and whether it was found +func (a FluxSuggestion_Params) Get(fieldName string) (value string, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for FluxSuggestion_Params +func (a *FluxSuggestion_Params) Set(fieldName string, value string) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]string) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for FluxSuggestion_Params to handle AdditionalProperties +func (a *FluxSuggestion_Params) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]string) + for fieldName, fieldBuf := range object { + var fieldVal string + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return errors.Wrap(err, fmt.Sprintf("error unmarshaling field %s", fieldName)) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for FluxSuggestion_Params to handle AdditionalProperties +func (a FluxSuggestion_Params) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("error marshaling '%s'", fieldName)) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for HTTPNotificationEndpoint_Headers. Returns the specified +// element and whether it was found +func (a HTTPNotificationEndpoint_Headers) Get(fieldName string) (value string, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for HTTPNotificationEndpoint_Headers +func (a *HTTPNotificationEndpoint_Headers) Set(fieldName string, value string) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]string) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for HTTPNotificationEndpoint_Headers to handle AdditionalProperties +func (a *HTTPNotificationEndpoint_Headers) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]string) + for fieldName, fieldBuf := range object { + var fieldVal string + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return errors.Wrap(err, fmt.Sprintf("error unmarshaling field %s", fieldName)) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for HTTPNotificationEndpoint_Headers to handle AdditionalProperties +func (a HTTPNotificationEndpoint_Headers) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("error marshaling '%s'", fieldName)) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for Label_Properties. 
Returns the specified +// element and whether it was found +func (a Label_Properties) Get(fieldName string) (value string, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for Label_Properties +func (a *Label_Properties) Set(fieldName string, value string) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]string) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for Label_Properties to handle AdditionalProperties +func (a *Label_Properties) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]string) + for fieldName, fieldBuf := range object { + var fieldVal string + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return errors.Wrap(err, fmt.Sprintf("error unmarshaling field %s", fieldName)) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for Label_Properties to handle AdditionalProperties +func (a Label_Properties) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("error marshaling '%s'", fieldName)) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for LabelCreateRequest_Properties. Returns the specified +// element and whether it was found +func (a LabelCreateRequest_Properties) Get(fieldName string) (value string, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for LabelCreateRequest_Properties +func (a *LabelCreateRequest_Properties) Set(fieldName string, value string) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]string) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for LabelCreateRequest_Properties to handle AdditionalProperties +func (a *LabelCreateRequest_Properties) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]string) + for fieldName, fieldBuf := range object { + var fieldVal string + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return errors.Wrap(err, fmt.Sprintf("error unmarshaling field %s", fieldName)) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for LabelCreateRequest_Properties to handle AdditionalProperties +func (a LabelCreateRequest_Properties) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("error marshaling '%s'", fieldName)) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for LabelUpdate_Properties. 
Returns the specified +// element and whether it was found +func (a LabelUpdate_Properties) Get(fieldName string) (value string, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for LabelUpdate_Properties +func (a *LabelUpdate_Properties) Set(fieldName string, value string) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]string) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for LabelUpdate_Properties to handle AdditionalProperties +func (a *LabelUpdate_Properties) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]string) + for fieldName, fieldBuf := range object { + var fieldVal string + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return errors.Wrap(err, fmt.Sprintf("error unmarshaling field %s", fieldName)) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for LabelUpdate_Properties to handle AdditionalProperties +func (a LabelUpdate_Properties) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("error marshaling '%s'", fieldName)) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for MapVariableProperties_Values. Returns the specified +// element and whether it was found +func (a MapVariableProperties_Values) Get(fieldName string) (value string, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for MapVariableProperties_Values +func (a *MapVariableProperties_Values) Set(fieldName string, value string) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]string) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for MapVariableProperties_Values to handle AdditionalProperties +func (a *MapVariableProperties_Values) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]string) + for fieldName, fieldBuf := range object { + var fieldVal string + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return errors.Wrap(err, fmt.Sprintf("error unmarshaling field %s", fieldName)) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for MapVariableProperties_Values to handle AdditionalProperties +func (a MapVariableProperties_Values) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("error marshaling '%s'", fieldName)) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for Query_Params. 
Returns the specified +// element and whether it was found +func (a Query_Params) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for Query_Params +func (a *Query_Params) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for Query_Params to handle AdditionalProperties +func (a *Query_Params) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return errors.Wrap(err, fmt.Sprintf("error unmarshaling field %s", fieldName)) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for Query_Params to handle AdditionalProperties +func (a Query_Params) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("error marshaling '%s'", fieldName)) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for Secrets. Returns the specified +// element and whether it was found +func (a Secrets) Get(fieldName string) (value string, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for Secrets +func (a *Secrets) Set(fieldName string, value string) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]string) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for Secrets to handle AdditionalProperties +func (a *Secrets) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]string) + for fieldName, fieldBuf := range object { + var fieldVal string + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return errors.Wrap(err, fmt.Sprintf("error unmarshaling field %s", fieldName)) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for Secrets to handle AdditionalProperties +func (a Secrets) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("error marshaling '%s'", fieldName)) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for TemplateApply_EnvRefs. 
Returns the specified +// element and whether it was found +func (a TemplateApply_EnvRefs) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for TemplateApply_EnvRefs +func (a *TemplateApply_EnvRefs) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for TemplateApply_EnvRefs to handle AdditionalProperties +func (a *TemplateApply_EnvRefs) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return errors.Wrap(err, fmt.Sprintf("error unmarshaling field %s", fieldName)) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for TemplateApply_EnvRefs to handle AdditionalProperties +func (a TemplateApply_EnvRefs) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("error marshaling '%s'", fieldName)) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for TemplateApply_Secrets. Returns the specified +// element and whether it was found +func (a TemplateApply_Secrets) Get(fieldName string) (value string, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for TemplateApply_Secrets +func (a *TemplateApply_Secrets) Set(fieldName string, value string) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]string) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for TemplateApply_Secrets to handle AdditionalProperties +func (a *TemplateApply_Secrets) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]string) + for fieldName, fieldBuf := range object { + var fieldVal string + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return errors.Wrap(err, fmt.Sprintf("error unmarshaling field %s", fieldName)) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for TemplateApply_Secrets to handle AdditionalProperties +func (a TemplateApply_Secrets) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("error marshaling '%s'", fieldName)) + } + } + return json.Marshal(object) +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/domain/utils.go b/vendor/github.com/influxdata/influxdb-client-go/v2/domain/utils.go new file mode 100644 index 0000000..c3a17c4 --- /dev/null +++ 
b/vendor/github.com/influxdata/influxdb-client-go/v2/domain/utils.go @@ -0,0 +1,21 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +package domain + +import ( + "github.com/influxdata/influxdb-client-go/v2/api/http" +) + +// ErrorToHTTPError creates http.Error from domain.Error +func ErrorToHTTPError(error *Error, statusCode int) *http.Error { + err := &http.Error{ + StatusCode: statusCode, + Code: string(error.Code), + } + if error.Message != nil { + err.Message = *error.Message + } + return err +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/internal/gzip/gzip.go b/vendor/github.com/influxdata/influxdb-client-go/v2/internal/gzip/gzip.go new file mode 100644 index 0000000..853aa38 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/internal/gzip/gzip.go @@ -0,0 +1,52 @@ +// Copyright 2020-2021 InfluxData, Inc.. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +// Package gzip provides GZip related functionality +package gzip + +import ( + "compress/gzip" + "io" + "sync" +) + +// ReadWaitCloser is ReadCloser that waits for finishing underlying reader +type ReadWaitCloser struct { + pipeReader *io.PipeReader + wg sync.WaitGroup +} + +// Close closes underlying reader and waits for finishing operations +func (r *ReadWaitCloser) Close() error { + err := r.pipeReader.Close() + r.wg.Wait() // wait for the gzip goroutine finish + return err +} + +// CompressWithGzip takes an io.Reader as input and pipes +// it through a gzip.Writer returning an io.Reader containing +// the gzipped data. +// An error is returned if passing data to the gzip.Writer fails +// this is shamelessly stolen from https://github.com/influxdata/telegraf +func CompressWithGzip(data io.Reader) (io.ReadCloser, error) { + pipeReader, pipeWriter := io.Pipe() + gzipWriter := gzip.NewWriter(pipeWriter) + + rc := &ReadWaitCloser{ + pipeReader: pipeReader, + } + + rc.wg.Add(1) + var err error + go func() { + _, err = io.Copy(gzipWriter, data) + gzipWriter.Close() + // subsequent reads from the read half of the pipe will + // return no bytes and the error err, or EOF if err is nil. + pipeWriter.CloseWithError(err) + rc.wg.Done() + }() + + return pipeReader, err +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/internal/http/userAgent.go b/vendor/github.com/influxdata/influxdb-client-go/v2/internal/http/userAgent.go new file mode 100644 index 0000000..0281bcb --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/internal/http/userAgent.go @@ -0,0 +1,9 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +// Package http hold internal HTTP related stuff +package http + +// UserAgent keeps once created User-Agent string +var UserAgent string diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/internal/log/logger.go b/vendor/github.com/influxdata/influxdb-client-go/v2/internal/log/logger.go new file mode 100644 index 0000000..66951a9 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/internal/log/logger.go @@ -0,0 +1,74 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. 
+ +// Package log provides internal logging infrastructure +package log + +import ( + ilog "github.com/influxdata/influxdb-client-go/v2/log" +) + +// Debugf writes formatted debug message to the Logger instance +func Debugf(format string, v ...interface{}) { + if ilog.Log != nil { + ilog.Log.Debugf(format, v...) + } +} + +// Debug writes debug message message to the Logger instance +func Debug(msg string) { + if ilog.Log != nil { + ilog.Log.Debug(msg) + } +} + +// Infof writes formatted info message to the Logger instance +func Infof(format string, v ...interface{}) { + if ilog.Log != nil { + ilog.Log.Infof(format, v...) + } +} + +// Info writes info message message to the Logger instance +func Info(msg string) { + if ilog.Log != nil { + ilog.Log.Info(msg) + } +} + +// Warnf writes formatted warning message to the Logger instance +func Warnf(format string, v ...interface{}) { + if ilog.Log != nil { + ilog.Log.Warnf(format, v...) + } +} + +// Warn writes warning message message to the Logger instance +func Warn(msg string) { + if ilog.Log != nil { + ilog.Log.Warn(msg) + } +} + +// Errorf writes formatted error message to the Logger instance +func Errorf(format string, v ...interface{}) { + if ilog.Log != nil { + ilog.Log.Errorf(format, v...) + } +} + +// Error writes error message message to the Logger instance +func Error(msg string) { + if ilog.Log != nil { + ilog.Log.Error(msg) + } +} + +// Level retrieves current logging level form the Logger instance +func Level() uint { + if ilog.Log != nil { + return ilog.Log.LogLevel() + } + return ilog.ErrorLevel +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/internal/write/queue.go b/vendor/github.com/influxdata/influxdb-client-go/v2/internal/write/queue.go new file mode 100644 index 0000000..2491e9f --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/internal/write/queue.go @@ -0,0 +1,50 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +package write + +import ( + "container/list" +) + +type queue struct { + list *list.List + limit int +} + +func newQueue(limit int) *queue { + return &queue{list: list.New(), limit: limit} +} +func (q *queue) push(batch *Batch) bool { + overWrite := false + if q.list.Len() == q.limit { + q.pop() + overWrite = true + } + q.list.PushBack(batch) + return overWrite +} + +func (q *queue) pop() *Batch { + el := q.list.Front() + if el != nil { + q.list.Remove(el) + batch := el.Value.(*Batch) + batch.Evicted = true + return batch + } + return nil +} + +func (q *queue) first() *Batch { + el := q.list.Front() + if el != nil { + return el.Value.(*Batch) + } + return nil +} + +func (q *queue) isEmpty() bool { + return q.list.Len() == 0 +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/internal/write/service.go b/vendor/github.com/influxdata/influxdb-client-go/v2/internal/write/service.go new file mode 100644 index 0000000..90c9710 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/internal/write/service.go @@ -0,0 +1,413 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. 
+ +// Package write provides service and its stuff +package write + +import ( + "bytes" + "context" + "fmt" + "io" + "math/rand" + "net/http" + "net/url" + "sort" + "strings" + "sync" + "time" + + http2 "github.com/influxdata/influxdb-client-go/v2/api/http" + "github.com/influxdata/influxdb-client-go/v2/api/write" + "github.com/influxdata/influxdb-client-go/v2/internal/gzip" + "github.com/influxdata/influxdb-client-go/v2/internal/log" + ilog "github.com/influxdata/influxdb-client-go/v2/log" + lp "github.com/influxdata/line-protocol" +) + +// Batch holds information for sending points batch +type Batch struct { + // lines to send + Batch string + // retry attempts so far + RetryAttempts uint + // true if it was removed from queue + Evicted bool + // time where this batch expires + Expires time.Time +} + +// NewBatch creates new batch +func NewBatch(data string, expireDelayMs uint) *Batch { + return &Batch{ + Batch: data, + Expires: time.Now().Add(time.Duration(expireDelayMs) * time.Millisecond), + } +} + +// BatchErrorCallback is synchronously notified in case non-blocking write fails. +// It returns true if WriteAPI should continue with retrying, false will discard the batch. +type BatchErrorCallback func(batch *Batch, error2 http2.Error) bool + +// Service is responsible for reliable writing of batches +type Service struct { + org string + bucket string + httpService http2.Service + url string + lastWriteAttempt time.Time + retryQueue *queue + lock sync.Mutex + writeOptions *write.Options + retryExponentialBase uint + errorCb BatchErrorCallback + retryDelay uint + retryAttempts uint +} + +// NewService creates new write service +func NewService(org string, bucket string, httpService http2.Service, options *write.Options) *Service { + + retryBufferLimit := options.RetryBufferLimit() / options.BatchSize() + if retryBufferLimit == 0 { + retryBufferLimit = 1 + } + u, _ := url.Parse(httpService.ServerAPIURL()) + u, _ = u.Parse("write") + params := u.Query() + params.Set("org", org) + params.Set("bucket", bucket) + params.Set("precision", precisionToString(options.Precision())) + if options.Consistency() != "" { + params.Set("consistency", string(options.Consistency())) + } + u.RawQuery = params.Encode() + writeURL := u.String() + return &Service{ + org: org, + bucket: bucket, + httpService: httpService, + url: writeURL, + writeOptions: options, + retryQueue: newQueue(int(retryBufferLimit)), + retryExponentialBase: 2, + retryDelay: options.RetryInterval(), + retryAttempts: 0, + } +} + +// SetBatchErrorCallback sets callback allowing custom handling of failed writes. +// If callback returns true, failed batch will be retried, otherwise discarded. +func (w *Service) SetBatchErrorCallback(cb BatchErrorCallback) { + w.errorCb = cb +} + +// HandleWrite handles writes of batches and handles retrying. +// Retrying is triggered by new writes, there is no scheduler. +// It first checks retry queue, cause it has highest priority. +// If there are some batches in retry queue, those are written and incoming batch is added to end of retry queue. +// Immediate write is allowed only in case there was success or not retryable error. +// Otherwise delay is checked based on recent batch. +// If write of batch fails with retryable error (connection errors and HTTP code >= 429), +// Batch retry time is calculated based on #of attempts. +// If writes continues failing and # of attempts reaches maximum or total retry time reaches maxRetryTime, +// batch is discarded. 
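+//
+// A rough worked example of the backoff, assuming the client's default
+// write.Options (5 s retry interval, exponential base 2; both configurable):
+// the first retry of a batch is scheduled after a random delay in roughly
+// [5 s, 10 s), the second in [10 s, 20 s), the third in [20 s, 40 s), and so
+// on, capped at the configured maximum retry interval, until the maximum
+// number of retries is reached or the batch expires.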
+func (w *Service) HandleWrite(ctx context.Context, batch *Batch) error { + log.Debug("Write proc: received write request") + batchToWrite := batch + retrying := false + for { + select { + case <-ctx.Done(): + log.Debug("Write proc: ctx cancelled req") + return ctx.Err() + default: + } + if !w.retryQueue.isEmpty() { + log.Debug("Write proc: taking batch from retry queue") + if !retrying { + b := w.retryQueue.first() + + // Discard batches at beginning of retryQueue that have already expired + if time.Now().After(b.Expires) { + log.Error("Write proc: oldest batch in retry queue expired, discarding") + if !b.Evicted { + w.retryQueue.pop() + } + + continue + } + + // Can we write? In case of retryable error we must wait a bit + if w.lastWriteAttempt.IsZero() || time.Now().After(w.lastWriteAttempt.Add(time.Millisecond*time.Duration(w.retryDelay))) { + retrying = true + } else { + log.Warn("Write proc: cannot write yet, storing batch to queue") + if w.retryQueue.push(batch) { + log.Error("Write proc: Retry buffer full, discarding oldest batch") + } + batchToWrite = nil + } + } + if retrying { + batchToWrite = w.retryQueue.first() + if batch != nil { //store actual batch to retry queue + if w.retryQueue.push(batch) { + log.Error("Write proc: Retry buffer full, discarding oldest batch") + } + batch = nil + } + } + } + // write batch + if batchToWrite != nil { + perror := w.WriteBatch(ctx, batchToWrite) + if perror != nil { + if isIgnorableError(perror) { + log.Warnf("Write error: %s", perror.Error()) + } else { + if w.writeOptions.MaxRetries() != 0 && (perror.StatusCode == 0 || perror.StatusCode >= http.StatusTooManyRequests) { + log.Errorf("Write error: %s, batch kept for retrying\n", perror.Error()) + if perror.RetryAfter > 0 { + w.retryDelay = perror.RetryAfter * 1000 + } else { + w.retryDelay = w.computeRetryDelay(w.retryAttempts) + } + if w.errorCb != nil && !w.errorCb(batchToWrite, *perror) { + log.Error("Callback rejected batch, discarding") + if !batchToWrite.Evicted { + w.retryQueue.pop() + } + return perror + } + // store new batch (not taken from queue) + if !batchToWrite.Evicted && batchToWrite != w.retryQueue.first() { + if w.retryQueue.push(batch) { + log.Error("Retry buffer full, discarding oldest batch") + } + } else if batchToWrite.RetryAttempts == w.writeOptions.MaxRetries() { + log.Error("Reached maximum number of retries, discarding batch") + if !batchToWrite.Evicted { + w.retryQueue.pop() + } + } + batchToWrite.RetryAttempts++ + w.retryAttempts++ + log.Debugf("Write proc: next wait for write is %dms\n", w.retryDelay) + } else { + log.Errorf("Write error: %s\n", perror.Error()) + } + return fmt.Errorf("write failed (attempts %d): %w", batchToWrite.RetryAttempts, perror) + } + } + + w.retryDelay = w.writeOptions.RetryInterval() + w.retryAttempts = 0 + if retrying && !batchToWrite.Evicted { + w.retryQueue.pop() + } + batchToWrite = nil + } else { + break + } + } + return nil +} + +// Non-retryable errors +const ( + errStringHintedHandoffNotEmpty = "hinted handoff queue not empty" + errStringPartialWrite = "partial write" + errStringPointsBeyondRP = "points beyond retention policy" + errStringUnableToParse = "unable to parse" +) + +func isIgnorableError(error *http2.Error) bool { + // This "error" is an informational message about the state of the + // InfluxDB cluster. 
+ if strings.Contains(error.Message, errStringHintedHandoffNotEmpty) { + return true + } + // Points beyond retention policy is returned when points are immediately + // discarded for being older than the retention policy. Usually this not + // a cause for concern, and we don't want to retry. + if strings.Contains(error.Message, errStringPointsBeyondRP) { + return true + } + // Other partial write errors, such as "field type conflict", are not + // correctable at this point and so the point is dropped instead of + // retrying. + if strings.Contains(error.Message, errStringPartialWrite) { + return true + } + // This error indicates an error in line protocol + // serialization, retries would not be successful. + if strings.Contains(error.Message, errStringUnableToParse) { + return true + } + return false +} + +// computeRetryDelay calculates retry delay +// Retry delay is calculated as random value within the interval +// [retry_interval * exponential_base^(attempts) and retry_interval * exponential_base^(attempts+1)] +func (w *Service) computeRetryDelay(attempts uint) uint { + minDelay := int(w.writeOptions.RetryInterval() * pow(w.writeOptions.ExponentialBase(), attempts)) + maxDelay := int(w.writeOptions.RetryInterval() * pow(w.writeOptions.ExponentialBase(), attempts+1)) + retryDelay := uint(rand.Intn(maxDelay-minDelay) + minDelay) + if retryDelay > w.writeOptions.MaxRetryInterval() { + retryDelay = w.writeOptions.MaxRetryInterval() + } + return retryDelay +} + +// pow computes x**y +func pow(x, y uint) uint { + p := uint(1) + if y == 0 { + return 1 + } + for i := uint(1); i <= y; i++ { + p = p * x + } + return p +} + +// WriteBatch performs actual writing via HTTP service +func (w *Service) WriteBatch(ctx context.Context, batch *Batch) *http2.Error { + var body io.Reader + var err error + body = strings.NewReader(batch.Batch) + + if log.Level() >= ilog.DebugLevel { + log.Debugf("Writing batch: %s", batch.Batch) + } + if w.writeOptions.UseGZip() { + body, err = gzip.CompressWithGzip(body) + if err != nil { + return http2.NewError(err) + } + } + w.lock.Lock() + w.lastWriteAttempt = time.Now() + w.lock.Unlock() + perror := w.httpService.DoPostRequest(ctx, w.url, body, func(req *http.Request) { + if w.writeOptions.UseGZip() { + req.Header.Set("Content-Encoding", "gzip") + } + }, func(r *http.Response) error { + return r.Body.Close() + }) + return perror +} + +// Flush sends batches from retry queue immediately, without retrying +func (w *Service) Flush() { + for !w.retryQueue.isEmpty() { + b := w.retryQueue.pop() + if time.Now().After(b.Expires) { + log.Error("Oldest batch in retry queue expired, discarding") + continue + } + if err := w.WriteBatch(context.Background(), b); err != nil { + log.Errorf("Error flushing batch from retry queue: %w", err.Unwrap()) + } + } +} + +// pointWithDefaultTags encapsulates Point with default tags +type pointWithDefaultTags struct { + point *write.Point + defaultTags map[string]string +} + +// Name returns the name of measurement of a point. +func (p *pointWithDefaultTags) Name() string { + return p.point.Name() +} + +// Time is the timestamp of a Point. +func (p *pointWithDefaultTags) Time() time.Time { + return p.point.Time() +} + +// FieldList returns a slice containing the fields of a Point. 
+func (p *pointWithDefaultTags) FieldList() []*lp.Field { + return p.point.FieldList() +} + +// TagList returns tags from point along with default tags +// If point of tag can override default tag +func (p *pointWithDefaultTags) TagList() []*lp.Tag { + tags := make([]*lp.Tag, 0, len(p.point.TagList())+len(p.defaultTags)) + tags = append(tags, p.point.TagList()...) + for k, v := range p.defaultTags { + if !existTag(p.point.TagList(), k) { + tags = append(tags, &lp.Tag{ + Key: k, + Value: v, + }) + } + } + sort.Slice(tags, func(i, j int) bool { return tags[i].Key < tags[j].Key }) + return tags +} + +func existTag(tags []*lp.Tag, key string) bool { + for _, tag := range tags { + if key == tag.Key { + return true + } + } + return false +} + +// EncodePoints creates line protocol string from points +func (w *Service) EncodePoints(points ...*write.Point) (string, error) { + var buffer bytes.Buffer + e := lp.NewEncoder(&buffer) + e.SetFieldTypeSupport(lp.UintSupport) + e.FailOnFieldErr(true) + e.SetPrecision(w.writeOptions.Precision()) + for _, point := range points { + _, err := e.Encode(w.pointToEncode(point)) + if err != nil { + return "", err + } + } + return buffer.String(), nil +} + +// pointToEncode determines whether default tags should be applied +// and returns point with default tags instead of point +func (w *Service) pointToEncode(point *write.Point) lp.Metric { + var m lp.Metric + if len(w.writeOptions.DefaultTags()) > 0 { + m = &pointWithDefaultTags{ + point: point, + defaultTags: w.writeOptions.DefaultTags(), + } + } else { + m = point + } + return m +} + +// WriteURL returns current write URL +func (w *Service) WriteURL() string { + return w.url +} + +func precisionToString(precision time.Duration) string { + prec := "ns" + switch precision { + case time.Microsecond: + prec = "us" + case time.Millisecond: + prec = "ms" + case time.Second: + prec = "s" + } + return prec +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/log/logger.go b/vendor/github.com/influxdata/influxdb-client-go/v2/log/logger.go new file mode 100644 index 0000000..4c9e61a --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/log/logger.go @@ -0,0 +1,133 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +// Package log defines Logging API. +// The global Log variable contains the actual logger. Set it to own implementation to override logging. Set it to nil to disable logging +package log + +import ( + "fmt" + "log" + "sync" +) + +// Log is the library wide logger. Setting to nil disables logging. +var Log Logger = &logger{logLevel: ErrorLevel, prefix: "influxdb2client"} + +// Log levels +const ( + ErrorLevel uint = iota + WarningLevel + InfoLevel + DebugLevel +) + +// Logger defines interface for logging +type Logger interface { + // Writes formatted debug message if debug logLevel is enabled. + Debugf(format string, v ...interface{}) + // Writes debug message if debug is enabled. + Debug(msg string) + // Writes formatted info message if info logLevel is enabled. + Infof(format string, v ...interface{}) + // Writes info message if info logLevel is enabled + Info(msg string) + // Writes formatted warning message if warning logLevel is enabled. + Warnf(format string, v ...interface{}) + // Writes warning message if warning logLevel is enabled. 
+ Warn(msg string) + // Writes formatted error message + Errorf(format string, v ...interface{}) + // Writes error message + Error(msg string) + // SetLogLevel sets allowed logging level. + SetLogLevel(logLevel uint) + // LogLevel retrieves current logging level + LogLevel() uint + // SetPrefix sets logging prefix. + SetPrefix(prefix string) +} + +// logger provides default implementation for Logger. It logs using Go log API +// mutex is needed in cases when multiple clients run concurrently +type logger struct { + prefix string + logLevel uint + lock sync.Mutex +} + +func (l *logger) SetLogLevel(logLevel uint) { + l.lock.Lock() + defer l.lock.Unlock() + l.logLevel = logLevel +} + +func (l *logger) LogLevel() uint { + l.lock.Lock() + defer l.lock.Unlock() + return l.logLevel +} + +func (l *logger) SetPrefix(prefix string) { + l.lock.Lock() + defer l.lock.Unlock() + l.prefix = prefix +} + +func (l *logger) Debugf(format string, v ...interface{}) { + l.lock.Lock() + defer l.lock.Unlock() + if l.logLevel >= DebugLevel { + log.Print(l.prefix, " D! ", fmt.Sprintf(format, v...)) + } +} +func (l *logger) Debug(msg string) { + l.lock.Lock() + defer l.lock.Unlock() + if l.logLevel >= DebugLevel { + log.Print(l.prefix, " D! ", msg) + } +} + +func (l *logger) Infof(format string, v ...interface{}) { + l.lock.Lock() + defer l.lock.Unlock() + if l.logLevel >= InfoLevel { + log.Print(l.prefix, " I! ", fmt.Sprintf(format, v...)) + } +} +func (l *logger) Info(msg string) { + l.lock.Lock() + defer l.lock.Unlock() + if l.logLevel >= DebugLevel { + log.Print(l.prefix, " I! ", msg) + } +} + +func (l *logger) Warnf(format string, v ...interface{}) { + l.lock.Lock() + defer l.lock.Unlock() + if l.logLevel >= WarningLevel { + log.Print(l.prefix, " W! ", fmt.Sprintf(format, v...)) + } +} +func (l *logger) Warn(msg string) { + l.lock.Lock() + defer l.lock.Unlock() + if l.logLevel >= WarningLevel { + log.Print(l.prefix, " W! ", msg) + } +} + +func (l *logger) Errorf(format string, v ...interface{}) { + l.lock.Lock() + defer l.lock.Unlock() + log.Print(l.prefix, " E! ", fmt.Sprintf(format, v...)) +} + +func (l *logger) Error(msg string) { + l.lock.Lock() + defer l.lock.Unlock() + log.Print(l.prefix, " E! ", msg) +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/options.go b/vendor/github.com/influxdata/influxdb-client-go/v2/options.go new file mode 100644 index 0000000..000bfc2 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/options.go @@ -0,0 +1,225 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +package influxdb2 + +import ( + "crypto/tls" + nethttp "net/http" + "time" + + "github.com/influxdata/influxdb-client-go/v2/api/http" + "github.com/influxdata/influxdb-client-go/v2/api/write" +) + +// Options holds configuration properties for communicating with InfluxDB server +type Options struct { + // LogLevel to filter log messages. Each level mean to log all categories bellow. 
0 error, 1 - warning, 2 - info, 3 - debug + logLevel uint + // Writing options + writeOptions *write.Options + // Http options + httpOptions *http.Options +} + +// BatchSize returns size of batch +func (o *Options) BatchSize() uint { + return o.WriteOptions().BatchSize() +} + +// SetBatchSize sets number of points sent in single request +func (o *Options) SetBatchSize(batchSize uint) *Options { + o.WriteOptions().SetBatchSize(batchSize) + return o +} + +// FlushInterval returns flush interval in ms +func (o *Options) FlushInterval() uint { + return o.WriteOptions().FlushInterval() +} + +// SetFlushInterval sets flush interval in ms in which is buffer flushed if it has not been already written +func (o *Options) SetFlushInterval(flushIntervalMs uint) *Options { + o.WriteOptions().SetFlushInterval(flushIntervalMs) + return o +} + +// RetryInterval returns the retry interval in ms +func (o *Options) RetryInterval() uint { + return o.WriteOptions().RetryInterval() +} + +// SetRetryInterval sets retry interval in ms, which is set if not sent by server +func (o *Options) SetRetryInterval(retryIntervalMs uint) *Options { + o.WriteOptions().SetRetryInterval(retryIntervalMs) + return o +} + +// MaxRetries returns maximum count of retry attempts of failed writes, default 5. +func (o *Options) MaxRetries() uint { + return o.WriteOptions().MaxRetries() +} + +// SetMaxRetries sets maximum count of retry attempts of failed writes. +// Setting zero value disables retry strategy. +func (o *Options) SetMaxRetries(maxRetries uint) *Options { + o.WriteOptions().SetMaxRetries(maxRetries) + return o +} + +// RetryBufferLimit returns retry buffer limit +func (o *Options) RetryBufferLimit() uint { + return o.WriteOptions().RetryBufferLimit() +} + +// SetRetryBufferLimit sets maximum number of points to keep for retry. Should be multiple of BatchSize. +func (o *Options) SetRetryBufferLimit(retryBufferLimit uint) *Options { + o.WriteOptions().SetRetryBufferLimit(retryBufferLimit) + return o +} + +// MaxRetryInterval returns the maximum delay between each retry attempt in milliseconds, default 125,000. +func (o *Options) MaxRetryInterval() uint { + return o.WriteOptions().MaxRetryInterval() +} + +// SetMaxRetryInterval sets the maximum delay between each retry attempt in millisecond. +func (o *Options) SetMaxRetryInterval(maxRetryIntervalMs uint) *Options { + o.WriteOptions().SetMaxRetryInterval(maxRetryIntervalMs) + return o +} + +// MaxRetryTime returns the maximum total retry timeout in millisecond, default 180,000. +func (o *Options) MaxRetryTime() uint { + return o.WriteOptions().MaxRetryTime() +} + +// SetMaxRetryTime sets the maximum total retry timeout in millisecond. +func (o *Options) SetMaxRetryTime(maxRetryTimeMs uint) *Options { + o.WriteOptions().SetMaxRetryTime(maxRetryTimeMs) + return o +} + +// ExponentialBase returns the base for the exponential retry delay. Default 2. +func (o *Options) ExponentialBase() uint { + return o.WriteOptions().ExponentialBase() +} + +// SetExponentialBase sets the base for the exponential retry delay. +func (o *Options) SetExponentialBase(exponentialBase uint) *Options { + o.WriteOptions().SetExponentialBase(exponentialBase) + return o +} + +// LogLevel returns log level +func (o *Options) LogLevel() uint { + return o.logLevel +} + +// SetLogLevel set level to filter log messages. Each level mean to log all categories bellow. Default is ErrorLevel. 
+// There are four level constant int the log package in this library: +// - ErrorLevel +// - WarningLevel +// - InfoLevel +// - DebugLevel +// The DebugLevel will print also content of writen batches, queries. +// The InfoLevel prints HTTP requests info, among others. +// Set log.Log to nil in order to completely disable logging. +func (o *Options) SetLogLevel(logLevel uint) *Options { + o.logLevel = logLevel + return o +} + +// Precision returns time precision for writes +func (o *Options) Precision() time.Duration { + return o.WriteOptions().Precision() +} + +// SetPrecision sets time precision to use in writes for timestamp. In unit of duration: time.Nanosecond, time.Microsecond, time.Millisecond, time.Second +func (o *Options) SetPrecision(precision time.Duration) *Options { + o.WriteOptions().SetPrecision(precision) + return o +} + +// UseGZip returns true if write request are gzip`ed +func (o *Options) UseGZip() bool { + return o.WriteOptions().UseGZip() +} + +// SetUseGZip specifies whether to use GZip compression in write requests. +func (o *Options) SetUseGZip(useGZip bool) *Options { + o.WriteOptions().SetUseGZip(useGZip) + return o +} + +// HTTPClient returns the http.Client that is configured to be used +// for HTTP requests. It will return the one that has been set using +// SetHTTPClient or it will construct a default client using the +// other configured options. +func (o *Options) HTTPClient() *nethttp.Client { + return o.httpOptions.HTTPClient() +} + +// SetHTTPClient will configure the http.Client that is used +// for HTTP requests. If set to nil, an HTTPClient will be +// generated. +// +// Setting the HTTPClient will cause the other HTTP options +// to be ignored. +// In case of UsersAPI.SignIn() is used, HTTPClient.Jar will be used for storing session cookie. +func (o *Options) SetHTTPClient(c *nethttp.Client) *Options { + o.httpOptions.SetHTTPClient(c) + return o +} + +// TLSConfig returns TLS config +func (o *Options) TLSConfig() *tls.Config { + return o.HTTPOptions().TLSConfig() +} + +// SetTLSConfig sets TLS configuration for secure connection +func (o *Options) SetTLSConfig(tlsConfig *tls.Config) *Options { + o.HTTPOptions().SetTLSConfig(tlsConfig) + return o +} + +// HTTPRequestTimeout returns HTTP request timeout +func (o *Options) HTTPRequestTimeout() uint { + return o.HTTPOptions().HTTPRequestTimeout() +} + +// SetHTTPRequestTimeout sets HTTP request timeout in sec +func (o *Options) SetHTTPRequestTimeout(httpRequestTimeout uint) *Options { + o.HTTPOptions().SetHTTPRequestTimeout(httpRequestTimeout) + return o +} + +// WriteOptions returns write related options +func (o *Options) WriteOptions() *write.Options { + if o.writeOptions == nil { + o.writeOptions = write.DefaultOptions() + } + return o.writeOptions +} + +// HTTPOptions returns HTTP related options +func (o *Options) HTTPOptions() *http.Options { + if o.httpOptions == nil { + o.httpOptions = http.DefaultOptions() + } + return o.httpOptions +} + +// AddDefaultTag adds a default tag. DefaultTags are added to each written point. +// If a tag with the same key already exist it is overwritten. 
+// If a point already defines such a tag, it is left unchanged +func (o *Options) AddDefaultTag(key, value string) *Options { + o.WriteOptions().AddDefaultTag(key, value) + return o +} + +// DefaultOptions returns Options object with default values +func DefaultOptions() *Options { + return &Options{logLevel: 0, writeOptions: write.DefaultOptions(), httpOptions: http.DefaultOptions()} +} diff --git a/vendor/github.com/influxdata/influxdb-client-go/v2/version.go b/vendor/github.com/influxdata/influxdb-client-go/v2/version.go new file mode 100644 index 0000000..f7cb779 --- /dev/null +++ b/vendor/github.com/influxdata/influxdb-client-go/v2/version.go @@ -0,0 +1,21 @@ +// Copyright 2020-2021 InfluxData, Inc. All rights reserved. +// Use of this source code is governed by MIT +// license that can be found in the LICENSE file. + +package influxdb2 + +import ( + "fmt" + "runtime" + + "github.com/influxdata/influxdb-client-go/v2/internal/http" +) + +const ( + // Version defines current version + Version = "2.10.0" +) + +func init() { + http.UserAgent = fmt.Sprintf("influxdb-client-go/%s (%s; %s)", Version, runtime.GOOS, runtime.GOARCH) +} diff --git a/vendor/github.com/influxdata/line-protocol/.gitignore b/vendor/github.com/influxdata/line-protocol/.gitignore new file mode 100644 index 0000000..63bd916 --- /dev/null +++ b/vendor/github.com/influxdata/line-protocol/.gitignore @@ -0,0 +1,5 @@ +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out diff --git a/vendor/github.com/influxdata/line-protocol/LICENSE b/vendor/github.com/influxdata/line-protocol/LICENSE new file mode 100644 index 0000000..cfd3bfe --- /dev/null +++ b/vendor/github.com/influxdata/line-protocol/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2013-2018 InfluxData Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/influxdata/line-protocol/README.md b/vendor/github.com/influxdata/line-protocol/README.md new file mode 100644 index 0000000..bd6d88e --- /dev/null +++ b/vendor/github.com/influxdata/line-protocol/README.md @@ -0,0 +1,22 @@ +# line-protocol + +This is an encoder for the influx [line protocol.](https://docs.influxdata.com/influxdb/latest/write_protocols/line_protocol_reference/) + +It has an interface similar to the standard library's `json.Encoder`. + + +### some caveats. +- It is not concurrency-safe. If you want to make multiple calls to `Encoder.Encode` concurrently you have to manage the concurrency yourself. 
+- It can only encode values that are uint64, int64, int, float32, float64, string, or bool. +- Ints are converted to int64, float32's to float64. +- If UintSupport is not set, uint64s are converted to int64's and if they are larger than the max int64, they get truncated to the max int64 instead of overflowing. + + +### Example: +```go +buf := &bytes.Buffer{} +serializer := protocol.NewEncoder(buf) +serializer.SetMaxLineBytes(1024) +serializer.SetFieldTypeSupport(UintSupport) +serializer.Encode(e) // where e is something that implements the protocol.Metric interface +``` diff --git a/vendor/github.com/influxdata/line-protocol/encoder.go b/vendor/github.com/influxdata/line-protocol/encoder.go new file mode 100644 index 0000000..5a8780c --- /dev/null +++ b/vendor/github.com/influxdata/line-protocol/encoder.go @@ -0,0 +1,299 @@ +package protocol + +import ( + "fmt" + "io" + "math" + "sort" + "strconv" + "time" +) + +// ErrIsNaN is a field error for when a float field is NaN. +var ErrIsNaN = &FieldError{"is NaN"} + +// ErrIsInf is a field error for when a float field is Inf. +var ErrIsInf = &FieldError{"is Inf"} + +// Encoder marshals Metrics into influxdb line protocol. +// It is not safe for concurrent use, make a new one! +// The default behavior when encountering a field error is to ignore the field and move on. +// If you wish it to error out on field errors, use Encoder.FailOnFieldErr(true) +type Encoder struct { + w io.Writer + fieldSortOrder FieldSortOrder + fieldTypeSupport FieldTypeSupport + failOnFieldError bool + maxLineBytes int + fieldList []*Field + header []byte + footer []byte + pair []byte + precision time.Duration +} + +// SetMaxLineBytes sets a maximum length for a line, Encode will error if the generated line is longer +func (e *Encoder) SetMaxLineBytes(i int) { + e.maxLineBytes = i +} + +// SetFieldSortOrder sets a sort order for the data. +// The options are: +// NoSortFields (doesn't sort the fields) +// SortFields (sorts the keys in alphabetical order) +func (e *Encoder) SetFieldSortOrder(s FieldSortOrder) { + e.fieldSortOrder = s +} + +// SetFieldTypeSupport sets flags for if the encoder supports certain optional field types such as uint64 +func (e *Encoder) SetFieldTypeSupport(s FieldTypeSupport) { + e.fieldTypeSupport = s +} + +// FailOnFieldErr whether or not to fail on a field error or just move on. 
+// The default behavior to move on +func (e *Encoder) FailOnFieldErr(s bool) { + e.failOnFieldError = s +} + +// SetPrecision sets time precision for writes +// Default is nanoseconds precision +func (e *Encoder) SetPrecision(p time.Duration) { + e.precision = p +} + +// NewEncoder gives us an encoder that marshals to a writer in influxdb line protocol +// as defined by: +// https://docs.influxdata.com/influxdb/v1.5/write_protocols/line_protocol_reference/ +func NewEncoder(w io.Writer) *Encoder { + return &Encoder{ + w: w, + header: make([]byte, 0, 128), + footer: make([]byte, 0, 128), + pair: make([]byte, 0, 128), + fieldList: make([]*Field, 0, 16), + precision: time.Nanosecond, + } +} + +// This is here to significantly reduce allocations, wish that we had constant/immutable keyword that applied to +// more complex objects +var comma = []byte(",") + +// Encode marshals a Metric to the io.Writer in the Encoder +func (e *Encoder) Encode(m Metric) (int, error) { + err := e.buildHeader(m) + if err != nil { + return 0, err + } + + e.buildFooter(m.Time()) + + // here we make a copy of the *fields so we can do an in-place sort + e.fieldList = append(e.fieldList[:0], m.FieldList()...) + + if e.fieldSortOrder == SortFields { + sort.Slice(e.fieldList, func(i, j int) bool { + return e.fieldList[i].Key < e.fieldList[j].Key + }) + } + i := 0 + totalWritten := 0 + pairsLen := 0 + firstField := true + for _, field := range e.fieldList { + err = e.buildFieldPair(field.Key, field.Value) + if err != nil { + if e.failOnFieldError { + return 0, err + } + continue + } + + bytesNeeded := len(e.header) + pairsLen + len(e.pair) + len(e.footer) + + // Additional length needed for field separator `,` + if !firstField { + bytesNeeded++ + } + + if e.maxLineBytes > 0 && bytesNeeded > e.maxLineBytes { + // Need at least one field per line + if firstField { + return 0, ErrNeedMoreSpace + } + + i, err = e.w.Write(e.footer) + if err != nil { + return 0, err + } + pairsLen = 0 + totalWritten += i + + bytesNeeded = len(e.header) + len(e.pair) + len(e.footer) + + if e.maxLineBytes > 0 && bytesNeeded > e.maxLineBytes { + return 0, ErrNeedMoreSpace + } + + i, err = e.w.Write(e.header) + if err != nil { + return 0, err + } + totalWritten += i + + i, err = e.w.Write(e.pair) + if err != nil { + return 0, err + } + totalWritten += i + + pairsLen += len(e.pair) + firstField = false + continue + } + + if firstField { + i, err = e.w.Write(e.header) + if err != nil { + return 0, err + } + totalWritten += i + + } else { + i, err = e.w.Write(comma) + if err != nil { + return 0, err + } + totalWritten += i + + } + + e.w.Write(e.pair) + + pairsLen += len(e.pair) + firstField = false + } + + if firstField { + return 0, ErrNoFields + } + i, err = e.w.Write(e.footer) + if err != nil { + return 0, err + } + totalWritten += i + return totalWritten, nil + +} + +func (e *Encoder) buildHeader(m Metric) error { + e.header = e.header[:0] + name := nameEscape(m.Name()) + if name == "" { + return ErrInvalidName + } + e.header = append(e.header, name...) + + for _, tag := range m.TagList() { + key := escape(tag.Key) + value := escape(tag.Value) + + // Some keys and values are not encodeable as line protocol, such as + // those with a trailing '\' or empty strings. + if key == "" || value == "" { + continue + } + + e.header = append(e.header, ',') + e.header = append(e.header, key...) + e.header = append(e.header, '=') + e.header = append(e.header, value...) 
+ } + + e.header = append(e.header, ' ') + return nil +} + +func (e *Encoder) buildFieldVal(value interface{}) error { + switch v := value.(type) { + case uint64: + if e.fieldTypeSupport&UintSupport != 0 { + e.pair = append(strconv.AppendUint(e.pair, v, 10), 'u') + } else if v <= uint64(math.MaxInt64) { + e.pair = append(strconv.AppendInt(e.pair, int64(v), 10), 'i') + } else { + e.pair = append(strconv.AppendInt(e.pair, math.MaxInt64, 10), 'i') + } + case int64: + e.pair = append(strconv.AppendInt(e.pair, v, 10), 'i') + case int: + e.pair = append(strconv.AppendInt(e.pair, int64(v), 10), 'i') + case float64: + if math.IsNaN(v) { + return ErrIsNaN + } + + if math.IsInf(v, 0) { + return ErrIsInf + } + + e.pair = strconv.AppendFloat(e.pair, v, 'f', -1, 64) + case float32: + v32 := float64(v) + if math.IsNaN(v32) { + return ErrIsNaN + } + + if math.IsInf(v32, 0) { + return ErrIsInf + } + + e.pair = strconv.AppendFloat(e.pair, v32, 'f', -1, 64) + + case string: + e.pair = append(e.pair, '"') + e.pair = append(e.pair, stringFieldEscape(v)...) + e.pair = append(e.pair, '"') + case []byte: + e.pair = append(e.pair, '"') + stringFieldEscapeBytes(&e.pair, v) + e.pair = append(e.pair, '"') + case bool: + e.pair = strconv.AppendBool(e.pair, v) + default: + return &FieldError{fmt.Sprintf("invalid value type: %T", v)} + } + return nil +} + +func (e *Encoder) buildFieldPair(key string, value interface{}) error { + e.pair = e.pair[:0] + key = escape(key) + // Some keys are not encodeable as line protocol, such as those with a + // trailing '\' or empty strings. + if key == "" || key[:len(key)-1] == "\\" { + return &FieldError{"invalid field key"} + } + e.pair = append(e.pair, key...) + e.pair = append(e.pair, '=') + return e.buildFieldVal(value) +} + +func (e *Encoder) buildFooter(t time.Time) { + e.footer = e.footer[:0] + if !t.IsZero() { + e.footer = append(e.footer, ' ') + switch e.precision { + case time.Microsecond: + e.footer = strconv.AppendInt(e.footer, t.UnixNano()/1000, 10) + case time.Millisecond: + e.footer = strconv.AppendInt(e.footer, t.UnixNano()/1000000, 10) + case time.Second: + e.footer = strconv.AppendInt(e.footer, t.Unix(), 10) + default: + e.footer = strconv.AppendInt(e.footer, t.UnixNano(), 10) + } + } + e.footer = append(e.footer, '\n') +} diff --git a/vendor/github.com/influxdata/line-protocol/escape.go b/vendor/github.com/influxdata/line-protocol/escape.go new file mode 100644 index 0000000..781b352 --- /dev/null +++ b/vendor/github.com/influxdata/line-protocol/escape.go @@ -0,0 +1,264 @@ +package protocol + +import ( + "bytes" + "reflect" + "strconv" + "strings" + "unicode/utf8" + "unsafe" +) + +const ( + escapes = "\t\n\f\r ,=" + nameEscapes = "\t\n\f\r ," + stringFieldEscapes = "\t\n\f\r\\\"" +) + +var ( + stringEscaper = strings.NewReplacer( + "\t", `\t`, + "\n", `\n`, + "\f", `\f`, + "\r", `\r`, + `,`, `\,`, + ` `, `\ `, + `=`, `\=`, + ) + + nameEscaper = strings.NewReplacer( + "\t", `\t`, + "\n", `\n`, + "\f", `\f`, + "\r", `\r`, + `,`, `\,`, + ` `, `\ `, + ) + + stringFieldEscaper = strings.NewReplacer( + "\t", `\t`, + "\n", `\n`, + "\f", `\f`, + "\r", `\r`, + `"`, `\"`, + `\`, `\\`, + ) +) + +var ( + unescaper = strings.NewReplacer( + `\,`, `,`, + `\"`, `"`, // ??? + `\ `, ` `, + `\=`, `=`, + ) + + nameUnescaper = strings.NewReplacer( + `\,`, `,`, + `\ `, ` `, + ) + + stringFieldUnescaper = strings.NewReplacer( + `\"`, `"`, + `\\`, `\`, + ) +) + +// The various escape functions allocate, I'd like to fix that. 
+// TODO: make escape not allocate + +// Escape a tagkey, tagvalue, or fieldkey +func escape(s string) string { + if strings.ContainsAny(s, escapes) { + return stringEscaper.Replace(s) + } + return s +} + +// Escape a measurement name +func nameEscape(s string) string { + if strings.ContainsAny(s, nameEscapes) { + return nameEscaper.Replace(s) + } + return s +} + +// Escape a string field +func stringFieldEscape(s string) string { + if strings.ContainsAny(s, stringFieldEscapes) { + return stringFieldEscaper.Replace(s) + } + return s +} + +const ( + utf8mask = byte(0x3F) + utf8bytex = byte(0x80) // 1000 0000 + utf8len2 = byte(0xC0) // 1100 0000 + utf8len3 = byte(0xE0) // 1110 0000 + utf8len4 = byte(0xF0) // 1111 0000 +) + +func escapeBytes(dest *[]byte, b []byte) { + if bytes.ContainsAny(b, escapes) { + var r rune + for i, j := 0, 0; i < len(b); i += j { + r, j = utf8.DecodeRune(b[i:]) + switch { + case r == '\t': + *dest = append(*dest, `\t`...) + case r == '\n': + *dest = append(*dest, `\n`...) + case r == '\f': + *dest = append(*dest, `\f`...) + case r == '\r': + *dest = append(*dest, `\r`...) + case r == ',': + *dest = append(*dest, `\,`...) + case r == ' ': + *dest = append(*dest, `\ `...) + case r == '=': + *dest = append(*dest, `\=`...) + case r <= 1<<7-1: + *dest = append(*dest, byte(r)) + case r <= 1<<11-1: + *dest = append(*dest, utf8len2|byte(r>>6), utf8bytex|byte(r)&utf8mask) + case r <= 1<<16-1: + *dest = append(*dest, utf8len3|byte(r>>12), utf8bytex|byte(r>>6)&utf8mask, utf8bytex|byte(r)&utf8mask) + default: + *dest = append(*dest, utf8len4|byte(r>>18), utf8bytex|byte(r>>12)&utf8mask, utf8bytex|byte(r>>6)&utf8mask, utf8bytex|byte(r)&utf8mask) + } + } + return + } + *dest = append(*dest, b...) +} + +// Escape a measurement name +func nameEscapeBytes(dest *[]byte, b []byte) { + if bytes.ContainsAny(b, nameEscapes) { + var r rune + for i, j := 0, 0; i < len(b); i += j { + r, j = utf8.DecodeRune(b[i:]) + switch { + case r == '\t': + *dest = append(*dest, `\t`...) + case r == '\n': + *dest = append(*dest, `\n`...) + case r == '\f': + *dest = append(*dest, `\f`...) + case r == '\r': + *dest = append(*dest, `\r`...) + case r == ',': + *dest = append(*dest, `\,`...) + case r == ' ': + *dest = append(*dest, `\ `...) + case r == '\\': + *dest = append(*dest, `\\`...) + case r <= 1<<7-1: + *dest = append(*dest, byte(r)) + case r <= 1<<11-1: + *dest = append(*dest, utf8len2|byte(r>>6), utf8bytex|byte(r)&utf8mask) + case r <= 1<<16-1: + *dest = append(*dest, utf8len3|byte(r>>12), utf8bytex|byte(r>>6)&utf8mask, utf8bytex|byte(r)&utf8mask) + default: + *dest = append(*dest, utf8len4|byte(r>>18), utf8bytex|byte(r>>12)&utf8mask, utf8bytex|byte(r>>6)&utf8mask, utf8bytex|byte(r)&utf8mask) + } + } + return + } + *dest = append(*dest, b...) +} + +func stringFieldEscapeBytes(dest *[]byte, b []byte) { + if bytes.ContainsAny(b, stringFieldEscapes) { + var r rune + for i, j := 0, 0; i < len(b); i += j { + r, j = utf8.DecodeRune(b[i:]) + switch { + case r == '\t': + *dest = append(*dest, `\t`...) + case r == '\n': + *dest = append(*dest, `\n`...) + case r == '\f': + *dest = append(*dest, `\f`...) + case r == '\r': + *dest = append(*dest, `\r`...) + case r == ',': + *dest = append(*dest, `\,`...) + case r == ' ': + *dest = append(*dest, `\ `...) + case r == '\\': + *dest = append(*dest, `\\`...) 
+ case r <= 1<<7-1: + *dest = append(*dest, byte(r)) + case r <= 1<<11-1: + *dest = append(*dest, utf8len2|byte(r>>6), utf8bytex|byte(r)&utf8mask) + case r <= 1<<16-1: + *dest = append(*dest, utf8len3|byte(r>>12), utf8bytex|byte(r>>6)&utf8mask, utf8bytex|byte(r)&utf8mask) + default: + *dest = append(*dest, utf8len4|byte(r>>18), utf8bytex|byte(r>>12)&utf8mask, utf8bytex|byte(r>>6)&utf8mask, utf8bytex|byte(r)&utf8mask) + } + } + return + } + *dest = append(*dest, b...) +} + +func unescape(b []byte) string { + if bytes.ContainsAny(b, escapes) { + return unescaper.Replace(unsafeBytesToString(b)) + } + return string(b) +} + +func nameUnescape(b []byte) string { + if bytes.ContainsAny(b, nameEscapes) { + return nameUnescaper.Replace(unsafeBytesToString(b)) + } + return string(b) +} + +// unsafeBytesToString converts a []byte to a string without a heap allocation. +// +// It is unsafe, and is intended to prepare input to short-lived functions +// that require strings. +func unsafeBytesToString(in []byte) string { + src := *(*reflect.SliceHeader)(unsafe.Pointer(&in)) + dst := reflect.StringHeader{ + Data: src.Data, + Len: src.Len, + } + s := *(*string)(unsafe.Pointer(&dst)) + return s +} + +// parseIntBytes is a zero-alloc wrapper around strconv.ParseInt. +func parseIntBytes(b []byte, base int, bitSize int) (i int64, err error) { + s := unsafeBytesToString(b) + return strconv.ParseInt(s, base, bitSize) +} + +// parseUintBytes is a zero-alloc wrapper around strconv.ParseUint. +func parseUintBytes(b []byte, base int, bitSize int) (i uint64, err error) { + s := unsafeBytesToString(b) + return strconv.ParseUint(s, base, bitSize) +} + +// parseFloatBytes is a zero-alloc wrapper around strconv.ParseFloat. +func parseFloatBytes(b []byte, bitSize int) (float64, error) { + s := unsafeBytesToString(b) + return strconv.ParseFloat(s, bitSize) +} + +// parseBoolBytes is a zero-alloc wrapper around strconv.ParseBool. +func parseBoolBytes(b []byte) (bool, error) { + return strconv.ParseBool(unsafeBytesToString(b)) +} + +func stringFieldUnescape(b []byte) string { + if bytes.ContainsAny(b, stringFieldEscapes) { + return stringFieldUnescaper.Replace(unsafeBytesToString(b)) + } + return string(b) +} diff --git a/vendor/github.com/influxdata/line-protocol/handler.go b/vendor/github.com/influxdata/line-protocol/handler.go new file mode 100644 index 0000000..f28a8d0 --- /dev/null +++ b/vendor/github.com/influxdata/line-protocol/handler.go @@ -0,0 +1,128 @@ +package protocol + +import ( + "bytes" + "errors" + "strconv" + "time" +) + +// MetricHandler implements the Handler interface and produces Metric. +type MetricHandler struct { + timePrecision time.Duration + timeFunc TimeFunc + metric MutableMetric +} + +func NewMetricHandler() *MetricHandler { + return &MetricHandler{ + timePrecision: time.Nanosecond, + timeFunc: time.Now, + } +} + +func (h *MetricHandler) SetTimePrecision(p time.Duration) { + h.timePrecision = p + // When the timestamp is omitted from the metric, the timestamp + // comes from the server clock, truncated to the nearest unit of + // measurement provided in precision. + // + // When a timestamp is provided in the metric, precsision is + // overloaded to hold the unit of measurement of the timestamp. 
+} + +func (h *MetricHandler) SetTimeFunc(f TimeFunc) { + h.timeFunc = f +} + +func (h *MetricHandler) Metric() (Metric, error) { + if h.metric.Time().IsZero() { + h.metric.SetTime(h.timeFunc().Truncate(h.timePrecision)) + } + return h.metric, nil +} + +func (h *MetricHandler) SetMeasurement(name []byte) error { + var err error + h.metric, err = New(nameUnescape(name), + nil, nil, time.Time{}) + return err +} + +func (h *MetricHandler) AddTag(key []byte, value []byte) error { + tk := unescape(key) + tv := unescape(value) + h.metric.AddTag(tk, tv) + return nil +} + +func (h *MetricHandler) AddInt(key []byte, value []byte) error { + fk := unescape(key) + fv, err := parseIntBytes(bytes.TrimSuffix(value, []byte("i")), 10, 64) + if err != nil { + if numerr, ok := err.(*strconv.NumError); ok { + return numerr.Err + } + return err + } + h.metric.AddField(fk, fv) + return nil +} + +func (h *MetricHandler) AddUint(key []byte, value []byte) error { + fk := unescape(key) + fv, err := parseUintBytes(bytes.TrimSuffix(value, []byte("u")), 10, 64) + if err != nil { + if numerr, ok := err.(*strconv.NumError); ok { + return numerr.Err + } + return err + } + h.metric.AddField(fk, fv) + return nil +} + +func (h *MetricHandler) AddFloat(key []byte, value []byte) error { + fk := unescape(key) + fv, err := parseFloatBytes(value, 64) + if err != nil { + if numerr, ok := err.(*strconv.NumError); ok { + return numerr.Err + } + return err + } + h.metric.AddField(fk, fv) + return nil +} + +func (h *MetricHandler) AddString(key []byte, value []byte) error { + fk := unescape(key) + fv := stringFieldUnescape(value) + h.metric.AddField(fk, fv) + return nil +} + +func (h *MetricHandler) AddBool(key []byte, value []byte) error { + fk := unescape(key) + fv, err := parseBoolBytes(value) + if err != nil { + return errors.New("unparseable bool") + } + h.metric.AddField(fk, fv) + return nil +} + +func (h *MetricHandler) SetTimestamp(tm []byte) error { + v, err := parseIntBytes(tm, 10, 64) + if err != nil { + if numerr, ok := err.(*strconv.NumError); ok { + return numerr.Err + } + return err + } + + //time precision is overloaded to mean time unit here + ns := v * int64(h.timePrecision) + h.metric.SetTime(time.Unix(0, ns)) + return nil +} diff --git a/vendor/github.com/influxdata/line-protocol/machine.go b/vendor/github.com/influxdata/line-protocol/machine.go new file mode 100644 index 0000000..7ae6a28 --- /dev/null +++ b/vendor/github.com/influxdata/line-protocol/machine.go @@ -0,0 +1,34921 @@ +//line plugins/parsers/influx/machine.go.rl:1 +package protocol + +import ( + "errors" + "io" +) + +var ( + ErrNameParse = errors.New("expected measurement name") + ErrFieldParse = errors.New("expected field") + ErrTagParse = errors.New("expected tag") + ErrTimestampParse = errors.New("expected timestamp") + ErrParse = errors.New("parse error") + //lint:ignore ST1012 not needed + EOF = errors.New("EOF") +) + +//line plugins/parsers/influx/machine.go.rl:310 + +//line plugins/parsers/influx/machine.go:25 +const LineProtocol_start int = 270 +const LineProtocol_first_final int = 270 +const LineProtocol_error int = 0 + +const LineProtocol_en_main int = 270 +const LineProtocol_en_discard_line int = 258 +const LineProtocol_en_align int = 740 +const LineProtocol_en_series int = 261 + +//line plugins/parsers/influx/machine.go.rl:313 + +type Handler interface { + SetMeasurement(name []byte) error + AddTag(key []byte, value []byte) error + AddInt(key []byte, value []byte) error + AddUint(key []byte, value []byte) error + AddFloat(key []byte, value 
[]byte) error + AddString(key []byte, value []byte) error + AddBool(key []byte, value []byte) error + SetTimestamp(tm []byte) error +} + +type machine struct { + data []byte + cs int + p, pe, eof int + pb int + lineno int + sol int + handler Handler + initState int + key []byte + beginMetric bool + finishMetric bool +} + +func NewMachine(handler Handler) *machine { + m := &machine{ + handler: handler, + initState: LineProtocol_en_align, + } + +//line plugins/parsers/influx/machine.go.rl:346 + +//line plugins/parsers/influx/machine.go.rl:347 + +//line plugins/parsers/influx/machine.go.rl:348 + +//line plugins/parsers/influx/machine.go.rl:349 + +//line plugins/parsers/influx/machine.go.rl:350 + +//line plugins/parsers/influx/machine.go.rl:351 + +//line plugins/parsers/influx/machine.go:82 + { + (m.cs) = LineProtocol_start + } + +//line plugins/parsers/influx/machine.go.rl:352 + + return m +} + +func NewSeriesMachine(handler Handler) *machine { + m := &machine{ + handler: handler, + initState: LineProtocol_en_series, + } + +//line plugins/parsers/influx/machine.go.rl:363 + +//line plugins/parsers/influx/machine.go.rl:364 + +//line plugins/parsers/influx/machine.go.rl:365 + +//line plugins/parsers/influx/machine.go.rl:366 + +//line plugins/parsers/influx/machine.go.rl:367 + +//line plugins/parsers/influx/machine.go:109 + { + (m.cs) = LineProtocol_start + } + +//line plugins/parsers/influx/machine.go.rl:368 + + return m +} + +func (m *machine) SetData(data []byte) { + m.data = data + m.p = 0 + m.pb = 0 + m.lineno = 1 + m.sol = 0 + m.pe = len(data) + m.eof = len(data) + m.key = nil + m.beginMetric = false + m.finishMetric = false + +//line plugins/parsers/influx/machine.go:132 + { + (m.cs) = LineProtocol_start + } + +//line plugins/parsers/influx/machine.go.rl:385 + m.cs = m.initState +} + +// Next parses the next metric line and returns nil if it was successfully +// processed. If the line contains a syntax error an error is returned, +// otherwise if the end of file is reached before finding a metric line then +// EOF is returned. 
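+//
+// A minimal usage sketch (editor's illustration, not part of the upstream comment),
+// using only identifiers defined in this package: feed line-protocol bytes to the
+// machine and drain metrics until EOF is returned.
+//
+//	handler := NewMetricHandler()
+//	fsm := NewMachine(handler)
+//	fsm.SetData([]byte("cpu,host=a usage=0.5 1556813561098000000\n"))
+//	for {
+//		err := fsm.Next()
+//		if err == EOF {
+//			break // input exhausted
+//		}
+//		if err != nil {
+//			break // syntax error on the current line
+//		}
+//		metric, _ := handler.Metric()
+//		_ = metric
+//	}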
+func (m *machine) Next() error { + if m.p == m.pe && m.pe == m.eof { + return EOF + } + + m.key = nil + m.beginMetric = false + m.finishMetric = false + + return m.exec() +} + +func (m *machine) exec() error { + var err error + +//line plugins/parsers/influx/machine.go:160 + { + if (m.p) == (m.pe) { + goto _test_eof + } + goto _resume + + _again: + switch m.cs { + case 270: + goto st270 + case 1: + goto st1 + case 2: + goto st2 + case 3: + goto st3 + case 0: + goto st0 + case 4: + goto st4 + case 5: + goto st5 + case 6: + goto st6 + case 7: + goto st7 + case 271: + goto st271 + case 272: + goto st272 + case 273: + goto st273 + case 8: + goto st8 + case 9: + goto st9 + case 10: + goto st10 + case 11: + goto st11 + case 12: + goto st12 + case 13: + goto st13 + case 14: + goto st14 + case 15: + goto st15 + case 16: + goto st16 + case 17: + goto st17 + case 18: + goto st18 + case 19: + goto st19 + case 20: + goto st20 + case 21: + goto st21 + case 22: + goto st22 + case 23: + goto st23 + case 24: + goto st24 + case 25: + goto st25 + case 26: + goto st26 + case 27: + goto st27 + case 28: + goto st28 + case 29: + goto st29 + case 30: + goto st30 + case 31: + goto st31 + case 32: + goto st32 + case 274: + goto st274 + case 275: + goto st275 + case 33: + goto st33 + case 34: + goto st34 + case 276: + goto st276 + case 277: + goto st277 + case 278: + goto st278 + case 35: + goto st35 + case 279: + goto st279 + case 280: + goto st280 + case 281: + goto st281 + case 282: + goto st282 + case 283: + goto st283 + case 284: + goto st284 + case 285: + goto st285 + case 286: + goto st286 + case 287: + goto st287 + case 288: + goto st288 + case 289: + goto st289 + case 290: + goto st290 + case 291: + goto st291 + case 292: + goto st292 + case 293: + goto st293 + case 294: + goto st294 + case 295: + goto st295 + case 296: + goto st296 + case 36: + goto st36 + case 37: + goto st37 + case 297: + goto st297 + case 298: + goto st298 + case 299: + goto st299 + case 38: + goto st38 + case 39: + goto st39 + case 40: + goto st40 + case 41: + goto st41 + case 42: + goto st42 + case 300: + goto st300 + case 301: + goto st301 + case 302: + goto st302 + case 303: + goto st303 + case 43: + goto st43 + case 304: + goto st304 + case 305: + goto st305 + case 306: + goto st306 + case 307: + goto st307 + case 308: + goto st308 + case 309: + goto st309 + case 310: + goto st310 + case 311: + goto st311 + case 312: + goto st312 + case 313: + goto st313 + case 314: + goto st314 + case 315: + goto st315 + case 316: + goto st316 + case 317: + goto st317 + case 318: + goto st318 + case 319: + goto st319 + case 320: + goto st320 + case 321: + goto st321 + case 322: + goto st322 + case 323: + goto st323 + case 324: + goto st324 + case 325: + goto st325 + case 44: + goto st44 + case 45: + goto st45 + case 46: + goto st46 + case 47: + goto st47 + case 48: + goto st48 + case 49: + goto st49 + case 50: + goto st50 + case 51: + goto st51 + case 52: + goto st52 + case 53: + goto st53 + case 326: + goto st326 + case 327: + goto st327 + case 328: + goto st328 + case 54: + goto st54 + case 55: + goto st55 + case 56: + goto st56 + case 57: + goto st57 + case 58: + goto st58 + case 59: + goto st59 + case 329: + goto st329 + case 330: + goto st330 + case 60: + goto st60 + case 331: + goto st331 + case 332: + goto st332 + case 333: + goto st333 + case 334: + goto st334 + case 335: + goto st335 + case 336: + goto st336 + case 337: + goto st337 + case 338: + goto st338 + case 339: + goto st339 + case 340: + goto st340 + case 341: + goto st341 + 
case 342: + goto st342 + case 343: + goto st343 + case 344: + goto st344 + case 345: + goto st345 + case 346: + goto st346 + case 347: + goto st347 + case 348: + goto st348 + case 349: + goto st349 + case 350: + goto st350 + case 61: + goto st61 + case 351: + goto st351 + case 352: + goto st352 + case 353: + goto st353 + case 62: + goto st62 + case 354: + goto st354 + case 355: + goto st355 + case 356: + goto st356 + case 357: + goto st357 + case 358: + goto st358 + case 359: + goto st359 + case 360: + goto st360 + case 361: + goto st361 + case 362: + goto st362 + case 363: + goto st363 + case 364: + goto st364 + case 365: + goto st365 + case 366: + goto st366 + case 367: + goto st367 + case 368: + goto st368 + case 369: + goto st369 + case 370: + goto st370 + case 371: + goto st371 + case 372: + goto st372 + case 373: + goto st373 + case 63: + goto st63 + case 64: + goto st64 + case 65: + goto st65 + case 66: + goto st66 + case 67: + goto st67 + case 374: + goto st374 + case 68: + goto st68 + case 69: + goto st69 + case 70: + goto st70 + case 71: + goto st71 + case 72: + goto st72 + case 375: + goto st375 + case 376: + goto st376 + case 377: + goto st377 + case 73: + goto st73 + case 74: + goto st74 + case 378: + goto st378 + case 379: + goto st379 + case 75: + goto st75 + case 380: + goto st380 + case 76: + goto st76 + case 381: + goto st381 + case 382: + goto st382 + case 383: + goto st383 + case 384: + goto st384 + case 385: + goto st385 + case 386: + goto st386 + case 387: + goto st387 + case 388: + goto st388 + case 389: + goto st389 + case 390: + goto st390 + case 391: + goto st391 + case 392: + goto st392 + case 393: + goto st393 + case 394: + goto st394 + case 395: + goto st395 + case 396: + goto st396 + case 397: + goto st397 + case 398: + goto st398 + case 399: + goto st399 + case 400: + goto st400 + case 77: + goto st77 + case 78: + goto st78 + case 79: + goto st79 + case 80: + goto st80 + case 81: + goto st81 + case 82: + goto st82 + case 83: + goto st83 + case 84: + goto st84 + case 85: + goto st85 + case 86: + goto st86 + case 87: + goto st87 + case 88: + goto st88 + case 89: + goto st89 + case 90: + goto st90 + case 401: + goto st401 + case 402: + goto st402 + case 403: + goto st403 + case 404: + goto st404 + case 91: + goto st91 + case 92: + goto st92 + case 93: + goto st93 + case 94: + goto st94 + case 405: + goto st405 + case 406: + goto st406 + case 95: + goto st95 + case 96: + goto st96 + case 407: + goto st407 + case 97: + goto st97 + case 98: + goto st98 + case 408: + goto st408 + case 409: + goto st409 + case 99: + goto st99 + case 410: + goto st410 + case 411: + goto st411 + case 100: + goto st100 + case 101: + goto st101 + case 412: + goto st412 + case 413: + goto st413 + case 414: + goto st414 + case 415: + goto st415 + case 416: + goto st416 + case 417: + goto st417 + case 418: + goto st418 + case 419: + goto st419 + case 420: + goto st420 + case 421: + goto st421 + case 422: + goto st422 + case 423: + goto st423 + case 424: + goto st424 + case 425: + goto st425 + case 426: + goto st426 + case 427: + goto st427 + case 428: + goto st428 + case 429: + goto st429 + case 102: + goto st102 + case 430: + goto st430 + case 431: + goto st431 + case 432: + goto st432 + case 103: + goto st103 + case 104: + goto st104 + case 433: + goto st433 + case 434: + goto st434 + case 435: + goto st435 + case 105: + goto st105 + case 436: + goto st436 + case 437: + goto st437 + case 438: + goto st438 + case 439: + goto st439 + case 440: + goto st440 + case 441: + goto st441 + case 
442: + goto st442 + case 443: + goto st443 + case 444: + goto st444 + case 445: + goto st445 + case 446: + goto st446 + case 447: + goto st447 + case 448: + goto st448 + case 449: + goto st449 + case 450: + goto st450 + case 451: + goto st451 + case 452: + goto st452 + case 453: + goto st453 + case 454: + goto st454 + case 455: + goto st455 + case 106: + goto st106 + case 456: + goto st456 + case 457: + goto st457 + case 458: + goto st458 + case 459: + goto st459 + case 460: + goto st460 + case 461: + goto st461 + case 462: + goto st462 + case 463: + goto st463 + case 464: + goto st464 + case 465: + goto st465 + case 466: + goto st466 + case 467: + goto st467 + case 468: + goto st468 + case 469: + goto st469 + case 470: + goto st470 + case 471: + goto st471 + case 472: + goto st472 + case 473: + goto st473 + case 474: + goto st474 + case 475: + goto st475 + case 476: + goto st476 + case 477: + goto st477 + case 107: + goto st107 + case 108: + goto st108 + case 109: + goto st109 + case 110: + goto st110 + case 111: + goto st111 + case 478: + goto st478 + case 112: + goto st112 + case 479: + goto st479 + case 480: + goto st480 + case 113: + goto st113 + case 481: + goto st481 + case 482: + goto st482 + case 483: + goto st483 + case 484: + goto st484 + case 485: + goto st485 + case 486: + goto st486 + case 487: + goto st487 + case 488: + goto st488 + case 489: + goto st489 + case 114: + goto st114 + case 115: + goto st115 + case 116: + goto st116 + case 490: + goto st490 + case 117: + goto st117 + case 118: + goto st118 + case 119: + goto st119 + case 491: + goto st491 + case 120: + goto st120 + case 121: + goto st121 + case 492: + goto st492 + case 493: + goto st493 + case 122: + goto st122 + case 123: + goto st123 + case 124: + goto st124 + case 125: + goto st125 + case 494: + goto st494 + case 495: + goto st495 + case 496: + goto st496 + case 126: + goto st126 + case 497: + goto st497 + case 498: + goto st498 + case 499: + goto st499 + case 500: + goto st500 + case 501: + goto st501 + case 502: + goto st502 + case 503: + goto st503 + case 504: + goto st504 + case 505: + goto st505 + case 506: + goto st506 + case 507: + goto st507 + case 508: + goto st508 + case 509: + goto st509 + case 510: + goto st510 + case 511: + goto st511 + case 512: + goto st512 + case 513: + goto st513 + case 514: + goto st514 + case 515: + goto st515 + case 516: + goto st516 + case 127: + goto st127 + case 128: + goto st128 + case 517: + goto st517 + case 518: + goto st518 + case 519: + goto st519 + case 520: + goto st520 + case 521: + goto st521 + case 522: + goto st522 + case 523: + goto st523 + case 524: + goto st524 + case 525: + goto st525 + case 129: + goto st129 + case 130: + goto st130 + case 131: + goto st131 + case 526: + goto st526 + case 132: + goto st132 + case 133: + goto st133 + case 134: + goto st134 + case 527: + goto st527 + case 135: + goto st135 + case 136: + goto st136 + case 528: + goto st528 + case 529: + goto st529 + case 137: + goto st137 + case 138: + goto st138 + case 139: + goto st139 + case 530: + goto st530 + case 531: + goto st531 + case 140: + goto st140 + case 532: + goto st532 + case 141: + goto st141 + case 533: + goto st533 + case 534: + goto st534 + case 535: + goto st535 + case 536: + goto st536 + case 537: + goto st537 + case 538: + goto st538 + case 539: + goto st539 + case 540: + goto st540 + case 142: + goto st142 + case 143: + goto st143 + case 144: + goto st144 + case 541: + goto st541 + case 145: + goto st145 + case 146: + goto st146 + case 147: + goto st147 + case 542: 
+ goto st542 + case 148: + goto st148 + case 149: + goto st149 + case 543: + goto st543 + case 544: + goto st544 + case 545: + goto st545 + case 546: + goto st546 + case 547: + goto st547 + case 548: + goto st548 + case 549: + goto st549 + case 550: + goto st550 + case 551: + goto st551 + case 552: + goto st552 + case 553: + goto st553 + case 554: + goto st554 + case 555: + goto st555 + case 556: + goto st556 + case 557: + goto st557 + case 558: + goto st558 + case 559: + goto st559 + case 560: + goto st560 + case 561: + goto st561 + case 562: + goto st562 + case 150: + goto st150 + case 151: + goto st151 + case 563: + goto st563 + case 564: + goto st564 + case 565: + goto st565 + case 152: + goto st152 + case 566: + goto st566 + case 567: + goto st567 + case 153: + goto st153 + case 568: + goto st568 + case 569: + goto st569 + case 570: + goto st570 + case 571: + goto st571 + case 572: + goto st572 + case 573: + goto st573 + case 574: + goto st574 + case 575: + goto st575 + case 576: + goto st576 + case 577: + goto st577 + case 578: + goto st578 + case 579: + goto st579 + case 580: + goto st580 + case 581: + goto st581 + case 582: + goto st582 + case 583: + goto st583 + case 584: + goto st584 + case 585: + goto st585 + case 154: + goto st154 + case 155: + goto st155 + case 586: + goto st586 + case 156: + goto st156 + case 587: + goto st587 + case 588: + goto st588 + case 589: + goto st589 + case 590: + goto st590 + case 591: + goto st591 + case 592: + goto st592 + case 593: + goto st593 + case 594: + goto st594 + case 157: + goto st157 + case 158: + goto st158 + case 159: + goto st159 + case 595: + goto st595 + case 160: + goto st160 + case 161: + goto st161 + case 162: + goto st162 + case 596: + goto st596 + case 163: + goto st163 + case 164: + goto st164 + case 597: + goto st597 + case 598: + goto st598 + case 165: + goto st165 + case 166: + goto st166 + case 167: + goto st167 + case 168: + goto st168 + case 169: + goto st169 + case 170: + goto st170 + case 599: + goto st599 + case 600: + goto st600 + case 601: + goto st601 + case 602: + goto st602 + case 603: + goto st603 + case 604: + goto st604 + case 605: + goto st605 + case 606: + goto st606 + case 607: + goto st607 + case 608: + goto st608 + case 609: + goto st609 + case 610: + goto st610 + case 611: + goto st611 + case 612: + goto st612 + case 613: + goto st613 + case 614: + goto st614 + case 615: + goto st615 + case 616: + goto st616 + case 617: + goto st617 + case 171: + goto st171 + case 172: + goto st172 + case 173: + goto st173 + case 618: + goto st618 + case 619: + goto st619 + case 620: + goto st620 + case 174: + goto st174 + case 621: + goto st621 + case 622: + goto st622 + case 175: + goto st175 + case 623: + goto st623 + case 624: + goto st624 + case 625: + goto st625 + case 626: + goto st626 + case 627: + goto st627 + case 176: + goto st176 + case 177: + goto st177 + case 178: + goto st178 + case 628: + goto st628 + case 179: + goto st179 + case 180: + goto st180 + case 181: + goto st181 + case 629: + goto st629 + case 182: + goto st182 + case 183: + goto st183 + case 630: + goto st630 + case 631: + goto st631 + case 184: + goto st184 + case 632: + goto st632 + case 633: + goto st633 + case 634: + goto st634 + case 185: + goto st185 + case 186: + goto st186 + case 187: + goto st187 + case 635: + goto st635 + case 188: + goto st188 + case 189: + goto st189 + case 190: + goto st190 + case 636: + goto st636 + case 191: + goto st191 + case 192: + goto st192 + case 637: + goto st637 + case 638: + goto st638 + case 193: + 
[plugins/parsers/influx/machine.go — Ragel-generated state machine for the InfluxDB line-protocol parser. The remainder of this hunk is the generated state dispatch (the goto st*/st_case_* switch over states 0–744) and the per-state handlers that call m.handler.SetMeasurement, AddTag, AddString, AddInt, AddUint, AddFloat, AddBool, and SetTimestamp, and set ErrNameParse, ErrTagParse, ErrFieldParse, or ErrTimestampParse on malformed input. Generated code not reproduced here.]
+//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + + goto st299 + tr221: +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + + goto st299 + tr639: + (m.cs) = 299 +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + + goto _again + tr603: + (m.cs) = 299 +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + +//line plugins/parsers/influx/machine.go.rl:149 + + err = m.handler.SetTimestamp(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr823: + (m.cs) = 299 +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + + goto _again + tr829: + (m.cs) = 299 +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + + goto _again + tr810: + (m.cs) = 299 +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr765: + (m.cs) = 299 +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr798: + (m.cs) = 299 +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr804: + (m.cs) = 299 +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st299: +//line plugins/parsers/influx/machine.go.rl:164 + + m.finishMetric = true + (m.cs) = 740 + { + (m.p)++ + goto _out + } + + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof299 + } + st_case_299: +//line plugins/parsers/influx/machine.go:6113 + switch (m.data)[(m.p)] { + case 9: + goto st38 + case 10: + goto tr29 + case 11: + goto tr117 + case 12: + goto st8 + case 13: + goto st7 + case 32: + goto st38 + 
case 34: + goto tr118 + case 35: + goto st6 + case 44: + goto st6 + case 92: + goto tr87 + } + goto tr82 + st38: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof38 + } + st_case_38: + switch (m.data)[(m.p)] { + case 9: + goto st38 + case 10: + goto tr29 + case 11: + goto tr117 + case 12: + goto st8 + case 13: + goto st7 + case 32: + goto st38 + case 34: + goto tr118 + case 35: + goto st6 + case 44: + goto st6 + case 92: + goto tr87 + } + goto tr82 + tr117: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st39 + st39: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof39 + } + st_case_39: +//line plugins/parsers/influx/machine.go:6176 + switch (m.data)[(m.p)] { + case 9: + goto tr119 + case 10: + goto tr29 + case 11: + goto tr120 + case 12: + goto tr38 + case 13: + goto st7 + case 32: + goto tr119 + case 34: + goto tr85 + case 35: + goto st30 + case 44: + goto tr92 + case 92: + goto tr87 + } + goto tr82 + tr119: + (m.cs) = 40 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st40: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof40 + } + st_case_40: +//line plugins/parsers/influx/machine.go:6218 + switch (m.data)[(m.p)] { + case 9: + goto st40 + case 10: + goto tr29 + case 11: + goto tr123 + case 12: + goto st10 + case 13: + goto st7 + case 32: + goto st40 + case 34: + goto tr124 + case 35: + goto tr94 + case 44: + goto st6 + case 61: + goto tr82 + case 92: + goto tr125 + } + goto tr121 + tr121: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st41 + st41: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof41 + } + st_case_41: +//line plugins/parsers/influx/machine.go:6255 + switch (m.data)[(m.p)] { + case 9: + goto tr89 + case 10: + goto tr29 + case 11: + goto tr127 + case 12: + goto tr1 + case 13: + goto st7 + case 32: + goto tr89 + case 34: + goto tr128 + case 44: + goto tr92 + case 61: + goto tr129 + case 92: + goto st93 + } + goto st41 + tr127: + (m.cs) = 42 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr131: + (m.cs) = 42 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto _again + st42: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof42 + } + st_case_42: +//line plugins/parsers/influx/machine.go:6314 + switch (m.data)[(m.p)] { + case 9: + goto tr89 + case 10: + goto tr29 + case 11: + goto tr131 + case 12: + goto tr1 + case 13: + goto st7 + case 32: + goto tr89 + case 34: + goto tr124 + case 44: + goto tr92 + case 61: + goto tr129 + case 92: + goto tr125 + } + goto tr121 + tr124: + (m.cs) = 300 +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + +//line plugins/parsers/influx/machine.go.rl:140 + + err = m.handler.AddString(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr128: + (m.cs) = 300 +//line plugins/parsers/influx/machine.go.rl:140 + + err = m.handler.AddString(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st300: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof300 + } + st_case_300: +//line 
plugins/parsers/influx/machine.go:6373 + switch (m.data)[(m.p)] { + case 10: + goto tr103 + case 11: + goto tr502 + case 13: + goto st33 + case 32: + goto tr501 + case 44: + goto tr503 + case 61: + goto tr49 + case 92: + goto st28 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 { + goto tr501 + } + goto st11 + tr501: + (m.cs) = 301 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr566: + (m.cs) = 301 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr641: + (m.cs) = 301 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr731: + (m.cs) = 301 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr743: + (m.cs) = 301 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr750: + (m.cs) = 301 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr757: + (m.cs) = 301 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr825: + (m.cs) = 301 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr831: + (m.cs) = 301 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr836: + (m.cs) = 301 +//line plugins/parsers/influx/machine.go.rl:78 + + err = 
m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st301: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof301 + } + st_case_301: +//line plugins/parsers/influx/machine.go:6609 + switch (m.data)[(m.p)] { + case 10: + goto tr103 + case 11: + goto tr505 + case 13: + goto st33 + case 32: + goto st301 + case 44: + goto tr105 + case 45: + goto tr467 + case 61: + goto tr105 + case 92: + goto tr10 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto tr468 + } + case (m.data)[(m.p)] >= 9: + goto st301 + } + goto tr6 + tr505: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st302 + st302: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof302 + } + st_case_302: +//line plugins/parsers/influx/machine.go:6648 + switch (m.data)[(m.p)] { + case 10: + goto tr103 + case 11: + goto tr505 + case 13: + goto st33 + case 32: + goto st301 + case 44: + goto tr105 + case 45: + goto tr467 + case 61: + goto tr12 + case 92: + goto tr10 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto tr468 + } + case (m.data)[(m.p)] >= 9: + goto st301 + } + goto tr6 + tr502: + (m.cs) = 303 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr506: + (m.cs) = 303 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto _again + st303: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof303 + } + st_case_303: +//line plugins/parsers/influx/machine.go:6711 + switch (m.data)[(m.p)] { + case 10: + goto tr103 + case 11: + goto tr506 + case 13: + goto st33 + case 32: + goto tr501 + case 44: + goto tr4 + case 45: + goto tr507 + case 61: + goto tr49 + case 92: + goto tr45 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto tr508 + } + case (m.data)[(m.p)] >= 9: + goto tr501 + } + goto tr41 + tr507: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st43 + st43: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof43 + } + st_case_43: +//line plugins/parsers/influx/machine.go:6750 + switch (m.data)[(m.p)] { + case 10: + goto tr132 + case 11: + goto tr48 + case 13: + goto tr132 + case 32: + goto tr1 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st304 + } + case (m.data)[(m.p)] >= 9: + goto tr1 + } + goto st11 + tr508: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st304 + st304: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof304 + } + st_case_304: +//line plugins/parsers/influx/machine.go:6787 + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st308 + } + case (m.data)[(m.p)] >= 9: + goto 
tr509 + } + goto st11 + tr514: + (m.cs) = 305 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr575: + (m.cs) = 305 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr509: + (m.cs) = 305 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:149 + + err = m.handler.SetTimestamp(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr572: + (m.cs) = 305 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:149 + + err = m.handler.SetTimestamp(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st305: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof305 + } + st_case_305: +//line plugins/parsers/influx/machine.go:6890 + switch (m.data)[(m.p)] { + case 10: + goto tr103 + case 11: + goto tr513 + case 13: + goto st33 + case 32: + goto st305 + case 44: + goto tr8 + case 61: + goto tr8 + case 92: + goto tr10 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 { + goto st305 + } + goto tr6 + tr513: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st306 + st306: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof306 + } + st_case_306: +//line plugins/parsers/influx/machine.go:6922 + switch (m.data)[(m.p)] { + case 10: + goto tr103 + case 11: + goto tr513 + case 13: + goto st33 + case 32: + goto st305 + case 44: + goto tr8 + case 61: + goto tr12 + case 92: + goto tr10 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 { + goto st305 + } + goto tr6 + tr515: + (m.cs) = 307 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto _again + tr510: + (m.cs) = 307 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:149 + + err = m.handler.SetTimestamp(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st307: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof307 + } + st_case_307: +//line plugins/parsers/influx/machine.go:6988 + switch (m.data)[(m.p)] { + case 10: + goto tr103 + case 11: + goto tr515 + case 13: + goto st33 + case 32: + goto tr514 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto tr45 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 { + goto tr514 + } + goto tr41 + st308: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof308 + } + st_case_308: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st309 + } 
+ case (m.data)[(m.p)] >= 9: + goto tr509 + } + goto st11 + st309: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof309 + } + st_case_309: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st310 + } + case (m.data)[(m.p)] >= 9: + goto tr509 + } + goto st11 + st310: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof310 + } + st_case_310: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st311 + } + case (m.data)[(m.p)] >= 9: + goto tr509 + } + goto st11 + st311: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof311 + } + st_case_311: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st312 + } + case (m.data)[(m.p)] >= 9: + goto tr509 + } + goto st11 + st312: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof312 + } + st_case_312: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st313 + } + case (m.data)[(m.p)] >= 9: + goto tr509 + } + goto st11 + st313: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof313 + } + st_case_313: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st314 + } + case (m.data)[(m.p)] >= 9: + goto tr509 + } + goto st11 + st314: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof314 + } + st_case_314: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st315 + } + case (m.data)[(m.p)] >= 9: + goto tr509 + } + goto st11 + st315: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof315 + } + st_case_315: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st316 + } + case (m.data)[(m.p)] >= 9: + goto tr509 + } + goto st11 + st316: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof316 + } + st_case_316: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st317 + } + 
case (m.data)[(m.p)] >= 9: + goto tr509 + } + goto st11 + st317: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof317 + } + st_case_317: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st318 + } + case (m.data)[(m.p)] >= 9: + goto tr509 + } + goto st11 + st318: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof318 + } + st_case_318: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st319 + } + case (m.data)[(m.p)] >= 9: + goto tr509 + } + goto st11 + st319: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof319 + } + st_case_319: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st320 + } + case (m.data)[(m.p)] >= 9: + goto tr509 + } + goto st11 + st320: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof320 + } + st_case_320: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st321 + } + case (m.data)[(m.p)] >= 9: + goto tr509 + } + goto st11 + st321: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof321 + } + st_case_321: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st322 + } + case (m.data)[(m.p)] >= 9: + goto tr509 + } + goto st11 + st322: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof322 + } + st_case_322: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st323 + } + case (m.data)[(m.p)] >= 9: + goto tr509 + } + goto st11 + st323: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof323 + } + st_case_323: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st324 + } + case (m.data)[(m.p)] >= 9: + goto tr509 + } + goto st11 + st324: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof324 + } + st_case_324: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st325 + } + 
case (m.data)[(m.p)] >= 9: + goto tr509 + } + goto st11 + st325: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof325 + } + st_case_325: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr510 + case 13: + goto tr472 + case 32: + goto tr509 + case 44: + goto tr4 + case 61: + goto tr49 + case 92: + goto st28 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 { + goto tr509 + } + goto st11 + tr503: + (m.cs) = 44 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr568: + (m.cs) = 44 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr819: + (m.cs) = 44 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr737: + (m.cs) = 44 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr955: + (m.cs) = 44 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr960: + (m.cs) = 44 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr965: + (m.cs) = 44 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr1014: + (m.cs) = 44 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr1017: + (m.cs) = 44 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr1020: + (m.cs) = 
44 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st44: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof44 + } + st_case_44: +//line plugins/parsers/influx/machine.go:7759 + switch (m.data)[(m.p)] { + case 32: + goto tr47 + case 44: + goto tr47 + case 61: + goto tr47 + case 92: + goto tr135 + } + switch { + case (m.data)[(m.p)] > 10: + if 12 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 { + goto tr47 + } + case (m.data)[(m.p)] >= 9: + goto tr47 + } + goto tr134 + tr134: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st45 + st45: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof45 + } + st_case_45: +//line plugins/parsers/influx/machine.go:7790 + switch (m.data)[(m.p)] { + case 32: + goto tr47 + case 44: + goto tr47 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 10: + if 12 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 { + goto tr47 + } + case (m.data)[(m.p)] >= 9: + goto tr47 + } + goto st45 + tr137: +//line plugins/parsers/influx/machine.go.rl:87 + + m.key = m.text() + +//line plugins/parsers/influx/machine.go.rl:100 + + m.key = m.text() + + goto st46 + st46: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof46 + } + st_case_46: +//line plugins/parsers/influx/machine.go:7825 + switch (m.data)[(m.p)] { + case 32: + goto tr47 + case 34: + goto tr139 + case 44: + goto tr47 + case 45: + goto tr140 + case 46: + goto tr141 + case 48: + goto tr142 + case 61: + goto tr47 + case 70: + goto tr144 + case 84: + goto tr145 + case 92: + goto tr58 + case 102: + goto tr146 + case 116: + goto tr147 + } + switch { + case (m.data)[(m.p)] < 12: + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 10 { + goto tr47 + } + case (m.data)[(m.p)] > 13: + if 49 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto tr143 + } + default: + goto tr47 + } + goto tr57 + tr139: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st47 + st47: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof47 + } + st_case_47: +//line plugins/parsers/influx/machine.go:7876 + switch (m.data)[(m.p)] { + case 9: + goto tr149 + case 10: + goto tr24 + case 11: + goto tr150 + case 12: + goto tr60 + case 13: + goto tr25 + case 32: + goto tr149 + case 34: + goto tr151 + case 44: + goto tr152 + case 61: + goto tr23 + case 92: + goto tr153 + } + goto tr148 + tr148: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st48 + st48: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof48 + } + st_case_48: +//line plugins/parsers/influx/machine.go:7911 + switch (m.data)[(m.p)] { + case 9: + goto tr155 + case 10: + goto tr29 + case 11: + goto tr156 + case 12: + goto tr60 + case 13: + goto st7 + case 32: + goto tr155 + case 34: + goto tr157 + case 44: + goto tr158 + case 61: + goto st6 + case 92: + goto st63 + } + goto st48 + tr180: + (m.cs) = 49 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr155: + (m.cs) = 49 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr149: + (m.cs) = 49 
+//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto _again + st49: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof49 + } + st_case_49: +//line plugins/parsers/influx/machine.go:7983 + switch (m.data)[(m.p)] { + case 9: + goto st49 + case 10: + goto tr29 + case 11: + goto tr162 + case 12: + goto st2 + case 13: + goto st7 + case 32: + goto st49 + case 34: + goto tr97 + case 44: + goto st6 + case 61: + goto st6 + case 92: + goto tr163 + } + goto tr160 + tr160: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st50 + st50: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof50 + } + st_case_50: +//line plugins/parsers/influx/machine.go:8018 + switch (m.data)[(m.p)] { + case 9: + goto st6 + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 32: + goto st6 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + goto st50 + tr165: +//line plugins/parsers/influx/machine.go.rl:100 + + m.key = m.text() + + goto st51 + st51: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof51 + } + st_case_51: +//line plugins/parsers/influx/machine.go:8051 + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr107 + case 45: + goto tr167 + case 46: + goto tr168 + case 48: + goto tr169 + case 70: + goto tr171 + case 84: + goto tr172 + case 92: + goto st75 + case 102: + goto tr173 + case 116: + goto tr174 + } + if 49 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto tr170 + } + goto st6 + tr167: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st52 + st52: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof52 + } + st_case_52: +//line plugins/parsers/influx/machine.go:8093 + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 46: + goto st53 + case 48: + goto st632 + case 92: + goto st75 + } + if 49 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st633 + } + goto st6 + tr168: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st53 + st53: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof53 + } + st_case_53: +//line plugins/parsers/influx/machine.go:8125 + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 92: + goto st75 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st326 + } + goto st6 + st326: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof326 + } + st_case_326: + switch (m.data)[(m.p)] { + case 10: + goto tr534 + case 12: + goto tr535 + case 13: + goto tr536 + case 32: + goto tr533 + case 34: + goto tr31 + case 44: + goto tr537 + case 69: + goto st174 + case 92: + goto st75 + case 101: + goto st174 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st326 + } + case (m.data)[(m.p)] >= 9: + goto tr533 + } + goto st6 + tr925: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st327 + tr533: + (m.cs) = 327 +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr931: + (m.cs) = 327 +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if 
err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr934: + (m.cs) = 327 +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr938: + (m.cs) = 327 +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st327: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof327 + } + st_case_327: +//line plugins/parsers/influx/machine.go:8239 + switch (m.data)[(m.p)] { + case 10: + goto tr275 + case 12: + goto st272 + case 13: + goto st103 + case 32: + goto st327 + case 34: + goto tr31 + case 45: + goto tr541 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto tr542 + } + case (m.data)[(m.p)] >= 9: + goto st327 + } + goto st6 + tr669: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + + goto st328 + tr275: +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + + goto st328 + tr534: + (m.cs) = 328 +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + + goto _again + tr678: + (m.cs) = 328 +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + +//line plugins/parsers/influx/machine.go.rl:149 + + err = m.handler.SetTimestamp(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr741: + (m.cs) = 328 +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + + goto _again + tr748: + (m.cs) = 328 +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + + goto _again + tr755: + (m.cs) = 328 +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr900: + (m.cs) = 328 +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + + goto _again + st328: +//line 
plugins/parsers/influx/machine.go.rl:164 + + m.finishMetric = true + (m.cs) = 740 + { + (m.p)++ + goto _out + } + + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof328 + } + st_case_328: +//line plugins/parsers/influx/machine.go:8410 + switch (m.data)[(m.p)] { + case 9: + goto st165 + case 10: + goto tr29 + case 11: + goto tr339 + case 12: + goto st8 + case 13: + goto st7 + case 32: + goto st165 + case 34: + goto tr118 + case 35: + goto st6 + case 44: + goto st6 + case 92: + goto tr340 + } + goto tr337 + tr337: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st54 + st54: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof54 + } + st_case_54: +//line plugins/parsers/influx/machine.go:8445 + switch (m.data)[(m.p)] { + case 9: + goto tr180 + case 10: + goto tr29 + case 11: + goto tr181 + case 12: + goto tr1 + case 13: + goto st7 + case 32: + goto tr180 + case 34: + goto tr91 + case 44: + goto tr182 + case 92: + goto st156 + } + goto st54 + tr181: + (m.cs) = 55 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st55: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof55 + } + st_case_55: +//line plugins/parsers/influx/machine.go:8485 + switch (m.data)[(m.p)] { + case 9: + goto tr180 + case 10: + goto tr29 + case 11: + goto tr185 + case 12: + goto tr1 + case 13: + goto st7 + case 32: + goto tr180 + case 34: + goto tr124 + case 44: + goto tr182 + case 61: + goto st54 + case 92: + goto tr186 + } + goto tr184 + tr184: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st56 + st56: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof56 + } + st_case_56: +//line plugins/parsers/influx/machine.go:8520 + switch (m.data)[(m.p)] { + case 9: + goto tr180 + case 10: + goto tr29 + case 11: + goto tr188 + case 12: + goto tr1 + case 13: + goto st7 + case 32: + goto tr180 + case 34: + goto tr128 + case 44: + goto tr182 + case 61: + goto tr189 + case 92: + goto st153 + } + goto st56 + tr188: + (m.cs) = 57 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr185: + (m.cs) = 57 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto _again + st57: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof57 + } + st_case_57: +//line plugins/parsers/influx/machine.go:8579 + switch (m.data)[(m.p)] { + case 9: + goto tr180 + case 10: + goto tr29 + case 11: + goto tr185 + case 12: + goto tr1 + case 13: + goto st7 + case 32: + goto tr180 + case 34: + goto tr124 + case 44: + goto tr182 + case 61: + goto tr189 + case 92: + goto tr186 + } + goto tr184 + tr182: + (m.cs) = 58 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr158: + (m.cs) = 58 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr152: + (m.cs) = 58 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + 
goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto _again + st58: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof58 + } + st_case_58: +//line plugins/parsers/influx/machine.go:8651 + switch (m.data)[(m.p)] { + case 9: + goto st6 + case 10: + goto tr29 + case 12: + goto tr47 + case 13: + goto st7 + case 32: + goto st6 + case 34: + goto tr192 + case 44: + goto st6 + case 61: + goto st6 + case 92: + goto tr193 + } + goto tr191 + tr191: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st59 + st59: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof59 + } + st_case_59: +//line plugins/parsers/influx/machine.go:8684 + switch (m.data)[(m.p)] { + case 9: + goto st6 + case 10: + goto tr29 + case 12: + goto tr47 + case 13: + goto st7 + case 32: + goto st6 + case 34: + goto tr195 + case 44: + goto st6 + case 61: + goto tr196 + case 92: + goto st70 + } + goto st59 + tr192: + (m.cs) = 329 +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + +//line plugins/parsers/influx/machine.go.rl:140 + + err = m.handler.AddString(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr195: + (m.cs) = 329 +//line plugins/parsers/influx/machine.go.rl:140 + + err = m.handler.AddString(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st329: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof329 + } + st_case_329: +//line plugins/parsers/influx/machine.go:8741 + switch (m.data)[(m.p)] { + case 10: + goto tr103 + case 11: + goto st330 + case 13: + goto st33 + case 32: + goto st272 + case 44: + goto st36 + case 61: + goto tr55 + case 92: + goto st24 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 { + goto st272 + } + goto st14 + st330: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof330 + } + st_case_330: + switch (m.data)[(m.p)] { + case 10: + goto tr103 + case 11: + goto st330 + case 13: + goto st33 + case 32: + goto st272 + case 44: + goto tr198 + case 45: + goto tr544 + case 61: + goto tr55 + case 92: + goto st24 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto tr545 + } + case (m.data)[(m.p)] >= 9: + goto st272 + } + goto st14 + tr544: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st60 + st60: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof60 + } + st_case_60: +//line plugins/parsers/influx/machine.go:8805 + switch (m.data)[(m.p)] { + case 32: + goto tr198 + case 44: + goto tr198 + case 61: + goto tr55 + case 92: + goto st24 + } + switch { + case (m.data)[(m.p)] < 12: + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 10 { + goto tr198 + } + case (m.data)[(m.p)] > 13: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st331 + } + default: + goto tr198 + } + goto st14 + tr545: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st331 + st331: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof331 + } + st_case_331: +//line plugins/parsers/influx/machine.go:8840 + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr546 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr198 + case 61: + goto tr55 + case 92: + goto st24 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st333 + } + case (m.data)[(m.p)] >= 9: + goto tr469 + } + goto st14 + tr546: + (m.cs) = 332 +//line plugins/parsers/influx/machine.go.rl:149 + + err = 
+	// (generated code continues: additional Ragel state-machine states and transition
+	// actions for the InfluxDB line-protocol parser in plugins/parsers/influx/machine.go,
+	// each dispatching to the handler callbacks SetMeasurement, AddTag, AddString,
+	// AddInt, AddUint, AddFloat, AddBool and SetTimestamp)
+ if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr807: + (m.cs) = 97 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr813: + (m.cs) = 97 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr826: + (m.cs) = 97 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr832: + (m.cs) = 97 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr837: + (m.cs) = 97 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st97: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof97 + } + st_case_97: +//line plugins/parsers/influx/machine.go:13683 + switch (m.data)[(m.p)] { + case 9: + goto st6 + case 10: + goto tr29 + case 12: + goto tr47 + case 13: + goto st7 + case 32: + goto st6 + case 34: + goto tr258 + case 44: + goto st6 + case 61: + goto st6 + case 92: + goto tr259 + } + goto tr257 + tr257: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st98 + st98: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof98 + } + st_case_98: +//line plugins/parsers/influx/machine.go:13716 + switch (m.data)[(m.p)] { + case 9: + goto st6 + case 10: + goto tr29 + case 12: + goto tr47 + case 13: + goto st7 + case 32: + goto st6 + case 34: + goto tr261 + case 44: + goto st6 + case 61: + goto tr262 + case 92: + goto st137 + } + goto st98 + tr258: + (m.cs) = 408 +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + +//line plugins/parsers/influx/machine.go.rl:140 + + err = m.handler.AddString(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr261: + (m.cs) = 408 +//line plugins/parsers/influx/machine.go.rl:140 + + err = m.handler.AddString(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st408: + if (m.p)++; (m.p) == 
(m.pe) { + goto _test_eof408 + } + st_case_408: +//line plugins/parsers/influx/machine.go:13773 + switch (m.data)[(m.p)] { + case 10: + goto tr103 + case 11: + goto st409 + case 13: + goto st33 + case 32: + goto st272 + case 44: + goto st36 + case 61: + goto tr137 + case 92: + goto st100 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 { + goto st272 + } + goto st45 + st409: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof409 + } + st_case_409: + switch (m.data)[(m.p)] { + case 10: + goto tr103 + case 11: + goto st409 + case 13: + goto st33 + case 32: + goto st272 + case 44: + goto tr132 + case 45: + goto tr646 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto tr647 + } + case (m.data)[(m.p)] >= 9: + goto st272 + } + goto st45 + tr646: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st99 + st99: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof99 + } + st_case_99: +//line plugins/parsers/influx/machine.go:13837 + switch (m.data)[(m.p)] { + case 32: + goto tr132 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] < 12: + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 10 { + goto tr132 + } + case (m.data)[(m.p)] > 13: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st410 + } + default: + goto tr132 + } + goto st45 + tr647: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st410 + st410: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof410 + } + st_case_410: +//line plugins/parsers/influx/machine.go:13872 + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st412 + } + case (m.data)[(m.p)] >= 9: + goto tr469 + } + goto st45 + tr648: + (m.cs) = 411 +//line plugins/parsers/influx/machine.go.rl:149 + + err = m.handler.SetTimestamp(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st411: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof411 + } + st_case_411: +//line plugins/parsers/influx/machine.go:13916 + switch (m.data)[(m.p)] { + case 10: + goto tr103 + case 11: + goto st411 + case 13: + goto st33 + case 32: + goto st277 + case 44: + goto tr47 + case 61: + goto tr137 + case 92: + goto st100 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 { + goto st277 + } + goto st45 + tr135: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st100 + st100: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof100 + } + st_case_100: +//line plugins/parsers/influx/machine.go:13948 + if (m.data)[(m.p)] == 92 { + goto st101 + } + switch { + case (m.data)[(m.p)] > 10: + if 12 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 { + goto tr47 + } + case (m.data)[(m.p)] >= 9: + goto tr47 + } + goto st45 + st101: +//line plugins/parsers/influx/machine.go.rl:240 + (m.p)-- + + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof101 + } + st_case_101: +//line plugins/parsers/influx/machine.go:13969 + switch (m.data)[(m.p)] { + case 32: + goto tr47 + case 44: + goto tr47 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 10: + if 12 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 { + goto tr47 + } + case (m.data)[(m.p)] >= 9: + goto tr47 + } + goto st45 + st412: + 
if (m.p)++; (m.p) == (m.pe) { + goto _test_eof412 + } + st_case_412: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st413 + } + case (m.data)[(m.p)] >= 9: + goto tr469 + } + goto st45 + st413: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof413 + } + st_case_413: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st414 + } + case (m.data)[(m.p)] >= 9: + goto tr469 + } + goto st45 + st414: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof414 + } + st_case_414: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st415 + } + case (m.data)[(m.p)] >= 9: + goto tr469 + } + goto st45 + st415: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof415 + } + st_case_415: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st416 + } + case (m.data)[(m.p)] >= 9: + goto tr469 + } + goto st45 + st416: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof416 + } + st_case_416: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st417 + } + case (m.data)[(m.p)] >= 9: + goto tr469 + } + goto st45 + st417: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof417 + } + st_case_417: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st418 + } + case (m.data)[(m.p)] >= 9: + goto tr469 + } + goto st45 + st418: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof418 + } + st_case_418: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st419 + } + case (m.data)[(m.p)] >= 9: + goto tr469 + } + goto st45 + st419: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof419 + } + st_case_419: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st420 + } + case (m.data)[(m.p)] >= 9: + goto 
tr469 + } + goto st45 + st420: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof420 + } + st_case_420: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st421 + } + case (m.data)[(m.p)] >= 9: + goto tr469 + } + goto st45 + st421: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof421 + } + st_case_421: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st422 + } + case (m.data)[(m.p)] >= 9: + goto tr469 + } + goto st45 + st422: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof422 + } + st_case_422: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st423 + } + case (m.data)[(m.p)] >= 9: + goto tr469 + } + goto st45 + st423: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof423 + } + st_case_423: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st424 + } + case (m.data)[(m.p)] >= 9: + goto tr469 + } + goto st45 + st424: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof424 + } + st_case_424: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st425 + } + case (m.data)[(m.p)] >= 9: + goto tr469 + } + goto st45 + st425: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof425 + } + st_case_425: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st426 + } + case (m.data)[(m.p)] >= 9: + goto tr469 + } + goto st45 + st426: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof426 + } + st_case_426: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st427 + } + case (m.data)[(m.p)] >= 9: + goto tr469 + } + goto st45 + st427: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof427 + } + st_case_427: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st428 + } + case 
(m.data)[(m.p)] >= 9: + goto tr469 + } + goto st45 + st428: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof428 + } + st_case_428: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st429 + } + case (m.data)[(m.p)] >= 9: + goto tr469 + } + goto st45 + st429: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof429 + } + st_case_429: + switch (m.data)[(m.p)] { + case 10: + goto tr470 + case 11: + goto tr648 + case 13: + goto tr472 + case 32: + goto tr469 + case 44: + goto tr132 + case 61: + goto tr137 + case 92: + goto st100 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 { + goto tr469 + } + goto st45 + tr262: +//line plugins/parsers/influx/machine.go.rl:87 + + m.key = m.text() + +//line plugins/parsers/influx/machine.go.rl:100 + + m.key = m.text() + + goto st102 + st102: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof102 + } + st_case_102: +//line plugins/parsers/influx/machine.go:14539 + switch (m.data)[(m.p)] { + case 9: + goto st6 + case 10: + goto tr29 + case 12: + goto tr47 + case 13: + goto st7 + case 32: + goto st6 + case 34: + goto tr266 + case 44: + goto st6 + case 45: + goto tr267 + case 46: + goto tr268 + case 48: + goto tr269 + case 61: + goto st6 + case 70: + goto tr271 + case 84: + goto tr272 + case 92: + goto tr229 + case 102: + goto tr273 + case 116: + goto tr274 + } + if 49 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto tr270 + } + goto tr228 + tr266: + (m.cs) = 430 +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + +//line plugins/parsers/influx/machine.go.rl:140 + + err = m.handler.AddString(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st430: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof430 + } + st_case_430: +//line plugins/parsers/influx/machine.go:14600 + switch (m.data)[(m.p)] { + case 9: + goto tr668 + case 10: + goto tr669 + case 11: + goto tr670 + case 12: + goto tr566 + case 13: + goto tr671 + case 32: + goto tr668 + case 34: + goto tr151 + case 44: + goto tr672 + case 61: + goto tr23 + case 92: + goto tr153 + } + goto tr148 + tr863: + (m.cs) = 431 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr701: + (m.cs) = 431 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr668: + (m.cs) = 431 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto _again + tr859: + (m.cs) = 431 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr729: + (m.cs) = 431 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + 
(m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr740: + (m.cs) = 431 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr747: + (m.cs) = 431 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr754: + (m.cs) = 431 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr891: + (m.cs) = 431 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr895: + (m.cs) = 431 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr899: + (m.cs) = 431 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st431: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof431 + } + st_case_431: +//line plugins/parsers/influx/machine.go:14856 + switch (m.data)[(m.p)] { + case 9: + goto st431 + case 10: + goto tr275 + case 11: + goto tr674 + case 12: + goto st301 + case 13: + goto st103 + case 32: + goto st431 + case 34: + goto tr97 + case 44: + goto st6 + case 45: + goto tr675 + case 61: + goto st6 + case 92: + goto tr163 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto tr676 + } + goto tr160 + tr674: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st432 + st432: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof432 + } + st_case_432: +//line plugins/parsers/influx/machine.go:14896 + switch (m.data)[(m.p)] { + case 9: + goto st431 + case 10: + goto tr275 + case 11: + goto tr674 + case 12: + goto st301 + case 13: + goto st103 + case 32: + goto st431 + case 34: + goto tr97 + case 44: + goto st6 + case 45: + goto tr675 + case 61: + goto tr165 + case 
92: + goto tr163 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto tr676 + } + goto tr160 + tr671: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st103 + tr680: + (m.cs) = 103 +//line plugins/parsers/influx/machine.go.rl:149 + + err = m.handler.SetTimestamp(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr536: + (m.cs) = 103 +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr744: + (m.cs) = 103 +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr751: + (m.cs) = 103 +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr758: + (m.cs) = 103 +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st103: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof103 + } + st_case_103: +//line plugins/parsers/influx/machine.go:15001 + if (m.data)[(m.p)] == 10 { + goto tr275 + } + goto tr8 + tr675: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st104 + st104: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof104 + } + st_case_104: +//line plugins/parsers/influx/machine.go:15017 + switch (m.data)[(m.p)] { + case 9: + goto st6 + case 10: + goto tr29 + case 12: + goto tr105 + case 13: + goto st7 + case 32: + goto st6 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st433 + } + goto st50 + tr676: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st433 + st433: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof433 + } + st_case_433: +//line plugins/parsers/influx/machine.go:15053 + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st436 + } + goto st50 + tr677: + (m.cs) = 434 +//line plugins/parsers/influx/machine.go.rl:149 + + err = m.handler.SetTimestamp(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st434: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof434 + } + st_case_434: +//line plugins/parsers/influx/machine.go:15098 + switch (m.data)[(m.p)] { + case 10: + goto tr275 + case 12: + goto st277 + case 13: + goto st103 + case 32: + goto st434 + case 34: + goto tr31 + case 92: + goto st75 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 11 { + goto st434 + } + goto st6 + tr679: + (m.cs) = 435 +//line plugins/parsers/influx/machine.go.rl:149 + + err = m.handler.SetTimestamp(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st435: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof435 + } + st_case_435: +//line plugins/parsers/influx/machine.go:15135 + switch (m.data)[(m.p)] { + case 9: + goto st434 + case 10: + goto tr275 + case 
11: + goto st435 + case 12: + goto st277 + case 13: + goto st103 + case 32: + goto st434 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + goto st50 + tr163: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st105 + st105: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof105 + } + st_case_105: +//line plugins/parsers/influx/machine.go:15170 + switch (m.data)[(m.p)] { + case 34: + goto st50 + case 92: + goto st50 + } + switch { + case (m.data)[(m.p)] > 10: + if 12 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 { + goto tr8 + } + case (m.data)[(m.p)] >= 9: + goto tr8 + } + goto st3 + st436: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof436 + } + st_case_436: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st437 + } + goto st50 + st437: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof437 + } + st_case_437: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st438 + } + goto st50 + st438: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof438 + } + st_case_438: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st439 + } + goto st50 + st439: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof439 + } + st_case_439: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st440 + } + goto st50 + st440: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof440 + } + st_case_440: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st441 + } + goto st50 + st441: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof441 + } + st_case_441: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st442 + } + goto st50 + st442: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof442 + } + st_case_442: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto 
tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st443 + } + goto st50 + st443: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof443 + } + st_case_443: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st444 + } + goto st50 + st444: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof444 + } + st_case_444: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st445 + } + goto st50 + st445: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof445 + } + st_case_445: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st446 + } + goto st50 + st446: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof446 + } + st_case_446: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st447 + } + goto st50 + st447: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof447 + } + st_case_447: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st448 + } + goto st50 + st448: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof448 + } + st_case_448: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st449 + } + goto st50 + st449: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof449 + } + st_case_449: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st450 + } + goto st50 + st450: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof450 + } + st_case_450: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st451 + } + 
goto st50 + st451: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof451 + } + st_case_451: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st452 + } + goto st50 + st452: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof452 + } + st_case_452: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st453 + } + goto st50 + st453: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof453 + } + st_case_453: + switch (m.data)[(m.p)] { + case 9: + goto tr677 + case 10: + goto tr678 + case 11: + goto tr679 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto st105 + } + goto st50 + tr670: + (m.cs) = 454 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto _again + tr730: + (m.cs) = 454 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr742: + (m.cs) = 454 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr749: + (m.cs) = 454 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr756: + (m.cs) = 454 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st454: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof454 + } + st_case_454: +//line plugins/parsers/influx/machine.go:15855 + switch (m.data)[(m.p)] { + case 9: + goto tr701 + case 10: + goto tr275 + case 11: + goto tr702 + case 12: + goto tr566 + case 13: + goto st103 + case 32: + goto tr701 + case 34: + goto tr204 + case 44: + goto tr158 + case 45: + goto tr703 + case 61: + goto st6 + case 92: + goto tr205 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto tr704 + } + goto 
tr202 + tr702: + (m.cs) = 455 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto _again + st455: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof455 + } + st_case_455: +//line plugins/parsers/influx/machine.go:15906 + switch (m.data)[(m.p)] { + case 9: + goto tr701 + case 10: + goto tr275 + case 11: + goto tr702 + case 12: + goto tr566 + case 13: + goto st103 + case 32: + goto tr701 + case 34: + goto tr204 + case 44: + goto tr158 + case 45: + goto tr703 + case 61: + goto tr165 + case 92: + goto tr205 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto tr704 + } + goto tr202 + tr703: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st106 + st106: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof106 + } + st_case_106: +//line plugins/parsers/influx/machine.go:15946 + switch (m.data)[(m.p)] { + case 9: + goto tr155 + case 10: + goto tr29 + case 11: + goto tr207 + case 12: + goto tr60 + case 13: + goto st7 + case 32: + goto tr155 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st456 + } + goto st66 + tr704: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st456 + st456: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof456 + } + st_case_456: +//line plugins/parsers/influx/machine.go:15984 + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st460 + } + goto st66 + tr870: + (m.cs) = 457 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr710: + (m.cs) = 457 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr867: + (m.cs) = 457 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:149 + + err = m.handler.SetTimestamp(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr705: + (m.cs) = 457 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:149 + + err = m.handler.SetTimestamp(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st457: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof457 + } + st_case_457: +//line plugins/parsers/influx/machine.go:16088 + switch (m.data)[(m.p)] { + case 9: + goto st457 + case 10: + goto tr275 + case 11: + goto tr709 + case 12: + goto st305 + case 13: + goto st103 + case 32: + goto st457 + case 34: + goto tr97 + case 44: + goto st6 + case 61: + goto st6 + case 92: + goto tr163 + } + goto tr160 + tr709: +//line 
plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st458 + st458: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof458 + } + st_case_458: +//line plugins/parsers/influx/machine.go:16123 + switch (m.data)[(m.p)] { + case 9: + goto st457 + case 10: + goto tr275 + case 11: + goto tr709 + case 12: + goto st305 + case 13: + goto st103 + case 32: + goto st457 + case 34: + goto tr97 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto tr163 + } + goto tr160 + tr711: + (m.cs) = 459 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto _again + tr706: + (m.cs) = 459 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:149 + + err = m.handler.SetTimestamp(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st459: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof459 + } + st_case_459: +//line plugins/parsers/influx/machine.go:16192 + switch (m.data)[(m.p)] { + case 9: + goto tr710 + case 10: + goto tr275 + case 11: + goto tr711 + case 12: + goto tr575 + case 13: + goto st103 + case 32: + goto tr710 + case 34: + goto tr204 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto tr205 + } + goto tr202 + st460: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof460 + } + st_case_460: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st461 + } + goto st66 + st461: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof461 + } + st_case_461: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st462 + } + goto st66 + st462: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof462 + } + st_case_462: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st463 + } + goto st66 + st463: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof463 + } + st_case_463: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st464 + } + goto st66 + st464: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof464 + } + st_case_464: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + 
case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st465 + } + goto st66 + st465: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof465 + } + st_case_465: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st466 + } + goto st66 + st466: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof466 + } + st_case_466: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st467 + } + goto st66 + st467: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof467 + } + st_case_467: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st468 + } + goto st66 + st468: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof468 + } + st_case_468: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st469 + } + goto st66 + st469: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof469 + } + st_case_469: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st470 + } + goto st66 + st470: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof470 + } + st_case_470: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st471 + } + goto st66 + st471: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof471 + } + st_case_471: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st472 + } + goto st66 + st472: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof472 + } + st_case_472: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= 
(m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st473 + } + goto st66 + st473: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof473 + } + st_case_473: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st474 + } + goto st66 + st474: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof474 + } + st_case_474: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st475 + } + goto st66 + st475: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof475 + } + st_case_475: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st476 + } + goto st66 + st476: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof476 + } + st_case_476: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st477 + } + goto st66 + st477: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof477 + } + st_case_477: + switch (m.data)[(m.p)] { + case 9: + goto tr705 + case 10: + goto tr678 + case 11: + goto tr706 + case 12: + goto tr572 + case 13: + goto tr680 + case 32: + goto tr705 + case 34: + goto tr208 + case 44: + goto tr158 + case 61: + goto tr165 + case 92: + goto st68 + } + goto st66 + tr672: + (m.cs) = 107 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto _again + tr861: + (m.cs) = 107 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr732: + (m.cs) = 107 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr745: + (m.cs) = 107 +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + 
[vendored generated code omitted: plugins/parsers/influx/machine.go, the Ragel-generated state machine (produced from plugins/parsers/influx/machine.go.rl) for the InfluxDB line-protocol parser; its states scan measurement, tag, field and timestamp tokens and dispatch to the handler callbacks SetMeasurement, AddTag, AddInt, AddUint, AddFloat, AddBool, AddString and SetTimestamp — the generated states and transition tables are not reproduced here]
+ case 13: + goto tr680 + case 32: + goto tr867 + case 34: + goto tr128 + case 44: + goto tr182 + case 61: + goto tr189 + case 92: + goto st153 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st571 + } + goto st56 + st571: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof571 + } + st_case_571: + switch (m.data)[(m.p)] { + case 9: + goto tr867 + case 10: + goto tr678 + case 11: + goto tr868 + case 12: + goto tr509 + case 13: + goto tr680 + case 32: + goto tr867 + case 34: + goto tr128 + case 44: + goto tr182 + case 61: + goto tr189 + case 92: + goto st153 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st572 + } + goto st56 + st572: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof572 + } + st_case_572: + switch (m.data)[(m.p)] { + case 9: + goto tr867 + case 10: + goto tr678 + case 11: + goto tr868 + case 12: + goto tr509 + case 13: + goto tr680 + case 32: + goto tr867 + case 34: + goto tr128 + case 44: + goto tr182 + case 61: + goto tr189 + case 92: + goto st153 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st573 + } + goto st56 + st573: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof573 + } + st_case_573: + switch (m.data)[(m.p)] { + case 9: + goto tr867 + case 10: + goto tr678 + case 11: + goto tr868 + case 12: + goto tr509 + case 13: + goto tr680 + case 32: + goto tr867 + case 34: + goto tr128 + case 44: + goto tr182 + case 61: + goto tr189 + case 92: + goto st153 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st574 + } + goto st56 + st574: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof574 + } + st_case_574: + switch (m.data)[(m.p)] { + case 9: + goto tr867 + case 10: + goto tr678 + case 11: + goto tr868 + case 12: + goto tr509 + case 13: + goto tr680 + case 32: + goto tr867 + case 34: + goto tr128 + case 44: + goto tr182 + case 61: + goto tr189 + case 92: + goto st153 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st575 + } + goto st56 + st575: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof575 + } + st_case_575: + switch (m.data)[(m.p)] { + case 9: + goto tr867 + case 10: + goto tr678 + case 11: + goto tr868 + case 12: + goto tr509 + case 13: + goto tr680 + case 32: + goto tr867 + case 34: + goto tr128 + case 44: + goto tr182 + case 61: + goto tr189 + case 92: + goto st153 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st576 + } + goto st56 + st576: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof576 + } + st_case_576: + switch (m.data)[(m.p)] { + case 9: + goto tr867 + case 10: + goto tr678 + case 11: + goto tr868 + case 12: + goto tr509 + case 13: + goto tr680 + case 32: + goto tr867 + case 34: + goto tr128 + case 44: + goto tr182 + case 61: + goto tr189 + case 92: + goto st153 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st577 + } + goto st56 + st577: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof577 + } + st_case_577: + switch (m.data)[(m.p)] { + case 9: + goto tr867 + case 10: + goto tr678 + case 11: + goto tr868 + case 12: + goto tr509 + case 13: + goto tr680 + case 32: + goto tr867 + case 34: + goto tr128 + case 44: + goto tr182 + case 61: + goto tr189 + case 92: + goto st153 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st578 + } + goto st56 + st578: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof578 + } + st_case_578: + switch (m.data)[(m.p)] { + case 9: + goto tr867 + case 10: + goto tr678 + case 11: + goto tr868 + case 12: + goto tr509 + case 13: + goto tr680 + case 32: + goto tr867 + case 34: + goto tr128 + case 44: + goto 
tr182 + case 61: + goto tr189 + case 92: + goto st153 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st579 + } + goto st56 + st579: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof579 + } + st_case_579: + switch (m.data)[(m.p)] { + case 9: + goto tr867 + case 10: + goto tr678 + case 11: + goto tr868 + case 12: + goto tr509 + case 13: + goto tr680 + case 32: + goto tr867 + case 34: + goto tr128 + case 44: + goto tr182 + case 61: + goto tr189 + case 92: + goto st153 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st580 + } + goto st56 + st580: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof580 + } + st_case_580: + switch (m.data)[(m.p)] { + case 9: + goto tr867 + case 10: + goto tr678 + case 11: + goto tr868 + case 12: + goto tr509 + case 13: + goto tr680 + case 32: + goto tr867 + case 34: + goto tr128 + case 44: + goto tr182 + case 61: + goto tr189 + case 92: + goto st153 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st581 + } + goto st56 + st581: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof581 + } + st_case_581: + switch (m.data)[(m.p)] { + case 9: + goto tr867 + case 10: + goto tr678 + case 11: + goto tr868 + case 12: + goto tr509 + case 13: + goto tr680 + case 32: + goto tr867 + case 34: + goto tr128 + case 44: + goto tr182 + case 61: + goto tr189 + case 92: + goto st153 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st582 + } + goto st56 + st582: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof582 + } + st_case_582: + switch (m.data)[(m.p)] { + case 9: + goto tr867 + case 10: + goto tr678 + case 11: + goto tr868 + case 12: + goto tr509 + case 13: + goto tr680 + case 32: + goto tr867 + case 34: + goto tr128 + case 44: + goto tr182 + case 61: + goto tr189 + case 92: + goto st153 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st583 + } + goto st56 + st583: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof583 + } + st_case_583: + switch (m.data)[(m.p)] { + case 9: + goto tr867 + case 10: + goto tr678 + case 11: + goto tr868 + case 12: + goto tr509 + case 13: + goto tr680 + case 32: + goto tr867 + case 34: + goto tr128 + case 44: + goto tr182 + case 61: + goto tr189 + case 92: + goto st153 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st584 + } + goto st56 + st584: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof584 + } + st_case_584: + switch (m.data)[(m.p)] { + case 9: + goto tr867 + case 10: + goto tr678 + case 11: + goto tr868 + case 12: + goto tr509 + case 13: + goto tr680 + case 32: + goto tr867 + case 34: + goto tr128 + case 44: + goto tr182 + case 61: + goto tr189 + case 92: + goto st153 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st585 + } + goto st56 + st585: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof585 + } + st_case_585: + switch (m.data)[(m.p)] { + case 9: + goto tr867 + case 10: + goto tr678 + case 11: + goto tr868 + case 12: + goto tr509 + case 13: + goto tr680 + case 32: + goto tr867 + case 34: + goto tr128 + case 44: + goto tr182 + case 61: + goto tr189 + case 92: + goto st153 + } + goto st56 + st154: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof154 + } + st_case_154: + switch (m.data)[(m.p)] { + case 9: + goto tr180 + case 10: + goto tr29 + case 11: + goto tr181 + case 12: + goto tr1 + case 13: + goto st7 + case 32: + goto tr180 + case 34: + goto tr317 + case 44: + goto tr182 + case 92: + goto st156 + } + switch { + case (m.data)[(m.p)] > 45: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st586 + } + case 
(m.data)[(m.p)] >= 43: + goto st155 + } + goto st54 + st155: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof155 + } + st_case_155: + switch (m.data)[(m.p)] { + case 9: + goto tr180 + case 10: + goto tr29 + case 11: + goto tr181 + case 12: + goto tr1 + case 13: + goto st7 + case 32: + goto tr180 + case 34: + goto tr91 + case 44: + goto tr182 + case 92: + goto st156 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st586 + } + goto st54 + st586: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof586 + } + st_case_586: + switch (m.data)[(m.p)] { + case 9: + goto tr859 + case 10: + goto tr534 + case 11: + goto tr860 + case 12: + goto tr641 + case 13: + goto tr536 + case 32: + goto tr859 + case 34: + goto tr91 + case 44: + goto tr861 + case 92: + goto st156 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st586 + } + goto st54 + tr340: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st156 + st156: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof156 + } + st_case_156: +//line plugins/parsers/influx/machine.go:22458 + switch (m.data)[(m.p)] { + case 34: + goto st54 + case 92: + goto st54 + } + switch { + case (m.data)[(m.p)] > 10: + if 12 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 { + goto tr8 + } + case (m.data)[(m.p)] >= 9: + goto tr8 + } + goto st1 + st587: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof587 + } + st_case_587: + switch (m.data)[(m.p)] { + case 9: + goto tr859 + case 10: + goto tr534 + case 11: + goto tr860 + case 12: + goto tr641 + case 13: + goto tr536 + case 32: + goto tr859 + case 34: + goto tr91 + case 44: + goto tr861 + case 46: + goto st563 + case 69: + goto st154 + case 92: + goto st156 + case 101: + goto st154 + case 105: + goto st589 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st588 + } + goto st54 + st588: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof588 + } + st_case_588: + switch (m.data)[(m.p)] { + case 9: + goto tr859 + case 10: + goto tr534 + case 11: + goto tr860 + case 12: + goto tr641 + case 13: + goto tr536 + case 32: + goto tr859 + case 34: + goto tr91 + case 44: + goto tr861 + case 46: + goto st563 + case 69: + goto st154 + case 92: + goto st156 + case 101: + goto st154 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st588 + } + goto st54 + st589: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof589 + } + st_case_589: + switch (m.data)[(m.p)] { + case 9: + goto tr891 + case 10: + goto tr741 + case 11: + goto tr892 + case 12: + goto tr825 + case 13: + goto tr744 + case 32: + goto tr891 + case 34: + goto tr91 + case 44: + goto tr893 + case 92: + goto st156 + } + goto st54 + st590: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof590 + } + st_case_590: + switch (m.data)[(m.p)] { + case 9: + goto tr859 + case 10: + goto tr534 + case 11: + goto tr860 + case 12: + goto tr641 + case 13: + goto tr536 + case 32: + goto tr859 + case 34: + goto tr91 + case 44: + goto tr861 + case 46: + goto st563 + case 69: + goto st154 + case 92: + goto st156 + case 101: + goto st154 + case 105: + goto st589 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st590 + } + goto st54 + tr215: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st591 + st591: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof591 + } + st_case_591: +//line plugins/parsers/influx/machine.go:22620 + switch (m.data)[(m.p)] { + case 9: + goto tr859 + case 10: + goto tr534 + case 11: + goto tr860 + case 12: + goto tr641 + case 13: + goto tr536 + case 32: + goto tr859 + case 34: + goto 
tr91 + case 44: + goto tr861 + case 46: + goto st563 + case 69: + goto st154 + case 92: + goto st156 + case 101: + goto st154 + case 105: + goto st589 + case 117: + goto st592 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st588 + } + goto st54 + st592: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof592 + } + st_case_592: + switch (m.data)[(m.p)] { + case 9: + goto tr895 + case 10: + goto tr748 + case 11: + goto tr896 + case 12: + goto tr831 + case 13: + goto tr751 + case 32: + goto tr895 + case 34: + goto tr91 + case 44: + goto tr897 + case 92: + goto st156 + } + goto st54 + tr216: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st593 + st593: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof593 + } + st_case_593: +//line plugins/parsers/influx/machine.go:22692 + switch (m.data)[(m.p)] { + case 9: + goto tr859 + case 10: + goto tr534 + case 11: + goto tr860 + case 12: + goto tr641 + case 13: + goto tr536 + case 32: + goto tr859 + case 34: + goto tr91 + case 44: + goto tr861 + case 46: + goto st563 + case 69: + goto st154 + case 92: + goto st156 + case 101: + goto st154 + case 105: + goto st589 + case 117: + goto st592 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st593 + } + goto st54 + tr217: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st594 + st594: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof594 + } + st_case_594: +//line plugins/parsers/influx/machine.go:22738 + switch (m.data)[(m.p)] { + case 9: + goto tr899 + case 10: + goto tr900 + case 11: + goto tr901 + case 12: + goto tr836 + case 13: + goto tr758 + case 32: + goto tr899 + case 34: + goto tr91 + case 44: + goto tr902 + case 65: + goto st157 + case 92: + goto st156 + case 97: + goto st160 + } + goto st54 + st157: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof157 + } + st_case_157: + switch (m.data)[(m.p)] { + case 9: + goto tr180 + case 10: + goto tr29 + case 11: + goto tr181 + case 12: + goto tr1 + case 13: + goto st7 + case 32: + goto tr180 + case 34: + goto tr91 + case 44: + goto tr182 + case 76: + goto st158 + case 92: + goto st156 + } + goto st54 + st158: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof158 + } + st_case_158: + switch (m.data)[(m.p)] { + case 9: + goto tr180 + case 10: + goto tr29 + case 11: + goto tr181 + case 12: + goto tr1 + case 13: + goto st7 + case 32: + goto tr180 + case 34: + goto tr91 + case 44: + goto tr182 + case 83: + goto st159 + case 92: + goto st156 + } + goto st54 + st159: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof159 + } + st_case_159: + switch (m.data)[(m.p)] { + case 9: + goto tr180 + case 10: + goto tr29 + case 11: + goto tr181 + case 12: + goto tr1 + case 13: + goto st7 + case 32: + goto tr180 + case 34: + goto tr91 + case 44: + goto tr182 + case 69: + goto st595 + case 92: + goto st156 + } + goto st54 + st595: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof595 + } + st_case_595: + switch (m.data)[(m.p)] { + case 9: + goto tr899 + case 10: + goto tr900 + case 11: + goto tr901 + case 12: + goto tr836 + case 13: + goto tr758 + case 32: + goto tr899 + case 34: + goto tr91 + case 44: + goto tr902 + case 92: + goto st156 + } + goto st54 + st160: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof160 + } + st_case_160: + switch (m.data)[(m.p)] { + case 9: + goto tr180 + case 10: + goto tr29 + case 11: + goto tr181 + case 12: + goto tr1 + case 13: + goto st7 + case 32: + goto tr180 + case 34: + goto tr91 + case 44: + goto tr182 + case 92: + goto st156 + case 108: + goto st161 + } + goto st54 + 
st161: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof161 + } + st_case_161: + switch (m.data)[(m.p)] { + case 9: + goto tr180 + case 10: + goto tr29 + case 11: + goto tr181 + case 12: + goto tr1 + case 13: + goto st7 + case 32: + goto tr180 + case 34: + goto tr91 + case 44: + goto tr182 + case 92: + goto st156 + case 115: + goto st162 + } + goto st54 + st162: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof162 + } + st_case_162: + switch (m.data)[(m.p)] { + case 9: + goto tr180 + case 10: + goto tr29 + case 11: + goto tr181 + case 12: + goto tr1 + case 13: + goto st7 + case 32: + goto tr180 + case 34: + goto tr91 + case 44: + goto tr182 + case 92: + goto st156 + case 101: + goto st595 + } + goto st54 + tr218: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st596 + st596: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof596 + } + st_case_596: +//line plugins/parsers/influx/machine.go:22969 + switch (m.data)[(m.p)] { + case 9: + goto tr899 + case 10: + goto tr900 + case 11: + goto tr901 + case 12: + goto tr836 + case 13: + goto tr758 + case 32: + goto tr899 + case 34: + goto tr91 + case 44: + goto tr902 + case 82: + goto st163 + case 92: + goto st156 + case 114: + goto st164 + } + goto st54 + st163: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof163 + } + st_case_163: + switch (m.data)[(m.p)] { + case 9: + goto tr180 + case 10: + goto tr29 + case 11: + goto tr181 + case 12: + goto tr1 + case 13: + goto st7 + case 32: + goto tr180 + case 34: + goto tr91 + case 44: + goto tr182 + case 85: + goto st159 + case 92: + goto st156 + } + goto st54 + st164: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof164 + } + st_case_164: + switch (m.data)[(m.p)] { + case 9: + goto tr180 + case 10: + goto tr29 + case 11: + goto tr181 + case 12: + goto tr1 + case 13: + goto st7 + case 32: + goto tr180 + case 34: + goto tr91 + case 44: + goto tr182 + case 92: + goto st156 + case 117: + goto st162 + } + goto st54 + tr219: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st597 + st597: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof597 + } + st_case_597: +//line plugins/parsers/influx/machine.go:23062 + switch (m.data)[(m.p)] { + case 9: + goto tr899 + case 10: + goto tr900 + case 11: + goto tr901 + case 12: + goto tr836 + case 13: + goto tr758 + case 32: + goto tr899 + case 34: + goto tr91 + case 44: + goto tr902 + case 92: + goto st156 + case 97: + goto st160 + } + goto st54 + tr220: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st598 + st598: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof598 + } + st_case_598: +//line plugins/parsers/influx/machine.go:23097 + switch (m.data)[(m.p)] { + case 9: + goto tr899 + case 10: + goto tr900 + case 11: + goto tr901 + case 12: + goto tr836 + case 13: + goto tr758 + case 32: + goto tr899 + case 34: + goto tr91 + case 44: + goto tr902 + case 92: + goto st156 + case 114: + goto st164 + } + goto st54 + st165: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof165 + } + st_case_165: + switch (m.data)[(m.p)] { + case 9: + goto st165 + case 10: + goto tr29 + case 11: + goto tr339 + case 12: + goto st8 + case 13: + goto st7 + case 32: + goto st165 + case 34: + goto tr118 + case 35: + goto st6 + case 44: + goto st6 + case 92: + goto tr340 + } + goto tr337 + tr339: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st166 + st166: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof166 + } + st_case_166: +//line plugins/parsers/influx/machine.go:23160 + switch (m.data)[(m.p)] { + case 9: + goto 
tr341 + case 10: + goto tr29 + case 11: + goto tr342 + case 12: + goto tr38 + case 13: + goto st7 + case 32: + goto tr341 + case 34: + goto tr85 + case 35: + goto st54 + case 44: + goto tr182 + case 92: + goto tr340 + } + goto tr337 + tr341: + (m.cs) = 167 +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st167: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof167 + } + st_case_167: +//line plugins/parsers/influx/machine.go:23202 + switch (m.data)[(m.p)] { + case 9: + goto st167 + case 10: + goto tr29 + case 11: + goto tr344 + case 12: + goto st10 + case 13: + goto st7 + case 32: + goto st167 + case 34: + goto tr124 + case 35: + goto tr160 + case 44: + goto st6 + case 61: + goto tr337 + case 92: + goto tr186 + } + goto tr184 + tr344: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st168 + tr345: + (m.cs) = 168 +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st168: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof168 + } + st_case_168: +//line plugins/parsers/influx/machine.go:23256 + switch (m.data)[(m.p)] { + case 9: + goto tr341 + case 10: + goto tr29 + case 11: + goto tr345 + case 12: + goto tr38 + case 13: + goto st7 + case 32: + goto tr341 + case 34: + goto tr124 + case 44: + goto tr182 + case 61: + goto tr346 + case 92: + goto tr186 + } + goto tr184 + tr342: + (m.cs) = 169 +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st169: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof169 + } + st_case_169: +//line plugins/parsers/influx/machine.go:23302 + switch (m.data)[(m.p)] { + case 9: + goto tr341 + case 10: + goto tr29 + case 11: + goto tr345 + case 12: + goto tr38 + case 13: + goto st7 + case 32: + goto tr341 + case 34: + goto tr124 + case 44: + goto tr182 + case 61: + goto tr337 + case 92: + goto tr186 + } + goto tr184 + tr541: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st170 + st170: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof170 + } + st_case_170: +//line plugins/parsers/influx/machine.go:23337 + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr105 + case 13: + goto st7 + case 34: + goto tr31 + case 92: + goto st75 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st599 + } + goto st6 + tr542: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st599 + st599: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof599 + } + st_case_599: +//line plugins/parsers/influx/machine.go:23365 + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st600 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + goto st6 + st600: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof600 + } + st_case_600: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 
+ case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st601 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + goto st6 + st601: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof601 + } + st_case_601: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st602 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + goto st6 + st602: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof602 + } + st_case_602: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st603 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + goto st6 + st603: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof603 + } + st_case_603: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st604 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + goto st6 + st604: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof604 + } + st_case_604: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st605 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + goto st6 + st605: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof605 + } + st_case_605: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st606 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + goto st6 + st606: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof606 + } + st_case_606: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st607 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + goto st6 + st607: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof607 + } + st_case_607: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st608 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + goto st6 + st608: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof608 + } + st_case_608: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st609 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + 
goto st6 + st609: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof609 + } + st_case_609: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st610 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + goto st6 + st610: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof610 + } + st_case_610: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st611 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + goto st6 + st611: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof611 + } + st_case_611: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st612 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + goto st6 + st612: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof612 + } + st_case_612: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st613 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + goto st6 + st613: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof613 + } + st_case_613: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st614 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + goto st6 + st614: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof614 + } + st_case_614: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st615 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + goto st6 + st615: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof615 + } + st_case_615: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st616 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + goto st6 + st616: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof616 + } + st_case_616: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st617 + } + case (m.data)[(m.p)] >= 9: + goto tr677 + } + goto st6 + st617: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof617 + } + st_case_617: + switch (m.data)[(m.p)] { + case 10: + goto tr678 + case 12: + goto tr469 + case 13: + goto 
tr680 + case 32: + goto tr677 + case 34: + goto tr31 + case 92: + goto st75 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 11 { + goto tr677 + } + goto st6 + tr926: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st171 + tr537: + (m.cs) = 171 +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr933: + (m.cs) = 171 +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr936: + (m.cs) = 171 +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + tr940: + (m.cs) = 171 +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st171: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof171 + } + st_case_171: +//line plugins/parsers/influx/machine.go:23951 + switch (m.data)[(m.p)] { + case 9: + goto st6 + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 32: + goto st6 + case 34: + goto tr97 + case 44: + goto st6 + case 61: + goto st6 + case 92: + goto tr349 + } + goto tr348 + tr348: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st172 + st172: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof172 + } + st_case_172: +//line plugins/parsers/influx/machine.go:23984 + switch (m.data)[(m.p)] { + case 9: + goto st6 + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 32: + goto st6 + case 34: + goto tr100 + case 44: + goto st6 + case 61: + goto tr351 + case 92: + goto st184 + } + goto st172 + tr351: +//line plugins/parsers/influx/machine.go.rl:100 + + m.key = m.text() + + goto st173 + st173: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof173 + } + st_case_173: +//line plugins/parsers/influx/machine.go:24017 + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr353 + case 45: + goto tr167 + case 46: + goto tr168 + case 48: + goto tr169 + case 70: + goto tr354 + case 84: + goto tr355 + case 92: + goto st75 + case 102: + goto tr356 + case 116: + goto tr357 + } + if 49 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto tr170 + } + goto st6 + tr353: + (m.cs) = 618 +//line plugins/parsers/influx/machine.go.rl:140 + + err = m.handler.AddString(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st618: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof618 + } + st_case_618: +//line plugins/parsers/influx/machine.go:24066 + switch (m.data)[(m.p)] { + case 10: + goto tr669 + case 12: + goto st272 + case 13: + goto tr671 + case 32: + goto tr925 + case 34: + goto tr26 + case 44: + goto tr926 + case 92: + goto tr27 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 11 { + goto tr925 + } + goto tr23 + tr169: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st619 + st619: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof619 + } + st_case_619: +//line plugins/parsers/influx/machine.go:24098 + switch (m.data)[(m.p)] { + case 10: + goto tr534 + case 12: + goto tr535 + case 13: + goto tr536 + case 32: + goto tr533 + case 34: + goto tr31 + case 44: + 
goto tr537 + case 46: + goto st326 + case 69: + goto st174 + case 92: + goto st75 + case 101: + goto st174 + case 105: + goto st624 + case 117: + goto st625 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st620 + } + case (m.data)[(m.p)] >= 9: + goto tr533 + } + goto st6 + st620: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof620 + } + st_case_620: + switch (m.data)[(m.p)] { + case 10: + goto tr534 + case 12: + goto tr535 + case 13: + goto tr536 + case 32: + goto tr533 + case 34: + goto tr31 + case 44: + goto tr537 + case 46: + goto st326 + case 69: + goto st174 + case 92: + goto st75 + case 101: + goto st174 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st620 + } + case (m.data)[(m.p)] >= 9: + goto tr533 + } + goto st6 + st174: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof174 + } + st_case_174: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr358 + case 43: + goto st175 + case 45: + goto st175 + case 92: + goto st75 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st623 + } + goto st6 + tr358: + (m.cs) = 621 +//line plugins/parsers/influx/machine.go.rl:140 + + err = m.handler.AddString(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + goto _out + } + } + + goto _again + st621: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof621 + } + st_case_621: +//line plugins/parsers/influx/machine.go:24213 + switch (m.data)[(m.p)] { + case 10: + goto tr103 + case 13: + goto st33 + case 32: + goto st272 + case 44: + goto st36 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st622 + } + case (m.data)[(m.p)] >= 9: + goto st272 + } + goto tr105 + st622: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof622 + } + st_case_622: + switch (m.data)[(m.p)] { + case 10: + goto tr734 + case 13: + goto tr736 + case 32: + goto tr535 + case 44: + goto tr930 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st622 + } + case (m.data)[(m.p)] >= 9: + goto tr535 + } + goto tr105 + st175: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof175 + } + st_case_175: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 92: + goto st75 + } + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st623 + } + goto st6 + st623: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof623 + } + st_case_623: + switch (m.data)[(m.p)] { + case 10: + goto tr534 + case 12: + goto tr535 + case 13: + goto tr536 + case 32: + goto tr533 + case 34: + goto tr31 + case 44: + goto tr537 + case 92: + goto st75 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st623 + } + case (m.data)[(m.p)] >= 9: + goto tr533 + } + goto st6 + st624: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof624 + } + st_case_624: + switch (m.data)[(m.p)] { + case 10: + goto tr741 + case 12: + goto tr932 + case 13: + goto tr744 + case 32: + goto tr931 + case 34: + goto tr31 + case 44: + goto tr933 + case 92: + goto st75 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 11 { + goto tr931 + } + goto st6 + st625: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof625 + } + st_case_625: + switch (m.data)[(m.p)] { + case 10: + goto tr748 + case 12: + goto tr935 + case 13: + goto tr751 + case 32: + goto tr934 + case 34: + goto tr31 + 
case 44: + goto tr936 + case 92: + goto st75 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 11 { + goto tr934 + } + goto st6 + tr170: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st626 + st626: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof626 + } + st_case_626: +//line plugins/parsers/influx/machine.go:24369 + switch (m.data)[(m.p)] { + case 10: + goto tr534 + case 12: + goto tr535 + case 13: + goto tr536 + case 32: + goto tr533 + case 34: + goto tr31 + case 44: + goto tr537 + case 46: + goto st326 + case 69: + goto st174 + case 92: + goto st75 + case 101: + goto st174 + case 105: + goto st624 + case 117: + goto st625 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st626 + } + case (m.data)[(m.p)] >= 9: + goto tr533 + } + goto st6 + tr354: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st627 + st627: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof627 + } + st_case_627: +//line plugins/parsers/influx/machine.go:24416 + switch (m.data)[(m.p)] { + case 10: + goto tr755 + case 12: + goto tr939 + case 13: + goto tr758 + case 32: + goto tr938 + case 34: + goto tr31 + case 44: + goto tr940 + case 65: + goto st176 + case 92: + goto st75 + case 97: + goto st179 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 11 { + goto tr938 + } + goto st6 + st176: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof176 + } + st_case_176: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 76: + goto st177 + case 92: + goto st75 + } + goto st6 + st177: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof177 + } + st_case_177: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 83: + goto st178 + case 92: + goto st75 + } + goto st6 + st178: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof178 + } + st_case_178: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 69: + goto st628 + case 92: + goto st75 + } + goto st6 + st628: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof628 + } + st_case_628: + switch (m.data)[(m.p)] { + case 10: + goto tr755 + case 12: + goto tr939 + case 13: + goto tr758 + case 32: + goto tr938 + case 34: + goto tr31 + case 44: + goto tr940 + case 92: + goto st75 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 11 { + goto tr938 + } + goto st6 + st179: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof179 + } + st_case_179: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 92: + goto st75 + case 108: + goto st180 + } + goto st6 + st180: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof180 + } + st_case_180: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 92: + goto st75 + case 115: + goto st181 + } + goto st6 + st181: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof181 + } + st_case_181: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 92: + goto st75 + case 101: + goto st628 + } + goto st6 + tr355: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st629 + st629: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof629 + } + st_case_629: +//line plugins/parsers/influx/machine.go:24597 + switch (m.data)[(m.p)] { + case 10: + goto tr755 
+ case 12: + goto tr939 + case 13: + goto tr758 + case 32: + goto tr938 + case 34: + goto tr31 + case 44: + goto tr940 + case 82: + goto st182 + case 92: + goto st75 + case 114: + goto st183 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 11 { + goto tr938 + } + goto st6 + st182: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof182 + } + st_case_182: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 85: + goto st178 + case 92: + goto st75 + } + goto st6 + st183: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof183 + } + st_case_183: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 92: + goto st75 + case 117: + goto st181 + } + goto st6 + tr356: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st630 + st630: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof630 + } + st_case_630: +//line plugins/parsers/influx/machine.go:24673 + switch (m.data)[(m.p)] { + case 10: + goto tr755 + case 12: + goto tr939 + case 13: + goto tr758 + case 32: + goto tr938 + case 34: + goto tr31 + case 44: + goto tr940 + case 92: + goto st75 + case 97: + goto st179 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 11 { + goto tr938 + } + goto st6 + tr357: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st631 + st631: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof631 + } + st_case_631: +//line plugins/parsers/influx/machine.go:24707 + switch (m.data)[(m.p)] { + case 10: + goto tr755 + case 12: + goto tr939 + case 13: + goto tr758 + case 32: + goto tr938 + case 34: + goto tr31 + case 44: + goto tr940 + case 92: + goto st75 + case 114: + goto st183 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 11 { + goto tr938 + } + goto st6 + tr349: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st184 + st184: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof184 + } + st_case_184: +//line plugins/parsers/influx/machine.go:24741 + switch (m.data)[(m.p)] { + case 34: + goto st172 + case 92: + goto st172 + } + switch { + case (m.data)[(m.p)] > 10: + if 12 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 { + goto tr8 + } + case (m.data)[(m.p)] >= 9: + goto tr8 + } + goto st3 + st632: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof632 + } + st_case_632: + switch (m.data)[(m.p)] { + case 10: + goto tr534 + case 12: + goto tr535 + case 13: + goto tr536 + case 32: + goto tr533 + case 34: + goto tr31 + case 44: + goto tr537 + case 46: + goto st326 + case 69: + goto st174 + case 92: + goto st75 + case 101: + goto st174 + case 105: + goto st624 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st620 + } + case (m.data)[(m.p)] >= 9: + goto tr533 + } + goto st6 + st633: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof633 + } + st_case_633: + switch (m.data)[(m.p)] { + case 10: + goto tr534 + case 12: + goto tr535 + case 13: + goto tr536 + case 32: + goto tr533 + case 34: + goto tr31 + case 44: + goto tr537 + case 46: + goto st326 + case 69: + goto st174 + case 92: + goto st75 + case 101: + goto st174 + case 105: + goto st624 + } + switch { + case (m.data)[(m.p)] > 11: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st633 + } + case (m.data)[(m.p)] >= 9: + goto tr533 + } + goto st6 + tr171: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st634 + st634: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof634 + } + st_case_634: +//line 
plugins/parsers/influx/machine.go:24844 + switch (m.data)[(m.p)] { + case 10: + goto tr900 + case 12: + goto tr939 + case 13: + goto tr758 + case 32: + goto tr938 + case 34: + goto tr31 + case 44: + goto tr940 + case 65: + goto st185 + case 92: + goto st75 + case 97: + goto st188 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 11 { + goto tr938 + } + goto st6 + st185: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof185 + } + st_case_185: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 76: + goto st186 + case 92: + goto st75 + } + goto st6 + st186: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof186 + } + st_case_186: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 83: + goto st187 + case 92: + goto st75 + } + goto st6 + st187: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof187 + } + st_case_187: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 69: + goto st635 + case 92: + goto st75 + } + goto st6 + st635: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof635 + } + st_case_635: + switch (m.data)[(m.p)] { + case 10: + goto tr900 + case 12: + goto tr939 + case 13: + goto tr758 + case 32: + goto tr938 + case 34: + goto tr31 + case 44: + goto tr940 + case 92: + goto st75 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 11 { + goto tr938 + } + goto st6 + st188: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof188 + } + st_case_188: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 92: + goto st75 + case 108: + goto st189 + } + goto st6 + st189: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof189 + } + st_case_189: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 92: + goto st75 + case 115: + goto st190 + } + goto st6 + st190: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof190 + } + st_case_190: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 92: + goto st75 + case 101: + goto st635 + } + goto st6 + tr172: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st636 + st636: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof636 + } + st_case_636: +//line plugins/parsers/influx/machine.go:25025 + switch (m.data)[(m.p)] { + case 10: + goto tr900 + case 12: + goto tr939 + case 13: + goto tr758 + case 32: + goto tr938 + case 34: + goto tr31 + case 44: + goto tr940 + case 82: + goto st191 + case 92: + goto st75 + case 114: + goto st192 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 11 { + goto tr938 + } + goto st6 + st191: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof191 + } + st_case_191: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 85: + goto st187 + case 92: + goto st75 + } + goto st6 + st192: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof192 + } + st_case_192: + switch (m.data)[(m.p)] { + case 10: + goto tr29 + case 12: + goto tr8 + case 13: + goto st7 + case 34: + goto tr31 + case 92: + goto st75 + case 117: + goto st190 + } + goto st6 + tr173: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st637 + st637: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof637 + } + st_case_637: +//line 
plugins/parsers/influx/machine.go:25101 + switch (m.data)[(m.p)] { + case 10: + goto tr900 + case 12: + goto tr939 + case 13: + goto tr758 + case 32: + goto tr938 + case 34: + goto tr31 + case 44: + goto tr940 + case 92: + goto st75 + case 97: + goto st188 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 11 { + goto tr938 + } + goto st6 + tr174: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st638 + st638: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof638 + } + st_case_638: +//line plugins/parsers/influx/machine.go:25135 + switch (m.data)[(m.p)] { + case 10: + goto tr900 + case 12: + goto tr939 + case 13: + goto tr758 + case 32: + goto tr938 + case 34: + goto tr31 + case 44: + goto tr940 + case 92: + goto st75 + case 114: + goto st192 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 11 { + goto tr938 + } + goto st6 + tr162: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st193 + st193: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof193 + } + st_case_193: +//line plugins/parsers/influx/machine.go:25169 + switch (m.data)[(m.p)] { + case 9: + goto st49 + case 10: + goto tr29 + case 11: + goto tr162 + case 12: + goto st2 + case 13: + goto st7 + case 32: + goto st49 + case 34: + goto tr97 + case 44: + goto st6 + case 61: + goto tr165 + case 92: + goto tr163 + } + goto tr160 + tr140: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st194 + st194: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof194 + } + st_case_194: +//line plugins/parsers/influx/machine.go:25204 + switch (m.data)[(m.p)] { + case 10: + goto tr47 + case 11: + goto tr61 + case 13: + goto tr47 + case 32: + goto tr60 + case 44: + goto tr62 + case 46: + goto st195 + case 48: + goto st640 + case 61: + goto tr47 + case 92: + goto st22 + } + switch { + case (m.data)[(m.p)] > 12: + if 49 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st643 + } + case (m.data)[(m.p)] >= 9: + goto tr60 + } + goto st16 + tr141: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st195 + st195: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof195 + } + st_case_195: +//line plugins/parsers/influx/machine.go:25245 + switch (m.data)[(m.p)] { + case 10: + goto tr47 + case 11: + goto tr61 + case 13: + goto tr47 + case 32: + goto tr60 + case 44: + goto tr62 + case 61: + goto tr47 + case 92: + goto st22 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st639 + } + case (m.data)[(m.p)] >= 9: + goto tr60 + } + goto st16 + st639: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof639 + } + st_case_639: + switch (m.data)[(m.p)] { + case 10: + goto tr734 + case 11: + goto tr735 + case 13: + goto tr736 + case 32: + goto tr731 + case 44: + goto tr737 + case 61: + goto tr132 + case 69: + goto st196 + case 92: + goto st22 + case 101: + goto st196 + } + switch { + case (m.data)[(m.p)] > 12: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st639 + } + case (m.data)[(m.p)] >= 9: + goto tr731 + } + goto st16 + st196: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof196 + } + st_case_196: + switch (m.data)[(m.p)] { + case 10: + goto tr47 + case 11: + goto tr61 + case 13: + goto tr47 + case 32: + goto tr60 + case 34: + goto st197 + case 44: + goto tr62 + case 61: + goto tr47 + case 92: + goto st22 + } + switch { + case (m.data)[(m.p)] < 43: + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 { + goto tr60 + } + case (m.data)[(m.p)] > 45: + if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 { + goto st480 + } + 
[plugins/parsers/influx/machine.go (generated) — Ragel-emitted InfluxDB line-protocol parser state machine, produced from plugins/parsers/influx/machine.go.rl. The added hunk consists entirely of generated states and transitions (st197–st744, tr… labels) that scan measurement names, tag keys/values, field values (float, int, uint, bool, string) and trailing timestamps, invoking m.handler.SetMeasurement, AddTag, AddFloat, AddInt, AddUint and AddBool as tokens complete.]
tr1063 + case 32: + goto tr2 + case 44: + goto tr1064 + case 61: + goto tr2 + case 92: + goto st266 + } + goto st743 + tr447: +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st267 + st267: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof267 + } + st_case_267: +//line plugins/parsers/influx/machine.go:30292 + if (m.data)[(m.p)] == 92 { + goto st268 + } + switch { + case (m.data)[(m.p)] > 10: + if 12 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 { + goto tr2 + } + case (m.data)[(m.p)] >= 9: + goto tr2 + } + goto st264 + st268: +//line plugins/parsers/influx/machine.go.rl:240 + (m.p)-- + + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof268 + } + st_case_268: +//line plugins/parsers/influx/machine.go:30313 + switch (m.data)[(m.p)] { + case 32: + goto tr2 + case 44: + goto tr2 + case 61: + goto tr449 + case 92: + goto st267 + } + switch { + case (m.data)[(m.p)] > 10: + if 12 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 { + goto tr2 + } + case (m.data)[(m.p)] >= 9: + goto tr2 + } + goto st264 + tr444: +//line plugins/parsers/influx/machine.go.rl:74 + + m.beginMetric = true + +//line plugins/parsers/influx/machine.go.rl:20 + + m.pb = m.p + + goto st269 + st269: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof269 + } + st_case_269: +//line plugins/parsers/influx/machine.go:30348 + switch { + case (m.data)[(m.p)] > 10: + if 12 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 { + goto st0 + } + case (m.data)[(m.p)] >= 9: + goto st0 + } + goto st741 + tr441: +//line plugins/parsers/influx/machine.go.rl:158 + + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line + + goto st740 + st740: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof740 + } + st_case_740: +//line plugins/parsers/influx/machine.go:30371 + switch (m.data)[(m.p)] { + case 10: + goto tr441 + case 13: + goto st259 + case 32: + goto st740 + case 35: + goto st260 + } + if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 { + goto st740 + } + goto tr1055 + st259: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof259 + } + st_case_259: + if (m.data)[(m.p)] == 10 { + goto tr441 + } + goto st0 + st260: + if (m.p)++; (m.p) == (m.pe) { + goto _test_eof260 + } + st_case_260: + if (m.data)[(m.p)] == 10 { + goto tr441 + } + goto st260 + st_out: + _test_eof270: + (m.cs) = 270 + goto _test_eof + _test_eof1: + (m.cs) = 1 + goto _test_eof + _test_eof2: + (m.cs) = 2 + goto _test_eof + _test_eof3: + (m.cs) = 3 + goto _test_eof + _test_eof4: + (m.cs) = 4 + goto _test_eof + _test_eof5: + (m.cs) = 5 + goto _test_eof + _test_eof6: + (m.cs) = 6 + goto _test_eof + _test_eof7: + (m.cs) = 7 + goto _test_eof + _test_eof271: + (m.cs) = 271 + goto _test_eof + _test_eof272: + (m.cs) = 272 + goto _test_eof + _test_eof273: + (m.cs) = 273 + goto _test_eof + _test_eof8: + (m.cs) = 8 + goto _test_eof + _test_eof9: + (m.cs) = 9 + goto _test_eof + _test_eof10: + (m.cs) = 10 + goto _test_eof + _test_eof11: + (m.cs) = 11 + goto _test_eof + _test_eof12: + (m.cs) = 12 + goto _test_eof + _test_eof13: + (m.cs) = 13 + goto _test_eof + _test_eof14: + (m.cs) = 14 + goto _test_eof + _test_eof15: + (m.cs) = 15 + goto _test_eof + _test_eof16: + (m.cs) = 16 + goto _test_eof + _test_eof17: + (m.cs) = 17 + goto _test_eof + _test_eof18: + (m.cs) = 18 + goto _test_eof + _test_eof19: + (m.cs) = 19 + goto _test_eof + _test_eof20: + (m.cs) = 20 + goto _test_eof + _test_eof21: + (m.cs) = 21 + goto _test_eof + _test_eof22: + (m.cs) = 22 + goto _test_eof + _test_eof23: + (m.cs) = 23 + goto _test_eof + _test_eof24: + (m.cs) = 24 + goto _test_eof + 
_test_eof25: + (m.cs) = 25 + goto _test_eof + _test_eof26: + (m.cs) = 26 + goto _test_eof + _test_eof27: + (m.cs) = 27 + goto _test_eof + _test_eof28: + (m.cs) = 28 + goto _test_eof + _test_eof29: + (m.cs) = 29 + goto _test_eof + _test_eof30: + (m.cs) = 30 + goto _test_eof + _test_eof31: + (m.cs) = 31 + goto _test_eof + _test_eof32: + (m.cs) = 32 + goto _test_eof + _test_eof274: + (m.cs) = 274 + goto _test_eof + _test_eof275: + (m.cs) = 275 + goto _test_eof + _test_eof33: + (m.cs) = 33 + goto _test_eof + _test_eof34: + (m.cs) = 34 + goto _test_eof + _test_eof276: + (m.cs) = 276 + goto _test_eof + _test_eof277: + (m.cs) = 277 + goto _test_eof + _test_eof278: + (m.cs) = 278 + goto _test_eof + _test_eof35: + (m.cs) = 35 + goto _test_eof + _test_eof279: + (m.cs) = 279 + goto _test_eof + _test_eof280: + (m.cs) = 280 + goto _test_eof + _test_eof281: + (m.cs) = 281 + goto _test_eof + _test_eof282: + (m.cs) = 282 + goto _test_eof + _test_eof283: + (m.cs) = 283 + goto _test_eof + _test_eof284: + (m.cs) = 284 + goto _test_eof + _test_eof285: + (m.cs) = 285 + goto _test_eof + _test_eof286: + (m.cs) = 286 + goto _test_eof + _test_eof287: + (m.cs) = 287 + goto _test_eof + _test_eof288: + (m.cs) = 288 + goto _test_eof + _test_eof289: + (m.cs) = 289 + goto _test_eof + _test_eof290: + (m.cs) = 290 + goto _test_eof + _test_eof291: + (m.cs) = 291 + goto _test_eof + _test_eof292: + (m.cs) = 292 + goto _test_eof + _test_eof293: + (m.cs) = 293 + goto _test_eof + _test_eof294: + (m.cs) = 294 + goto _test_eof + _test_eof295: + (m.cs) = 295 + goto _test_eof + _test_eof296: + (m.cs) = 296 + goto _test_eof + _test_eof36: + (m.cs) = 36 + goto _test_eof + _test_eof37: + (m.cs) = 37 + goto _test_eof + _test_eof297: + (m.cs) = 297 + goto _test_eof + _test_eof298: + (m.cs) = 298 + goto _test_eof + _test_eof299: + (m.cs) = 299 + goto _test_eof + _test_eof38: + (m.cs) = 38 + goto _test_eof + _test_eof39: + (m.cs) = 39 + goto _test_eof + _test_eof40: + (m.cs) = 40 + goto _test_eof + _test_eof41: + (m.cs) = 41 + goto _test_eof + _test_eof42: + (m.cs) = 42 + goto _test_eof + _test_eof300: + (m.cs) = 300 + goto _test_eof + _test_eof301: + (m.cs) = 301 + goto _test_eof + _test_eof302: + (m.cs) = 302 + goto _test_eof + _test_eof303: + (m.cs) = 303 + goto _test_eof + _test_eof43: + (m.cs) = 43 + goto _test_eof + _test_eof304: + (m.cs) = 304 + goto _test_eof + _test_eof305: + (m.cs) = 305 + goto _test_eof + _test_eof306: + (m.cs) = 306 + goto _test_eof + _test_eof307: + (m.cs) = 307 + goto _test_eof + _test_eof308: + (m.cs) = 308 + goto _test_eof + _test_eof309: + (m.cs) = 309 + goto _test_eof + _test_eof310: + (m.cs) = 310 + goto _test_eof + _test_eof311: + (m.cs) = 311 + goto _test_eof + _test_eof312: + (m.cs) = 312 + goto _test_eof + _test_eof313: + (m.cs) = 313 + goto _test_eof + _test_eof314: + (m.cs) = 314 + goto _test_eof + _test_eof315: + (m.cs) = 315 + goto _test_eof + _test_eof316: + (m.cs) = 316 + goto _test_eof + _test_eof317: + (m.cs) = 317 + goto _test_eof + _test_eof318: + (m.cs) = 318 + goto _test_eof + _test_eof319: + (m.cs) = 319 + goto _test_eof + _test_eof320: + (m.cs) = 320 + goto _test_eof + _test_eof321: + (m.cs) = 321 + goto _test_eof + _test_eof322: + (m.cs) = 322 + goto _test_eof + _test_eof323: + (m.cs) = 323 + goto _test_eof + _test_eof324: + (m.cs) = 324 + goto _test_eof + _test_eof325: + (m.cs) = 325 + goto _test_eof + _test_eof44: + (m.cs) = 44 + goto _test_eof + _test_eof45: + (m.cs) = 45 + goto _test_eof + _test_eof46: + (m.cs) = 46 + goto _test_eof + _test_eof47: + (m.cs) = 47 + goto _test_eof + 
_test_eof48: + (m.cs) = 48 + goto _test_eof + _test_eof49: + (m.cs) = 49 + goto _test_eof + _test_eof50: + (m.cs) = 50 + goto _test_eof + _test_eof51: + (m.cs) = 51 + goto _test_eof + _test_eof52: + (m.cs) = 52 + goto _test_eof + _test_eof53: + (m.cs) = 53 + goto _test_eof + _test_eof326: + (m.cs) = 326 + goto _test_eof + _test_eof327: + (m.cs) = 327 + goto _test_eof + _test_eof328: + (m.cs) = 328 + goto _test_eof + _test_eof54: + (m.cs) = 54 + goto _test_eof + _test_eof55: + (m.cs) = 55 + goto _test_eof + _test_eof56: + (m.cs) = 56 + goto _test_eof + _test_eof57: + (m.cs) = 57 + goto _test_eof + _test_eof58: + (m.cs) = 58 + goto _test_eof + _test_eof59: + (m.cs) = 59 + goto _test_eof + _test_eof329: + (m.cs) = 329 + goto _test_eof + _test_eof330: + (m.cs) = 330 + goto _test_eof + _test_eof60: + (m.cs) = 60 + goto _test_eof + _test_eof331: + (m.cs) = 331 + goto _test_eof + _test_eof332: + (m.cs) = 332 + goto _test_eof + _test_eof333: + (m.cs) = 333 + goto _test_eof + _test_eof334: + (m.cs) = 334 + goto _test_eof + _test_eof335: + (m.cs) = 335 + goto _test_eof + _test_eof336: + (m.cs) = 336 + goto _test_eof + _test_eof337: + (m.cs) = 337 + goto _test_eof + _test_eof338: + (m.cs) = 338 + goto _test_eof + _test_eof339: + (m.cs) = 339 + goto _test_eof + _test_eof340: + (m.cs) = 340 + goto _test_eof + _test_eof341: + (m.cs) = 341 + goto _test_eof + _test_eof342: + (m.cs) = 342 + goto _test_eof + _test_eof343: + (m.cs) = 343 + goto _test_eof + _test_eof344: + (m.cs) = 344 + goto _test_eof + _test_eof345: + (m.cs) = 345 + goto _test_eof + _test_eof346: + (m.cs) = 346 + goto _test_eof + _test_eof347: + (m.cs) = 347 + goto _test_eof + _test_eof348: + (m.cs) = 348 + goto _test_eof + _test_eof349: + (m.cs) = 349 + goto _test_eof + _test_eof350: + (m.cs) = 350 + goto _test_eof + _test_eof61: + (m.cs) = 61 + goto _test_eof + _test_eof351: + (m.cs) = 351 + goto _test_eof + _test_eof352: + (m.cs) = 352 + goto _test_eof + _test_eof353: + (m.cs) = 353 + goto _test_eof + _test_eof62: + (m.cs) = 62 + goto _test_eof + _test_eof354: + (m.cs) = 354 + goto _test_eof + _test_eof355: + (m.cs) = 355 + goto _test_eof + _test_eof356: + (m.cs) = 356 + goto _test_eof + _test_eof357: + (m.cs) = 357 + goto _test_eof + _test_eof358: + (m.cs) = 358 + goto _test_eof + _test_eof359: + (m.cs) = 359 + goto _test_eof + _test_eof360: + (m.cs) = 360 + goto _test_eof + _test_eof361: + (m.cs) = 361 + goto _test_eof + _test_eof362: + (m.cs) = 362 + goto _test_eof + _test_eof363: + (m.cs) = 363 + goto _test_eof + _test_eof364: + (m.cs) = 364 + goto _test_eof + _test_eof365: + (m.cs) = 365 + goto _test_eof + _test_eof366: + (m.cs) = 366 + goto _test_eof + _test_eof367: + (m.cs) = 367 + goto _test_eof + _test_eof368: + (m.cs) = 368 + goto _test_eof + _test_eof369: + (m.cs) = 369 + goto _test_eof + _test_eof370: + (m.cs) = 370 + goto _test_eof + _test_eof371: + (m.cs) = 371 + goto _test_eof + _test_eof372: + (m.cs) = 372 + goto _test_eof + _test_eof373: + (m.cs) = 373 + goto _test_eof + _test_eof63: + (m.cs) = 63 + goto _test_eof + _test_eof64: + (m.cs) = 64 + goto _test_eof + _test_eof65: + (m.cs) = 65 + goto _test_eof + _test_eof66: + (m.cs) = 66 + goto _test_eof + _test_eof67: + (m.cs) = 67 + goto _test_eof + _test_eof374: + (m.cs) = 374 + goto _test_eof + _test_eof68: + (m.cs) = 68 + goto _test_eof + _test_eof69: + (m.cs) = 69 + goto _test_eof + _test_eof70: + (m.cs) = 70 + goto _test_eof + _test_eof71: + (m.cs) = 71 + goto _test_eof + _test_eof72: + (m.cs) = 72 + goto _test_eof + _test_eof375: + (m.cs) = 375 + goto _test_eof + 
_test_eof376: + (m.cs) = 376 + goto _test_eof + _test_eof377: + (m.cs) = 377 + goto _test_eof + _test_eof73: + (m.cs) = 73 + goto _test_eof + _test_eof74: + (m.cs) = 74 + goto _test_eof + _test_eof378: + (m.cs) = 378 + goto _test_eof + _test_eof379: + (m.cs) = 379 + goto _test_eof + _test_eof75: + (m.cs) = 75 + goto _test_eof + _test_eof380: + (m.cs) = 380 + goto _test_eof + _test_eof76: + (m.cs) = 76 + goto _test_eof + _test_eof381: + (m.cs) = 381 + goto _test_eof + _test_eof382: + (m.cs) = 382 + goto _test_eof + _test_eof383: + (m.cs) = 383 + goto _test_eof + _test_eof384: + (m.cs) = 384 + goto _test_eof + _test_eof385: + (m.cs) = 385 + goto _test_eof + _test_eof386: + (m.cs) = 386 + goto _test_eof + _test_eof387: + (m.cs) = 387 + goto _test_eof + _test_eof388: + (m.cs) = 388 + goto _test_eof + _test_eof389: + (m.cs) = 389 + goto _test_eof + _test_eof390: + (m.cs) = 390 + goto _test_eof + _test_eof391: + (m.cs) = 391 + goto _test_eof + _test_eof392: + (m.cs) = 392 + goto _test_eof + _test_eof393: + (m.cs) = 393 + goto _test_eof + _test_eof394: + (m.cs) = 394 + goto _test_eof + _test_eof395: + (m.cs) = 395 + goto _test_eof + _test_eof396: + (m.cs) = 396 + goto _test_eof + _test_eof397: + (m.cs) = 397 + goto _test_eof + _test_eof398: + (m.cs) = 398 + goto _test_eof + _test_eof399: + (m.cs) = 399 + goto _test_eof + _test_eof400: + (m.cs) = 400 + goto _test_eof + _test_eof77: + (m.cs) = 77 + goto _test_eof + _test_eof78: + (m.cs) = 78 + goto _test_eof + _test_eof79: + (m.cs) = 79 + goto _test_eof + _test_eof80: + (m.cs) = 80 + goto _test_eof + _test_eof81: + (m.cs) = 81 + goto _test_eof + _test_eof82: + (m.cs) = 82 + goto _test_eof + _test_eof83: + (m.cs) = 83 + goto _test_eof + _test_eof84: + (m.cs) = 84 + goto _test_eof + _test_eof85: + (m.cs) = 85 + goto _test_eof + _test_eof86: + (m.cs) = 86 + goto _test_eof + _test_eof87: + (m.cs) = 87 + goto _test_eof + _test_eof88: + (m.cs) = 88 + goto _test_eof + _test_eof89: + (m.cs) = 89 + goto _test_eof + _test_eof90: + (m.cs) = 90 + goto _test_eof + _test_eof401: + (m.cs) = 401 + goto _test_eof + _test_eof402: + (m.cs) = 402 + goto _test_eof + _test_eof403: + (m.cs) = 403 + goto _test_eof + _test_eof404: + (m.cs) = 404 + goto _test_eof + _test_eof91: + (m.cs) = 91 + goto _test_eof + _test_eof92: + (m.cs) = 92 + goto _test_eof + _test_eof93: + (m.cs) = 93 + goto _test_eof + _test_eof94: + (m.cs) = 94 + goto _test_eof + _test_eof405: + (m.cs) = 405 + goto _test_eof + _test_eof406: + (m.cs) = 406 + goto _test_eof + _test_eof95: + (m.cs) = 95 + goto _test_eof + _test_eof96: + (m.cs) = 96 + goto _test_eof + _test_eof407: + (m.cs) = 407 + goto _test_eof + _test_eof97: + (m.cs) = 97 + goto _test_eof + _test_eof98: + (m.cs) = 98 + goto _test_eof + _test_eof408: + (m.cs) = 408 + goto _test_eof + _test_eof409: + (m.cs) = 409 + goto _test_eof + _test_eof99: + (m.cs) = 99 + goto _test_eof + _test_eof410: + (m.cs) = 410 + goto _test_eof + _test_eof411: + (m.cs) = 411 + goto _test_eof + _test_eof100: + (m.cs) = 100 + goto _test_eof + _test_eof101: + (m.cs) = 101 + goto _test_eof + _test_eof412: + (m.cs) = 412 + goto _test_eof + _test_eof413: + (m.cs) = 413 + goto _test_eof + _test_eof414: + (m.cs) = 414 + goto _test_eof + _test_eof415: + (m.cs) = 415 + goto _test_eof + _test_eof416: + (m.cs) = 416 + goto _test_eof + _test_eof417: + (m.cs) = 417 + goto _test_eof + _test_eof418: + (m.cs) = 418 + goto _test_eof + _test_eof419: + (m.cs) = 419 + goto _test_eof + _test_eof420: + (m.cs) = 420 + goto _test_eof + _test_eof421: + (m.cs) = 421 + goto _test_eof + 
_test_eof422: + (m.cs) = 422 + goto _test_eof + _test_eof423: + (m.cs) = 423 + goto _test_eof + _test_eof424: + (m.cs) = 424 + goto _test_eof + _test_eof425: + (m.cs) = 425 + goto _test_eof + _test_eof426: + (m.cs) = 426 + goto _test_eof + _test_eof427: + (m.cs) = 427 + goto _test_eof + _test_eof428: + (m.cs) = 428 + goto _test_eof + _test_eof429: + (m.cs) = 429 + goto _test_eof + _test_eof102: + (m.cs) = 102 + goto _test_eof + _test_eof430: + (m.cs) = 430 + goto _test_eof + _test_eof431: + (m.cs) = 431 + goto _test_eof + _test_eof432: + (m.cs) = 432 + goto _test_eof + _test_eof103: + (m.cs) = 103 + goto _test_eof + _test_eof104: + (m.cs) = 104 + goto _test_eof + _test_eof433: + (m.cs) = 433 + goto _test_eof + _test_eof434: + (m.cs) = 434 + goto _test_eof + _test_eof435: + (m.cs) = 435 + goto _test_eof + _test_eof105: + (m.cs) = 105 + goto _test_eof + _test_eof436: + (m.cs) = 436 + goto _test_eof + _test_eof437: + (m.cs) = 437 + goto _test_eof + _test_eof438: + (m.cs) = 438 + goto _test_eof + _test_eof439: + (m.cs) = 439 + goto _test_eof + _test_eof440: + (m.cs) = 440 + goto _test_eof + _test_eof441: + (m.cs) = 441 + goto _test_eof + _test_eof442: + (m.cs) = 442 + goto _test_eof + _test_eof443: + (m.cs) = 443 + goto _test_eof + _test_eof444: + (m.cs) = 444 + goto _test_eof + _test_eof445: + (m.cs) = 445 + goto _test_eof + _test_eof446: + (m.cs) = 446 + goto _test_eof + _test_eof447: + (m.cs) = 447 + goto _test_eof + _test_eof448: + (m.cs) = 448 + goto _test_eof + _test_eof449: + (m.cs) = 449 + goto _test_eof + _test_eof450: + (m.cs) = 450 + goto _test_eof + _test_eof451: + (m.cs) = 451 + goto _test_eof + _test_eof452: + (m.cs) = 452 + goto _test_eof + _test_eof453: + (m.cs) = 453 + goto _test_eof + _test_eof454: + (m.cs) = 454 + goto _test_eof + _test_eof455: + (m.cs) = 455 + goto _test_eof + _test_eof106: + (m.cs) = 106 + goto _test_eof + _test_eof456: + (m.cs) = 456 + goto _test_eof + _test_eof457: + (m.cs) = 457 + goto _test_eof + _test_eof458: + (m.cs) = 458 + goto _test_eof + _test_eof459: + (m.cs) = 459 + goto _test_eof + _test_eof460: + (m.cs) = 460 + goto _test_eof + _test_eof461: + (m.cs) = 461 + goto _test_eof + _test_eof462: + (m.cs) = 462 + goto _test_eof + _test_eof463: + (m.cs) = 463 + goto _test_eof + _test_eof464: + (m.cs) = 464 + goto _test_eof + _test_eof465: + (m.cs) = 465 + goto _test_eof + _test_eof466: + (m.cs) = 466 + goto _test_eof + _test_eof467: + (m.cs) = 467 + goto _test_eof + _test_eof468: + (m.cs) = 468 + goto _test_eof + _test_eof469: + (m.cs) = 469 + goto _test_eof + _test_eof470: + (m.cs) = 470 + goto _test_eof + _test_eof471: + (m.cs) = 471 + goto _test_eof + _test_eof472: + (m.cs) = 472 + goto _test_eof + _test_eof473: + (m.cs) = 473 + goto _test_eof + _test_eof474: + (m.cs) = 474 + goto _test_eof + _test_eof475: + (m.cs) = 475 + goto _test_eof + _test_eof476: + (m.cs) = 476 + goto _test_eof + _test_eof477: + (m.cs) = 477 + goto _test_eof + _test_eof107: + (m.cs) = 107 + goto _test_eof + _test_eof108: + (m.cs) = 108 + goto _test_eof + _test_eof109: + (m.cs) = 109 + goto _test_eof + _test_eof110: + (m.cs) = 110 + goto _test_eof + _test_eof111: + (m.cs) = 111 + goto _test_eof + _test_eof478: + (m.cs) = 478 + goto _test_eof + _test_eof112: + (m.cs) = 112 + goto _test_eof + _test_eof479: + (m.cs) = 479 + goto _test_eof + _test_eof480: + (m.cs) = 480 + goto _test_eof + _test_eof113: + (m.cs) = 113 + goto _test_eof + _test_eof481: + (m.cs) = 481 + goto _test_eof + _test_eof482: + (m.cs) = 482 + goto _test_eof + _test_eof483: + (m.cs) = 483 + goto _test_eof + 
_test_eof484: + (m.cs) = 484 + goto _test_eof + _test_eof485: + (m.cs) = 485 + goto _test_eof + _test_eof486: + (m.cs) = 486 + goto _test_eof + _test_eof487: + (m.cs) = 487 + goto _test_eof + _test_eof488: + (m.cs) = 488 + goto _test_eof + _test_eof489: + (m.cs) = 489 + goto _test_eof + _test_eof114: + (m.cs) = 114 + goto _test_eof + _test_eof115: + (m.cs) = 115 + goto _test_eof + _test_eof116: + (m.cs) = 116 + goto _test_eof + _test_eof490: + (m.cs) = 490 + goto _test_eof + _test_eof117: + (m.cs) = 117 + goto _test_eof + _test_eof118: + (m.cs) = 118 + goto _test_eof + _test_eof119: + (m.cs) = 119 + goto _test_eof + _test_eof491: + (m.cs) = 491 + goto _test_eof + _test_eof120: + (m.cs) = 120 + goto _test_eof + _test_eof121: + (m.cs) = 121 + goto _test_eof + _test_eof492: + (m.cs) = 492 + goto _test_eof + _test_eof493: + (m.cs) = 493 + goto _test_eof + _test_eof122: + (m.cs) = 122 + goto _test_eof + _test_eof123: + (m.cs) = 123 + goto _test_eof + _test_eof124: + (m.cs) = 124 + goto _test_eof + _test_eof125: + (m.cs) = 125 + goto _test_eof + _test_eof494: + (m.cs) = 494 + goto _test_eof + _test_eof495: + (m.cs) = 495 + goto _test_eof + _test_eof496: + (m.cs) = 496 + goto _test_eof + _test_eof126: + (m.cs) = 126 + goto _test_eof + _test_eof497: + (m.cs) = 497 + goto _test_eof + _test_eof498: + (m.cs) = 498 + goto _test_eof + _test_eof499: + (m.cs) = 499 + goto _test_eof + _test_eof500: + (m.cs) = 500 + goto _test_eof + _test_eof501: + (m.cs) = 501 + goto _test_eof + _test_eof502: + (m.cs) = 502 + goto _test_eof + _test_eof503: + (m.cs) = 503 + goto _test_eof + _test_eof504: + (m.cs) = 504 + goto _test_eof + _test_eof505: + (m.cs) = 505 + goto _test_eof + _test_eof506: + (m.cs) = 506 + goto _test_eof + _test_eof507: + (m.cs) = 507 + goto _test_eof + _test_eof508: + (m.cs) = 508 + goto _test_eof + _test_eof509: + (m.cs) = 509 + goto _test_eof + _test_eof510: + (m.cs) = 510 + goto _test_eof + _test_eof511: + (m.cs) = 511 + goto _test_eof + _test_eof512: + (m.cs) = 512 + goto _test_eof + _test_eof513: + (m.cs) = 513 + goto _test_eof + _test_eof514: + (m.cs) = 514 + goto _test_eof + _test_eof515: + (m.cs) = 515 + goto _test_eof + _test_eof516: + (m.cs) = 516 + goto _test_eof + _test_eof127: + (m.cs) = 127 + goto _test_eof + _test_eof128: + (m.cs) = 128 + goto _test_eof + _test_eof517: + (m.cs) = 517 + goto _test_eof + _test_eof518: + (m.cs) = 518 + goto _test_eof + _test_eof519: + (m.cs) = 519 + goto _test_eof + _test_eof520: + (m.cs) = 520 + goto _test_eof + _test_eof521: + (m.cs) = 521 + goto _test_eof + _test_eof522: + (m.cs) = 522 + goto _test_eof + _test_eof523: + (m.cs) = 523 + goto _test_eof + _test_eof524: + (m.cs) = 524 + goto _test_eof + _test_eof525: + (m.cs) = 525 + goto _test_eof + _test_eof129: + (m.cs) = 129 + goto _test_eof + _test_eof130: + (m.cs) = 130 + goto _test_eof + _test_eof131: + (m.cs) = 131 + goto _test_eof + _test_eof526: + (m.cs) = 526 + goto _test_eof + _test_eof132: + (m.cs) = 132 + goto _test_eof + _test_eof133: + (m.cs) = 133 + goto _test_eof + _test_eof134: + (m.cs) = 134 + goto _test_eof + _test_eof527: + (m.cs) = 527 + goto _test_eof + _test_eof135: + (m.cs) = 135 + goto _test_eof + _test_eof136: + (m.cs) = 136 + goto _test_eof + _test_eof528: + (m.cs) = 528 + goto _test_eof + _test_eof529: + (m.cs) = 529 + goto _test_eof + _test_eof137: + (m.cs) = 137 + goto _test_eof + _test_eof138: + (m.cs) = 138 + goto _test_eof + _test_eof139: + (m.cs) = 139 + goto _test_eof + _test_eof530: + (m.cs) = 530 + goto _test_eof + _test_eof531: + (m.cs) = 531 + goto _test_eof + 
_test_eof140: + (m.cs) = 140 + goto _test_eof + _test_eof532: + (m.cs) = 532 + goto _test_eof + _test_eof141: + (m.cs) = 141 + goto _test_eof + _test_eof533: + (m.cs) = 533 + goto _test_eof + _test_eof534: + (m.cs) = 534 + goto _test_eof + _test_eof535: + (m.cs) = 535 + goto _test_eof + _test_eof536: + (m.cs) = 536 + goto _test_eof + _test_eof537: + (m.cs) = 537 + goto _test_eof + _test_eof538: + (m.cs) = 538 + goto _test_eof + _test_eof539: + (m.cs) = 539 + goto _test_eof + _test_eof540: + (m.cs) = 540 + goto _test_eof + _test_eof142: + (m.cs) = 142 + goto _test_eof + _test_eof143: + (m.cs) = 143 + goto _test_eof + _test_eof144: + (m.cs) = 144 + goto _test_eof + _test_eof541: + (m.cs) = 541 + goto _test_eof + _test_eof145: + (m.cs) = 145 + goto _test_eof + _test_eof146: + (m.cs) = 146 + goto _test_eof + _test_eof147: + (m.cs) = 147 + goto _test_eof + _test_eof542: + (m.cs) = 542 + goto _test_eof + _test_eof148: + (m.cs) = 148 + goto _test_eof + _test_eof149: + (m.cs) = 149 + goto _test_eof + _test_eof543: + (m.cs) = 543 + goto _test_eof + _test_eof544: + (m.cs) = 544 + goto _test_eof + _test_eof545: + (m.cs) = 545 + goto _test_eof + _test_eof546: + (m.cs) = 546 + goto _test_eof + _test_eof547: + (m.cs) = 547 + goto _test_eof + _test_eof548: + (m.cs) = 548 + goto _test_eof + _test_eof549: + (m.cs) = 549 + goto _test_eof + _test_eof550: + (m.cs) = 550 + goto _test_eof + _test_eof551: + (m.cs) = 551 + goto _test_eof + _test_eof552: + (m.cs) = 552 + goto _test_eof + _test_eof553: + (m.cs) = 553 + goto _test_eof + _test_eof554: + (m.cs) = 554 + goto _test_eof + _test_eof555: + (m.cs) = 555 + goto _test_eof + _test_eof556: + (m.cs) = 556 + goto _test_eof + _test_eof557: + (m.cs) = 557 + goto _test_eof + _test_eof558: + (m.cs) = 558 + goto _test_eof + _test_eof559: + (m.cs) = 559 + goto _test_eof + _test_eof560: + (m.cs) = 560 + goto _test_eof + _test_eof561: + (m.cs) = 561 + goto _test_eof + _test_eof562: + (m.cs) = 562 + goto _test_eof + _test_eof150: + (m.cs) = 150 + goto _test_eof + _test_eof151: + (m.cs) = 151 + goto _test_eof + _test_eof563: + (m.cs) = 563 + goto _test_eof + _test_eof564: + (m.cs) = 564 + goto _test_eof + _test_eof565: + (m.cs) = 565 + goto _test_eof + _test_eof152: + (m.cs) = 152 + goto _test_eof + _test_eof566: + (m.cs) = 566 + goto _test_eof + _test_eof567: + (m.cs) = 567 + goto _test_eof + _test_eof153: + (m.cs) = 153 + goto _test_eof + _test_eof568: + (m.cs) = 568 + goto _test_eof + _test_eof569: + (m.cs) = 569 + goto _test_eof + _test_eof570: + (m.cs) = 570 + goto _test_eof + _test_eof571: + (m.cs) = 571 + goto _test_eof + _test_eof572: + (m.cs) = 572 + goto _test_eof + _test_eof573: + (m.cs) = 573 + goto _test_eof + _test_eof574: + (m.cs) = 574 + goto _test_eof + _test_eof575: + (m.cs) = 575 + goto _test_eof + _test_eof576: + (m.cs) = 576 + goto _test_eof + _test_eof577: + (m.cs) = 577 + goto _test_eof + _test_eof578: + (m.cs) = 578 + goto _test_eof + _test_eof579: + (m.cs) = 579 + goto _test_eof + _test_eof580: + (m.cs) = 580 + goto _test_eof + _test_eof581: + (m.cs) = 581 + goto _test_eof + _test_eof582: + (m.cs) = 582 + goto _test_eof + _test_eof583: + (m.cs) = 583 + goto _test_eof + _test_eof584: + (m.cs) = 584 + goto _test_eof + _test_eof585: + (m.cs) = 585 + goto _test_eof + _test_eof154: + (m.cs) = 154 + goto _test_eof + _test_eof155: + (m.cs) = 155 + goto _test_eof + _test_eof586: + (m.cs) = 586 + goto _test_eof + _test_eof156: + (m.cs) = 156 + goto _test_eof + _test_eof587: + (m.cs) = 587 + goto _test_eof + _test_eof588: + (m.cs) = 588 + goto _test_eof + 
_test_eof589: + (m.cs) = 589 + goto _test_eof + _test_eof590: + (m.cs) = 590 + goto _test_eof + _test_eof591: + (m.cs) = 591 + goto _test_eof + _test_eof592: + (m.cs) = 592 + goto _test_eof + _test_eof593: + (m.cs) = 593 + goto _test_eof + _test_eof594: + (m.cs) = 594 + goto _test_eof + _test_eof157: + (m.cs) = 157 + goto _test_eof + _test_eof158: + (m.cs) = 158 + goto _test_eof + _test_eof159: + (m.cs) = 159 + goto _test_eof + _test_eof595: + (m.cs) = 595 + goto _test_eof + _test_eof160: + (m.cs) = 160 + goto _test_eof + _test_eof161: + (m.cs) = 161 + goto _test_eof + _test_eof162: + (m.cs) = 162 + goto _test_eof + _test_eof596: + (m.cs) = 596 + goto _test_eof + _test_eof163: + (m.cs) = 163 + goto _test_eof + _test_eof164: + (m.cs) = 164 + goto _test_eof + _test_eof597: + (m.cs) = 597 + goto _test_eof + _test_eof598: + (m.cs) = 598 + goto _test_eof + _test_eof165: + (m.cs) = 165 + goto _test_eof + _test_eof166: + (m.cs) = 166 + goto _test_eof + _test_eof167: + (m.cs) = 167 + goto _test_eof + _test_eof168: + (m.cs) = 168 + goto _test_eof + _test_eof169: + (m.cs) = 169 + goto _test_eof + _test_eof170: + (m.cs) = 170 + goto _test_eof + _test_eof599: + (m.cs) = 599 + goto _test_eof + _test_eof600: + (m.cs) = 600 + goto _test_eof + _test_eof601: + (m.cs) = 601 + goto _test_eof + _test_eof602: + (m.cs) = 602 + goto _test_eof + _test_eof603: + (m.cs) = 603 + goto _test_eof + _test_eof604: + (m.cs) = 604 + goto _test_eof + _test_eof605: + (m.cs) = 605 + goto _test_eof + _test_eof606: + (m.cs) = 606 + goto _test_eof + _test_eof607: + (m.cs) = 607 + goto _test_eof + _test_eof608: + (m.cs) = 608 + goto _test_eof + _test_eof609: + (m.cs) = 609 + goto _test_eof + _test_eof610: + (m.cs) = 610 + goto _test_eof + _test_eof611: + (m.cs) = 611 + goto _test_eof + _test_eof612: + (m.cs) = 612 + goto _test_eof + _test_eof613: + (m.cs) = 613 + goto _test_eof + _test_eof614: + (m.cs) = 614 + goto _test_eof + _test_eof615: + (m.cs) = 615 + goto _test_eof + _test_eof616: + (m.cs) = 616 + goto _test_eof + _test_eof617: + (m.cs) = 617 + goto _test_eof + _test_eof171: + (m.cs) = 171 + goto _test_eof + _test_eof172: + (m.cs) = 172 + goto _test_eof + _test_eof173: + (m.cs) = 173 + goto _test_eof + _test_eof618: + (m.cs) = 618 + goto _test_eof + _test_eof619: + (m.cs) = 619 + goto _test_eof + _test_eof620: + (m.cs) = 620 + goto _test_eof + _test_eof174: + (m.cs) = 174 + goto _test_eof + _test_eof621: + (m.cs) = 621 + goto _test_eof + _test_eof622: + (m.cs) = 622 + goto _test_eof + _test_eof175: + (m.cs) = 175 + goto _test_eof + _test_eof623: + (m.cs) = 623 + goto _test_eof + _test_eof624: + (m.cs) = 624 + goto _test_eof + _test_eof625: + (m.cs) = 625 + goto _test_eof + _test_eof626: + (m.cs) = 626 + goto _test_eof + _test_eof627: + (m.cs) = 627 + goto _test_eof + _test_eof176: + (m.cs) = 176 + goto _test_eof + _test_eof177: + (m.cs) = 177 + goto _test_eof + _test_eof178: + (m.cs) = 178 + goto _test_eof + _test_eof628: + (m.cs) = 628 + goto _test_eof + _test_eof179: + (m.cs) = 179 + goto _test_eof + _test_eof180: + (m.cs) = 180 + goto _test_eof + _test_eof181: + (m.cs) = 181 + goto _test_eof + _test_eof629: + (m.cs) = 629 + goto _test_eof + _test_eof182: + (m.cs) = 182 + goto _test_eof + _test_eof183: + (m.cs) = 183 + goto _test_eof + _test_eof630: + (m.cs) = 630 + goto _test_eof + _test_eof631: + (m.cs) = 631 + goto _test_eof + _test_eof184: + (m.cs) = 184 + goto _test_eof + _test_eof632: + (m.cs) = 632 + goto _test_eof + _test_eof633: + (m.cs) = 633 + goto _test_eof + _test_eof634: + (m.cs) = 634 + goto _test_eof + 
_test_eof185: + (m.cs) = 185 + goto _test_eof + _test_eof186: + (m.cs) = 186 + goto _test_eof + _test_eof187: + (m.cs) = 187 + goto _test_eof + _test_eof635: + (m.cs) = 635 + goto _test_eof + _test_eof188: + (m.cs) = 188 + goto _test_eof + _test_eof189: + (m.cs) = 189 + goto _test_eof + _test_eof190: + (m.cs) = 190 + goto _test_eof + _test_eof636: + (m.cs) = 636 + goto _test_eof + _test_eof191: + (m.cs) = 191 + goto _test_eof + _test_eof192: + (m.cs) = 192 + goto _test_eof + _test_eof637: + (m.cs) = 637 + goto _test_eof + _test_eof638: + (m.cs) = 638 + goto _test_eof + _test_eof193: + (m.cs) = 193 + goto _test_eof + _test_eof194: + (m.cs) = 194 + goto _test_eof + _test_eof195: + (m.cs) = 195 + goto _test_eof + _test_eof639: + (m.cs) = 639 + goto _test_eof + _test_eof196: + (m.cs) = 196 + goto _test_eof + _test_eof197: + (m.cs) = 197 + goto _test_eof + _test_eof640: + (m.cs) = 640 + goto _test_eof + _test_eof641: + (m.cs) = 641 + goto _test_eof + _test_eof642: + (m.cs) = 642 + goto _test_eof + _test_eof643: + (m.cs) = 643 + goto _test_eof + _test_eof644: + (m.cs) = 644 + goto _test_eof + _test_eof645: + (m.cs) = 645 + goto _test_eof + _test_eof646: + (m.cs) = 646 + goto _test_eof + _test_eof647: + (m.cs) = 647 + goto _test_eof + _test_eof198: + (m.cs) = 198 + goto _test_eof + _test_eof199: + (m.cs) = 199 + goto _test_eof + _test_eof200: + (m.cs) = 200 + goto _test_eof + _test_eof648: + (m.cs) = 648 + goto _test_eof + _test_eof201: + (m.cs) = 201 + goto _test_eof + _test_eof202: + (m.cs) = 202 + goto _test_eof + _test_eof203: + (m.cs) = 203 + goto _test_eof + _test_eof649: + (m.cs) = 649 + goto _test_eof + _test_eof204: + (m.cs) = 204 + goto _test_eof + _test_eof205: + (m.cs) = 205 + goto _test_eof + _test_eof650: + (m.cs) = 650 + goto _test_eof + _test_eof651: + (m.cs) = 651 + goto _test_eof + _test_eof206: + (m.cs) = 206 + goto _test_eof + _test_eof207: + (m.cs) = 207 + goto _test_eof + _test_eof208: + (m.cs) = 208 + goto _test_eof + _test_eof652: + (m.cs) = 652 + goto _test_eof + _test_eof653: + (m.cs) = 653 + goto _test_eof + _test_eof654: + (m.cs) = 654 + goto _test_eof + _test_eof655: + (m.cs) = 655 + goto _test_eof + _test_eof656: + (m.cs) = 656 + goto _test_eof + _test_eof657: + (m.cs) = 657 + goto _test_eof + _test_eof658: + (m.cs) = 658 + goto _test_eof + _test_eof659: + (m.cs) = 659 + goto _test_eof + _test_eof660: + (m.cs) = 660 + goto _test_eof + _test_eof661: + (m.cs) = 661 + goto _test_eof + _test_eof662: + (m.cs) = 662 + goto _test_eof + _test_eof663: + (m.cs) = 663 + goto _test_eof + _test_eof664: + (m.cs) = 664 + goto _test_eof + _test_eof665: + (m.cs) = 665 + goto _test_eof + _test_eof666: + (m.cs) = 666 + goto _test_eof + _test_eof667: + (m.cs) = 667 + goto _test_eof + _test_eof668: + (m.cs) = 668 + goto _test_eof + _test_eof669: + (m.cs) = 669 + goto _test_eof + _test_eof670: + (m.cs) = 670 + goto _test_eof + _test_eof209: + (m.cs) = 209 + goto _test_eof + _test_eof210: + (m.cs) = 210 + goto _test_eof + _test_eof211: + (m.cs) = 211 + goto _test_eof + _test_eof212: + (m.cs) = 212 + goto _test_eof + _test_eof213: + (m.cs) = 213 + goto _test_eof + _test_eof671: + (m.cs) = 671 + goto _test_eof + _test_eof214: + (m.cs) = 214 + goto _test_eof + _test_eof215: + (m.cs) = 215 + goto _test_eof + _test_eof672: + (m.cs) = 672 + goto _test_eof + _test_eof673: + (m.cs) = 673 + goto _test_eof + _test_eof674: + (m.cs) = 674 + goto _test_eof + _test_eof675: + (m.cs) = 675 + goto _test_eof + _test_eof676: + (m.cs) = 676 + goto _test_eof + _test_eof677: + (m.cs) = 677 + goto _test_eof + 
_test_eof678: + (m.cs) = 678 + goto _test_eof + _test_eof679: + (m.cs) = 679 + goto _test_eof + _test_eof680: + (m.cs) = 680 + goto _test_eof + _test_eof216: + (m.cs) = 216 + goto _test_eof + _test_eof217: + (m.cs) = 217 + goto _test_eof + _test_eof218: + (m.cs) = 218 + goto _test_eof + _test_eof681: + (m.cs) = 681 + goto _test_eof + _test_eof219: + (m.cs) = 219 + goto _test_eof + _test_eof220: + (m.cs) = 220 + goto _test_eof + _test_eof221: + (m.cs) = 221 + goto _test_eof + _test_eof682: + (m.cs) = 682 + goto _test_eof + _test_eof222: + (m.cs) = 222 + goto _test_eof + _test_eof223: + (m.cs) = 223 + goto _test_eof + _test_eof683: + (m.cs) = 683 + goto _test_eof + _test_eof684: + (m.cs) = 684 + goto _test_eof + _test_eof224: + (m.cs) = 224 + goto _test_eof + _test_eof225: + (m.cs) = 225 + goto _test_eof + _test_eof226: + (m.cs) = 226 + goto _test_eof + _test_eof685: + (m.cs) = 685 + goto _test_eof + _test_eof227: + (m.cs) = 227 + goto _test_eof + _test_eof228: + (m.cs) = 228 + goto _test_eof + _test_eof686: + (m.cs) = 686 + goto _test_eof + _test_eof687: + (m.cs) = 687 + goto _test_eof + _test_eof688: + (m.cs) = 688 + goto _test_eof + _test_eof689: + (m.cs) = 689 + goto _test_eof + _test_eof690: + (m.cs) = 690 + goto _test_eof + _test_eof691: + (m.cs) = 691 + goto _test_eof + _test_eof692: + (m.cs) = 692 + goto _test_eof + _test_eof693: + (m.cs) = 693 + goto _test_eof + _test_eof229: + (m.cs) = 229 + goto _test_eof + _test_eof230: + (m.cs) = 230 + goto _test_eof + _test_eof231: + (m.cs) = 231 + goto _test_eof + _test_eof694: + (m.cs) = 694 + goto _test_eof + _test_eof232: + (m.cs) = 232 + goto _test_eof + _test_eof233: + (m.cs) = 233 + goto _test_eof + _test_eof695: + (m.cs) = 695 + goto _test_eof + _test_eof696: + (m.cs) = 696 + goto _test_eof + _test_eof697: + (m.cs) = 697 + goto _test_eof + _test_eof698: + (m.cs) = 698 + goto _test_eof + _test_eof699: + (m.cs) = 699 + goto _test_eof + _test_eof700: + (m.cs) = 700 + goto _test_eof + _test_eof701: + (m.cs) = 701 + goto _test_eof + _test_eof702: + (m.cs) = 702 + goto _test_eof + _test_eof234: + (m.cs) = 234 + goto _test_eof + _test_eof235: + (m.cs) = 235 + goto _test_eof + _test_eof236: + (m.cs) = 236 + goto _test_eof + _test_eof703: + (m.cs) = 703 + goto _test_eof + _test_eof237: + (m.cs) = 237 + goto _test_eof + _test_eof238: + (m.cs) = 238 + goto _test_eof + _test_eof239: + (m.cs) = 239 + goto _test_eof + _test_eof704: + (m.cs) = 704 + goto _test_eof + _test_eof240: + (m.cs) = 240 + goto _test_eof + _test_eof241: + (m.cs) = 241 + goto _test_eof + _test_eof705: + (m.cs) = 705 + goto _test_eof + _test_eof706: + (m.cs) = 706 + goto _test_eof + _test_eof242: + (m.cs) = 242 + goto _test_eof + _test_eof243: + (m.cs) = 243 + goto _test_eof + _test_eof244: + (m.cs) = 244 + goto _test_eof + _test_eof707: + (m.cs) = 707 + goto _test_eof + _test_eof708: + (m.cs) = 708 + goto _test_eof + _test_eof709: + (m.cs) = 709 + goto _test_eof + _test_eof710: + (m.cs) = 710 + goto _test_eof + _test_eof711: + (m.cs) = 711 + goto _test_eof + _test_eof712: + (m.cs) = 712 + goto _test_eof + _test_eof713: + (m.cs) = 713 + goto _test_eof + _test_eof714: + (m.cs) = 714 + goto _test_eof + _test_eof715: + (m.cs) = 715 + goto _test_eof + _test_eof716: + (m.cs) = 716 + goto _test_eof + _test_eof717: + (m.cs) = 717 + goto _test_eof + _test_eof718: + (m.cs) = 718 + goto _test_eof + _test_eof719: + (m.cs) = 719 + goto _test_eof + _test_eof720: + (m.cs) = 720 + goto _test_eof + _test_eof721: + (m.cs) = 721 + goto _test_eof + _test_eof722: + (m.cs) = 722 + goto _test_eof + 
_test_eof723: + (m.cs) = 723 + goto _test_eof + _test_eof724: + (m.cs) = 724 + goto _test_eof + _test_eof725: + (m.cs) = 725 + goto _test_eof + _test_eof245: + (m.cs) = 245 + goto _test_eof + _test_eof246: + (m.cs) = 246 + goto _test_eof + _test_eof726: + (m.cs) = 726 + goto _test_eof + _test_eof247: + (m.cs) = 247 + goto _test_eof + _test_eof248: + (m.cs) = 248 + goto _test_eof + _test_eof727: + (m.cs) = 727 + goto _test_eof + _test_eof728: + (m.cs) = 728 + goto _test_eof + _test_eof729: + (m.cs) = 729 + goto _test_eof + _test_eof730: + (m.cs) = 730 + goto _test_eof + _test_eof731: + (m.cs) = 731 + goto _test_eof + _test_eof732: + (m.cs) = 732 + goto _test_eof + _test_eof733: + (m.cs) = 733 + goto _test_eof + _test_eof734: + (m.cs) = 734 + goto _test_eof + _test_eof249: + (m.cs) = 249 + goto _test_eof + _test_eof250: + (m.cs) = 250 + goto _test_eof + _test_eof251: + (m.cs) = 251 + goto _test_eof + _test_eof735: + (m.cs) = 735 + goto _test_eof + _test_eof252: + (m.cs) = 252 + goto _test_eof + _test_eof253: + (m.cs) = 253 + goto _test_eof + _test_eof254: + (m.cs) = 254 + goto _test_eof + _test_eof736: + (m.cs) = 736 + goto _test_eof + _test_eof255: + (m.cs) = 255 + goto _test_eof + _test_eof256: + (m.cs) = 256 + goto _test_eof + _test_eof737: + (m.cs) = 737 + goto _test_eof + _test_eof738: + (m.cs) = 738 + goto _test_eof + _test_eof257: + (m.cs) = 257 + goto _test_eof + _test_eof258: + (m.cs) = 258 + goto _test_eof + _test_eof739: + (m.cs) = 739 + goto _test_eof + _test_eof261: + (m.cs) = 261 + goto _test_eof + _test_eof741: + (m.cs) = 741 + goto _test_eof + _test_eof742: + (m.cs) = 742 + goto _test_eof + _test_eof262: + (m.cs) = 262 + goto _test_eof + _test_eof263: + (m.cs) = 263 + goto _test_eof + _test_eof264: + (m.cs) = 264 + goto _test_eof + _test_eof265: + (m.cs) = 265 + goto _test_eof + _test_eof743: + (m.cs) = 743 + goto _test_eof + _test_eof266: + (m.cs) = 266 + goto _test_eof + _test_eof744: + (m.cs) = 744 + goto _test_eof + _test_eof267: + (m.cs) = 267 + goto _test_eof + _test_eof268: + (m.cs) = 268 + goto _test_eof + _test_eof269: + (m.cs) = 269 + goto _test_eof + _test_eof740: + (m.cs) = 740 + goto _test_eof + _test_eof259: + (m.cs) = 259 + goto _test_eof + _test_eof260: + (m.cs) = 260 + goto _test_eof + + _test_eof: + { + } + if (m.p) == (m.eof) { + switch m.cs { + case 8, 261: +//line plugins/parsers/influx/machine.go.rl:24 + + err = ErrNameParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + + case 2, 3, 4, 5, 6, 7, 28, 31, 32, 35, 36, 37, 49, 50, 51, 52, 53, 73, 75, 76, 93, 103, 105, 141, 153, 156, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257: +//line plugins/parsers/influx/machine.go.rl:31 + + err = ErrFieldParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + + case 13, 14, 15, 22, 24, 25, 263, 264, 265, 266, 267, 268: +//line plugins/parsers/influx/machine.go.rl:38 + + err = ErrTagParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + + case 244: +//line plugins/parsers/influx/machine.go.rl:45 + + err = ErrTimestampParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + + case 741: +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 
+ goto _out + } + } + + case 743, 744: +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + + case 271, 272, 273, 274, 275, 277, 278, 297, 298, 299, 301, 302, 305, 306, 327, 328, 329, 330, 332, 376, 377, 379, 380, 402, 403, 408, 409, 411, 431, 432, 434, 435, 457, 458, 618, 621: +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 10, 38, 40, 165, 167: +//line plugins/parsers/influx/machine.go.rl:24 + + err = ErrNameParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + +//line plugins/parsers/influx/machine.go.rl:31 + + err = ErrFieldParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + + case 34, 74, 104, 170, 208: +//line plugins/parsers/influx/machine.go.rl:31 + + err = ErrFieldParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + +//line plugins/parsers/influx/machine.go.rl:45 + + err = ErrTimestampParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + + case 20, 44, 45, 46, 58, 59, 61, 63, 68, 70, 71, 77, 78, 79, 84, 86, 88, 89, 97, 98, 100, 101, 102, 107, 108, 109, 122, 123, 137, 138: +//line plugins/parsers/influx/machine.go.rl:38 + + err = ErrTagParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + +//line plugins/parsers/influx/machine.go.rl:31 + + err = ErrFieldParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + + case 60: +//line plugins/parsers/influx/machine.go.rl:38 + + err = ErrTagParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + +//line plugins/parsers/influx/machine.go.rl:45 + + err = ErrTimestampParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + + case 270: +//line plugins/parsers/influx/machine.go.rl:74 + + m.beginMetric = true + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 1: +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:38 + + err = ErrTagParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + + case 300, 303, 307, 375, 399, 400, 404, 405, 406, 530, 564, 565, 567: +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 16, 23: +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:38 + + err = ErrTagParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + + case 351, 352, 353, 355, 374, 430, 454, 455, 459, 479, 495, 496, 498: +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 624, 675, 689, 729: +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + 
(m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 625, 678, 692, 732: +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 326, 619, 620, 622, 623, 626, 632, 633, 671, 672, 673, 674, 676, 677, 679, 685, 686, 687, 688, 690, 691, 693, 726, 727, 728, 730, 731, 733: +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 627, 628, 629, 630, 631, 634, 635, 636, 637, 638, 680, 681, 682, 683, 684, 734, 735, 736, 737, 738: +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 276, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 331, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 378, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 410, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 433, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 599, 600, 601, 602, 603, 604, 605, 606, 607, 608, 609, 610, 611, 612, 613, 614, 615, 616, 617, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 725: +//line plugins/parsers/influx/machine.go.rl:149 + + err = m.handler.SetTimestamp(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 9: +//line plugins/parsers/influx/machine.go.rl:24 + + err = ErrNameParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:38 + + err = ErrTagParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + + case 99: +//line plugins/parsers/influx/machine.go.rl:38 + + err = ErrTagParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + +//line plugins/parsers/influx/machine.go.rl:31 + + err = ErrFieldParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + +//line plugins/parsers/influx/machine.go.rl:45 + + err = ErrTimestampParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + + case 11, 12, 26, 27, 29, 30, 41, 42, 54, 55, 56, 57, 72, 91, 92, 94, 96, 139, 140, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 154, 155, 157, 158, 159, 160, 161, 162, 163, 164, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241: +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 
+ goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:38 + + err = ErrTagParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + +//line plugins/parsers/influx/machine.go.rl:31 + + err = ErrFieldParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + + case 535, 589, 697: +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 538, 592, 700: +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 407, 531, 532, 533, 534, 536, 537, 539, 563, 586, 587, 588, 590, 591, 593, 694, 695, 696, 698, 699, 701: +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 540, 541, 542, 543, 544, 594, 595, 596, 597, 598, 702, 703, 704, 705, 706: +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 304, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 401, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558, 559, 560, 561, 562, 566, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583, 584, 585: +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:149 + + err = m.handler.SetTimestamp(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 17, 18, 19, 21, 47, 48, 64, 65, 66, 67, 69, 80, 81, 82, 83, 85, 87, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 124, 125, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205: +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line 
plugins/parsers/influx/machine.go.rl:38 + + err = ErrTagParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + +//line plugins/parsers/influx/machine.go.rl:31 + + err = ErrFieldParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + + case 484, 520, 642: +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:104 + + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 487, 523, 645: +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:113 + + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 478, 480, 481, 482, 483, 485, 486, 488, 494, 517, 518, 519, 521, 522, 524, 639, 640, 641, 643, 644, 646: +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:122 + + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 489, 490, 491, 492, 493, 525, 526, 527, 528, 529, 647, 648, 649, 650, 651: +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:131 + + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 354, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 456, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 497, 499, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516: +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:149 + + err = m.handler.SetTimestamp(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:170 + + m.finishMetric = true + + case 39, 166, 168, 169, 206, 207, 242, 243: +//line plugins/parsers/influx/machine.go.rl:24 + + err = ErrNameParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:38 + + err = ErrTagParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + 
(m.cs) = 0 + goto _out + } + +//line plugins/parsers/influx/machine.go.rl:31 + + err = ErrFieldParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + + case 43, 90, 152: +//line plugins/parsers/influx/machine.go.rl:78 + + err = m.handler.SetMeasurement(m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:38 + + err = ErrTagParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + +//line plugins/parsers/influx/machine.go.rl:31 + + err = ErrFieldParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + +//line plugins/parsers/influx/machine.go.rl:45 + + err = ErrTimestampParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + + case 62, 106, 126: +//line plugins/parsers/influx/machine.go.rl:91 + + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + } + +//line plugins/parsers/influx/machine.go.rl:38 + + err = ErrTagParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + +//line plugins/parsers/influx/machine.go.rl:31 + + err = ErrFieldParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + +//line plugins/parsers/influx/machine.go.rl:45 + + err = ErrTimestampParse + (m.p)-- + + (m.cs) = 258 + { + (m.p)++ + (m.cs) = 0 + goto _out + } + +//line plugins/parsers/influx/machine.go:31897 + } + } + + _out: + { + } + } + +//line plugins/parsers/influx/machine.go.rl:407 + + if err != nil { + return err + } + + // This would indicate an error in the machine that was reported with a + // more specific error. We return a generic error but this should + // possibly be a panic. + if m.cs == 0 { + m.cs = LineProtocol_en_discard_line + return ErrParse + } + + // If we haven't found a metric line yet and we reached the EOF, report it + // now. This happens when the data ends with a comment or whitespace. + // + // Otherwise we have successfully parsed a metric line, so if we are at + // the EOF we will report it the next call. + if !m.beginMetric && m.p == m.pe && m.pe == m.eof { + return EOF + } + + return nil +} + +// Position returns the current byte offset into the data. +func (m *machine) Position() int { + return m.p +} + +// LineOffset returns the byte offset of the current line. +func (m *machine) LineOffset() int { + return m.sol +} + +// LineNumber returns the current line number. Lines are counted based on the +// regular expression `\r?\n`. +func (m *machine) LineNumber() int { + return m.lineno +} + +// Column returns the current column. +func (m *machine) Column() int { + lineOffset := m.p - m.sol + return lineOffset + 1 +} + +func (m *machine) text() []byte { + return m.data[m.pb:m.p] +} + +type streamMachine struct { + machine *machine + reader io.Reader +} + +func NewStreamMachine(r io.Reader, handler Handler) *streamMachine { + m := &streamMachine{ + machine: NewMachine(handler), + reader: r, + } + + m.machine.SetData(make([]byte, 1024)) + m.machine.pe = 0 + m.machine.eof = -1 + return m +} + +func (m *streamMachine) Next() error { + // Check if we are already at EOF, this should only happen if called again + // after already returning EOF. 
+ if m.machine.p == m.machine.pe && m.machine.pe == m.machine.eof { + return EOF + } + + copy(m.machine.data, m.machine.data[m.machine.p:]) + m.machine.pe = m.machine.pe - m.machine.p + m.machine.sol = m.machine.sol - m.machine.p + m.machine.pb = 0 + m.machine.p = 0 + m.machine.eof = -1 + + m.machine.key = nil + m.machine.beginMetric = false + m.machine.finishMetric = false + + for { + // Expand the buffer if it is full + if m.machine.pe == len(m.machine.data) { + expanded := make([]byte, 2*len(m.machine.data)) + copy(expanded, m.machine.data) + m.machine.data = expanded + } + + n, err := m.reader.Read(m.machine.data[m.machine.pe:]) + if n == 0 && err == io.EOF { + m.machine.eof = m.machine.pe + } else if err != nil && err != io.EOF { + return err + } + + m.machine.pe += n + + err = m.machine.exec() + if err != nil { + return err + } + + // If we have successfully parsed a full metric line break out + if m.machine.finishMetric { + break + } + + } + + return nil +} + +// Position returns the current byte offset into the data. +func (m *streamMachine) Position() int { + return m.machine.Position() +} + +// LineOffset returns the byte offset of the current line. +func (m *streamMachine) LineOffset() int { + return m.machine.LineOffset() +} + +// LineNumber returns the current line number. Lines are counted based on the +// regular expression `\r?\n`. +func (m *streamMachine) LineNumber() int { + return m.machine.LineNumber() +} + +// Column returns the current column. +func (m *streamMachine) Column() int { + return m.machine.Column() +} + +// LineText returns the text of the current line that has been parsed so far. +func (m *streamMachine) LineText() string { + return string(m.machine.data[0:m.machine.p]) +} diff --git a/vendor/github.com/influxdata/line-protocol/machine.go.rl b/vendor/github.com/influxdata/line-protocol/machine.go.rl new file mode 100644 index 0000000..1a2fef9 --- /dev/null +++ b/vendor/github.com/influxdata/line-protocol/machine.go.rl @@ -0,0 +1,549 @@ +package protocol + +import ( + "errors" + "io" +) + +var ( + ErrNameParse = errors.New("expected measurement name") + ErrFieldParse = errors.New("expected field") + ErrTagParse = errors.New("expected tag") + ErrTimestampParse = errors.New("expected timestamp") + ErrParse = errors.New("parse error") + EOF = errors.New("EOF") +) + +%%{ +machine LineProtocol; + +action begin { + m.pb = m.p +} + +action name_error { + err = ErrNameParse + fhold; + fnext discard_line; + fbreak; +} + +action field_error { + err = ErrFieldParse + fhold; + fnext discard_line; + fbreak; +} + +action tagset_error { + err = ErrTagParse + fhold; + fnext discard_line; + fbreak; +} + +action timestamp_error { + err = ErrTimestampParse + fhold; + fnext discard_line; + fbreak; +} + +action parse_error { + err = ErrParse + fhold; + fnext discard_line; + fbreak; +} + +action align_error { + err = ErrParse + fnext discard_line; + fbreak; +} + +action hold_recover { + fhold; + fgoto main; +} + +action goto_align { + fgoto align; +} + +action begin_metric { + m.beginMetric = true +} + +action name { + err = m.handler.SetMeasurement(m.text()) + if err != nil { + fhold; + fnext discard_line; + fbreak; + } +} + +action tagkey { + m.key = m.text() +} + +action tagvalue { + err = m.handler.AddTag(m.key, m.text()) + if err != nil { + fhold; + fnext discard_line; + fbreak; + } +} + +action fieldkey { + m.key = m.text() +} + +action integer { + err = m.handler.AddInt(m.key, m.text()) + if err != nil { + fhold; + fnext discard_line; + fbreak; + } +} + +action unsigned 
{ + err = m.handler.AddUint(m.key, m.text()) + if err != nil { + fhold; + fnext discard_line; + fbreak; + } +} + +action float { + err = m.handler.AddFloat(m.key, m.text()) + if err != nil { + fhold; + fnext discard_line; + fbreak; + } +} + +action bool { + err = m.handler.AddBool(m.key, m.text()) + if err != nil { + fhold; + fnext discard_line; + fbreak; + } +} + +action string { + err = m.handler.AddString(m.key, m.text()) + if err != nil { + fhold; + fnext discard_line; + fbreak; + } +} + +action timestamp { + err = m.handler.SetTimestamp(m.text()) + if err != nil { + fhold; + fnext discard_line; + fbreak; + } +} + +action incr_newline { + m.lineno++ + m.sol = m.p + m.sol++ // next char will be the first column in the line +} + +action eol { + m.finishMetric = true + fnext align; + fbreak; +} + +action finish_metric { + m.finishMetric = true +} + +ws = + [\t\v\f ]; + +newline = + '\r'? '\n' >incr_newline; + +non_zero_digit = + [1-9]; + +integer = + '-'? ( digit | ( non_zero_digit digit* ) ); + +unsigned = + ( digit | ( non_zero_digit digit* ) ); + +number = + '-'? (digit+ ('.' digit*)? | '.' digit+); + +scientific = + number 'e'i ["\-+"]? digit+; + +timestamp = + ('-'? digit{1,19}) >begin %timestamp; + +fieldkeychar = + [^\t\n\f\r ,=\\] | ( '\\' [^\t\n\f\r] ); + +fieldkey = + fieldkeychar+ >begin %fieldkey; + +fieldfloat = + (scientific | number) >begin %float; + +fieldinteger = + (integer 'i') >begin %integer; + +fieldunsigned = + (unsigned 'u') >begin %unsigned; + +false = + "false" | "FALSE" | "False" | "F" | "f"; + +true = + "true" | "TRUE" | "True" | "T" | "t"; + +fieldbool = + (true | false) >begin %bool; + +fieldstringchar = + [^\f\r\n\\"] | '\\' [\\"] | newline; + +fieldstring = + fieldstringchar* >begin %string; + +fieldstringquoted = + '"' fieldstring '"'; + +fieldvalue = fieldinteger | fieldunsigned | fieldfloat | fieldstringquoted | fieldbool; + +field = + fieldkey '=' fieldvalue; + +fieldset = + field ( ',' field )*; + +tagchar = + [^\t\n\f\r ,=\\] | ( '\\' [^\t\n\f\r\\] ) | '\\\\' %to{ fhold; }; + +tagkey = + tagchar+ >begin %tagkey; + +tagvalue = + tagchar+ >begin %eof(tagvalue) %tagvalue; + +tagset = + ((',' tagkey '=' tagvalue) $err(tagset_error))*; + +measurement_chars = + [^\t\n\f\r ,\\] | ( '\\' [^\t\n\f\r] ); + +measurement_start = + measurement_chars - '#'; + +measurement = + (measurement_start measurement_chars*) >begin %eof(name) %name; + +eol_break = + newline %to(eol) + ; + +metric = + measurement >err(name_error) + tagset + ws+ fieldset $err(field_error) + (ws+ timestamp)? $err(timestamp_error) + ; + +line_with_term = + ws* metric ws* eol_break + ; + +line_without_term = + ws* metric ws* + ; + +main := + (line_with_term* + (line_with_term | line_without_term?) + ) >begin_metric %eof(finish_metric) + ; + +# The discard_line machine discards the current line. Useful for recovering +# on the next line when an error occurs. +discard_line := + (any -- newline)* newline @goto_align; + +commentline = + ws* '#' (any -- newline)* newline; + +emptyline = + ws* newline; + +# The align machine scans forward to the start of the next line. This machine +# is used to skip over whitespace and comments, keeping this logic out of the +# main machine. +# +# Skip valid lines that don't contain line protocol, any other data will move +# control to the main parser via the err action. +align := + (emptyline | commentline | ws+)* %err(hold_recover); + +# Series is a machine for matching measurement+tagset +series := + (measurement >err(name_error) tagset eol_break?) 
+ >begin_metric + ; +}%% + +%% write data; + +type Handler interface { + SetMeasurement(name []byte) error + AddTag(key []byte, value []byte) error + AddInt(key []byte, value []byte) error + AddUint(key []byte, value []byte) error + AddFloat(key []byte, value []byte) error + AddString(key []byte, value []byte) error + AddBool(key []byte, value []byte) error + SetTimestamp(tm []byte) error +} + +type machine struct { + data []byte + cs int + p, pe, eof int + pb int + lineno int + sol int + handler Handler + initState int + key []byte + beginMetric bool + finishMetric bool +} + +func NewMachine(handler Handler) *machine { + m := &machine{ + handler: handler, + initState: LineProtocol_en_align, + } + + %% access m.; + %% variable p m.p; + %% variable cs m.cs; + %% variable pe m.pe; + %% variable eof m.eof; + %% variable data m.data; + %% write init; + + return m +} + +func NewSeriesMachine(handler Handler) *machine { + m := &machine{ + handler: handler, + initState: LineProtocol_en_series, + } + + %% access m.; + %% variable p m.p; + %% variable pe m.pe; + %% variable eof m.eof; + %% variable data m.data; + %% write init; + + return m +} + +func (m *machine) SetData(data []byte) { + m.data = data + m.p = 0 + m.pb = 0 + m.lineno = 1 + m.sol = 0 + m.pe = len(data) + m.eof = len(data) + m.key = nil + m.beginMetric = false + m.finishMetric = false + + %% write init; + m.cs = m.initState +} + +// Next parses the next metric line and returns nil if it was successfully +// processed. If the line contains a syntax error an error is returned, +// otherwise if the end of file is reached before finding a metric line then +// EOF is returned. +func (m *machine) Next() error { + if m.p == m.pe && m.pe == m.eof { + return EOF + } + + m.key = nil + m.beginMetric = false + m.finishMetric = false + + return m.exec() +} + +func (m *machine) exec() error { + var err error + %% write exec; + + if err != nil { + return err + } + + // This would indicate an error in the machine that was reported with a + // more specific error. We return a generic error but this should + // possibly be a panic. + if m.cs == %%{ write error; }%% { + m.cs = LineProtocol_en_discard_line + return ErrParse + } + + // If we haven't found a metric line yet and we reached the EOF, report it + // now. This happens when the data ends with a comment or whitespace. + // + // Otherwise we have successfully parsed a metric line, so if we are at + // the EOF we will report it the next call. + if !m.beginMetric && m.p == m.pe && m.pe == m.eof { + return EOF + } + + return nil +} + +// Position returns the current byte offset into the data. +func (m *machine) Position() int { + return m.p +} + +// LineOffset returns the byte offset of the current line. +func (m *machine) LineOffset() int { + return m.sol +} + +// LineNumber returns the current line number. Lines are counted based on the +// regular expression `\r?\n`. +func (m *machine) LineNumber() int { + return m.lineno +} + +// Column returns the current column. 
+func (m *machine) Column() int { + lineOffset := m.p - m.sol + return lineOffset + 1 +} + +func (m *machine) text() []byte { + return m.data[m.pb:m.p] +} + +type streamMachine struct { + machine *machine + reader io.Reader +} + +func NewStreamMachine(r io.Reader, handler Handler) *streamMachine { + m := &streamMachine{ + machine: NewMachine(handler), + reader: r, + } + + m.machine.SetData(make([]byte, 1024)) + m.machine.pe = 0 + m.machine.eof = -1 + return m +} + +func (m *streamMachine) Next() error { + // Check if we are already at EOF, this should only happen if called again + // after already returning EOF. + if m.machine.p == m.machine.pe && m.machine.pe == m.machine.eof { + return EOF + } + + copy(m.machine.data, m.machine.data[m.machine.p:]) + m.machine.pe = m.machine.pe - m.machine.p + m.machine.sol = m.machine.sol - m.machine.p + m.machine.pb = 0 + m.machine.p = 0 + m.machine.eof = -1 + + m.machine.key = nil + m.machine.beginMetric = false + m.machine.finishMetric = false + + for { + // Expand the buffer if it is full + if m.machine.pe == len(m.machine.data) { + expanded := make([]byte, 2 * len(m.machine.data)) + copy(expanded, m.machine.data) + m.machine.data = expanded + } + + n, err := m.reader.Read(m.machine.data[m.machine.pe:]) + if n == 0 && err == io.EOF { + m.machine.eof = m.machine.pe + } else if err != nil && err != io.EOF { + return err + } + + m.machine.pe += n + + err = m.machine.exec() + if err != nil { + return err + } + + // If we have successfully parsed a full metric line break out + if m.machine.finishMetric { + break + } + + } + + return nil +} + +// Position returns the current byte offset into the data. +func (m *streamMachine) Position() int { + return m.machine.Position() +} + +// LineOffset returns the byte offset of the current line. +func (m *streamMachine) LineOffset() int { + return m.machine.LineOffset() +} + +// LineNumber returns the current line number. Lines are counted based on the +// regular expression `\r?\n`. +func (m *streamMachine) LineNumber() int { + return m.machine.LineNumber() +} + +// Column returns the current column. +func (m *streamMachine) Column() int { + return m.machine.Column() +} + +// LineText returns the text of the current line that has been parsed so far. +func (m *streamMachine) LineText() string { + return string(m.machine.data[0:m.machine.p]) +} diff --git a/vendor/github.com/influxdata/line-protocol/metric.go b/vendor/github.com/influxdata/line-protocol/metric.go new file mode 100644 index 0000000..0b1fa2e --- /dev/null +++ b/vendor/github.com/influxdata/line-protocol/metric.go @@ -0,0 +1,428 @@ +package protocol + +import ( + "fmt" + "hash/fnv" + "sort" + "time" +) + +// Tag holds the keys and values for a bunch of Tag k/v pairs. +type Tag struct { + Key string + Value string +} + +// Field holds the keys and values for a bunch of Metric Field k/v pairs where Value can be a uint64, int64, int, float32, float64, string, or bool. +type Field struct { + Key string + Value interface{} +} + +// Metric is the interface for marshaling, if you implement this interface you can be marshalled into the line protocol. Woot! +type Metric interface { + Time() time.Time + Name() string + TagList() []*Tag + FieldList() []*Field +} + +// MutableMetric represents a metric that can be be modified. 
+type MutableMetric interface { + Metric + SetTime(time.Time) + AddTag(key, value string) + AddField(key string, value interface{}) +} + +// FieldSortOrder is a type for controlling if Fields are sorted +type FieldSortOrder int + +const ( + // NoSortFields tells the Decoder to not sort the fields. + NoSortFields FieldSortOrder = iota + + // SortFields tells the Decoder to sort the fields. + SortFields +) + +// FieldTypeSupport is a type for the parser to understand its type support. +type FieldTypeSupport int + +const ( + // UintSupport means the parser understands uint64s and can store them without having to convert to int64. + UintSupport FieldTypeSupport = 1 << iota +) + +// MetricError is an error causing a metric to be unserializable. +type MetricError struct { + s string +} + +func (e MetricError) Error() string { + return e.s +} + +// FieldError is an error causing a field to be unserializable. +type FieldError struct { + s string +} + +func (e FieldError) Error() string { + return e.s +} + +var ( + // ErrNeedMoreSpace tells us that the Decoder's io.Reader is full. + ErrNeedMoreSpace = &MetricError{"need more space"} + + // ErrInvalidName tells us that the chosen name is invalid. + ErrInvalidName = &MetricError{"invalid name"} + + // ErrNoFields tells us that there were no serializable fields in the line/metric. + ErrNoFields = &MetricError{"no serializable fields"} +) + +type metric struct { + name string + tags []*Tag + fields []*Field + tm time.Time +} + +// New creates a new metric via maps. +func New( + name string, + tags map[string]string, + fields map[string]interface{}, + tm time.Time, +) (MutableMetric, error) { + m := &metric{ + name: name, + tags: nil, + fields: nil, + tm: tm, + } + + if len(tags) > 0 { + m.tags = make([]*Tag, 0, len(tags)) + for k, v := range tags { + m.tags = append(m.tags, + &Tag{Key: k, Value: v}) + } + sort.Slice(m.tags, func(i, j int) bool { return m.tags[i].Key < m.tags[j].Key }) + } + + if len(fields) > 0 { + m.fields = make([]*Field, 0, len(fields)) + for k, v := range fields { + v := convertField(v) + if v == nil { + continue + } + m.AddField(k, v) + } + } + + return m, nil +} + +// FromMetric returns a deep copy of the metric with any tracking information +// removed. 
+func FromMetric(other Metric) Metric { + m := &metric{ + name: other.Name(), + tags: make([]*Tag, len(other.TagList())), + fields: make([]*Field, len(other.FieldList())), + tm: other.Time(), + } + + for i, tag := range other.TagList() { + m.tags[i] = &Tag{Key: tag.Key, Value: tag.Value} + } + + for i, field := range other.FieldList() { + m.fields[i] = &Field{Key: field.Key, Value: field.Value} + } + return m +} + +func (m *metric) String() string { + return fmt.Sprintf("%s %v %v %d", m.name, m.Tags(), m.Fields(), m.tm.UnixNano()) +} + +func (m *metric) Name() string { + return m.name +} + +func (m *metric) Tags() map[string]string { + tags := make(map[string]string, len(m.tags)) + for _, tag := range m.tags { + tags[tag.Key] = tag.Value + } + return tags +} + +func (m *metric) TagList() []*Tag { + return m.tags +} + +func (m *metric) Fields() map[string]interface{} { + fields := make(map[string]interface{}, len(m.fields)) + for _, field := range m.fields { + fields[field.Key] = field.Value + } + + return fields +} + +func (m *metric) FieldList() []*Field { + return m.fields +} + +func (m *metric) Time() time.Time { + return m.tm +} + +func (m *metric) SetName(name string) { + m.name = name +} + +func (m *metric) AddPrefix(prefix string) { + m.name = prefix + m.name +} + +func (m *metric) AddSuffix(suffix string) { + m.name = m.name + suffix +} + +func (m *metric) AddTag(key, value string) { + for i, tag := range m.tags { + if key > tag.Key { + continue + } + + if key == tag.Key { + tag.Value = value + return + } + + m.tags = append(m.tags, nil) + copy(m.tags[i+1:], m.tags[i:]) + m.tags[i] = &Tag{Key: key, Value: value} + return + } + + m.tags = append(m.tags, &Tag{Key: key, Value: value}) +} + +func (m *metric) HasTag(key string) bool { + for _, tag := range m.tags { + if tag.Key == key { + return true + } + } + return false +} + +func (m *metric) GetTag(key string) (string, bool) { + for _, tag := range m.tags { + if tag.Key == key { + return tag.Value, true + } + } + return "", false +} + +func (m *metric) RemoveTag(key string) { + for i, tag := range m.tags { + if tag.Key == key { + copy(m.tags[i:], m.tags[i+1:]) + m.tags[len(m.tags)-1] = nil + m.tags = m.tags[:len(m.tags)-1] + return + } + } +} + +func (m *metric) AddField(key string, value interface{}) { + for i, field := range m.fields { + if key == field.Key { + m.fields[i] = &Field{Key: key, Value: convertField(value)} + return + } + } + m.fields = append(m.fields, &Field{Key: key, Value: convertField(value)}) +} + +func (m *metric) HasField(key string) bool { + for _, field := range m.fields { + if field.Key == key { + return true + } + } + return false +} + +func (m *metric) GetField(key string) (interface{}, bool) { + for _, field := range m.fields { + if field.Key == key { + return field.Value, true + } + } + return nil, false +} + +func (m *metric) RemoveField(key string) { + for i, field := range m.fields { + if field.Key == key { + copy(m.fields[i:], m.fields[i+1:]) + m.fields[len(m.fields)-1] = nil + m.fields = m.fields[:len(m.fields)-1] + return + } + } +} + +func (m *metric) SetTime(t time.Time) { + m.tm = t +} + +func (m *metric) Copy() Metric { + m2 := &metric{ + name: m.name, + tags: make([]*Tag, len(m.tags)), + fields: make([]*Field, len(m.fields)), + tm: m.tm, + } + + for i, tag := range m.tags { + m2.tags[i] = &Tag{Key: tag.Key, Value: tag.Value} + } + + for i, field := range m.fields { + m2.fields[i] = &Field{Key: field.Key, Value: field.Value} + } + return m2 +} + +func (m *metric) HashID() uint64 { + h := 
fnv.New64a() + h.Write([]byte(m.name)) + h.Write([]byte("\n")) + for _, tag := range m.tags { + h.Write([]byte(tag.Key)) + h.Write([]byte("\n")) + h.Write([]byte(tag.Value)) + h.Write([]byte("\n")) + } + return h.Sum64() +} + +func (m *metric) Accept() { +} + +func (m *metric) Reject() { +} + +func (m *metric) Drop() { +} + +// Convert field to a supported type or nil if unconvertible +func convertField(v interface{}) interface{} { + switch v := v.(type) { + case float64: + return v + case int64: + return v + case string: + return v + case bool: + return v + case int: + return int64(v) + case uint: + return uint64(v) + case uint64: + return uint64(v) + case []byte: + return string(v) + case int32: + return int64(v) + case int16: + return int64(v) + case int8: + return int64(v) + case uint32: + return uint64(v) + case uint16: + return uint64(v) + case uint8: + return uint64(v) + case float32: + return float64(v) + case *float64: + if v != nil { + return *v + } + case *int64: + if v != nil { + return *v + } + case *string: + if v != nil { + return *v + } + case *bool: + if v != nil { + return *v + } + case *int: + if v != nil { + return int64(*v) + } + case *uint: + if v != nil { + return uint64(*v) + } + case *uint64: + if v != nil { + return uint64(*v) + } + case *[]byte: + if v != nil { + return string(*v) + } + case *int32: + if v != nil { + return int64(*v) + } + case *int16: + if v != nil { + return int64(*v) + } + case *int8: + if v != nil { + return int64(*v) + } + case *uint32: + if v != nil { + return uint64(*v) + } + case *uint16: + if v != nil { + return uint64(*v) + } + case *uint8: + if v != nil { + return uint64(*v) + } + case *float32: + if v != nil { + return float64(*v) + } + default: + return nil + } + return nil +} diff --git a/vendor/github.com/influxdata/line-protocol/parser.go b/vendor/github.com/influxdata/line-protocol/parser.go new file mode 100644 index 0000000..b9eaa91 --- /dev/null +++ b/vendor/github.com/influxdata/line-protocol/parser.go @@ -0,0 +1,192 @@ +package protocol + +import ( + "fmt" + "io" + "strings" + "sync" + "time" +) + +const ( + maxErrorBufferSize = 1024 +) + +// TimeFunc is used to override the default time for a metric +// with no specified timestamp. +type TimeFunc func() time.Time + +// ParseError indicates a error in the parsing of the text. +type ParseError struct { + Offset int + LineOffset int + LineNumber int + Column int + msg string + buf string +} + +func (e *ParseError) Error() string { + buffer := e.buf[e.LineOffset:] + eol := strings.IndexAny(buffer, "\r\n") + if eol >= 0 { + buffer = buffer[:eol] + } + if len(buffer) > maxErrorBufferSize { + buffer = buffer[:maxErrorBufferSize] + "..." + } + return fmt.Sprintf("metric parse error: %s at %d:%d: %q", e.msg, e.LineNumber, e.Column, buffer) +} + +// Parser is an InfluxDB Line Protocol parser that implements the +// parsers.Parser interface. +type Parser struct { + DefaultTags map[string]string + + sync.Mutex + *machine + handler *MetricHandler +} + +// NewParser returns a Parser than accepts line protocol +func NewParser(handler *MetricHandler) *Parser { + return &Parser{ + machine: NewMachine(handler), + handler: handler, + } +} + +// NewSeriesParser returns a Parser than accepts a measurement and tagset +func NewSeriesParser(handler *MetricHandler) *Parser { + return &Parser{ + machine: NewSeriesMachine(handler), + handler: handler, + } +} + +// SetTimeFunc allows default times to be set when no time is specified +// for a metric in line-protocol. 
+func (p *Parser) SetTimeFunc(f TimeFunc) { + p.handler.SetTimeFunc(f) +} + +// Parse interprets line-protocol bytes as many metrics. +func (p *Parser) Parse(input []byte) ([]Metric, error) { + p.Lock() + defer p.Unlock() + metrics := make([]Metric, 0) + p.machine.SetData(input) + + for { + err := p.machine.Next() + if err == EOF { + break + } + + if err != nil { + return nil, &ParseError{ + Offset: p.machine.Position(), + LineOffset: p.machine.LineOffset(), + LineNumber: p.machine.LineNumber(), + Column: p.machine.Column(), + msg: err.Error(), + buf: string(input), + } + } + + metric, err := p.handler.Metric() + if err != nil { + return nil, err + } + + if metric == nil { + continue + } + + metrics = append(metrics, metric) + } + + return metrics, nil +} + +// StreamParser is an InfluxDB Line Protocol parser. It is not safe for +// concurrent use in multiple goroutines. +type StreamParser struct { + machine *streamMachine + handler *MetricHandler +} + +// NewStreamParser parses from a reader and iterates the machine +// metric by metric. Not safe for concurrent use in multiple goroutines. +func NewStreamParser(r io.Reader) *StreamParser { + handler := NewMetricHandler() + return &StreamParser{ + machine: NewStreamMachine(r, handler), + handler: handler, + } +} + +// SetTimeFunc changes the function used to determine the time of metrics +// without a timestamp. The default TimeFunc is time.Now. Useful mostly for +// testing, or perhaps if you want all metrics to have the same timestamp. +func (p *StreamParser) SetTimeFunc(f TimeFunc) { + p.handler.SetTimeFunc(f) +} + +// SetTimePrecision specifies units for the time stamp. +func (p *StreamParser) SetTimePrecision(u time.Duration) { + p.handler.SetTimePrecision(u) +} + +// Next parses the next item from the stream. You can repeat calls to this +// function until it returns EOF. +func (p *StreamParser) Next() (Metric, error) { + err := p.machine.Next() + if err == EOF { + return nil, EOF + } + + if err != nil { + return nil, &ParseError{ + Offset: p.machine.Position(), + LineOffset: p.machine.LineOffset(), + LineNumber: p.machine.LineNumber(), + Column: p.machine.Column(), + msg: err.Error(), + buf: p.machine.LineText(), + } + } + + metric, err := p.handler.Metric() + if err != nil { + return nil, err + } + + return metric, nil +} + +// Position returns the current byte offset into the data. +func (p *StreamParser) Position() int { + return p.machine.Position() +} + +// LineOffset returns the byte offset of the current line. +func (p *StreamParser) LineOffset() int { + return p.machine.LineOffset() +} + +// LineNumber returns the current line number. Lines are counted based on the +// regular expression `\r?\n`. +func (p *StreamParser) LineNumber() int { + return p.machine.LineNumber() +} + +// Column returns the current column. +func (p *StreamParser) Column() int { + return p.machine.Column() +} + +// LineText returns the text of the current line that has been parsed so far. +func (p *StreamParser) LineText() string { + return p.machine.LineText() +} diff --git a/vendor/github.com/influxdata/line-protocol/writer.go b/vendor/github.com/influxdata/line-protocol/writer.go new file mode 100644 index 0000000..aa77533 --- /dev/null +++ b/vendor/github.com/influxdata/line-protocol/writer.go @@ -0,0 +1,130 @@ +package protocol + +import ( + "fmt" + "time" +) + +// Write writes out data to a line protocol encoder. Note: it does no sorting. 
It assumes you have done your own sorting for tagValues +func (e *Encoder) Write(name []byte, ts time.Time, tagKeys, tagVals, fieldKeys [][]byte, fieldVals []interface{}) (int, error) { + e.header = e.header[:0] + if len(name) == 0 || name[len(name)-1] == byte('\\') { + return 0, ErrInvalidName + } + nameEscapeBytes(&e.header, name) + for i := range tagKeys { + // Some keys and values are not encodeable as line protocol, such as + // those with a trailing '\' or empty strings. + if len(tagKeys[i]) == 0 || len(tagVals[i]) == 0 || tagKeys[i][len(tagKeys[i])-1] == byte('\\') { + if e.failOnFieldError { + return 0, fmt.Errorf("invalid field: key \"%s\", val \"%s\"", tagKeys[i], tagVals[i]) + } + continue + } + e.header = append(e.header, byte(',')) + escapeBytes(&e.header, tagKeys[i]) + e.header = append(e.header, byte('=')) + escapeBytes(&e.header, tagVals[i]) + } + e.header = append(e.header, byte(' ')) + e.buildFooter(ts) + + i := 0 + totalWritten := 0 + pairsLen := 0 + firstField := true + for i := range fieldKeys { + e.pair = e.pair[:0] + key := fieldKeys[i] + if len(key) == 0 || key[len(key)-1] == byte('\\') { + if e.failOnFieldError { + return 0, &FieldError{"invalid field key"} + } + continue + } + escapeBytes(&e.pair, key) + // Some keys are not encodeable as line protocol, such as those with a + // trailing '\' or empty strings. + e.pair = append(e.pair, byte('=')) + err := e.buildFieldVal(fieldVals[i]) + if err != nil { + if e.failOnFieldError { + return 0, err + } + continue + } + + bytesNeeded := len(e.header) + pairsLen + len(e.pair) + len(e.footer) + + // Additional length needed for field separator `,` + if !firstField { + bytesNeeded++ + } + + if e.maxLineBytes > 0 && bytesNeeded > e.maxLineBytes { + // Need at least one field per line + if firstField { + return 0, ErrNeedMoreSpace + } + + i, err = e.w.Write(e.footer) + if err != nil { + return 0, err + } + totalWritten += i + + bytesNeeded = len(e.header) + len(e.pair) + len(e.footer) + + if e.maxLineBytes > 0 && bytesNeeded > e.maxLineBytes { + return 0, ErrNeedMoreSpace + } + + i, err = e.w.Write(e.header) + if err != nil { + return 0, err + } + totalWritten += i + + i, err = e.w.Write(e.pair) + if err != nil { + return 0, err + } + totalWritten += i + + pairsLen += len(e.pair) + firstField = false + continue + } + + if firstField { + i, err = e.w.Write(e.header) + if err != nil { + return 0, err + } + totalWritten += i + + } else { + i, err = e.w.Write(comma) + if err != nil { + return 0, err + } + totalWritten += i + + } + + e.w.Write(e.pair) + + pairsLen += len(e.pair) + firstField = false + } + + if firstField { + return 0, ErrNoFields + } + i, err := e.w.Write(e.footer) + if err != nil { + return 0, err + } + totalWritten += i + return totalWritten, nil +} diff --git a/vendor/github.com/pkg/errors/.gitignore b/vendor/github.com/pkg/errors/.gitignore new file mode 100644 index 0000000..daf913b --- /dev/null +++ b/vendor/github.com/pkg/errors/.gitignore @@ -0,0 +1,24 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof diff --git a/vendor/github.com/pkg/errors/.travis.yml b/vendor/github.com/pkg/errors/.travis.yml new file mode 100644 index 0000000..9159de0 --- /dev/null +++ b/vendor/github.com/pkg/errors/.travis.yml @@ -0,0 +1,10 @@ +language: go +go_import_path: github.com/pkg/errors 
+go: + - 1.11.x + - 1.12.x + - 1.13.x + - tip + +script: + - make check diff --git a/vendor/github.com/pkg/errors/LICENSE b/vendor/github.com/pkg/errors/LICENSE new file mode 100644 index 0000000..835ba3e --- /dev/null +++ b/vendor/github.com/pkg/errors/LICENSE @@ -0,0 +1,23 @@ +Copyright (c) 2015, Dave Cheney +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/pkg/errors/Makefile b/vendor/github.com/pkg/errors/Makefile new file mode 100644 index 0000000..ce9d7cd --- /dev/null +++ b/vendor/github.com/pkg/errors/Makefile @@ -0,0 +1,44 @@ +PKGS := github.com/pkg/errors +SRCDIRS := $(shell go list -f '{{.Dir}}' $(PKGS)) +GO := go + +check: test vet gofmt misspell unconvert staticcheck ineffassign unparam + +test: + $(GO) test $(PKGS) + +vet: | test + $(GO) vet $(PKGS) + +staticcheck: + $(GO) get honnef.co/go/tools/cmd/staticcheck + staticcheck -checks all $(PKGS) + +misspell: + $(GO) get github.com/client9/misspell/cmd/misspell + misspell \ + -locale GB \ + -error \ + *.md *.go + +unconvert: + $(GO) get github.com/mdempsky/unconvert + unconvert -v $(PKGS) + +ineffassign: + $(GO) get github.com/gordonklaus/ineffassign + find $(SRCDIRS) -name '*.go' | xargs ineffassign + +pedantic: check errcheck + +unparam: + $(GO) get mvdan.cc/unparam + unparam ./... 
+ +errcheck: + $(GO) get github.com/kisielk/errcheck + errcheck $(PKGS) + +gofmt: + @echo Checking code is gofmted + @test -z "$(shell gofmt -s -l -d -e $(SRCDIRS) | tee /dev/stderr)" diff --git a/vendor/github.com/pkg/errors/README.md b/vendor/github.com/pkg/errors/README.md new file mode 100644 index 0000000..54dfdcb --- /dev/null +++ b/vendor/github.com/pkg/errors/README.md @@ -0,0 +1,59 @@ +# errors [![Travis-CI](https://travis-ci.org/pkg/errors.svg)](https://travis-ci.org/pkg/errors) [![AppVeyor](https://ci.appveyor.com/api/projects/status/b98mptawhudj53ep/branch/master?svg=true)](https://ci.appveyor.com/project/davecheney/errors/branch/master) [![GoDoc](https://godoc.org/github.com/pkg/errors?status.svg)](http://godoc.org/github.com/pkg/errors) [![Report card](https://goreportcard.com/badge/github.com/pkg/errors)](https://goreportcard.com/report/github.com/pkg/errors) [![Sourcegraph](https://sourcegraph.com/github.com/pkg/errors/-/badge.svg)](https://sourcegraph.com/github.com/pkg/errors?badge) + +Package errors provides simple error handling primitives. + +`go get github.com/pkg/errors` + +The traditional error handling idiom in Go is roughly akin to +```go +if err != nil { + return err +} +``` +which applied recursively up the call stack results in error reports without context or debugging information. The errors package allows programmers to add context to the failure path in their code in a way that does not destroy the original value of the error. + +## Adding context to an error + +The errors.Wrap function returns a new error that adds context to the original error. For example +```go +_, err := ioutil.ReadAll(r) +if err != nil { + return errors.Wrap(err, "read failed") +} +``` +## Retrieving the cause of an error + +Using `errors.Wrap` constructs a stack of errors, adding context to the preceding error. Depending on the nature of the error it may be necessary to reverse the operation of errors.Wrap to retrieve the original error for inspection. Any error value which implements this interface can be inspected by `errors.Cause`. +```go +type causer interface { + Cause() error +} +``` +`errors.Cause` will recursively retrieve the topmost error which does not implement `causer`, which is assumed to be the original cause. For example: +```go +switch err := errors.Cause(err).(type) { +case *MyError: + // handle specifically +default: + // unknown error +} +``` + +[Read the package documentation for more information](https://godoc.org/github.com/pkg/errors). + +## Roadmap + +With the upcoming [Go2 error proposals](https://go.googlesource.com/proposal/+/master/design/go2draft.md) this package is moving into maintenance mode. The roadmap for a 1.0 release is as follows: + +- 0.9. Remove pre Go 1.9 and Go 1.10 support, address outstanding pull requests (if possible) +- 1.0. Final release. + +## Contributing + +Because of the Go2 errors changes, this package is not accepting proposals for new functionality. With that said, we welcome pull requests, bug fixes and issue reports. + +Before sending a PR, please discuss your change by raising an issue. 
+ +## License + +BSD-2-Clause diff --git a/vendor/github.com/pkg/errors/appveyor.yml b/vendor/github.com/pkg/errors/appveyor.yml new file mode 100644 index 0000000..a932ead --- /dev/null +++ b/vendor/github.com/pkg/errors/appveyor.yml @@ -0,0 +1,32 @@ +version: build-{build}.{branch} + +clone_folder: C:\gopath\src\github.com\pkg\errors +shallow_clone: true # for startup speed + +environment: + GOPATH: C:\gopath + +platform: + - x64 + +# http://www.appveyor.com/docs/installed-software +install: + # some helpful output for debugging builds + - go version + - go env + # pre-installed MinGW at C:\MinGW is 32bit only + # but MSYS2 at C:\msys64 has mingw64 + - set PATH=C:\msys64\mingw64\bin;%PATH% + - gcc --version + - g++ --version + +build_script: + - go install -v ./... + +test_script: + - set PATH=C:\gopath\bin;%PATH% + - go test -v ./... + +#artifacts: +# - path: '%GOPATH%\bin\*.exe' +deploy: off diff --git a/vendor/github.com/pkg/errors/errors.go b/vendor/github.com/pkg/errors/errors.go new file mode 100644 index 0000000..161aea2 --- /dev/null +++ b/vendor/github.com/pkg/errors/errors.go @@ -0,0 +1,288 @@ +// Package errors provides simple error handling primitives. +// +// The traditional error handling idiom in Go is roughly akin to +// +// if err != nil { +// return err +// } +// +// which when applied recursively up the call stack results in error reports +// without context or debugging information. The errors package allows +// programmers to add context to the failure path in their code in a way +// that does not destroy the original value of the error. +// +// Adding context to an error +// +// The errors.Wrap function returns a new error that adds context to the +// original error by recording a stack trace at the point Wrap is called, +// together with the supplied message. For example +// +// _, err := ioutil.ReadAll(r) +// if err != nil { +// return errors.Wrap(err, "read failed") +// } +// +// If additional control is required, the errors.WithStack and +// errors.WithMessage functions destructure errors.Wrap into its component +// operations: annotating an error with a stack trace and with a message, +// respectively. +// +// Retrieving the cause of an error +// +// Using errors.Wrap constructs a stack of errors, adding context to the +// preceding error. Depending on the nature of the error it may be necessary +// to reverse the operation of errors.Wrap to retrieve the original error +// for inspection. Any error value which implements this interface +// +// type causer interface { +// Cause() error +// } +// +// can be inspected by errors.Cause. errors.Cause will recursively retrieve +// the topmost error that does not implement causer, which is assumed to be +// the original cause. For example: +// +// switch err := errors.Cause(err).(type) { +// case *MyError: +// // handle specifically +// default: +// // unknown error +// } +// +// Although the causer interface is not exported by this package, it is +// considered a part of its stable public interface. +// +// Formatted printing of errors +// +// All error values returned from this package implement fmt.Formatter and can +// be formatted by the fmt package. The following verbs are supported: +// +// %s print the error. If the error has a Cause it will be +// printed recursively. +// %v see %s +// %+v extended format. Each Frame of the error's StackTrace will +// be printed in detail. 
+// +// Retrieving the stack trace of an error or wrapper +// +// New, Errorf, Wrap, and Wrapf record a stack trace at the point they are +// invoked. This information can be retrieved with the following interface: +// +// type stackTracer interface { +// StackTrace() errors.StackTrace +// } +// +// The returned errors.StackTrace type is defined as +// +// type StackTrace []Frame +// +// The Frame type represents a call site in the stack trace. Frame supports +// the fmt.Formatter interface that can be used for printing information about +// the stack trace of this error. For example: +// +// if err, ok := err.(stackTracer); ok { +// for _, f := range err.StackTrace() { +// fmt.Printf("%+s:%d\n", f, f) +// } +// } +// +// Although the stackTracer interface is not exported by this package, it is +// considered a part of its stable public interface. +// +// See the documentation for Frame.Format for more details. +package errors + +import ( + "fmt" + "io" +) + +// New returns an error with the supplied message. +// New also records the stack trace at the point it was called. +func New(message string) error { + return &fundamental{ + msg: message, + stack: callers(), + } +} + +// Errorf formats according to a format specifier and returns the string +// as a value that satisfies error. +// Errorf also records the stack trace at the point it was called. +func Errorf(format string, args ...interface{}) error { + return &fundamental{ + msg: fmt.Sprintf(format, args...), + stack: callers(), + } +} + +// fundamental is an error that has a message and a stack, but no caller. +type fundamental struct { + msg string + *stack +} + +func (f *fundamental) Error() string { return f.msg } + +func (f *fundamental) Format(s fmt.State, verb rune) { + switch verb { + case 'v': + if s.Flag('+') { + io.WriteString(s, f.msg) + f.stack.Format(s, verb) + return + } + fallthrough + case 's': + io.WriteString(s, f.msg) + case 'q': + fmt.Fprintf(s, "%q", f.msg) + } +} + +// WithStack annotates err with a stack trace at the point WithStack was called. +// If err is nil, WithStack returns nil. +func WithStack(err error) error { + if err == nil { + return nil + } + return &withStack{ + err, + callers(), + } +} + +type withStack struct { + error + *stack +} + +func (w *withStack) Cause() error { return w.error } + +// Unwrap provides compatibility for Go 1.13 error chains. +func (w *withStack) Unwrap() error { return w.error } + +func (w *withStack) Format(s fmt.State, verb rune) { + switch verb { + case 'v': + if s.Flag('+') { + fmt.Fprintf(s, "%+v", w.Cause()) + w.stack.Format(s, verb) + return + } + fallthrough + case 's': + io.WriteString(s, w.Error()) + case 'q': + fmt.Fprintf(s, "%q", w.Error()) + } +} + +// Wrap returns an error annotating err with a stack trace +// at the point Wrap is called, and the supplied message. +// If err is nil, Wrap returns nil. +func Wrap(err error, message string) error { + if err == nil { + return nil + } + err = &withMessage{ + cause: err, + msg: message, + } + return &withStack{ + err, + callers(), + } +} + +// Wrapf returns an error annotating err with a stack trace +// at the point Wrapf is called, and the format specifier. +// If err is nil, Wrapf returns nil. +func Wrapf(err error, format string, args ...interface{}) error { + if err == nil { + return nil + } + err = &withMessage{ + cause: err, + msg: fmt.Sprintf(format, args...), + } + return &withStack{ + err, + callers(), + } +} + +// WithMessage annotates err with a new message. +// If err is nil, WithMessage returns nil. 
+func WithMessage(err error, message string) error { + if err == nil { + return nil + } + return &withMessage{ + cause: err, + msg: message, + } +} + +// WithMessagef annotates err with the format specifier. +// If err is nil, WithMessagef returns nil. +func WithMessagef(err error, format string, args ...interface{}) error { + if err == nil { + return nil + } + return &withMessage{ + cause: err, + msg: fmt.Sprintf(format, args...), + } +} + +type withMessage struct { + cause error + msg string +} + +func (w *withMessage) Error() string { return w.msg + ": " + w.cause.Error() } +func (w *withMessage) Cause() error { return w.cause } + +// Unwrap provides compatibility for Go 1.13 error chains. +func (w *withMessage) Unwrap() error { return w.cause } + +func (w *withMessage) Format(s fmt.State, verb rune) { + switch verb { + case 'v': + if s.Flag('+') { + fmt.Fprintf(s, "%+v\n", w.Cause()) + io.WriteString(s, w.msg) + return + } + fallthrough + case 's', 'q': + io.WriteString(s, w.Error()) + } +} + +// Cause returns the underlying cause of the error, if possible. +// An error value has a cause if it implements the following +// interface: +// +// type causer interface { +// Cause() error +// } +// +// If the error does not implement Cause, the original error will +// be returned. If the error is nil, nil will be returned without further +// investigation. +func Cause(err error) error { + type causer interface { + Cause() error + } + + for err != nil { + cause, ok := err.(causer) + if !ok { + break + } + err = cause.Cause() + } + return err +} diff --git a/vendor/github.com/pkg/errors/go113.go b/vendor/github.com/pkg/errors/go113.go new file mode 100644 index 0000000..be0d10d --- /dev/null +++ b/vendor/github.com/pkg/errors/go113.go @@ -0,0 +1,38 @@ +// +build go1.13 + +package errors + +import ( + stderrors "errors" +) + +// Is reports whether any error in err's chain matches target. +// +// The chain consists of err itself followed by the sequence of errors obtained by +// repeatedly calling Unwrap. +// +// An error is considered to match a target if it is equal to that target or if +// it implements a method Is(error) bool such that Is(target) returns true. +func Is(err, target error) bool { return stderrors.Is(err, target) } + +// As finds the first error in err's chain that matches target, and if so, sets +// target to that error value and returns true. +// +// The chain consists of err itself followed by the sequence of errors obtained by +// repeatedly calling Unwrap. +// +// An error matches target if the error's concrete value is assignable to the value +// pointed to by target, or if the error has a method As(interface{}) bool such that +// As(target) returns true. In the latter case, the As method is responsible for +// setting target. +// +// As will panic if target is not a non-nil pointer to either a type that implements +// error, or to any interface type. As returns false if err is nil. +func As(err error, target interface{}) bool { return stderrors.As(err, target) } + +// Unwrap returns the result of calling the Unwrap method on err, if err's +// type contains an Unwrap method returning error. +// Otherwise, Unwrap returns nil. 
+func Unwrap(err error) error { + return stderrors.Unwrap(err) +} diff --git a/vendor/github.com/pkg/errors/stack.go b/vendor/github.com/pkg/errors/stack.go new file mode 100644 index 0000000..779a834 --- /dev/null +++ b/vendor/github.com/pkg/errors/stack.go @@ -0,0 +1,177 @@ +package errors + +import ( + "fmt" + "io" + "path" + "runtime" + "strconv" + "strings" +) + +// Frame represents a program counter inside a stack frame. +// For historical reasons if Frame is interpreted as a uintptr +// its value represents the program counter + 1. +type Frame uintptr + +// pc returns the program counter for this frame; +// multiple frames may have the same PC value. +func (f Frame) pc() uintptr { return uintptr(f) - 1 } + +// file returns the full path to the file that contains the +// function for this Frame's pc. +func (f Frame) file() string { + fn := runtime.FuncForPC(f.pc()) + if fn == nil { + return "unknown" + } + file, _ := fn.FileLine(f.pc()) + return file +} + +// line returns the line number of source code of the +// function for this Frame's pc. +func (f Frame) line() int { + fn := runtime.FuncForPC(f.pc()) + if fn == nil { + return 0 + } + _, line := fn.FileLine(f.pc()) + return line +} + +// name returns the name of this function, if known. +func (f Frame) name() string { + fn := runtime.FuncForPC(f.pc()) + if fn == nil { + return "unknown" + } + return fn.Name() +} + +// Format formats the frame according to the fmt.Formatter interface. +// +// %s source file +// %d source line +// %n function name +// %v equivalent to %s:%d +// +// Format accepts flags that alter the printing of some verbs, as follows: +// +// %+s function name and path of source file relative to the compile time +// GOPATH separated by \n\t (\n\t) +// %+v equivalent to %+s:%d +func (f Frame) Format(s fmt.State, verb rune) { + switch verb { + case 's': + switch { + case s.Flag('+'): + io.WriteString(s, f.name()) + io.WriteString(s, "\n\t") + io.WriteString(s, f.file()) + default: + io.WriteString(s, path.Base(f.file())) + } + case 'd': + io.WriteString(s, strconv.Itoa(f.line())) + case 'n': + io.WriteString(s, funcname(f.name())) + case 'v': + f.Format(s, 's') + io.WriteString(s, ":") + f.Format(s, 'd') + } +} + +// MarshalText formats a stacktrace Frame as a text string. The output is the +// same as that of fmt.Sprintf("%+v", f), but without newlines or tabs. +func (f Frame) MarshalText() ([]byte, error) { + name := f.name() + if name == "unknown" { + return []byte(name), nil + } + return []byte(fmt.Sprintf("%s %s:%d", name, f.file(), f.line())), nil +} + +// StackTrace is stack of Frames from innermost (newest) to outermost (oldest). +type StackTrace []Frame + +// Format formats the stack of Frames according to the fmt.Formatter interface. +// +// %s lists source files for each Frame in the stack +// %v lists the source file and line number for each Frame in the stack +// +// Format accepts flags that alter the printing of some verbs, as follows: +// +// %+v Prints filename, function, and line number for each Frame in the stack. +func (st StackTrace) Format(s fmt.State, verb rune) { + switch verb { + case 'v': + switch { + case s.Flag('+'): + for _, f := range st { + io.WriteString(s, "\n") + f.Format(s, verb) + } + case s.Flag('#'): + fmt.Fprintf(s, "%#v", []Frame(st)) + default: + st.formatSlice(s, verb) + } + case 's': + st.formatSlice(s, verb) + } +} + +// formatSlice will format this StackTrace into the given buffer as a slice of +// Frame, only valid when called with '%s' or '%v'. 
+func (st StackTrace) formatSlice(s fmt.State, verb rune) { + io.WriteString(s, "[") + for i, f := range st { + if i > 0 { + io.WriteString(s, " ") + } + f.Format(s, verb) + } + io.WriteString(s, "]") +} + +// stack represents a stack of program counters. +type stack []uintptr + +func (s *stack) Format(st fmt.State, verb rune) { + switch verb { + case 'v': + switch { + case st.Flag('+'): + for _, pc := range *s { + f := Frame(pc) + fmt.Fprintf(st, "\n%+v", f) + } + } + } +} + +func (s *stack) StackTrace() StackTrace { + f := make([]Frame, len(*s)) + for i := 0; i < len(f); i++ { + f[i] = Frame((*s)[i]) + } + return f +} + +func callers() *stack { + const depth = 32 + var pcs [depth]uintptr + n := runtime.Callers(3, pcs[:]) + var st stack = pcs[0:n] + return &st +} + +// funcname removes the path prefix component of a function's name reported by func.Name(). +func funcname(name string) string { + i := strings.LastIndex(name, "/") + name = name[i+1:] + i = strings.Index(name, ".") + return name[i+1:] +} diff --git a/vendor/golang.org/x/net/AUTHORS b/vendor/golang.org/x/net/AUTHORS new file mode 100644 index 0000000..15167cd --- /dev/null +++ b/vendor/golang.org/x/net/AUTHORS @@ -0,0 +1,3 @@ +# This source code refers to The Go Authors for copyright purposes. +# The master list of authors is in the main Go distribution, +# visible at http://tip.golang.org/AUTHORS. diff --git a/vendor/golang.org/x/net/CONTRIBUTORS b/vendor/golang.org/x/net/CONTRIBUTORS new file mode 100644 index 0000000..1c4577e --- /dev/null +++ b/vendor/golang.org/x/net/CONTRIBUTORS @@ -0,0 +1,3 @@ +# This source code was written by the Go contributors. +# The master list of contributors is in the main Go distribution, +# visible at http://tip.golang.org/CONTRIBUTORS. diff --git a/vendor/golang.org/x/net/LICENSE b/vendor/golang.org/x/net/LICENSE new file mode 100644 index 0000000..6a66aea --- /dev/null +++ b/vendor/golang.org/x/net/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
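For orientation, the vendored parser.go above documents NewMetricHandler, NewParser, and Parser.Parse, and metric.go defines the Metric, Tag, and Field accessors that the InfluxDB client relies on. The following standalone sketch is not part of this patch: the main package and the sample line are purely illustrative, and only identifiers that appear in the vendored files above are used.

```go
package main

import (
	"fmt"

	protocol "github.com/influxdata/line-protocol"
)

func main() {
	// MetricHandler collects measurement/tag/field callbacks from the state
	// machine; Parser drives the machine over a byte slice.
	handler := protocol.NewMetricHandler()
	parser := protocol.NewParser(handler)

	// An illustrative line-protocol record: measurement, one tag set, one
	// float field, no timestamp (the handler falls back to its TimeFunc).
	input := []byte("gravity,color=BLACK sg=1050\n")

	metrics, err := parser.Parse(input)
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}

	for _, m := range metrics {
		fmt.Println(m.Name(), m.Time())
		for _, t := range m.TagList() {
			fmt.Printf("  tag   %s=%s\n", t.Key, t.Value)
		}
		for _, f := range m.FieldList() {
			fmt.Printf("  field %s=%v\n", f.Key, f.Value)
		}
	}
}
```

For streaming input, NewStreamParser(r) with repeated Next() calls until EOF follows the same pattern, per the StreamParser doc comments above.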
diff --git a/vendor/golang.org/x/net/PATENTS b/vendor/golang.org/x/net/PATENTS new file mode 100644 index 0000000..7330990 --- /dev/null +++ b/vendor/golang.org/x/net/PATENTS @@ -0,0 +1,22 @@ +Additional IP Rights Grant (Patents) + +"This implementation" means the copyrightable works distributed by +Google as part of the Go project. + +Google hereby grants to You a perpetual, worldwide, non-exclusive, +no-charge, royalty-free, irrevocable (except as stated in this section) +patent license to make, have made, use, offer to sell, sell, import, +transfer and otherwise run, modify and propagate the contents of this +implementation of Go, where such license applies only to those patent +claims, both currently owned or controlled by Google and acquired in +the future, licensable by Google that are necessarily infringed by this +implementation of Go. This grant does not include claims that would be +infringed only as a consequence of further modification of this +implementation. If you or your agent or exclusive licensee institute or +order or agree to the institution of patent litigation against any +entity (including a cross-claim or counterclaim in a lawsuit) alleging +that this implementation of Go or any code incorporated within this +implementation of Go constitutes direct or contributory patent +infringement, or inducement of patent infringement, then any patent +rights granted to you under this License for this implementation of Go +shall terminate as of the date such litigation is filed. diff --git a/vendor/golang.org/x/net/publicsuffix/list.go b/vendor/golang.org/x/net/publicsuffix/list.go new file mode 100644 index 0000000..e2fddd6 --- /dev/null +++ b/vendor/golang.org/x/net/publicsuffix/list.go @@ -0,0 +1,182 @@ +// Copyright 2012 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:generate go run gen.go + +// Package publicsuffix provides a public suffix list based on data from +// https://publicsuffix.org/ +// +// A public suffix is one under which Internet users can directly register +// names. It is related to, but different from, a TLD (top level domain). +// +// "com" is a TLD (top level domain). Top level means it has no dots. +// +// "com" is also a public suffix. Amazon and Google have registered different +// siblings under that domain: "amazon.com" and "google.com". +// +// "au" is another TLD, again because it has no dots. But it's not "amazon.au". +// Instead, it's "amazon.com.au". +// +// "com.au" isn't an actual TLD, because it's not at the top level (it has +// dots). But it is an eTLD (effective TLD), because that's the branching point +// for domain name registrars. +// +// Another name for "an eTLD" is "a public suffix". Often, what's more of +// interest is the eTLD+1, or one more label than the public suffix. For +// example, browsers partition read/write access to HTTP cookies according to +// the eTLD+1. Web pages served from "amazon.com.au" can't read cookies from +// "google.com.au", but web pages served from "maps.google.com" can share +// cookies from "www.google.com", so you don't have to sign into Google Maps +// separately from signing into Google Web Search. Note that all four of those +// domains have 3 labels and 2 dots. The first two domains are each an eTLD+1, +// the last two are not (but share the same eTLD+1: "google.com"). 
+// +// All of these domains have the same eTLD+1: +// - "www.books.amazon.co.uk" +// - "books.amazon.co.uk" +// - "amazon.co.uk" +// +// Specifically, the eTLD+1 is "amazon.co.uk", because the eTLD is "co.uk". +// +// There is no closed form algorithm to calculate the eTLD of a domain. +// Instead, the calculation is data driven. This package provides a +// pre-compiled snapshot of Mozilla's PSL (Public Suffix List) data at +// https://publicsuffix.org/ +package publicsuffix // import "golang.org/x/net/publicsuffix" + +// TODO: specify case sensitivity and leading/trailing dot behavior for +// func PublicSuffix and func EffectiveTLDPlusOne. + +import ( + "fmt" + "net/http/cookiejar" + "strings" +) + +// List implements the cookiejar.PublicSuffixList interface by calling the +// PublicSuffix function. +var List cookiejar.PublicSuffixList = list{} + +type list struct{} + +func (list) PublicSuffix(domain string) string { + ps, _ := PublicSuffix(domain) + return ps +} + +func (list) String() string { + return version +} + +// PublicSuffix returns the public suffix of the domain using a copy of the +// publicsuffix.org database compiled into the library. +// +// icann is whether the public suffix is managed by the Internet Corporation +// for Assigned Names and Numbers. If not, the public suffix is either a +// privately managed domain (and in practice, not a top level domain) or an +// unmanaged top level domain (and not explicitly mentioned in the +// publicsuffix.org list). For example, "foo.org" and "foo.co.uk" are ICANN +// domains, "foo.dyndns.org" and "foo.blogspot.co.uk" are private domains and +// "cromulent" is an unmanaged top level domain. +// +// Use cases for distinguishing ICANN domains like "foo.com" from private +// domains like "foo.appspot.com" can be found at +// https://wiki.mozilla.org/Public_Suffix_List/Use_Cases +func PublicSuffix(domain string) (publicSuffix string, icann bool) { + lo, hi := uint32(0), uint32(numTLD) + s, suffix, icannNode, wildcard := domain, len(domain), false, false +loop: + for { + dot := strings.LastIndex(s, ".") + if wildcard { + icann = icannNode + suffix = 1 + dot + } + if lo == hi { + break + } + f := find(s[1+dot:], lo, hi) + if f == notFound { + break + } + + u := nodes[f] >> (nodesBitsTextOffset + nodesBitsTextLength) + icannNode = u&(1<>= nodesBitsICANN + u = children[u&(1<>= childrenBitsLo + hi = u & (1<>= childrenBitsHi + switch u & (1<>= childrenBitsNodeType + wildcard = u&(1<>= nodesBitsTextLength + offset := x & (1<