diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..4f650a1 --- /dev/null +++ b/.env.example @@ -0,0 +1,2 @@ +NIKI_STAGE_MARIADB_UR_PASSWORD="nikiappt0lk2o20" +NIKI_STAGE_MARIADB_RT_PASSWORD="nikiappt0lk2o20" \ No newline at end of file diff --git a/.gitignore b/.gitignore index c841ccc..bfc531f 100644 --- a/.gitignore +++ b/.gitignore @@ -20,7 +20,7 @@ .idea bin -#env +#.env *.env logs/ diff --git a/.mise.toml b/.mise.toml index 5b2b992..39cce95 100644 --- a/.mise.toml +++ b/.mise.toml @@ -1,5 +1,5 @@ [env] -# supports arbitrary env vars so rtx can be used like direnv/dotenv +# supports arbitrary environment variables so rtx can be used like direnv/dotenv GO_ENV = 'GOLANG_MISE' MISE_USE_TOML= 1 #Set to 1 to default to using .mise.toml RUST_BACKTRACE=0 diff --git a/Makefile b/Makefile index 4fa0af0..f30ee83 100644 --- a/Makefile +++ b/Makefile @@ -18,4 +18,10 @@ format: @golangci-lint run --fix build: - go build main.go \ No newline at end of file + go build main.go + +run: + go run main.go --migrate + +docker: + sudo docker compose up -d \ No newline at end of file diff --git a/config/loader.go b/config/loader.go index 4ecf582..3867c15 100644 --- a/config/loader.go +++ b/config/loader.go @@ -29,7 +29,7 @@ type Option struct { } // our environment variables must prefix with `EB_` -// for nested env should use `__` aka: EB__DB__HOST. +// for nested env vars use `__`, e.g. EB__DB__HOST. func defaultCallbackEnv(source string) string { base := strings.ToLower(strings.TrimPrefix(source, defaultPrefix)) diff --git a/delivery/http_server/benefactor/address/add_address.go b/delivery/http_server/benefactor/address/add_address.go index c9beb4f..f228b40 100644 --- a/delivery/http_server/benefactor/address/add_address.go +++ b/delivery/http_server/benefactor/address/add_address.go @@ -9,6 +9,16 @@ import ( "github.com/labstack/echo/v4" ) +// AddAddress godoc +// @Summary Add Address benefactor +// @Tags benefactor +// @Accept json +// @Produce json +// @Param Request body param.BenefactorAddAddressRequest true "Add Address benefactor" +// @Success 200 {object} param.BenefactorAddAddressResponse +// @Failure 400 {string} "Bad request" +// @Router /address/ [post] +// @Security AuthBearer func (h Handler) AddAddress(c echo.Context) error { req := param.BenefactorAddAddressRequest{} if bErr := c.Bind(&req); bErr != nil { diff --git a/delivery/http_server/benefactor/address/get_all_cities.go b/delivery/http_server/benefactor/address/get_all_cities.go index b66cde0..cceb597 100644 --- a/delivery/http_server/benefactor/address/get_all_cities.go +++ b/delivery/http_server/benefactor/address/get_all_cities.go @@ -8,6 +8,14 @@ import ( "github.com/labstack/echo/v4" ) +// GetAllCities godoc +// @Summary get all cities +// @Tags benefactor +// @Accept json +// @Produce json +// @Success 200 {object} addressparam.GetAllCitiesResponse +// @Failure 400 {string} "Bad request" +// @Router /address/cities [get] func (h Handler) GetAllCities(c echo.Context) error { var req addressparam.GetAllCitiesRequest diff --git a/delivery/http_server/benefactor/address/get_all_provinces.go b/delivery/http_server/benefactor/address/get_all_provinces.go index 00fb1cc..2bc9b45 100644 --- a/delivery/http_server/benefactor/address/get_all_provinces.go +++ b/delivery/http_server/benefactor/address/get_all_provinces.go @@ -8,6 +8,14 @@ import ( "github.com/labstack/echo/v4" ) +// GetAllProvinces godoc +// @Summary get all provinces +// @Tags benefactor +// @Accept json +// @Produce json +// @Success 200 {object}
addressparam.GetAllProvincesResponse +// @Failure 400 {string} "Bad request" +// @Router /address/provinces [get] func (h Handler) GetAllProvinces(c echo.Context) error { var req addressparam.GetAllProvincesRequest diff --git a/delivery/http_server/benefactor/benefactor/login_register.go b/delivery/http_server/benefactor/benefactor/login_register.go index 8817fac..31a3a66 100644 --- a/delivery/http_server/benefactor/benefactor/login_register.go +++ b/delivery/http_server/benefactor/benefactor/login_register.go @@ -8,6 +8,15 @@ import ( "github.com/labstack/echo/v4" ) +// loginOrRegister godoc +// @Summary login Or Register benefactor +// @Tags benefactor +// @Accept json +// @Produce json +// @Param Request body benefactoreparam.LoginOrRegisterRequest true "login Or Register benefactor" +// @Success 200 {object} benefactoreparam.LoginOrRegisterResponse +// @Failure 400 {string} "Bad request" +// @Router /benefactor/login-register [post] func (h Handler) loginOrRegister(c echo.Context) error { var req benefactoreparam.LoginOrRegisterRequest diff --git a/delivery/http_server/benefactor/benefactor/send_otp.go b/delivery/http_server/benefactor/benefactor/send_otp.go index e6c1fc0..7e15dd2 100644 --- a/delivery/http_server/benefactor/benefactor/send_otp.go +++ b/delivery/http_server/benefactor/benefactor/send_otp.go @@ -8,6 +8,15 @@ import ( "github.com/labstack/echo/v4" ) +// SendOtp godoc +// @Summary send otp benefactor +// @Tags benefactor +// @Accept json +// @Produce json +// @Param Request body benefactoreparam.SendOtpRequest true "send otp benefactor" +// @Success 200 {object} benefactoreparam.SendOtpResponse +// @Failure 400 {string} "Bad request" +// @Router /benefactor/send-otp [post] func (h Handler) SendOtp(c echo.Context) error { var req benefactoreparam.SendOtpRequest diff --git a/delivery/http_server/server.go b/delivery/http_server/server.go index bb1f422..28962ff 100644 --- a/delivery/http_server/server.go +++ b/delivery/http_server/server.go @@ -2,13 +2,13 @@ package httpserver import ( "fmt" - config "git.gocasts.ir/ebhomengo/niki/config" adminhandler "git.gocasts.ir/ebhomengo/niki/delivery/http_server/admin/admin" adminkindboxreqhandler "git.gocasts.ir/ebhomengo/niki/delivery/http_server/admin/kind_box_req" benefactoraddresshandler "git.gocasts.ir/ebhomengo/niki/delivery/http_server/benefactor/address" benefactorhandler "git.gocasts.ir/ebhomengo/niki/delivery/http_server/benefactor/benefactor" benefactorkindboxreqhandler "git.gocasts.ir/ebhomengo/niki/delivery/http_server/benefactor/kind_box_req" + "git.gocasts.ir/ebhomengo/niki/docs" adminservice "git.gocasts.ir/ebhomengo/niki/service/admin/admin" adminauthorizationservice "git.gocasts.ir/ebhomengo/niki/service/admin/authorization" adminkindboxreqservice "git.gocasts.ir/ebhomengo/niki/service/admin/kind_box_req" @@ -23,6 +23,7 @@ import ( benefactorkindboxreqvalidator "git.gocasts.ir/ebhomengo/niki/validator/benefactor/kind_box_req" echo "github.com/labstack/echo/v4" middleware "github.com/labstack/echo/v4/middleware" + echoSwagger "github.com/swaggo/echo-swagger" ) type Server struct { @@ -64,8 +65,8 @@ func New( func (s Server) Serve() { s.Router.Use(middleware.RequestID()) - s.Router.Use(middleware.Recover()) + RegisterSwagger(s.Router, s.config) // Routes s.Router.GET("/health-check", s.healthCheck) @@ -74,7 +75,6 @@ func (s Server) Serve() { s.benefactorAddressHandler.SetRoutes(s.Router) s.adminHandler.SetRoutes(s.Router) s.adminKindBoxReqHandler.SetRoutes(s.Router) - // Start server address := fmt.Sprintf(":%d", 
s.config.HTTPServer.Port) fmt.Printf("start echo server on %s\n", address) @@ -82,3 +82,12 @@ func (s Server) Serve() { fmt.Println("router start error", err) } } + +func RegisterSwagger(s *echo.Echo, config config.Config) { + docs.SwaggerInfo.Title = "NIKI Api" + docs.SwaggerInfo.Description = " This is swagger api documentation for niki project" + docs.SwaggerInfo.Version = "1.0.0" + //docs.SwaggerInfo.BasePath = "/api" + docs.SwaggerInfo.Host = fmt.Sprintf("localhost:%d", config.HTTPServer.Port) + s.GET("/swagger/*any", echoSwagger.WrapHandler) +} diff --git a/docker-compose.yaml b/docker-compose.yaml index f698007..1cc1d13 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,22 +1,22 @@ version: '3.9' services: - niki-core: - build: - context: . - target: development - image: niki-core - container_name: niki-core - networks: - - core - restart: always - ports: - - "1313:1313" - links: - - "niki-mariadb" - depends_on: - - "niki-mariadb" - - "niki-redis" +# niki-core: +# build: +# context: . +# target: development +# image: niki-core +# container_name: niki-core +# networks: +# - core +# restart: always +# ports: +# - "1313:1313" +# links: +# - "niki-mariadb" +# depends_on: +# - "niki-mariadb" +# - "niki-redis" niki-mariadb: image: docker.io/bitnami/mariadb:11.1 diff --git a/docs/docs.go b/docs/docs.go new file mode 100644 index 0000000..f433bd4 --- /dev/null +++ b/docs/docs.go @@ -0,0 +1,422 @@ +// Package docs Code generated by swaggo/swag. DO NOT EDIT +package docs + +import "github.com/swaggo/swag" + +const docTemplate = `{ + "schemes": {{ marshal .Schemes }}, + "swagger": "2.0", + "info": { + "description": "{{escape .Description}}", + "title": "{{.Title}}", + "contact": {}, + "version": "{{.Version}}" + }, + "host": "{{.Host}}", + "basePath": "{{.BasePath}}", + "paths": { + "/address/": { + "post": { + "security": [ + { + "AuthBearer": [] + } + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "benefactor" + ], + "summary": "Add Address benefactor", + "parameters": [ + { + "description": "Add Address benefactor", + "name": "Request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/addressparam.BenefactorAddAddressRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/addressparam.BenefactorAddAddressResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "string" + } + } + } + } + }, + "/address/cities": { + "get": { + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "benefactor" + ], + "summary": "get all cities", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/addressparam.GetAllCitiesResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "string" + } + } + } + } + }, + "/address/provinces": { + "get": { + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "benefactor" + ], + "summary": "get all provinces", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/addressparam.GetAllProvincesResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "string" + } + } + } + } + }, + "/benefactor/login-register": { + "post": { + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "benefactor" + ], + "summary": "login Or Register benefactor", + 
"parameters": [ + { + "description": "login Or Register benefactor", + "name": "Request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/benefactoreparam.LoginOrRegisterRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/benefactoreparam.LoginOrRegisterResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "string" + } + } + } + } + }, + "/benefactor/send-otp": { + "post": { + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "benefactor" + ], + "summary": "send otp benefactor", + "parameters": [ + { + "description": "send otp benefactor", + "name": "Request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/benefactoreparam.SendOtpRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/benefactoreparam.SendOtpResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "string" + } + } + } + } + } + }, + "definitions": { + "addressparam.BenefactorAddAddressRequest": { + "type": "object", + "properties": { + "address": { + "type": "string", + "example": "tehran" + }, + "benefactor_id": { + "type": "integer", + "example": 1 + }, + "city_id": { + "type": "integer", + "example": 1 + }, + "lat": { + "type": "number", + "example": 22.23 + }, + "lon": { + "type": "number", + "example": 22.22 + }, + "postal_code": { + "type": "string", + "example": "1234567890" + }, + "province_id": { + "type": "integer", + "example": 1 + } + } + }, + "addressparam.BenefactorAddAddressResponse": { + "type": "object", + "properties": { + "address": { + "$ref": "#/definitions/entity.Address" + } + } + }, + "addressparam.GetAllCitiesResponse": { + "type": "object", + "properties": { + "cities": { + "type": "array", + "items": { + "$ref": "#/definitions/entity.City" + } + } + } + }, + "addressparam.GetAllProvincesResponse": { + "type": "object", + "properties": { + "provinces": { + "type": "array", + "items": { + "$ref": "#/definitions/entity.Province" + } + } + } + }, + "benefactoreparam.BenefactroInfo": { + "type": "object", + "properties": { + "first_name": { + "type": "string", + "example": "mehdi" + }, + "id": { + "type": "integer", + "example": 1 + }, + "last_name": { + "type": "string", + "example": "rez" + }, + "role": { + "type": "string", + "example": "benefactor" + } + } + }, + "benefactoreparam.LoginOrRegisterRequest": { + "type": "object", + "properties": { + "phone_number": { + "type": "string", + "example": "09198829528" + }, + "verification_code": { + "type": "string", + "example": "123456" + } + } + }, + "benefactoreparam.LoginOrRegisterResponse": { + "type": "object", + "properties": { + "benefactore_info": { + "$ref": "#/definitions/benefactoreparam.BenefactroInfo" + }, + "tokens": { + "$ref": "#/definitions/benefactoreparam.Tokens" + } + } + }, + "benefactoreparam.SendOtpRequest": { + "type": "object", + "properties": { + "phone_number": { + "type": "string", + "example": "09198829528" + } + } + }, + "benefactoreparam.SendOtpResponse": { + "type": "object", + "properties": { + "code": { + "description": "this just use in test .env\n\t\tTODO - remove it after test", + "type": "string" + }, + "phone_number": { + "type": "string", + "example": "09198829528" + } + } + }, + "benefactoreparam.Tokens": { + "type": "object", + "properties": { + "access_token": { + "type": "string" + }, + "refresh_token": { + "type": "string" + } + } + }, 
+ "entity.Address": { + "type": "object", + "properties": { + "address": { + "type": "string" + }, + "benefactorID": { + "type": "integer" + }, + "cityID": { + "type": "integer" + }, + "id": { + "type": "integer" + }, + "isMain": { + "type": "boolean" + }, + "lat": { + "type": "number" + }, + "lon": { + "type": "number" + }, + "postalCode": { + "type": "string" + }, + "provinceID": { + "type": "integer" + } + } + }, + "entity.City": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "provinceID": { + "type": "integer" + } + } + }, + "entity.Province": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + } + } + } + }, + "securityDefinitions": { + "AuthBearer": { + "type": "apiKey", + "name": "Authorization", + "in": "header" + } + } +}` + +// SwaggerInfo holds exported Swagger Info so clients can modify it +var SwaggerInfo = &swag.Spec{ + Version: "", + Host: "", + BasePath: "", + Schemes: []string{}, + Title: "", + Description: "", + InfoInstanceName: "swagger", + SwaggerTemplate: docTemplate, + LeftDelim: "{{", + RightDelim: "}}", +} + +func init() { + swag.Register(SwaggerInfo.InstanceName(), SwaggerInfo) +} diff --git a/doc/mise.md b/docs/mise.md similarity index 100% rename from doc/mise.md rename to docs/mise.md diff --git a/docs/swagger.json b/docs/swagger.json new file mode 100644 index 0000000..734e84d --- /dev/null +++ b/docs/swagger.json @@ -0,0 +1,393 @@ +{ + "swagger": "2.0", + "info": { + "contact": {} + }, + "paths": { + "/address/": { + "post": { + "security": [ + { + "AuthBearer": [] + } + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "benefactor" + ], + "summary": "Add Address benefactor", + "parameters": [ + { + "description": "Add Address benefactor", + "name": "Request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/addressparam.BenefactorAddAddressRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/addressparam.BenefactorAddAddressResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "string" + } + } + } + } + }, + "/address/cities": { + "get": { + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "benefactor" + ], + "summary": "get all cities", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/addressparam.GetAllCitiesResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "string" + } + } + } + } + }, + "/address/provinces": { + "get": { + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "benefactor" + ], + "summary": "get all provinces", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/addressparam.GetAllProvincesResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "string" + } + } + } + } + }, + "/benefactor/login-register": { + "post": { + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "benefactor" + ], + "summary": "login Or Register benefactor", + "parameters": [ + { + "description": "login Or Register benefactor", + "name": "Request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/benefactoreparam.LoginOrRegisterRequest" + } + } + ], + "responses": { + "200": { + 
"description": "OK", + "schema": { + "$ref": "#/definitions/benefactoreparam.LoginOrRegisterResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "string" + } + } + } + } + }, + "/benefactor/send-otp": { + "post": { + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "benefactor" + ], + "summary": "send otp benefactor", + "parameters": [ + { + "description": "send otp benefactor", + "name": "Request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/benefactoreparam.SendOtpRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/benefactoreparam.SendOtpResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "string" + } + } + } + } + } + }, + "definitions": { + "addressparam.BenefactorAddAddressRequest": { + "type": "object", + "properties": { + "address": { + "type": "string", + "example": "tehran" + }, + "benefactor_id": { + "type": "integer", + "example": 1 + }, + "city_id": { + "type": "integer", + "example": 1 + }, + "lat": { + "type": "number", + "example": 22.23 + }, + "lon": { + "type": "number", + "example": 22.22 + }, + "postal_code": { + "type": "string", + "example": "1234567890" + }, + "province_id": { + "type": "integer", + "example": 1 + } + } + }, + "addressparam.BenefactorAddAddressResponse": { + "type": "object", + "properties": { + "address": { + "$ref": "#/definitions/entity.Address" + } + } + }, + "addressparam.GetAllCitiesResponse": { + "type": "object", + "properties": { + "cities": { + "type": "array", + "items": { + "$ref": "#/definitions/entity.City" + } + } + } + }, + "addressparam.GetAllProvincesResponse": { + "type": "object", + "properties": { + "provinces": { + "type": "array", + "items": { + "$ref": "#/definitions/entity.Province" + } + } + } + }, + "benefactoreparam.BenefactroInfo": { + "type": "object", + "properties": { + "first_name": { + "type": "string", + "example": "mehdi" + }, + "id": { + "type": "integer", + "example": 1 + }, + "last_name": { + "type": "string", + "example": "rez" + }, + "role": { + "type": "string", + "example": "benefactor" + } + } + }, + "benefactoreparam.LoginOrRegisterRequest": { + "type": "object", + "properties": { + "phone_number": { + "type": "string", + "example": "09198829528" + }, + "verification_code": { + "type": "string", + "example": "123456" + } + } + }, + "benefactoreparam.LoginOrRegisterResponse": { + "type": "object", + "properties": { + "benefactore_info": { + "$ref": "#/definitions/benefactoreparam.BenefactroInfo" + }, + "tokens": { + "$ref": "#/definitions/benefactoreparam.Tokens" + } + } + }, + "benefactoreparam.SendOtpRequest": { + "type": "object", + "properties": { + "phone_number": { + "type": "string", + "example": "09198829528" + } + } + }, + "benefactoreparam.SendOtpResponse": { + "type": "object", + "properties": { + "code": { + "description": "this just use in test .env\n\t\tTODO - remove it after test", + "type": "string" + }, + "phone_number": { + "type": "string", + "example": "09198829528" + } + } + }, + "benefactoreparam.Tokens": { + "type": "object", + "properties": { + "access_token": { + "type": "string" + }, + "refresh_token": { + "type": "string" + } + } + }, + "entity.Address": { + "type": "object", + "properties": { + "address": { + "type": "string" + }, + "benefactorID": { + "type": "integer" + }, + "cityID": { + "type": "integer" + }, + "id": { + "type": "integer" + }, + "isMain": { + "type": 
"boolean" + }, + "lat": { + "type": "number" + }, + "lon": { + "type": "number" + }, + "postalCode": { + "type": "string" + }, + "provinceID": { + "type": "integer" + } + } + }, + "entity.City": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "provinceID": { + "type": "integer" + } + } + }, + "entity.Province": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + } + } + } + }, + "securityDefinitions": { + "AuthBearer": { + "type": "apiKey", + "name": "Authorization", + "in": "header" + } + } +} \ No newline at end of file diff --git a/docs/swagger.yaml b/docs/swagger.yaml new file mode 100644 index 0000000..2161826 --- /dev/null +++ b/docs/swagger.yaml @@ -0,0 +1,256 @@ +definitions: + addressparam.BenefactorAddAddressRequest: + properties: + address: + example: tehran + type: string + benefactor_id: + example: 1 + type: integer + city_id: + example: 1 + type: integer + lat: + example: 22.23 + type: number + lon: + example: 22.22 + type: number + postal_code: + example: "1234567890" + type: string + province_id: + example: 1 + type: integer + type: object + addressparam.BenefactorAddAddressResponse: + properties: + address: + $ref: '#/definitions/entity.Address' + type: object + addressparam.GetAllCitiesResponse: + properties: + cities: + items: + $ref: '#/definitions/entity.City' + type: array + type: object + addressparam.GetAllProvincesResponse: + properties: + provinces: + items: + $ref: '#/definitions/entity.Province' + type: array + type: object + benefactoreparam.BenefactroInfo: + properties: + first_name: + example: mehdi + type: string + id: + example: 1 + type: integer + last_name: + example: rez + type: string + role: + example: benefactor + type: string + type: object + benefactoreparam.LoginOrRegisterRequest: + properties: + phone_number: + example: "09198829528" + type: string + verification_code: + example: "123456" + type: string + type: object + benefactoreparam.LoginOrRegisterResponse: + properties: + benefactore_info: + $ref: '#/definitions/benefactoreparam.BenefactroInfo' + tokens: + $ref: '#/definitions/benefactoreparam.Tokens' + type: object + benefactoreparam.SendOtpRequest: + properties: + phone_number: + example: "09198829528" + type: string + type: object + benefactoreparam.SendOtpResponse: + properties: + code: + description: "this just use in test .env\n\t\tTODO - remove it after test" + type: string + phone_number: + example: "09198829528" + type: string + type: object + benefactoreparam.Tokens: + properties: + access_token: + type: string + refresh_token: + type: string + type: object + entity.Address: + properties: + address: + type: string + benefactorID: + type: integer + cityID: + type: integer + id: + type: integer + isMain: + type: boolean + lat: + type: number + lon: + type: number + postalCode: + type: string + provinceID: + type: integer + type: object + entity.City: + properties: + id: + type: integer + name: + type: string + provinceID: + type: integer + type: object + entity.Province: + properties: + id: + type: integer + name: + type: string + type: object +info: + contact: {} +paths: + /address/: + post: + consumes: + - application/json + parameters: + - description: Add Address benefactor + in: body + name: Request + required: true + schema: + $ref: '#/definitions/addressparam.BenefactorAddAddressRequest' + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: 
'#/definitions/addressparam.BenefactorAddAddressResponse' + "400": + description: Bad request + schema: + type: string + security: + - AuthBearer: [] + summary: Add Address benefactor + tags: + - benefactor + /address/cities: + get: + consumes: + - application/json + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/addressparam.GetAllCitiesResponse' + "400": + description: Bad request + schema: + type: string + summary: get all cities + tags: + - benefactor + /address/provinces: + get: + consumes: + - application/json + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/addressparam.GetAllProvincesResponse' + "400": + description: Bad request + schema: + type: string + summary: get all provinces + tags: + - benefactor + /benefactor/login-register: + post: + consumes: + - application/json + parameters: + - description: login Or Register benefactor + in: body + name: Request + required: true + schema: + $ref: '#/definitions/benefactoreparam.LoginOrRegisterRequest' + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/benefactoreparam.LoginOrRegisterResponse' + "400": + description: Bad request + schema: + type: string + summary: login Or Register benefactor + tags: + - benefactor + /benefactor/send-otp: + post: + consumes: + - application/json + parameters: + - description: send otp benefactor + in: body + name: Request + required: true + schema: + $ref: '#/definitions/benefactoreparam.SendOtpRequest' + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/benefactoreparam.SendOtpResponse' + "400": + description: Bad request + schema: + type: string + summary: send otp benefactor + tags: + - benefactor +securityDefinitions: + AuthBearer: + in: header + name: Authorization + type: apiKey +swagger: "2.0" diff --git a/go.mod b/go.mod index 5dda685..8b012c6 100644 --- a/go.mod +++ b/go.mod @@ -11,36 +11,49 @@ require ( github.com/kavenegar/kavenegar-go v0.0.0-20221124112814-40341057b5ca github.com/knadh/koanf v1.5.0 github.com/labstack/echo-jwt/v4 v4.2.0 - github.com/labstack/echo/v4 v4.11.4 + github.com/labstack/echo/v4 v4.12.0 github.com/redis/go-redis/v9 v9.4.0 github.com/rubenv/sql-migrate v1.6.0 - github.com/stretchr/testify v1.8.4 - golang.org/x/crypto v0.17.0 + github.com/stretchr/testify v1.9.0 + github.com/swaggo/echo-swagger v1.4.1 + golang.org/x/crypto v0.23.0 gopkg.in/natefinch/lumberjack.v2 v2.2.1 ) require ( + github.com/KyleBanks/depth v1.2.1 // indirect github.com/asaskevich/govalidator v0.0.0-20200108200545-475eaeb16496 // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect github.com/fatih/structs v1.1.0 // indirect github.com/fsnotify/fsnotify v1.7.0 // indirect + github.com/ghodss/yaml v1.0.0 // indirect github.com/go-gorp/gorp/v3 v3.1.0 // indirect + github.com/go-openapi/jsonpointer v0.21.0 // indirect + github.com/go-openapi/jsonreference v0.21.0 // indirect + github.com/go-openapi/spec v0.21.0 // indirect + github.com/go-openapi/swag v0.23.0 // indirect github.com/golang-jwt/jwt v3.2.2+incompatible // indirect github.com/golang-jwt/jwt/v5 v5.0.0 // indirect + github.com/josharian/intern v1.0.0 // indirect github.com/labstack/gommon v0.4.2 // indirect + github.com/mailru/easyjson v0.7.7 // indirect github.com/mattn/go-colorable v0.1.13 // 
indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/swaggo/files/v2 v2.0.0 // indirect + github.com/swaggo/swag v1.16.3 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect github.com/valyala/fasttemplate v1.2.2 // indirect - golang.org/x/net v0.19.0 // indirect - golang.org/x/sys v0.15.0 // indirect - golang.org/x/text v0.14.0 // indirect + golang.org/x/net v0.25.0 // indirect + golang.org/x/sys v0.20.0 // indirect + golang.org/x/text v0.15.0 // indirect golang.org/x/time v0.5.0 // indirect + golang.org/x/tools v0.21.0 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 56fb3a7..c838f47 100644 --- a/go.sum +++ b/go.sum @@ -1,6 +1,8 @@ cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= +github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= @@ -60,6 +62,7 @@ github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= +github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-gorp/gorp/v3 v3.1.0 h1:ItKF/Vbuj31dmV4jxA1qblpSwkl9g1typ24xoe70IGs= github.com/go-gorp/gorp/v3 v3.1.0/go.mod h1:dLEjIyyRNiXvNZ8PSmzpt1GsWAUK8kjVhEpjH8TixEw= @@ -70,6 +73,14 @@ github.com/go-ldap/ldap v3.0.2+incompatible/go.mod h1:qfd9rJvER9Q0/D/Sqn1DfHRoBp github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= +github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= +github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ= +github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4= +github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY= +github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk= +github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= +github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= 
github.com/go-ozzo/ozzo-validation v3.6.0+incompatible h1:msy24VGS42fKO9K1vLz82/GeYW1cILu7Nuuj1N3BBkE= github.com/go-ozzo/ozzo-validation v3.6.0+incompatible/go.mod h1:gsEKFIVnabGBt6mXmxK0MoFy+cZoTJY6mu5Ll3LVLBU= github.com/go-ozzo/ozzo-validation/v4 v4.3.0 h1:byhDUpfEwjsVQb1vBunvIjh2BHQ9ead57VkAEY4V+Es= @@ -158,6 +169,8 @@ github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHW github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= @@ -183,12 +196,14 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/labstack/echo-jwt/v4 v4.2.0 h1:odSISV9JgcSCuhgQSV/6Io3i7nUmfM/QkBeR5GVJj5c= github.com/labstack/echo-jwt/v4 v4.2.0/go.mod h1:MA2RqdXdEn4/uEglx0HcUOgQSyBaTh5JcaHIan3biwU= -github.com/labstack/echo/v4 v4.11.4 h1:vDZmA+qNeh1pd/cCkEicDMrjtrnMGQ1QFI9gWN1zGq8= -github.com/labstack/echo/v4 v4.11.4/go.mod h1:noh7EvLwqDsmh/X/HWKPUl1AjzJrhyptRyEbQJfxen8= +github.com/labstack/echo/v4 v4.12.0 h1:IKpw49IMryVB2p1a4dzwlhP1O2Tf2E0Ir/450lH+kI0= +github.com/labstack/echo/v4 v4.12.0/go.mod h1:UP9Cr2DJXbOK3Kr9ONYzNowSh7HP0aG0ShAyycHSJvM= github.com/labstack/gommon v0.4.2 h1:F8qTUNXgG1+6WQmqoUWnz8WiEU60mXVVw0P4ht1WRA0= github.com/labstack/gommon v0.4.2/go.mod h1:QlUFxVM+SNXhDL/Z7YhocGIBYOiwB0mXm1+1bAPHPyU= github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw= github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= @@ -264,8 +279,8 @@ github.com/redis/go-redis/v9 v9.4.0 h1:Yzoz33UZw9I/mFhx4MNrB6Fk+XHO1VukNcCa1+lwy github.com/redis/go-redis/v9 v9.4.0/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M= github.com/rhnvrm/simples3 v0.6.1/go.mod h1:Y+3vYm2V7Y4VijFoJHHTrja6OgPrJ2cBti8dPGkC3sA= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= -github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= -github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= +github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= github.com/rubenv/sql-migrate v1.6.0 h1:IZpcTlAx/VKXphWEpwWJ7BaMq05tYtE80zYz+8a5Il8= github.com/rubenv/sql-migrate v1.6.0/go.mod h1:m3ilnKP7sNb4eYkLsp6cGdPOl4OBcXM6rcbzU+Oqc5k= 
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= @@ -284,8 +299,14 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= -github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/swaggo/echo-swagger v1.4.1 h1:Yf0uPaJWp1uRtDloZALyLnvdBeoEL5Kc7DtnjzO/TUk= +github.com/swaggo/echo-swagger v1.4.1/go.mod h1:C8bSi+9yH2FLZsnhqMZLIZddpUxZdBYuNHbtaS1Hljc= +github.com/swaggo/files/v2 v2.0.0 h1:hmAt8Dkynw7Ssz46F6pn8ok6YmGZqHSVLZ+HQM7i0kw= +github.com/swaggo/files/v2 v2.0.0/go.mod h1:24kk2Y9NYEJ5lHuCra6iVwkMjIekMCaFq/0JQj66kyM= +github.com/swaggo/swag v1.16.3 h1:PnCYjPCah8FK4I26l2F/KQ4yz3sILcVUN3cTlBFA9Pg= +github.com/swaggo/swag v1.16.3/go.mod h1:DImHIuOFXKpMFAQjcC7FG4m3Dg4+QuUgUzJmKjI/gRk= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= @@ -304,8 +325,8 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= -golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI= +golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= @@ -315,6 +336,8 @@ golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -332,8 +355,8 @@ 
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= -golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= -golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= +golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -346,6 +369,8 @@ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -378,8 +403,8 @@ golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= -golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20181227161524-e6919f6577db/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= @@ -387,8 +412,8 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk= +golang.org/x/text v0.15.0/go.mod 
h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= @@ -403,6 +428,8 @@ golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapK golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.21.0 h1:qc0xYgIbsSDt9EyWz05J5wfa7LOVW0YTLOXrqdLAWIw= +golang.org/x/tools v0.21.0/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -450,6 +477,7 @@ gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/main.go b/main.go index 8481cea..8b3e01d 100644 --- a/main.go +++ b/main.go @@ -26,6 +26,9 @@ func parseFlags() bool { return *migrateFlag } +// @securityDefinitions.apikey AuthBearer +// @in header +// @name Authorization func main() { migrate := parseFlags() diff --git a/mise.log b/mise.log new file mode 100644 index 0000000..d5d7585 --- /dev/null +++ b/mise.log @@ -0,0 +1,2519 @@ +11:00:21 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:00:34 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:00:38 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:01:27 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:01:27 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:01:27 [DEBUG] mise::toolset::builder: Toolset: go@1.21.4 +11:02:45 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:02:45 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} 
+11:03:34 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:03:34 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:04:04 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:04:04 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:04:23 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise ls +11:04:23 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:04:23 [DEBUG] mise::toolset::builder: Toolset: go@1.21.4 +11:04:23 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:04:23 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:04:37 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:04:37 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:04:58 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise current +11:04:58 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:04:58 [DEBUG] mise::toolset::builder: Toolset: go@1.21.4 +11:04:58 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:04:58 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:05:06 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:05:06 [DEBUG] mise::config: Config { + Config Files: [ + 
"~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:06:25 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:06:25 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:07:22 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:07:22 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:08:25 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:08:25 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:08:39 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:08:39 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:09:30 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:09:30 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:10:23 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:10:23 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:12:58 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:12:58 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + 
"MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:13:11 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:13:11 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:13:36 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:13:37 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:13:38 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:13:38 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:14:04 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:14:04 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:16:12 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:16:12 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:16:21 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:16:21 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:16:28 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:16:28 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:16:33 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:16:33 [DEBUG] mise::config: Config { + Config Files: [ + 
"~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:17:23 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:17:23 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:18:03 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:18:03 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +14:44:27 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +14:44:27 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +14:47:01 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +14:47:01 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +14:48:29 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +14:48:29 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +14:48:56 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +14:48:56 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +14:49:16 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise current +14:49:16 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + 
"MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +14:49:16 [DEBUG] mise::toolset::builder: Toolset: go@1.21.4 +14:49:16 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +14:49:16 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +14:50:31 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +14:50:31 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +14:50:47 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-not-found -s bash -- $ +14:50:47 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +14:50:47 [DEBUG] mise::toolset::builder: Toolset: go@1.21.4 +14:50:48 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-not-found -s bash -- $ +14:50:48 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +14:50:48 [DEBUG] mise::toolset::builder: Toolset: go@1.21.4 +14:50:48 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +14:50:48 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +14:53:28 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +14:53:28 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +14:55:17 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +14:55:17 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": 
"1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +14:57:30 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +14:57:30 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:06:02 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:06:02 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:06:54 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:06:54 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:12:09 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:12:09 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:12:11 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:12:11 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:14:01 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:14:01 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:14:03 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:14:03 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:14:10 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:14:10 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + 
"~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:14:37 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:14:37 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:14:39 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:14:39 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:15:40 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:15:40 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:24:35 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:24:35 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:24:37 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:24:37 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:26:07 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:26:07 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:27:40 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:27:40 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + 
"MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:27:42 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:27:42 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:30:39 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:30:39 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:30:45 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:30:45 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:31:53 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:31:53 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:31:55 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:31:55 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:34:29 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:34:29 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:34:30 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:34:30 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:35:26 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:35:26 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + 
"~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:39:14 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:39:14 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:39:16 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:39:16 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:39:47 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:39:47 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:39:49 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:39:49 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:41:19 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:41:19 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:41:19 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:41:19 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:41:21 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:41:21 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + 
"MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:42:32 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:42:32 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:42:34 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:42:34 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:47:26 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:47:26 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:47:30 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:47:30 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:48:31 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:48:31 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:49:56 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:49:56 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +15:49:59 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +15:49:59 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +16:07:56 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +16:07:56 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + 
"~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +16:07:58 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +16:07:58 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +16:15:15 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +16:15:15 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +16:15:22 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +16:15:22 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +16:15:38 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +16:15:38 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +16:16:53 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +16:16:53 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +16:16:54 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +16:16:54 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +16:27:48 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +16:27:48 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + 
"MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +16:27:50 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +16:27:50 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +16:28:34 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +16:28:34 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +16:28:37 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +16:28:37 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +16:29:54 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +16:29:54 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +16:29:55 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +16:29:55 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +16:32:00 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +16:32:00 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:28:22 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:28:22 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:28:24 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:28:24 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + 
"~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:31:02 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:31:02 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:31:04 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:31:04 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:34:13 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:34:14 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:34:15 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:34:15 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:37:31 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:37:31 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:40:53 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:40:53 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:40:53 [DEBUG] mise::toolset::builder: Toolset: go@1.21.4 +11:40:58 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:40:58 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + 
"MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:41:20 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:41:20 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:48:47 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:48:47 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:49:01 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:49:01 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:49:15 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:49:15 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:49:17 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +11:49:17 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +11:49:17 [DEBUG] mise::toolset::builder: Toolset: go@1.21.4 +12:02:25 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:02:25 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:02:29 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:02:29 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:07:24 [DEBUG] mise::cli: ARGS: 
/home/mehdi/.local/bin/mise hook-env -s bash +12:07:24 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:07:25 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:07:25 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:08:41 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:08:41 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:08:42 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:08:42 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:11:29 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:11:29 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:11:31 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:11:31 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:16:15 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:16:15 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:16:17 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:16:17 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + 
"RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:16:35 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:16:35 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:16:36 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:16:36 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:20:54 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:20:55 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:20:56 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:20:56 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:27:41 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:27:41 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:27:43 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:27:43 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:29:40 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:29:40 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:29:48 [DEBUG] mise::cli: ARGS: 
/home/mehdi/.local/bin/mise hook-env -s bash +12:29:48 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:47:08 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:47:08 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:47:18 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:47:18 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:47:39 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:47:39 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:50:19 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:50:19 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:50:19 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:50:19 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:54:23 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:54:23 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:54:31 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:54:31 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + 
"RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:54:32 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:54:32 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +12:56:55 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +12:56:55 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} +13:06:35 [DEBUG] mise::cli: ARGS: /home/mehdi/.local/bin/mise hook-env -s bash +13:06:35 [DEBUG] mise::config: Config { + Config Files: [ + "~/go/src/niki/.mise.toml", + "~/.config/mise/config.toml", + ], + Env: { + "GO_ENV": "GOLANG_MISE", + "MISE_USE_TOML": "1", + "RUST_BACKTRACE": "0", + "MISE_VERBOSE": "0", + "MISE_LOG_FILE": "./mise.log", + "MISE_LOG_FILE_LEVEL": "debug", + "MISE_DEBUG": "0", + "MISE_QUIET": "1", + "MISE_ERORR": "1", + "MISE_TRACE": "0", + }, +} diff --git a/param/benefactor/address/add.go b/param/benefactor/address/add.go index 74c1f8b..099376d 100644 --- a/param/benefactor/address/add.go +++ b/param/benefactor/address/add.go @@ -3,13 +3,13 @@ package addressparam import "git.gocasts.ir/ebhomengo/niki/entity" type BenefactorAddAddressRequest struct { - PostalCode string `json:"postal_code"` - Address string `json:"address"` - Lat float64 `json:"lat"` - Lon float64 `json:"lon"` - CityID uint `json:"city_id"` - ProvinceID uint `json:"province_id"` - BenefactorID uint `json:"benefactor_id"` + PostalCode string `json:"postal_code" example:"1234567890"` + Address string `json:"address" example:"tehran"` + Lat float64 `json:"lat" example:"22.23"` + Lon float64 `json:"lon" example:"22.22"` + CityID uint `json:"city_id" example:"1"` + ProvinceID uint `json:"province_id" example:"1"` + BenefactorID uint `json:"benefactor_id" example:"1"` } type BenefactorAddAddressResponse struct { diff --git a/param/benefactor/benefactore/info.go b/param/benefactor/benefactore/info.go index 19d29f1..38ee224 100644 --- a/param/benefactor/benefactore/info.go +++ b/param/benefactor/benefactore/info.go @@ -1,8 +1,8 @@ package benefactoreparam type BenefactroInfo struct { - ID uint `json:"id"` - FirstName string `json:"first_name"` - LastName string `json:"last_name"` - Role string `json:"role"` + ID uint `json:"id" example:"1"` + FirstName string `json:"first_name" example:"mehdi"` + LastName string `json:"last_name" example:"rez"` + Role string `json:"role" example:"benefactor"` } diff --git a/param/benefactor/benefactore/login_register.go b/param/benefactor/benefactore/login_register.go index babcec5..c34933b 100644 --- a/param/benefactor/benefactore/login_register.go +++ b/param/benefactor/benefactore/login_register.go @@ -1,8 +1,8 @@ package benefactoreparam type LoginOrRegisterRequest struct { - PhoneNumber string `json:"phone_number"` - VerificationCode string 
`json:"verification_code"` + PhoneNumber string `json:"phone_number" example:"09198829528"` + VerificationCode string `json:"verification_code" example:"123456"` } type LoginOrRegisterResponse struct { diff --git a/param/benefactor/benefactore/send_otp.go b/param/benefactor/benefactore/send_otp.go index 946925a..7acbeb2 100644 --- a/param/benefactor/benefactore/send_otp.go +++ b/param/benefactor/benefactore/send_otp.go @@ -1,12 +1,12 @@ package benefactoreparam type SendOtpRequest struct { - PhoneNumber string `json:"phone_number"` + PhoneNumber string `json:"phone_number" example:"09198829528"` } type SendOtpResponse struct { - PhoneNumber string `json:"phone_number"` + PhoneNumber string `json:"phone_number" example:"09198829528"` /* - this just use in test env + this just use in test .env TODO - remove it after test */ Code string `json:"code"` diff --git a/param/benefactor/benefactore/token.go b/param/benefactor/benefactore/token.go index 3c31348..14d29a4 100644 --- a/param/benefactor/benefactore/token.go +++ b/param/benefactor/benefactore/token.go @@ -1,6 +1,6 @@ package benefactoreparam type Tokens struct { - AccessToken string `json:"access_token"` + AccessToken string `json:"access_token" ` RefreshToken string `json:"refresh_token"` } diff --git a/repository/mysql/address/get.go b/repository/mysql/address/get.go index bae3d93..c6e9931 100644 --- a/repository/mysql/address/get.go +++ b/repository/mysql/address/get.go @@ -36,12 +36,12 @@ func (d *DB) GetAddressByID(ctx context.Context, id uint) (*entity.Address, erro } func scanAddress(scanner mysql.Scanner) (entity.Address, error) { - var createdAt time.Time + var createdAt, updatedAt time.Time var address entity.Address err := scanner.Scan(&address.ID, &address.PostalCode, &address.Address, &address.Lat, &address.Lon, - &address.ProvinceID, &address.CityID, &address.BenefactorID, - &createdAt) + &address.IsMain, &address.CityID, &address.ProvinceID, &address.BenefactorID, + &createdAt, &updatedAt) return address, err } diff --git a/repository/mysql/address/get_all_cities.go b/repository/mysql/address/get_all_cities.go index a2e6767..e869e73 100644 --- a/repository/mysql/address/get_all_cities.go +++ b/repository/mysql/address/get_all_cities.go @@ -45,7 +45,7 @@ func scanCity(scanner mysql.Scanner) (entity.City, error) { var createdAt time.Time var city entity.City - err := scanner.Scan(&city.ID, &city.ProvinceID, &city.Name, &createdAt) + err := scanner.Scan(&city.ID, &city.Name, &city.ProvinceID, &createdAt) return city, err } diff --git a/vendor/github.com/KyleBanks/depth/.gitignore b/vendor/github.com/KyleBanks/depth/.gitignore new file mode 100644 index 0000000..8c2e171 --- /dev/null +++ b/vendor/github.com/KyleBanks/depth/.gitignore @@ -0,0 +1,16 @@ +# Binaries for programs and plugins +*.exe +*.dll +*.so +*.dylib + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736 +.glide/ + +bin/ diff --git a/vendor/github.com/KyleBanks/depth/.travis.yml b/vendor/github.com/KyleBanks/depth/.travis.yml new file mode 100644 index 0000000..ab506f9 --- /dev/null +++ b/vendor/github.com/KyleBanks/depth/.travis.yml @@ -0,0 +1,9 @@ +language: go +sudo: false +go: + - 1.9.x +before_install: + - go get github.com/mattn/goveralls +script: + - $HOME/gopath/bin/goveralls -service=travis-ci +#script: go test $(go list ./... 
| grep -v vendor/) diff --git a/vendor/github.com/KyleBanks/depth/LICENSE b/vendor/github.com/KyleBanks/depth/LICENSE new file mode 100644 index 0000000..070b426 --- /dev/null +++ b/vendor/github.com/KyleBanks/depth/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Kyle Banks + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/KyleBanks/depth/Makefile b/vendor/github.com/KyleBanks/depth/Makefile new file mode 100644 index 0000000..acd35bb --- /dev/null +++ b/vendor/github.com/KyleBanks/depth/Makefile @@ -0,0 +1,32 @@ +VERSION = 1.2.1 + +RELEASE_PKG = ./cmd/depth +INSTALL_PKG = $(RELEASE_PKG) + + +# Remote includes require 'mmake' +# github.com/tj/mmake +include github.com/KyleBanks/make/go/install +include github.com/KyleBanks/make/go/sanity +include github.com/KyleBanks/make/go/release +include github.com/KyleBanks/make/go/bench +include github.com/KyleBanks/make/git/precommit + +# Runs a number of depth commands as examples of what's possible. +example: | install + depth github.com/KyleBanks/depth/cmd/depth strings ./ + + depth -internal strings + + depth -json github.com/KyleBanks/depth/cmd/depth + + depth -test github.com/KyleBanks/depth/cmd/depth + + depth -test -internal strings + + depth -test -internal -max 3 strings + + depth . + + depth ./cmd/depth +.PHONY: example diff --git a/vendor/github.com/KyleBanks/depth/README.md b/vendor/github.com/KyleBanks/depth/README.md new file mode 100644 index 0000000..0de954f --- /dev/null +++ b/vendor/github.com/KyleBanks/depth/README.md @@ -0,0 +1,232 @@ +# depth + +[![GoDoc](https://godoc.org/github.com/KyleBanks/depth?status.svg)](https://godoc.org/github.com/KyleBanks/depth)  +[![Build Status](https://travis-ci.org/KyleBanks/depth.svg?branch=master)](https://travis-ci.org/KyleBanks/depth)  +[![Go Report Card](https://goreportcard.com/badge/github.com/KyleBanks/depth)](https://goreportcard.com/report/github.com/KyleBanks/depth)  +[![Coverage Status](https://coveralls.io/repos/github/KyleBanks/depth/badge.svg?branch=master)](https://coveralls.io/github/KyleBanks/depth?branch=master) + +`depth` is tool to retrieve and visualize Go source code dependency trees. + +## Install + +Download the appropriate binary for your platform from the [Releases](https://github.com/KyleBanks/depth/releases) page, or: + +```sh +go get github.com/KyleBanks/depth/cmd/depth +``` + +## Usage + +`depth` can be used as a standalone command-line application, or as a package within your own project. 
+ +### Command-Line + +Simply execute `depth` with one or more package names to visualize. You can use the fully qualified import path of the package, like so: + +```sh +$ depth github.com/KyleBanks/depth/cmd/depth +github.com/KyleBanks/depth/cmd/depth + ├ encoding/json + ├ flag + ├ fmt + ├ io + ├ log + ├ os + ├ strings + └ github.com/KyleBanks/depth + ├ fmt + ├ go/build + ├ path + ├ sort + └ strings +12 dependencies (11 internal, 1 external, 0 testing). +``` + +Or you can use a relative path, for example: + +```sh +$ depth . +$ depth ./cmd/depth +$ depth ../ +``` + +You can also use `depth` on the Go standard library: + +```sh +$ depth strings +strings + ├ errors + ├ io + ├ unicode + └ unicode/utf8 +5 dependencies (5 internal, 0 external, 0 testing). +``` + +Visualizing multiple packages at a time is supported by simply naming the packages you'd like to visualize: + +```sh +$ depth strings github.com/KyleBanks/depth +strings + ├ errors + ├ io + ├ unicode + └ unicode/utf8 +5 dependencies (5 internal, 0 external, 0 testing). +github.com/KyleBanks/depth + ├ fmt + ├ go/build + ├ path + ├ sort + └ strings +7 dependencies (7 internal, 0 external, 0 testing). +``` + +#### `-internal` + +By default, `depth` only resolves the top level of dependencies for standard library packages, however you can use the `-internal` flag to visualize all internal dependencies: + +```sh +$ depth -internal strings +strings + ├ errors + ├ io + ├ errors + └ sync + ├ internal/race + └ unsafe + ├ runtime + ├ runtime/internal/atomic + └ unsafe + ├ runtime/internal/sys + └ unsafe + ├ sync/atomic + └ unsafe + └ unsafe + ├ unicode + └ unicode/utf8 +12 dependencies (12 internal, 0 external, 0 testing). +``` + +#### `-max` + +The `-max` flag limits the dependency tree to the maximum depth provided. For example, if you supply `-max 1` on the `depth` package, your output would look like so: + +``` +$ depth -max 1 github.com/KyleBanks/depth/cmd/depth +github.com/KyleBanks/depth/cmd/depth + ├ encoding/json + ├ flag + ├ fmt + ├ io + ├ log + ├ os + ├ strings + └ github.com/KyleBanks/depth +7 dependencies (6 internal, 1 external, 0 testing). +``` + +The `-max` flag is particularly useful in conjunction with the `-internal` flag which can lead to very deep dependency trees. + +#### `-test` + +By default, `depth` ignores dependencies that are only required for testing. However, you can view test dependencies using the `-test` flag: + +```sh +$ depth -test strings +strings + ├ bytes + ├ errors + ├ fmt + ├ io + ├ io/ioutil + ├ math/rand + ├ reflect + ├ sync + ├ testing + ├ unicode + ├ unicode/utf8 + └ unsafe +13 dependencies (13 internal, 0 external, 8 testing). +``` + +#### `-explain target-package` + +The `-explain` flag instructs `depth` to print import chains in which the +`target-package` is found: + +```sh +$ depth -explain strings github.com/KyleBanks/depth/cmd/depth +github.com/KyleBanks/depth/cmd/depth -> strings +github.com/KyleBanks/depth/cmd/depth -> github.com/KyleBanks/depth -> strings +``` + +#### `-json` + +The `-json` flag instructs `depth` to output dependencies in JSON format: + +```sh +$ depth -json github.com/KyleBanks/depth/cmd/depth +{ + "name": "github.com/KyleBanks/depth/cmd/depth", + "deps": [ + { + "name": "encoding/json", + "internal": true, + "deps": null + }, + ... + { + "name": "github.com/KyleBanks/depth", + "internal": false, + "deps": [ + { + "name": "go/build", + "internal": true, + "deps": null + }, + ... 
+ ] + } + ] +} +``` + +### Integrating With Your Project + +The `depth` package can easily be used to retrieve the dependency tree for a particular package in your own project. For example, here's how you would retrieve the dependency tree for the `strings` package: + +```go +import "github.com/KyleBanks/depth" + +var t depth.Tree +err := t.Resolve("strings") +if err != nil { + log.Fatal(err) +} + +// Output: "'strings' has 4 dependencies." +log.Printf("'%v' has %v dependencies.", t.Root.Name, len(t.Root.Deps)) +``` + +For additional customization, simply set the appropriate flags on the `Tree` before resolving: + +```go +import "github.com/KyleBanks/depth" + +t := depth.Tree { + ResolveInternal: true, + ResolveTest: true, + MaxDepth: 10, +} + + +err := t.Resolve("strings") +``` + +## Author + +`depth` was developed by [Kyle Banks](https://twitter.com/kylewbanks). + +## License + +`depth` is available under the [MIT](./LICENSE) license. diff --git a/vendor/github.com/KyleBanks/depth/depth.go b/vendor/github.com/KyleBanks/depth/depth.go new file mode 100644 index 0000000..115c28a --- /dev/null +++ b/vendor/github.com/KyleBanks/depth/depth.go @@ -0,0 +1,129 @@ +// Package depth provides the ability to traverse and retrieve Go source code dependencies in the form of +// internal and external packages. +// +// For example, the dependencies of the stdlib `strings` package can be resolved like so: +// +// import "github.com/KyleBanks/depth" +// +// var t depth.Tree +// err := t.Resolve("strings") +// if err != nil { +// log.Fatal(err) +// } +// +// // Output: "strings has 4 dependencies." +// log.Printf("%v has %v dependencies.", t.Root.Name, len(t.Root.Deps)) +// +// For additional customization, simply set the appropriate flags on the `Tree` before resolving: +// +// import "github.com/KyleBanks/depth" +// +// t := depth.Tree { +// ResolveInternal: true, +// ResolveTest: true, +// MaxDepth: 10, +// } +// err := t.Resolve("strings") +package depth + +import ( + "errors" + "go/build" + "os" +) + +// ErrRootPkgNotResolved is returned when the root Pkg of the Tree cannot be resolved, +// typically because it does not exist. +var ErrRootPkgNotResolved = errors.New("unable to resolve root package") + +// Importer defines a type that can import a package and return its details. +type Importer interface { + Import(name, srcDir string, im build.ImportMode) (*build.Package, error) +} + +// Tree represents the top level of a Pkg and the configuration used to +// initialize and represent its contents. +type Tree struct { + Root *Pkg + + ResolveInternal bool + ResolveTest bool + MaxDepth int + + Importer Importer + + importCache map[string]struct{} +} + +// Resolve recursively finds all dependencies for the root Pkg name provided, +// and the packages it depends on. +func (t *Tree) Resolve(name string) error { + pwd, err := os.Getwd() + if err != nil { + return err + } + + t.Root = &Pkg{ + Name: name, + Tree: t, + SrcDir: pwd, + Test: false, + } + + // Reset the import cache each time to ensure a reused Tree doesn't + // reuse the same cache. + t.importCache = nil + + // Allow custom importers, but use build.Default if none is provided. + if t.Importer == nil { + t.Importer = &build.Default + } + + t.Root.Resolve(t.Importer) + if !t.Root.Resolved { + return ErrRootPkgNotResolved + } + + return nil +} + +// shouldResolveInternal determines if internal packages should be further resolved beyond the +// current parent. 
+// +// For example, if the parent Pkg is `github.com/foo/bar` and true is returned, all the +// internal dependencies it relies on will be resolved. If for example `strings` is one of those +// dependencies, and it is passed as the parent here, false may be returned and its internal +// dependencies will not be resolved. +func (t *Tree) shouldResolveInternal(parent *Pkg) bool { + if t.ResolveInternal { + return true + } + + return parent == t.Root +} + +// isAtMaxDepth returns true when the depth of the Pkg provided is at or beyond the maximum +// depth allowed by the tree. +// +// If the Tree has a MaxDepth of zero, true is never returned. +func (t *Tree) isAtMaxDepth(p *Pkg) bool { + if t.MaxDepth == 0 { + return false + } + + return p.depth() >= t.MaxDepth +} + +// hasSeenImport returns true if the import name provided has already been seen within the tree. +// This function only returns false for a name once. +func (t *Tree) hasSeenImport(name string) bool { + if t.importCache == nil { + t.importCache = make(map[string]struct{}) + } + + if _, ok := t.importCache[name]; ok { + return true + } + t.importCache[name] = struct{}{} + return false +} diff --git a/vendor/github.com/KyleBanks/depth/pkg.go b/vendor/github.com/KyleBanks/depth/pkg.go new file mode 100644 index 0000000..1d54d71 --- /dev/null +++ b/vendor/github.com/KyleBanks/depth/pkg.go @@ -0,0 +1,184 @@ +package depth + +import ( + "bytes" + "go/build" + "path" + "sort" + "strings" +) + +// Pkg represents a Go source package, and its dependencies. +type Pkg struct { + Name string `json:"name"` + SrcDir string `json:"-"` + + Internal bool `json:"internal"` + Resolved bool `json:"resolved"` + Test bool `json:"-"` + + Tree *Tree `json:"-"` + Parent *Pkg `json:"-"` + Deps []Pkg `json:"deps"` + + Raw *build.Package `json:"-"` +} + +// Resolve recursively finds all dependencies for the Pkg and the packages it depends on. +func (p *Pkg) Resolve(i Importer) { + // Resolved is always true, regardless of if we skip the import, + // it is only false if there is an error while importing. + p.Resolved = true + + name := p.cleanName() + if name == "" { + return + } + + // Stop resolving imports if we've reached max depth or found a duplicate. + var importMode build.ImportMode + if p.Tree.hasSeenImport(name) || p.Tree.isAtMaxDepth(p) { + importMode = build.FindOnly + } + + pkg, err := i.Import(name, p.SrcDir, importMode) + if err != nil { + // TODO: Check the error type? + p.Resolved = false + return + } + p.Raw = pkg + + // Update the name with the fully qualified import path. + p.Name = pkg.ImportPath + + // If this is an internal dependency, we may need to skip it. + if pkg.Goroot { + p.Internal = true + if !p.Tree.shouldResolveInternal(p) { + return + } + } + + //first we set the regular dependencies, then we add the test dependencies + //sharing the same set. This allows us to mark all test-only deps linearly + unique := make(map[string]struct{}) + p.setDeps(i, pkg.Imports, pkg.Dir, unique, false) + if p.Tree.ResolveTest { + p.setDeps(i, append(pkg.TestImports, pkg.XTestImports...), pkg.Dir, unique, true) + } +} + +// setDeps takes a slice of import paths and the source directory they are relative to, +// and creates the Deps of the Pkg. Each dependency is also further resolved prior to being added +// to the Pkg. +func (p *Pkg) setDeps(i Importer, imports []string, srcDir string, unique map[string]struct{}, isTest bool) { + for _, imp := range imports { + // Mostly for testing files where cyclic imports are allowed. 
+ if imp == p.Name { + continue + } + + // Skip duplicates. + if _, ok := unique[imp]; ok { + continue + } + unique[imp] = struct{}{} + + p.addDep(i, imp, srcDir, isTest) + } + + sort.Sort(byInternalAndName(p.Deps)) +} + +// addDep creates a Pkg and it's dependencies from an imported package name. +func (p *Pkg) addDep(i Importer, name string, srcDir string, isTest bool) { + dep := Pkg{ + Name: name, + SrcDir: srcDir, + Tree: p.Tree, + Parent: p, + Test: isTest, + } + dep.Resolve(i) + + p.Deps = append(p.Deps, dep) +} + +// isParent goes recursively up the chain of Pkgs to determine if the name provided is ever a +// parent of the current Pkg. +func (p *Pkg) isParent(name string) bool { + if p.Parent == nil { + return false + } + + if p.Parent.Name == name { + return true + } + + return p.Parent.isParent(name) +} + +// depth returns the depth of the Pkg within the Tree. +func (p *Pkg) depth() int { + if p.Parent == nil { + return 0 + } + + return p.Parent.depth() + 1 +} + +// cleanName returns a cleaned version of the Pkg name used for resolving dependencies. +// +// If an empty string is returned, dependencies should not be resolved. +func (p *Pkg) cleanName() string { + name := p.Name + + // C 'package' cannot be resolved. + if name == "C" { + return "" + } + + // Internal golang_org/* packages must be prefixed with vendor/ + // + // Thanks to @davecheney for this: + // https://github.com/davecheney/graphpkg/blob/master/main.go#L46 + if strings.HasPrefix(name, "golang_org") { + name = path.Join("vendor", name) + } + + return name +} + +// String returns a string representation of the Pkg containing the Pkg name and status. +func (p *Pkg) String() string { + b := bytes.NewBufferString(p.Name) + + if !p.Resolved { + b.Write([]byte(" (unresolved)")) + } + + return b.String() +} + +// byInternalAndName ensures a slice of Pkgs are sorted such that the internal stdlib +// packages are always above external packages (ie. github.com/whatever). +type byInternalAndName []Pkg + +func (b byInternalAndName) Len() int { + return len(b) +} + +func (b byInternalAndName) Swap(i, j int) { + b[i], b[j] = b[j], b[i] +} + +func (b byInternalAndName) Less(i, j int) bool { + if b[i].Internal && !b[j].Internal { + return true + } else if !b[i].Internal && b[j].Internal { + return false + } + + return b[i].Name < b[j].Name +} diff --git a/vendor/github.com/asaskevich/govalidator/types.go b/vendor/github.com/asaskevich/govalidator/types.go index 62f8b0d..f42a346 100644 --- a/vendor/github.com/asaskevich/govalidator/types.go +++ b/vendor/github.com/asaskevich/govalidator/types.go @@ -72,13 +72,13 @@ var ParamTagMap = map[string]ParamValidator{ // ParamTagRegexMap maps param tags to their respective regexes. 
var ParamTagRegexMap = map[string]*regexp.Regexp{ - "range": regexp.MustCompile("^range\\((\\d+)\\|(\\d+)\\)$"), - "length": regexp.MustCompile("^length\\((\\d+)\\|(\\d+)\\)$"), - "runelength": regexp.MustCompile("^runelength\\((\\d+)\\|(\\d+)\\)$"), - "stringlength": regexp.MustCompile("^stringlength\\((\\d+)\\|(\\d+)\\)$"), - "in": regexp.MustCompile(`^in\((.*)\)`), - "matches": regexp.MustCompile(`^matches\((.+)\)$`), - "rsapub": regexp.MustCompile("^rsapub\\((\\d+)\\)$"), + "range": regexp.MustCompile("^range\\((\\d+)\\|(\\d+)\\)$"), + "length": regexp.MustCompile("^length\\((\\d+)\\|(\\d+)\\)$"), + "runelength": regexp.MustCompile("^runelength\\((\\d+)\\|(\\d+)\\)$"), + "stringlength": regexp.MustCompile("^stringlength\\((\\d+)\\|(\\d+)\\)$"), + "in": regexp.MustCompile(`^in\((.*)\)`), + "matches": regexp.MustCompile(`^matches\((.+)\)$`), + "rsapub": regexp.MustCompile("^rsapub\\((\\d+)\\)$"), "minstringlength": regexp.MustCompile("^minstringlength\\((\\d+)\\)$"), "maxstringlength": regexp.MustCompile("^maxstringlength\\((\\d+)\\)$"), } @@ -173,7 +173,7 @@ type ISO3166Entry struct { Numeric string } -// ISO3166List based on https://www.iso.org/obp/ui/#search/code/ Code Type "Officially Assigned Codes" +//ISO3166List based on https://www.iso.org/obp/ui/#search/code/ Code Type "Officially Assigned Codes" var ISO3166List = []ISO3166Entry{ {"Afghanistan", "Afghanistan (l')", "AF", "AFG", "004"}, {"Albania", "Albanie (l')", "AL", "ALB", "008"}, @@ -463,7 +463,7 @@ type ISO693Entry struct { English string } -// ISO693List based on http://data.okfn.org/data/core/language-codes/r/language-codes-3b2.json +//ISO693List based on http://data.okfn.org/data/core/language-codes/r/language-codes-3b2.json var ISO693List = []ISO693Entry{ {Alpha3bCode: "aar", Alpha2Code: "aa", English: "Afar"}, {Alpha3bCode: "abk", Alpha2Code: "ab", English: "Abkhazian"}, diff --git a/vendor/github.com/asaskevich/govalidator/validator.go b/vendor/github.com/asaskevich/govalidator/validator.go index 439a45d..14682e0 100644 --- a/vendor/github.com/asaskevich/govalidator/validator.go +++ b/vendor/github.com/asaskevich/govalidator/validator.go @@ -37,32 +37,25 @@ const RF3339WithoutZone = "2006-01-02T15:04:05" // SetFieldsRequiredByDefault causes validation to fail when struct fields // do not include validations or are not explicitly marked as exempt (using `valid:"-"` or `valid:"email,optional"`). // This struct definition will fail govalidator.ValidateStruct() (and the field values do not matter): -// -// type exampleStruct struct { -// Name string `` -// Email string `valid:"email"` -// +// type exampleStruct struct { +// Name string `` +// Email string `valid:"email"` // This, however, will only fail when Email is empty or an invalid email address: -// -// type exampleStruct2 struct { -// Name string `valid:"-"` -// Email string `valid:"email"` -// +// type exampleStruct2 struct { +// Name string `valid:"-"` +// Email string `valid:"email"` // Lastly, this will only fail when Email is an invalid email address but not when it's empty: -// -// type exampleStruct2 struct { -// Name string `valid:"-"` -// Email string `valid:"email,optional"` +// type exampleStruct2 struct { +// Name string `valid:"-"` +// Email string `valid:"email,optional"` func SetFieldsRequiredByDefault(value bool) { fieldsRequiredByDefault = value } // SetNilPtrAllowedByRequired causes validation to pass for nil ptrs when a field is set to required. // The validation will still reject ptr fields in their zero value state. 
Example with this enabled: -// -// type exampleStruct struct { -// Name *string `valid:"required"` -// +// type exampleStruct struct { +// Name *string `valid:"required"` // With `Name` set to "", this will be considered invalid input and will cause a validation error. // With `Name` set to nil, this will be considered valid by validation. // By default this is disabled. @@ -161,8 +154,8 @@ func IsAlpha(str string) bool { return rxAlpha.MatchString(str) } -// IsUTFLetter check if the string contains only unicode letter characters. -// Similar to IsAlpha but for all languages. Empty string is valid. +//IsUTFLetter check if the string contains only unicode letter characters. +//Similar to IsAlpha but for all languages. Empty string is valid. func IsUTFLetter(str string) bool { if IsNull(str) { return true diff --git a/vendor/github.com/brianvoe/gofakeit/v6/template.go b/vendor/github.com/brianvoe/gofakeit/v6/template.go index 37ca879..329d6f7 100644 --- a/vendor/github.com/brianvoe/gofakeit/v6/template.go +++ b/vendor/github.com/brianvoe/gofakeit/v6/template.go @@ -3,12 +3,13 @@ package gofakeit import ( "bytes" "fmt" + "strconv" + "time" + "math/rand" "reflect" - "strconv" "strings" "text/template" - "time" ) // TemplateOptions defines values needed for template document generation diff --git a/vendor/github.com/davecgh/go-spew/spew/bypass.go b/vendor/github.com/davecgh/go-spew/spew/bypass.go index 70ddeaa..7929947 100644 --- a/vendor/github.com/davecgh/go-spew/spew/bypass.go +++ b/vendor/github.com/davecgh/go-spew/spew/bypass.go @@ -18,7 +18,6 @@ // tag is deprecated and thus should not be used. // Go versions prior to 1.4 are disabled because they use a different layout // for interfaces which make the implementation of unsafeReflectValue more complex. -//go:build !js && !appengine && !safe && !disableunsafe && go1.4 // +build !js,!appengine,!safe,!disableunsafe,go1.4 package spew diff --git a/vendor/github.com/davecgh/go-spew/spew/config.go b/vendor/github.com/davecgh/go-spew/spew/config.go index 161895f..2e3d22f 100644 --- a/vendor/github.com/davecgh/go-spew/spew/config.go +++ b/vendor/github.com/davecgh/go-spew/spew/config.go @@ -254,15 +254,15 @@ pointer addresses used to indirect to the final value. It provides the following features over the built-in printing facilities provided by the fmt package: - - Pointers are dereferenced and followed - - Circular data structures are detected and handled properly - - Custom Stringer/error interfaces are optionally invoked, including - on unexported types - - Custom types which only implement the Stringer/error interfaces via - a pointer receiver are optionally invoked when passing non-pointer - variables - - Byte arrays and slices are dumped like the hexdump -C command which - includes offsets, byte values in hex, and ASCII output + * Pointers are dereferenced and followed + * Circular data structures are detected and handled properly + * Custom Stringer/error interfaces are optionally invoked, including + on unexported types + * Custom types which only implement the Stringer/error interfaces via + a pointer receiver are optionally invoked when passing non-pointer + variables + * Byte arrays and slices are dumped like the hexdump -C command which + includes offsets, byte values in hex, and ASCII output The configuration options are controlled by modifying the public members of c. See ConfigState for options documentation. 
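
For reference, a minimal sketch of the behaviour the go-spew doc comment above describes — pointers are followed, circular structures are detected, and output is tuned through `ConfigState` — assuming an invented `Node` type used purely for illustration; `spew.Dump`, `spew.NewDefaultConfig`, and the `MaxDepth` field are the entry points named in the vendored documentation, while the specific values chosen here are arbitrary:

```go
package main

import "github.com/davecgh/go-spew/spew"

// Node is an invented example type (not part of spew); the cycle built in
// main exercises the circular-structure handling described in the doc comment.
type Node struct {
	Name string
	Next *Node
}

func main() {
	a := &Node{Name: "a"}
	b := &Node{Name: "b", Next: a}
	a.Next = b // circular reference: a -> b -> a

	// Dump dereferences pointers, detects the cycle, and prints types and values.
	spew.Dump(a)

	// Behaviour is controlled through exported ConfigState members,
	// e.g. a config returned by NewDefaultConfig.
	cs := spew.NewDefaultConfig()
	cs.MaxDepth = 2 // arbitrary depth limit for this sketch
	cs.Dump(b)
}
```
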
@@ -295,12 +295,12 @@ func (c *ConfigState) convertArgs(args []interface{}) (formatters []interface{}) // NewDefaultConfig returns a ConfigState with the following default settings. // -// Indent: " " -// MaxDepth: 0 -// DisableMethods: false -// DisablePointerMethods: false -// ContinueOnMethod: false -// SortKeys: false +// Indent: " " +// MaxDepth: 0 +// DisableMethods: false +// DisablePointerMethods: false +// ContinueOnMethod: false +// SortKeys: false func NewDefaultConfig() *ConfigState { return &ConfigState{Indent: " "} } diff --git a/vendor/github.com/davecgh/go-spew/spew/dump.go b/vendor/github.com/davecgh/go-spew/spew/dump.go index 8323041..f78d89f 100644 --- a/vendor/github.com/davecgh/go-spew/spew/dump.go +++ b/vendor/github.com/davecgh/go-spew/spew/dump.go @@ -488,15 +488,15 @@ pointer addresses used to indirect to the final value. It provides the following features over the built-in printing facilities provided by the fmt package: - - Pointers are dereferenced and followed - - Circular data structures are detected and handled properly - - Custom Stringer/error interfaces are optionally invoked, including - on unexported types - - Custom types which only implement the Stringer/error interfaces via - a pointer receiver are optionally invoked when passing non-pointer - variables - - Byte arrays and slices are dumped like the hexdump -C command which - includes offsets, byte values in hex, and ASCII output + * Pointers are dereferenced and followed + * Circular data structures are detected and handled properly + * Custom Stringer/error interfaces are optionally invoked, including + on unexported types + * Custom types which only implement the Stringer/error interfaces via + a pointer receiver are optionally invoked when passing non-pointer + variables + * Byte arrays and slices are dumped like the hexdump -C command which + includes offsets, byte values in hex, and ASCII output The configuration options are controlled by an exported package global, spew.Config. See ConfigState for options documentation. diff --git a/vendor/github.com/fatih/structs/field.go b/vendor/github.com/fatih/structs/field.go index 23270e0..e697832 100644 --- a/vendor/github.com/fatih/structs/field.go +++ b/vendor/github.com/fatih/structs/field.go @@ -95,8 +95,8 @@ func (f *Field) Zero() error { // of a nested struct . A struct tag with the content of "-" ignores the // checking of that particular field. Example: // -// // Field is ignored by this package. -// Field *http.Request `structs:"-"` +// // Field is ignored by this package. +// Field *http.Request `structs:"-"` // // It panics if field is not exported or if field's kind is not struct func (f *Field) Fields() []*Field { diff --git a/vendor/github.com/fatih/structs/structs.go b/vendor/github.com/fatih/structs/structs.go index 7970aa0..3a87706 100644 --- a/vendor/github.com/fatih/structs/structs.go +++ b/vendor/github.com/fatih/structs/structs.go @@ -3,6 +3,7 @@ package structs import ( "fmt" + "reflect" ) @@ -37,43 +38,43 @@ func New(s interface{}) *Struct { // can be changed in the struct field's tag value. The "structs" key in the // struct's field tag value is the key name. Example: // -// // Field appears in map as key "myName". -// Name string `structs:"myName"` +// // Field appears in map as key "myName". +// Name string `structs:"myName"` // // A tag value with the content of "-" ignores that particular field. Example: // -// // Field is ignored by this package. -// Field bool `structs:"-"` +// // Field is ignored by this package. 
+// Field bool `structs:"-"` // // A tag value with the content of "string" uses the stringer to get the value. Example: // -// // The value will be output of Animal's String() func. -// // Map will panic if Animal does not implement String(). -// Field *Animal `structs:"field,string"` +// // The value will be output of Animal's String() func. +// // Map will panic if Animal does not implement String(). +// Field *Animal `structs:"field,string"` // // A tag value with the option of "flatten" used in a struct field is to flatten its fields // in the output map. Example: // -// // The FieldStruct's fields will be flattened into the output map. -// FieldStruct time.Time `structs:",flatten"` +// // The FieldStruct's fields will be flattened into the output map. +// FieldStruct time.Time `structs:",flatten"` // // A tag value with the option of "omitnested" stops iterating further if the type // is a struct. Example: // -// // Field is not processed further by this package. -// Field time.Time `structs:"myName,omitnested"` -// Field *http.Request `structs:",omitnested"` +// // Field is not processed further by this package. +// Field time.Time `structs:"myName,omitnested"` +// Field *http.Request `structs:",omitnested"` // // A tag value with the option of "omitempty" ignores that particular field if // the field value is empty. Example: // -// // Field appears in map as key "myName", but the field is -// // skipped if empty. -// Field string `structs:"myName,omitempty"` +// // Field appears in map as key "myName", but the field is +// // skipped if empty. +// Field string `structs:"myName,omitempty"` // -// // Field appears in map as key "Field" (the default), but -// // the field is skipped if empty. -// Field string `structs:",omitempty"` +// // Field appears in map as key "Field" (the default), but +// // the field is skipped if empty. +// Field string `structs:",omitempty"` // // Note that only exported fields of a struct can be accessed, non exported // fields will be neglected. @@ -152,21 +153,21 @@ func (s *Struct) FillMap(out map[string]interface{}) { // struct tag with the content of "-" ignores the that particular field. // Example: // -// // Field is ignored by this package. -// Field int `structs:"-"` +// // Field is ignored by this package. +// Field int `structs:"-"` // // A value with the option of "omitnested" stops iterating further if the type // is a struct. Example: // -// // Fields is not processed further by this package. -// Field time.Time `structs:",omitnested"` -// Field *http.Request `structs:",omitnested"` +// // Fields is not processed further by this package. +// Field time.Time `structs:",omitnested"` +// Field *http.Request `structs:",omitnested"` // // A tag value with the option of "omitempty" ignores that particular field and // is not added to the values if the field value is empty. Example: // -// // Field is skipped if empty -// Field string `structs:",omitempty"` +// // Field is skipped if empty +// Field string `structs:",omitempty"` // // Note that only exported fields of a struct can be accessed, non exported // fields will be neglected. @@ -214,8 +215,8 @@ func (s *Struct) Values() []interface{} { // Fields returns a slice of Fields. A struct tag with the content of "-" // ignores the checking of that particular field. Example: // -// // Field is ignored by this package. -// Field bool `structs:"-"` +// // Field is ignored by this package. +// Field bool `structs:"-"` // // It panics if s's kind is not struct. 
func (s *Struct) Fields() []*Field { @@ -225,8 +226,8 @@ func (s *Struct) Fields() []*Field { // Names returns a slice of field names. A struct tag with the content of "-" // ignores the checking of that particular field. Example: // -// // Field is ignored by this package. -// Field bool `structs:"-"` +// // Field is ignored by this package. +// Field bool `structs:"-"` // // It panics if s's kind is not struct. func (s *Struct) Names() []string { @@ -302,15 +303,15 @@ func (s *Struct) FieldOk(name string) (*Field, bool) { // initialized) A struct tag with the content of "-" ignores the checking of // that particular field. Example: // -// // Field is ignored by this package. -// Field bool `structs:"-"` +// // Field is ignored by this package. +// Field bool `structs:"-"` // // A value with the option of "omitnested" stops iterating further if the type // is a struct. Example: // -// // Field is not processed further by this package. -// Field time.Time `structs:"myName,omitnested"` -// Field *http.Request `structs:",omitnested"` +// // Field is not processed further by this package. +// Field time.Time `structs:"myName,omitnested"` +// Field *http.Request `structs:",omitnested"` // // Note that only exported fields of a struct can be accessed, non exported // fields will be neglected. It panics if s's kind is not struct. @@ -349,15 +350,15 @@ func (s *Struct) IsZero() bool { // A struct tag with the content of "-" ignores the checking of that particular // field. Example: // -// // Field is ignored by this package. -// Field bool `structs:"-"` +// // Field is ignored by this package. +// Field bool `structs:"-"` // // A value with the option of "omitnested" stops iterating further if the type // is a struct. Example: // -// // Field is not processed further by this package. -// Field time.Time `structs:"myName,omitnested"` -// Field *http.Request `structs:",omitnested"` +// // Field is not processed further by this package. +// Field time.Time `structs:"myName,omitnested"` +// Field *http.Request `structs:",omitnested"` // // Note that only exported fields of a struct can be accessed, non exported // fields will be neglected. It panics if s's kind is not struct. 
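
For reference, a minimal sketch of how the struct tag options documented in the fatih/structs hunks above (`-`, renaming, `omitempty`) behave at runtime; the `Server` type and its field names are invented for illustration, and `structs.Map` is assumed to be the package-level helper that builds the map described by `FillMap`:

```go
package main

import (
	"fmt"

	"github.com/fatih/structs"
)

// Server is an invented example type; its tags exercise the options
// documented in the vendored package: rename, omitempty, and "-".
type Server struct {
	Name   string `structs:"name"`
	Port   int    `structs:"port,omitempty"`
	Secret string `structs:"-"`
}

func main() {
	s := Server{Name: "api", Secret: "hunter2"}

	m := structs.Map(s)
	// Expected output: map[name:api]
	//   - Port is zero-valued and tagged omitempty, so it is skipped.
	//   - Secret is tagged "-", so it is ignored entirely.
	fmt.Println(m)
}
```
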
diff --git a/vendor/github.com/ghodss/yaml/.gitignore b/vendor/github.com/ghodss/yaml/.gitignore new file mode 100644 index 0000000..e256a31 --- /dev/null +++ b/vendor/github.com/ghodss/yaml/.gitignore @@ -0,0 +1,20 @@ +# OSX leaves these everywhere on SMB shares +._* + +# Eclipse files +.classpath +.project +.settings/** + +# Emacs save files +*~ + +# Vim-related files +[._]*.s[a-w][a-z] +[._]s[a-w][a-z] +*.un~ +Session.vim +.netrwhist + +# Go test binaries +*.test diff --git a/vendor/github.com/ghodss/yaml/.travis.yml b/vendor/github.com/ghodss/yaml/.travis.yml new file mode 100644 index 0000000..0e9d6ed --- /dev/null +++ b/vendor/github.com/ghodss/yaml/.travis.yml @@ -0,0 +1,7 @@ +language: go +go: + - 1.3 + - 1.4 +script: + - go test + - go build diff --git a/vendor/github.com/ghodss/yaml/LICENSE b/vendor/github.com/ghodss/yaml/LICENSE new file mode 100644 index 0000000..7805d36 --- /dev/null +++ b/vendor/github.com/ghodss/yaml/LICENSE @@ -0,0 +1,50 @@ +The MIT License (MIT) + +Copyright (c) 2014 Sam Ghods + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +Copyright (c) 2012 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/ghodss/yaml/README.md b/vendor/github.com/ghodss/yaml/README.md new file mode 100644 index 0000000..0200f75 --- /dev/null +++ b/vendor/github.com/ghodss/yaml/README.md @@ -0,0 +1,121 @@ +# YAML marshaling and unmarshaling support for Go + +[![Build Status](https://travis-ci.org/ghodss/yaml.svg)](https://travis-ci.org/ghodss/yaml) + +## Introduction + +A wrapper around [go-yaml](https://github.com/go-yaml/yaml) designed to enable a better way of handling YAML when marshaling to and from structs. + +In short, this library first converts YAML to JSON using go-yaml and then uses `json.Marshal` and `json.Unmarshal` to convert to or from the struct. This means that it effectively reuses the JSON struct tags as well as the custom JSON methods `MarshalJSON` and `UnmarshalJSON` unlike go-yaml. For a detailed overview of the rationale behind this method, [see this blog post](http://ghodss.com/2014/the-right-way-to-handle-yaml-in-golang/). + +## Compatibility + +This package uses [go-yaml](https://github.com/go-yaml/yaml) and therefore supports [everything go-yaml supports](https://github.com/go-yaml/yaml#compatibility). + +## Caveats + +**Caveat #1:** When using `yaml.Marshal` and `yaml.Unmarshal`, binary data should NOT be preceded with the `!!binary` YAML tag. If you do, go-yaml will convert the binary data from base64 to native binary data, which is not compatible with JSON. You can still use binary in your YAML files though - just store them without the `!!binary` tag and decode the base64 in your code (e.g. in the custom JSON methods `MarshalJSON` and `UnmarshalJSON`). This also has the benefit that your YAML and your JSON binary data will be decoded exactly the same way. As an example: + +``` +BAD: + exampleKey: !!binary gIGC + +GOOD: + exampleKey: gIGC +... and decode the base64 data in your code. +``` + +**Caveat #2:** When using `YAMLToJSON` directly, maps with keys that are maps will result in an error since this is not supported by JSON. This error will occur in `Unmarshal` as well since you can't unmarshal map keys anyways since struct fields can't be keys. + +## Installation and usage + +To install, run: + +``` +$ go get github.com/ghodss/yaml +``` + +And import using: + +``` +import "github.com/ghodss/yaml" +``` + +Usage is very similar to the JSON library: + +```go +package main + +import ( + "fmt" + + "github.com/ghodss/yaml" +) + +type Person struct { + Name string `json:"name"` // Affects YAML field names too. + Age int `json:"age"` +} + +func main() { + // Marshal a Person struct to YAML. + p := Person{"John", 30} + y, err := yaml.Marshal(p) + if err != nil { + fmt.Printf("err: %v\n", err) + return + } + fmt.Println(string(y)) + /* Output: + age: 30 + name: John + */ + + // Unmarshal the YAML back into a Person struct. 
+ var p2 Person + err = yaml.Unmarshal(y, &p2) + if err != nil { + fmt.Printf("err: %v\n", err) + return + } + fmt.Println(p2) + /* Output: + {John 30} + */ +} +``` + +`yaml.YAMLToJSON` and `yaml.JSONToYAML` methods are also available: + +```go +package main + +import ( + "fmt" + + "github.com/ghodss/yaml" +) + +func main() { + j := []byte(`{"name": "John", "age": 30}`) + y, err := yaml.JSONToYAML(j) + if err != nil { + fmt.Printf("err: %v\n", err) + return + } + fmt.Println(string(y)) + /* Output: + name: John + age: 30 + */ + j2, err := yaml.YAMLToJSON(y) + if err != nil { + fmt.Printf("err: %v\n", err) + return + } + fmt.Println(string(j2)) + /* Output: + {"age":30,"name":"John"} + */ +} +``` diff --git a/vendor/github.com/ghodss/yaml/fields.go b/vendor/github.com/ghodss/yaml/fields.go new file mode 100644 index 0000000..5860074 --- /dev/null +++ b/vendor/github.com/ghodss/yaml/fields.go @@ -0,0 +1,501 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +package yaml + +import ( + "bytes" + "encoding" + "encoding/json" + "reflect" + "sort" + "strings" + "sync" + "unicode" + "unicode/utf8" +) + +// indirect walks down v allocating pointers as needed, +// until it gets to a non-pointer. +// if it encounters an Unmarshaler, indirect stops and returns that. +// if decodingNull is true, indirect stops at the last pointer so it can be set to nil. +func indirect(v reflect.Value, decodingNull bool) (json.Unmarshaler, encoding.TextUnmarshaler, reflect.Value) { + // If v is a named type and is addressable, + // start with its address, so that if the type has pointer methods, + // we find them. + if v.Kind() != reflect.Ptr && v.Type().Name() != "" && v.CanAddr() { + v = v.Addr() + } + for { + // Load value from interface, but only if the result will be + // usefully addressable. + if v.Kind() == reflect.Interface && !v.IsNil() { + e := v.Elem() + if e.Kind() == reflect.Ptr && !e.IsNil() && (!decodingNull || e.Elem().Kind() == reflect.Ptr) { + v = e + continue + } + } + + if v.Kind() != reflect.Ptr { + break + } + + if v.Elem().Kind() != reflect.Ptr && decodingNull && v.CanSet() { + break + } + if v.IsNil() { + if v.CanSet() { + v.Set(reflect.New(v.Type().Elem())) + } else { + v = reflect.New(v.Type().Elem()) + } + } + if v.Type().NumMethod() > 0 { + if u, ok := v.Interface().(json.Unmarshaler); ok { + return u, nil, reflect.Value{} + } + if u, ok := v.Interface().(encoding.TextUnmarshaler); ok { + return nil, u, reflect.Value{} + } + } + v = v.Elem() + } + return nil, nil, v +} + +// A field represents a single field found in a struct. +type field struct { + name string + nameBytes []byte // []byte(name) + equalFold func(s, t []byte) bool // bytes.EqualFold or equivalent + + tag bool + index []int + typ reflect.Type + omitEmpty bool + quoted bool +} + +func fillField(f field) field { + f.nameBytes = []byte(f.name) + f.equalFold = foldFunc(f.nameBytes) + return f +} + +// byName sorts field by name, breaking ties with depth, +// then breaking ties with "name came from json tag", then +// breaking ties with index sequence. 
+type byName []field + +func (x byName) Len() int { return len(x) } + +func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byName) Less(i, j int) bool { + if x[i].name != x[j].name { + return x[i].name < x[j].name + } + if len(x[i].index) != len(x[j].index) { + return len(x[i].index) < len(x[j].index) + } + if x[i].tag != x[j].tag { + return x[i].tag + } + return byIndex(x).Less(i, j) +} + +// byIndex sorts field by index sequence. +type byIndex []field + +func (x byIndex) Len() int { return len(x) } + +func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byIndex) Less(i, j int) bool { + for k, xik := range x[i].index { + if k >= len(x[j].index) { + return false + } + if xik != x[j].index[k] { + return xik < x[j].index[k] + } + } + return len(x[i].index) < len(x[j].index) +} + +// typeFields returns a list of fields that JSON should recognize for the given type. +// The algorithm is breadth-first search over the set of structs to include - the top struct +// and then any reachable anonymous structs. +func typeFields(t reflect.Type) []field { + // Anonymous fields to explore at the current level and the next. + current := []field{} + next := []field{{typ: t}} + + // Count of queued names for current level and the next. + count := map[reflect.Type]int{} + nextCount := map[reflect.Type]int{} + + // Types already visited at an earlier level. + visited := map[reflect.Type]bool{} + + // Fields found. + var fields []field + + for len(next) > 0 { + current, next = next, current[:0] + count, nextCount = nextCount, map[reflect.Type]int{} + + for _, f := range current { + if visited[f.typ] { + continue + } + visited[f.typ] = true + + // Scan f.typ for fields to include. + for i := 0; i < f.typ.NumField(); i++ { + sf := f.typ.Field(i) + if sf.PkgPath != "" { // unexported + continue + } + tag := sf.Tag.Get("json") + if tag == "-" { + continue + } + name, opts := parseTag(tag) + if !isValidTag(name) { + name = "" + } + index := make([]int, len(f.index)+1) + copy(index, f.index) + index[len(f.index)] = i + + ft := sf.Type + if ft.Name() == "" && ft.Kind() == reflect.Ptr { + // Follow pointer. + ft = ft.Elem() + } + + // Record found field and index sequence. + if name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct { + tagged := name != "" + if name == "" { + name = sf.Name + } + fields = append(fields, fillField(field{ + name: name, + tag: tagged, + index: index, + typ: ft, + omitEmpty: opts.Contains("omitempty"), + quoted: opts.Contains("string"), + })) + if count[f.typ] > 1 { + // If there were multiple instances, add a second, + // so that the annihilation code will see a duplicate. + // It only cares about the distinction between 1 or 2, + // so don't bother generating any more copies. + fields = append(fields, fields[len(fields)-1]) + } + continue + } + + // Record new anonymous struct to explore in next round. + nextCount[ft]++ + if nextCount[ft] == 1 { + next = append(next, fillField(field{name: ft.Name(), index: index, typ: ft})) + } + } + } + } + + sort.Sort(byName(fields)) + + // Delete all fields that are hidden by the Go rules for embedded fields, + // except that fields with JSON tags are promoted. + + // The fields are sorted in primary order of name, secondary order + // of field index length. Loop over names; for each name, delete + // hidden fields by choosing the one dominant field that survives. + out := fields[:0] + for advance, i := 0, 0; i < len(fields); i += advance { + // One iteration per name. 
+ // Find the sequence of fields with the name of this first field. + fi := fields[i] + name := fi.name + for advance = 1; i+advance < len(fields); advance++ { + fj := fields[i+advance] + if fj.name != name { + break + } + } + if advance == 1 { // Only one field with this name + out = append(out, fi) + continue + } + dominant, ok := dominantField(fields[i : i+advance]) + if ok { + out = append(out, dominant) + } + } + + fields = out + sort.Sort(byIndex(fields)) + + return fields +} + +// dominantField looks through the fields, all of which are known to +// have the same name, to find the single field that dominates the +// others using Go's embedding rules, modified by the presence of +// JSON tags. If there are multiple top-level fields, the boolean +// will be false: This condition is an error in Go and we skip all +// the fields. +func dominantField(fields []field) (field, bool) { + // The fields are sorted in increasing index-length order. The winner + // must therefore be one with the shortest index length. Drop all + // longer entries, which is easy: just truncate the slice. + length := len(fields[0].index) + tagged := -1 // Index of first tagged field. + for i, f := range fields { + if len(f.index) > length { + fields = fields[:i] + break + } + if f.tag { + if tagged >= 0 { + // Multiple tagged fields at the same level: conflict. + // Return no field. + return field{}, false + } + tagged = i + } + } + if tagged >= 0 { + return fields[tagged], true + } + // All remaining fields have the same length. If there's more than one, + // we have a conflict (two fields named "X" at the same level) and we + // return no field. + if len(fields) > 1 { + return field{}, false + } + return fields[0], true +} + +var fieldCache struct { + sync.RWMutex + m map[reflect.Type][]field +} + +// cachedTypeFields is like typeFields but uses a cache to avoid repeated work. +func cachedTypeFields(t reflect.Type) []field { + fieldCache.RLock() + f := fieldCache.m[t] + fieldCache.RUnlock() + if f != nil { + return f + } + + // Compute fields without lock. + // Might duplicate effort but won't hold other computations back. + f = typeFields(t) + if f == nil { + f = []field{} + } + + fieldCache.Lock() + if fieldCache.m == nil { + fieldCache.m = map[reflect.Type][]field{} + } + fieldCache.m[t] = f + fieldCache.Unlock() + return f +} + +func isValidTag(s string) bool { + if s == "" { + return false + } + for _, c := range s { + switch { + case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c): + // Backslash and quote chars are reserved, but + // otherwise any punctuation chars are allowed + // in a tag name. + default: + if !unicode.IsLetter(c) && !unicode.IsDigit(c) { + return false + } + } + } + return true +} + +const ( + caseMask = ^byte(0x20) // Mask to ignore case in ASCII. + kelvin = '\u212a' + smallLongEss = '\u017f' +) + +// foldFunc returns one of four different case folding equivalence +// functions, from most general (and slow) to fastest: +// +// 1) bytes.EqualFold, if the key s contains any non-ASCII UTF-8 +// 2) equalFoldRight, if s contains special folding ASCII ('k', 'K', 's', 'S') +// 3) asciiEqualFold, no special, but includes non-letters (including _) +// 4) simpleLetterEqualFold, no specials, no non-letters. 
+// +// The letters S and K are special because they map to 3 runes, not just 2: +// * S maps to s and to U+017F 'ſ' Latin small letter long s +// * k maps to K and to U+212A 'K' Kelvin sign +// See http://play.golang.org/p/tTxjOc0OGo +// +// The returned function is specialized for matching against s and +// should only be given s. It's not curried for performance reasons. +func foldFunc(s []byte) func(s, t []byte) bool { + nonLetter := false + special := false // special letter + for _, b := range s { + if b >= utf8.RuneSelf { + return bytes.EqualFold + } + upper := b & caseMask + if upper < 'A' || upper > 'Z' { + nonLetter = true + } else if upper == 'K' || upper == 'S' { + // See above for why these letters are special. + special = true + } + } + if special { + return equalFoldRight + } + if nonLetter { + return asciiEqualFold + } + return simpleLetterEqualFold +} + +// equalFoldRight is a specialization of bytes.EqualFold when s is +// known to be all ASCII (including punctuation), but contains an 's', +// 'S', 'k', or 'K', requiring a Unicode fold on the bytes in t. +// See comments on foldFunc. +func equalFoldRight(s, t []byte) bool { + for _, sb := range s { + if len(t) == 0 { + return false + } + tb := t[0] + if tb < utf8.RuneSelf { + if sb != tb { + sbUpper := sb & caseMask + if 'A' <= sbUpper && sbUpper <= 'Z' { + if sbUpper != tb&caseMask { + return false + } + } else { + return false + } + } + t = t[1:] + continue + } + // sb is ASCII and t is not. t must be either kelvin + // sign or long s; sb must be s, S, k, or K. + tr, size := utf8.DecodeRune(t) + switch sb { + case 's', 'S': + if tr != smallLongEss { + return false + } + case 'k', 'K': + if tr != kelvin { + return false + } + default: + return false + } + t = t[size:] + + } + if len(t) > 0 { + return false + } + return true +} + +// asciiEqualFold is a specialization of bytes.EqualFold for use when +// s is all ASCII (but may contain non-letters) and contains no +// special-folding letters. +// See comments on foldFunc. +func asciiEqualFold(s, t []byte) bool { + if len(s) != len(t) { + return false + } + for i, sb := range s { + tb := t[i] + if sb == tb { + continue + } + if ('a' <= sb && sb <= 'z') || ('A' <= sb && sb <= 'Z') { + if sb&caseMask != tb&caseMask { + return false + } + } else { + return false + } + } + return true +} + +// simpleLetterEqualFold is a specialization of bytes.EqualFold for +// use when s is all ASCII letters (no underscores, etc) and also +// doesn't contain 'k', 'K', 's', or 'S'. +// See comments on foldFunc. +func simpleLetterEqualFold(s, t []byte) bool { + if len(s) != len(t) { + return false + } + for i, b := range s { + if b&caseMask != t[i]&caseMask { + return false + } + } + return true +} + +// tagOptions is the string following a comma in a struct field's "json" +// tag, or the empty string. It does not include the leading comma. +type tagOptions string + +// parseTag splits a struct field's json tag into its name and +// comma-separated options. +func parseTag(tag string) (string, tagOptions) { + if idx := strings.Index(tag, ","); idx != -1 { + return tag[:idx], tagOptions(tag[idx+1:]) + } + return tag, tagOptions("") +} + +// Contains reports whether a comma-separated list of options +// contains a particular substr flag. substr must be surrounded by a +// string boundary or commas. 
+func (o tagOptions) Contains(optionName string) bool { + if len(o) == 0 { + return false + } + s := string(o) + for s != "" { + var next string + i := strings.Index(s, ",") + if i >= 0 { + s, next = s[:i], s[i+1:] + } + if s == optionName { + return true + } + s = next + } + return false +} diff --git a/vendor/github.com/ghodss/yaml/yaml.go b/vendor/github.com/ghodss/yaml/yaml.go new file mode 100644 index 0000000..4fb4054 --- /dev/null +++ b/vendor/github.com/ghodss/yaml/yaml.go @@ -0,0 +1,277 @@ +package yaml + +import ( + "bytes" + "encoding/json" + "fmt" + "reflect" + "strconv" + + "gopkg.in/yaml.v2" +) + +// Marshals the object into JSON then converts JSON to YAML and returns the +// YAML. +func Marshal(o interface{}) ([]byte, error) { + j, err := json.Marshal(o) + if err != nil { + return nil, fmt.Errorf("error marshaling into JSON: %v", err) + } + + y, err := JSONToYAML(j) + if err != nil { + return nil, fmt.Errorf("error converting JSON to YAML: %v", err) + } + + return y, nil +} + +// Converts YAML to JSON then uses JSON to unmarshal into an object. +func Unmarshal(y []byte, o interface{}) error { + vo := reflect.ValueOf(o) + j, err := yamlToJSON(y, &vo) + if err != nil { + return fmt.Errorf("error converting YAML to JSON: %v", err) + } + + err = json.Unmarshal(j, o) + if err != nil { + return fmt.Errorf("error unmarshaling JSON: %v", err) + } + + return nil +} + +// Convert JSON to YAML. +func JSONToYAML(j []byte) ([]byte, error) { + // Convert the JSON to an object. + var jsonObj interface{} + // We are using yaml.Unmarshal here (instead of json.Unmarshal) because the + // Go JSON library doesn't try to pick the right number type (int, float, + // etc.) when unmarshalling to interface{}, it just picks float64 + // universally. go-yaml does go through the effort of picking the right + // number type, so we can preserve number type throughout this process. + err := yaml.Unmarshal(j, &jsonObj) + if err != nil { + return nil, err + } + + // Marshal this object into YAML. + return yaml.Marshal(jsonObj) +} + +// Convert YAML to JSON. Since JSON is a subset of YAML, passing JSON through +// this method should be a no-op. +// +// Things YAML can do that are not supported by JSON: +// * In YAML you can have binary and null keys in your maps. These are invalid +// in JSON. (int and float keys are converted to strings.) +// * Binary data in YAML with the !!binary tag is not supported. If you want to +// use binary data with this library, encode the data as base64 as usual but do +// not use the !!binary tag in your YAML. This will ensure the original base64 +// encoded data makes it all the way through to the JSON. +func YAMLToJSON(y []byte) ([]byte, error) { + return yamlToJSON(y, nil) +} + +func yamlToJSON(y []byte, jsonTarget *reflect.Value) ([]byte, error) { + // Convert the YAML to an object. + var yamlObj interface{} + err := yaml.Unmarshal(y, &yamlObj) + if err != nil { + return nil, err + } + + // YAML objects are not completely compatible with JSON objects (e.g. you + // can have non-string keys in YAML). So, convert the YAML-compatible object + // to a JSON-compatible object, failing with an error if irrecoverable + // incompatibilties happen along the way. + jsonObj, err := convertToJSONableObject(yamlObj, jsonTarget) + if err != nil { + return nil, err + } + + // Convert this object to JSON and return the data. 
+ return json.Marshal(jsonObj) +} + +func convertToJSONableObject(yamlObj interface{}, jsonTarget *reflect.Value) (interface{}, error) { + var err error + + // Resolve jsonTarget to a concrete value (i.e. not a pointer or an + // interface). We pass decodingNull as false because we're not actually + // decoding into the value, we're just checking if the ultimate target is a + // string. + if jsonTarget != nil { + ju, tu, pv := indirect(*jsonTarget, false) + // We have a JSON or Text Umarshaler at this level, so we can't be trying + // to decode into a string. + if ju != nil || tu != nil { + jsonTarget = nil + } else { + jsonTarget = &pv + } + } + + // If yamlObj is a number or a boolean, check if jsonTarget is a string - + // if so, coerce. Else return normal. + // If yamlObj is a map or array, find the field that each key is + // unmarshaling to, and when you recurse pass the reflect.Value for that + // field back into this function. + switch typedYAMLObj := yamlObj.(type) { + case map[interface{}]interface{}: + // JSON does not support arbitrary keys in a map, so we must convert + // these keys to strings. + // + // From my reading of go-yaml v2 (specifically the resolve function), + // keys can only have the types string, int, int64, float64, binary + // (unsupported), or null (unsupported). + strMap := make(map[string]interface{}) + for k, v := range typedYAMLObj { + // Resolve the key to a string first. + var keyString string + switch typedKey := k.(type) { + case string: + keyString = typedKey + case int: + keyString = strconv.Itoa(typedKey) + case int64: + // go-yaml will only return an int64 as a key if the system + // architecture is 32-bit and the key's value is between 32-bit + // and 64-bit. Otherwise the key type will simply be int. + keyString = strconv.FormatInt(typedKey, 10) + case float64: + // Stolen from go-yaml to use the same conversion to string as + // the go-yaml library uses to convert float to string when + // Marshaling. + s := strconv.FormatFloat(typedKey, 'g', -1, 32) + switch s { + case "+Inf": + s = ".inf" + case "-Inf": + s = "-.inf" + case "NaN": + s = ".nan" + } + keyString = s + case bool: + if typedKey { + keyString = "true" + } else { + keyString = "false" + } + default: + return nil, fmt.Errorf("Unsupported map key of type: %s, key: %+#v, value: %+#v", + reflect.TypeOf(k), k, v) + } + + // jsonTarget should be a struct or a map. If it's a struct, find + // the field it's going to map to and pass its reflect.Value. If + // it's a map, find the element type of the map and pass the + // reflect.Value created from that type. If it's neither, just pass + // nil - JSON conversion will error for us if it's a real issue. + if jsonTarget != nil { + t := *jsonTarget + if t.Kind() == reflect.Struct { + keyBytes := []byte(keyString) + // Find the field that the JSON library would use. + var f *field + fields := cachedTypeFields(t.Type()) + for i := range fields { + ff := &fields[i] + if bytes.Equal(ff.nameBytes, keyBytes) { + f = ff + break + } + // Do case-insensitive comparison. + if f == nil && ff.equalFold(ff.nameBytes, keyBytes) { + f = ff + } + } + if f != nil { + // Find the reflect.Value of the most preferential + // struct field. + jtf := t.Field(f.index[0]) + strMap[keyString], err = convertToJSONableObject(v, &jtf) + if err != nil { + return nil, err + } + continue + } + } else if t.Kind() == reflect.Map { + // Create a zero value of the map's element type to use as + // the JSON target. 
+ jtv := reflect.Zero(t.Type().Elem()) + strMap[keyString], err = convertToJSONableObject(v, &jtv) + if err != nil { + return nil, err + } + continue + } + } + strMap[keyString], err = convertToJSONableObject(v, nil) + if err != nil { + return nil, err + } + } + return strMap, nil + case []interface{}: + // We need to recurse into arrays in case there are any + // map[interface{}]interface{}'s inside and to convert any + // numbers to strings. + + // If jsonTarget is a slice (which it really should be), find the + // thing it's going to map to. If it's not a slice, just pass nil + // - JSON conversion will error for us if it's a real issue. + var jsonSliceElemValue *reflect.Value + if jsonTarget != nil { + t := *jsonTarget + if t.Kind() == reflect.Slice { + // By default slices point to nil, but we need a reflect.Value + // pointing to a value of the slice type, so we create one here. + ev := reflect.Indirect(reflect.New(t.Type().Elem())) + jsonSliceElemValue = &ev + } + } + + // Make and use a new array. + arr := make([]interface{}, len(typedYAMLObj)) + for i, v := range typedYAMLObj { + arr[i], err = convertToJSONableObject(v, jsonSliceElemValue) + if err != nil { + return nil, err + } + } + return arr, nil + default: + // If the target type is a string and the YAML type is a number, + // convert the YAML type to a string. + if jsonTarget != nil && (*jsonTarget).Kind() == reflect.String { + // Based on my reading of go-yaml, it may return int, int64, + // float64, or uint64. + var s string + switch typedVal := typedYAMLObj.(type) { + case int: + s = strconv.FormatInt(int64(typedVal), 10) + case int64: + s = strconv.FormatInt(typedVal, 10) + case float64: + s = strconv.FormatFloat(typedVal, 'g', -1, 32) + case uint64: + s = strconv.FormatUint(typedVal, 10) + case bool: + if typedVal { + s = "true" + } else { + s = "false" + } + } + if len(s) > 0 { + yamlObj = interface{}(s) + } + } + return yamlObj, nil + } + + return nil, nil +} diff --git a/vendor/github.com/go-gorp/gorp/v3/db.go b/vendor/github.com/go-gorp/gorp/v3/db.go index 249ad6f..d78062e 100644 --- a/vendor/github.com/go-gorp/gorp/v3/db.go +++ b/vendor/github.com/go-gorp/gorp/v3/db.go @@ -24,8 +24,9 @@ import ( // // Example: // -// dialect := gorp.MySQLDialect{"InnoDB", "UTF8"} -// dbmap := &gorp.DbMap{Db: db, Dialect: dialect} +// dialect := gorp.MySQLDialect{"InnoDB", "UTF8"} +// dbmap := &gorp.DbMap{Db: db, Dialect: dialect} +// type DbMap struct { ctx context.Context diff --git a/vendor/github.com/go-gorp/gorp/v3/dialect_snowflake.go b/vendor/github.com/go-gorp/gorp/v3/dialect_snowflake.go index 6335e52..2e2cb89 100644 --- a/vendor/github.com/go-gorp/gorp/v3/dialect_snowflake.go +++ b/vendor/github.com/go-gorp/gorp/v3/dialect_snowflake.go @@ -5,148 +5,148 @@ package gorp import ( - "fmt" - "reflect" - "strings" + "fmt" + "reflect" + "strings" ) type SnowflakeDialect struct { - suffix string - LowercaseFields bool + suffix string + LowercaseFields bool } func (d SnowflakeDialect) QuerySuffix() string { return ";" } func (d SnowflakeDialect) ToSqlType(val reflect.Type, maxsize int, isAutoIncr bool) string { - switch val.Kind() { - case reflect.Ptr: - return d.ToSqlType(val.Elem(), maxsize, isAutoIncr) - case reflect.Bool: - return "boolean" - case reflect.Int, - reflect.Int8, - reflect.Int16, - reflect.Int32, - reflect.Uint, - reflect.Uint8, - reflect.Uint16, - reflect.Uint32: + switch val.Kind() { + case reflect.Ptr: + return d.ToSqlType(val.Elem(), maxsize, isAutoIncr) + case reflect.Bool: + return "boolean" + case 
reflect.Int, + reflect.Int8, + reflect.Int16, + reflect.Int32, + reflect.Uint, + reflect.Uint8, + reflect.Uint16, + reflect.Uint32: - if isAutoIncr { - return "serial" - } - return "integer" - case reflect.Int64, reflect.Uint64: - if isAutoIncr { - return "bigserial" - } - return "bigint" - case reflect.Float64: - return "double precision" - case reflect.Float32: - return "real" - case reflect.Slice: - if val.Elem().Kind() == reflect.Uint8 { - return "binary" - } - } + if isAutoIncr { + return "serial" + } + return "integer" + case reflect.Int64, reflect.Uint64: + if isAutoIncr { + return "bigserial" + } + return "bigint" + case reflect.Float64: + return "double precision" + case reflect.Float32: + return "real" + case reflect.Slice: + if val.Elem().Kind() == reflect.Uint8 { + return "binary" + } + } - switch val.Name() { - case "NullInt64": - return "bigint" - case "NullFloat64": - return "double precision" - case "NullBool": - return "boolean" - case "Time", "NullTime": - return "timestamp with time zone" - } + switch val.Name() { + case "NullInt64": + return "bigint" + case "NullFloat64": + return "double precision" + case "NullBool": + return "boolean" + case "Time", "NullTime": + return "timestamp with time zone" + } - if maxsize > 0 { - return fmt.Sprintf("varchar(%d)", maxsize) - } else { - return "text" - } + if maxsize > 0 { + return fmt.Sprintf("varchar(%d)", maxsize) + } else { + return "text" + } } // Returns empty string func (d SnowflakeDialect) AutoIncrStr() string { - return "" + return "" } func (d SnowflakeDialect) AutoIncrBindValue() string { - return "default" + return "default" } func (d SnowflakeDialect) AutoIncrInsertSuffix(col *ColumnMap) string { - return "" + return "" } // Returns suffix func (d SnowflakeDialect) CreateTableSuffix() string { - return d.suffix + return d.suffix } func (d SnowflakeDialect) CreateIndexSuffix() string { - return "" + return "" } func (d SnowflakeDialect) DropIndexSuffix() string { - return "" + return "" } func (d SnowflakeDialect) TruncateClause() string { - return "truncate" + return "truncate" } // Returns "$(i+1)" func (d SnowflakeDialect) BindVar(i int) string { - return "?" + return "?" } func (d SnowflakeDialect) InsertAutoIncrToTarget(exec SqlExecutor, insertSql string, target interface{}, params ...interface{}) error { - rows, err := exec.Query(insertSql, params...) - if err != nil { - return err - } - defer rows.Close() + rows, err := exec.Query(insertSql, params...) 
+ if err != nil { + return err + } + defer rows.Close() - if !rows.Next() { - return fmt.Errorf("No serial value returned for insert: %s Encountered error: %s", insertSql, rows.Err()) - } - if err := rows.Scan(target); err != nil { - return err - } - if rows.Next() { - return fmt.Errorf("more than two serial value returned for insert: %s", insertSql) - } - return rows.Err() + if !rows.Next() { + return fmt.Errorf("No serial value returned for insert: %s Encountered error: %s", insertSql, rows.Err()) + } + if err := rows.Scan(target); err != nil { + return err + } + if rows.Next() { + return fmt.Errorf("more than two serial value returned for insert: %s", insertSql) + } + return rows.Err() } func (d SnowflakeDialect) QuoteField(f string) string { - if d.LowercaseFields { - return `"` + strings.ToLower(f) + `"` - } - return `"` + f + `"` + if d.LowercaseFields { + return `"` + strings.ToLower(f) + `"` + } + return `"` + f + `"` } func (d SnowflakeDialect) QuotedTableForQuery(schema string, table string) string { - if strings.TrimSpace(schema) == "" { - return d.QuoteField(table) - } + if strings.TrimSpace(schema) == "" { + return d.QuoteField(table) + } - return schema + "." + d.QuoteField(table) + return schema + "." + d.QuoteField(table) } func (d SnowflakeDialect) IfSchemaNotExists(command, schema string) string { - return fmt.Sprintf("%s if not exists", command) + return fmt.Sprintf("%s if not exists", command) } func (d SnowflakeDialect) IfTableExists(command, schema, table string) string { - return fmt.Sprintf("%s if exists", command) + return fmt.Sprintf("%s if exists", command) } func (d SnowflakeDialect) IfTableNotExists(command, schema, table string) string { - return fmt.Sprintf("%s if not exists", command) + return fmt.Sprintf("%s if not exists", command) } diff --git a/vendor/github.com/go-gorp/gorp/v3/select.go b/vendor/github.com/go-gorp/gorp/v3/select.go index 4b89641..2d2d596 100644 --- a/vendor/github.com/go-gorp/gorp/v3/select.go +++ b/vendor/github.com/go-gorp/gorp/v3/select.go @@ -86,9 +86,10 @@ func SelectNullStr(e SqlExecutor, query string, args ...interface{}) (sql.NullSt // SelectOne executes the given query (which should be a SELECT statement) // and binds the result to holder, which must be a pointer. // -// # If no row is found, an error (sql.ErrNoRows specifically) will be returned +// If no row is found, an error (sql.ErrNoRows specifically) will be returned // // If more than one row is found, an error will be returned. +// func SelectOne(m *DbMap, e SqlExecutor, holder interface{}, query string, args ...interface{}) error { t := reflect.TypeOf(holder) if t.Kind() == reflect.Ptr { diff --git a/vendor/github.com/go-gorp/gorp/v3/table.go b/vendor/github.com/go-gorp/gorp/v3/table.go index edab08a..5931b2d 100644 --- a/vendor/github.com/go-gorp/gorp/v3/table.go +++ b/vendor/github.com/go-gorp/gorp/v3/table.go @@ -47,6 +47,7 @@ func (t *TableMap) ResetSql() { // Automatically calls ResetSql() to ensure SQL statements are regenerated. // // Panics if isAutoIncr is true, and fieldNames length != 1 +// func (t *TableMap) SetKeys(isAutoIncr bool, fieldNames ...string) *TableMap { if isAutoIncr && len(fieldNames) != 1 { panic(fmt.Sprintf( @@ -72,6 +73,7 @@ func (t *TableMap) SetKeys(isAutoIncr bool, fieldNames ...string) *TableMap { // Automatically calls ResetSql() to ensure SQL statements are regenerated. // // Panics if fieldNames length < 2. 
+// func (t *TableMap) SetUniqueTogether(fieldNames ...string) *TableMap { if len(fieldNames) < 2 { panic(fmt.Sprintf( @@ -133,6 +135,7 @@ func (t *TableMap) IdxMap(field string) *IndexMap { // Function will panic if one of the given for index columns does not exists // // Automatically calls ResetSql() to ensure SQL statements are regenerated. +// func (t *TableMap) AddIndex(name string, idxtype string, columns []string) *IndexMap { // check if we have a index with this name already for _, idx := range t.indexes { diff --git a/vendor/github.com/go-openapi/jsonpointer/.editorconfig b/vendor/github.com/go-openapi/jsonpointer/.editorconfig new file mode 100644 index 0000000..3152da6 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonpointer/.editorconfig @@ -0,0 +1,26 @@ +# top-most EditorConfig file +root = true + +# Unix-style newlines with a newline ending every file +[*] +end_of_line = lf +insert_final_newline = true +indent_style = space +indent_size = 2 +trim_trailing_whitespace = true + +# Set default charset +[*.{js,py,go,scala,rb,java,html,css,less,sass,md}] +charset = utf-8 + +# Tab indentation (no size specified) +[*.go] +indent_style = tab + +[*.md] +trim_trailing_whitespace = false + +# Matches the exact files either package.json or .travis.yml +[{package.json,.travis.yml}] +indent_style = space +indent_size = 2 diff --git a/vendor/github.com/go-openapi/jsonpointer/.gitignore b/vendor/github.com/go-openapi/jsonpointer/.gitignore new file mode 100644 index 0000000..769c244 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonpointer/.gitignore @@ -0,0 +1 @@ +secrets.yml diff --git a/vendor/github.com/go-openapi/jsonpointer/.golangci.yml b/vendor/github.com/go-openapi/jsonpointer/.golangci.yml new file mode 100644 index 0000000..22f8d21 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonpointer/.golangci.yml @@ -0,0 +1,61 @@ +linters-settings: + govet: + check-shadowing: true + golint: + min-confidence: 0 + gocyclo: + min-complexity: 45 + maligned: + suggest-new: true + dupl: + threshold: 200 + goconst: + min-len: 2 + min-occurrences: 3 + +linters: + enable-all: true + disable: + - maligned + - unparam + - lll + - gochecknoinits + - gochecknoglobals + - funlen + - godox + - gocognit + - whitespace + - wsl + - wrapcheck + - testpackage + - nlreturn + - gomnd + - exhaustivestruct + - goerr113 + - errorlint + - nestif + - godot + - gofumpt + - paralleltest + - tparallel + - thelper + - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..9322b06 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md @@ -0,0 +1,74 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. 
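
The go-gorp hunks above are comment- and whitespace-only, but they touch the table-mapping API (`SetKeys`, `SetUniqueTogether`, `AddIndex`) and the `SelectOne` helper. For orientation, a minimal, hedged sketch of how that API is typically wired together — the `Person` struct, table name, DSN, and MySQL driver are assumptions, while the dialect/`DbMap` literals mirror the doc comment in the vendored `db.go`:

```go
package main

import (
	"database/sql"
	"log"

	"github.com/go-gorp/gorp/v3"
	_ "github.com/go-sql-driver/mysql" // hypothetical driver choice
)

// Person is a hypothetical mapped struct; gorp uses the field names as columns by default.
type Person struct {
	Id   int64
	Name string
}

func main() {
	db, err := sql.Open("mysql", "user:pass@/demo") // placeholder DSN
	if err != nil {
		log.Fatal(err)
	}

	// Mirrors the example in the vendored db.go doc comment.
	dialect := gorp.MySQLDialect{"InnoDB", "UTF8"}
	dbmap := &gorp.DbMap{Db: db, Dialect: dialect}

	// SetKeys(true, ...) marks an auto-increment primary key, so exactly one field name is allowed.
	dbmap.AddTableWithName(Person{}, "persons").SetKeys(true, "Id")

	// SelectOne returns sql.ErrNoRows when no row matches (see the select.go doc comment above).
	var p Person
	if err := dbmap.SelectOne(&p, "select * from persons where Id=?", 1); err != nil {
		log.Fatal(err)
	}
	log.Println(p.Name)
}
```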
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at ivan+abuse@flanders.co.nz. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/go-openapi/jsonpointer/LICENSE b/vendor/github.com/go-openapi/jsonpointer/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonpointer/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/vendor/github.com/go-openapi/jsonpointer/README.md b/vendor/github.com/go-openapi/jsonpointer/README.md new file mode 100644 index 0000000..0108f1d --- /dev/null +++ b/vendor/github.com/go-openapi/jsonpointer/README.md @@ -0,0 +1,19 @@ +# gojsonpointer [![Build Status](https://github.com/go-openapi/jsonpointer/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/jsonpointer/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/jsonpointer/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/jsonpointer) + +[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/jsonpointer/master/LICENSE) +[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/jsonpointer.svg)](https://pkg.go.dev/github.com/go-openapi/jsonpointer) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/jsonpointer)](https://goreportcard.com/report/github.com/go-openapi/jsonpointer) + +An implementation of JSON Pointer - Go language + +## Status +Completed YES + +Tested YES + +## References +http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07 + +### Note +The 4.Evaluation part of the previous reference, starting with 'If the currently referenced value is a JSON array, the reference token MUST contain either...' is not implemented. diff --git a/vendor/github.com/go-openapi/jsonpointer/pointer.go b/vendor/github.com/go-openapi/jsonpointer/pointer.go new file mode 100644 index 0000000..d970c7c --- /dev/null +++ b/vendor/github.com/go-openapi/jsonpointer/pointer.go @@ -0,0 +1,531 @@ +// Copyright 2013 sigu-399 ( https://github.com/sigu-399 ) +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// author sigu-399 +// author-github https://github.com/sigu-399 +// author-mail sigu.399@gmail.com +// +// repository-name jsonpointer +// repository-desc An implementation of JSON Pointer - Go language +// +// description Main and unique file. 
+// +// created 25-02-2013 + +package jsonpointer + +import ( + "encoding/json" + "errors" + "fmt" + "reflect" + "strconv" + "strings" + + "github.com/go-openapi/swag" +) + +const ( + emptyPointer = `` + pointerSeparator = `/` + + invalidStart = `JSON pointer must be empty or start with a "` + pointerSeparator + notFound = `Can't find the pointer in the document` +) + +var jsonPointableType = reflect.TypeOf(new(JSONPointable)).Elem() +var jsonSetableType = reflect.TypeOf(new(JSONSetable)).Elem() + +// JSONPointable is an interface for structs to implement when they need to customize the +// json pointer process +type JSONPointable interface { + JSONLookup(string) (any, error) +} + +// JSONSetable is an interface for structs to implement when they need to customize the +// json pointer process +type JSONSetable interface { + JSONSet(string, any) error +} + +// New creates a new json pointer for the given string +func New(jsonPointerString string) (Pointer, error) { + + var p Pointer + err := p.parse(jsonPointerString) + return p, err + +} + +// Pointer the json pointer reprsentation +type Pointer struct { + referenceTokens []string +} + +// "Constructor", parses the given string JSON pointer +func (p *Pointer) parse(jsonPointerString string) error { + + var err error + + if jsonPointerString != emptyPointer { + if !strings.HasPrefix(jsonPointerString, pointerSeparator) { + err = errors.New(invalidStart) + } else { + referenceTokens := strings.Split(jsonPointerString, pointerSeparator) + p.referenceTokens = append(p.referenceTokens, referenceTokens[1:]...) + } + } + + return err +} + +// Get uses the pointer to retrieve a value from a JSON document +func (p *Pointer) Get(document any) (any, reflect.Kind, error) { + return p.get(document, swag.DefaultJSONNameProvider) +} + +// Set uses the pointer to set a value from a JSON document +func (p *Pointer) Set(document any, value any) (any, error) { + return document, p.set(document, value, swag.DefaultJSONNameProvider) +} + +// GetForToken gets a value for a json pointer token 1 level deep +func GetForToken(document any, decodedToken string) (any, reflect.Kind, error) { + return getSingleImpl(document, decodedToken, swag.DefaultJSONNameProvider) +} + +// SetForToken gets a value for a json pointer token 1 level deep +func SetForToken(document any, decodedToken string, value any) (any, error) { + return document, setSingleImpl(document, value, decodedToken, swag.DefaultJSONNameProvider) +} + +func isNil(input any) bool { + if input == nil { + return true + } + + kind := reflect.TypeOf(input).Kind() + switch kind { //nolint:exhaustive + case reflect.Ptr, reflect.Map, reflect.Slice, reflect.Chan: + return reflect.ValueOf(input).IsNil() + default: + return false + } +} + +func getSingleImpl(node any, decodedToken string, nameProvider *swag.NameProvider) (any, reflect.Kind, error) { + rValue := reflect.Indirect(reflect.ValueOf(node)) + kind := rValue.Kind() + if isNil(node) { + return nil, kind, fmt.Errorf("nil value has not field %q", decodedToken) + } + + switch typed := node.(type) { + case JSONPointable: + r, err := typed.JSONLookup(decodedToken) + if err != nil { + return nil, kind, err + } + return r, kind, nil + case *any: // case of a pointer to interface, that is not resolved by reflect.Indirect + return getSingleImpl(*typed, decodedToken, nameProvider) + } + + switch kind { //nolint:exhaustive + case reflect.Struct: + nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken) + if !ok { + return nil, kind, fmt.Errorf("object has no 
field %q", decodedToken) + } + fld := rValue.FieldByName(nm) + return fld.Interface(), kind, nil + + case reflect.Map: + kv := reflect.ValueOf(decodedToken) + mv := rValue.MapIndex(kv) + + if mv.IsValid() { + return mv.Interface(), kind, nil + } + return nil, kind, fmt.Errorf("object has no key %q", decodedToken) + + case reflect.Slice: + tokenIndex, err := strconv.Atoi(decodedToken) + if err != nil { + return nil, kind, err + } + sLength := rValue.Len() + if tokenIndex < 0 || tokenIndex >= sLength { + return nil, kind, fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength-1, tokenIndex) + } + + elem := rValue.Index(tokenIndex) + return elem.Interface(), kind, nil + + default: + return nil, kind, fmt.Errorf("invalid token reference %q", decodedToken) + } + +} + +func setSingleImpl(node, data any, decodedToken string, nameProvider *swag.NameProvider) error { + rValue := reflect.Indirect(reflect.ValueOf(node)) + + if ns, ok := node.(JSONSetable); ok { // pointer impl + return ns.JSONSet(decodedToken, data) + } + + if rValue.Type().Implements(jsonSetableType) { + return node.(JSONSetable).JSONSet(decodedToken, data) + } + + switch rValue.Kind() { //nolint:exhaustive + case reflect.Struct: + nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken) + if !ok { + return fmt.Errorf("object has no field %q", decodedToken) + } + fld := rValue.FieldByName(nm) + if fld.IsValid() { + fld.Set(reflect.ValueOf(data)) + } + return nil + + case reflect.Map: + kv := reflect.ValueOf(decodedToken) + rValue.SetMapIndex(kv, reflect.ValueOf(data)) + return nil + + case reflect.Slice: + tokenIndex, err := strconv.Atoi(decodedToken) + if err != nil { + return err + } + sLength := rValue.Len() + if tokenIndex < 0 || tokenIndex >= sLength { + return fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength, tokenIndex) + } + + elem := rValue.Index(tokenIndex) + if !elem.CanSet() { + return fmt.Errorf("can't set slice index %s to %v", decodedToken, data) + } + elem.Set(reflect.ValueOf(data)) + return nil + + default: + return fmt.Errorf("invalid token reference %q", decodedToken) + } + +} + +func (p *Pointer) get(node any, nameProvider *swag.NameProvider) (any, reflect.Kind, error) { + + if nameProvider == nil { + nameProvider = swag.DefaultJSONNameProvider + } + + kind := reflect.Invalid + + // Full document when empty + if len(p.referenceTokens) == 0 { + return node, kind, nil + } + + for _, token := range p.referenceTokens { + + decodedToken := Unescape(token) + + r, knd, err := getSingleImpl(node, decodedToken, nameProvider) + if err != nil { + return nil, knd, err + } + node = r + } + + rValue := reflect.ValueOf(node) + kind = rValue.Kind() + + return node, kind, nil +} + +func (p *Pointer) set(node, data any, nameProvider *swag.NameProvider) error { + knd := reflect.ValueOf(node).Kind() + + if knd != reflect.Ptr && knd != reflect.Struct && knd != reflect.Map && knd != reflect.Slice && knd != reflect.Array { + return errors.New("only structs, pointers, maps and slices are supported for setting values") + } + + if nameProvider == nil { + nameProvider = swag.DefaultJSONNameProvider + } + + // Full document when empty + if len(p.referenceTokens) == 0 { + return nil + } + + lastI := len(p.referenceTokens) - 1 + for i, token := range p.referenceTokens { + isLastToken := i == lastI + decodedToken := Unescape(token) + + if isLastToken { + + return setSingleImpl(node, data, decodedToken, nameProvider) + } + + rValue := reflect.Indirect(reflect.ValueOf(node)) + kind := rValue.Kind() + + if 
rValue.Type().Implements(jsonPointableType) { + r, err := node.(JSONPointable).JSONLookup(decodedToken) + if err != nil { + return err + } + fld := reflect.ValueOf(r) + if fld.CanAddr() && fld.Kind() != reflect.Interface && fld.Kind() != reflect.Map && fld.Kind() != reflect.Slice && fld.Kind() != reflect.Ptr { + node = fld.Addr().Interface() + continue + } + node = r + continue + } + + switch kind { //nolint:exhaustive + case reflect.Struct: + nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken) + if !ok { + return fmt.Errorf("object has no field %q", decodedToken) + } + fld := rValue.FieldByName(nm) + if fld.CanAddr() && fld.Kind() != reflect.Interface && fld.Kind() != reflect.Map && fld.Kind() != reflect.Slice && fld.Kind() != reflect.Ptr { + node = fld.Addr().Interface() + continue + } + node = fld.Interface() + + case reflect.Map: + kv := reflect.ValueOf(decodedToken) + mv := rValue.MapIndex(kv) + + if !mv.IsValid() { + return fmt.Errorf("object has no key %q", decodedToken) + } + if mv.CanAddr() && mv.Kind() != reflect.Interface && mv.Kind() != reflect.Map && mv.Kind() != reflect.Slice && mv.Kind() != reflect.Ptr { + node = mv.Addr().Interface() + continue + } + node = mv.Interface() + + case reflect.Slice: + tokenIndex, err := strconv.Atoi(decodedToken) + if err != nil { + return err + } + sLength := rValue.Len() + if tokenIndex < 0 || tokenIndex >= sLength { + return fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength, tokenIndex) + } + + elem := rValue.Index(tokenIndex) + if elem.CanAddr() && elem.Kind() != reflect.Interface && elem.Kind() != reflect.Map && elem.Kind() != reflect.Slice && elem.Kind() != reflect.Ptr { + node = elem.Addr().Interface() + continue + } + node = elem.Interface() + + default: + return fmt.Errorf("invalid token reference %q", decodedToken) + } + + } + + return nil +} + +// DecodedTokens returns the decoded tokens +func (p *Pointer) DecodedTokens() []string { + result := make([]string, 0, len(p.referenceTokens)) + for _, t := range p.referenceTokens { + result = append(result, Unescape(t)) + } + return result +} + +// IsEmpty returns true if this is an empty json pointer +// this indicates that it points to the root document +func (p *Pointer) IsEmpty() bool { + return len(p.referenceTokens) == 0 +} + +// Pointer to string representation function +func (p *Pointer) String() string { + + if len(p.referenceTokens) == 0 { + return emptyPointer + } + + pointerString := pointerSeparator + strings.Join(p.referenceTokens, pointerSeparator) + + return pointerString +} + +func (p *Pointer) Offset(document string) (int64, error) { + dec := json.NewDecoder(strings.NewReader(document)) + var offset int64 + for _, ttk := range p.DecodedTokens() { + tk, err := dec.Token() + if err != nil { + return 0, err + } + switch tk := tk.(type) { + case json.Delim: + switch tk { + case '{': + offset, err = offsetSingleObject(dec, ttk) + if err != nil { + return 0, err + } + case '[': + offset, err = offsetSingleArray(dec, ttk) + if err != nil { + return 0, err + } + default: + return 0, fmt.Errorf("invalid token %#v", tk) + } + default: + return 0, fmt.Errorf("invalid token %#v", tk) + } + } + return offset, nil +} + +func offsetSingleObject(dec *json.Decoder, decodedToken string) (int64, error) { + for dec.More() { + offset := dec.InputOffset() + tk, err := dec.Token() + if err != nil { + return 0, err + } + switch tk := tk.(type) { + case json.Delim: + switch tk { + case '{': + if err = drainSingle(dec); err != nil { + return 0, err + } + case '[': + 
if err = drainSingle(dec); err != nil { + return 0, err + } + } + case string: + if tk == decodedToken { + return offset, nil + } + default: + return 0, fmt.Errorf("invalid token %#v", tk) + } + } + return 0, fmt.Errorf("token reference %q not found", decodedToken) +} + +func offsetSingleArray(dec *json.Decoder, decodedToken string) (int64, error) { + idx, err := strconv.Atoi(decodedToken) + if err != nil { + return 0, fmt.Errorf("token reference %q is not a number: %v", decodedToken, err) + } + var i int + for i = 0; i < idx && dec.More(); i++ { + tk, err := dec.Token() + if err != nil { + return 0, err + } + + if delim, isDelim := tk.(json.Delim); isDelim { + switch delim { + case '{': + if err = drainSingle(dec); err != nil { + return 0, err + } + case '[': + if err = drainSingle(dec); err != nil { + return 0, err + } + } + } + } + + if !dec.More() { + return 0, fmt.Errorf("token reference %q not found", decodedToken) + } + return dec.InputOffset(), nil +} + +// drainSingle drains a single level of object or array. +// The decoder has to guarantee the beginning delim (i.e. '{' or '[') has been consumed. +func drainSingle(dec *json.Decoder) error { + for dec.More() { + tk, err := dec.Token() + if err != nil { + return err + } + if delim, isDelim := tk.(json.Delim); isDelim { + switch delim { + case '{': + if err = drainSingle(dec); err != nil { + return err + } + case '[': + if err = drainSingle(dec); err != nil { + return err + } + } + } + } + + // Consumes the ending delim + if _, err := dec.Token(); err != nil { + return err + } + return nil +} + +// Specific JSON pointer encoding here +// ~0 => ~ +// ~1 => / +// ... and vice versa + +const ( + encRefTok0 = `~0` + encRefTok1 = `~1` + decRefTok0 = `~` + decRefTok1 = `/` +) + +// Unescape unescapes a json pointer reference token string to the original representation +func Unescape(token string) string { + step1 := strings.ReplaceAll(token, encRefTok1, decRefTok1) + step2 := strings.ReplaceAll(step1, encRefTok0, decRefTok0) + return step2 +} + +// Escape escapes a pointer reference token string +func Escape(token string) string { + step1 := strings.ReplaceAll(token, decRefTok0, encRefTok0) + step2 := strings.ReplaceAll(step1, decRefTok1, encRefTok1) + return step2 +} diff --git a/vendor/github.com/go-openapi/jsonreference/.gitignore b/vendor/github.com/go-openapi/jsonreference/.gitignore new file mode 100644 index 0000000..769c244 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonreference/.gitignore @@ -0,0 +1 @@ +secrets.yml diff --git a/vendor/github.com/go-openapi/jsonreference/.golangci.yml b/vendor/github.com/go-openapi/jsonreference/.golangci.yml new file mode 100644 index 0000000..22f8d21 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonreference/.golangci.yml @@ -0,0 +1,61 @@ +linters-settings: + govet: + check-shadowing: true + golint: + min-confidence: 0 + gocyclo: + min-complexity: 45 + maligned: + suggest-new: true + dupl: + threshold: 200 + goconst: + min-len: 2 + min-occurrences: 3 + +linters: + enable-all: true + disable: + - maligned + - unparam + - lll + - gochecknoinits + - gochecknoglobals + - funlen + - godox + - gocognit + - whitespace + - wsl + - wrapcheck + - testpackage + - nlreturn + - gomnd + - exhaustivestruct + - goerr113 + - errorlint + - nestif + - godot + - gofumpt + - paralleltest + - tparallel + - thelper + - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - 
deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/jsonreference/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/jsonreference/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..9322b06 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonreference/CODE_OF_CONDUCT.md @@ -0,0 +1,74 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at ivan+abuse@flanders.co.nz. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. 
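
The `pointer.go` file vendored above exposes `New`, `Get`, `Set`, `GetForToken`, and the `Escape`/`Unescape` helpers. A small, hedged usage sketch against an in-memory document (the sample data is invented); note that a `/` inside a key must be escaped as `~1`, per the RFC 6901 rules implemented at the bottom of that file:

```go
package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/jsonpointer"
)

func main() {
	// Hypothetical document; Get works on maps, slices and structs via reflection.
	doc := map[string]any{
		"paths": map[string]any{
			"/pets": map[string]any{
				"get": map[string]any{"summary": "list pets"},
			},
		},
	}

	// "/pets" contains the separator, so it is escaped to "~1pets".
	ptr, err := jsonpointer.New("/paths/" + jsonpointer.Escape("/pets") + "/get/summary")
	if err != nil {
		log.Fatal(err)
	}

	// Get returns the value, its reflect.Kind, and an error.
	val, kind, err := ptr.Get(doc)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(val, kind) // "list pets" string
}
```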
+ +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/go-openapi/jsonreference/LICENSE b/vendor/github.com/go-openapi/jsonreference/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonreference/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/go-openapi/jsonreference/README.md b/vendor/github.com/go-openapi/jsonreference/README.md new file mode 100644 index 0000000..c7fc204 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonreference/README.md @@ -0,0 +1,19 @@ +# gojsonreference [![Build Status](https://github.com/go-openapi/jsonreference/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/jsonreference/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/jsonreference/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/jsonreference) + +[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/jsonreference/master/LICENSE) +[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/jsonreference.svg)](https://pkg.go.dev/github.com/go-openapi/jsonreference) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/jsonreference)](https://goreportcard.com/report/github.com/go-openapi/jsonreference) + +An implementation of JSON Reference - Go language + +## Status +Feature complete. 
Stable API + +## Dependencies +* https://github.com/go-openapi/jsonpointer + +## References + +* http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07 +* http://tools.ietf.org/html/draft-pbryan-zyp-json-ref-03 diff --git a/vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go b/vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go new file mode 100644 index 0000000..f0610cf --- /dev/null +++ b/vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go @@ -0,0 +1,69 @@ +package internal + +import ( + "net/url" + "regexp" + "strings" +) + +const ( + defaultHTTPPort = ":80" + defaultHTTPSPort = ":443" +) + +// Regular expressions used by the normalizations +var rxPort = regexp.MustCompile(`(:\d+)/?$`) +var rxDupSlashes = regexp.MustCompile(`/{2,}`) + +// NormalizeURL will normalize the specified URL +// This was added to replace a previous call to the no longer maintained purell library: +// The call that was used looked like the following: +// +// url.Parse(purell.NormalizeURL(parsed, purell.FlagsSafe|purell.FlagRemoveDuplicateSlashes)) +// +// To explain all that was included in the call above, purell.FlagsSafe was really just the following: +// - FlagLowercaseScheme +// - FlagLowercaseHost +// - FlagRemoveDefaultPort +// - FlagRemoveDuplicateSlashes (and this was mixed in with the |) +// +// This also normalizes the URL into its urlencoded form by removing RawPath and RawFragment. +func NormalizeURL(u *url.URL) { + lowercaseScheme(u) + lowercaseHost(u) + removeDefaultPort(u) + removeDuplicateSlashes(u) + + u.RawPath = "" + u.RawFragment = "" +} + +func lowercaseScheme(u *url.URL) { + if len(u.Scheme) > 0 { + u.Scheme = strings.ToLower(u.Scheme) + } +} + +func lowercaseHost(u *url.URL) { + if len(u.Host) > 0 { + u.Host = strings.ToLower(u.Host) + } +} + +func removeDefaultPort(u *url.URL) { + if len(u.Host) > 0 { + scheme := strings.ToLower(u.Scheme) + u.Host = rxPort.ReplaceAllStringFunc(u.Host, func(val string) string { + if (scheme == "http" && val == defaultHTTPPort) || (scheme == "https" && val == defaultHTTPSPort) { + return "" + } + return val + }) + } +} + +func removeDuplicateSlashes(u *url.URL) { + if len(u.Path) > 0 { + u.Path = rxDupSlashes.ReplaceAllString(u.Path, "/") + } +} diff --git a/vendor/github.com/go-openapi/jsonreference/reference.go b/vendor/github.com/go-openapi/jsonreference/reference.go new file mode 100644 index 0000000..cfdef03 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonreference/reference.go @@ -0,0 +1,158 @@ +// Copyright 2013 sigu-399 ( https://github.com/sigu-399 ) +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// author sigu-399 +// author-github https://github.com/sigu-399 +// author-mail sigu.399@gmail.com +// +// repository-name jsonreference +// repository-desc An implementation of JSON Reference - Go language +// +// description Main and unique file. 
+// +// created 26-02-2013 + +package jsonreference + +import ( + "errors" + "net/url" + "strings" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/jsonreference/internal" +) + +const ( + fragmentRune = `#` +) + +// New creates a new reference for the given string +func New(jsonReferenceString string) (Ref, error) { + + var r Ref + err := r.parse(jsonReferenceString) + return r, err + +} + +// MustCreateRef parses the ref string and panics when it's invalid. +// Use the New method for a version that returns an error +func MustCreateRef(ref string) Ref { + r, err := New(ref) + if err != nil { + panic(err) + } + return r +} + +// Ref represents a json reference object +type Ref struct { + referenceURL *url.URL + referencePointer jsonpointer.Pointer + + HasFullURL bool + HasURLPathOnly bool + HasFragmentOnly bool + HasFileScheme bool + HasFullFilePath bool +} + +// GetURL gets the URL for this reference +func (r *Ref) GetURL() *url.URL { + return r.referenceURL +} + +// GetPointer gets the json pointer for this reference +func (r *Ref) GetPointer() *jsonpointer.Pointer { + return &r.referencePointer +} + +// String returns the best version of the url for this reference +func (r *Ref) String() string { + + if r.referenceURL != nil { + return r.referenceURL.String() + } + + if r.HasFragmentOnly { + return fragmentRune + r.referencePointer.String() + } + + return r.referencePointer.String() +} + +// IsRoot returns true if this reference is a root document +func (r *Ref) IsRoot() bool { + return r.referenceURL != nil && + !r.IsCanonical() && + !r.HasURLPathOnly && + r.referenceURL.Fragment == "" +} + +// IsCanonical returns true when this pointer starts with http(s):// or file:// +func (r *Ref) IsCanonical() bool { + return (r.HasFileScheme && r.HasFullFilePath) || (!r.HasFileScheme && r.HasFullURL) +} + +// "Constructor", parses the given string JSON reference +func (r *Ref) parse(jsonReferenceString string) error { + + parsed, err := url.Parse(jsonReferenceString) + if err != nil { + return err + } + + internal.NormalizeURL(parsed) + + r.referenceURL = parsed + refURL := r.referenceURL + + if refURL.Scheme != "" && refURL.Host != "" { + r.HasFullURL = true + } else { + if refURL.Path != "" { + r.HasURLPathOnly = true + } else if refURL.RawQuery == "" && refURL.Fragment != "" { + r.HasFragmentOnly = true + } + } + + r.HasFileScheme = refURL.Scheme == "file" + r.HasFullFilePath = strings.HasPrefix(refURL.Path, "/") + + // invalid json-pointer error means url has no json-pointer fragment. 
simply ignore error + r.referencePointer, _ = jsonpointer.New(refURL.Fragment) + + return nil +} + +// Inherits creates a new reference from a parent and a child +// If the child cannot inherit from the parent, an error is returned +func (r *Ref) Inherits(child Ref) (*Ref, error) { + childURL := child.GetURL() + parentURL := r.GetURL() + if childURL == nil { + return nil, errors.New("child url is nil") + } + if parentURL == nil { + return &child, nil + } + + ref, err := New(parentURL.ResolveReference(childURL).String()) + if err != nil { + return nil, err + } + return &ref, nil +} diff --git a/vendor/github.com/go-openapi/spec/.editorconfig b/vendor/github.com/go-openapi/spec/.editorconfig new file mode 100644 index 0000000..3152da6 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/.editorconfig @@ -0,0 +1,26 @@ +# top-most EditorConfig file +root = true + +# Unix-style newlines with a newline ending every file +[*] +end_of_line = lf +insert_final_newline = true +indent_style = space +indent_size = 2 +trim_trailing_whitespace = true + +# Set default charset +[*.{js,py,go,scala,rb,java,html,css,less,sass,md}] +charset = utf-8 + +# Tab indentation (no size specified) +[*.go] +indent_style = tab + +[*.md] +trim_trailing_whitespace = false + +# Matches the exact files either package.json or .travis.yml +[{package.json,.travis.yml}] +indent_style = space +indent_size = 2 diff --git a/vendor/github.com/go-openapi/spec/.gitignore b/vendor/github.com/go-openapi/spec/.gitignore new file mode 100644 index 0000000..f47cb20 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/.gitignore @@ -0,0 +1 @@ +*.out diff --git a/vendor/github.com/go-openapi/spec/.golangci.yml b/vendor/github.com/go-openapi/spec/.golangci.yml new file mode 100644 index 0000000..22f8d21 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/.golangci.yml @@ -0,0 +1,61 @@ +linters-settings: + govet: + check-shadowing: true + golint: + min-confidence: 0 + gocyclo: + min-complexity: 45 + maligned: + suggest-new: true + dupl: + threshold: 200 + goconst: + min-len: 2 + min-occurrences: 3 + +linters: + enable-all: true + disable: + - maligned + - unparam + - lll + - gochecknoinits + - gochecknoglobals + - funlen + - godox + - gocognit + - whitespace + - wsl + - wrapcheck + - testpackage + - nlreturn + - gomnd + - exhaustivestruct + - goerr113 + - errorlint + - nestif + - godot + - gofumpt + - paralleltest + - tparallel + - thelper + - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/spec/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/spec/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..9322b06 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/CODE_OF_CONDUCT.md @@ -0,0 +1,74 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at ivan+abuse@flanders.co.nz. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/go-openapi/spec/LICENSE b/vendor/github.com/go-openapi/spec/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/vendor/github.com/go-openapi/spec/README.md b/vendor/github.com/go-openapi/spec/README.md new file mode 100644 index 0000000..7fd2810 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/README.md @@ -0,0 +1,54 @@ +# OpenAPI v2 object model [![Build Status](https://github.com/go-openapi/spec/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/spec/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/spec/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/spec) + +[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/spec/master/LICENSE) +[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/spec.svg)](https://pkg.go.dev/github.com/go-openapi/spec) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/spec)](https://goreportcard.com/report/github.com/go-openapi/spec) + +The object model for OpenAPI specification documents. + +### FAQ + +* What does this do? + +> 1. This package knows how to marshal and unmarshal Swagger API specifications into a golang object model +> 2. It knows how to resolve $ref and expand them to make a single root document + +* How does it play with the rest of the go-openapi packages ? + +> 1. This package is at the core of the go-openapi suite of packages and [code generator](https://github.com/go-swagger/go-swagger) +> 2. There is a [spec loading package](https://github.com/go-openapi/loads) to fetch specs as JSON or YAML from local or remote locations +> 3. There is a [spec validation package](https://github.com/go-openapi/validate) built on top of it +> 4. There is a [spec analysis package](https://github.com/go-openapi/analysis) built on top of it, to analyze, flatten, fix and merge spec documents + +* Does this library support OpenAPI 3? + +> No. +> This package currently only supports OpenAPI 2.0 (aka Swagger 2.0). +> There is no plan to make it evolve toward supporting OpenAPI 3.x. +> This [discussion thread](https://github.com/go-openapi/spec/issues/21) relates the full story. +> +> An early attempt to support Swagger 3 may be found at: https://github.com/go-openapi/spec3 + +* Does the unmarshaling support YAML? + +> Not directly. The exposed types know only how to unmarshal from JSON. +> +> In order to load a YAML document as a Swagger spec, you need to use the loaders provided by +> github.com/go-openapi/loads +> +> Take a look at the example there: https://pkg.go.dev/github.com/go-openapi/loads#example-Spec +> +> See also https://github.com/go-openapi/spec/issues/164 + +* How can I validate a spec? + +> Validation is provided by [the validate package](http://github.com/go-openapi/validate) + +* Why do we have an `ID` field for `Schema` which is not part of the swagger spec? + +> We found jsonschema compatibility more important: since `id` in jsonschema influences +> how `$ref` are resolved. +> This `id` does not conflict with any property named `id`. 
+> +> See also https://github.com/go-openapi/spec/issues/23 diff --git a/vendor/github.com/go-openapi/spec/cache.go b/vendor/github.com/go-openapi/spec/cache.go new file mode 100644 index 0000000..122993b --- /dev/null +++ b/vendor/github.com/go-openapi/spec/cache.go @@ -0,0 +1,98 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "sync" +) + +// ResolutionCache a cache for resolving urls +type ResolutionCache interface { + Get(string) (interface{}, bool) + Set(string, interface{}) +} + +type simpleCache struct { + lock sync.RWMutex + store map[string]interface{} +} + +func (s *simpleCache) ShallowClone() ResolutionCache { + store := make(map[string]interface{}, len(s.store)) + s.lock.RLock() + for k, v := range s.store { + store[k] = v + } + s.lock.RUnlock() + + return &simpleCache{ + store: store, + } +} + +// Get retrieves a cached URI +func (s *simpleCache) Get(uri string) (interface{}, bool) { + s.lock.RLock() + v, ok := s.store[uri] + + s.lock.RUnlock() + return v, ok +} + +// Set caches a URI +func (s *simpleCache) Set(uri string, data interface{}) { + s.lock.Lock() + s.store[uri] = data + s.lock.Unlock() +} + +var ( + // resCache is a package level cache for $ref resolution and expansion. + // It is initialized lazily by methods that have the need for it: no + // memory is allocated unless some expander methods are called. + // + // It is initialized with JSON schema and swagger schema, + // which do not mutate during normal operations. + // + // All subsequent utilizations of this cache are produced from a shallow + // clone of this initial version. + resCache *simpleCache + onceCache sync.Once + + _ ResolutionCache = &simpleCache{} +) + +// initResolutionCache initializes the URI resolution cache. To be wrapped in a sync.Once.Do call. +func initResolutionCache() { + resCache = defaultResolutionCache() +} + +func defaultResolutionCache() *simpleCache { + return &simpleCache{store: map[string]interface{}{ + "http://swagger.io/v2/schema.json": MustLoadSwagger20Schema(), + "http://json-schema.org/draft-04/schema": MustLoadJSONSchemaDraft04(), + }} +} + +func cacheOrDefault(cache ResolutionCache) ResolutionCache { + onceCache.Do(initResolutionCache) + + if cache != nil { + return cache + } + + // get a shallow clone of the base cache with swagger and json schema + return resCache.ShallowClone() +} diff --git a/vendor/github.com/go-openapi/spec/contact_info.go b/vendor/github.com/go-openapi/spec/contact_info.go new file mode 100644 index 0000000..2f7bb21 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/contact_info.go @@ -0,0 +1,57 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + + "github.com/go-openapi/swag" +) + +// ContactInfo contact information for the exposed API. +// +// For more information: http://goo.gl/8us55a#contactObject +type ContactInfo struct { + ContactInfoProps + VendorExtensible +} + +// ContactInfoProps hold the properties of a ContactInfo object +type ContactInfoProps struct { + Name string `json:"name,omitempty"` + URL string `json:"url,omitempty"` + Email string `json:"email,omitempty"` +} + +// UnmarshalJSON hydrates ContactInfo from json +func (c *ContactInfo) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &c.ContactInfoProps); err != nil { + return err + } + return json.Unmarshal(data, &c.VendorExtensible) +} + +// MarshalJSON produces ContactInfo as json +func (c ContactInfo) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(c.ContactInfoProps) + if err != nil { + return nil, err + } + b2, err := json.Marshal(c.VendorExtensible) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b1, b2), nil +} diff --git a/vendor/github.com/go-openapi/spec/debug.go b/vendor/github.com/go-openapi/spec/debug.go new file mode 100644 index 0000000..fc889f6 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/debug.go @@ -0,0 +1,49 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "fmt" + "log" + "os" + "path" + "runtime" +) + +// Debug is true when the SWAGGER_DEBUG env var is not empty. +// +// It enables a more verbose logging of this package. 
+var Debug = os.Getenv("SWAGGER_DEBUG") != "" + +var ( + // specLogger is a debug logger for this package + specLogger *log.Logger +) + +func init() { + debugOptions() +} + +func debugOptions() { + specLogger = log.New(os.Stdout, "spec:", log.LstdFlags) +} + +func debugLog(msg string, args ...interface{}) { + // A private, trivial trace logger, based on go-openapi/spec/expander.go:debugLog() + if Debug { + _, file1, pos1, _ := runtime.Caller(1) + specLogger.Printf("%s:%d: %s", path.Base(file1), pos1, fmt.Sprintf(msg, args...)) + } +} diff --git a/vendor/github.com/go-openapi/spec/embed.go b/vendor/github.com/go-openapi/spec/embed.go new file mode 100644 index 0000000..1f42847 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/embed.go @@ -0,0 +1,17 @@ +package spec + +import ( + "embed" + "path" +) + +//go:embed schemas/*.json schemas/*/*.json +var assets embed.FS + +func jsonschemaDraft04JSONBytes() ([]byte, error) { + return assets.ReadFile(path.Join("schemas", "jsonschema-draft-04.json")) +} + +func v2SchemaJSONBytes() ([]byte, error) { + return assets.ReadFile(path.Join("schemas", "v2", "schema.json")) +} diff --git a/vendor/github.com/go-openapi/spec/errors.go b/vendor/github.com/go-openapi/spec/errors.go new file mode 100644 index 0000000..6992c7b --- /dev/null +++ b/vendor/github.com/go-openapi/spec/errors.go @@ -0,0 +1,19 @@ +package spec + +import "errors" + +// Error codes +var ( + // ErrUnknownTypeForReference indicates that a resolved reference was found in an unsupported container type + ErrUnknownTypeForReference = errors.New("unknown type for the resolved reference") + + // ErrResolveRefNeedsAPointer indicates that a $ref target must be a valid JSON pointer + ErrResolveRefNeedsAPointer = errors.New("resolve ref: target needs to be a pointer") + + // ErrDerefUnsupportedType indicates that a resolved reference was found in an unsupported container type. + // At the moment, $ref are supported only inside: schemas, parameters, responses, path items + ErrDerefUnsupportedType = errors.New("deref: unsupported type") + + // ErrExpandUnsupportedType indicates that $ref expansion is attempted on some invalid type + ErrExpandUnsupportedType = errors.New("expand: unsupported type. Input should be of type *Parameter or *Response") +) diff --git a/vendor/github.com/go-openapi/spec/expander.go b/vendor/github.com/go-openapi/spec/expander.go new file mode 100644 index 0000000..b81a569 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/expander.go @@ -0,0 +1,607 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + "fmt" +) + +// ExpandOptions provides options for the spec expander. +// +// RelativeBase is the path to the root document. This can be a remote URL or a path to a local file. +// +// If left empty, the root document is assumed to be located in the current working directory: +// all relative $ref's will be resolved from there. +// +// PathLoader injects a document loading method. 
By default, this resolves to the function provided by the SpecLoader package variable. +type ExpandOptions struct { + RelativeBase string // the path to the root document to expand. This is a file, not a directory + SkipSchemas bool // do not expand schemas, just paths, parameters and responses + ContinueOnError bool // continue expanding even after and error is found + PathLoader func(string) (json.RawMessage, error) `json:"-"` // the document loading method that takes a path as input and yields a json document + AbsoluteCircularRef bool // circular $ref remaining after expansion remain absolute URLs +} + +func optionsOrDefault(opts *ExpandOptions) *ExpandOptions { + if opts != nil { + clone := *opts // shallow clone to avoid internal changes to be propagated to the caller + if clone.RelativeBase != "" { + clone.RelativeBase = normalizeBase(clone.RelativeBase) + } + // if the relative base is empty, let the schema loader choose a pseudo root document + return &clone + } + return &ExpandOptions{} +} + +// ExpandSpec expands the references in a swagger spec +func ExpandSpec(spec *Swagger, options *ExpandOptions) error { + options = optionsOrDefault(options) + resolver := defaultSchemaLoader(spec, options, nil, nil) + + specBasePath := options.RelativeBase + + if !options.SkipSchemas { + for key, definition := range spec.Definitions { + parentRefs := make([]string, 0, 10) + parentRefs = append(parentRefs, "#/definitions/"+key) + + def, err := expandSchema(definition, parentRefs, resolver, specBasePath) + if resolver.shouldStopOnError(err) { + return err + } + if def != nil { + spec.Definitions[key] = *def + } + } + } + + for key := range spec.Parameters { + parameter := spec.Parameters[key] + if err := expandParameterOrResponse(¶meter, resolver, specBasePath); resolver.shouldStopOnError(err) { + return err + } + spec.Parameters[key] = parameter + } + + for key := range spec.Responses { + response := spec.Responses[key] + if err := expandParameterOrResponse(&response, resolver, specBasePath); resolver.shouldStopOnError(err) { + return err + } + spec.Responses[key] = response + } + + if spec.Paths != nil { + for key := range spec.Paths.Paths { + pth := spec.Paths.Paths[key] + if err := expandPathItem(&pth, resolver, specBasePath); resolver.shouldStopOnError(err) { + return err + } + spec.Paths.Paths[key] = pth + } + } + + return nil +} + +const rootBase = ".root" + +// baseForRoot loads in the cache the root document and produces a fake ".root" base path entry +// for further $ref resolution +func baseForRoot(root interface{}, cache ResolutionCache) string { + // cache the root document to resolve $ref's + normalizedBase := normalizeBase(rootBase) + + if root == nil { + // ensure that we never leave a nil root: always cache the root base pseudo-document + cachedRoot, found := cache.Get(normalizedBase) + if found && cachedRoot != nil { + // the cache is already preloaded with a root + return normalizedBase + } + + root = map[string]interface{}{} + } + + cache.Set(normalizedBase, root) + + return normalizedBase +} + +// ExpandSchema expands the refs in the schema object with reference to the root object. +// +// go-openapi/validate uses this function. +// +// Notice that it is impossible to reference a json schema in a different document other than root +// (use ExpandSchemaWithBasePath to resolve external references). +// +// Setting the cache is optional and this parameter may safely be left to nil. 
+func ExpandSchema(schema *Schema, root interface{}, cache ResolutionCache) error { + cache = cacheOrDefault(cache) + if root == nil { + root = schema + } + + opts := &ExpandOptions{ + // when a root is specified, cache the root as an in-memory document for $ref retrieval + RelativeBase: baseForRoot(root, cache), + SkipSchemas: false, + ContinueOnError: false, + } + + return ExpandSchemaWithBasePath(schema, cache, opts) +} + +// ExpandSchemaWithBasePath expands the refs in the schema object, base path configured through expand options. +// +// Setting the cache is optional and this parameter may safely be left to nil. +func ExpandSchemaWithBasePath(schema *Schema, cache ResolutionCache, opts *ExpandOptions) error { + if schema == nil { + return nil + } + + cache = cacheOrDefault(cache) + + opts = optionsOrDefault(opts) + + resolver := defaultSchemaLoader(nil, opts, cache, nil) + + parentRefs := make([]string, 0, 10) + s, err := expandSchema(*schema, parentRefs, resolver, opts.RelativeBase) + if err != nil { + return err + } + if s != nil { + // guard for when continuing on error + *schema = *s + } + + return nil +} + +func expandItems(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) { + if target.Items == nil { + return &target, nil + } + + // array + if target.Items.Schema != nil { + t, err := expandSchema(*target.Items.Schema, parentRefs, resolver, basePath) + if err != nil { + return nil, err + } + *target.Items.Schema = *t + } + + // tuple + for i := range target.Items.Schemas { + t, err := expandSchema(target.Items.Schemas[i], parentRefs, resolver, basePath) + if err != nil { + return nil, err + } + target.Items.Schemas[i] = *t + } + + return &target, nil +} + +func expandSchema(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) { + if target.Ref.String() == "" && target.Ref.IsRoot() { + newRef := normalizeRef(&target.Ref, basePath) + target.Ref = *newRef + return &target, nil + } + + // change the base path of resolution when an ID is encountered + // otherwise the basePath should inherit the parent's + if target.ID != "" { + basePath, _ = resolver.setSchemaID(target, target.ID, basePath) + } + + if target.Ref.String() != "" { + if !resolver.options.SkipSchemas { + return expandSchemaRef(target, parentRefs, resolver, basePath) + } + + // when "expand" with SkipSchema, we just rebase the existing $ref without replacing + // the full schema. 
+ rebasedRef, err := NewRef(normalizeURI(target.Ref.String(), basePath)) + if err != nil { + return nil, err + } + target.Ref = denormalizeRef(&rebasedRef, resolver.context.basePath, resolver.context.rootID) + + return &target, nil + } + + for k := range target.Definitions { + tt, err := expandSchema(target.Definitions[k], parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if tt != nil { + target.Definitions[k] = *tt + } + } + + t, err := expandItems(target, parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + target = *t + } + + for i := range target.AllOf { + t, err := expandSchema(target.AllOf[i], parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + target.AllOf[i] = *t + } + } + + for i := range target.AnyOf { + t, err := expandSchema(target.AnyOf[i], parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + target.AnyOf[i] = *t + } + } + + for i := range target.OneOf { + t, err := expandSchema(target.OneOf[i], parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + target.OneOf[i] = *t + } + } + + if target.Not != nil { + t, err := expandSchema(*target.Not, parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + *target.Not = *t + } + } + + for k := range target.Properties { + t, err := expandSchema(target.Properties[k], parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + target.Properties[k] = *t + } + } + + if target.AdditionalProperties != nil && target.AdditionalProperties.Schema != nil { + t, err := expandSchema(*target.AdditionalProperties.Schema, parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + *target.AdditionalProperties.Schema = *t + } + } + + for k := range target.PatternProperties { + t, err := expandSchema(target.PatternProperties[k], parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + target.PatternProperties[k] = *t + } + } + + for k := range target.Dependencies { + if target.Dependencies[k].Schema != nil { + t, err := expandSchema(*target.Dependencies[k].Schema, parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + *target.Dependencies[k].Schema = *t + } + } + } + + if target.AdditionalItems != nil && target.AdditionalItems.Schema != nil { + t, err := expandSchema(*target.AdditionalItems.Schema, parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + *target.AdditionalItems.Schema = *t + } + } + return &target, nil +} + +func expandSchemaRef(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) { + // if a Ref is found, all sibling fields are skipped + // Ref also changes the resolution scope of children expandSchema + + // here the resolution scope is changed because a $ref was encountered + normalizedRef := normalizeRef(&target.Ref, basePath) + normalizedBasePath := normalizedRef.RemoteURI() + + if resolver.isCircular(normalizedRef, basePath, parentRefs...) { + // this means there is a cycle in the recursion tree: return the Ref + // - circular refs cannot be expanded. 
We leave them as ref. + // - denormalization means that a new local file ref is set relative to the original basePath + debugLog("short circuit circular ref: basePath: %s, normalizedPath: %s, normalized ref: %s", + basePath, normalizedBasePath, normalizedRef.String()) + if !resolver.options.AbsoluteCircularRef { + target.Ref = denormalizeRef(normalizedRef, resolver.context.basePath, resolver.context.rootID) + } else { + target.Ref = *normalizedRef + } + return &target, nil + } + + var t *Schema + err := resolver.Resolve(&target.Ref, &t, basePath) + if resolver.shouldStopOnError(err) { + return nil, err + } + + if t == nil { + // guard for when continuing on error + return &target, nil + } + + parentRefs = append(parentRefs, normalizedRef.String()) + transitiveResolver := resolver.transitiveResolver(basePath, target.Ref) + + basePath = resolver.updateBasePath(transitiveResolver, normalizedBasePath) + + return expandSchema(*t, parentRefs, transitiveResolver, basePath) +} + +func expandPathItem(pathItem *PathItem, resolver *schemaLoader, basePath string) error { + if pathItem == nil { + return nil + } + + parentRefs := make([]string, 0, 10) + if err := resolver.deref(pathItem, parentRefs, basePath); resolver.shouldStopOnError(err) { + return err + } + + if pathItem.Ref.String() != "" { + transitiveResolver := resolver.transitiveResolver(basePath, pathItem.Ref) + basePath = transitiveResolver.updateBasePath(resolver, basePath) + resolver = transitiveResolver + } + + pathItem.Ref = Ref{} + for i := range pathItem.Parameters { + if err := expandParameterOrResponse(&(pathItem.Parameters[i]), resolver, basePath); resolver.shouldStopOnError(err) { + return err + } + } + + ops := []*Operation{ + pathItem.Get, + pathItem.Head, + pathItem.Options, + pathItem.Put, + pathItem.Post, + pathItem.Patch, + pathItem.Delete, + } + for _, op := range ops { + if err := expandOperation(op, resolver, basePath); resolver.shouldStopOnError(err) { + return err + } + } + + return nil +} + +func expandOperation(op *Operation, resolver *schemaLoader, basePath string) error { + if op == nil { + return nil + } + + for i := range op.Parameters { + param := op.Parameters[i] + if err := expandParameterOrResponse(¶m, resolver, basePath); resolver.shouldStopOnError(err) { + return err + } + op.Parameters[i] = param + } + + if op.Responses == nil { + return nil + } + + responses := op.Responses + if err := expandParameterOrResponse(responses.Default, resolver, basePath); resolver.shouldStopOnError(err) { + return err + } + + for code := range responses.StatusCodeResponses { + response := responses.StatusCodeResponses[code] + if err := expandParameterOrResponse(&response, resolver, basePath); resolver.shouldStopOnError(err) { + return err + } + responses.StatusCodeResponses[code] = response + } + + return nil +} + +// ExpandResponseWithRoot expands a response based on a root document, not a fetchable document +// +// Notice that it is impossible to reference a json schema in a different document other than root +// (use ExpandResponse to resolve external references). +// +// Setting the cache is optional and this parameter may safely be left to nil. 
+func ExpandResponseWithRoot(response *Response, root interface{}, cache ResolutionCache) error { + cache = cacheOrDefault(cache) + opts := &ExpandOptions{ + RelativeBase: baseForRoot(root, cache), + } + resolver := defaultSchemaLoader(root, opts, cache, nil) + + return expandParameterOrResponse(response, resolver, opts.RelativeBase) +} + +// ExpandResponse expands a response based on a basepath +// +// All refs inside response will be resolved relative to basePath +func ExpandResponse(response *Response, basePath string) error { + opts := optionsOrDefault(&ExpandOptions{ + RelativeBase: basePath, + }) + resolver := defaultSchemaLoader(nil, opts, nil, nil) + + return expandParameterOrResponse(response, resolver, opts.RelativeBase) +} + +// ExpandParameterWithRoot expands a parameter based on a root document, not a fetchable document. +// +// Notice that it is impossible to reference a json schema in a different document other than root +// (use ExpandParameter to resolve external references). +func ExpandParameterWithRoot(parameter *Parameter, root interface{}, cache ResolutionCache) error { + cache = cacheOrDefault(cache) + + opts := &ExpandOptions{ + RelativeBase: baseForRoot(root, cache), + } + resolver := defaultSchemaLoader(root, opts, cache, nil) + + return expandParameterOrResponse(parameter, resolver, opts.RelativeBase) +} + +// ExpandParameter expands a parameter based on a basepath. +// This is the exported version of expandParameter +// all refs inside parameter will be resolved relative to basePath +func ExpandParameter(parameter *Parameter, basePath string) error { + opts := optionsOrDefault(&ExpandOptions{ + RelativeBase: basePath, + }) + resolver := defaultSchemaLoader(nil, opts, nil, nil) + + return expandParameterOrResponse(parameter, resolver, opts.RelativeBase) +} + +func getRefAndSchema(input interface{}) (*Ref, *Schema, error) { + var ( + ref *Ref + sch *Schema + ) + + switch refable := input.(type) { + case *Parameter: + if refable == nil { + return nil, nil, nil + } + ref = &refable.Ref + sch = refable.Schema + case *Response: + if refable == nil { + return nil, nil, nil + } + ref = &refable.Ref + sch = refable.Schema + default: + return nil, nil, fmt.Errorf("unsupported type: %T: %w", input, ErrExpandUnsupportedType) + } + + return ref, sch, nil +} + +func expandParameterOrResponse(input interface{}, resolver *schemaLoader, basePath string) error { + ref, sch, err := getRefAndSchema(input) + if err != nil { + return err + } + + if ref == nil && sch == nil { // nothing to do + return nil + } + + parentRefs := make([]string, 0, 10) + if ref != nil { + // dereference this $ref + if err = resolver.deref(input, parentRefs, basePath); resolver.shouldStopOnError(err) { + return err + } + + ref, sch, _ = getRefAndSchema(input) + } + + if ref.String() != "" { + transitiveResolver := resolver.transitiveResolver(basePath, *ref) + basePath = resolver.updateBasePath(transitiveResolver, basePath) + resolver = transitiveResolver + } + + if sch == nil { + // nothing to be expanded + if ref != nil { + *ref = Ref{} + } + + return nil + } + + if sch.Ref.String() != "" { + rebasedRef, ern := NewRef(normalizeURI(sch.Ref.String(), basePath)) + if ern != nil { + return ern + } + + if resolver.isCircular(&rebasedRef, basePath, parentRefs...) 
{ + // this is a circular $ref: stop expansion + if !resolver.options.AbsoluteCircularRef { + sch.Ref = denormalizeRef(&rebasedRef, resolver.context.basePath, resolver.context.rootID) + } else { + sch.Ref = rebasedRef + } + } + } + + // $ref expansion or rebasing is performed by expandSchema below + if ref != nil { + *ref = Ref{} + } + + // expand schema + // yes, we do it even if options.SkipSchema is true: we have to go down that rabbit hole and rebase nested $ref) + s, err := expandSchema(*sch, parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return err + } + + if s != nil { // guard for when continuing on error + *sch = *s + } + + return nil +} diff --git a/vendor/github.com/go-openapi/spec/external_docs.go b/vendor/github.com/go-openapi/spec/external_docs.go new file mode 100644 index 0000000..88add91 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/external_docs.go @@ -0,0 +1,24 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +// ExternalDocumentation allows referencing an external resource for +// extended documentation. +// +// For more information: http://goo.gl/8us55a#externalDocumentationObject +type ExternalDocumentation struct { + Description string `json:"description,omitempty"` + URL string `json:"url,omitempty"` +} diff --git a/vendor/github.com/go-openapi/spec/header.go b/vendor/github.com/go-openapi/spec/header.go new file mode 100644 index 0000000..9dfd17b --- /dev/null +++ b/vendor/github.com/go-openapi/spec/header.go @@ -0,0 +1,203 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package spec + +import ( + "encoding/json" + "strings" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +const ( + jsonArray = "array" +) + +// HeaderProps describes a response header +type HeaderProps struct { + Description string `json:"description,omitempty"` +} + +// Header describes a header for a response of the API +// +// For more information: http://goo.gl/8us55a#headerObject +type Header struct { + CommonValidations + SimpleSchema + VendorExtensible + HeaderProps +} + +// ResponseHeader creates a new header instance for use in a response +func ResponseHeader() *Header { + return new(Header) +} + +// WithDescription sets the description on this response, allows for chaining +func (h *Header) WithDescription(description string) *Header { + h.Description = description + return h +} + +// Typed a fluent builder method for the type of parameter +func (h *Header) Typed(tpe, format string) *Header { + h.Type = tpe + h.Format = format + return h +} + +// CollectionOf a fluent builder method for an array item +func (h *Header) CollectionOf(items *Items, format string) *Header { + h.Type = jsonArray + h.Items = items + h.CollectionFormat = format + return h +} + +// WithDefault sets the default value on this item +func (h *Header) WithDefault(defaultValue interface{}) *Header { + h.Default = defaultValue + return h +} + +// WithMaxLength sets a max length value +func (h *Header) WithMaxLength(max int64) *Header { + h.MaxLength = &max + return h +} + +// WithMinLength sets a min length value +func (h *Header) WithMinLength(min int64) *Header { + h.MinLength = &min + return h +} + +// WithPattern sets a pattern value +func (h *Header) WithPattern(pattern string) *Header { + h.Pattern = pattern + return h +} + +// WithMultipleOf sets a multiple of value +func (h *Header) WithMultipleOf(number float64) *Header { + h.MultipleOf = &number + return h +} + +// WithMaximum sets a maximum number value +func (h *Header) WithMaximum(max float64, exclusive bool) *Header { + h.Maximum = &max + h.ExclusiveMaximum = exclusive + return h +} + +// WithMinimum sets a minimum number value +func (h *Header) WithMinimum(min float64, exclusive bool) *Header { + h.Minimum = &min + h.ExclusiveMinimum = exclusive + return h +} + +// WithEnum sets a the enum values (replace) +func (h *Header) WithEnum(values ...interface{}) *Header { + h.Enum = append([]interface{}{}, values...) 
+ return h +} + +// WithMaxItems sets the max items +func (h *Header) WithMaxItems(size int64) *Header { + h.MaxItems = &size + return h +} + +// WithMinItems sets the min items +func (h *Header) WithMinItems(size int64) *Header { + h.MinItems = &size + return h +} + +// UniqueValues dictates that this array can only have unique items +func (h *Header) UniqueValues() *Header { + h.UniqueItems = true + return h +} + +// AllowDuplicates this array can have duplicates +func (h *Header) AllowDuplicates() *Header { + h.UniqueItems = false + return h +} + +// WithValidations is a fluent method to set header validations +func (h *Header) WithValidations(val CommonValidations) *Header { + h.SetValidations(SchemaValidations{CommonValidations: val}) + return h +} + +// MarshalJSON marshal this to JSON +func (h Header) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(h.CommonValidations) + if err != nil { + return nil, err + } + b2, err := json.Marshal(h.SimpleSchema) + if err != nil { + return nil, err + } + b3, err := json.Marshal(h.HeaderProps) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b1, b2, b3), nil +} + +// UnmarshalJSON unmarshals this header from JSON +func (h *Header) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &h.CommonValidations); err != nil { + return err + } + if err := json.Unmarshal(data, &h.SimpleSchema); err != nil { + return err + } + if err := json.Unmarshal(data, &h.VendorExtensible); err != nil { + return err + } + return json.Unmarshal(data, &h.HeaderProps) +} + +// JSONLookup look up a value by the json property name +func (h Header) JSONLookup(token string) (interface{}, error) { + if ex, ok := h.Extensions[token]; ok { + return &ex, nil + } + + r, _, err := jsonpointer.GetForToken(h.CommonValidations, token) + if err != nil && !strings.HasPrefix(err.Error(), "object has no field") { + return nil, err + } + if r != nil { + return r, nil + } + r, _, err = jsonpointer.GetForToken(h.SimpleSchema, token) + if err != nil && !strings.HasPrefix(err.Error(), "object has no field") { + return nil, err + } + if r != nil { + return r, nil + } + r, _, err = jsonpointer.GetForToken(h.HeaderProps, token) + return r, err +} diff --git a/vendor/github.com/go-openapi/spec/info.go b/vendor/github.com/go-openapi/spec/info.go new file mode 100644 index 0000000..582f0fd --- /dev/null +++ b/vendor/github.com/go-openapi/spec/info.go @@ -0,0 +1,184 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package spec + +import ( + "encoding/json" + "strconv" + "strings" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +// Extensions vendor specific extensions +type Extensions map[string]interface{} + +// Add adds a value to these extensions +func (e Extensions) Add(key string, value interface{}) { + realKey := strings.ToLower(key) + e[realKey] = value +} + +// GetString gets a string value from the extensions +func (e Extensions) GetString(key string) (string, bool) { + if v, ok := e[strings.ToLower(key)]; ok { + str, ok := v.(string) + return str, ok + } + return "", false +} + +// GetInt gets a int value from the extensions +func (e Extensions) GetInt(key string) (int, bool) { + realKey := strings.ToLower(key) + + if v, ok := e.GetString(realKey); ok { + if r, err := strconv.Atoi(v); err == nil { + return r, true + } + } + + if v, ok := e[realKey]; ok { + if r, rOk := v.(float64); rOk { + return int(r), true + } + } + return -1, false +} + +// GetBool gets a string value from the extensions +func (e Extensions) GetBool(key string) (bool, bool) { + if v, ok := e[strings.ToLower(key)]; ok { + str, ok := v.(bool) + return str, ok + } + return false, false +} + +// GetStringSlice gets a string value from the extensions +func (e Extensions) GetStringSlice(key string) ([]string, bool) { + if v, ok := e[strings.ToLower(key)]; ok { + arr, isSlice := v.([]interface{}) + if !isSlice { + return nil, false + } + var strs []string + for _, iface := range arr { + str, isString := iface.(string) + if !isString { + return nil, false + } + strs = append(strs, str) + } + return strs, ok + } + return nil, false +} + +// VendorExtensible composition block. +type VendorExtensible struct { + Extensions Extensions +} + +// AddExtension adds an extension to this extensible object +func (v *VendorExtensible) AddExtension(key string, value interface{}) { + if value == nil { + return + } + if v.Extensions == nil { + v.Extensions = make(map[string]interface{}) + } + v.Extensions.Add(key, value) +} + +// MarshalJSON marshals the extensions to json +func (v VendorExtensible) MarshalJSON() ([]byte, error) { + toser := make(map[string]interface{}) + for k, v := range v.Extensions { + lk := strings.ToLower(k) + if strings.HasPrefix(lk, "x-") { + toser[k] = v + } + } + return json.Marshal(toser) +} + +// UnmarshalJSON for this extensible object +func (v *VendorExtensible) UnmarshalJSON(data []byte) error { + var d map[string]interface{} + if err := json.Unmarshal(data, &d); err != nil { + return err + } + for k, vv := range d { + lk := strings.ToLower(k) + if strings.HasPrefix(lk, "x-") { + if v.Extensions == nil { + v.Extensions = map[string]interface{}{} + } + v.Extensions[k] = vv + } + } + return nil +} + +// InfoProps the properties for an info definition +type InfoProps struct { + Description string `json:"description,omitempty"` + Title string `json:"title,omitempty"` + TermsOfService string `json:"termsOfService,omitempty"` + Contact *ContactInfo `json:"contact,omitempty"` + License *License `json:"license,omitempty"` + Version string `json:"version,omitempty"` +} + +// Info object provides metadata about the API. +// The metadata can be used by the clients if needed, and can be presented in the Swagger-UI for convenience. 
+// +// For more information: http://goo.gl/8us55a#infoObject +type Info struct { + VendorExtensible + InfoProps +} + +// JSONLookup look up a value by the json property name +func (i Info) JSONLookup(token string) (interface{}, error) { + if ex, ok := i.Extensions[token]; ok { + return &ex, nil + } + r, _, err := jsonpointer.GetForToken(i.InfoProps, token) + return r, err +} + +// MarshalJSON marshal this to JSON +func (i Info) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(i.InfoProps) + if err != nil { + return nil, err + } + b2, err := json.Marshal(i.VendorExtensible) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b1, b2), nil +} + +// UnmarshalJSON marshal this from JSON +func (i *Info) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &i.InfoProps); err != nil { + return err + } + return json.Unmarshal(data, &i.VendorExtensible) +} diff --git a/vendor/github.com/go-openapi/spec/items.go b/vendor/github.com/go-openapi/spec/items.go new file mode 100644 index 0000000..e2afb21 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/items.go @@ -0,0 +1,234 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + "strings" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +const ( + jsonRef = "$ref" +) + +// SimpleSchema describe swagger simple schemas for parameters and headers +type SimpleSchema struct { + Type string `json:"type,omitempty"` + Nullable bool `json:"nullable,omitempty"` + Format string `json:"format,omitempty"` + Items *Items `json:"items,omitempty"` + CollectionFormat string `json:"collectionFormat,omitempty"` + Default interface{} `json:"default,omitempty"` + Example interface{} `json:"example,omitempty"` +} + +// TypeName return the type (or format) of a simple schema +func (s *SimpleSchema) TypeName() string { + if s.Format != "" { + return s.Format + } + return s.Type +} + +// ItemsTypeName yields the type of items in a simple schema array +func (s *SimpleSchema) ItemsTypeName() string { + if s.Items == nil { + return "" + } + return s.Items.TypeName() +} + +// Items a limited subset of JSON-Schema's items object. +// It is used by parameter definitions that are not located in "body". +// +// For more information: http://goo.gl/8us55a#items-object +type Items struct { + Refable + CommonValidations + SimpleSchema + VendorExtensible +} + +// NewItems creates a new instance of items +func NewItems() *Items { + return &Items{} +} + +// Typed a fluent builder method for the type of item +func (i *Items) Typed(tpe, format string) *Items { + i.Type = tpe + i.Format = format + return i +} + +// AsNullable flags this schema as nullable. 
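+//
+// For example (an assumed usage of the builders defined in this file):
+//
+//	items := NewItems().Typed("string", "uuid").AsNullable()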
+func (i *Items) AsNullable() *Items { + i.Nullable = true + return i +} + +// CollectionOf a fluent builder method for an array item +func (i *Items) CollectionOf(items *Items, format string) *Items { + i.Type = jsonArray + i.Items = items + i.CollectionFormat = format + return i +} + +// WithDefault sets the default value on this item +func (i *Items) WithDefault(defaultValue interface{}) *Items { + i.Default = defaultValue + return i +} + +// WithMaxLength sets a max length value +func (i *Items) WithMaxLength(max int64) *Items { + i.MaxLength = &max + return i +} + +// WithMinLength sets a min length value +func (i *Items) WithMinLength(min int64) *Items { + i.MinLength = &min + return i +} + +// WithPattern sets a pattern value +func (i *Items) WithPattern(pattern string) *Items { + i.Pattern = pattern + return i +} + +// WithMultipleOf sets a multiple of value +func (i *Items) WithMultipleOf(number float64) *Items { + i.MultipleOf = &number + return i +} + +// WithMaximum sets a maximum number value +func (i *Items) WithMaximum(max float64, exclusive bool) *Items { + i.Maximum = &max + i.ExclusiveMaximum = exclusive + return i +} + +// WithMinimum sets a minimum number value +func (i *Items) WithMinimum(min float64, exclusive bool) *Items { + i.Minimum = &min + i.ExclusiveMinimum = exclusive + return i +} + +// WithEnum sets a the enum values (replace) +func (i *Items) WithEnum(values ...interface{}) *Items { + i.Enum = append([]interface{}{}, values...) + return i +} + +// WithMaxItems sets the max items +func (i *Items) WithMaxItems(size int64) *Items { + i.MaxItems = &size + return i +} + +// WithMinItems sets the min items +func (i *Items) WithMinItems(size int64) *Items { + i.MinItems = &size + return i +} + +// UniqueValues dictates that this array can only have unique items +func (i *Items) UniqueValues() *Items { + i.UniqueItems = true + return i +} + +// AllowDuplicates this array can have duplicates +func (i *Items) AllowDuplicates() *Items { + i.UniqueItems = false + return i +} + +// WithValidations is a fluent method to set Items validations +func (i *Items) WithValidations(val CommonValidations) *Items { + i.SetValidations(SchemaValidations{CommonValidations: val}) + return i +} + +// UnmarshalJSON hydrates this items instance with the data from JSON +func (i *Items) UnmarshalJSON(data []byte) error { + var validations CommonValidations + if err := json.Unmarshal(data, &validations); err != nil { + return err + } + var ref Refable + if err := json.Unmarshal(data, &ref); err != nil { + return err + } + var simpleSchema SimpleSchema + if err := json.Unmarshal(data, &simpleSchema); err != nil { + return err + } + var vendorExtensible VendorExtensible + if err := json.Unmarshal(data, &vendorExtensible); err != nil { + return err + } + i.Refable = ref + i.CommonValidations = validations + i.SimpleSchema = simpleSchema + i.VendorExtensible = vendorExtensible + return nil +} + +// MarshalJSON converts this items object to JSON +func (i Items) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(i.CommonValidations) + if err != nil { + return nil, err + } + b2, err := json.Marshal(i.SimpleSchema) + if err != nil { + return nil, err + } + b3, err := json.Marshal(i.Refable) + if err != nil { + return nil, err + } + b4, err := json.Marshal(i.VendorExtensible) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b4, b3, b1, b2), nil +} + +// JSONLookup look up a value by the json property name +func (i Items) JSONLookup(token string) (interface{}, error) { + if 
token == jsonRef { + return &i.Ref, nil + } + + r, _, err := jsonpointer.GetForToken(i.CommonValidations, token) + if err != nil && !strings.HasPrefix(err.Error(), "object has no field") { + return nil, err + } + if r != nil { + return r, nil + } + r, _, err = jsonpointer.GetForToken(i.SimpleSchema, token) + return r, err +} diff --git a/vendor/github.com/go-openapi/spec/license.go b/vendor/github.com/go-openapi/spec/license.go new file mode 100644 index 0000000..b42f803 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/license.go @@ -0,0 +1,56 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + + "github.com/go-openapi/swag" +) + +// License information for the exposed API. +// +// For more information: http://goo.gl/8us55a#licenseObject +type License struct { + LicenseProps + VendorExtensible +} + +// LicenseProps holds the properties of a License object +type LicenseProps struct { + Name string `json:"name,omitempty"` + URL string `json:"url,omitempty"` +} + +// UnmarshalJSON hydrates License from json +func (l *License) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &l.LicenseProps); err != nil { + return err + } + return json.Unmarshal(data, &l.VendorExtensible) +} + +// MarshalJSON produces License as json +func (l License) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(l.LicenseProps) + if err != nil { + return nil, err + } + b2, err := json.Marshal(l.VendorExtensible) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b1, b2), nil +} diff --git a/vendor/github.com/go-openapi/spec/normalizer.go b/vendor/github.com/go-openapi/spec/normalizer.go new file mode 100644 index 0000000..e8b6009 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/normalizer.go @@ -0,0 +1,202 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "net/url" + "path" + "strings" +) + +const fileScheme = "file" + +// normalizeURI ensures that all $ref paths used internally by the expander are canonicalized. +// +// NOTE(windows): there is a tolerance over the strict URI format on windows. +// +// The normalizer accepts relative file URLs like 'Path\File.JSON' as well as absolute file URLs like +// 'C:\Path\file.Yaml'. 
+// +// Both are canonicalized with a "file://" scheme, slashes and a lower-cased path: +// 'file:///c:/path/file.yaml' +// +// URLs can be specified with a file scheme, like in 'file:///folder/file.json' or +// 'file:///c:\folder\File.json'. +// +// URLs like file://C:\folder are considered invalid (i.e. there is no host 'c:\folder') and a "repair" +// is attempted. +// +// The base path argument is assumed to be canonicalized (e.g. using normalizeBase()). +func normalizeURI(refPath, base string) string { + refURL, err := parseURL(refPath) + if err != nil { + specLogger.Printf("warning: invalid URI in $ref %q: %v", refPath, err) + refURL, refPath = repairURI(refPath) + } + + fixWindowsURI(refURL, refPath) // noop on non-windows OS + + refURL.Path = path.Clean(refURL.Path) + if refURL.Path == "." { + refURL.Path = "" + } + + r := MustCreateRef(refURL.String()) + if r.IsCanonical() { + return refURL.String() + } + + baseURL, _ := parseURL(base) + if path.IsAbs(refURL.Path) { + baseURL.Path = refURL.Path + } else if refURL.Path != "" { + baseURL.Path = path.Join(path.Dir(baseURL.Path), refURL.Path) + } + // copying fragment from ref to base + baseURL.Fragment = refURL.Fragment + + return baseURL.String() +} + +// denormalizeRef returns the simplest notation for a normalized $ref, given the path of the original root document. +// +// When calling this, we assume that: +// * $ref is a canonical URI +// * originalRelativeBase is a canonical URI +// +// denormalizeRef is currently used when we rewrite a $ref after a circular $ref has been detected. +// In this case, expansion stops and normally renders the internal canonical $ref. +// +// This internal $ref is eventually rebased to the original RelativeBase used for the expansion. +// +// There is a special case for schemas that are anchored with an "id": +// in that case, the rebasing is performed // against the id only if this is an anchor for the initial root document. +// All other intermediate "id"'s found along the way are ignored for the purpose of rebasing. +func denormalizeRef(ref *Ref, originalRelativeBase, id string) Ref { + debugLog("denormalizeRef called:\n$ref: %q\noriginal: %s\nroot ID:%s", ref.String(), originalRelativeBase, id) + + if ref.String() == "" || ref.IsRoot() || ref.HasFragmentOnly { + // short circuit: $ref to current doc + return *ref + } + + if id != "" { + idBaseURL, err := parseURL(id) + if err == nil { // if the schema id is not usable as a URI, ignore it + if ref, ok := rebase(ref, idBaseURL, true); ok { // rebase, but keep references to root unchaged (do not want $ref: "") + // $ref relative to the ID of the schema in the root document + return ref + } + } + } + + originalRelativeBaseURL, _ := parseURL(originalRelativeBase) + + r, _ := rebase(ref, originalRelativeBaseURL, false) + + return r +} + +func rebase(ref *Ref, v *url.URL, notEqual bool) (Ref, bool) { + var newBase url.URL + + u := ref.GetURL() + + if u.Scheme != v.Scheme || u.Host != v.Host { + return *ref, false + } + + docPath := v.Path + v.Path = path.Dir(v.Path) + + if v.Path == "." 
{ + v.Path = "" + } else if !strings.HasSuffix(v.Path, "/") { + v.Path += "/" + } + + newBase.Fragment = u.Fragment + + if strings.HasPrefix(u.Path, docPath) { + newBase.Path = strings.TrimPrefix(u.Path, docPath) + } else { + newBase.Path = strings.TrimPrefix(u.Path, v.Path) + } + + if notEqual && newBase.Path == "" && newBase.Fragment == "" { + // do not want rebasing to end up in an empty $ref + return *ref, false + } + + if path.IsAbs(newBase.Path) { + // whenever we end up with an absolute path, specify the scheme and host + newBase.Scheme = v.Scheme + newBase.Host = v.Host + } + + return MustCreateRef(newBase.String()), true +} + +// normalizeRef canonicalize a Ref, using a canonical relativeBase as its absolute anchor +func normalizeRef(ref *Ref, relativeBase string) *Ref { + r := MustCreateRef(normalizeURI(ref.String(), relativeBase)) + return &r +} + +// normalizeBase performs a normalization of the input base path. +// +// This always yields a canonical URI (absolute), usable for the document cache. +// +// It ensures that all further internal work on basePath may safely assume +// a non-empty, cross-platform, canonical URI (i.e. absolute). +// +// This normalization tolerates windows paths (e.g. C:\x\y\File.dat) and transform this +// in a file:// URL with lower cased drive letter and path. +// +// See also: https://en.wikipedia.org/wiki/File_URI_scheme +func normalizeBase(in string) string { + u, err := parseURL(in) + if err != nil { + specLogger.Printf("warning: invalid URI in RelativeBase %q: %v", in, err) + u, in = repairURI(in) + } + + u.Fragment = "" // any fragment in the base is irrelevant + + fixWindowsURI(u, in) // noop on non-windows OS + + u.Path = path.Clean(u.Path) + if u.Path == "." { // empty after Clean() + u.Path = "" + } + + if u.Scheme != "" { + if path.IsAbs(u.Path) || u.Scheme != fileScheme { + // this is absolute or explicitly not a local file: we're good + return u.String() + } + } + + // no scheme or file scheme with relative path: assume file and make it absolute + // enforce scheme file://... with absolute path. + // + // If the input path is relative, we anchor the path to the current working directory. + // NOTE: we may end up with a host component. Leave it unchanged: e.g. file://host/folder/file.json + + u.Scheme = fileScheme + u.Path = absPath(u.Path) // platform-dependent + u.RawQuery = "" // any query component is irrelevant for a base + return u.String() +} diff --git a/vendor/github.com/go-openapi/spec/normalizer_nonwindows.go b/vendor/github.com/go-openapi/spec/normalizer_nonwindows.go new file mode 100644 index 0000000..f19f1a8 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/normalizer_nonwindows.go @@ -0,0 +1,44 @@ +//go:build !windows +// +build !windows + +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "net/url" + "path/filepath" +) + +// absPath makes a file path absolute and compatible with a URI path component. 
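+//
+// For example, with a working directory of /home/user/project (an assumed path),
+// a relative input such as "specs/api.yaml" resolves to
+// "/home/user/project/specs/api.yaml"; absolute inputs are merely cleaned.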
+// +// The parameter must be a path, not an URI. +func absPath(in string) string { + anchored, err := filepath.Abs(in) + if err != nil { + specLogger.Printf("warning: could not resolve current working directory: %v", err) + return in + } + return anchored +} + +func repairURI(in string) (*url.URL, string) { + u, _ := parseURL("") + debugLog("repaired URI: original: %q, repaired: %q", in, "") + return u, "" +} + +func fixWindowsURI(_ *url.URL, _ string) { +} diff --git a/vendor/github.com/go-openapi/spec/normalizer_windows.go b/vendor/github.com/go-openapi/spec/normalizer_windows.go new file mode 100644 index 0000000..a66c532 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/normalizer_windows.go @@ -0,0 +1,154 @@ +// -build windows + +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "net/url" + "os" + "path" + "path/filepath" + "strings" +) + +// absPath makes a file path absolute and compatible with a URI path component +// +// The parameter must be a path, not an URI. +func absPath(in string) string { + // NOTE(windows): filepath.Abs exhibits a special behavior on windows for empty paths. + // See https://github.com/golang/go/issues/24441 + if in == "" { + in = "." + } + + anchored, err := filepath.Abs(in) + if err != nil { + specLogger.Printf("warning: could not resolve current working directory: %v", err) + return in + } + + pth := strings.ReplaceAll(strings.ToLower(anchored), `\`, `/`) + if !strings.HasPrefix(pth, "/") { + pth = "/" + pth + } + + return path.Clean(pth) +} + +// repairURI tolerates invalid file URIs with common typos +// such as 'file://E:\folder\file', that break the regular URL parser. +// +// Adopting the same defaults as for unixes (e.g. return an empty path) would +// result into a counter-intuitive result for that case (e.g. E:\folder\file is +// eventually resolved as the current directory). The repair will detect the missing "/". +// +// Note that this only works for the file scheme. +func repairURI(in string) (*url.URL, string) { + const prefix = fileScheme + "://" + if !strings.HasPrefix(in, prefix) { + // giving up: resolve to empty path + u, _ := parseURL("") + + return u, "" + } + + // attempt the repair, stripping the scheme should be sufficient + u, _ := parseURL(strings.TrimPrefix(in, prefix)) + debugLog("repaired URI: original: %q, repaired: %q", in, u.String()) + + return u, u.String() +} + +// fixWindowsURI tolerates an absolute file path on windows such as C:\Base\File.yaml or \\host\share\Base\File.yaml +// and makes it a canonical URI: file:///c:/base/file.yaml +// +// Catch 22 notes for Windows: +// +// * There may be a drive letter on windows (it is lower-cased) +// * There may be a share UNC, e.g. \\server\folder\data.xml +// * Paths are case insensitive +// * Paths may already contain slashes +// * Paths must be slashed +// +// NOTE: there is no escaping. "/" may be valid separators just like "\". 
+// We don't use ToSlash() (which escapes everything) because windows now also +// tolerates the use of "/". Hence, both C:\File.yaml and C:/File.yaml will work. +func fixWindowsURI(u *url.URL, in string) { + drive := filepath.VolumeName(in) + + if len(drive) > 0 { + if len(u.Scheme) == 1 && strings.EqualFold(u.Scheme, drive[:1]) { // a path with a drive letter + u.Scheme = fileScheme + u.Host = "" + u.Path = strings.Join([]string{drive, u.Opaque, u.Path}, `/`) // reconstruct the full path component (no fragment, no query) + } else if u.Host == "" && strings.HasPrefix(u.Path, drive) { // a path with a \\host volume + // NOTE: the special host@port syntax for UNC is not supported (yet) + u.Scheme = fileScheme + + // this is a modified version of filepath.Dir() to apply on the VolumeName itself + i := len(drive) - 1 + for i >= 0 && !os.IsPathSeparator(drive[i]) { + i-- + } + host := drive[:i] // \\host\share => host + + u.Path = strings.TrimPrefix(u.Path, host) + u.Host = strings.TrimPrefix(host, `\\`) + } + + u.Opaque = "" + u.Path = strings.ReplaceAll(strings.ToLower(u.Path), `\`, `/`) + + // ensure we form an absolute path + if !strings.HasPrefix(u.Path, "/") { + u.Path = "/" + u.Path + } + + u.Path = path.Clean(u.Path) + + return + } + + if u.Scheme == fileScheme { + // Handle dodgy cases for file://{...} URIs on windows. + // A canonical URI should always be followed by an absolute path. + // + // Examples: + // * file:///folder/file => valid, unchanged + // * file:///c:\folder\file => slashed + // * file:///./folder/file => valid, cleaned to remove the dot + // * file:///.\folder\file => remapped to cwd + // * file:///. => dodgy, remapped to / (consistent with the behavior on unix) + // * file:///.. => dodgy, remapped to / (consistent with the behavior on unix) + if (!path.IsAbs(u.Path) && !filepath.IsAbs(u.Path)) || (strings.HasPrefix(u.Path, `/.`) && strings.Contains(u.Path, `\`)) { + // ensure we form an absolute path + u.Path, _ = filepath.Abs(strings.TrimLeft(u.Path, `/`)) + if !strings.HasPrefix(u.Path, "/") { + u.Path = "/" + u.Path + } + } + u.Path = strings.ToLower(u.Path) + } + + // NOTE: lower case normalization does not propagate to inner resources, + // generated when rebasing: when joining a relative URI with a file to an absolute base, + // only the base is currently lower-cased. + // + // For now, we assume this is good enough for most use cases + // and try not to generate too many differences + // between the output produced on different platforms. + u.Path = path.Clean(strings.ReplaceAll(u.Path, `\`, `/`)) +} diff --git a/vendor/github.com/go-openapi/spec/operation.go b/vendor/github.com/go-openapi/spec/operation.go new file mode 100644 index 0000000..a69cca8 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/operation.go @@ -0,0 +1,400 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package spec + +import ( + "bytes" + "encoding/gob" + "encoding/json" + "sort" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +func init() { + gob.Register(map[string]interface{}{}) + gob.Register([]interface{}{}) +} + +// OperationProps describes an operation +// +// NOTES: +// - schemes, when present must be from [http, https, ws, wss]: see validate +// - Security is handled as a special case: see MarshalJSON function +type OperationProps struct { + Description string `json:"description,omitempty"` + Consumes []string `json:"consumes,omitempty"` + Produces []string `json:"produces,omitempty"` + Schemes []string `json:"schemes,omitempty"` + Tags []string `json:"tags,omitempty"` + Summary string `json:"summary,omitempty"` + ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"` + ID string `json:"operationId,omitempty"` + Deprecated bool `json:"deprecated,omitempty"` + Security []map[string][]string `json:"security,omitempty"` + Parameters []Parameter `json:"parameters,omitempty"` + Responses *Responses `json:"responses,omitempty"` +} + +// MarshalJSON takes care of serializing operation properties to JSON +// +// We use a custom marhaller here to handle a special cases related to +// the Security field. We need to preserve zero length slice +// while omitting the field when the value is nil/unset. +func (op OperationProps) MarshalJSON() ([]byte, error) { + type Alias OperationProps + if op.Security == nil { + return json.Marshal(&struct { + Security []map[string][]string `json:"security,omitempty"` + *Alias + }{ + Security: op.Security, + Alias: (*Alias)(&op), + }) + } + return json.Marshal(&struct { + Security []map[string][]string `json:"security"` + *Alias + }{ + Security: op.Security, + Alias: (*Alias)(&op), + }) +} + +// Operation describes a single API operation on a path. +// +// For more information: http://goo.gl/8us55a#operationObject +type Operation struct { + VendorExtensible + OperationProps +} + +// SuccessResponse gets a success response model +func (o *Operation) SuccessResponse() (*Response, int, bool) { + if o.Responses == nil { + return nil, 0, false + } + + responseCodes := make([]int, 0, len(o.Responses.StatusCodeResponses)) + for k := range o.Responses.StatusCodeResponses { + if k >= 200 && k < 300 { + responseCodes = append(responseCodes, k) + } + } + if len(responseCodes) > 0 { + sort.Ints(responseCodes) + v := o.Responses.StatusCodeResponses[responseCodes[0]] + return &v, responseCodes[0], true + } + + return o.Responses.Default, 0, false +} + +// JSONLookup look up a value by the json property name +func (o Operation) JSONLookup(token string) (interface{}, error) { + if ex, ok := o.Extensions[token]; ok { + return &ex, nil + } + r, _, err := jsonpointer.GetForToken(o.OperationProps, token) + return r, err +} + +// UnmarshalJSON hydrates this items instance with the data from JSON +func (o *Operation) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &o.OperationProps); err != nil { + return err + } + return json.Unmarshal(data, &o.VendorExtensible) +} + +// MarshalJSON converts this items object to JSON +func (o Operation) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(o.OperationProps) + if err != nil { + return nil, err + } + b2, err := json.Marshal(o.VendorExtensible) + if err != nil { + return nil, err + } + concated := swag.ConcatJSON(b1, b2) + return concated, nil +} + +// NewOperation creates a new operation instance. 
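+//
+// A rough usage sketch (assumed, chaining the fluent setters defined below):
+//
+//	op := NewOperation("getPetByID").
+//		WithSummary("get a single pet by id").
+//		WithTags("pets").
+//		SecuredWith("api_key")
+//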
+// It expects an ID as parameter but not passing an ID is also valid. +func NewOperation(id string) *Operation { + op := new(Operation) + op.ID = id + return op +} + +// WithID sets the ID property on this operation, allows for chaining. +func (o *Operation) WithID(id string) *Operation { + o.ID = id + return o +} + +// WithDescription sets the description on this operation, allows for chaining +func (o *Operation) WithDescription(description string) *Operation { + o.Description = description + return o +} + +// WithSummary sets the summary on this operation, allows for chaining +func (o *Operation) WithSummary(summary string) *Operation { + o.Summary = summary + return o +} + +// WithExternalDocs sets/removes the external docs for/from this operation. +// When you pass empty strings as params the external documents will be removed. +// When you pass non-empty string as one value then those values will be used on the external docs object. +// So when you pass a non-empty description, you should also pass the url and vice versa. +func (o *Operation) WithExternalDocs(description, url string) *Operation { + if description == "" && url == "" { + o.ExternalDocs = nil + return o + } + + if o.ExternalDocs == nil { + o.ExternalDocs = &ExternalDocumentation{} + } + o.ExternalDocs.Description = description + o.ExternalDocs.URL = url + return o +} + +// Deprecate marks the operation as deprecated +func (o *Operation) Deprecate() *Operation { + o.Deprecated = true + return o +} + +// Undeprecate marks the operation as not deprected +func (o *Operation) Undeprecate() *Operation { + o.Deprecated = false + return o +} + +// WithConsumes adds media types for incoming body values +func (o *Operation) WithConsumes(mediaTypes ...string) *Operation { + o.Consumes = append(o.Consumes, mediaTypes...) + return o +} + +// WithProduces adds media types for outgoing body values +func (o *Operation) WithProduces(mediaTypes ...string) *Operation { + o.Produces = append(o.Produces, mediaTypes...) + return o +} + +// WithTags adds tags for this operation +func (o *Operation) WithTags(tags ...string) *Operation { + o.Tags = append(o.Tags, tags...) + return o +} + +// AddParam adds a parameter to this operation, when a parameter for that location +// and with that name already exists it will be replaced +func (o *Operation) AddParam(param *Parameter) *Operation { + if param == nil { + return o + } + + for i, p := range o.Parameters { + if p.Name == param.Name && p.In == param.In { + params := make([]Parameter, 0, len(o.Parameters)+1) + params = append(params, o.Parameters[:i]...) + params = append(params, *param) + params = append(params, o.Parameters[i+1:]...) + o.Parameters = params + + return o + } + } + + o.Parameters = append(o.Parameters, *param) + return o +} + +// RemoveParam removes a parameter from the operation +func (o *Operation) RemoveParam(name, in string) *Operation { + for i, p := range o.Parameters { + if p.Name == name && p.In == in { + o.Parameters = append(o.Parameters[:i], o.Parameters[i+1:]...) + return o + } + } + return o +} + +// SecuredWith adds a security scope to this operation. +func (o *Operation) SecuredWith(name string, scopes ...string) *Operation { + o.Security = append(o.Security, map[string][]string{name: scopes}) + return o +} + +// WithDefaultResponse adds a default response to the operation. 
+// Passing a nil value will remove the response +func (o *Operation) WithDefaultResponse(response *Response) *Operation { + return o.RespondsWith(0, response) +} + +// RespondsWith adds a status code response to the operation. +// When the code is 0 the value of the response will be used as default response value. +// When the value of the response is nil it will be removed from the operation +func (o *Operation) RespondsWith(code int, response *Response) *Operation { + if o.Responses == nil { + o.Responses = new(Responses) + } + if code == 0 { + o.Responses.Default = response + return o + } + if response == nil { + delete(o.Responses.StatusCodeResponses, code) + return o + } + if o.Responses.StatusCodeResponses == nil { + o.Responses.StatusCodeResponses = make(map[int]Response) + } + o.Responses.StatusCodeResponses[code] = *response + return o +} + +type opsAlias OperationProps + +type gobAlias struct { + Security []map[string]struct { + List []string + Pad bool + } + Alias *opsAlias + SecurityIsEmpty bool +} + +// GobEncode provides a safe gob encoder for Operation, including empty security requirements +func (o Operation) GobEncode() ([]byte, error) { + raw := struct { + Ext VendorExtensible + Props OperationProps + }{ + Ext: o.VendorExtensible, + Props: o.OperationProps, + } + var b bytes.Buffer + err := gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err +} + +// GobDecode provides a safe gob decoder for Operation, including empty security requirements +func (o *Operation) GobDecode(b []byte) error { + var raw struct { + Ext VendorExtensible + Props OperationProps + } + + buf := bytes.NewBuffer(b) + err := gob.NewDecoder(buf).Decode(&raw) + if err != nil { + return err + } + o.VendorExtensible = raw.Ext + o.OperationProps = raw.Props + return nil +} + +// GobEncode provides a safe gob encoder for Operation, including empty security requirements +func (op OperationProps) GobEncode() ([]byte, error) { + raw := gobAlias{ + Alias: (*opsAlias)(&op), + } + + var b bytes.Buffer + if op.Security == nil { + // nil security requirement + err := gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err + } + + if len(op.Security) == 0 { + // empty, but non-nil security requirement + raw.SecurityIsEmpty = true + raw.Alias.Security = nil + err := gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err + } + + raw.Security = make([]map[string]struct { + List []string + Pad bool + }, 0, len(op.Security)) + for _, req := range op.Security { + v := make(map[string]struct { + List []string + Pad bool + }, len(req)) + for k, val := range req { + v[k] = struct { + List []string + Pad bool + }{ + List: val, + } + } + raw.Security = append(raw.Security, v) + } + + err := gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err +} + +// GobDecode provides a safe gob decoder for Operation, including empty security requirements +func (op *OperationProps) GobDecode(b []byte) error { + var raw gobAlias + + buf := bytes.NewBuffer(b) + err := gob.NewDecoder(buf).Decode(&raw) + if err != nil { + return err + } + if raw.Alias == nil { + return nil + } + + switch { + case raw.SecurityIsEmpty: + // empty, but non-nil security requirement + raw.Alias.Security = []map[string][]string{} + case len(raw.Alias.Security) == 0: + // nil security requirement + raw.Alias.Security = nil + default: + raw.Alias.Security = make([]map[string][]string, 0, len(raw.Security)) + for _, req := range raw.Security { + v := make(map[string][]string, len(req)) + for k, val := range req { + v[k] = make([]string, 0, len(val.List)) + v[k] = 
append(v[k], val.List...) + } + raw.Alias.Security = append(raw.Alias.Security, v) + } + } + + *op = *(*OperationProps)(raw.Alias) + return nil +} diff --git a/vendor/github.com/go-openapi/spec/parameter.go b/vendor/github.com/go-openapi/spec/parameter.go new file mode 100644 index 0000000..bd4f1cd --- /dev/null +++ b/vendor/github.com/go-openapi/spec/parameter.go @@ -0,0 +1,326 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + "strings" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +// QueryParam creates a query parameter +func QueryParam(name string) *Parameter { + return &Parameter{ParamProps: ParamProps{Name: name, In: "query"}} +} + +// HeaderParam creates a header parameter, this is always required by default +func HeaderParam(name string) *Parameter { + return &Parameter{ParamProps: ParamProps{Name: name, In: "header", Required: true}} +} + +// PathParam creates a path parameter, this is always required +func PathParam(name string) *Parameter { + return &Parameter{ParamProps: ParamProps{Name: name, In: "path", Required: true}} +} + +// BodyParam creates a body parameter +func BodyParam(name string, schema *Schema) *Parameter { + return &Parameter{ParamProps: ParamProps{Name: name, In: "body", Schema: schema}} +} + +// FormDataParam creates a body parameter +func FormDataParam(name string) *Parameter { + return &Parameter{ParamProps: ParamProps{Name: name, In: "formData"}} +} + +// FileParam creates a body parameter +func FileParam(name string) *Parameter { + return &Parameter{ParamProps: ParamProps{Name: name, In: "formData"}, + SimpleSchema: SimpleSchema{Type: "file"}} +} + +// SimpleArrayParam creates a param for a simple array (string, int, date etc) +func SimpleArrayParam(name, tpe, fmt string) *Parameter { + return &Parameter{ParamProps: ParamProps{Name: name}, + SimpleSchema: SimpleSchema{Type: jsonArray, CollectionFormat: "csv", + Items: &Items{SimpleSchema: SimpleSchema{Type: tpe, Format: fmt}}}} +} + +// ParamRef creates a parameter that's a json reference +func ParamRef(uri string) *Parameter { + p := new(Parameter) + p.Ref = MustCreateRef(uri) + return p +} + +// ParamProps describes the specific attributes of an operation parameter +// +// NOTE: +// - Schema is defined when "in" == "body": see validate +// - AllowEmptyValue is allowed where "in" == "query" || "formData" +type ParamProps struct { + Description string `json:"description,omitempty"` + Name string `json:"name,omitempty"` + In string `json:"in,omitempty"` + Required bool `json:"required,omitempty"` + Schema *Schema `json:"schema,omitempty"` + AllowEmptyValue bool `json:"allowEmptyValue,omitempty"` +} + +// Parameter a unique parameter is defined by a combination of a [name](#parameterName) and [location](#parameterIn). +// +// There are five possible parameter types. 
+// - Path - Used together with [Path Templating](#pathTemplating), where the parameter value is actually part +// of the operation's URL. This does not include the host or base path of the API. For example, in `/items/{itemId}`, +// the path parameter is `itemId`. +// - Query - Parameters that are appended to the URL. For example, in `/items?id=###`, the query parameter is `id`. +// - Header - Custom headers that are expected as part of the request. +// - Body - The payload that's appended to the HTTP request. Since there can only be one payload, there can only be +// _one_ body parameter. The name of the body parameter has no effect on the parameter itself and is used for +// documentation purposes only. Since Form parameters are also in the payload, body and form parameters cannot exist +// together for the same operation. +// - Form - Used to describe the payload of an HTTP request when either `application/x-www-form-urlencoded` or +// `multipart/form-data` are used as the content type of the request (in Swagger's definition, +// the [`consumes`](#operationConsumes) property of an operation). This is the only parameter type that can be used +// to send files, thus supporting the `file` type. Since form parameters are sent in the payload, they cannot be +// declared together with a body parameter for the same operation. Form parameters have a different format based on +// the content-type used (for further details, consult http://www.w3.org/TR/html401/interact/forms.html#h-17.13.4). +// - `application/x-www-form-urlencoded` - Similar to the format of Query parameters but as a payload. +// For example, `foo=1&bar=swagger` - both `foo` and `bar` are form parameters. This is normally used for simple +// parameters that are being transferred. +// - `multipart/form-data` - each parameter takes a section in the payload with an internal header. +// For example, for the header `Content-Disposition: form-data; name="submit-name"` the name of the parameter is +// `submit-name`. This type of form parameters is more commonly used for file transfers. 
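+//
+// A short sketch (assumed usage of the constructors defined above):
+//
+//	limit := QueryParam("limit").Typed("integer", "int32").WithDefault(20)
+//	pet := BodyParam("pet", new(Schema)).AsRequired()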
+// +// For more information: http://goo.gl/8us55a#parameterObject +type Parameter struct { + Refable + CommonValidations + SimpleSchema + VendorExtensible + ParamProps +} + +// JSONLookup look up a value by the json property name +func (p Parameter) JSONLookup(token string) (interface{}, error) { + if ex, ok := p.Extensions[token]; ok { + return &ex, nil + } + if token == jsonRef { + return &p.Ref, nil + } + + r, _, err := jsonpointer.GetForToken(p.CommonValidations, token) + if err != nil && !strings.HasPrefix(err.Error(), "object has no field") { + return nil, err + } + if r != nil { + return r, nil + } + r, _, err = jsonpointer.GetForToken(p.SimpleSchema, token) + if err != nil && !strings.HasPrefix(err.Error(), "object has no field") { + return nil, err + } + if r != nil { + return r, nil + } + r, _, err = jsonpointer.GetForToken(p.ParamProps, token) + return r, err +} + +// WithDescription a fluent builder method for the description of the parameter +func (p *Parameter) WithDescription(description string) *Parameter { + p.Description = description + return p +} + +// Named a fluent builder method to override the name of the parameter +func (p *Parameter) Named(name string) *Parameter { + p.Name = name + return p +} + +// WithLocation a fluent builder method to override the location of the parameter +func (p *Parameter) WithLocation(in string) *Parameter { + p.In = in + return p +} + +// Typed a fluent builder method for the type of the parameter value +func (p *Parameter) Typed(tpe, format string) *Parameter { + p.Type = tpe + p.Format = format + return p +} + +// CollectionOf a fluent builder method for an array parameter +func (p *Parameter) CollectionOf(items *Items, format string) *Parameter { + p.Type = jsonArray + p.Items = items + p.CollectionFormat = format + return p +} + +// WithDefault sets the default value on this parameter +func (p *Parameter) WithDefault(defaultValue interface{}) *Parameter { + p.AsOptional() // with default implies optional + p.Default = defaultValue + return p +} + +// AllowsEmptyValues flags this parameter as being ok with empty values +func (p *Parameter) AllowsEmptyValues() *Parameter { + p.AllowEmptyValue = true + return p +} + +// NoEmptyValues flags this parameter as not liking empty values +func (p *Parameter) NoEmptyValues() *Parameter { + p.AllowEmptyValue = false + return p +} + +// AsOptional flags this parameter as optional +func (p *Parameter) AsOptional() *Parameter { + p.Required = false + return p +} + +// AsRequired flags this parameter as required +func (p *Parameter) AsRequired() *Parameter { + if p.Default != nil { // with a default required makes no sense + return p + } + p.Required = true + return p +} + +// WithMaxLength sets a max length value +func (p *Parameter) WithMaxLength(max int64) *Parameter { + p.MaxLength = &max + return p +} + +// WithMinLength sets a min length value +func (p *Parameter) WithMinLength(min int64) *Parameter { + p.MinLength = &min + return p +} + +// WithPattern sets a pattern value +func (p *Parameter) WithPattern(pattern string) *Parameter { + p.Pattern = pattern + return p +} + +// WithMultipleOf sets a multiple of value +func (p *Parameter) WithMultipleOf(number float64) *Parameter { + p.MultipleOf = &number + return p +} + +// WithMaximum sets a maximum number value +func (p *Parameter) WithMaximum(max float64, exclusive bool) *Parameter { + p.Maximum = &max + p.ExclusiveMaximum = exclusive + return p +} + +// WithMinimum sets a minimum number value +func (p *Parameter) WithMinimum(min float64, 
exclusive bool) *Parameter { + p.Minimum = &min + p.ExclusiveMinimum = exclusive + return p +} + +// WithEnum sets a the enum values (replace) +func (p *Parameter) WithEnum(values ...interface{}) *Parameter { + p.Enum = append([]interface{}{}, values...) + return p +} + +// WithMaxItems sets the max items +func (p *Parameter) WithMaxItems(size int64) *Parameter { + p.MaxItems = &size + return p +} + +// WithMinItems sets the min items +func (p *Parameter) WithMinItems(size int64) *Parameter { + p.MinItems = &size + return p +} + +// UniqueValues dictates that this array can only have unique items +func (p *Parameter) UniqueValues() *Parameter { + p.UniqueItems = true + return p +} + +// AllowDuplicates this array can have duplicates +func (p *Parameter) AllowDuplicates() *Parameter { + p.UniqueItems = false + return p +} + +// WithValidations is a fluent method to set parameter validations +func (p *Parameter) WithValidations(val CommonValidations) *Parameter { + p.SetValidations(SchemaValidations{CommonValidations: val}) + return p +} + +// UnmarshalJSON hydrates this items instance with the data from JSON +func (p *Parameter) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &p.CommonValidations); err != nil { + return err + } + if err := json.Unmarshal(data, &p.Refable); err != nil { + return err + } + if err := json.Unmarshal(data, &p.SimpleSchema); err != nil { + return err + } + if err := json.Unmarshal(data, &p.VendorExtensible); err != nil { + return err + } + return json.Unmarshal(data, &p.ParamProps) +} + +// MarshalJSON converts this items object to JSON +func (p Parameter) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(p.CommonValidations) + if err != nil { + return nil, err + } + b2, err := json.Marshal(p.SimpleSchema) + if err != nil { + return nil, err + } + b3, err := json.Marshal(p.Refable) + if err != nil { + return nil, err + } + b4, err := json.Marshal(p.VendorExtensible) + if err != nil { + return nil, err + } + b5, err := json.Marshal(p.ParamProps) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b3, b1, b2, b4, b5), nil +} diff --git a/vendor/github.com/go-openapi/spec/path_item.go b/vendor/github.com/go-openapi/spec/path_item.go new file mode 100644 index 0000000..68fc8e9 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/path_item.go @@ -0,0 +1,87 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package spec + +import ( + "encoding/json" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +// PathItemProps the path item specific properties +type PathItemProps struct { + Get *Operation `json:"get,omitempty"` + Put *Operation `json:"put,omitempty"` + Post *Operation `json:"post,omitempty"` + Delete *Operation `json:"delete,omitempty"` + Options *Operation `json:"options,omitempty"` + Head *Operation `json:"head,omitempty"` + Patch *Operation `json:"patch,omitempty"` + Parameters []Parameter `json:"parameters,omitempty"` +} + +// PathItem describes the operations available on a single path. +// A Path Item may be empty, due to [ACL constraints](http://goo.gl/8us55a#securityFiltering). +// The path itself is still exposed to the documentation viewer but they will +// not know which operations and parameters are available. +// +// For more information: http://goo.gl/8us55a#pathItemObject +type PathItem struct { + Refable + VendorExtensible + PathItemProps +} + +// JSONLookup look up a value by the json property name +func (p PathItem) JSONLookup(token string) (interface{}, error) { + if ex, ok := p.Extensions[token]; ok { + return &ex, nil + } + if token == jsonRef { + return &p.Ref, nil + } + r, _, err := jsonpointer.GetForToken(p.PathItemProps, token) + return r, err +} + +// UnmarshalJSON hydrates this items instance with the data from JSON +func (p *PathItem) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &p.Refable); err != nil { + return err + } + if err := json.Unmarshal(data, &p.VendorExtensible); err != nil { + return err + } + return json.Unmarshal(data, &p.PathItemProps) +} + +// MarshalJSON converts this items object to JSON +func (p PathItem) MarshalJSON() ([]byte, error) { + b3, err := json.Marshal(p.Refable) + if err != nil { + return nil, err + } + b4, err := json.Marshal(p.VendorExtensible) + if err != nil { + return nil, err + } + b5, err := json.Marshal(p.PathItemProps) + if err != nil { + return nil, err + } + concated := swag.ConcatJSON(b3, b4, b5) + return concated, nil +} diff --git a/vendor/github.com/go-openapi/spec/paths.go b/vendor/github.com/go-openapi/spec/paths.go new file mode 100644 index 0000000..9dc82a2 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/paths.go @@ -0,0 +1,97 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/go-openapi/swag" +) + +// Paths holds the relative paths to the individual endpoints. +// The path is appended to the [`basePath`](http://goo.gl/8us55a#swaggerBasePath) in order +// to construct the full URL. +// The Paths may be empty, due to [ACL constraints](http://goo.gl/8us55a#securityFiltering). 
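+//
+// Only keys starting with "/" (path templates) or "x-" (extensions) are recognized
+// by the custom (un)marshalers below, e.g. (made-up values):
+//
+//	{
+//	  "/pets/{id}": {
+//	    "get": { "summary": "find a pet by id" }
+//	  }
+//	}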
+// +// For more information: http://goo.gl/8us55a#pathsObject +type Paths struct { + VendorExtensible + Paths map[string]PathItem `json:"-"` // custom serializer to flatten this, each entry must start with "/" +} + +// JSONLookup look up a value by the json property name +func (p Paths) JSONLookup(token string) (interface{}, error) { + if pi, ok := p.Paths[token]; ok { + return &pi, nil + } + if ex, ok := p.Extensions[token]; ok { + return &ex, nil + } + return nil, fmt.Errorf("object has no field %q", token) +} + +// UnmarshalJSON hydrates this items instance with the data from JSON +func (p *Paths) UnmarshalJSON(data []byte) error { + var res map[string]json.RawMessage + if err := json.Unmarshal(data, &res); err != nil { + return err + } + for k, v := range res { + if strings.HasPrefix(strings.ToLower(k), "x-") { + if p.Extensions == nil { + p.Extensions = make(map[string]interface{}) + } + var d interface{} + if err := json.Unmarshal(v, &d); err != nil { + return err + } + p.Extensions[k] = d + } + if strings.HasPrefix(k, "/") { + if p.Paths == nil { + p.Paths = make(map[string]PathItem) + } + var pi PathItem + if err := json.Unmarshal(v, &pi); err != nil { + return err + } + p.Paths[k] = pi + } + } + return nil +} + +// MarshalJSON converts this items object to JSON +func (p Paths) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(p.VendorExtensible) + if err != nil { + return nil, err + } + + pths := make(map[string]PathItem) + for k, v := range p.Paths { + if strings.HasPrefix(k, "/") { + pths[k] = v + } + } + b2, err := json.Marshal(pths) + if err != nil { + return nil, err + } + concated := swag.ConcatJSON(b1, b2) + return concated, nil +} diff --git a/vendor/github.com/go-openapi/spec/properties.go b/vendor/github.com/go-openapi/spec/properties.go new file mode 100644 index 0000000..91d2435 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/properties.go @@ -0,0 +1,91 @@ +package spec + +import ( + "bytes" + "encoding/json" + "reflect" + "sort" +) + +// OrderSchemaItem holds a named schema (e.g. from a property of an object) +type OrderSchemaItem struct { + Name string + Schema +} + +// OrderSchemaItems is a sortable slice of named schemas. +// The ordering is defined by the x-order schema extension. +type OrderSchemaItems []OrderSchemaItem + +// MarshalJSON produces a json object with keys defined by the name schemas +// of the OrderSchemaItems slice, keeping the original order of the slice. 
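+//
+// For instance, a slice holding items named "b" then "a" (in that order) marshals to
+// {"b":{...},"a":{...}} rather than being re-ordered alphabetically.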
+func (items OrderSchemaItems) MarshalJSON() ([]byte, error) { + buf := bytes.NewBuffer(nil) + buf.WriteString("{") + for i := range items { + if i > 0 { + buf.WriteString(",") + } + buf.WriteString("\"") + buf.WriteString(items[i].Name) + buf.WriteString("\":") + bs, err := json.Marshal(&items[i].Schema) + if err != nil { + return nil, err + } + buf.Write(bs) + } + buf.WriteString("}") + return buf.Bytes(), nil +} + +func (items OrderSchemaItems) Len() int { return len(items) } +func (items OrderSchemaItems) Swap(i, j int) { items[i], items[j] = items[j], items[i] } +func (items OrderSchemaItems) Less(i, j int) (ret bool) { + ii, oki := items[i].Extensions.GetInt("x-order") + ij, okj := items[j].Extensions.GetInt("x-order") + if oki { + if okj { + defer func() { + if err := recover(); err != nil { + defer func() { + if err = recover(); err != nil { + ret = items[i].Name < items[j].Name + } + }() + ret = reflect.ValueOf(ii).String() < reflect.ValueOf(ij).String() + } + }() + return ii < ij + } + return true + } else if okj { + return false + } + return items[i].Name < items[j].Name +} + +// SchemaProperties is a map representing the properties of a Schema object. +// It knows how to transform its keys into an ordered slice. +type SchemaProperties map[string]Schema + +// ToOrderedSchemaItems transforms the map of properties into a sortable slice +func (properties SchemaProperties) ToOrderedSchemaItems() OrderSchemaItems { + items := make(OrderSchemaItems, 0, len(properties)) + for k, v := range properties { + items = append(items, OrderSchemaItem{ + Name: k, + Schema: v, + }) + } + sort.Sort(items) + return items +} + +// MarshalJSON produces properties as json, keeping their order. +func (properties SchemaProperties) MarshalJSON() ([]byte, error) { + if properties == nil { + return []byte("null"), nil + } + return json.Marshal(properties.ToOrderedSchemaItems()) +} diff --git a/vendor/github.com/go-openapi/spec/ref.go b/vendor/github.com/go-openapi/spec/ref.go new file mode 100644 index 0000000..b0ef9bd --- /dev/null +++ b/vendor/github.com/go-openapi/spec/ref.go @@ -0,0 +1,193 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package spec + +import ( + "bytes" + "encoding/gob" + "encoding/json" + "net/http" + "os" + "path/filepath" + + "github.com/go-openapi/jsonreference" +) + +// Refable is a struct for things that accept a $ref property +type Refable struct { + Ref Ref +} + +// MarshalJSON marshals the ref to json +func (r Refable) MarshalJSON() ([]byte, error) { + return r.Ref.MarshalJSON() +} + +// UnmarshalJSON unmarshalss the ref from json +func (r *Refable) UnmarshalJSON(d []byte) error { + return json.Unmarshal(d, &r.Ref) +} + +// Ref represents a json reference that is potentially resolved +type Ref struct { + jsonreference.Ref +} + +// RemoteURI gets the remote uri part of the ref +func (r *Ref) RemoteURI() string { + if r.String() == "" { + return "" + } + + u := *r.GetURL() + u.Fragment = "" + return u.String() +} + +// IsValidURI returns true when the url the ref points to can be found +func (r *Ref) IsValidURI(basepaths ...string) bool { + if r.String() == "" { + return true + } + + v := r.RemoteURI() + if v == "" { + return true + } + + if r.HasFullURL { + //nolint:noctx,gosec + rr, err := http.Get(v) + if err != nil { + return false + } + defer rr.Body.Close() + + return rr.StatusCode/100 == 2 + } + + if !(r.HasFileScheme || r.HasFullFilePath || r.HasURLPathOnly) { + return false + } + + // check for local file + pth := v + if r.HasURLPathOnly { + base := "." + if len(basepaths) > 0 { + base = filepath.Dir(filepath.Join(basepaths...)) + } + p, e := filepath.Abs(filepath.ToSlash(filepath.Join(base, pth))) + if e != nil { + return false + } + pth = p + } + + fi, err := os.Stat(filepath.ToSlash(pth)) + if err != nil { + return false + } + + return !fi.IsDir() +} + +// Inherits creates a new reference from a parent and a child +// If the child cannot inherit from the parent, an error is returned +func (r *Ref) Inherits(child Ref) (*Ref, error) { + ref, err := r.Ref.Inherits(child.Ref) + if err != nil { + return nil, err + } + return &Ref{Ref: *ref}, nil +} + +// NewRef creates a new instance of a ref object +// returns an error when the reference uri is an invalid uri +func NewRef(refURI string) (Ref, error) { + ref, err := jsonreference.New(refURI) + if err != nil { + return Ref{}, err + } + return Ref{Ref: ref}, nil +} + +// MustCreateRef creates a ref object but panics when refURI is invalid. +// Use the NewRef method for a version that returns an error. 
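+//
+// For example (assumed usage):
+//
+//	petRef := MustCreateRef("#/definitions/Pet") // a fragment-only $ref; valid, so no panic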
+func MustCreateRef(refURI string) Ref { + return Ref{Ref: jsonreference.MustCreateRef(refURI)} +} + +// MarshalJSON marshals this ref into a JSON object +func (r Ref) MarshalJSON() ([]byte, error) { + str := r.String() + if str == "" { + if r.IsRoot() { + return []byte(`{"$ref":""}`), nil + } + return []byte("{}"), nil + } + v := map[string]interface{}{"$ref": str} + return json.Marshal(v) +} + +// UnmarshalJSON unmarshals this ref from a JSON object +func (r *Ref) UnmarshalJSON(d []byte) error { + var v map[string]interface{} + if err := json.Unmarshal(d, &v); err != nil { + return err + } + return r.fromMap(v) +} + +// GobEncode provides a safe gob encoder for Ref +func (r Ref) GobEncode() ([]byte, error) { + var b bytes.Buffer + raw, err := r.MarshalJSON() + if err != nil { + return nil, err + } + err = gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err +} + +// GobDecode provides a safe gob decoder for Ref +func (r *Ref) GobDecode(b []byte) error { + var raw []byte + buf := bytes.NewBuffer(b) + err := gob.NewDecoder(buf).Decode(&raw) + if err != nil { + return err + } + return json.Unmarshal(raw, r) +} + +func (r *Ref) fromMap(v map[string]interface{}) error { + if v == nil { + return nil + } + + if vv, ok := v["$ref"]; ok { + if str, ok := vv.(string); ok { + ref, err := jsonreference.New(str) + if err != nil { + return err + } + *r = Ref{Ref: ref} + } + } + + return nil +} diff --git a/vendor/github.com/go-openapi/spec/resolver.go b/vendor/github.com/go-openapi/spec/resolver.go new file mode 100644 index 0000000..47d1ee1 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/resolver.go @@ -0,0 +1,127 @@ +package spec + +import ( + "fmt" + + "github.com/go-openapi/swag" +) + +func resolveAnyWithBase(root interface{}, ref *Ref, result interface{}, options *ExpandOptions) error { + options = optionsOrDefault(options) + resolver := defaultSchemaLoader(root, options, nil, nil) + + if err := resolver.Resolve(ref, result, options.RelativeBase); err != nil { + return err + } + + return nil +} + +// ResolveRefWithBase resolves a reference against a context root with preservation of base path +func ResolveRefWithBase(root interface{}, ref *Ref, options *ExpandOptions) (*Schema, error) { + result := new(Schema) + + if err := resolveAnyWithBase(root, ref, result, options); err != nil { + return nil, err + } + + return result, nil +} + +// ResolveRef resolves a reference for a schema against a context root +// ref is guaranteed to be in root (no need to go to external files) +// +// ResolveRef is ONLY called from the code generation module +func ResolveRef(root interface{}, ref *Ref) (*Schema, error) { + res, _, err := ref.GetPointer().Get(root) + if err != nil { + return nil, err + } + + switch sch := res.(type) { + case Schema: + return &sch, nil + case *Schema: + return sch, nil + case map[string]interface{}: + newSch := new(Schema) + if err = swag.DynamicJSONToStruct(sch, newSch); err != nil { + return nil, err + } + return newSch, nil + default: + return nil, fmt.Errorf("type: %T: %w", sch, ErrUnknownTypeForReference) + } +} + +// ResolveParameterWithBase resolves a parameter reference against a context root and base path +func ResolveParameterWithBase(root interface{}, ref Ref, options *ExpandOptions) (*Parameter, error) { + result := new(Parameter) + + if err := resolveAnyWithBase(root, &ref, result, options); err != nil { + return nil, err + } + + return result, nil +} + +// ResolveParameter resolves a parameter reference against a context root +func ResolveParameter(root 
interface{}, ref Ref) (*Parameter, error) { + return ResolveParameterWithBase(root, ref, nil) +} + +// ResolveResponseWithBase resolves response a reference against a context root and base path +func ResolveResponseWithBase(root interface{}, ref Ref, options *ExpandOptions) (*Response, error) { + result := new(Response) + + err := resolveAnyWithBase(root, &ref, result, options) + if err != nil { + return nil, err + } + + return result, nil +} + +// ResolveResponse resolves response a reference against a context root +func ResolveResponse(root interface{}, ref Ref) (*Response, error) { + return ResolveResponseWithBase(root, ref, nil) +} + +// ResolvePathItemWithBase resolves response a path item against a context root and base path +func ResolvePathItemWithBase(root interface{}, ref Ref, options *ExpandOptions) (*PathItem, error) { + result := new(PathItem) + + if err := resolveAnyWithBase(root, &ref, result, options); err != nil { + return nil, err + } + + return result, nil +} + +// ResolvePathItem resolves response a path item against a context root and base path +// +// Deprecated: use ResolvePathItemWithBase instead +func ResolvePathItem(root interface{}, ref Ref, options *ExpandOptions) (*PathItem, error) { + return ResolvePathItemWithBase(root, ref, options) +} + +// ResolveItemsWithBase resolves parameter items reference against a context root and base path. +// +// NOTE: stricly speaking, this construct is not supported by Swagger 2.0. +// Similarly, $ref are forbidden in response headers. +func ResolveItemsWithBase(root interface{}, ref Ref, options *ExpandOptions) (*Items, error) { + result := new(Items) + + if err := resolveAnyWithBase(root, &ref, result, options); err != nil { + return nil, err + } + + return result, nil +} + +// ResolveItems resolves parameter items reference against a context root and base path. +// +// Deprecated: use ResolveItemsWithBase instead +func ResolveItems(root interface{}, ref Ref, options *ExpandOptions) (*Items, error) { + return ResolveItemsWithBase(root, ref, options) +} diff --git a/vendor/github.com/go-openapi/spec/response.go b/vendor/github.com/go-openapi/spec/response.go new file mode 100644 index 0000000..0340b60 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/response.go @@ -0,0 +1,152 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +// ResponseProps properties specific to a response +type ResponseProps struct { + Description string `json:"description"` + Schema *Schema `json:"schema,omitempty"` + Headers map[string]Header `json:"headers,omitempty"` + Examples map[string]interface{} `json:"examples,omitempty"` +} + +// Response describes a single response from an API Operation. 
+// +// For more information: http://goo.gl/8us55a#responseObject +type Response struct { + Refable + ResponseProps + VendorExtensible +} + +// JSONLookup look up a value by the json property name +func (r Response) JSONLookup(token string) (interface{}, error) { + if ex, ok := r.Extensions[token]; ok { + return &ex, nil + } + if token == "$ref" { + return &r.Ref, nil + } + ptr, _, err := jsonpointer.GetForToken(r.ResponseProps, token) + return ptr, err +} + +// UnmarshalJSON hydrates this items instance with the data from JSON +func (r *Response) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &r.ResponseProps); err != nil { + return err + } + if err := json.Unmarshal(data, &r.Refable); err != nil { + return err + } + return json.Unmarshal(data, &r.VendorExtensible) +} + +// MarshalJSON converts this items object to JSON +func (r Response) MarshalJSON() ([]byte, error) { + var ( + b1 []byte + err error + ) + + if r.Ref.String() == "" { + // when there is no $ref, empty description is rendered as an empty string + b1, err = json.Marshal(r.ResponseProps) + } else { + // when there is $ref inside the schema, description should be omitempty-ied + b1, err = json.Marshal(struct { + Description string `json:"description,omitempty"` + Schema *Schema `json:"schema,omitempty"` + Headers map[string]Header `json:"headers,omitempty"` + Examples map[string]interface{} `json:"examples,omitempty"` + }{ + Description: r.ResponseProps.Description, + Schema: r.ResponseProps.Schema, + Examples: r.ResponseProps.Examples, + }) + } + if err != nil { + return nil, err + } + + b2, err := json.Marshal(r.Refable) + if err != nil { + return nil, err + } + b3, err := json.Marshal(r.VendorExtensible) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b1, b2, b3), nil +} + +// NewResponse creates a new response instance +func NewResponse() *Response { + return new(Response) +} + +// ResponseRef creates a response as a json reference +func ResponseRef(url string) *Response { + resp := NewResponse() + resp.Ref = MustCreateRef(url) + return resp +} + +// WithDescription sets the description on this response, allows for chaining +func (r *Response) WithDescription(description string) *Response { + r.Description = description + return r +} + +// WithSchema sets the schema on this response, allows for chaining. 
+// Passing a nil argument removes the schema from this response +func (r *Response) WithSchema(schema *Schema) *Response { + r.Schema = schema + return r +} + +// AddHeader adds a header to this response +func (r *Response) AddHeader(name string, header *Header) *Response { + if header == nil { + return r.RemoveHeader(name) + } + if r.Headers == nil { + r.Headers = make(map[string]Header) + } + r.Headers[name] = *header + return r +} + +// RemoveHeader removes a header from this response +func (r *Response) RemoveHeader(name string) *Response { + delete(r.Headers, name) + return r +} + +// AddExample adds an example to this response +func (r *Response) AddExample(mediaType string, example interface{}) *Response { + if r.Examples == nil { + r.Examples = make(map[string]interface{}) + } + r.Examples[mediaType] = example + return r +} diff --git a/vendor/github.com/go-openapi/spec/responses.go b/vendor/github.com/go-openapi/spec/responses.go new file mode 100644 index 0000000..16c3076 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/responses.go @@ -0,0 +1,140 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + "fmt" + "reflect" + "strconv" + "strings" + + "github.com/go-openapi/swag" +) + +// Responses is a container for the expected responses of an operation. +// The container maps a HTTP response code to the expected response. +// It is not expected from the documentation to necessarily cover all possible HTTP response codes, +// since they may not be known in advance. However, it is expected from the documentation to cover +// a successful operation response and any known errors. +// +// The `default` can be used a default response object for all HTTP codes that are not covered +// individually by the specification. +// +// The `Responses Object` MUST contain at least one response code, and it SHOULD be the response +// for a successful operation call. 
+// +// For more information: http://goo.gl/8us55a#responsesObject +type Responses struct { + VendorExtensible + ResponsesProps +} + +// JSONLookup implements an interface to customize json pointer lookup +func (r Responses) JSONLookup(token string) (interface{}, error) { + if token == "default" { + return r.Default, nil + } + if ex, ok := r.Extensions[token]; ok { + return &ex, nil + } + if i, err := strconv.Atoi(token); err == nil { + if scr, ok := r.StatusCodeResponses[i]; ok { + return scr, nil + } + } + return nil, fmt.Errorf("object has no field %q", token) +} + +// UnmarshalJSON hydrates this items instance with the data from JSON +func (r *Responses) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &r.ResponsesProps); err != nil { + return err + } + + if err := json.Unmarshal(data, &r.VendorExtensible); err != nil { + return err + } + if reflect.DeepEqual(ResponsesProps{}, r.ResponsesProps) { + r.ResponsesProps = ResponsesProps{} + } + return nil +} + +// MarshalJSON converts this items object to JSON +func (r Responses) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(r.ResponsesProps) + if err != nil { + return nil, err + } + b2, err := json.Marshal(r.VendorExtensible) + if err != nil { + return nil, err + } + concated := swag.ConcatJSON(b1, b2) + return concated, nil +} + +// ResponsesProps describes all responses for an operation. +// It tells what is the default response and maps all responses with a +// HTTP status code. +type ResponsesProps struct { + Default *Response + StatusCodeResponses map[int]Response +} + +// MarshalJSON marshals responses as JSON +func (r ResponsesProps) MarshalJSON() ([]byte, error) { + toser := map[string]Response{} + if r.Default != nil { + toser["default"] = *r.Default + } + for k, v := range r.StatusCodeResponses { + toser[strconv.Itoa(k)] = v + } + return json.Marshal(toser) +} + +// UnmarshalJSON unmarshals responses from JSON +func (r *ResponsesProps) UnmarshalJSON(data []byte) error { + var res map[string]json.RawMessage + if err := json.Unmarshal(data, &res); err != nil { + return err + } + + if v, ok := res["default"]; ok { + var defaultRes Response + if err := json.Unmarshal(v, &defaultRes); err != nil { + return err + } + r.Default = &defaultRes + delete(res, "default") + } + for k, v := range res { + if !strings.HasPrefix(k, "x-") { + var statusCodeResp Response + if err := json.Unmarshal(v, &statusCodeResp); err != nil { + return err + } + if nk, err := strconv.Atoi(k); err == nil { + if r.StatusCodeResponses == nil { + r.StatusCodeResponses = map[int]Response{} + } + r.StatusCodeResponses[nk] = statusCodeResp + } + } + } + return nil +} diff --git a/vendor/github.com/go-openapi/spec/schema.go b/vendor/github.com/go-openapi/spec/schema.go new file mode 100644 index 0000000..4e9be85 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/schema.go @@ -0,0 +1,645 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
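Aside: an illustrative sketch of the fluent Schema builders defined in the vendored schema.go that follows (not part of the vendored source or of this patch; it assumes the package is imported as github.com/go-openapi/spec). The builders return *Schema so calls can be chained, and properties are serialized in a deterministic order via SchemaProperties.

package main

import (
    "fmt"

    "github.com/go-openapi/spec"
)

func main() {
    // Build an object schema with one required, non-empty string property.
    s := new(spec.Schema).
        Typed("object", "").
        WithDescription("a pet").
        SetProperty("name", *spec.StringProperty().WithMinLength(1)).
        WithRequired("name")

    // Properties keep a deterministic order when marshalled
    // (SchemaProperties sorts them by x-order, then by name).
    b, err := s.MarshalJSON()
    if err != nil {
        panic(err)
    }
    fmt.Println(string(b))
}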
+ +package spec + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +// BooleanProperty creates a boolean property +func BooleanProperty() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"boolean"}}} +} + +// BoolProperty creates a boolean property +func BoolProperty() *Schema { return BooleanProperty() } + +// StringProperty creates a string property +func StringProperty() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}}} +} + +// CharProperty creates a string property +func CharProperty() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}}} +} + +// Float64Property creates a float64/double property +func Float64Property() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"number"}, Format: "double"}} +} + +// Float32Property creates a float32/float property +func Float32Property() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"number"}, Format: "float"}} +} + +// Int8Property creates an int8 property +func Int8Property() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"integer"}, Format: "int8"}} +} + +// Int16Property creates an int16 property +func Int16Property() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"integer"}, Format: "int16"}} +} + +// Int32Property creates an int32 property +func Int32Property() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"integer"}, Format: "int32"}} +} + +// Int64Property creates an int64 property +func Int64Property() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"integer"}, Format: "int64"}} +} + +// StrFmtProperty creates a property for the named string format +func StrFmtProperty(format string) *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}, Format: format}} +} + +// DateProperty creates a date property +func DateProperty() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}, Format: "date"}} +} + +// DateTimeProperty creates a date time property +func DateTimeProperty() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}, Format: "date-time"}} +} + +// MapProperty creates a map property +func MapProperty(property *Schema) *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"object"}, + AdditionalProperties: &SchemaOrBool{Allows: true, Schema: property}}} +} + +// RefProperty creates a ref property +func RefProperty(name string) *Schema { + return &Schema{SchemaProps: SchemaProps{Ref: MustCreateRef(name)}} +} + +// RefSchema creates a ref property +func RefSchema(name string) *Schema { + return &Schema{SchemaProps: SchemaProps{Ref: MustCreateRef(name)}} +} + +// ArrayProperty creates an array property +func ArrayProperty(items *Schema) *Schema { + if items == nil { + return &Schema{SchemaProps: SchemaProps{Type: []string{"array"}}} + } + return &Schema{SchemaProps: SchemaProps{Items: &SchemaOrArray{Schema: items}, Type: []string{"array"}}} +} + +// ComposedSchema creates a schema with allOf +func ComposedSchema(schemas ...Schema) *Schema { + s := new(Schema) + s.AllOf = schemas + return s +} + +// SchemaURL represents a schema url +type SchemaURL string + +// MarshalJSON marshal this to JSON +func (r SchemaURL) MarshalJSON() ([]byte, error) { + if r == "" { + return []byte("{}"), nil + } + v := map[string]interface{}{"$schema": string(r)} + return json.Marshal(v) +} + +// UnmarshalJSON 
unmarshal this from JSON +func (r *SchemaURL) UnmarshalJSON(data []byte) error { + var v map[string]interface{} + if err := json.Unmarshal(data, &v); err != nil { + return err + } + return r.fromMap(v) +} + +func (r *SchemaURL) fromMap(v map[string]interface{}) error { + if v == nil { + return nil + } + if vv, ok := v["$schema"]; ok { + if str, ok := vv.(string); ok { + u, err := parseURL(str) + if err != nil { + return err + } + + *r = SchemaURL(u.String()) + } + } + return nil +} + +// SchemaProps describes a JSON schema (draft 4) +type SchemaProps struct { + ID string `json:"id,omitempty"` + Ref Ref `json:"-"` + Schema SchemaURL `json:"-"` + Description string `json:"description,omitempty"` + Type StringOrArray `json:"type,omitempty"` + Nullable bool `json:"nullable,omitempty"` + Format string `json:"format,omitempty"` + Title string `json:"title,omitempty"` + Default interface{} `json:"default,omitempty"` + Maximum *float64 `json:"maximum,omitempty"` + ExclusiveMaximum bool `json:"exclusiveMaximum,omitempty"` + Minimum *float64 `json:"minimum,omitempty"` + ExclusiveMinimum bool `json:"exclusiveMinimum,omitempty"` + MaxLength *int64 `json:"maxLength,omitempty"` + MinLength *int64 `json:"minLength,omitempty"` + Pattern string `json:"pattern,omitempty"` + MaxItems *int64 `json:"maxItems,omitempty"` + MinItems *int64 `json:"minItems,omitempty"` + UniqueItems bool `json:"uniqueItems,omitempty"` + MultipleOf *float64 `json:"multipleOf,omitempty"` + Enum []interface{} `json:"enum,omitempty"` + MaxProperties *int64 `json:"maxProperties,omitempty"` + MinProperties *int64 `json:"minProperties,omitempty"` + Required []string `json:"required,omitempty"` + Items *SchemaOrArray `json:"items,omitempty"` + AllOf []Schema `json:"allOf,omitempty"` + OneOf []Schema `json:"oneOf,omitempty"` + AnyOf []Schema `json:"anyOf,omitempty"` + Not *Schema `json:"not,omitempty"` + Properties SchemaProperties `json:"properties,omitempty"` + AdditionalProperties *SchemaOrBool `json:"additionalProperties,omitempty"` + PatternProperties SchemaProperties `json:"patternProperties,omitempty"` + Dependencies Dependencies `json:"dependencies,omitempty"` + AdditionalItems *SchemaOrBool `json:"additionalItems,omitempty"` + Definitions Definitions `json:"definitions,omitempty"` +} + +// SwaggerSchemaProps are additional properties supported by swagger schemas, but not JSON-schema (draft 4) +type SwaggerSchemaProps struct { + Discriminator string `json:"discriminator,omitempty"` + ReadOnly bool `json:"readOnly,omitempty"` + XML *XMLObject `json:"xml,omitempty"` + ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"` + Example interface{} `json:"example,omitempty"` +} + +// Schema the schema object allows the definition of input and output data types. +// These types can be objects, but also primitives and arrays. +// This object is based on the [JSON Schema Specification Draft 4](http://json-schema.org/) +// and uses a predefined subset of it. +// On top of this subset, there are extensions provided by this specification to allow for more complete documentation. 
+// +// For more information: http://goo.gl/8us55a#schemaObject +type Schema struct { + VendorExtensible + SchemaProps + SwaggerSchemaProps + ExtraProps map[string]interface{} `json:"-"` +} + +// JSONLookup implements an interface to customize json pointer lookup +func (s Schema) JSONLookup(token string) (interface{}, error) { + if ex, ok := s.Extensions[token]; ok { + return &ex, nil + } + + if ex, ok := s.ExtraProps[token]; ok { + return &ex, nil + } + + r, _, err := jsonpointer.GetForToken(s.SchemaProps, token) + if r != nil || (err != nil && !strings.HasPrefix(err.Error(), "object has no field")) { + return r, err + } + r, _, err = jsonpointer.GetForToken(s.SwaggerSchemaProps, token) + return r, err +} + +// WithID sets the id for this schema, allows for chaining +func (s *Schema) WithID(id string) *Schema { + s.ID = id + return s +} + +// WithTitle sets the title for this schema, allows for chaining +func (s *Schema) WithTitle(title string) *Schema { + s.Title = title + return s +} + +// WithDescription sets the description for this schema, allows for chaining +func (s *Schema) WithDescription(description string) *Schema { + s.Description = description + return s +} + +// WithProperties sets the properties for this schema +func (s *Schema) WithProperties(schemas map[string]Schema) *Schema { + s.Properties = schemas + return s +} + +// SetProperty sets a property on this schema +func (s *Schema) SetProperty(name string, schema Schema) *Schema { + if s.Properties == nil { + s.Properties = make(map[string]Schema) + } + s.Properties[name] = schema + return s +} + +// WithAllOf sets the all of property +func (s *Schema) WithAllOf(schemas ...Schema) *Schema { + s.AllOf = schemas + return s +} + +// WithMaxProperties sets the max number of properties an object can have +func (s *Schema) WithMaxProperties(max int64) *Schema { + s.MaxProperties = &max + return s +} + +// WithMinProperties sets the min number of properties an object must have +func (s *Schema) WithMinProperties(min int64) *Schema { + s.MinProperties = &min + return s +} + +// Typed sets the type of this schema for a single value item +func (s *Schema) Typed(tpe, format string) *Schema { + s.Type = []string{tpe} + s.Format = format + return s +} + +// AddType adds a type with potential format to the types for this schema +func (s *Schema) AddType(tpe, format string) *Schema { + s.Type = append(s.Type, tpe) + if format != "" { + s.Format = format + } + return s +} + +// AsNullable flags this schema as nullable. +func (s *Schema) AsNullable() *Schema { + s.Nullable = true + return s +} + +// CollectionOf a fluent builder method for an array parameter +func (s *Schema) CollectionOf(items Schema) *Schema { + s.Type = []string{jsonArray} + s.Items = &SchemaOrArray{Schema: &items} + return s +} + +// WithDefault sets the default value on this parameter +func (s *Schema) WithDefault(defaultValue interface{}) *Schema { + s.Default = defaultValue + return s +} + +// WithRequired flags this parameter as required +func (s *Schema) WithRequired(items ...string) *Schema { + s.Required = items + return s +} + +// AddRequired adds field names to the required properties array +func (s *Schema) AddRequired(items ...string) *Schema { + s.Required = append(s.Required, items...) 
+ return s +} + +// WithMaxLength sets a max length value +func (s *Schema) WithMaxLength(max int64) *Schema { + s.MaxLength = &max + return s +} + +// WithMinLength sets a min length value +func (s *Schema) WithMinLength(min int64) *Schema { + s.MinLength = &min + return s +} + +// WithPattern sets a pattern value +func (s *Schema) WithPattern(pattern string) *Schema { + s.Pattern = pattern + return s +} + +// WithMultipleOf sets a multiple of value +func (s *Schema) WithMultipleOf(number float64) *Schema { + s.MultipleOf = &number + return s +} + +// WithMaximum sets a maximum number value +func (s *Schema) WithMaximum(max float64, exclusive bool) *Schema { + s.Maximum = &max + s.ExclusiveMaximum = exclusive + return s +} + +// WithMinimum sets a minimum number value +func (s *Schema) WithMinimum(min float64, exclusive bool) *Schema { + s.Minimum = &min + s.ExclusiveMinimum = exclusive + return s +} + +// WithEnum sets a the enum values (replace) +func (s *Schema) WithEnum(values ...interface{}) *Schema { + s.Enum = append([]interface{}{}, values...) + return s +} + +// WithMaxItems sets the max items +func (s *Schema) WithMaxItems(size int64) *Schema { + s.MaxItems = &size + return s +} + +// WithMinItems sets the min items +func (s *Schema) WithMinItems(size int64) *Schema { + s.MinItems = &size + return s +} + +// UniqueValues dictates that this array can only have unique items +func (s *Schema) UniqueValues() *Schema { + s.UniqueItems = true + return s +} + +// AllowDuplicates this array can have duplicates +func (s *Schema) AllowDuplicates() *Schema { + s.UniqueItems = false + return s +} + +// AddToAllOf adds a schema to the allOf property +func (s *Schema) AddToAllOf(schemas ...Schema) *Schema { + s.AllOf = append(s.AllOf, schemas...) + return s +} + +// WithDiscriminator sets the name of the discriminator field +func (s *Schema) WithDiscriminator(discriminator string) *Schema { + s.Discriminator = discriminator + return s +} + +// AsReadOnly flags this schema as readonly +func (s *Schema) AsReadOnly() *Schema { + s.ReadOnly = true + return s +} + +// AsWritable flags this schema as writeable (not read-only) +func (s *Schema) AsWritable() *Schema { + s.ReadOnly = false + return s +} + +// WithExample sets the example for this schema +func (s *Schema) WithExample(example interface{}) *Schema { + s.Example = example + return s +} + +// WithExternalDocs sets/removes the external docs for/from this schema. +// When you pass empty strings as params the external documents will be removed. +// When you pass non-empty string as one value then those values will be used on the external docs object. +// So when you pass a non-empty description, you should also pass the url and vice versa. 
+func (s *Schema) WithExternalDocs(description, url string) *Schema { + if description == "" && url == "" { + s.ExternalDocs = nil + return s + } + + if s.ExternalDocs == nil { + s.ExternalDocs = &ExternalDocumentation{} + } + s.ExternalDocs.Description = description + s.ExternalDocs.URL = url + return s +} + +// WithXMLName sets the xml name for the object +func (s *Schema) WithXMLName(name string) *Schema { + if s.XML == nil { + s.XML = new(XMLObject) + } + s.XML.Name = name + return s +} + +// WithXMLNamespace sets the xml namespace for the object +func (s *Schema) WithXMLNamespace(namespace string) *Schema { + if s.XML == nil { + s.XML = new(XMLObject) + } + s.XML.Namespace = namespace + return s +} + +// WithXMLPrefix sets the xml prefix for the object +func (s *Schema) WithXMLPrefix(prefix string) *Schema { + if s.XML == nil { + s.XML = new(XMLObject) + } + s.XML.Prefix = prefix + return s +} + +// AsXMLAttribute flags this object as xml attribute +func (s *Schema) AsXMLAttribute() *Schema { + if s.XML == nil { + s.XML = new(XMLObject) + } + s.XML.Attribute = true + return s +} + +// AsXMLElement flags this object as an xml node +func (s *Schema) AsXMLElement() *Schema { + if s.XML == nil { + s.XML = new(XMLObject) + } + s.XML.Attribute = false + return s +} + +// AsWrappedXML flags this object as wrapped, this is mostly useful for array types +func (s *Schema) AsWrappedXML() *Schema { + if s.XML == nil { + s.XML = new(XMLObject) + } + s.XML.Wrapped = true + return s +} + +// AsUnwrappedXML flags this object as an xml node +func (s *Schema) AsUnwrappedXML() *Schema { + if s.XML == nil { + s.XML = new(XMLObject) + } + s.XML.Wrapped = false + return s +} + +// SetValidations defines all schema validations. +// +// NOTE: Required, ReadOnly, AllOf, AnyOf, OneOf and Not are not considered. 
+func (s *Schema) SetValidations(val SchemaValidations) { + s.Maximum = val.Maximum + s.ExclusiveMaximum = val.ExclusiveMaximum + s.Minimum = val.Minimum + s.ExclusiveMinimum = val.ExclusiveMinimum + s.MaxLength = val.MaxLength + s.MinLength = val.MinLength + s.Pattern = val.Pattern + s.MaxItems = val.MaxItems + s.MinItems = val.MinItems + s.UniqueItems = val.UniqueItems + s.MultipleOf = val.MultipleOf + s.Enum = val.Enum + s.MinProperties = val.MinProperties + s.MaxProperties = val.MaxProperties + s.PatternProperties = val.PatternProperties +} + +// WithValidations is a fluent method to set schema validations +func (s *Schema) WithValidations(val SchemaValidations) *Schema { + s.SetValidations(val) + return s +} + +// Validations returns a clone of the validations for this schema +func (s Schema) Validations() SchemaValidations { + return SchemaValidations{ + CommonValidations: CommonValidations{ + Maximum: s.Maximum, + ExclusiveMaximum: s.ExclusiveMaximum, + Minimum: s.Minimum, + ExclusiveMinimum: s.ExclusiveMinimum, + MaxLength: s.MaxLength, + MinLength: s.MinLength, + Pattern: s.Pattern, + MaxItems: s.MaxItems, + MinItems: s.MinItems, + UniqueItems: s.UniqueItems, + MultipleOf: s.MultipleOf, + Enum: s.Enum, + }, + MinProperties: s.MinProperties, + MaxProperties: s.MaxProperties, + PatternProperties: s.PatternProperties, + } +} + +// MarshalJSON marshal this to JSON +func (s Schema) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(s.SchemaProps) + if err != nil { + return nil, fmt.Errorf("schema props %v", err) + } + b2, err := json.Marshal(s.VendorExtensible) + if err != nil { + return nil, fmt.Errorf("vendor props %v", err) + } + b3, err := s.Ref.MarshalJSON() + if err != nil { + return nil, fmt.Errorf("ref prop %v", err) + } + b4, err := s.Schema.MarshalJSON() + if err != nil { + return nil, fmt.Errorf("schema prop %v", err) + } + b5, err := json.Marshal(s.SwaggerSchemaProps) + if err != nil { + return nil, fmt.Errorf("common validations %v", err) + } + var b6 []byte + if s.ExtraProps != nil { + jj, err := json.Marshal(s.ExtraProps) + if err != nil { + return nil, fmt.Errorf("extra props %v", err) + } + b6 = jj + } + return swag.ConcatJSON(b1, b2, b3, b4, b5, b6), nil +} + +// UnmarshalJSON marshal this from JSON +func (s *Schema) UnmarshalJSON(data []byte) error { + props := struct { + SchemaProps + SwaggerSchemaProps + }{} + if err := json.Unmarshal(data, &props); err != nil { + return err + } + + sch := Schema{ + SchemaProps: props.SchemaProps, + SwaggerSchemaProps: props.SwaggerSchemaProps, + } + + var d map[string]interface{} + if err := json.Unmarshal(data, &d); err != nil { + return err + } + + _ = sch.Ref.fromMap(d) + _ = sch.Schema.fromMap(d) + + delete(d, "$ref") + delete(d, "$schema") + for _, pn := range swag.DefaultJSONNameProvider.GetJSONNames(s) { + delete(d, pn) + } + + for k, vv := range d { + lk := strings.ToLower(k) + if strings.HasPrefix(lk, "x-") { + if sch.Extensions == nil { + sch.Extensions = map[string]interface{}{} + } + sch.Extensions[k] = vv + continue + } + if sch.ExtraProps == nil { + sch.ExtraProps = map[string]interface{}{} + } + sch.ExtraProps[k] = vv + } + + *s = sch + + return nil +} diff --git a/vendor/github.com/go-openapi/spec/schema_loader.go b/vendor/github.com/go-openapi/spec/schema_loader.go new file mode 100644 index 0000000..0059b99 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/schema_loader.go @@ -0,0 +1,331 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); 
+// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + "fmt" + "log" + "net/url" + "reflect" + "strings" + + "github.com/go-openapi/swag" +) + +// PathLoader is a function to use when loading remote refs. +// +// This is a package level default. It may be overridden or bypassed by +// specifying the loader in ExpandOptions. +// +// NOTE: if you are using the go-openapi/loads package, it will override +// this value with its own default (a loader to retrieve YAML documents as +// well as JSON ones). +var PathLoader = func(pth string) (json.RawMessage, error) { + data, err := swag.LoadFromFileOrHTTP(pth) + if err != nil { + return nil, err + } + return json.RawMessage(data), nil +} + +// resolverContext allows to share a context during spec processing. +// At the moment, it just holds the index of circular references found. +type resolverContext struct { + // circulars holds all visited circular references, to shortcircuit $ref resolution. + // + // This structure is privately instantiated and needs not be locked against + // concurrent access, unless we chose to implement a parallel spec walking. + circulars map[string]bool + basePath string + loadDoc func(string) (json.RawMessage, error) + rootID string +} + +func newResolverContext(options *ExpandOptions) *resolverContext { + expandOptions := optionsOrDefault(options) + + // path loader may be overridden by options + var loader func(string) (json.RawMessage, error) + if expandOptions.PathLoader == nil { + loader = PathLoader + } else { + loader = expandOptions.PathLoader + } + + return &resolverContext{ + circulars: make(map[string]bool), + basePath: expandOptions.RelativeBase, // keep the root base path in context + loadDoc: loader, + } +} + +type schemaLoader struct { + root interface{} + options *ExpandOptions + cache ResolutionCache + context *resolverContext +} + +func (r *schemaLoader) transitiveResolver(basePath string, ref Ref) *schemaLoader { + if ref.IsRoot() || ref.HasFragmentOnly { + return r + } + + baseRef := MustCreateRef(basePath) + currentRef := normalizeRef(&ref, basePath) + if strings.HasPrefix(currentRef.String(), baseRef.String()) { + return r + } + + // set a new root against which to resolve + rootURL := currentRef.GetURL() + rootURL.Fragment = "" + root, _ := r.cache.Get(rootURL.String()) + + // shallow copy of resolver options to set a new RelativeBase when + // traversing multiple documents + newOptions := r.options + newOptions.RelativeBase = rootURL.String() + + return defaultSchemaLoader(root, newOptions, r.cache, r.context) +} + +func (r *schemaLoader) updateBasePath(transitive *schemaLoader, basePath string) string { + if transitive != r { + if transitive.options != nil && transitive.options.RelativeBase != "" { + return normalizeBase(transitive.options.RelativeBase) + } + } + + return basePath +} + +func (r *schemaLoader) resolveRef(ref *Ref, target interface{}, basePath string) error { + tgt := reflect.ValueOf(target) + if tgt.Kind() != reflect.Ptr { + return ErrResolveRefNeedsAPointer + } + + if ref.GetURL() == nil { + return 
nil + } + + var ( + res interface{} + data interface{} + err error + ) + + // Resolve against the root if it isn't nil, and if ref is pointing at the root, or has a fragment only which means + // it is pointing somewhere in the root. + root := r.root + if (ref.IsRoot() || ref.HasFragmentOnly) && root == nil && basePath != "" { + if baseRef, erb := NewRef(basePath); erb == nil { + root, _, _, _ = r.load(baseRef.GetURL()) + } + } + + if (ref.IsRoot() || ref.HasFragmentOnly) && root != nil { + data = root + } else { + baseRef := normalizeRef(ref, basePath) + data, _, _, err = r.load(baseRef.GetURL()) + if err != nil { + return err + } + } + + res = data + if ref.String() != "" { + res, _, err = ref.GetPointer().Get(data) + if err != nil { + return err + } + } + return swag.DynamicJSONToStruct(res, target) +} + +func (r *schemaLoader) load(refURL *url.URL) (interface{}, url.URL, bool, error) { + debugLog("loading schema from url: %s", refURL) + toFetch := *refURL + toFetch.Fragment = "" + + var err error + pth := toFetch.String() + normalized := normalizeBase(pth) + debugLog("loading doc from: %s", normalized) + + data, fromCache := r.cache.Get(normalized) + if fromCache { + return data, toFetch, fromCache, nil + } + + b, err := r.context.loadDoc(normalized) + if err != nil { + return nil, url.URL{}, false, err + } + + var doc interface{} + if err := json.Unmarshal(b, &doc); err != nil { + return nil, url.URL{}, false, err + } + r.cache.Set(normalized, doc) + + return doc, toFetch, fromCache, nil +} + +// isCircular detects cycles in sequences of $ref. +// +// It relies on a private context (which needs not be locked). +func (r *schemaLoader) isCircular(ref *Ref, basePath string, parentRefs ...string) (foundCycle bool) { + normalizedRef := normalizeURI(ref.String(), basePath) + if _, ok := r.context.circulars[normalizedRef]; ok { + // circular $ref has been already detected in another explored cycle + foundCycle = true + return + } + foundCycle = swag.ContainsStrings(parentRefs, normalizedRef) // normalized windows url's are lower cased + if foundCycle { + r.context.circulars[normalizedRef] = true + } + return +} + +// Resolve resolves a reference against basePath and stores the result in target. +// +// Resolve is not in charge of following references: it only resolves ref by following its URL. +// +// If the schema the ref is referring to holds nested refs, Resolve doesn't resolve them. +// +// If basePath is an empty string, ref is resolved against the root schema stored in the schemaLoader struct +func (r *schemaLoader) Resolve(ref *Ref, target interface{}, basePath string) error { + return r.resolveRef(ref, target, basePath) +} + +func (r *schemaLoader) deref(input interface{}, parentRefs []string, basePath string) error { + var ref *Ref + switch refable := input.(type) { + case *Schema: + ref = &refable.Ref + case *Parameter: + ref = &refable.Ref + case *Response: + ref = &refable.Ref + case *PathItem: + ref = &refable.Ref + default: + return fmt.Errorf("unsupported type: %T: %w", input, ErrDerefUnsupportedType) + } + + curRef := ref.String() + if curRef == "" { + return nil + } + + normalizedRef := normalizeRef(ref, basePath) + normalizedBasePath := normalizedRef.RemoteURI() + + if r.isCircular(normalizedRef, basePath, parentRefs...) 
{ + return nil + } + + if err := r.resolveRef(ref, input, basePath); r.shouldStopOnError(err) { + return err + } + + if ref.String() == "" || ref.String() == curRef { + // done with rereferencing + return nil + } + + parentRefs = append(parentRefs, normalizedRef.String()) + return r.deref(input, parentRefs, normalizedBasePath) +} + +func (r *schemaLoader) shouldStopOnError(err error) bool { + if err != nil && !r.options.ContinueOnError { + return true + } + + if err != nil { + log.Println(err) + } + + return false +} + +func (r *schemaLoader) setSchemaID(target interface{}, id, basePath string) (string, string) { + debugLog("schema has ID: %s", id) + + // handling the case when id is a folder + // remember that basePath has to point to a file + var refPath string + if strings.HasSuffix(id, "/") { + // ensure this is detected as a file, not a folder + refPath = fmt.Sprintf("%s%s", id, "placeholder.json") + } else { + refPath = id + } + + // updates the current base path + // * important: ID can be a relative path + // * registers target to be fetchable from the new base proposed by this id + newBasePath := normalizeURI(refPath, basePath) + + // store found IDs for possible future reuse in $ref + r.cache.Set(newBasePath, target) + + // the root document has an ID: all $ref relative to that ID may + // be rebased relative to the root document + if basePath == r.context.basePath { + debugLog("root document is a schema with ID: %s (normalized as:%s)", id, newBasePath) + r.context.rootID = newBasePath + } + + return newBasePath, refPath +} + +func defaultSchemaLoader( + root interface{}, + expandOptions *ExpandOptions, + cache ResolutionCache, + context *resolverContext) *schemaLoader { + + if expandOptions == nil { + expandOptions = &ExpandOptions{} + } + + cache = cacheOrDefault(cache) + + if expandOptions.RelativeBase == "" { + // if no relative base is provided, assume the root document + // contains all $ref, or at least, that the relative documents + // may be resolved from the current working directory. 
+ expandOptions.RelativeBase = baseForRoot(root, cache) + } + debugLog("effective expander options: %#v", expandOptions) + + if context == nil { + context = newResolverContext(expandOptions) + } + + return &schemaLoader{ + root: root, + options: expandOptions, + cache: cache, + context: context, + } +} diff --git a/vendor/github.com/go-openapi/spec/schemas/jsonschema-draft-04.json b/vendor/github.com/go-openapi/spec/schemas/jsonschema-draft-04.json new file mode 100644 index 0000000..bcbb847 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/schemas/jsonschema-draft-04.json @@ -0,0 +1,149 @@ +{ + "id": "http://json-schema.org/draft-04/schema#", + "$schema": "http://json-schema.org/draft-04/schema#", + "description": "Core schema meta-schema", + "definitions": { + "schemaArray": { + "type": "array", + "minItems": 1, + "items": { "$ref": "#" } + }, + "positiveInteger": { + "type": "integer", + "minimum": 0 + }, + "positiveIntegerDefault0": { + "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ] + }, + "simpleTypes": { + "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ] + }, + "stringArray": { + "type": "array", + "items": { "type": "string" }, + "minItems": 1, + "uniqueItems": true + } + }, + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "$schema": { + "type": "string" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "default": {}, + "multipleOf": { + "type": "number", + "minimum": 0, + "exclusiveMinimum": true + }, + "maximum": { + "type": "number" + }, + "exclusiveMaximum": { + "type": "boolean", + "default": false + }, + "minimum": { + "type": "number" + }, + "exclusiveMinimum": { + "type": "boolean", + "default": false + }, + "maxLength": { "$ref": "#/definitions/positiveInteger" }, + "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "pattern": { + "type": "string", + "format": "regex" + }, + "additionalItems": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "items": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/schemaArray" } + ], + "default": {} + }, + "maxItems": { "$ref": "#/definitions/positiveInteger" }, + "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "uniqueItems": { + "type": "boolean", + "default": false + }, + "maxProperties": { "$ref": "#/definitions/positiveInteger" }, + "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "required": { "$ref": "#/definitions/stringArray" }, + "additionalProperties": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "definitions": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "properties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "patternProperties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "dependencies": { + "type": "object", + "additionalProperties": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/stringArray" } + ] + } + }, + "enum": { + "type": "array", + "minItems": 1, + "uniqueItems": true + }, + "type": { + "anyOf": [ + { "$ref": "#/definitions/simpleTypes" }, + { + "type": "array", + "items": { "$ref": "#/definitions/simpleTypes" }, + "minItems": 1, + "uniqueItems": true + } + ] + }, + "format": { "type": "string" }, + "allOf": { "$ref": "#/definitions/schemaArray" }, + "anyOf": { "$ref": "#/definitions/schemaArray" }, + 
"oneOf": { "$ref": "#/definitions/schemaArray" }, + "not": { "$ref": "#" } + }, + "dependencies": { + "exclusiveMaximum": [ "maximum" ], + "exclusiveMinimum": [ "minimum" ] + }, + "default": {} +} diff --git a/vendor/github.com/go-openapi/spec/schemas/v2/schema.json b/vendor/github.com/go-openapi/spec/schemas/v2/schema.json new file mode 100644 index 0000000..ebe10ed --- /dev/null +++ b/vendor/github.com/go-openapi/spec/schemas/v2/schema.json @@ -0,0 +1,1607 @@ +{ + "title": "A JSON Schema for Swagger 2.0 API.", + "id": "http://swagger.io/v2/schema.json#", + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "required": [ + "swagger", + "info", + "paths" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "swagger": { + "type": "string", + "enum": [ + "2.0" + ], + "description": "The Swagger version of this document." + }, + "info": { + "$ref": "#/definitions/info" + }, + "host": { + "type": "string", + "pattern": "^[^{}/ :\\\\]+(?::\\d+)?$", + "description": "The host (name or ip) of the API. Example: 'swagger.io'" + }, + "basePath": { + "type": "string", + "pattern": "^/", + "description": "The base path to the API. Example: '/api'." + }, + "schemes": { + "$ref": "#/definitions/schemesList" + }, + "consumes": { + "description": "A list of MIME types accepted by the API.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "produces": { + "description": "A list of MIME types the API can produce.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "paths": { + "$ref": "#/definitions/paths" + }, + "definitions": { + "$ref": "#/definitions/definitions" + }, + "parameters": { + "$ref": "#/definitions/parameterDefinitions" + }, + "responses": { + "$ref": "#/definitions/responseDefinitions" + }, + "security": { + "$ref": "#/definitions/security" + }, + "securityDefinitions": { + "$ref": "#/definitions/securityDefinitions" + }, + "tags": { + "type": "array", + "items": { + "$ref": "#/definitions/tag" + }, + "uniqueItems": true + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + }, + "definitions": { + "info": { + "type": "object", + "description": "General information about the API.", + "required": [ + "version", + "title" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "title": { + "type": "string", + "description": "A unique and precise title of the API." + }, + "version": { + "type": "string", + "description": "A semantic version number of the API." + }, + "description": { + "type": "string", + "description": "A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed." + }, + "termsOfService": { + "type": "string", + "description": "The terms of service for the API." + }, + "contact": { + "$ref": "#/definitions/contact" + }, + "license": { + "$ref": "#/definitions/license" + } + } + }, + "contact": { + "type": "object", + "description": "Contact information for the owners of the API.", + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The identifying name of the contact person/organization." 
+ }, + "url": { + "type": "string", + "description": "The URL pointing to the contact information.", + "format": "uri" + }, + "email": { + "type": "string", + "description": "The email address of the contact person/organization.", + "format": "email" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "license": { + "type": "object", + "required": [ + "name" + ], + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The name of the license type. It's encouraged to use an OSI compatible license." + }, + "url": { + "type": "string", + "description": "The URL pointing to the license.", + "format": "uri" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "paths": { + "type": "object", + "description": "Relative paths to the individual endpoints. They must be relative to the 'basePath'.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + }, + "^/": { + "$ref": "#/definitions/pathItem" + } + }, + "additionalProperties": false + }, + "definitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "description": "One or more JSON objects describing the schemas being consumed and produced by the API." + }, + "parameterDefinitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/parameter" + }, + "description": "One or more JSON representations for parameters" + }, + "responseDefinitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/response" + }, + "description": "One or more JSON representations for responses" + }, + "externalDocs": { + "type": "object", + "additionalProperties": false, + "description": "information about external documentation", + "required": [ + "url" + ], + "properties": { + "description": { + "type": "string" + }, + "url": { + "type": "string", + "format": "uri" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "examples": { + "type": "object", + "additionalProperties": true + }, + "mimeType": { + "type": "string", + "description": "The MIME type of the HTTP message." + }, + "operation": { + "type": "object", + "required": [ + "responses" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "tags": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "summary": { + "type": "string", + "description": "A brief summary of the operation." + }, + "description": { + "type": "string", + "description": "A longer description of the operation, GitHub Flavored Markdown is allowed." + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "operationId": { + "type": "string", + "description": "A unique identifier of the operation." 
+ }, + "produces": { + "description": "A list of MIME types the API can produce.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "consumes": { + "description": "A list of MIME types the API can consume.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "parameters": { + "$ref": "#/definitions/parametersList" + }, + "responses": { + "$ref": "#/definitions/responses" + }, + "schemes": { + "$ref": "#/definitions/schemesList" + }, + "deprecated": { + "type": "boolean", + "default": false + }, + "security": { + "$ref": "#/definitions/security" + } + } + }, + "pathItem": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "$ref": { + "type": "string" + }, + "get": { + "$ref": "#/definitions/operation" + }, + "put": { + "$ref": "#/definitions/operation" + }, + "post": { + "$ref": "#/definitions/operation" + }, + "delete": { + "$ref": "#/definitions/operation" + }, + "options": { + "$ref": "#/definitions/operation" + }, + "head": { + "$ref": "#/definitions/operation" + }, + "patch": { + "$ref": "#/definitions/operation" + }, + "parameters": { + "$ref": "#/definitions/parametersList" + } + } + }, + "responses": { + "type": "object", + "description": "Response objects names can either be any valid HTTP status code or 'default'.", + "minProperties": 1, + "additionalProperties": false, + "patternProperties": { + "^([0-9]{3})$|^(default)$": { + "$ref": "#/definitions/responseValue" + }, + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "not": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + } + }, + "responseValue": { + "oneOf": [ + { + "$ref": "#/definitions/response" + }, + { + "$ref": "#/definitions/jsonReference" + } + ] + }, + "response": { + "type": "object", + "required": [ + "description" + ], + "properties": { + "description": { + "type": "string" + }, + "schema": { + "oneOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "$ref": "#/definitions/fileSchema" + } + ] + }, + "headers": { + "$ref": "#/definitions/headers" + }, + "examples": { + "$ref": "#/definitions/examples" + } + }, + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "headers": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/header" + } + }, + "header": { + "type": "object", + "additionalProperties": false, + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "string", + "number", + "integer", + "boolean", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": 
"#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "vendorExtension": { + "description": "Any property starting with x- is valid.", + "additionalProperties": true, + "additionalItems": true + }, + "bodyParameter": { + "type": "object", + "required": [ + "name", + "in", + "schema" + ], + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "body" + ] + }, + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "schema": { + "$ref": "#/definitions/schema" + } + }, + "additionalProperties": false + }, + "headerParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "header" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "queryParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "query" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. 
This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "allowEmptyValue": { + "type": "boolean", + "default": false, + "description": "allows sending a parameter by name only or with an empty value." + }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormatWithMulti" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "formDataParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "formData" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "allowEmptyValue": { + "type": "boolean", + "default": false, + "description": "allows sending a parameter by name only or with an empty value." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array", + "file" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormatWithMulti" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "pathParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "required": [ + "required" + ], + "properties": { + "required": { + "type": "boolean", + "enum": [ + true + ], + "description": "Determines whether or not this parameter is required or optional." + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "path" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "nonBodyParameter": { + "type": "object", + "required": [ + "name", + "in", + "type" + ], + "oneOf": [ + { + "$ref": "#/definitions/headerParameterSubSchema" + }, + { + "$ref": "#/definitions/formDataParameterSubSchema" + }, + { + "$ref": "#/definitions/queryParameterSubSchema" + }, + { + "$ref": "#/definitions/pathParameterSubSchema" + } + ] + }, + "parameter": { + "oneOf": [ + { + "$ref": "#/definitions/bodyParameter" + }, + { + "$ref": "#/definitions/nonBodyParameter" + } + ] + }, + "schema": { + "type": "object", + "description": "A deterministic version of a JSON Schema object.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "$ref": { + "type": "string" + }, + "format": { + "type": "string" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "multipleOf": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "pattern": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" + }, + "maxItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "uniqueItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" + }, + "maxProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "required": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" + }, + "enum": { + "$ref": 
"http://json-schema.org/draft-04/schema#/properties/enum" + }, + "additionalProperties": { + "anyOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "type": "boolean" + } + ], + "default": {} + }, + "type": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/type" + }, + "items": { + "anyOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "type": "array", + "minItems": 1, + "items": { + "$ref": "#/definitions/schema" + } + } + ], + "default": {} + }, + "allOf": { + "type": "array", + "minItems": 1, + "items": { + "$ref": "#/definitions/schema" + } + }, + "properties": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "default": {} + }, + "discriminator": { + "type": "string" + }, + "readOnly": { + "type": "boolean", + "default": false + }, + "xml": { + "$ref": "#/definitions/xml" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "example": {} + }, + "additionalProperties": false + }, + "fileSchema": { + "type": "object", + "description": "A deterministic version of a JSON Schema object.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "required": [ + "type" + ], + "properties": { + "format": { + "type": "string" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "required": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" + }, + "type": { + "type": "string", + "enum": [ + "file" + ] + }, + "readOnly": { + "type": "boolean", + "default": false + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "example": {} + }, + "additionalProperties": false + }, + "primitivesItems": { + "type": "object", + "additionalProperties": false, + "properties": { + "type": { + "type": "string", + "enum": [ + "string", + "number", + "integer", + "boolean", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "security": { + "type": "array", + "items": { + "$ref": "#/definitions/securityRequirement" + }, + "uniqueItems": true + }, + "securityRequirement": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + } + }, + "xml": { + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": "string" + }, + "namespace": { + "type": "string" + }, + 
"prefix": { + "type": "string" + }, + "attribute": { + "type": "boolean", + "default": false + }, + "wrapped": { + "type": "boolean", + "default": false + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "tag": { + "type": "object", + "additionalProperties": false, + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "securityDefinitions": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "$ref": "#/definitions/basicAuthenticationSecurity" + }, + { + "$ref": "#/definitions/apiKeySecurity" + }, + { + "$ref": "#/definitions/oauth2ImplicitSecurity" + }, + { + "$ref": "#/definitions/oauth2PasswordSecurity" + }, + { + "$ref": "#/definitions/oauth2ApplicationSecurity" + }, + { + "$ref": "#/definitions/oauth2AccessCodeSecurity" + } + ] + } + }, + "basicAuthenticationSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "basic" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "apiKeySecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "name", + "in" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "apiKey" + ] + }, + "name": { + "type": "string" + }, + "in": { + "type": "string", + "enum": [ + "header", + "query" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2ImplicitSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "authorizationUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "implicit" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "authorizationUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2PasswordSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "password" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2ApplicationSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "application" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2AccessCodeSecurity": { + "type": "object", + 
"additionalProperties": false, + "required": [ + "type", + "flow", + "authorizationUrl", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "accessCode" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "authorizationUrl": { + "type": "string", + "format": "uri" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2Scopes": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "mediaTypeList": { + "type": "array", + "items": { + "$ref": "#/definitions/mimeType" + }, + "uniqueItems": true + }, + "parametersList": { + "type": "array", + "description": "The parameters needed to send a valid API call.", + "additionalItems": false, + "items": { + "oneOf": [ + { + "$ref": "#/definitions/parameter" + }, + { + "$ref": "#/definitions/jsonReference" + } + ] + }, + "uniqueItems": true + }, + "schemesList": { + "type": "array", + "description": "The transfer protocol of the API.", + "items": { + "type": "string", + "enum": [ + "http", + "https", + "ws", + "wss" + ] + }, + "uniqueItems": true + }, + "collectionFormat": { + "type": "string", + "enum": [ + "csv", + "ssv", + "tsv", + "pipes" + ], + "default": "csv" + }, + "collectionFormatWithMulti": { + "type": "string", + "enum": [ + "csv", + "ssv", + "tsv", + "pipes", + "multi" + ], + "default": "csv" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "multipleOf": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "pattern": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" + }, + "maxItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "uniqueItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" + }, + "enum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/enum" + }, + "jsonReference": { + "type": "object", + "required": [ + "$ref" + ], + "additionalProperties": false, + "properties": { + "$ref": { + "type": "string" + } + } + } + } +} diff --git a/vendor/github.com/go-openapi/spec/security_scheme.go b/vendor/github.com/go-openapi/spec/security_scheme.go new file mode 100644 index 0000000..9d0bdae --- /dev/null +++ b/vendor/github.com/go-openapi/spec/security_scheme.go @@ -0,0 +1,170 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 
2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +const ( + basic = "basic" + apiKey = "apiKey" + oauth2 = "oauth2" + implicit = "implicit" + password = "password" + application = "application" + accessCode = "accessCode" +) + +// BasicAuth creates a basic auth security scheme +func BasicAuth() *SecurityScheme { + return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{Type: basic}} +} + +// APIKeyAuth creates an api key auth security scheme +func APIKeyAuth(fieldName, valueSource string) *SecurityScheme { + return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{Type: apiKey, Name: fieldName, In: valueSource}} +} + +// OAuth2Implicit creates an implicit flow oauth2 security scheme +func OAuth2Implicit(authorizationURL string) *SecurityScheme { + return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{ + Type: oauth2, + Flow: implicit, + AuthorizationURL: authorizationURL, + }} +} + +// OAuth2Password creates a password flow oauth2 security scheme +func OAuth2Password(tokenURL string) *SecurityScheme { + return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{ + Type: oauth2, + Flow: password, + TokenURL: tokenURL, + }} +} + +// OAuth2Application creates an application flow oauth2 security scheme +func OAuth2Application(tokenURL string) *SecurityScheme { + return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{ + Type: oauth2, + Flow: application, + TokenURL: tokenURL, + }} +} + +// OAuth2AccessToken creates an access token flow oauth2 security scheme +func OAuth2AccessToken(authorizationURL, tokenURL string) *SecurityScheme { + return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{ + Type: oauth2, + Flow: accessCode, + AuthorizationURL: authorizationURL, + TokenURL: tokenURL, + }} +} + +// SecuritySchemeProps describes a swagger security scheme in the securityDefinitions section +type SecuritySchemeProps struct { + Description string `json:"description,omitempty"` + Type string `json:"type"` + Name string `json:"name,omitempty"` // api key + In string `json:"in,omitempty"` // api key + Flow string `json:"flow,omitempty"` // oauth2 + AuthorizationURL string `json:"authorizationUrl"` // oauth2 + TokenURL string `json:"tokenUrl,omitempty"` // oauth2 + Scopes map[string]string `json:"scopes,omitempty"` // oauth2 +} + +// AddScope adds a scope to this security scheme +func (s *SecuritySchemeProps) AddScope(scope, description string) { + if s.Scopes == nil { + s.Scopes = make(map[string]string) + } + s.Scopes[scope] = description +} + +// SecurityScheme allows the definition of a security scheme that can be used by the operations. +// Supported schemes are basic authentication, an API key (either as a header or as a query parameter) +// and OAuth2's common flows (implicit, password, application and access code). 
+// +// For more information: http://goo.gl/8us55a#securitySchemeObject +type SecurityScheme struct { + VendorExtensible + SecuritySchemeProps +} + +// JSONLookup implements an interface to customize json pointer lookup +func (s SecurityScheme) JSONLookup(token string) (interface{}, error) { + if ex, ok := s.Extensions[token]; ok { + return &ex, nil + } + + r, _, err := jsonpointer.GetForToken(s.SecuritySchemeProps, token) + return r, err +} + +// MarshalJSON marshal this to JSON +func (s SecurityScheme) MarshalJSON() ([]byte, error) { + var ( + b1 []byte + err error + ) + + if s.Type == oauth2 && (s.Flow == "implicit" || s.Flow == "accessCode") { + // when oauth2 for implicit or accessCode flows, empty AuthorizationURL is added as empty string + b1, err = json.Marshal(s.SecuritySchemeProps) + } else { + // when not oauth2, empty AuthorizationURL should be omitted + b1, err = json.Marshal(struct { + Description string `json:"description,omitempty"` + Type string `json:"type"` + Name string `json:"name,omitempty"` // api key + In string `json:"in,omitempty"` // api key + Flow string `json:"flow,omitempty"` // oauth2 + AuthorizationURL string `json:"authorizationUrl,omitempty"` // oauth2 + TokenURL string `json:"tokenUrl,omitempty"` // oauth2 + Scopes map[string]string `json:"scopes,omitempty"` // oauth2 + }{ + Description: s.Description, + Type: s.Type, + Name: s.Name, + In: s.In, + Flow: s.Flow, + AuthorizationURL: s.AuthorizationURL, + TokenURL: s.TokenURL, + Scopes: s.Scopes, + }) + } + if err != nil { + return nil, err + } + + b2, err := json.Marshal(s.VendorExtensible) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b1, b2), nil +} + +// UnmarshalJSON marshal this from JSON +func (s *SecurityScheme) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &s.SecuritySchemeProps); err != nil { + return err + } + return json.Unmarshal(data, &s.VendorExtensible) +} diff --git a/vendor/github.com/go-openapi/spec/spec.go b/vendor/github.com/go-openapi/spec/spec.go new file mode 100644 index 0000000..876aa12 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/spec.go @@ -0,0 +1,78 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" +) + +//go:generate curl -L --progress -o ./schemas/v2/schema.json http://swagger.io/v2/schema.json +//go:generate curl -L --progress -o ./schemas/jsonschema-draft-04.json http://json-schema.org/draft-04/schema +//go:generate go-bindata -pkg=spec -prefix=./schemas -ignore=.*\.md ./schemas/... 
+//go:generate perl -pi -e s,Json,JSON,g bindata.go + +const ( + // SwaggerSchemaURL the url for the swagger 2.0 schema to validate specs + SwaggerSchemaURL = "http://swagger.io/v2/schema.json#" + // JSONSchemaURL the url for the json schema + JSONSchemaURL = "http://json-schema.org/draft-04/schema#" +) + +// MustLoadJSONSchemaDraft04 panics when Swagger20Schema returns an error +func MustLoadJSONSchemaDraft04() *Schema { + d, e := JSONSchemaDraft04() + if e != nil { + panic(e) + } + return d +} + +// JSONSchemaDraft04 loads the json schema document for json shema draft04 +func JSONSchemaDraft04() (*Schema, error) { + b, err := jsonschemaDraft04JSONBytes() + if err != nil { + return nil, err + } + + schema := new(Schema) + if err := json.Unmarshal(b, schema); err != nil { + return nil, err + } + return schema, nil +} + +// MustLoadSwagger20Schema panics when Swagger20Schema returns an error +func MustLoadSwagger20Schema() *Schema { + d, e := Swagger20Schema() + if e != nil { + panic(e) + } + return d +} + +// Swagger20Schema loads the swagger 2.0 schema from the embedded assets +func Swagger20Schema() (*Schema, error) { + + b, err := v2SchemaJSONBytes() + if err != nil { + return nil, err + } + + schema := new(Schema) + if err := json.Unmarshal(b, schema); err != nil { + return nil, err + } + return schema, nil +} diff --git a/vendor/github.com/go-openapi/spec/swagger.go b/vendor/github.com/go-openapi/spec/swagger.go new file mode 100644 index 0000000..1590fd1 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/swagger.go @@ -0,0 +1,448 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "bytes" + "encoding/gob" + "encoding/json" + "fmt" + "strconv" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +// Swagger this is the root document object for the API specification. +// It combines what previously was the Resource Listing and API Declaration (version 1.2 and earlier) +// together into one document. 
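
A small sketch of the schema loaders above, assuming the embedded schema assets and the usual `SchemaProps` fields (such as `ID`) are present as in upstream go-openapi/spec:

```go
package main

import (
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	// Swagger20Schema returns the embedded Swagger 2.0 meta-schema;
	// the MustLoad* variants panic instead of returning an error.
	swagger20, err := spec.Swagger20Schema()
	if err != nil {
		panic(err)
	}

	draft04 := spec.MustLoadJSONSchemaDraft04()

	fmt.Println(swagger20.ID, draft04.ID)
}
```
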
+// +// For more information: http://goo.gl/8us55a#swagger-object- +type Swagger struct { + VendorExtensible + SwaggerProps +} + +// JSONLookup look up a value by the json property name +func (s Swagger) JSONLookup(token string) (interface{}, error) { + if ex, ok := s.Extensions[token]; ok { + return &ex, nil + } + r, _, err := jsonpointer.GetForToken(s.SwaggerProps, token) + return r, err +} + +// MarshalJSON marshals this swagger structure to json +func (s Swagger) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(s.SwaggerProps) + if err != nil { + return nil, err + } + b2, err := json.Marshal(s.VendorExtensible) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b1, b2), nil +} + +// UnmarshalJSON unmarshals a swagger spec from json +func (s *Swagger) UnmarshalJSON(data []byte) error { + var sw Swagger + if err := json.Unmarshal(data, &sw.SwaggerProps); err != nil { + return err + } + if err := json.Unmarshal(data, &sw.VendorExtensible); err != nil { + return err + } + *s = sw + return nil +} + +// GobEncode provides a safe gob encoder for Swagger, including extensions +func (s Swagger) GobEncode() ([]byte, error) { + var b bytes.Buffer + raw := struct { + Props SwaggerProps + Ext VendorExtensible + }{ + Props: s.SwaggerProps, + Ext: s.VendorExtensible, + } + err := gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err +} + +// GobDecode provides a safe gob decoder for Swagger, including extensions +func (s *Swagger) GobDecode(b []byte) error { + var raw struct { + Props SwaggerProps + Ext VendorExtensible + } + buf := bytes.NewBuffer(b) + err := gob.NewDecoder(buf).Decode(&raw) + if err != nil { + return err + } + s.SwaggerProps = raw.Props + s.VendorExtensible = raw.Ext + return nil +} + +// SwaggerProps captures the top-level properties of an Api specification +// +// NOTE: validation rules +// - the scheme, when present must be from [http, https, ws, wss] +// - BasePath must start with a leading "/" +// - Paths is required +type SwaggerProps struct { + ID string `json:"id,omitempty"` + Consumes []string `json:"consumes,omitempty"` + Produces []string `json:"produces,omitempty"` + Schemes []string `json:"schemes,omitempty"` + Swagger string `json:"swagger,omitempty"` + Info *Info `json:"info,omitempty"` + Host string `json:"host,omitempty"` + BasePath string `json:"basePath,omitempty"` + Paths *Paths `json:"paths"` + Definitions Definitions `json:"definitions,omitempty"` + Parameters map[string]Parameter `json:"parameters,omitempty"` + Responses map[string]Response `json:"responses,omitempty"` + SecurityDefinitions SecurityDefinitions `json:"securityDefinitions,omitempty"` + Security []map[string][]string `json:"security,omitempty"` + Tags []Tag `json:"tags,omitempty"` + ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"` +} + +type swaggerPropsAlias SwaggerProps + +type gobSwaggerPropsAlias struct { + Security []map[string]struct { + List []string + Pad bool + } + Alias *swaggerPropsAlias + SecurityIsEmpty bool +} + +// GobEncode provides a safe gob encoder for SwaggerProps, including empty security requirements +func (o SwaggerProps) GobEncode() ([]byte, error) { + raw := gobSwaggerPropsAlias{ + Alias: (*swaggerPropsAlias)(&o), + } + + var b bytes.Buffer + if o.Security == nil { + // nil security requirement + err := gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err + } + + if len(o.Security) == 0 { + // empty, but non-nil security requirement + raw.SecurityIsEmpty = true + raw.Alias.Security = nil + err := gob.NewEncoder(&b).Encode(raw) 
+ return b.Bytes(), err + } + + raw.Security = make([]map[string]struct { + List []string + Pad bool + }, 0, len(o.Security)) + for _, req := range o.Security { + v := make(map[string]struct { + List []string + Pad bool + }, len(req)) + for k, val := range req { + v[k] = struct { + List []string + Pad bool + }{ + List: val, + } + } + raw.Security = append(raw.Security, v) + } + + err := gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err +} + +// GobDecode provides a safe gob decoder for SwaggerProps, including empty security requirements +func (o *SwaggerProps) GobDecode(b []byte) error { + var raw gobSwaggerPropsAlias + + buf := bytes.NewBuffer(b) + err := gob.NewDecoder(buf).Decode(&raw) + if err != nil { + return err + } + if raw.Alias == nil { + return nil + } + + switch { + case raw.SecurityIsEmpty: + // empty, but non-nil security requirement + raw.Alias.Security = []map[string][]string{} + case len(raw.Alias.Security) == 0: + // nil security requirement + raw.Alias.Security = nil + default: + raw.Alias.Security = make([]map[string][]string, 0, len(raw.Security)) + for _, req := range raw.Security { + v := make(map[string][]string, len(req)) + for k, val := range req { + v[k] = make([]string, 0, len(val.List)) + v[k] = append(v[k], val.List...) + } + raw.Alias.Security = append(raw.Alias.Security, v) + } + } + + *o = *(*SwaggerProps)(raw.Alias) + return nil +} + +// Dependencies represent a dependencies property +type Dependencies map[string]SchemaOrStringArray + +// SchemaOrBool represents a schema or boolean value, is biased towards true for the boolean property +type SchemaOrBool struct { + Allows bool + Schema *Schema +} + +// JSONLookup implements an interface to customize json pointer lookup +func (s SchemaOrBool) JSONLookup(token string) (interface{}, error) { + if token == "allows" { + return s.Allows, nil + } + r, _, err := jsonpointer.GetForToken(s.Schema, token) + return r, err +} + +var jsTrue = []byte("true") +var jsFalse = []byte("false") + +// MarshalJSON convert this object to JSON +func (s SchemaOrBool) MarshalJSON() ([]byte, error) { + if s.Schema != nil { + return json.Marshal(s.Schema) + } + + if s.Schema == nil && !s.Allows { + return jsFalse, nil + } + return jsTrue, nil +} + +// UnmarshalJSON converts this bool or schema object from a JSON structure +func (s *SchemaOrBool) UnmarshalJSON(data []byte) error { + var nw SchemaOrBool + if len(data) > 0 { + if data[0] == '{' { + var sch Schema + if err := json.Unmarshal(data, &sch); err != nil { + return err + } + nw.Schema = &sch + } + nw.Allows = !bytes.Equal(data, []byte("false")) + } + *s = nw + return nil +} + +// SchemaOrStringArray represents a schema or a string array +type SchemaOrStringArray struct { + Schema *Schema + Property []string +} + +// JSONLookup implements an interface to customize json pointer lookup +func (s SchemaOrStringArray) JSONLookup(token string) (interface{}, error) { + r, _, err := jsonpointer.GetForToken(s.Schema, token) + return r, err +} + +// MarshalJSON converts this schema object or array into JSON structure +func (s SchemaOrStringArray) MarshalJSON() ([]byte, error) { + if len(s.Property) > 0 { + return json.Marshal(s.Property) + } + if s.Schema != nil { + return json.Marshal(s.Schema) + } + return []byte("null"), nil +} + +// UnmarshalJSON converts this schema object or array from a JSON structure +func (s *SchemaOrStringArray) UnmarshalJSON(data []byte) error { + var first byte + if len(data) > 1 { + first = data[0] + } + var nw SchemaOrStringArray + if first == '{' { + 
var sch Schema + if err := json.Unmarshal(data, &sch); err != nil { + return err + } + nw.Schema = &sch + } + if first == '[' { + if err := json.Unmarshal(data, &nw.Property); err != nil { + return err + } + } + *s = nw + return nil +} + +// Definitions contains the models explicitly defined in this spec +// An object to hold data types that can be consumed and produced by operations. +// These data types can be primitives, arrays or models. +// +// For more information: http://goo.gl/8us55a#definitionsObject +type Definitions map[string]Schema + +// SecurityDefinitions a declaration of the security schemes available to be used in the specification. +// This does not enforce the security schemes on the operations and only serves to provide +// the relevant details for each scheme. +// +// For more information: http://goo.gl/8us55a#securityDefinitionsObject +type SecurityDefinitions map[string]*SecurityScheme + +// StringOrArray represents a value that can either be a string +// or an array of strings. Mainly here for serialization purposes +type StringOrArray []string + +// Contains returns true when the value is contained in the slice +func (s StringOrArray) Contains(value string) bool { + for _, str := range s { + if str == value { + return true + } + } + return false +} + +// JSONLookup implements an interface to customize json pointer lookup +func (s SchemaOrArray) JSONLookup(token string) (interface{}, error) { + if _, err := strconv.Atoi(token); err == nil { + r, _, err := jsonpointer.GetForToken(s.Schemas, token) + return r, err + } + r, _, err := jsonpointer.GetForToken(s.Schema, token) + return r, err +} + +// UnmarshalJSON unmarshals this string or array object from a JSON array or JSON string +func (s *StringOrArray) UnmarshalJSON(data []byte) error { + var first byte + if len(data) > 1 { + first = data[0] + } + + if first == '[' { + var parsed []string + if err := json.Unmarshal(data, &parsed); err != nil { + return err + } + *s = StringOrArray(parsed) + return nil + } + + var single interface{} + if err := json.Unmarshal(data, &single); err != nil { + return err + } + if single == nil { + return nil + } + switch v := single.(type) { + case string: + *s = StringOrArray([]string{v}) + return nil + default: + return fmt.Errorf("only string or array is allowed, not %T", single) + } +} + +// MarshalJSON converts this string or array to a JSON array or JSON string +func (s StringOrArray) MarshalJSON() ([]byte, error) { + if len(s) == 1 { + return json.Marshal([]string(s)[0]) + } + return json.Marshal([]string(s)) +} + +// SchemaOrArray represents a value that can either be a Schema +// or an array of Schema. 
Mainly here for serialization purposes +type SchemaOrArray struct { + Schema *Schema + Schemas []Schema +} + +// Len returns the number of schemas in this property +func (s SchemaOrArray) Len() int { + if s.Schema != nil { + return 1 + } + return len(s.Schemas) +} + +// ContainsType returns true when one of the schemas is of the specified type +func (s *SchemaOrArray) ContainsType(name string) bool { + if s.Schema != nil { + return s.Schema.Type != nil && s.Schema.Type.Contains(name) + } + return false +} + +// MarshalJSON converts this schema object or array into JSON structure +func (s SchemaOrArray) MarshalJSON() ([]byte, error) { + if len(s.Schemas) > 0 { + return json.Marshal(s.Schemas) + } + return json.Marshal(s.Schema) +} + +// UnmarshalJSON converts this schema object or array from a JSON structure +func (s *SchemaOrArray) UnmarshalJSON(data []byte) error { + var nw SchemaOrArray + var first byte + if len(data) > 1 { + first = data[0] + } + if first == '{' { + var sch Schema + if err := json.Unmarshal(data, &sch); err != nil { + return err + } + nw.Schema = &sch + } + if first == '[' { + if err := json.Unmarshal(data, &nw.Schemas); err != nil { + return err + } + } + *s = nw + return nil +} + +// vim:set ft=go noet sts=2 sw=2 ts=2: diff --git a/vendor/github.com/go-openapi/spec/tag.go b/vendor/github.com/go-openapi/spec/tag.go new file mode 100644 index 0000000..faa3d3d --- /dev/null +++ b/vendor/github.com/go-openapi/spec/tag.go @@ -0,0 +1,75 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +// TagProps describe a tag entry in the top level tags section of a swagger spec +type TagProps struct { + Description string `json:"description,omitempty"` + Name string `json:"name,omitempty"` + ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"` +} + +// NewTag creates a new tag +func NewTag(name, description string, externalDocs *ExternalDocumentation) Tag { + return Tag{TagProps: TagProps{Description: description, Name: name, ExternalDocs: externalDocs}} +} + +// Tag allows adding meta data to a single tag that is used by the +// [Operation Object](http://goo.gl/8us55a#operationObject). +// It is not mandatory to have a Tag Object per tag used there. 
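
A short sketch of the serialization helpers above: `StringOrArray` unmarshals from either a bare JSON string or an array of strings, which is how fields such as a schema's `type` stay flexible (import path assumed as upstream):

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	var single, many spec.StringOrArray

	// Both JSON forms decode into the same Go type.
	if err := json.Unmarshal([]byte(`"string"`), &single); err != nil {
		panic(err)
	}
	if err := json.Unmarshal([]byte(`["string", "null"]`), &many); err != nil {
		panic(err)
	}

	fmt.Println(single.Contains("string"), many.Contains("null")) // true true
}
```
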
+// +// For more information: http://goo.gl/8us55a#tagObject +type Tag struct { + VendorExtensible + TagProps +} + +// JSONLookup implements an interface to customize json pointer lookup +func (t Tag) JSONLookup(token string) (interface{}, error) { + if ex, ok := t.Extensions[token]; ok { + return &ex, nil + } + + r, _, err := jsonpointer.GetForToken(t.TagProps, token) + return r, err +} + +// MarshalJSON marshal this to JSON +func (t Tag) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(t.TagProps) + if err != nil { + return nil, err + } + b2, err := json.Marshal(t.VendorExtensible) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b1, b2), nil +} + +// UnmarshalJSON marshal this from JSON +func (t *Tag) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &t.TagProps); err != nil { + return err + } + return json.Unmarshal(data, &t.VendorExtensible) +} diff --git a/vendor/github.com/go-openapi/spec/url_go19.go b/vendor/github.com/go-openapi/spec/url_go19.go new file mode 100644 index 0000000..5bdfe40 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/url_go19.go @@ -0,0 +1,11 @@ +package spec + +import "net/url" + +func parseURL(s string) (*url.URL, error) { + u, err := url.Parse(s) + if err == nil { + u.OmitHost = false + } + return u, err +} diff --git a/vendor/github.com/go-openapi/spec/validations.go b/vendor/github.com/go-openapi/spec/validations.go new file mode 100644 index 0000000..6360a8e --- /dev/null +++ b/vendor/github.com/go-openapi/spec/validations.go @@ -0,0 +1,215 @@ +package spec + +// CommonValidations describe common JSON-schema validations +type CommonValidations struct { + Maximum *float64 `json:"maximum,omitempty"` + ExclusiveMaximum bool `json:"exclusiveMaximum,omitempty"` + Minimum *float64 `json:"minimum,omitempty"` + ExclusiveMinimum bool `json:"exclusiveMinimum,omitempty"` + MaxLength *int64 `json:"maxLength,omitempty"` + MinLength *int64 `json:"minLength,omitempty"` + Pattern string `json:"pattern,omitempty"` + MaxItems *int64 `json:"maxItems,omitempty"` + MinItems *int64 `json:"minItems,omitempty"` + UniqueItems bool `json:"uniqueItems,omitempty"` + MultipleOf *float64 `json:"multipleOf,omitempty"` + Enum []interface{} `json:"enum,omitempty"` +} + +// SetValidations defines all validations for a simple schema. +// +// NOTE: the input is the larger set of validations available for schemas. +// For simple schemas, MinProperties and MaxProperties are ignored. +func (v *CommonValidations) SetValidations(val SchemaValidations) { + v.Maximum = val.Maximum + v.ExclusiveMaximum = val.ExclusiveMaximum + v.Minimum = val.Minimum + v.ExclusiveMinimum = val.ExclusiveMinimum + v.MaxLength = val.MaxLength + v.MinLength = val.MinLength + v.Pattern = val.Pattern + v.MaxItems = val.MaxItems + v.MinItems = val.MinItems + v.UniqueItems = val.UniqueItems + v.MultipleOf = val.MultipleOf + v.Enum = val.Enum +} + +type clearedValidation struct { + Validation string + Value interface{} +} + +type clearedValidations []clearedValidation + +func (c clearedValidations) apply(cbs []func(string, interface{})) { + for _, cb := range cbs { + for _, cleared := range c { + cb(cleared.Validation, cleared.Value) + } + } +} + +// ClearNumberValidations clears all number validations. +// +// Some callbacks may be set by the caller to capture changed values. 
+func (v *CommonValidations) ClearNumberValidations(cbs ...func(string, interface{})) { + done := make(clearedValidations, 0, 5) + defer func() { + done.apply(cbs) + }() + + if v.Minimum != nil { + done = append(done, clearedValidation{Validation: "minimum", Value: v.Minimum}) + v.Minimum = nil + } + if v.Maximum != nil { + done = append(done, clearedValidation{Validation: "maximum", Value: v.Maximum}) + v.Maximum = nil + } + if v.ExclusiveMaximum { + done = append(done, clearedValidation{Validation: "exclusiveMaximum", Value: v.ExclusiveMaximum}) + v.ExclusiveMaximum = false + } + if v.ExclusiveMinimum { + done = append(done, clearedValidation{Validation: "exclusiveMinimum", Value: v.ExclusiveMinimum}) + v.ExclusiveMinimum = false + } + if v.MultipleOf != nil { + done = append(done, clearedValidation{Validation: "multipleOf", Value: v.MultipleOf}) + v.MultipleOf = nil + } +} + +// ClearStringValidations clears all string validations. +// +// Some callbacks may be set by the caller to capture changed values. +func (v *CommonValidations) ClearStringValidations(cbs ...func(string, interface{})) { + done := make(clearedValidations, 0, 3) + defer func() { + done.apply(cbs) + }() + + if v.Pattern != "" { + done = append(done, clearedValidation{Validation: "pattern", Value: v.Pattern}) + v.Pattern = "" + } + if v.MinLength != nil { + done = append(done, clearedValidation{Validation: "minLength", Value: v.MinLength}) + v.MinLength = nil + } + if v.MaxLength != nil { + done = append(done, clearedValidation{Validation: "maxLength", Value: v.MaxLength}) + v.MaxLength = nil + } +} + +// ClearArrayValidations clears all array validations. +// +// Some callbacks may be set by the caller to capture changed values. +func (v *CommonValidations) ClearArrayValidations(cbs ...func(string, interface{})) { + done := make(clearedValidations, 0, 3) + defer func() { + done.apply(cbs) + }() + + if v.MaxItems != nil { + done = append(done, clearedValidation{Validation: "maxItems", Value: v.MaxItems}) + v.MaxItems = nil + } + if v.MinItems != nil { + done = append(done, clearedValidation{Validation: "minItems", Value: v.MinItems}) + v.MinItems = nil + } + if v.UniqueItems { + done = append(done, clearedValidation{Validation: "uniqueItems", Value: v.UniqueItems}) + v.UniqueItems = false + } +} + +// Validations returns a clone of the validations for a simple schema. +// +// NOTE: in the context of simple schema objects, MinProperties, MaxProperties +// and PatternProperties remain unset. 
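
A brief sketch of the clearing helpers above, using the optional callbacks to capture each validation that gets dropped (import path assumed as upstream; the sample values are illustrative):

```go
package main

import (
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	maximum := 100.0
	v := spec.CommonValidations{
		Maximum:          &maximum,
		ExclusiveMaximum: true,
		Pattern:          "^[a-z]+$",
	}

	// Each cleared validation is reported through the callback.
	report := func(name string, _ interface{}) {
		fmt.Println("cleared:", name)
	}

	v.ClearNumberValidations(report)
	v.ClearStringValidations(report)

	fmt.Println(v.Maximum == nil, v.Pattern == "") // true true
}
```
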
+func (v CommonValidations) Validations() SchemaValidations { + return SchemaValidations{ + CommonValidations: v, + } +} + +// HasNumberValidations indicates if the validations are for numbers or integers +func (v CommonValidations) HasNumberValidations() bool { + return v.Maximum != nil || v.Minimum != nil || v.MultipleOf != nil +} + +// HasStringValidations indicates if the validations are for strings +func (v CommonValidations) HasStringValidations() bool { + return v.MaxLength != nil || v.MinLength != nil || v.Pattern != "" +} + +// HasArrayValidations indicates if the validations are for arrays +func (v CommonValidations) HasArrayValidations() bool { + return v.MaxItems != nil || v.MinItems != nil || v.UniqueItems +} + +// HasEnum indicates if the validation includes some enum constraint +func (v CommonValidations) HasEnum() bool { + return len(v.Enum) > 0 +} + +// SchemaValidations describes the validation properties of a schema +// +// NOTE: at this moment, this is not embedded in SchemaProps because this would induce a breaking change +// in the exported members: all initializers using litterals would fail. +type SchemaValidations struct { + CommonValidations + + PatternProperties SchemaProperties `json:"patternProperties,omitempty"` + MaxProperties *int64 `json:"maxProperties,omitempty"` + MinProperties *int64 `json:"minProperties,omitempty"` +} + +// HasObjectValidations indicates if the validations are for objects +func (v SchemaValidations) HasObjectValidations() bool { + return v.MaxProperties != nil || v.MinProperties != nil || v.PatternProperties != nil +} + +// SetValidations for schema validations +func (v *SchemaValidations) SetValidations(val SchemaValidations) { + v.CommonValidations.SetValidations(val) + v.PatternProperties = val.PatternProperties + v.MaxProperties = val.MaxProperties + v.MinProperties = val.MinProperties +} + +// Validations for a schema +func (v SchemaValidations) Validations() SchemaValidations { + val := v.CommonValidations.Validations() + val.PatternProperties = v.PatternProperties + val.MinProperties = v.MinProperties + val.MaxProperties = v.MaxProperties + return val +} + +// ClearObjectValidations returns a clone of the validations with all object validations cleared. +// +// Some callbacks may be set by the caller to capture changed values. +func (v *SchemaValidations) ClearObjectValidations(cbs ...func(string, interface{})) { + done := make(clearedValidations, 0, 3) + defer func() { + done.apply(cbs) + }() + + if v.MaxProperties != nil { + done = append(done, clearedValidation{Validation: "maxProperties", Value: v.MaxProperties}) + v.MaxProperties = nil + } + if v.MinProperties != nil { + done = append(done, clearedValidation{Validation: "minProperties", Value: v.MinProperties}) + v.MinProperties = nil + } + if v.PatternProperties != nil { + done = append(done, clearedValidation{Validation: "patternProperties", Value: v.PatternProperties}) + v.PatternProperties = nil + } +} diff --git a/vendor/github.com/go-openapi/spec/xml_object.go b/vendor/github.com/go-openapi/spec/xml_object.go new file mode 100644 index 0000000..945a467 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/xml_object.go @@ -0,0 +1,68 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +// XMLObject a metadata object that allows for more fine-tuned XML model definitions. +// +// For more information: http://goo.gl/8us55a#xmlObject +type XMLObject struct { + Name string `json:"name,omitempty"` + Namespace string `json:"namespace,omitempty"` + Prefix string `json:"prefix,omitempty"` + Attribute bool `json:"attribute,omitempty"` + Wrapped bool `json:"wrapped,omitempty"` +} + +// WithName sets the xml name for the object +func (x *XMLObject) WithName(name string) *XMLObject { + x.Name = name + return x +} + +// WithNamespace sets the xml namespace for the object +func (x *XMLObject) WithNamespace(namespace string) *XMLObject { + x.Namespace = namespace + return x +} + +// WithPrefix sets the xml prefix for the object +func (x *XMLObject) WithPrefix(prefix string) *XMLObject { + x.Prefix = prefix + return x +} + +// AsAttribute flags this object as xml attribute +func (x *XMLObject) AsAttribute() *XMLObject { + x.Attribute = true + return x +} + +// AsElement flags this object as an xml node +func (x *XMLObject) AsElement() *XMLObject { + x.Attribute = false + return x +} + +// AsWrapped flags this object as wrapped, this is mostly useful for array types +func (x *XMLObject) AsWrapped() *XMLObject { + x.Wrapped = true + return x +} + +// AsUnwrapped flags this object as an xml node +func (x *XMLObject) AsUnwrapped() *XMLObject { + x.Wrapped = false + return x +} diff --git a/vendor/github.com/go-openapi/swag/.editorconfig b/vendor/github.com/go-openapi/swag/.editorconfig new file mode 100644 index 0000000..3152da6 --- /dev/null +++ b/vendor/github.com/go-openapi/swag/.editorconfig @@ -0,0 +1,26 @@ +# top-most EditorConfig file +root = true + +# Unix-style newlines with a newline ending every file +[*] +end_of_line = lf +insert_final_newline = true +indent_style = space +indent_size = 2 +trim_trailing_whitespace = true + +# Set default charset +[*.{js,py,go,scala,rb,java,html,css,less,sass,md}] +charset = utf-8 + +# Tab indentation (no size specified) +[*.go] +indent_style = tab + +[*.md] +trim_trailing_whitespace = false + +# Matches the exact files either package.json or .travis.yml +[{package.json,.travis.yml}] +indent_style = space +indent_size = 2 diff --git a/vendor/github.com/go-openapi/swag/.gitattributes b/vendor/github.com/go-openapi/swag/.gitattributes new file mode 100644 index 0000000..49ad527 --- /dev/null +++ b/vendor/github.com/go-openapi/swag/.gitattributes @@ -0,0 +1,2 @@ +# gofmt always uses LF, whereas Git uses CRLF on Windows. 
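
A compact sketch of the fluent `XMLObject` helpers defined above; the element name and namespace are illustrative:

```go
package main

import (
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	// Chain the With*/As* helpers to describe how a model serializes to XML.
	x := new(spec.XMLObject).
		WithName("item").                         // illustrative element name
		WithNamespace("http://example.org/item"). // illustrative namespace
		AsWrapped()

	fmt.Printf("%+v\n", *x)
}
```
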
+*.go text eol=lf diff --git a/vendor/github.com/go-openapi/swag/.gitignore b/vendor/github.com/go-openapi/swag/.gitignore new file mode 100644 index 0000000..c4b1b64 --- /dev/null +++ b/vendor/github.com/go-openapi/swag/.gitignore @@ -0,0 +1,5 @@ +secrets.yml +vendor +Godeps +.idea +*.out diff --git a/vendor/github.com/go-openapi/swag/.golangci.yml b/vendor/github.com/go-openapi/swag/.golangci.yml new file mode 100644 index 0000000..80e2be0 --- /dev/null +++ b/vendor/github.com/go-openapi/swag/.golangci.yml @@ -0,0 +1,60 @@ +linters-settings: + govet: + check-shadowing: true + golint: + min-confidence: 0 + gocyclo: + min-complexity: 45 + maligned: + suggest-new: true + dupl: + threshold: 200 + goconst: + min-len: 3 + min-occurrences: 3 + +linters: + enable-all: true + disable: + - maligned + - lll + - gochecknoinits + - gochecknoglobals + - funlen + - godox + - gocognit + - whitespace + - wsl + - wrapcheck + - testpackage + - nlreturn + - gomnd + - exhaustivestruct + - goerr113 + - errorlint + - nestif + - godot + - gofumpt + - paralleltest + - tparallel + - thelper + - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/swag/BENCHMARK.md b/vendor/github.com/go-openapi/swag/BENCHMARK.md new file mode 100644 index 0000000..e7f28ed --- /dev/null +++ b/vendor/github.com/go-openapi/swag/BENCHMARK.md @@ -0,0 +1,52 @@ +# Benchmarks + +## Name mangling utilities + +```bash +go test -bench XXX -run XXX -benchtime 30s +``` + +### Benchmarks at b3e7a5386f996177e4808f11acb2aa93a0f660df + +``` +goos: linux +goarch: amd64 +pkg: github.com/go-openapi/swag +cpu: Intel(R) Core(TM) i5-6200U CPU @ 2.30GHz +BenchmarkToXXXName/ToGoName-4 862623 44101 ns/op 10450 B/op 732 allocs/op +BenchmarkToXXXName/ToVarName-4 853656 40728 ns/op 10468 B/op 734 allocs/op +BenchmarkToXXXName/ToFileName-4 1268312 27813 ns/op 9785 B/op 617 allocs/op +BenchmarkToXXXName/ToCommandName-4 1276322 27903 ns/op 9785 B/op 617 allocs/op +BenchmarkToXXXName/ToHumanNameLower-4 895334 40354 ns/op 10472 B/op 731 allocs/op +BenchmarkToXXXName/ToHumanNameTitle-4 882441 40678 ns/op 10566 B/op 749 allocs/op +``` + +### Benchmarks after PR #79 + +~ x10 performance improvement and ~ /100 memory allocations. 
+ +``` +goos: linux +goarch: amd64 +pkg: github.com/go-openapi/swag +cpu: Intel(R) Core(TM) i5-6200U CPU @ 2.30GHz +BenchmarkToXXXName/ToGoName-4 9595830 3991 ns/op 42 B/op 5 allocs/op +BenchmarkToXXXName/ToVarName-4 9194276 3984 ns/op 62 B/op 7 allocs/op +BenchmarkToXXXName/ToFileName-4 17002711 2123 ns/op 147 B/op 7 allocs/op +BenchmarkToXXXName/ToCommandName-4 16772926 2111 ns/op 147 B/op 7 allocs/op +BenchmarkToXXXName/ToHumanNameLower-4 9788331 3749 ns/op 92 B/op 6 allocs/op +BenchmarkToXXXName/ToHumanNameTitle-4 9188260 3941 ns/op 104 B/op 6 allocs/op +``` + +``` +goos: linux +goarch: amd64 +pkg: github.com/go-openapi/swag +cpu: AMD Ryzen 7 5800X 8-Core Processor +BenchmarkToXXXName/ToGoName-16 18527378 1972 ns/op 42 B/op 5 allocs/op +BenchmarkToXXXName/ToVarName-16 15552692 2093 ns/op 62 B/op 7 allocs/op +BenchmarkToXXXName/ToFileName-16 32161176 1117 ns/op 147 B/op 7 allocs/op +BenchmarkToXXXName/ToCommandName-16 32256634 1137 ns/op 147 B/op 7 allocs/op +BenchmarkToXXXName/ToHumanNameLower-16 18599661 1946 ns/op 92 B/op 6 allocs/op +BenchmarkToXXXName/ToHumanNameTitle-16 17581353 2054 ns/op 105 B/op 6 allocs/op +``` diff --git a/vendor/github.com/go-openapi/swag/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/swag/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..9322b06 --- /dev/null +++ b/vendor/github.com/go-openapi/swag/CODE_OF_CONDUCT.md @@ -0,0 +1,74 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. 
Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at ivan+abuse@flanders.co.nz. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/go-openapi/swag/LICENSE b/vendor/github.com/go-openapi/swag/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/vendor/github.com/go-openapi/swag/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
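Note: the remainder of this diff vendors the `github.com/go-openapi/swag` helper package pulled in by the Swagger tooling; none of it is project code. As a quick orientation only, here is a minimal, hedged sketch of how the pointer/value, string-conversion, and JSON-concatenation helpers exposed by the vendored sources below are typically used (names and signatures taken from `convert.go`, `convert_types.go`, and `json.go` in this diff; the `main` wrapper is illustrative, not part of the repository):

```go
package main

import (
	"fmt"

	"github.com/go-openapi/swag"
)

func main() {
	// Pointer/value helpers: useful for OpenAPI models that use *string, *int64, etc.
	// The *Value accessors are nil-safe and fall back to the zero value.
	name := swag.String("benefactor") // *string
	fmt.Println(swag.StringValue(name), swag.StringValue(nil))

	// String-to-builtin conversions (thin wrappers around strconv).
	ok, _ := swag.ConvertBool("yes") // true: "yes" is in the package's truthy table
	n, err := swag.ConvertInt64("42")
	fmt.Println(ok, n, err)

	// Fast JSON concatenation of two JSON objects into one.
	merged := swag.ConcatJSON([]byte(`{"a":1}`), []byte(`{"b":2}`))
	fmt.Println(string(merged)) // {"a":1,"b":2}
}
```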
diff --git a/vendor/github.com/go-openapi/swag/README.md b/vendor/github.com/go-openapi/swag/README.md new file mode 100644 index 0000000..a729222 --- /dev/null +++ b/vendor/github.com/go-openapi/swag/README.md @@ -0,0 +1,23 @@ +# Swag [![Build Status](https://github.com/go-openapi/swag/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/swag/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/swag/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/swag) + +[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/swag/master/LICENSE) +[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/swag.svg)](https://pkg.go.dev/github.com/go-openapi/swag) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/swag)](https://goreportcard.com/report/github.com/go-openapi/swag) + +Contains a bunch of helper functions for go-openapi and go-swagger projects. + +You may also use it standalone for your projects. + +* convert between value and pointers for builtin types +* convert from string to builtin types (wraps strconv) +* fast json concatenation +* search in path +* load from file or http +* name mangling + + +This repo has only few dependencies outside of the standard library: + +* YAML utilities depend on `gopkg.in/yaml.v3` +* `github.com/mailru/easyjson v0.7.7` diff --git a/vendor/github.com/go-openapi/swag/convert.go b/vendor/github.com/go-openapi/swag/convert.go new file mode 100644 index 0000000..fc085ae --- /dev/null +++ b/vendor/github.com/go-openapi/swag/convert.go @@ -0,0 +1,208 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package swag + +import ( + "math" + "strconv" + "strings" +) + +// same as ECMA Number.MAX_SAFE_INTEGER and Number.MIN_SAFE_INTEGER +const ( + maxJSONFloat = float64(1<<53 - 1) // 9007199254740991.0 2^53 - 1 + minJSONFloat = -float64(1<<53 - 1) //-9007199254740991.0 -2^53 - 1 + epsilon float64 = 1e-9 +) + +// IsFloat64AJSONInteger allow for integers [-2^53, 2^53-1] inclusive +func IsFloat64AJSONInteger(f float64) bool { + if math.IsNaN(f) || math.IsInf(f, 0) || f < minJSONFloat || f > maxJSONFloat { + return false + } + fa := math.Abs(f) + g := float64(uint64(f)) + ga := math.Abs(g) + + diff := math.Abs(f - g) + + // more info: https://floating-point-gui.de/errors/comparison/#look-out-for-edge-cases + switch { + case f == g: // best case + return true + case f == float64(int64(f)) || f == float64(uint64(f)): // optimistic case + return true + case f == 0 || g == 0 || diff < math.SmallestNonzeroFloat64: // very close to 0 values + return diff < (epsilon * math.SmallestNonzeroFloat64) + } + // check the relative error + return diff/math.Min(fa+ga, math.MaxFloat64) < epsilon +} + +var evaluatesAsTrue map[string]struct{} + +func init() { + evaluatesAsTrue = map[string]struct{}{ + "true": {}, + "1": {}, + "yes": {}, + "ok": {}, + "y": {}, + "on": {}, + "selected": {}, + "checked": {}, + "t": {}, + "enabled": {}, + } +} + +// ConvertBool turn a string into a boolean +func ConvertBool(str string) (bool, error) { + _, ok := evaluatesAsTrue[strings.ToLower(str)] + return ok, nil +} + +// ConvertFloat32 turn a string into a float32 +func ConvertFloat32(str string) (float32, error) { + f, err := strconv.ParseFloat(str, 32) + if err != nil { + return 0, err + } + return float32(f), nil +} + +// ConvertFloat64 turn a string into a float64 +func ConvertFloat64(str string) (float64, error) { + return strconv.ParseFloat(str, 64) +} + +// ConvertInt8 turn a string into an int8 +func ConvertInt8(str string) (int8, error) { + i, err := strconv.ParseInt(str, 10, 8) + if err != nil { + return 0, err + } + return int8(i), nil +} + +// ConvertInt16 turn a string into an int16 +func ConvertInt16(str string) (int16, error) { + i, err := strconv.ParseInt(str, 10, 16) + if err != nil { + return 0, err + } + return int16(i), nil +} + +// ConvertInt32 turn a string into an int32 +func ConvertInt32(str string) (int32, error) { + i, err := strconv.ParseInt(str, 10, 32) + if err != nil { + return 0, err + } + return int32(i), nil +} + +// ConvertInt64 turn a string into an int64 +func ConvertInt64(str string) (int64, error) { + return strconv.ParseInt(str, 10, 64) +} + +// ConvertUint8 turn a string into an uint8 +func ConvertUint8(str string) (uint8, error) { + i, err := strconv.ParseUint(str, 10, 8) + if err != nil { + return 0, err + } + return uint8(i), nil +} + +// ConvertUint16 turn a string into an uint16 +func ConvertUint16(str string) (uint16, error) { + i, err := strconv.ParseUint(str, 10, 16) + if err != nil { + return 0, err + } + return uint16(i), nil +} + +// ConvertUint32 turn a string into an uint32 +func ConvertUint32(str string) (uint32, error) { + i, err := strconv.ParseUint(str, 10, 32) + if err != nil { + return 0, err + } + return uint32(i), nil +} + +// ConvertUint64 turn a string into an uint64 +func ConvertUint64(str string) (uint64, error) { + return strconv.ParseUint(str, 10, 64) +} + +// FormatBool turns a boolean into a string +func FormatBool(value bool) string { + return strconv.FormatBool(value) +} + +// FormatFloat32 turns a float32 into a string +func FormatFloat32(value float32) 
string { + return strconv.FormatFloat(float64(value), 'f', -1, 32) +} + +// FormatFloat64 turns a float64 into a string +func FormatFloat64(value float64) string { + return strconv.FormatFloat(value, 'f', -1, 64) +} + +// FormatInt8 turns an int8 into a string +func FormatInt8(value int8) string { + return strconv.FormatInt(int64(value), 10) +} + +// FormatInt16 turns an int16 into a string +func FormatInt16(value int16) string { + return strconv.FormatInt(int64(value), 10) +} + +// FormatInt32 turns an int32 into a string +func FormatInt32(value int32) string { + return strconv.Itoa(int(value)) +} + +// FormatInt64 turns an int64 into a string +func FormatInt64(value int64) string { + return strconv.FormatInt(value, 10) +} + +// FormatUint8 turns an uint8 into a string +func FormatUint8(value uint8) string { + return strconv.FormatUint(uint64(value), 10) +} + +// FormatUint16 turns an uint16 into a string +func FormatUint16(value uint16) string { + return strconv.FormatUint(uint64(value), 10) +} + +// FormatUint32 turns an uint32 into a string +func FormatUint32(value uint32) string { + return strconv.FormatUint(uint64(value), 10) +} + +// FormatUint64 turns an uint64 into a string +func FormatUint64(value uint64) string { + return strconv.FormatUint(value, 10) +} diff --git a/vendor/github.com/go-openapi/swag/convert_types.go b/vendor/github.com/go-openapi/swag/convert_types.go new file mode 100644 index 0000000..c49cc47 --- /dev/null +++ b/vendor/github.com/go-openapi/swag/convert_types.go @@ -0,0 +1,730 @@ +package swag + +import "time" + +// This file was taken from the aws go sdk + +// String returns a pointer to of the string value passed in. +func String(v string) *string { + return &v +} + +// StringValue returns the value of the string pointer passed in or +// "" if the pointer is nil. +func StringValue(v *string) string { + if v != nil { + return *v + } + return "" +} + +// StringSlice converts a slice of string values into a slice of +// string pointers +func StringSlice(src []string) []*string { + dst := make([]*string, len(src)) + for i := 0; i < len(src); i++ { + dst[i] = &(src[i]) + } + return dst +} + +// StringValueSlice converts a slice of string pointers into a slice of +// string values +func StringValueSlice(src []*string) []string { + dst := make([]string, len(src)) + for i := 0; i < len(src); i++ { + if src[i] != nil { + dst[i] = *(src[i]) + } + } + return dst +} + +// StringMap converts a string map of string values into a string +// map of string pointers +func StringMap(src map[string]string) map[string]*string { + dst := make(map[string]*string) + for k, val := range src { + v := val + dst[k] = &v + } + return dst +} + +// StringValueMap converts a string map of string pointers into a string +// map of string values +func StringValueMap(src map[string]*string) map[string]string { + dst := make(map[string]string) + for k, val := range src { + if val != nil { + dst[k] = *val + } + } + return dst +} + +// Bool returns a pointer to of the bool value passed in. +func Bool(v bool) *bool { + return &v +} + +// BoolValue returns the value of the bool pointer passed in or +// false if the pointer is nil. 
+func BoolValue(v *bool) bool { + if v != nil { + return *v + } + return false +} + +// BoolSlice converts a slice of bool values into a slice of +// bool pointers +func BoolSlice(src []bool) []*bool { + dst := make([]*bool, len(src)) + for i := 0; i < len(src); i++ { + dst[i] = &(src[i]) + } + return dst +} + +// BoolValueSlice converts a slice of bool pointers into a slice of +// bool values +func BoolValueSlice(src []*bool) []bool { + dst := make([]bool, len(src)) + for i := 0; i < len(src); i++ { + if src[i] != nil { + dst[i] = *(src[i]) + } + } + return dst +} + +// BoolMap converts a string map of bool values into a string +// map of bool pointers +func BoolMap(src map[string]bool) map[string]*bool { + dst := make(map[string]*bool) + for k, val := range src { + v := val + dst[k] = &v + } + return dst +} + +// BoolValueMap converts a string map of bool pointers into a string +// map of bool values +func BoolValueMap(src map[string]*bool) map[string]bool { + dst := make(map[string]bool) + for k, val := range src { + if val != nil { + dst[k] = *val + } + } + return dst +} + +// Int returns a pointer to of the int value passed in. +func Int(v int) *int { + return &v +} + +// IntValue returns the value of the int pointer passed in or +// 0 if the pointer is nil. +func IntValue(v *int) int { + if v != nil { + return *v + } + return 0 +} + +// IntSlice converts a slice of int values into a slice of +// int pointers +func IntSlice(src []int) []*int { + dst := make([]*int, len(src)) + for i := 0; i < len(src); i++ { + dst[i] = &(src[i]) + } + return dst +} + +// IntValueSlice converts a slice of int pointers into a slice of +// int values +func IntValueSlice(src []*int) []int { + dst := make([]int, len(src)) + for i := 0; i < len(src); i++ { + if src[i] != nil { + dst[i] = *(src[i]) + } + } + return dst +} + +// IntMap converts a string map of int values into a string +// map of int pointers +func IntMap(src map[string]int) map[string]*int { + dst := make(map[string]*int) + for k, val := range src { + v := val + dst[k] = &v + } + return dst +} + +// IntValueMap converts a string map of int pointers into a string +// map of int values +func IntValueMap(src map[string]*int) map[string]int { + dst := make(map[string]int) + for k, val := range src { + if val != nil { + dst[k] = *val + } + } + return dst +} + +// Int32 returns a pointer to of the int32 value passed in. +func Int32(v int32) *int32 { + return &v +} + +// Int32Value returns the value of the int32 pointer passed in or +// 0 if the pointer is nil. 
+func Int32Value(v *int32) int32 { + if v != nil { + return *v + } + return 0 +} + +// Int32Slice converts a slice of int32 values into a slice of +// int32 pointers +func Int32Slice(src []int32) []*int32 { + dst := make([]*int32, len(src)) + for i := 0; i < len(src); i++ { + dst[i] = &(src[i]) + } + return dst +} + +// Int32ValueSlice converts a slice of int32 pointers into a slice of +// int32 values +func Int32ValueSlice(src []*int32) []int32 { + dst := make([]int32, len(src)) + for i := 0; i < len(src); i++ { + if src[i] != nil { + dst[i] = *(src[i]) + } + } + return dst +} + +// Int32Map converts a string map of int32 values into a string +// map of int32 pointers +func Int32Map(src map[string]int32) map[string]*int32 { + dst := make(map[string]*int32) + for k, val := range src { + v := val + dst[k] = &v + } + return dst +} + +// Int32ValueMap converts a string map of int32 pointers into a string +// map of int32 values +func Int32ValueMap(src map[string]*int32) map[string]int32 { + dst := make(map[string]int32) + for k, val := range src { + if val != nil { + dst[k] = *val + } + } + return dst +} + +// Int64 returns a pointer to of the int64 value passed in. +func Int64(v int64) *int64 { + return &v +} + +// Int64Value returns the value of the int64 pointer passed in or +// 0 if the pointer is nil. +func Int64Value(v *int64) int64 { + if v != nil { + return *v + } + return 0 +} + +// Int64Slice converts a slice of int64 values into a slice of +// int64 pointers +func Int64Slice(src []int64) []*int64 { + dst := make([]*int64, len(src)) + for i := 0; i < len(src); i++ { + dst[i] = &(src[i]) + } + return dst +} + +// Int64ValueSlice converts a slice of int64 pointers into a slice of +// int64 values +func Int64ValueSlice(src []*int64) []int64 { + dst := make([]int64, len(src)) + for i := 0; i < len(src); i++ { + if src[i] != nil { + dst[i] = *(src[i]) + } + } + return dst +} + +// Int64Map converts a string map of int64 values into a string +// map of int64 pointers +func Int64Map(src map[string]int64) map[string]*int64 { + dst := make(map[string]*int64) + for k, val := range src { + v := val + dst[k] = &v + } + return dst +} + +// Int64ValueMap converts a string map of int64 pointers into a string +// map of int64 values +func Int64ValueMap(src map[string]*int64) map[string]int64 { + dst := make(map[string]int64) + for k, val := range src { + if val != nil { + dst[k] = *val + } + } + return dst +} + +// Uint16 returns a pointer to of the uint16 value passed in. +func Uint16(v uint16) *uint16 { + return &v +} + +// Uint16Value returns the value of the uint16 pointer passed in or +// 0 if the pointer is nil. 
+func Uint16Value(v *uint16) uint16 { + if v != nil { + return *v + } + + return 0 +} + +// Uint16Slice converts a slice of uint16 values into a slice of +// uint16 pointers +func Uint16Slice(src []uint16) []*uint16 { + dst := make([]*uint16, len(src)) + for i := 0; i < len(src); i++ { + dst[i] = &(src[i]) + } + + return dst +} + +// Uint16ValueSlice converts a slice of uint16 pointers into a slice of +// uint16 values +func Uint16ValueSlice(src []*uint16) []uint16 { + dst := make([]uint16, len(src)) + + for i := 0; i < len(src); i++ { + if src[i] != nil { + dst[i] = *(src[i]) + } + } + + return dst +} + +// Uint16Map converts a string map of uint16 values into a string +// map of uint16 pointers +func Uint16Map(src map[string]uint16) map[string]*uint16 { + dst := make(map[string]*uint16) + + for k, val := range src { + v := val + dst[k] = &v + } + + return dst +} + +// Uint16ValueMap converts a string map of uint16 pointers into a string +// map of uint16 values +func Uint16ValueMap(src map[string]*uint16) map[string]uint16 { + dst := make(map[string]uint16) + + for k, val := range src { + if val != nil { + dst[k] = *val + } + } + + return dst +} + +// Uint returns a pointer to of the uint value passed in. +func Uint(v uint) *uint { + return &v +} + +// UintValue returns the value of the uint pointer passed in or +// 0 if the pointer is nil. +func UintValue(v *uint) uint { + if v != nil { + return *v + } + return 0 +} + +// UintSlice converts a slice of uint values into a slice of +// uint pointers +func UintSlice(src []uint) []*uint { + dst := make([]*uint, len(src)) + for i := 0; i < len(src); i++ { + dst[i] = &(src[i]) + } + return dst +} + +// UintValueSlice converts a slice of uint pointers into a slice of +// uint values +func UintValueSlice(src []*uint) []uint { + dst := make([]uint, len(src)) + for i := 0; i < len(src); i++ { + if src[i] != nil { + dst[i] = *(src[i]) + } + } + return dst +} + +// UintMap converts a string map of uint values into a string +// map of uint pointers +func UintMap(src map[string]uint) map[string]*uint { + dst := make(map[string]*uint) + for k, val := range src { + v := val + dst[k] = &v + } + return dst +} + +// UintValueMap converts a string map of uint pointers into a string +// map of uint values +func UintValueMap(src map[string]*uint) map[string]uint { + dst := make(map[string]uint) + for k, val := range src { + if val != nil { + dst[k] = *val + } + } + return dst +} + +// Uint32 returns a pointer to of the uint32 value passed in. +func Uint32(v uint32) *uint32 { + return &v +} + +// Uint32Value returns the value of the uint32 pointer passed in or +// 0 if the pointer is nil. 
+func Uint32Value(v *uint32) uint32 { + if v != nil { + return *v + } + return 0 +} + +// Uint32Slice converts a slice of uint32 values into a slice of +// uint32 pointers +func Uint32Slice(src []uint32) []*uint32 { + dst := make([]*uint32, len(src)) + for i := 0; i < len(src); i++ { + dst[i] = &(src[i]) + } + return dst +} + +// Uint32ValueSlice converts a slice of uint32 pointers into a slice of +// uint32 values +func Uint32ValueSlice(src []*uint32) []uint32 { + dst := make([]uint32, len(src)) + for i := 0; i < len(src); i++ { + if src[i] != nil { + dst[i] = *(src[i]) + } + } + return dst +} + +// Uint32Map converts a string map of uint32 values into a string +// map of uint32 pointers +func Uint32Map(src map[string]uint32) map[string]*uint32 { + dst := make(map[string]*uint32) + for k, val := range src { + v := val + dst[k] = &v + } + return dst +} + +// Uint32ValueMap converts a string map of uint32 pointers into a string +// map of uint32 values +func Uint32ValueMap(src map[string]*uint32) map[string]uint32 { + dst := make(map[string]uint32) + for k, val := range src { + if val != nil { + dst[k] = *val + } + } + return dst +} + +// Uint64 returns a pointer to of the uint64 value passed in. +func Uint64(v uint64) *uint64 { + return &v +} + +// Uint64Value returns the value of the uint64 pointer passed in or +// 0 if the pointer is nil. +func Uint64Value(v *uint64) uint64 { + if v != nil { + return *v + } + return 0 +} + +// Uint64Slice converts a slice of uint64 values into a slice of +// uint64 pointers +func Uint64Slice(src []uint64) []*uint64 { + dst := make([]*uint64, len(src)) + for i := 0; i < len(src); i++ { + dst[i] = &(src[i]) + } + return dst +} + +// Uint64ValueSlice converts a slice of uint64 pointers into a slice of +// uint64 values +func Uint64ValueSlice(src []*uint64) []uint64 { + dst := make([]uint64, len(src)) + for i := 0; i < len(src); i++ { + if src[i] != nil { + dst[i] = *(src[i]) + } + } + return dst +} + +// Uint64Map converts a string map of uint64 values into a string +// map of uint64 pointers +func Uint64Map(src map[string]uint64) map[string]*uint64 { + dst := make(map[string]*uint64) + for k, val := range src { + v := val + dst[k] = &v + } + return dst +} + +// Uint64ValueMap converts a string map of uint64 pointers into a string +// map of uint64 values +func Uint64ValueMap(src map[string]*uint64) map[string]uint64 { + dst := make(map[string]uint64) + for k, val := range src { + if val != nil { + dst[k] = *val + } + } + return dst +} + +// Float32 returns a pointer to of the float32 value passed in. +func Float32(v float32) *float32 { + return &v +} + +// Float32Value returns the value of the float32 pointer passed in or +// 0 if the pointer is nil. 
+func Float32Value(v *float32) float32 { + if v != nil { + return *v + } + + return 0 +} + +// Float32Slice converts a slice of float32 values into a slice of +// float32 pointers +func Float32Slice(src []float32) []*float32 { + dst := make([]*float32, len(src)) + + for i := 0; i < len(src); i++ { + dst[i] = &(src[i]) + } + + return dst +} + +// Float32ValueSlice converts a slice of float32 pointers into a slice of +// float32 values +func Float32ValueSlice(src []*float32) []float32 { + dst := make([]float32, len(src)) + + for i := 0; i < len(src); i++ { + if src[i] != nil { + dst[i] = *(src[i]) + } + } + + return dst +} + +// Float32Map converts a string map of float32 values into a string +// map of float32 pointers +func Float32Map(src map[string]float32) map[string]*float32 { + dst := make(map[string]*float32) + + for k, val := range src { + v := val + dst[k] = &v + } + + return dst +} + +// Float32ValueMap converts a string map of float32 pointers into a string +// map of float32 values +func Float32ValueMap(src map[string]*float32) map[string]float32 { + dst := make(map[string]float32) + + for k, val := range src { + if val != nil { + dst[k] = *val + } + } + + return dst +} + +// Float64 returns a pointer to of the float64 value passed in. +func Float64(v float64) *float64 { + return &v +} + +// Float64Value returns the value of the float64 pointer passed in or +// 0 if the pointer is nil. +func Float64Value(v *float64) float64 { + if v != nil { + return *v + } + return 0 +} + +// Float64Slice converts a slice of float64 values into a slice of +// float64 pointers +func Float64Slice(src []float64) []*float64 { + dst := make([]*float64, len(src)) + for i := 0; i < len(src); i++ { + dst[i] = &(src[i]) + } + return dst +} + +// Float64ValueSlice converts a slice of float64 pointers into a slice of +// float64 values +func Float64ValueSlice(src []*float64) []float64 { + dst := make([]float64, len(src)) + for i := 0; i < len(src); i++ { + if src[i] != nil { + dst[i] = *(src[i]) + } + } + return dst +} + +// Float64Map converts a string map of float64 values into a string +// map of float64 pointers +func Float64Map(src map[string]float64) map[string]*float64 { + dst := make(map[string]*float64) + for k, val := range src { + v := val + dst[k] = &v + } + return dst +} + +// Float64ValueMap converts a string map of float64 pointers into a string +// map of float64 values +func Float64ValueMap(src map[string]*float64) map[string]float64 { + dst := make(map[string]float64) + for k, val := range src { + if val != nil { + dst[k] = *val + } + } + return dst +} + +// Time returns a pointer to of the time.Time value passed in. +func Time(v time.Time) *time.Time { + return &v +} + +// TimeValue returns the value of the time.Time pointer passed in or +// time.Time{} if the pointer is nil. 
+func TimeValue(v *time.Time) time.Time { + if v != nil { + return *v + } + return time.Time{} +} + +// TimeSlice converts a slice of time.Time values into a slice of +// time.Time pointers +func TimeSlice(src []time.Time) []*time.Time { + dst := make([]*time.Time, len(src)) + for i := 0; i < len(src); i++ { + dst[i] = &(src[i]) + } + return dst +} + +// TimeValueSlice converts a slice of time.Time pointers into a slice of +// time.Time values +func TimeValueSlice(src []*time.Time) []time.Time { + dst := make([]time.Time, len(src)) + for i := 0; i < len(src); i++ { + if src[i] != nil { + dst[i] = *(src[i]) + } + } + return dst +} + +// TimeMap converts a string map of time.Time values into a string +// map of time.Time pointers +func TimeMap(src map[string]time.Time) map[string]*time.Time { + dst := make(map[string]*time.Time) + for k, val := range src { + v := val + dst[k] = &v + } + return dst +} + +// TimeValueMap converts a string map of time.Time pointers into a string +// map of time.Time values +func TimeValueMap(src map[string]*time.Time) map[string]time.Time { + dst := make(map[string]time.Time) + for k, val := range src { + if val != nil { + dst[k] = *val + } + } + return dst +} diff --git a/vendor/github.com/go-openapi/swag/doc.go b/vendor/github.com/go-openapi/swag/doc.go new file mode 100644 index 0000000..55094cb --- /dev/null +++ b/vendor/github.com/go-openapi/swag/doc.go @@ -0,0 +1,31 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/* +Package swag contains a bunch of helper functions for go-openapi and go-swagger projects. + +You may also use it standalone for your projects. + + - convert between value and pointers for builtin types + - convert from string to builtin types (wraps strconv) + - fast json concatenation + - search in path + - load from file or http + - name mangling + +This repo has only few dependencies outside of the standard library: + + - YAML utilities depend on gopkg.in/yaml.v2 +*/ +package swag diff --git a/vendor/github.com/go-openapi/swag/file.go b/vendor/github.com/go-openapi/swag/file.go new file mode 100644 index 0000000..16accc5 --- /dev/null +++ b/vendor/github.com/go-openapi/swag/file.go @@ -0,0 +1,33 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package swag + +import "mime/multipart" + +// File represents an uploaded file. 
+type File struct { + Data multipart.File + Header *multipart.FileHeader +} + +// Read bytes from the file +func (f *File) Read(p []byte) (n int, err error) { + return f.Data.Read(p) +} + +// Close the file +func (f *File) Close() error { + return f.Data.Close() +} diff --git a/vendor/github.com/go-openapi/swag/initialism_index.go b/vendor/github.com/go-openapi/swag/initialism_index.go new file mode 100644 index 0000000..20a359b --- /dev/null +++ b/vendor/github.com/go-openapi/swag/initialism_index.go @@ -0,0 +1,202 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package swag + +import ( + "sort" + "strings" + "sync" +) + +var ( + // commonInitialisms are common acronyms that are kept as whole uppercased words. + commonInitialisms *indexOfInitialisms + + // initialisms is a slice of sorted initialisms + initialisms []string + + // a copy of initialisms pre-baked as []rune + initialismsRunes [][]rune + initialismsUpperCased [][]rune + + isInitialism func(string) bool + + maxAllocMatches int +) + +func init() { + // Taken from https://github.com/golang/lint/blob/3390df4df2787994aea98de825b964ac7944b817/lint.go#L732-L769 + configuredInitialisms := map[string]bool{ + "ACL": true, + "API": true, + "ASCII": true, + "CPU": true, + "CSS": true, + "DNS": true, + "EOF": true, + "GUID": true, + "HTML": true, + "HTTPS": true, + "HTTP": true, + "ID": true, + "IP": true, + "IPv4": true, + "IPv6": true, + "JSON": true, + "LHS": true, + "OAI": true, + "QPS": true, + "RAM": true, + "RHS": true, + "RPC": true, + "SLA": true, + "SMTP": true, + "SQL": true, + "SSH": true, + "TCP": true, + "TLS": true, + "TTL": true, + "UDP": true, + "UI": true, + "UID": true, + "UUID": true, + "URI": true, + "URL": true, + "UTF8": true, + "VM": true, + "XML": true, + "XMPP": true, + "XSRF": true, + "XSS": true, + } + + // a thread-safe index of initialisms + commonInitialisms = newIndexOfInitialisms().load(configuredInitialisms) + initialisms = commonInitialisms.sorted() + initialismsRunes = asRunes(initialisms) + initialismsUpperCased = asUpperCased(initialisms) + maxAllocMatches = maxAllocHeuristic(initialismsRunes) + + // a test function + isInitialism = commonInitialisms.isInitialism +} + +func asRunes(in []string) [][]rune { + out := make([][]rune, len(in)) + for i, initialism := range in { + out[i] = []rune(initialism) + } + + return out +} + +func asUpperCased(in []string) [][]rune { + out := make([][]rune, len(in)) + + for i, initialism := range in { + out[i] = []rune(upper(trim(initialism))) + } + + return out +} + +func maxAllocHeuristic(in [][]rune) int { + heuristic := make(map[rune]int) + for _, initialism := range in { + heuristic[initialism[0]]++ + } + + var maxAlloc int + for _, val := range heuristic { + if val > maxAlloc { + maxAlloc = val + } + } + + return maxAlloc +} + +// AddInitialisms add additional initialisms +func AddInitialisms(words ...string) { + for _, word := range words { + // commonInitialisms[upper(word)] = true + 
commonInitialisms.add(upper(word)) + } + // sort again + initialisms = commonInitialisms.sorted() + initialismsRunes = asRunes(initialisms) + initialismsUpperCased = asUpperCased(initialisms) +} + +// indexOfInitialisms is a thread-safe implementation of the sorted index of initialisms. +// Since go1.9, this may be implemented with sync.Map. +type indexOfInitialisms struct { + sortMutex *sync.Mutex + index *sync.Map +} + +func newIndexOfInitialisms() *indexOfInitialisms { + return &indexOfInitialisms{ + sortMutex: new(sync.Mutex), + index: new(sync.Map), + } +} + +func (m *indexOfInitialisms) load(initial map[string]bool) *indexOfInitialisms { + m.sortMutex.Lock() + defer m.sortMutex.Unlock() + for k, v := range initial { + m.index.Store(k, v) + } + return m +} + +func (m *indexOfInitialisms) isInitialism(key string) bool { + _, ok := m.index.Load(key) + return ok +} + +func (m *indexOfInitialisms) add(key string) *indexOfInitialisms { + m.index.Store(key, true) + return m +} + +func (m *indexOfInitialisms) sorted() (result []string) { + m.sortMutex.Lock() + defer m.sortMutex.Unlock() + m.index.Range(func(key, _ interface{}) bool { + k := key.(string) + result = append(result, k) + return true + }) + sort.Sort(sort.Reverse(byInitialism(result))) + return +} + +type byInitialism []string + +func (s byInitialism) Len() int { + return len(s) +} +func (s byInitialism) Swap(i, j int) { + s[i], s[j] = s[j], s[i] +} +func (s byInitialism) Less(i, j int) bool { + if len(s[i]) != len(s[j]) { + return len(s[i]) < len(s[j]) + } + + return strings.Compare(s[i], s[j]) > 0 +} diff --git a/vendor/github.com/go-openapi/swag/json.go b/vendor/github.com/go-openapi/swag/json.go new file mode 100644 index 0000000..7e9902c --- /dev/null +++ b/vendor/github.com/go-openapi/swag/json.go @@ -0,0 +1,312 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package swag + +import ( + "bytes" + "encoding/json" + "log" + "reflect" + "strings" + "sync" + + "github.com/mailru/easyjson/jlexer" + "github.com/mailru/easyjson/jwriter" +) + +// nullJSON represents a JSON object with null type +var nullJSON = []byte("null") + +// DefaultJSONNameProvider the default cache for types +var DefaultJSONNameProvider = NewNameProvider() + +const comma = byte(',') + +var closers map[byte]byte + +func init() { + closers = map[byte]byte{ + '{': '}', + '[': ']', + } +} + +type ejMarshaler interface { + MarshalEasyJSON(w *jwriter.Writer) +} + +type ejUnmarshaler interface { + UnmarshalEasyJSON(w *jlexer.Lexer) +} + +// WriteJSON writes json data, prefers finding an appropriate interface to short-circuit the marshaler +// so it takes the fastest option available. 
+func WriteJSON(data interface{}) ([]byte, error) { + if d, ok := data.(ejMarshaler); ok { + jw := new(jwriter.Writer) + d.MarshalEasyJSON(jw) + return jw.BuildBytes() + } + if d, ok := data.(json.Marshaler); ok { + return d.MarshalJSON() + } + return json.Marshal(data) +} + +// ReadJSON reads json data, prefers finding an appropriate interface to short-circuit the unmarshaler +// so it takes the fastest option available +func ReadJSON(data []byte, value interface{}) error { + trimmedData := bytes.Trim(data, "\x00") + if d, ok := value.(ejUnmarshaler); ok { + jl := &jlexer.Lexer{Data: trimmedData} + d.UnmarshalEasyJSON(jl) + return jl.Error() + } + if d, ok := value.(json.Unmarshaler); ok { + return d.UnmarshalJSON(trimmedData) + } + return json.Unmarshal(trimmedData, value) +} + +// DynamicJSONToStruct converts an untyped json structure into a struct +func DynamicJSONToStruct(data interface{}, target interface{}) error { + // TODO: convert straight to a json typed map (mergo + iterate?) + b, err := WriteJSON(data) + if err != nil { + return err + } + return ReadJSON(b, target) +} + +// ConcatJSON concatenates multiple json objects efficiently +func ConcatJSON(blobs ...[]byte) []byte { + if len(blobs) == 0 { + return nil + } + + last := len(blobs) - 1 + for blobs[last] == nil || bytes.Equal(blobs[last], nullJSON) { + // strips trailing null objects + last-- + if last < 0 { + // there was nothing but "null"s or nil... + return nil + } + } + if last == 0 { + return blobs[0] + } + + var opening, closing byte + var idx, a int + buf := bytes.NewBuffer(nil) + + for i, b := range blobs[:last+1] { + if b == nil || bytes.Equal(b, nullJSON) { + // a null object is in the list: skip it + continue + } + if len(b) > 0 && opening == 0 { // is this an array or an object? + opening, closing = b[0], closers[b[0]] + } + + if opening != '{' && opening != '[' { + continue // don't know how to concatenate non container objects + } + + if len(b) < 3 { // yep empty but also the last one, so closing this thing + if i == last && a > 0 { + if err := buf.WriteByte(closing); err != nil { + log.Println(err) + } + } + continue + } + + idx = 0 + if a > 0 { // we need to join with a comma for everything beyond the first non-empty item + if err := buf.WriteByte(comma); err != nil { + log.Println(err) + } + idx = 1 // this is not the first or the last so we want to drop the leading bracket + } + + if i != last { // not the last one, strip brackets + if _, err := buf.Write(b[idx : len(b)-1]); err != nil { + log.Println(err) + } + } else { // last one, strip only the leading bracket + if _, err := buf.Write(b[idx:]); err != nil { + log.Println(err) + } + } + a++ + } + // somehow it ended up being empty, so provide a default value + if buf.Len() == 0 { + if err := buf.WriteByte(opening); err != nil { + log.Println(err) + } + if err := buf.WriteByte(closing); err != nil { + log.Println(err) + } + } + return buf.Bytes() +} + +// ToDynamicJSON turns an object into a properly JSON typed structure +func ToDynamicJSON(data interface{}) interface{} { + // TODO: convert straight to a json typed map (mergo + iterate?) 
+ b, err := json.Marshal(data) + if err != nil { + log.Println(err) + } + var res interface{} + if err := json.Unmarshal(b, &res); err != nil { + log.Println(err) + } + return res +} + +// FromDynamicJSON turns an object into a properly JSON typed structure +func FromDynamicJSON(data, target interface{}) error { + b, err := json.Marshal(data) + if err != nil { + log.Println(err) + } + return json.Unmarshal(b, target) +} + +// NameProvider represents an object capable of translating from go property names +// to json property names +// This type is thread-safe. +type NameProvider struct { + lock *sync.Mutex + index map[reflect.Type]nameIndex +} + +type nameIndex struct { + jsonNames map[string]string + goNames map[string]string +} + +// NewNameProvider creates a new name provider +func NewNameProvider() *NameProvider { + return &NameProvider{ + lock: &sync.Mutex{}, + index: make(map[reflect.Type]nameIndex), + } +} + +func buildnameIndex(tpe reflect.Type, idx, reverseIdx map[string]string) { + for i := 0; i < tpe.NumField(); i++ { + targetDes := tpe.Field(i) + + if targetDes.PkgPath != "" { // unexported + continue + } + + if targetDes.Anonymous { // walk embedded structures tree down first + buildnameIndex(targetDes.Type, idx, reverseIdx) + continue + } + + if tag := targetDes.Tag.Get("json"); tag != "" { + + parts := strings.Split(tag, ",") + if len(parts) == 0 { + continue + } + + nm := parts[0] + if nm == "-" { + continue + } + if nm == "" { // empty string means we want to use the Go name + nm = targetDes.Name + } + + idx[nm] = targetDes.Name + reverseIdx[targetDes.Name] = nm + } + } +} + +func newNameIndex(tpe reflect.Type) nameIndex { + var idx = make(map[string]string, tpe.NumField()) + var reverseIdx = make(map[string]string, tpe.NumField()) + + buildnameIndex(tpe, idx, reverseIdx) + return nameIndex{jsonNames: idx, goNames: reverseIdx} +} + +// GetJSONNames gets all the json property names for a type +func (n *NameProvider) GetJSONNames(subject interface{}) []string { + n.lock.Lock() + defer n.lock.Unlock() + tpe := reflect.Indirect(reflect.ValueOf(subject)).Type() + names, ok := n.index[tpe] + if !ok { + names = n.makeNameIndex(tpe) + } + + res := make([]string, 0, len(names.jsonNames)) + for k := range names.jsonNames { + res = append(res, k) + } + return res +} + +// GetJSONName gets the json name for a go property name +func (n *NameProvider) GetJSONName(subject interface{}, name string) (string, bool) { + tpe := reflect.Indirect(reflect.ValueOf(subject)).Type() + return n.GetJSONNameForType(tpe, name) +} + +// GetJSONNameForType gets the json name for a go property name on a given type +func (n *NameProvider) GetJSONNameForType(tpe reflect.Type, name string) (string, bool) { + n.lock.Lock() + defer n.lock.Unlock() + names, ok := n.index[tpe] + if !ok { + names = n.makeNameIndex(tpe) + } + nme, ok := names.goNames[name] + return nme, ok +} + +func (n *NameProvider) makeNameIndex(tpe reflect.Type) nameIndex { + names := newNameIndex(tpe) + n.index[tpe] = names + return names +} + +// GetGoName gets the go name for a json property name +func (n *NameProvider) GetGoName(subject interface{}, name string) (string, bool) { + tpe := reflect.Indirect(reflect.ValueOf(subject)).Type() + return n.GetGoNameForType(tpe, name) +} + +// GetGoNameForType gets the go name for a given type for a json property name +func (n *NameProvider) GetGoNameForType(tpe reflect.Type, name string) (string, bool) { + n.lock.Lock() + defer n.lock.Unlock() + names, ok := n.index[tpe] + if !ok { + names = 
n.makeNameIndex(tpe) + } + nme, ok := names.jsonNames[name] + return nme, ok +} diff --git a/vendor/github.com/go-openapi/swag/loading.go b/vendor/github.com/go-openapi/swag/loading.go new file mode 100644 index 0000000..783442f --- /dev/null +++ b/vendor/github.com/go-openapi/swag/loading.go @@ -0,0 +1,176 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package swag + +import ( + "fmt" + "io" + "log" + "net/http" + "net/url" + "os" + "path" + "path/filepath" + "runtime" + "strings" + "time" +) + +// LoadHTTPTimeout the default timeout for load requests +var LoadHTTPTimeout = 30 * time.Second + +// LoadHTTPBasicAuthUsername the username to use when load requests require basic auth +var LoadHTTPBasicAuthUsername = "" + +// LoadHTTPBasicAuthPassword the password to use when load requests require basic auth +var LoadHTTPBasicAuthPassword = "" + +// LoadHTTPCustomHeaders an optional collection of custom HTTP headers for load requests +var LoadHTTPCustomHeaders = map[string]string{} + +// LoadFromFileOrHTTP loads the bytes from a file or a remote http server based on the path passed in +func LoadFromFileOrHTTP(pth string) ([]byte, error) { + return LoadStrategy(pth, os.ReadFile, loadHTTPBytes(LoadHTTPTimeout))(pth) +} + +// LoadFromFileOrHTTPWithTimeout loads the bytes from a file or a remote http server based on the path passed in +// timeout arg allows for per request overriding of the request timeout +func LoadFromFileOrHTTPWithTimeout(pth string, timeout time.Duration) ([]byte, error) { + return LoadStrategy(pth, os.ReadFile, loadHTTPBytes(timeout))(pth) +} + +// LoadStrategy returns a loader function for a given path or URI. +// +// The load strategy returns the remote load for any path starting with `http`. +// So this works for any URI with a scheme `http` or `https`. +// +// The fallback strategy is to call the local loader. +// +// The local loader takes a local file system path (absolute or relative) as argument, +// or alternatively a `file://...` URI, **without host** (see also below for windows). +// +// There are a few liberalities, initially intended to be tolerant regarding the URI syntax, +// especially on windows. +// +// Before the local loader is called, the given path is transformed: +// - percent-encoded characters are unescaped +// - simple paths (e.g. `./folder/file`) are passed as-is +// - on windows, occurrences of `/` are replaced by `\`, so providing a relative path such a `folder/file` works too. +// +// For paths provided as URIs with the "file" scheme, please note that: +// - `file://` is simply stripped. +// This means that the host part of the URI is not parsed at all. +// For example, `file:///folder/file" becomes "/folder/file`, +// but `file://localhost/folder/file` becomes `localhost/folder/file` on unix systems. +// Similarly, `file://./folder/file` yields `./folder/file`. +// - on windows, `file://...` can take a host so as to specify an UNC share location. 
+// +// Reminder about windows-specifics: +// - `file://host/folder/file` becomes an UNC path like `\\host\folder\file` (no port specification is supported) +// - `file:///c:/folder/file` becomes `C:\folder\file` +// - `file://c:/folder/file` is tolerated (without leading `/`) and becomes `c:\folder\file` +func LoadStrategy(pth string, local, remote func(string) ([]byte, error)) func(string) ([]byte, error) { + if strings.HasPrefix(pth, "http") { + return remote + } + + return func(p string) ([]byte, error) { + upth, err := url.PathUnescape(p) + if err != nil { + return nil, err + } + + if !strings.HasPrefix(p, `file://`) { + // regular file path provided: just normalize slashes + return local(filepath.FromSlash(upth)) + } + + if runtime.GOOS != "windows" { + // crude processing: this leaves full URIs with a host with a (mostly) unexpected result + upth = strings.TrimPrefix(upth, `file://`) + + return local(filepath.FromSlash(upth)) + } + + // windows-only pre-processing of file://... URIs + + // support for canonical file URIs on windows. + u, err := url.Parse(filepath.ToSlash(upth)) + if err != nil { + return nil, err + } + + if u.Host != "" { + // assume UNC name (volume share) + // NOTE: UNC port not yet supported + + // when the "host" segment is a drive letter: + // file://C:/folder/... => C:\folder + upth = path.Clean(strings.Join([]string{u.Host, u.Path}, `/`)) + if !strings.HasSuffix(u.Host, ":") && u.Host[0] != '.' { + // tolerance: if we have a leading dot, this can't be a host + // file://host/share/folder\... ==> \\host\share\path\folder + upth = "//" + upth + } + } else { + // no host, let's figure out if this is a drive letter + upth = strings.TrimPrefix(upth, `file://`) + first, _, _ := strings.Cut(strings.TrimPrefix(u.Path, "/"), "/") + if strings.HasSuffix(first, ":") { + // drive letter in the first segment: + // file:///c:/folder/... ==> strip the leading slash + upth = strings.TrimPrefix(upth, `/`) + } + } + + return local(filepath.FromSlash(upth)) + } +} + +func loadHTTPBytes(timeout time.Duration) func(path string) ([]byte, error) { + return func(path string) ([]byte, error) { + client := &http.Client{Timeout: timeout} + req, err := http.NewRequest(http.MethodGet, path, nil) //nolint:noctx + if err != nil { + return nil, err + } + + if LoadHTTPBasicAuthUsername != "" && LoadHTTPBasicAuthPassword != "" { + req.SetBasicAuth(LoadHTTPBasicAuthUsername, LoadHTTPBasicAuthPassword) + } + + for key, val := range LoadHTTPCustomHeaders { + req.Header.Set(key, val) + } + + resp, err := client.Do(req) + defer func() { + if resp != nil { + if e := resp.Body.Close(); e != nil { + log.Println(e) + } + } + }() + if err != nil { + return nil, err + } + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("could not access document at %q [%s] ", path, resp.Status) + } + + return io.ReadAll(resp.Body) + } +} diff --git a/vendor/github.com/go-openapi/swag/name_lexem.go b/vendor/github.com/go-openapi/swag/name_lexem.go new file mode 100644 index 0000000..8bb64ac --- /dev/null +++ b/vendor/github.com/go-openapi/swag/name_lexem.go @@ -0,0 +1,93 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package swag + +import ( + "unicode" + "unicode/utf8" +) + +type ( + lexemKind uint8 + + nameLexem struct { + original string + matchedInitialism string + kind lexemKind + } +) + +const ( + lexemKindCasualName lexemKind = iota + lexemKindInitialismName +) + +func newInitialismNameLexem(original, matchedInitialism string) nameLexem { + return nameLexem{ + kind: lexemKindInitialismName, + original: original, + matchedInitialism: matchedInitialism, + } +} + +func newCasualNameLexem(original string) nameLexem { + return nameLexem{ + kind: lexemKindCasualName, + original: original, + } +} + +func (l nameLexem) GetUnsafeGoName() string { + if l.kind == lexemKindInitialismName { + return l.matchedInitialism + } + + var ( + first rune + rest string + ) + + for i, orig := range l.original { + if i == 0 { + first = orig + continue + } + + if i > 0 { + rest = l.original[i:] + break + } + } + + if len(l.original) > 1 { + b := poolOfBuffers.BorrowBuffer(utf8.UTFMax + len(rest)) + defer func() { + poolOfBuffers.RedeemBuffer(b) + }() + b.WriteRune(unicode.ToUpper(first)) + b.WriteString(lower(rest)) + return b.String() + } + + return l.original +} + +func (l nameLexem) GetOriginal() string { + return l.original +} + +func (l nameLexem) IsInitialism() bool { + return l.kind == lexemKindInitialismName +} diff --git a/vendor/github.com/go-openapi/swag/net.go b/vendor/github.com/go-openapi/swag/net.go new file mode 100644 index 0000000..821235f --- /dev/null +++ b/vendor/github.com/go-openapi/swag/net.go @@ -0,0 +1,38 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package swag + +import ( + "net" + "strconv" +) + +// SplitHostPort splits a network address into a host and a port. +// The port is -1 when there is no port to be found +func SplitHostPort(addr string) (host string, port int, err error) { + h, p, err := net.SplitHostPort(addr) + if err != nil { + return "", -1, err + } + if p == "" { + return "", -1, &net.AddrError{Err: "missing port in address", Addr: addr} + } + + pi, err := strconv.Atoi(p) + if err != nil { + return "", -1, err + } + return h, pi, nil +} diff --git a/vendor/github.com/go-openapi/swag/path.go b/vendor/github.com/go-openapi/swag/path.go new file mode 100644 index 0000000..941bd01 --- /dev/null +++ b/vendor/github.com/go-openapi/swag/path.go @@ -0,0 +1,59 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package swag + +import ( + "os" + "path/filepath" + "runtime" + "strings" +) + +const ( + // GOPATHKey represents the env key for gopath + GOPATHKey = "GOPATH" +) + +// FindInSearchPath finds a package in a provided lists of paths +func FindInSearchPath(searchPath, pkg string) string { + pathsList := filepath.SplitList(searchPath) + for _, path := range pathsList { + if evaluatedPath, err := filepath.EvalSymlinks(filepath.Join(path, "src", pkg)); err == nil { + if _, err := os.Stat(evaluatedPath); err == nil { + return evaluatedPath + } + } + } + return "" +} + +// FindInGoSearchPath finds a package in the $GOPATH:$GOROOT +func FindInGoSearchPath(pkg string) string { + return FindInSearchPath(FullGoSearchPath(), pkg) +} + +// FullGoSearchPath gets the search paths for finding packages +func FullGoSearchPath() string { + allPaths := os.Getenv(GOPATHKey) + if allPaths == "" { + allPaths = filepath.Join(os.Getenv("HOME"), "go") + } + if allPaths != "" { + allPaths = strings.Join([]string{allPaths, runtime.GOROOT()}, ":") + } else { + allPaths = runtime.GOROOT() + } + return allPaths +} diff --git a/vendor/github.com/go-openapi/swag/split.go b/vendor/github.com/go-openapi/swag/split.go new file mode 100644 index 0000000..274727a --- /dev/null +++ b/vendor/github.com/go-openapi/swag/split.go @@ -0,0 +1,508 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package swag + +import ( + "bytes" + "sync" + "unicode" + "unicode/utf8" +) + +type ( + splitter struct { + initialisms []string + initialismsRunes [][]rune + initialismsUpperCased [][]rune // initialisms cached in their trimmed, upper-cased version + postSplitInitialismCheck bool + } + + splitterOption func(*splitter) + + initialismMatch struct { + body []rune + start, end int + complete bool + } + initialismMatches []initialismMatch +) + +type ( + // memory pools of temporary objects. + // + // These are used to recycle temporarily allocated objects + // and relieve the GC from undue pressure. 
+ + matchesPool struct { + *sync.Pool + } + + buffersPool struct { + *sync.Pool + } + + lexemsPool struct { + *sync.Pool + } + + splittersPool struct { + *sync.Pool + } +) + +var ( + // poolOfMatches holds temporary slices for recycling during the initialism match process + poolOfMatches = matchesPool{ + Pool: &sync.Pool{ + New: func() any { + s := make(initialismMatches, 0, maxAllocMatches) + + return &s + }, + }, + } + + poolOfBuffers = buffersPool{ + Pool: &sync.Pool{ + New: func() any { + return new(bytes.Buffer) + }, + }, + } + + poolOfLexems = lexemsPool{ + Pool: &sync.Pool{ + New: func() any { + s := make([]nameLexem, 0, maxAllocMatches) + + return &s + }, + }, + } + + poolOfSplitters = splittersPool{ + Pool: &sync.Pool{ + New: func() any { + s := newSplitter() + + return &s + }, + }, + } +) + +// nameReplaceTable finds a word representation for special characters. +func nameReplaceTable(r rune) (string, bool) { + switch r { + case '@': + return "At ", true + case '&': + return "And ", true + case '|': + return "Pipe ", true + case '$': + return "Dollar ", true + case '!': + return "Bang ", true + case '-': + return "", true + case '_': + return "", true + default: + return "", false + } +} + +// split calls the splitter. +// +// Use newSplitter for more control and options +func split(str string) []string { + s := poolOfSplitters.BorrowSplitter() + lexems := s.split(str) + result := make([]string, 0, len(*lexems)) + + for _, lexem := range *lexems { + result = append(result, lexem.GetOriginal()) + } + poolOfLexems.RedeemLexems(lexems) + poolOfSplitters.RedeemSplitter(s) + + return result + +} + +func newSplitter(options ...splitterOption) splitter { + s := splitter{ + postSplitInitialismCheck: false, + initialisms: initialisms, + initialismsRunes: initialismsRunes, + initialismsUpperCased: initialismsUpperCased, + } + + for _, option := range options { + option(&s) + } + + return s +} + +// withPostSplitInitialismCheck allows to catch initialisms after main split process +func withPostSplitInitialismCheck(s *splitter) { + s.postSplitInitialismCheck = true +} + +func (p matchesPool) BorrowMatches() *initialismMatches { + s := p.Get().(*initialismMatches) + *s = (*s)[:0] // reset slice, keep allocated capacity + + return s +} + +func (p buffersPool) BorrowBuffer(size int) *bytes.Buffer { + s := p.Get().(*bytes.Buffer) + s.Reset() + + if s.Cap() < size { + s.Grow(size) + } + + return s +} + +func (p lexemsPool) BorrowLexems() *[]nameLexem { + s := p.Get().(*[]nameLexem) + *s = (*s)[:0] // reset slice, keep allocated capacity + + return s +} + +func (p splittersPool) BorrowSplitter(options ...splitterOption) *splitter { + s := p.Get().(*splitter) + s.postSplitInitialismCheck = false // reset options + for _, apply := range options { + apply(s) + } + + return s +} + +func (p matchesPool) RedeemMatches(s *initialismMatches) { + p.Put(s) +} + +func (p buffersPool) RedeemBuffer(s *bytes.Buffer) { + p.Put(s) +} + +func (p lexemsPool) RedeemLexems(s *[]nameLexem) { + p.Put(s) +} + +func (p splittersPool) RedeemSplitter(s *splitter) { + p.Put(s) +} + +func (m initialismMatch) isZero() bool { + return m.start == 0 && m.end == 0 +} + +func (s splitter) split(name string) *[]nameLexem { + nameRunes := []rune(name) + matches := s.gatherInitialismMatches(nameRunes) + if matches == nil { + return poolOfLexems.BorrowLexems() + } + + return s.mapMatchesToNameLexems(nameRunes, matches) +} + +func (s splitter) gatherInitialismMatches(nameRunes []rune) *initialismMatches { + var matches 
*initialismMatches + + for currentRunePosition, currentRune := range nameRunes { + // recycle these allocations as we loop over runes + // with such recycling, only 2 slices should be allocated per call + // instead of o(n). + newMatches := poolOfMatches.BorrowMatches() + + // check current initialism matches + if matches != nil { // skip first iteration + for _, match := range *matches { + if keepCompleteMatch := match.complete; keepCompleteMatch { + *newMatches = append(*newMatches, match) + continue + } + + // drop failed match + currentMatchRune := match.body[currentRunePosition-match.start] + if currentMatchRune != currentRune { + continue + } + + // try to complete ongoing match + if currentRunePosition-match.start == len(match.body)-1 { + // we are close; the next step is to check the symbol ahead + // if it is a small letter, then it is not the end of match + // but beginning of the next word + + if currentRunePosition < len(nameRunes)-1 { + nextRune := nameRunes[currentRunePosition+1] + if newWord := unicode.IsLower(nextRune); newWord { + // oh ok, it was the start of a new word + continue + } + } + + match.complete = true + match.end = currentRunePosition + } + + *newMatches = append(*newMatches, match) + } + } + + // check for new initialism matches + for i := range s.initialisms { + initialismRunes := s.initialismsRunes[i] + if initialismRunes[0] == currentRune { + *newMatches = append(*newMatches, initialismMatch{ + start: currentRunePosition, + body: initialismRunes, + complete: false, + }) + } + } + + if matches != nil { + poolOfMatches.RedeemMatches(matches) + } + matches = newMatches + } + + // up to the caller to redeem this last slice + return matches +} + +func (s splitter) mapMatchesToNameLexems(nameRunes []rune, matches *initialismMatches) *[]nameLexem { + nameLexems := poolOfLexems.BorrowLexems() + + var lastAcceptedMatch initialismMatch + for _, match := range *matches { + if !match.complete { + continue + } + + if firstMatch := lastAcceptedMatch.isZero(); firstMatch { + s.appendBrokenDownCasualString(nameLexems, nameRunes[:match.start]) + *nameLexems = append(*nameLexems, s.breakInitialism(string(match.body))) + + lastAcceptedMatch = match + + continue + } + + if overlappedMatch := match.start <= lastAcceptedMatch.end; overlappedMatch { + continue + } + + middle := nameRunes[lastAcceptedMatch.end+1 : match.start] + s.appendBrokenDownCasualString(nameLexems, middle) + *nameLexems = append(*nameLexems, s.breakInitialism(string(match.body))) + + lastAcceptedMatch = match + } + + // we have not found any accepted matches + if lastAcceptedMatch.isZero() { + *nameLexems = (*nameLexems)[:0] + s.appendBrokenDownCasualString(nameLexems, nameRunes) + } else if lastAcceptedMatch.end+1 != len(nameRunes) { + rest := nameRunes[lastAcceptedMatch.end+1:] + s.appendBrokenDownCasualString(nameLexems, rest) + } + + poolOfMatches.RedeemMatches(matches) + + return nameLexems +} + +func (s splitter) breakInitialism(original string) nameLexem { + return newInitialismNameLexem(original, original) +} + +func (s splitter) appendBrokenDownCasualString(segments *[]nameLexem, str []rune) { + currentSegment := poolOfBuffers.BorrowBuffer(len(str)) // unlike strings.Builder, bytes.Buffer initial storage can reused + defer func() { + poolOfBuffers.RedeemBuffer(currentSegment) + }() + + addCasualNameLexem := func(original string) { + *segments = append(*segments, newCasualNameLexem(original)) + } + + addInitialismNameLexem := func(original, match string) { + *segments = append(*segments, 
newInitialismNameLexem(original, match)) + } + + var addNameLexem func(string) + if s.postSplitInitialismCheck { + addNameLexem = func(original string) { + for i := range s.initialisms { + if isEqualFoldIgnoreSpace(s.initialismsUpperCased[i], original) { + addInitialismNameLexem(original, s.initialisms[i]) + + return + } + } + + addCasualNameLexem(original) + } + } else { + addNameLexem = addCasualNameLexem + } + + for _, rn := range str { + if replace, found := nameReplaceTable(rn); found { + if currentSegment.Len() > 0 { + addNameLexem(currentSegment.String()) + currentSegment.Reset() + } + + if replace != "" { + addNameLexem(replace) + } + + continue + } + + if !unicode.In(rn, unicode.L, unicode.M, unicode.N, unicode.Pc) { + if currentSegment.Len() > 0 { + addNameLexem(currentSegment.String()) + currentSegment.Reset() + } + + continue + } + + if unicode.IsUpper(rn) { + if currentSegment.Len() > 0 { + addNameLexem(currentSegment.String()) + } + currentSegment.Reset() + } + + currentSegment.WriteRune(rn) + } + + if currentSegment.Len() > 0 { + addNameLexem(currentSegment.String()) + } +} + +// isEqualFoldIgnoreSpace is the same as strings.EqualFold, but +// it ignores leading and trailing blank spaces in the compared +// string. +// +// base is assumed to be composed of upper-cased runes, and be already +// trimmed. +// +// This code is heavily inspired from strings.EqualFold. +func isEqualFoldIgnoreSpace(base []rune, str string) bool { + var i, baseIndex int + // equivalent to b := []byte(str), but without data copy + b := hackStringBytes(str) + + for i < len(b) { + if c := b[i]; c < utf8.RuneSelf { + // fast path for ASCII + if c != ' ' && c != '\t' { + break + } + i++ + + continue + } + + // unicode case + r, size := utf8.DecodeRune(b[i:]) + if !unicode.IsSpace(r) { + break + } + i += size + } + + if i >= len(b) { + return len(base) == 0 + } + + for _, baseRune := range base { + if i >= len(b) { + break + } + + if c := b[i]; c < utf8.RuneSelf { + // single byte rune case (ASCII) + if baseRune >= utf8.RuneSelf { + return false + } + + baseChar := byte(baseRune) + if c != baseChar && + !('a' <= c && c <= 'z' && c-'a'+'A' == baseChar) { + return false + } + + baseIndex++ + i++ + + continue + } + + // unicode case + r, size := utf8.DecodeRune(b[i:]) + if unicode.ToUpper(r) != baseRune { + return false + } + baseIndex++ + i += size + } + + if baseIndex != len(base) { + return false + } + + // all passed: now we should only have blanks + for i < len(b) { + if c := b[i]; c < utf8.RuneSelf { + // fast path for ASCII + if c != ' ' && c != '\t' { + return false + } + i++ + + continue + } + + // unicode case + r, size := utf8.DecodeRune(b[i:]) + if !unicode.IsSpace(r) { + return false + } + + i += size + } + + return true +} diff --git a/vendor/github.com/go-openapi/swag/string_bytes.go b/vendor/github.com/go-openapi/swag/string_bytes.go new file mode 100644 index 0000000..90745d5 --- /dev/null +++ b/vendor/github.com/go-openapi/swag/string_bytes.go @@ -0,0 +1,8 @@ +package swag + +import "unsafe" + +// hackStringBytes returns the (unsafe) underlying bytes slice of a string. 
+func hackStringBytes(str string) []byte { + return unsafe.Slice(unsafe.StringData(str), len(str)) +} diff --git a/vendor/github.com/go-openapi/swag/util.go b/vendor/github.com/go-openapi/swag/util.go new file mode 100644 index 0000000..5051401 --- /dev/null +++ b/vendor/github.com/go-openapi/swag/util.go @@ -0,0 +1,364 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package swag + +import ( + "reflect" + "strings" + "unicode" + "unicode/utf8" +) + +// GoNamePrefixFunc sets an optional rule to prefix go names +// which do not start with a letter. +// +// The prefix function is assumed to return a string that starts with an upper case letter. +// +// e.g. to help convert "123" into "{prefix}123" +// +// The default is to prefix with "X" +var GoNamePrefixFunc func(string) string + +func prefixFunc(name, in string) string { + if GoNamePrefixFunc == nil { + return "X" + in + } + + return GoNamePrefixFunc(name) + in +} + +const ( + // collectionFormatComma = "csv" + collectionFormatSpace = "ssv" + collectionFormatTab = "tsv" + collectionFormatPipe = "pipes" + collectionFormatMulti = "multi" +) + +// JoinByFormat joins a string array by a known format (e.g. swagger's collectionFormat attribute): +// +// ssv: space separated value +// tsv: tab separated value +// pipes: pipe (|) separated value +// csv: comma separated value (default) +func JoinByFormat(data []string, format string) []string { + if len(data) == 0 { + return data + } + var sep string + switch format { + case collectionFormatSpace: + sep = " " + case collectionFormatTab: + sep = "\t" + case collectionFormatPipe: + sep = "|" + case collectionFormatMulti: + return data + default: + sep = "," + } + return []string{strings.Join(data, sep)} +} + +// SplitByFormat splits a string by a known format: +// +// ssv: space separated value +// tsv: tab separated value +// pipes: pipe (|) separated value +// csv: comma separated value (default) +func SplitByFormat(data, format string) []string { + if data == "" { + return nil + } + var sep string + switch format { + case collectionFormatSpace: + sep = " " + case collectionFormatTab: + sep = "\t" + case collectionFormatPipe: + sep = "|" + case collectionFormatMulti: + return nil + default: + sep = "," + } + var result []string + for _, s := range strings.Split(data, sep) { + if ts := strings.TrimSpace(s); ts != "" { + result = append(result, ts) + } + } + return result +} + +// Removes leading whitespaces +func trim(str string) string { + return strings.TrimSpace(str) +} + +// Shortcut to strings.ToUpper() +func upper(str string) string { + return strings.ToUpper(trim(str)) +} + +// Shortcut to strings.ToLower() +func lower(str string) string { + return strings.ToLower(trim(str)) +} + +// Camelize an uppercased word +func Camelize(word string) string { + camelized := poolOfBuffers.BorrowBuffer(len(word)) + defer func() { + poolOfBuffers.RedeemBuffer(camelized) + }() + + for pos, ru := range []rune(word) { + if pos > 0 { + 
camelized.WriteRune(unicode.ToLower(ru)) + } else { + camelized.WriteRune(unicode.ToUpper(ru)) + } + } + return camelized.String() +} + +// ToFileName lowercases and underscores a go type name +func ToFileName(name string) string { + in := split(name) + out := make([]string, 0, len(in)) + + for _, w := range in { + out = append(out, lower(w)) + } + + return strings.Join(out, "_") +} + +// ToCommandName lowercases and underscores a go type name +func ToCommandName(name string) string { + in := split(name) + out := make([]string, 0, len(in)) + + for _, w := range in { + out = append(out, lower(w)) + } + return strings.Join(out, "-") +} + +// ToHumanNameLower represents a code name as a human series of words +func ToHumanNameLower(name string) string { + s := poolOfSplitters.BorrowSplitter(withPostSplitInitialismCheck) + in := s.split(name) + poolOfSplitters.RedeemSplitter(s) + out := make([]string, 0, len(*in)) + + for _, w := range *in { + if !w.IsInitialism() { + out = append(out, lower(w.GetOriginal())) + } else { + out = append(out, trim(w.GetOriginal())) + } + } + poolOfLexems.RedeemLexems(in) + + return strings.Join(out, " ") +} + +// ToHumanNameTitle represents a code name as a human series of words with the first letters titleized +func ToHumanNameTitle(name string) string { + s := poolOfSplitters.BorrowSplitter(withPostSplitInitialismCheck) + in := s.split(name) + poolOfSplitters.RedeemSplitter(s) + + out := make([]string, 0, len(*in)) + for _, w := range *in { + original := trim(w.GetOriginal()) + if !w.IsInitialism() { + out = append(out, Camelize(original)) + } else { + out = append(out, original) + } + } + poolOfLexems.RedeemLexems(in) + + return strings.Join(out, " ") +} + +// ToJSONName camelcases a name which can be underscored or pascal cased +func ToJSONName(name string) string { + in := split(name) + out := make([]string, 0, len(in)) + + for i, w := range in { + if i == 0 { + out = append(out, lower(w)) + continue + } + out = append(out, Camelize(trim(w))) + } + return strings.Join(out, "") +} + +// ToVarName camelcases a name which can be underscored or pascal cased +func ToVarName(name string) string { + res := ToGoName(name) + if isInitialism(res) { + return lower(res) + } + if len(res) <= 1 { + return lower(res) + } + return lower(res[:1]) + res[1:] +} + +// ToGoName translates a swagger name which can be underscored or camel cased to a name that golint likes +func ToGoName(name string) string { + s := poolOfSplitters.BorrowSplitter(withPostSplitInitialismCheck) + lexems := s.split(name) + poolOfSplitters.RedeemSplitter(s) + defer func() { + poolOfLexems.RedeemLexems(lexems) + }() + lexemes := *lexems + + if len(lexemes) == 0 { + return "" + } + + result := poolOfBuffers.BorrowBuffer(len(name)) + defer func() { + poolOfBuffers.RedeemBuffer(result) + }() + + // check if not starting with a letter, upper case + firstPart := lexemes[0].GetUnsafeGoName() + if lexemes[0].IsInitialism() { + firstPart = upper(firstPart) + } + + if c := firstPart[0]; c < utf8.RuneSelf { + // ASCII + switch { + case 'A' <= c && c <= 'Z': + result.WriteString(firstPart) + case 'a' <= c && c <= 'z': + result.WriteByte(c - 'a' + 'A') + result.WriteString(firstPart[1:]) + default: + result.WriteString(prefixFunc(name, firstPart)) + // NOTE: no longer check if prefixFunc returns a string that starts with uppercase: + // assume this is always the case + } + } else { + // unicode + firstRune, _ := utf8.DecodeRuneInString(firstPart) + switch { + case !unicode.IsLetter(firstRune): + 
result.WriteString(prefixFunc(name, firstPart)) + case !unicode.IsUpper(firstRune): + result.WriteString(prefixFunc(name, firstPart)) + /* + result.WriteRune(unicode.ToUpper(firstRune)) + result.WriteString(firstPart[offset:]) + */ + default: + result.WriteString(firstPart) + } + } + + for _, lexem := range lexemes[1:] { + goName := lexem.GetUnsafeGoName() + + // to support old behavior + if lexem.IsInitialism() { + goName = upper(goName) + } + result.WriteString(goName) + } + + return result.String() +} + +// ContainsStrings searches a slice of strings for a case-sensitive match +func ContainsStrings(coll []string, item string) bool { + for _, a := range coll { + if a == item { + return true + } + } + return false +} + +// ContainsStringsCI searches a slice of strings for a case-insensitive match +func ContainsStringsCI(coll []string, item string) bool { + for _, a := range coll { + if strings.EqualFold(a, item) { + return true + } + } + return false +} + +type zeroable interface { + IsZero() bool +} + +// IsZero returns true when the value passed into the function is a zero value. +// This allows for safer checking of interface values. +func IsZero(data interface{}) bool { + v := reflect.ValueOf(data) + // check for nil data + switch v.Kind() { //nolint:exhaustive + case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: + if v.IsNil() { + return true + } + } + + // check for things that have an IsZero method instead + if vv, ok := data.(zeroable); ok { + return vv.IsZero() + } + + // continue with slightly more complex reflection + switch v.Kind() { //nolint:exhaustive + case reflect.String: + return v.Len() == 0 + case reflect.Bool: + return !v.Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.Struct, reflect.Array: + return reflect.DeepEqual(data, reflect.Zero(v.Type()).Interface()) + case reflect.Invalid: + return true + default: + return false + } +} + +// CommandLineOptionsGroup represents a group of user-defined command line options +type CommandLineOptionsGroup struct { + ShortDescription string + LongDescription string + Options interface{} +} diff --git a/vendor/github.com/go-openapi/swag/yaml.go b/vendor/github.com/go-openapi/swag/yaml.go new file mode 100644 index 0000000..f59e025 --- /dev/null +++ b/vendor/github.com/go-openapi/swag/yaml.go @@ -0,0 +1,481 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package swag + +import ( + "encoding/json" + "errors" + "fmt" + "path/filepath" + "reflect" + "sort" + "strconv" + + "github.com/mailru/easyjson/jlexer" + "github.com/mailru/easyjson/jwriter" + yaml "gopkg.in/yaml.v3" +) + +// YAMLMatcher matches yaml +func YAMLMatcher(path string) bool { + ext := filepath.Ext(path) + return ext == ".yaml" || ext == ".yml" +} + +// YAMLToJSON converts YAML unmarshaled data into json compatible data +func YAMLToJSON(data interface{}) (json.RawMessage, error) { + jm, err := transformData(data) + if err != nil { + return nil, err + } + b, err := WriteJSON(jm) + return json.RawMessage(b), err +} + +// BytesToYAMLDoc converts a byte slice into a YAML document +func BytesToYAMLDoc(data []byte) (interface{}, error) { + var document yaml.Node // preserve order that is present in the document + if err := yaml.Unmarshal(data, &document); err != nil { + return nil, err + } + if document.Kind != yaml.DocumentNode || len(document.Content) != 1 || document.Content[0].Kind != yaml.MappingNode { + return nil, errors.New("only YAML documents that are objects are supported") + } + return &document, nil +} + +func yamlNode(root *yaml.Node) (interface{}, error) { + switch root.Kind { + case yaml.DocumentNode: + return yamlDocument(root) + case yaml.SequenceNode: + return yamlSequence(root) + case yaml.MappingNode: + return yamlMapping(root) + case yaml.ScalarNode: + return yamlScalar(root) + case yaml.AliasNode: + return yamlNode(root.Alias) + default: + return nil, fmt.Errorf("unsupported YAML node type: %v", root.Kind) + } +} + +func yamlDocument(node *yaml.Node) (interface{}, error) { + if len(node.Content) != 1 { + return nil, fmt.Errorf("unexpected YAML Document node content length: %d", len(node.Content)) + } + return yamlNode(node.Content[0]) +} + +func yamlMapping(node *yaml.Node) (interface{}, error) { + m := make(JSONMapSlice, len(node.Content)/2) + + var j int + for i := 0; i < len(node.Content); i += 2 { + var nmi JSONMapItem + k, err := yamlStringScalarC(node.Content[i]) + if err != nil { + return nil, fmt.Errorf("unable to decode YAML map key: %w", err) + } + nmi.Key = k + v, err := yamlNode(node.Content[i+1]) + if err != nil { + return nil, fmt.Errorf("unable to process YAML map value for key %q: %w", k, err) + } + nmi.Value = v + m[j] = nmi + j++ + } + return m, nil +} + +func yamlSequence(node *yaml.Node) (interface{}, error) { + s := make([]interface{}, 0) + + for i := 0; i < len(node.Content); i++ { + + v, err := yamlNode(node.Content[i]) + if err != nil { + return nil, fmt.Errorf("unable to decode YAML sequence value: %w", err) + } + s = append(s, v) + } + return s, nil +} + +const ( // See https://yaml.org/type/ + yamlStringScalar = "tag:yaml.org,2002:str" + yamlIntScalar = "tag:yaml.org,2002:int" + yamlBoolScalar = "tag:yaml.org,2002:bool" + yamlFloatScalar = "tag:yaml.org,2002:float" + yamlTimestamp = "tag:yaml.org,2002:timestamp" + yamlNull = "tag:yaml.org,2002:null" +) + +func yamlScalar(node *yaml.Node) (interface{}, error) { + switch node.LongTag() { + case yamlStringScalar: + return node.Value, nil + case yamlBoolScalar: + b, err := strconv.ParseBool(node.Value) + if err != nil { + return nil, fmt.Errorf("unable to process scalar node. Got %q. Expecting bool content: %w", node.Value, err) + } + return b, nil + case yamlIntScalar: + i, err := strconv.ParseInt(node.Value, 10, 64) + if err != nil { + return nil, fmt.Errorf("unable to process scalar node. Got %q. 
Expecting integer content: %w", node.Value, err) + } + return i, nil + case yamlFloatScalar: + f, err := strconv.ParseFloat(node.Value, 64) + if err != nil { + return nil, fmt.Errorf("unable to process scalar node. Got %q. Expecting float content: %w", node.Value, err) + } + return f, nil + case yamlTimestamp: + return node.Value, nil + case yamlNull: + return nil, nil //nolint:nilnil + default: + return nil, fmt.Errorf("YAML tag %q is not supported", node.LongTag()) + } +} + +func yamlStringScalarC(node *yaml.Node) (string, error) { + if node.Kind != yaml.ScalarNode { + return "", fmt.Errorf("expecting a string scalar but got %q", node.Kind) + } + switch node.LongTag() { + case yamlStringScalar, yamlIntScalar, yamlFloatScalar: + return node.Value, nil + default: + return "", fmt.Errorf("YAML tag %q is not supported as map key", node.LongTag()) + } +} + +// JSONMapSlice represent a JSON object, with the order of keys maintained +type JSONMapSlice []JSONMapItem + +// MarshalJSON renders a JSONMapSlice as JSON +func (s JSONMapSlice) MarshalJSON() ([]byte, error) { + w := &jwriter.Writer{Flags: jwriter.NilMapAsEmpty | jwriter.NilSliceAsEmpty} + s.MarshalEasyJSON(w) + return w.BuildBytes() +} + +// MarshalEasyJSON renders a JSONMapSlice as JSON, using easyJSON +func (s JSONMapSlice) MarshalEasyJSON(w *jwriter.Writer) { + w.RawByte('{') + + ln := len(s) + last := ln - 1 + for i := 0; i < ln; i++ { + s[i].MarshalEasyJSON(w) + if i != last { // last item + w.RawByte(',') + } + } + + w.RawByte('}') +} + +// UnmarshalJSON makes a JSONMapSlice from JSON +func (s *JSONMapSlice) UnmarshalJSON(data []byte) error { + l := jlexer.Lexer{Data: data} + s.UnmarshalEasyJSON(&l) + return l.Error() +} + +// UnmarshalEasyJSON makes a JSONMapSlice from JSON, using easyJSON +func (s *JSONMapSlice) UnmarshalEasyJSON(in *jlexer.Lexer) { + if in.IsNull() { + in.Skip() + return + } + + var result JSONMapSlice + in.Delim('{') + for !in.IsDelim('}') { + var mi JSONMapItem + mi.UnmarshalEasyJSON(in) + result = append(result, mi) + } + *s = result +} + +func (s JSONMapSlice) MarshalYAML() (interface{}, error) { + var n yaml.Node + n.Kind = yaml.DocumentNode + var nodes []*yaml.Node + for _, item := range s { + nn, err := json2yaml(item.Value) + if err != nil { + return nil, err + } + ns := []*yaml.Node{ + { + Kind: yaml.ScalarNode, + Tag: yamlStringScalar, + Value: item.Key, + }, + nn, + } + nodes = append(nodes, ns...) 
+ } + + n.Content = []*yaml.Node{ + { + Kind: yaml.MappingNode, + Content: nodes, + }, + } + + return yaml.Marshal(&n) +} + +func isNil(input interface{}) bool { + if input == nil { + return true + } + kind := reflect.TypeOf(input).Kind() + switch kind { //nolint:exhaustive + case reflect.Ptr, reflect.Map, reflect.Slice, reflect.Chan: + return reflect.ValueOf(input).IsNil() + default: + return false + } +} + +func json2yaml(item interface{}) (*yaml.Node, error) { + if isNil(item) { + return &yaml.Node{ + Kind: yaml.ScalarNode, + Value: "null", + }, nil + } + + switch val := item.(type) { + case JSONMapSlice: + var n yaml.Node + n.Kind = yaml.MappingNode + for i := range val { + childNode, err := json2yaml(&val[i].Value) + if err != nil { + return nil, err + } + n.Content = append(n.Content, &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: yamlStringScalar, + Value: val[i].Key, + }, childNode) + } + return &n, nil + case map[string]interface{}: + var n yaml.Node + n.Kind = yaml.MappingNode + keys := make([]string, 0, len(val)) + for k := range val { + keys = append(keys, k) + } + sort.Strings(keys) + + for _, k := range keys { + v := val[k] + childNode, err := json2yaml(v) + if err != nil { + return nil, err + } + n.Content = append(n.Content, &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: yamlStringScalar, + Value: k, + }, childNode) + } + return &n, nil + case []interface{}: + var n yaml.Node + n.Kind = yaml.SequenceNode + for i := range val { + childNode, err := json2yaml(val[i]) + if err != nil { + return nil, err + } + n.Content = append(n.Content, childNode) + } + return &n, nil + case string: + return &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: yamlStringScalar, + Value: val, + }, nil + case float64: + return &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: yamlFloatScalar, + Value: strconv.FormatFloat(val, 'f', -1, 64), + }, nil + case int64: + return &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: yamlIntScalar, + Value: strconv.FormatInt(val, 10), + }, nil + case uint64: + return &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: yamlIntScalar, + Value: strconv.FormatUint(val, 10), + }, nil + case bool: + return &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: yamlBoolScalar, + Value: strconv.FormatBool(val), + }, nil + default: + return nil, fmt.Errorf("unhandled type: %T", val) + } +} + +// JSONMapItem represents the value of a key in a JSON object held by JSONMapSlice +type JSONMapItem struct { + Key string + Value interface{} +} + +// MarshalJSON renders a JSONMapItem as JSON +func (s JSONMapItem) MarshalJSON() ([]byte, error) { + w := &jwriter.Writer{Flags: jwriter.NilMapAsEmpty | jwriter.NilSliceAsEmpty} + s.MarshalEasyJSON(w) + return w.BuildBytes() +} + +// MarshalEasyJSON renders a JSONMapItem as JSON, using easyJSON +func (s JSONMapItem) MarshalEasyJSON(w *jwriter.Writer) { + w.String(s.Key) + w.RawByte(':') + w.Raw(WriteJSON(s.Value)) +} + +// UnmarshalJSON makes a JSONMapItem from JSON +func (s *JSONMapItem) UnmarshalJSON(data []byte) error { + l := jlexer.Lexer{Data: data} + s.UnmarshalEasyJSON(&l) + return l.Error() +} + +// UnmarshalEasyJSON makes a JSONMapItem from JSON, using easyJSON +func (s *JSONMapItem) UnmarshalEasyJSON(in *jlexer.Lexer) { + key := in.UnsafeString() + in.WantColon() + value := in.Interface() + in.WantComma() + s.Key = key + s.Value = value +} + +func transformData(input interface{}) (out interface{}, err error) { + format := func(t interface{}) (string, error) { + switch k := t.(type) { + case string: + return k, nil + case uint: + return strconv.FormatUint(uint64(k), 
10), nil + case uint8: + return strconv.FormatUint(uint64(k), 10), nil + case uint16: + return strconv.FormatUint(uint64(k), 10), nil + case uint32: + return strconv.FormatUint(uint64(k), 10), nil + case uint64: + return strconv.FormatUint(k, 10), nil + case int: + return strconv.Itoa(k), nil + case int8: + return strconv.FormatInt(int64(k), 10), nil + case int16: + return strconv.FormatInt(int64(k), 10), nil + case int32: + return strconv.FormatInt(int64(k), 10), nil + case int64: + return strconv.FormatInt(k, 10), nil + default: + return "", fmt.Errorf("unexpected map key type, got: %T", k) + } + } + + switch in := input.(type) { + case yaml.Node: + return yamlNode(&in) + case *yaml.Node: + return yamlNode(in) + case map[interface{}]interface{}: + o := make(JSONMapSlice, 0, len(in)) + for ke, va := range in { + var nmi JSONMapItem + if nmi.Key, err = format(ke); err != nil { + return nil, err + } + + v, ert := transformData(va) + if ert != nil { + return nil, ert + } + nmi.Value = v + o = append(o, nmi) + } + return o, nil + case []interface{}: + len1 := len(in) + o := make([]interface{}, len1) + for i := 0; i < len1; i++ { + o[i], err = transformData(in[i]) + if err != nil { + return nil, err + } + } + return o, nil + } + return input, nil +} + +// YAMLDoc loads a yaml document from either http or a file and converts it to json +func YAMLDoc(path string) (json.RawMessage, error) { + yamlDoc, err := YAMLData(path) + if err != nil { + return nil, err + } + + data, err := YAMLToJSON(yamlDoc) + if err != nil { + return nil, err + } + + return data, nil +} + +// YAMLData loads a yaml document from either http or a file +func YAMLData(path string) (interface{}, error) { + data, err := LoadFromFileOrHTTP(path) + if err != nil { + return nil, err + } + + return BytesToYAMLDoc(data) +} diff --git a/vendor/github.com/go-ozzo/ozzo-validation/date.go b/vendor/github.com/go-ozzo/ozzo-validation/date.go index 9ba71fb..432e035 100644 --- a/vendor/github.com/go-ozzo/ozzo-validation/date.go +++ b/vendor/github.com/go-ozzo/ozzo-validation/date.go @@ -19,10 +19,9 @@ type DateRule struct { // Date returns a validation rule that checks if a string value is in a format that can be parsed into a date. // The format of the date should be specified as the layout parameter which accepts the same value as that for time.Parse. // For example, -// -// validation.Date(time.ANSIC) -// validation.Date("02 Jan 06 15:04 MST") -// validation.Date("2006-01-02") +// validation.Date(time.ANSIC) +// validation.Date("02 Jan 06 15:04 MST") +// validation.Date("2006-01-02") // // By calling Min() and/or Max(), you can let the Date rule to check if a parsed date value is within // the specified date range. 
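Note: the vendored go-openapi/swag files above (loading.go, split.go, util.go, yaml.go) provide the name-mangling and spec-loading helpers that the swagger tooling relies on. As orientation only, and not part of the diff itself, a minimal sketch of how those exported helpers could be exercised looks like this (the "swagger.yaml" path is a placeholder, not a file in this repository):

package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/swag"
)

func main() {
	// Name-mangling helpers from util.go / split.go: split on underscores,
	// dashes and case changes, honouring the built-in initialism table.
	fmt.Println(swag.ToGoName("sample_id"))   // exported Go-friendly name
	fmt.Println(swag.ToFileName("SampleID"))  // lowercased, underscore-separated
	fmt.Println(swag.ToJSONName("sample_id")) // camelCased JSON key

	// loading.go + yaml.go: load a YAML document from a file path or an
	// http(s) URL and convert it to order-preserving JSON.
	doc, err := swag.YAMLDoc("swagger.yaml") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(doc))
}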
diff --git a/vendor/github.com/go-ozzo/ozzo-validation/multipleof.go b/vendor/github.com/go-ozzo/ozzo-validation/multipleof.go index da3ed94..c40fcfa 100644 --- a/vendor/github.com/go-ozzo/ozzo-validation/multipleof.go +++ b/vendor/github.com/go-ozzo/ozzo-validation/multipleof.go @@ -24,6 +24,7 @@ func (r *multipleOfRule) Error(message string) *multipleOfRule { return r } + func (r *multipleOfRule) Validate(value interface{}) error { rv := reflect.ValueOf(r.threshold) diff --git a/vendor/github.com/go-ozzo/ozzo-validation/struct.go b/vendor/github.com/go-ozzo/ozzo-validation/struct.go index d380029..2ff852a 100644 --- a/vendor/github.com/go-ozzo/ozzo-validation/struct.go +++ b/vendor/github.com/go-ozzo/ozzo-validation/struct.go @@ -46,16 +46,16 @@ func (e ErrFieldNotFound) Error() string { // should be specified as a pointer to the field. A field can be associated with multiple rules. // For example, // -// value := struct { -// Name string -// Value string -// }{"name", "demo"} -// err := validation.ValidateStruct(&value, -// validation.Field(&a.Name, validation.Required), -// validation.Field(&a.Value, validation.Required, validation.Length(5, 10)), -// ) -// fmt.Println(err) -// // Value: the length must be between 5 and 10. +// value := struct { +// Name string +// Value string +// }{"name", "demo"} +// err := validation.ValidateStruct(&value, +// validation.Field(&a.Name, validation.Required), +// validation.Field(&a.Value, validation.Required, validation.Length(5, 10)), +// ) +// fmt.Println(err) +// // Value: the length must be between 5 and 10. // // An error will be returned if validation fails. func ValidateStruct(structPtr interface{}, fields ...*FieldRules) error { diff --git a/vendor/github.com/go-ozzo/ozzo-validation/v4/map.go b/vendor/github.com/go-ozzo/ozzo-validation/v4/map.go index b3a87bd..106b6d8 100644 --- a/vendor/github.com/go-ozzo/ozzo-validation/v4/map.go +++ b/vendor/github.com/go-ozzo/ozzo-validation/v4/map.go @@ -41,11 +41,10 @@ type ( // Use Key() to specify map keys that need to be validated. Each Key() call specifies a single key which can // be associated with multiple rules. // For example, -// -// validation.Map( -// validation.Key("Name", validation.Required), -// validation.Key("Value", validation.Required, validation.Length(5, 10)), -// ) +// validation.Map( +// validation.Key("Name", validation.Required), +// validation.Key("Value", validation.Required, validation.Length(5, 10)), +// ) // // A nil value is considered valid. Use the Required rule to make sure a map value is present. func Map(keys ...*KeyRules) MapRule { diff --git a/vendor/github.com/go-ozzo/ozzo-validation/v4/struct.go b/vendor/github.com/go-ozzo/ozzo-validation/v4/struct.go index 08b63ba..d63619d 100644 --- a/vendor/github.com/go-ozzo/ozzo-validation/v4/struct.go +++ b/vendor/github.com/go-ozzo/ozzo-validation/v4/struct.go @@ -47,16 +47,16 @@ func (e ErrFieldNotFound) Error() string { // should be specified as a pointer to the field. A field can be associated with multiple rules. // For example, // -// value := struct { -// Name string -// Value string -// }{"name", "demo"} -// err := validation.ValidateStruct(&value, -// validation.Field(&a.Name, validation.Required), -// validation.Field(&a.Value, validation.Required, validation.Length(5, 10)), -// ) -// fmt.Println(err) -// // Value: the length must be between 5 and 10. 
+// value := struct { +// Name string +// Value string +// }{"name", "demo"} +// err := validation.ValidateStruct(&value, +// validation.Field(&a.Name, validation.Required), +// validation.Field(&a.Value, validation.Required, validation.Length(5, 10)), +// ) +// fmt.Println(err) +// // Value: the length must be between 5 and 10. // // An error will be returned if validation fails. func ValidateStruct(structPtr interface{}, fields ...*FieldRules) error { diff --git a/vendor/github.com/go-ozzo/ozzo-validation/v4/validation.go b/vendor/github.com/go-ozzo/ozzo-validation/v4/validation.go index 0495f32..ec7a161 100644 --- a/vendor/github.com/go-ozzo/ozzo-validation/v4/validation.go +++ b/vendor/github.com/go-ozzo/ozzo-validation/v4/validation.go @@ -60,11 +60,11 @@ var ( // Validate validates the given value and returns the validation error, if any. // // Validate performs validation using the following steps: -// 1. For each rule, call its `Validate()` to validate the value. Return if any error is found. -// 2. If the value being validated implements `Validatable`, call the value's `Validate()`. -// Return with the validation result. -// 3. If the value being validated is a map/slice/array, and the element type implements `Validatable`, -// for each element call the element value's `Validate()`. Return with the validation result. +// 1. For each rule, call its `Validate()` to validate the value. Return if any error is found. +// 2. If the value being validated implements `Validatable`, call the value's `Validate()`. +// Return with the validation result. +// 3. If the value being validated is a map/slice/array, and the element type implements `Validatable`, +// for each element call the element value's `Validate()`. Return with the validation result. func Validate(value interface{}, rules ...Rule) error { for _, rule := range rules { if s, ok := rule.(skipRule); ok && s.skip { @@ -103,16 +103,16 @@ func Validate(value interface{}, rules ...Rule) error { // ValidateWithContext validates the given value with the given context and returns the validation error, if any. // // ValidateWithContext performs validation using the following steps: -// 1. For each rule, call its `ValidateWithContext()` to validate the value if the rule implements `RuleWithContext`. -// Otherwise call `Validate()` of the rule. Return if any error is found. -// 2. If the value being validated implements `ValidatableWithContext`, call the value's `ValidateWithContext()` -// and return with the validation result. -// 3. If the value being validated implements `Validatable`, call the value's `Validate()` -// and return with the validation result. -// 4. If the value being validated is a map/slice/array, and the element type implements `ValidatableWithContext`, -// for each element call the element value's `ValidateWithContext()`. Return with the validation result. -// 5. If the value being validated is a map/slice/array, and the element type implements `Validatable`, -// for each element call the element value's `Validate()`. Return with the validation result. +// 1. For each rule, call its `ValidateWithContext()` to validate the value if the rule implements `RuleWithContext`. +// Otherwise call `Validate()` of the rule. Return if any error is found. +// 2. If the value being validated implements `ValidatableWithContext`, call the value's `ValidateWithContext()` +// and return with the validation result. +// 3. 
If the value being validated implements `Validatable`, call the value's `Validate()` +// and return with the validation result. +// 4. If the value being validated is a map/slice/array, and the element type implements `ValidatableWithContext`, +// for each element call the element value's `ValidateWithContext()`. Return with the validation result. +// 5. If the value being validated is a map/slice/array, and the element type implements `Validatable`, +// for each element call the element value's `Validate()`. Return with the validation result. func ValidateWithContext(ctx context.Context, value interface{}, rules ...Rule) error { for _, rule := range rules { if s, ok := rule.(skipRule); ok && s.skip { diff --git a/vendor/github.com/go-sql-driver/mysql/auth.go b/vendor/github.com/go-sql-driver/mysql/auth.go index d1371a4..b2f19e8 100644 --- a/vendor/github.com/go-sql-driver/mysql/auth.go +++ b/vendor/github.com/go-sql-driver/mysql/auth.go @@ -33,26 +33,27 @@ var ( // Note: The provided rsa.PublicKey instance is exclusively owned by the driver // after registering it and may not be modified. // -// data, err := ioutil.ReadFile("mykey.pem") -// if err != nil { -// log.Fatal(err) -// } +// data, err := ioutil.ReadFile("mykey.pem") +// if err != nil { +// log.Fatal(err) +// } // -// block, _ := pem.Decode(data) -// if block == nil || block.Type != "PUBLIC KEY" { -// log.Fatal("failed to decode PEM block containing public key") -// } +// block, _ := pem.Decode(data) +// if block == nil || block.Type != "PUBLIC KEY" { +// log.Fatal("failed to decode PEM block containing public key") +// } // -// pub, err := x509.ParsePKIXPublicKey(block.Bytes) -// if err != nil { -// log.Fatal(err) -// } +// pub, err := x509.ParsePKIXPublicKey(block.Bytes) +// if err != nil { +// log.Fatal(err) +// } +// +// if rsaPubKey, ok := pub.(*rsa.PublicKey); ok { +// mysql.RegisterServerPubKey("mykey", rsaPubKey) +// } else { +// log.Fatal("not a RSA public key") +// } // -// if rsaPubKey, ok := pub.(*rsa.PublicKey); ok { -// mysql.RegisterServerPubKey("mykey", rsaPubKey) -// } else { -// log.Fatal("not a RSA public key") -// } func RegisterServerPubKey(name string, pubKey *rsa.PublicKey) { serverPubKeyLock.Lock() if serverPubKeyRegistry == nil { diff --git a/vendor/github.com/go-sql-driver/mysql/conncheck.go b/vendor/github.com/go-sql-driver/mysql/conncheck.go index 0ea7217..024eb28 100644 --- a/vendor/github.com/go-sql-driver/mysql/conncheck.go +++ b/vendor/github.com/go-sql-driver/mysql/conncheck.go @@ -6,7 +6,6 @@ // License, v. 2.0. If a copy of the MPL was not distributed with this file, // You can obtain one at http://mozilla.org/MPL/2.0/. 
-//go:build linux || darwin || dragonfly || freebsd || netbsd || openbsd || solaris || illumos // +build linux darwin dragonfly freebsd netbsd openbsd solaris illumos package mysql diff --git a/vendor/github.com/go-sql-driver/mysql/driver.go b/vendor/github.com/go-sql-driver/mysql/driver.go index ad7aec2..c1bdf11 100644 --- a/vendor/github.com/go-sql-driver/mysql/driver.go +++ b/vendor/github.com/go-sql-driver/mysql/driver.go @@ -8,10 +8,10 @@ // // The driver should be used via the database/sql package: // -// import "database/sql" -// import _ "github.com/go-sql-driver/mysql" +// import "database/sql" +// import _ "github.com/go-sql-driver/mysql" // -// db, err := sql.Open("mysql", "user:password@/dbname") +// db, err := sql.Open("mysql", "user:password@/dbname") // // See https://github.com/go-sql-driver/mysql#usage for details package mysql diff --git a/vendor/github.com/go-sql-driver/mysql/infile.go b/vendor/github.com/go-sql-driver/mysql/infile.go index 955a00a..60effdf 100644 --- a/vendor/github.com/go-sql-driver/mysql/infile.go +++ b/vendor/github.com/go-sql-driver/mysql/infile.go @@ -28,11 +28,12 @@ var ( // Alternatively you can allow the use of all local files with // the DSN parameter 'allowAllFiles=true' // -// filePath := "/home/gopher/data.csv" -// mysql.RegisterLocalFile(filePath) -// err := db.Exec("LOAD DATA LOCAL INFILE '" + filePath + "' INTO TABLE foo") -// if err != nil { -// ... +// filePath := "/home/gopher/data.csv" +// mysql.RegisterLocalFile(filePath) +// err := db.Exec("LOAD DATA LOCAL INFILE '" + filePath + "' INTO TABLE foo") +// if err != nil { +// ... +// func RegisterLocalFile(filePath string) { fileRegisterLock.Lock() // lazy map init @@ -57,14 +58,15 @@ func DeregisterLocalFile(filePath string) { // If the handler returns a io.ReadCloser Close() is called when the // request is finished. // -// mysql.RegisterReaderHandler("data", func() io.Reader { -// var csvReader io.Reader // Some Reader that returns CSV data -// ... // Open Reader here -// return csvReader -// }) -// err := db.Exec("LOAD DATA LOCAL INFILE 'Reader::data' INTO TABLE foo") -// if err != nil { -// ... +// mysql.RegisterReaderHandler("data", func() io.Reader { +// var csvReader io.Reader // Some Reader that returns CSV data +// ... // Open Reader here +// return csvReader +// }) +// err := db.Exec("LOAD DATA LOCAL INFILE 'Reader::data' INTO TABLE foo") +// if err != nil { +// ... +// func RegisterReaderHandler(name string, handler func() io.Reader) { readerRegisterLock.Lock() // lazy map init diff --git a/vendor/github.com/go-sql-driver/mysql/utils.go b/vendor/github.com/go-sql-driver/mysql/utils.go index ae7be39..d6545f5 100644 --- a/vendor/github.com/go-sql-driver/mysql/utils.go +++ b/vendor/github.com/go-sql-driver/mysql/utils.go @@ -35,25 +35,26 @@ var ( // Note: The provided tls.Config is exclusively owned by the driver after // registering it. 
// -// rootCertPool := x509.NewCertPool() -// pem, err := ioutil.ReadFile("/path/ca-cert.pem") -// if err != nil { -// log.Fatal(err) -// } -// if ok := rootCertPool.AppendCertsFromPEM(pem); !ok { -// log.Fatal("Failed to append PEM.") -// } -// clientCert := make([]tls.Certificate, 0, 1) -// certs, err := tls.LoadX509KeyPair("/path/client-cert.pem", "/path/client-key.pem") -// if err != nil { -// log.Fatal(err) -// } -// clientCert = append(clientCert, certs) -// mysql.RegisterTLSConfig("custom", &tls.Config{ -// RootCAs: rootCertPool, -// Certificates: clientCert, -// }) -// db, err := sql.Open("mysql", "user@tcp(localhost:3306)/test?tls=custom") +// rootCertPool := x509.NewCertPool() +// pem, err := ioutil.ReadFile("/path/ca-cert.pem") +// if err != nil { +// log.Fatal(err) +// } +// if ok := rootCertPool.AppendCertsFromPEM(pem); !ok { +// log.Fatal("Failed to append PEM.") +// } +// clientCert := make([]tls.Certificate, 0, 1) +// certs, err := tls.LoadX509KeyPair("/path/client-cert.pem", "/path/client-key.pem") +// if err != nil { +// log.Fatal(err) +// } +// clientCert = append(clientCert, certs) +// mysql.RegisterTLSConfig("custom", &tls.Config{ +// RootCAs: rootCertPool, +// Certificates: clientCert, +// }) +// db, err := sql.Open("mysql", "user@tcp(localhost:3306)/test?tls=custom") +// func RegisterTLSConfig(key string, config *tls.Config) error { if _, isBool := readBool(key); isBool || strings.ToLower(key) == "skip-verify" || strings.ToLower(key) == "preferred" { return fmt.Errorf("key '%s' is reserved", key) diff --git a/vendor/github.com/golang-jwt/jwt/ed25519.go b/vendor/github.com/golang-jwt/jwt/ed25519.go index 9c18f05..a2f8ddb 100644 --- a/vendor/github.com/golang-jwt/jwt/ed25519.go +++ b/vendor/github.com/golang-jwt/jwt/ed25519.go @@ -1,8 +1,9 @@ package jwt import ( - "crypto/ed25519" "errors" + + "crypto/ed25519" ) var ( diff --git a/vendor/github.com/golang-jwt/jwt/map_claims.go b/vendor/github.com/golang-jwt/jwt/map_claims.go index 9fa1454..72c79f9 100644 --- a/vendor/github.com/golang-jwt/jwt/map_claims.go +++ b/vendor/github.com/golang-jwt/jwt/map_claims.go @@ -3,6 +3,7 @@ package jwt import ( "encoding/json" "errors" + // "fmt" ) // Claims type that uses the map[string]interface{} for JSON decoding diff --git a/vendor/github.com/golang-jwt/jwt/rsa_pss.go b/vendor/github.com/golang-jwt/jwt/rsa_pss.go index 370c5a8..c014708 100644 --- a/vendor/github.com/golang-jwt/jwt/rsa_pss.go +++ b/vendor/github.com/golang-jwt/jwt/rsa_pss.go @@ -1,4 +1,3 @@ -//go:build go1.4 // +build go1.4 package jwt diff --git a/vendor/github.com/golang-jwt/jwt/v4/ed25519.go b/vendor/github.com/golang-jwt/jwt/v4/ed25519.go index 24bd027..07d3aac 100644 --- a/vendor/github.com/golang-jwt/jwt/v4/ed25519.go +++ b/vendor/github.com/golang-jwt/jwt/v4/ed25519.go @@ -1,10 +1,11 @@ package jwt import ( + "errors" + "crypto" "crypto/ed25519" "crypto/rand" - "errors" ) var ( diff --git a/vendor/github.com/golang-jwt/jwt/v4/map_claims.go b/vendor/github.com/golang-jwt/jwt/v4/map_claims.go index 1d5c430..2700d64 100644 --- a/vendor/github.com/golang-jwt/jwt/v4/map_claims.go +++ b/vendor/github.com/golang-jwt/jwt/v4/map_claims.go @@ -4,6 +4,7 @@ import ( "encoding/json" "errors" "time" + // "fmt" ) // MapClaims is a claims type that uses the map[string]interface{} for JSON decoding. 
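Note: the go-sql-driver/mysql hunks above only re-indent the package's doc comments; the documented API is unchanged. For reference, the RegisterTLSConfig example from utils.go corresponds to a usage sketch along these lines (certificate path and DSN are placeholders, not values from this project):

package main

import (
	"crypto/tls"
	"crypto/x509"
	"database/sql"
	"log"
	"os"

	"github.com/go-sql-driver/mysql"
)

func main() {
	// Build a root CA pool from a PEM file (placeholder path).
	rootCertPool := x509.NewCertPool()
	pem, err := os.ReadFile("/path/ca-cert.pem")
	if err != nil {
		log.Fatal(err)
	}
	if ok := rootCertPool.AppendCertsFromPEM(pem); !ok {
		log.Fatal("failed to append PEM")
	}

	// Register the TLS config under a custom key...
	if err := mysql.RegisterTLSConfig("custom", &tls.Config{RootCAs: rootCertPool}); err != nil {
		log.Fatal(err)
	}

	// ...and reference that key via the DSN's tls parameter.
	db, err := sql.Open("mysql", "user:password@tcp(localhost:3306)/test?tls=custom")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
}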
diff --git a/vendor/github.com/golang-jwt/jwt/v5/ed25519.go b/vendor/github.com/golang-jwt/jwt/v5/ed25519.go index eb6bdf0..3db00e4 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/ed25519.go +++ b/vendor/github.com/golang-jwt/jwt/v5/ed25519.go @@ -1,10 +1,11 @@ package jwt import ( + "errors" + "crypto" "crypto/ed25519" "crypto/rand" - "errors" ) var ( diff --git a/vendor/github.com/josharian/intern/README.md b/vendor/github.com/josharian/intern/README.md new file mode 100644 index 0000000..ffc44b2 --- /dev/null +++ b/vendor/github.com/josharian/intern/README.md @@ -0,0 +1,5 @@ +Docs: https://godoc.org/github.com/josharian/intern + +See also [Go issue 5160](https://golang.org/issue/5160). + +License: MIT diff --git a/vendor/github.com/josharian/intern/intern.go b/vendor/github.com/josharian/intern/intern.go new file mode 100644 index 0000000..7acb1fe --- /dev/null +++ b/vendor/github.com/josharian/intern/intern.go @@ -0,0 +1,44 @@ +// Package intern interns strings. +// Interning is best effort only. +// Interned strings may be removed automatically +// at any time without notification. +// All functions may be called concurrently +// with themselves and each other. +package intern + +import "sync" + +var ( + pool sync.Pool = sync.Pool{ + New: func() interface{} { + return make(map[string]string) + }, + } +) + +// String returns s, interned. +func String(s string) string { + m := pool.Get().(map[string]string) + c, ok := m[s] + if ok { + pool.Put(m) + return c + } + m[s] = s + pool.Put(m) + return s +} + +// Bytes returns b converted to a string, interned. +func Bytes(b []byte) string { + m := pool.Get().(map[string]string) + c, ok := m[string(b)] + if ok { + pool.Put(m) + return c + } + s := string(b) + m[s] = s + pool.Put(m) + return s +} diff --git a/vendor/github.com/josharian/intern/license.md b/vendor/github.com/josharian/intern/license.md new file mode 100644 index 0000000..353d305 --- /dev/null +++ b/vendor/github.com/josharian/intern/license.md @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Josh Bleecher Snyder + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/kavenegar/kavenegar-go/message_countInbox.go b/vendor/github.com/kavenegar/kavenegar-go/message_countInbox.go index 54159b8..1508b58 100644 --- a/vendor/github.com/kavenegar/kavenegar-go/message_countInbox.go +++ b/vendor/github.com/kavenegar/kavenegar-go/message_countInbox.go @@ -5,20 +5,20 @@ import ( "time" ) -// MessageCountInbox ... +//MessageCountInbox ... 
type MessageCountInbox struct { Startdate int `json:"startdate"` Enddate int `json:"enddate"` Sumcount int `json:"sumcount"` } -// MessageCountInboxResult ... +//MessageCountInboxResult ... type MessageCountInboxResult struct { *Return `json:"return"` Entries []MessageCountInbox `json:"entries"` } -// CountInbox ... +//CountInbox ... func (message *MessageService) CountInbox(linenumber string, startdate time.Time, endate time.Time, isread bool) (MessageCountInbox, error) { v := url.Values{} @@ -33,13 +33,13 @@ func (message *MessageService) CountInbox(linenumber string, startdate time.Time return message.CreateCountInbox(v) } -// CreateCountInbox ... +//CreateCountInbox ... func (message *MessageService) CreateCountInbox(v url.Values) (MessageCountInbox, error) { u := message.client.EndPoint("sms", "countinbox") m := new(MessageCountInboxResult) err := message.client.Execute(u.String(), v, m) - if m.Entries == nil { - return MessageCountInbox{}, err + if m.Entries==nil{ + return MessageCountInbox{},err } return m.Entries[0], err -} +} \ No newline at end of file diff --git a/vendor/github.com/kavenegar/kavenegar-go/message_countPostalCode.go b/vendor/github.com/kavenegar/kavenegar-go/message_countPostalCode.go index c768fcc..86d5661 100644 --- a/vendor/github.com/kavenegar/kavenegar-go/message_countPostalCode.go +++ b/vendor/github.com/kavenegar/kavenegar-go/message_countPostalCode.go @@ -5,19 +5,19 @@ import ( "strconv" ) -// MessageCountPostalCode ... +//MessageCountPostalCode ... type MessageCountPostalCode struct { Section string `json:"section"` Value int `json:"value"` } -// MessageCountPostalCodeResult ... +//MessageCountPostalCodeResult ... type MessageCountPostalCodeResult struct { *Return `json:"return"` Entries []MessageCountPostalCode `json:"entries"` } -// CountPostalCode ... +//CountPostalCode ... func (message *MessageService) CountPostalCode(postalcode int64) ([]MessageCountPostalCode, error) { u := message.client.EndPoint("sms", "countpostalcode") m := new(MessageCountPostalCodeResult) diff --git a/vendor/github.com/kavenegar/kavenegar-go/message_countoutbox.go b/vendor/github.com/kavenegar/kavenegar-go/message_countoutbox.go index cbe3988..7ea8369 100644 --- a/vendor/github.com/kavenegar/kavenegar-go/message_countoutbox.go +++ b/vendor/github.com/kavenegar/kavenegar-go/message_countoutbox.go @@ -5,20 +5,20 @@ import ( "time" ) -// MessageCountOutbox ... +//MessageCountOutbox ... type MessageCountOutbox struct { *MessageCountInbox Sumpart int `json:"sumpart"` Cost int `json:"cost"` } -// MessageCountOutboxResult ... +//MessageCountOutboxResult ... type MessageCountOutboxResult struct { *Return `json:"return"` Entries []MessageCountOutbox `json:"entries"` } -// CountOutbox ... +//CountOutbox ... func (message *MessageService) CountOutbox(startdate time.Time, endate time.Time, status MessageStatusType) (MessageCountOutbox, error) { v := url.Values{} v.Set("startdate", ToUnix(startdate)) @@ -29,13 +29,13 @@ func (message *MessageService) CountOutbox(startdate time.Time, endate time.Time return message.CreateCountOutbox(v) } -// CreateCountOutbox ... +//CreateCountOutbox ... 
func (message *MessageService) CreateCountOutbox(v url.Values) (MessageCountOutbox, error) { u := message.client.EndPoint("sms", "countoutbox") m := new(MessageCountOutboxResult) err := message.client.Execute(u.String(), v, m) - if m.Entries == nil { - return MessageCountOutbox{}, err + if m.Entries==nil{ + return MessageCountOutbox{},err } return m.Entries[0], err } diff --git a/vendor/github.com/kavenegar/kavenegar-go/message_latestOutbox.go b/vendor/github.com/kavenegar/kavenegar-go/message_latestOutbox.go index 35ec68e..a09bb00 100644 --- a/vendor/github.com/kavenegar/kavenegar-go/message_latestOutbox.go +++ b/vendor/github.com/kavenegar/kavenegar-go/message_latestOutbox.go @@ -5,7 +5,7 @@ import ( "strconv" ) -// LatestOutbox ... +//LatestOutbox ... func (message *MessageService) LatestOutbox(sender string, pagesize int) ([]Message, error) { v := url.Values{} v.Set("sender", sender) @@ -13,7 +13,7 @@ func (message *MessageService) LatestOutbox(sender string, pagesize int) ([]Mess return message.CreateLatestOutbox(v) } -// CreateLatestOutbox ... +//CreateLatestOutbox ... func (message *MessageService) CreateLatestOutbox(v url.Values) ([]Message, error) { u := message.client.EndPoint("sms", "latestoutbox") vc := url.Values{} diff --git a/vendor/github.com/kavenegar/kavenegar-go/message_selectOutbox.go b/vendor/github.com/kavenegar/kavenegar-go/message_selectOutbox.go index 766765c..5de5711 100644 --- a/vendor/github.com/kavenegar/kavenegar-go/message_selectOutbox.go +++ b/vendor/github.com/kavenegar/kavenegar-go/message_selectOutbox.go @@ -5,7 +5,7 @@ import ( "time" ) -// SelectOutbox ... +//SelectOutbox ... func (message *MessageService) SelectOutbox(startdate time.Time, endate time.Time, sender string) ([]Message, error) { v := url.Values{} @@ -23,7 +23,7 @@ func (message *MessageService) SelectOutbox(startdate time.Time, endate time.Tim return message.CreateSelectOutbox(v) } -// CreateSelectOutbox ... +//CreateSelectOutbox ... func (message *MessageService) CreateSelectOutbox(v url.Values) ([]Message, error) { u := message.client.EndPoint("sms", "selectoutbox") m := new(MessageResult) diff --git a/vendor/github.com/kavenegar/kavenegar-go/message_sendByPostalCode.go b/vendor/github.com/kavenegar/kavenegar-go/message_sendByPostalCode.go index fe537b2..0817c77 100644 --- a/vendor/github.com/kavenegar/kavenegar-go/message_sendByPostalCode.go +++ b/vendor/github.com/kavenegar/kavenegar-go/message_sendByPostalCode.go @@ -6,7 +6,7 @@ import ( "time" ) -// SendPostalCode ... +//SendPostalCode ... func (m *MessageService) SendPostalCode(postalcode int64, sender string, message string, mcistartindex int, mcicount int, mtnstartindex int, mtncount int, date time.Time) ([]Message, error) { v := url.Values{} v.Set("postalcode", strconv.FormatInt(postalcode, 10)) @@ -20,9 +20,9 @@ func (m *MessageService) SendPostalCode(postalcode int64, sender string, message return m.CreateSendPostalCode(v) } -// CreateSendPostalCode ... +//CreateSendPostalCode ... 
func (m *MessageService) CreateSendPostalCode(v url.Values) ([]Message, error) { - u := m.client.EndPoint("sms", "sendbypostalcode") + u := m.client.EndPoint("sms", "sendbypostalcode") res := new(MessageResult) err := m.client.Execute(u.String(), v, res) return res.Entries, err diff --git a/vendor/github.com/kavenegar/kavenegar-go/message_statusLocal.go b/vendor/github.com/kavenegar/kavenegar-go/message_statusLocal.go index 2b060e9..b66d18f 100644 --- a/vendor/github.com/kavenegar/kavenegar-go/message_statusLocal.go +++ b/vendor/github.com/kavenegar/kavenegar-go/message_statusLocal.go @@ -5,26 +5,26 @@ import ( "strconv" ) -// MessageStatusLocal ... +//MessageStatusLocal ... type MessageStatusLocal struct { *MessageStatus LocalID string `json:"localid"` } -// MessageStatusLocalResult ... +//MessageStatusLocalResult ... type MessageStatusLocalResult struct { *Return `json:"return"` Entries []MessageStatusLocal `json:"entries"` } -// StatusLocal ... +//StatusLocal ... func (message *MessageService) StatusLocal(localid int64) (MessageStatusLocal, error) { u := message.client.EndPoint("sms", "statuslocalmessageid") m := new(MessageStatusLocalResult) v := url.Values{} v.Set("localid", strconv.FormatInt(localid, 10)) err := message.client.Execute(u.String(), v, m) - if err != nil { + if err!=nil{ return MessageStatusLocal{}, err } return m.Entries[0], err diff --git a/vendor/github.com/kavenegar/kavenegar-go/utils.go b/vendor/github.com/kavenegar/kavenegar-go/utils.go index 28e96f2..a7bcbde 100644 --- a/vendor/github.com/kavenegar/kavenegar-go/utils.go +++ b/vendor/github.com/kavenegar/kavenegar-go/utils.go @@ -10,23 +10,23 @@ import ( "time" ) -// ToString ... +//ToString ... func ToString(i interface{}) string { return strings.Trim(strings.Replace(fmt.Sprint(i), " ", ",", -1), "[]") } -// ToJson ... +//ToJson ... func ToJson(i interface{}) string { _json, _ := json.Marshal(i) return string(_json) } -// ToUnix ... +//ToUnix ... func ToUnix(t time.Time) string { return strconv.FormatInt(t.Unix(), 10) } -// structToUrlValues ... +//structToUrlValues ... 
func structToURLValues(i interface{}) url.Values { v := url.Values{} if reflect.ValueOf(i).IsNil() { diff --git a/vendor/github.com/knadh/koanf/maps/maps.go b/vendor/github.com/knadh/koanf/maps/maps.go index cad1068..ff95608 100644 --- a/vendor/github.com/knadh/koanf/maps/maps.go +++ b/vendor/github.com/knadh/koanf/maps/maps.go @@ -5,10 +5,9 @@ package maps import ( "fmt" + "github.com/mitchellh/copystructure" "reflect" "strings" - - "github.com/mitchellh/copystructure" ) // Flatten takes a map[string]interface{} and traverses it and flattens diff --git a/vendor/github.com/knadh/koanf/providers/structs/structs.go b/vendor/github.com/knadh/koanf/providers/structs/structs.go index 7508b62..0022083 100644 --- a/vendor/github.com/knadh/koanf/providers/structs/structs.go +++ b/vendor/github.com/knadh/koanf/providers/structs/structs.go @@ -6,6 +6,7 @@ import ( "errors" "github.com/fatih/structs" + "github.com/knadh/koanf/maps" ) diff --git a/vendor/github.com/labstack/echo-jwt/v4/extractors.go b/vendor/github.com/labstack/echo-jwt/v4/extractors.go index d21f53c..f72537e 100644 --- a/vendor/github.com/labstack/echo-jwt/v4/extractors.go +++ b/vendor/github.com/labstack/echo-jwt/v4/extractors.go @@ -6,11 +6,10 @@ package echojwt import ( "errors" "fmt" - "net/textproto" - "strings" - "github.com/labstack/echo/v4" "github.com/labstack/echo/v4/middleware" + "net/textproto" + "strings" ) const ( diff --git a/vendor/github.com/labstack/echo/v4/CHANGELOG.md b/vendor/github.com/labstack/echo/v4/CHANGELOG.md index cc17e28..de3857f 100644 --- a/vendor/github.com/labstack/echo/v4/CHANGELOG.md +++ b/vendor/github.com/labstack/echo/v4/CHANGELOG.md @@ -1,5 +1,36 @@ # Changelog +## v4.12.0 - 2024-04-15 + +**Security** + +* Update golang.org/x/net dep because of [GO-2024-2687](https://pkg.go.dev/vuln/GO-2024-2687) by @aldas in https://github.com/labstack/echo/pull/2625 + + +**Enhancements** + +* binder: make binding to Map work better with string destinations by @aldas in https://github.com/labstack/echo/pull/2554 +* README.md: add Encore as sponsor by @marcuskohlberg in https://github.com/labstack/echo/pull/2579 +* Reorder paragraphs in README.md by @aldas in https://github.com/labstack/echo/pull/2581 +* CI: upgrade actions/checkout to v4 by @aldas in https://github.com/labstack/echo/pull/2584 +* Remove default charset from 'application/json' Content-Type header by @doortts in https://github.com/labstack/echo/pull/2568 +* CI: Use Go 1.22 by @aldas in https://github.com/labstack/echo/pull/2588 +* binder: allow binding to a nil map by @georgmu in https://github.com/labstack/echo/pull/2574 +* Add Skipper Unit Test In BasicBasicAuthConfig and Add More Detail Explanation regarding BasicAuthValidator by @RyoKusnadi in https://github.com/labstack/echo/pull/2461 +* fix some typos by @teslaedison in https://github.com/labstack/echo/pull/2603 +* fix: some typos by @pomadev in https://github.com/labstack/echo/pull/2596 +* Allow ResponseWriters to unwrap writers when flushing/hijacking by @aldas in https://github.com/labstack/echo/pull/2595 +* Add SPDX licence comments to files. by @aldas in https://github.com/labstack/echo/pull/2604 +* Upgrade deps by @aldas in https://github.com/labstack/echo/pull/2605 +* Change type definition blocks to single declarations. 
This helps copy… by @aldas in https://github.com/labstack/echo/pull/2606 +* Fix Real IP logic by @cl-bvl in https://github.com/labstack/echo/pull/2550 +* Default binder can use `UnmarshalParams(params []string) error` inter… by @aldas in https://github.com/labstack/echo/pull/2607 +* Default binder can bind pointer to slice as struct field. For example `*[]string` by @aldas in https://github.com/labstack/echo/pull/2608 +* Remove maxparam dependence from Context by @aldas in https://github.com/labstack/echo/pull/2611 +* When route is registered with empty path it is normalized to `/`. by @aldas in https://github.com/labstack/echo/pull/2616 +* proxy middleware should use httputil.ReverseProxy for SSE requests by @aldas in https://github.com/labstack/echo/pull/2624 + + ## v4.11.4 - 2023-12-20 **Security** diff --git a/vendor/github.com/labstack/echo/v4/Makefile b/vendor/github.com/labstack/echo/v4/Makefile index 6aff6a8..f9e5afb 100644 --- a/vendor/github.com/labstack/echo/v4/Makefile +++ b/vendor/github.com/labstack/echo/v4/Makefile @@ -31,6 +31,6 @@ benchmark: ## Run benchmarks help: ## Display this help screen @grep -h -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' -goversion ?= "1.17" -test_version: ## Run tests inside Docker with given version (defaults to 1.17 oldest supported). Example: make test_version goversion=1.17 +goversion ?= "1.19" +test_version: ## Run tests inside Docker with given version (defaults to 1.19 oldest supported). Example: make test_version goversion=1.19 @docker run --rm -it -v $(shell pwd):/project golang:$(goversion) /bin/sh -c "cd /project && make init check" diff --git a/vendor/github.com/labstack/echo/v4/README.md b/vendor/github.com/labstack/echo/v4/README.md index 18accea..351ba3c 100644 --- a/vendor/github.com/labstack/echo/v4/README.md +++ b/vendor/github.com/labstack/echo/v4/README.md @@ -9,20 +9,18 @@ [![Twitter](https://img.shields.io/badge/twitter-@labstack-55acee.svg?style=flat-square)](https://twitter.com/labstack) [![License](http://img.shields.io/badge/license-mit-blue.svg?style=flat-square)](https://raw.githubusercontent.com/labstack/echo/master/LICENSE) -## Supported Go versions +## Echo -Latest version of Echo supports last four Go major [releases](https://go.dev/doc/devel/release) and might work with -older versions. +High performance, extensible, minimalist Go web framework. -As of version 4.0.0, Echo is available as a [Go module](https://github.com/golang/go/wiki/Modules). -Therefore a Go version capable of understanding /vN suffixed imports is required: +* [Official website](https://echo.labstack.com) +* [Quick start](https://echo.labstack.com/docs/quick-start) +* [Middlewares](https://echo.labstack.com/docs/category/middleware) -Any of these versions will allow you to import Echo as `github.com/labstack/echo/v4` which is the recommended -way of using Echo going forward. +Help and questions: [Github Discussions](https://github.com/labstack/echo/discussions) -For older versions, please use the latest v3 tag. -## Feature Overview +### Feature Overview - Optimized HTTP router which smartly prioritize routes - Build robust and scalable RESTful APIs @@ -38,6 +36,18 @@ For older versions, please use the latest v3 tag. - Automatic TLS via Let’s Encrypt - HTTP/2 support +## Sponsors + +
+ [image: encore icon] + Encore – the platform for building Go-based cloud backends +
+
+ +Click [here](https://github.com/sponsors/labstack) for more information on sponsorship. + ## Benchmarks Date: 2020/11/11
@@ -57,6 +67,7 @@ The benchmarks above were run on an Intel(R) Core(TM) i7-6820HQ CPU @ 2.70GHz // go get github.com/labstack/echo/{version} go get github.com/labstack/echo/v4 ``` +Latest version of Echo supports last four Go major [releases](https://go.dev/doc/devel/release) and might work with older versions. ### Example @@ -117,10 +128,6 @@ of middlewares in this list. Please send a PR to add your own library here. -## Help - -- [Forum](https://github.com/labstack/echo/discussions) - ## Contribute **Use issues for everything** diff --git a/vendor/github.com/labstack/echo/v4/bind.go b/vendor/github.com/labstack/echo/v4/bind.go index 374a2ae..507def3 100644 --- a/vendor/github.com/labstack/echo/v4/bind.go +++ b/vendor/github.com/labstack/echo/v4/bind.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package echo import ( @@ -11,23 +14,28 @@ import ( "strings" ) -type ( - // Binder is the interface that wraps the Bind method. - Binder interface { - Bind(i interface{}, c Context) error - } +// Binder is the interface that wraps the Bind method. +type Binder interface { + Bind(i interface{}, c Context) error +} - // DefaultBinder is the default implementation of the Binder interface. - DefaultBinder struct{} +// DefaultBinder is the default implementation of the Binder interface. +type DefaultBinder struct{} - // BindUnmarshaler is the interface used to wrap the UnmarshalParam method. - // Types that don't implement this, but do implement encoding.TextUnmarshaler - // will use that interface instead. - BindUnmarshaler interface { - // UnmarshalParam decodes and assigns a value from an form or query param. - UnmarshalParam(param string) error - } -) +// BindUnmarshaler is the interface used to wrap the UnmarshalParam method. +// Types that don't implement this, but do implement encoding.TextUnmarshaler +// will use that interface instead. +type BindUnmarshaler interface { + // UnmarshalParam decodes and assigns a value from an form or query param. + UnmarshalParam(param string) error +} + +// bindMultipleUnmarshaler is used by binder to unmarshal multiple values from request at once to +// type implementing this interface. For example request could have multiple query fields `?a=1&a=2&b=test` in that case +// for `a` following slice `["1", "2"] will be passed to unmarshaller. +type bindMultipleUnmarshaler interface { + UnmarshalParams(params []string) error +} // BindPathParams binds path params to bindable object func (b *DefaultBinder) BindPathParams(c Context, i interface{}) error { @@ -131,10 +139,29 @@ func (b *DefaultBinder) bindData(destination interface{}, data map[string][]stri typ := reflect.TypeOf(destination).Elem() val := reflect.ValueOf(destination).Elem() - // Map - if typ.Kind() == reflect.Map { + // Support binding to limited Map destinations: + // - map[string][]string, + // - map[string]string <-- (binds first value from data slice) + // - map[string]interface{} + // You are better off binding to struct but there are user who want this map feature. Source of data for these cases are: + // params,query,header,form as these sources produce string values, most of the time slice of strings, actually. 
+ if typ.Kind() == reflect.Map && typ.Key().Kind() == reflect.String { + k := typ.Elem().Kind() + isElemInterface := k == reflect.Interface + isElemString := k == reflect.String + isElemSliceOfStrings := k == reflect.Slice && typ.Elem().Elem().Kind() == reflect.String + if !(isElemSliceOfStrings || isElemString || isElemInterface) { + return nil + } + if val.IsNil() { + val.Set(reflect.MakeMap(typ)) + } for k, v := range data { - val.SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(v[0])) + if isElemString { + val.SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(v[0])) + } else { + val.SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(v)) + } } return nil } @@ -161,14 +188,14 @@ func (b *DefaultBinder) bindData(destination interface{}, data map[string][]stri } structFieldKind := structField.Kind() inputFieldName := typeField.Tag.Get(tag) - if typeField.Anonymous && structField.Kind() == reflect.Struct && inputFieldName != "" { + if typeField.Anonymous && structFieldKind == reflect.Struct && inputFieldName != "" { // if anonymous struct with query/param/form tags, report an error return errors.New("query/param/form tags are not allowed with anonymous struct field") } if inputFieldName == "" { // If tag is nil, we inspect if the field is a not BindUnmarshaler struct and try to bind data into it (might contains fields with tags). - // structs that implement BindUnmarshaler are binded only when they have explicit tag + // structs that implement BindUnmarshaler are bound only when they have explicit tag if _, ok := structField.Addr().Interface().(BindUnmarshaler); !ok && structFieldKind == reflect.Struct { if err := b.bindData(structField.Addr().Interface(), data, tag); err != nil { return err @@ -197,27 +224,46 @@ func (b *DefaultBinder) bindData(destination interface{}, data map[string][]stri continue } - // Call this first, in case we're dealing with an alias to an array type - if ok, err := unmarshalField(typeField.Type.Kind(), inputValue[0], structField); ok { + // NOTE: algorithm here is not particularly sophisticated. It probably does not work with absurd types like `**[]*int` + // but it is smart enough to handle niche cases like `*int`,`*[]string`,`[]*int` . + + // try unmarshalling first, in case we're dealing with an alias to an array type + if ok, err := unmarshalInputsToField(typeField.Type.Kind(), inputValue, structField); ok { if err != nil { return err } continue } - numElems := len(inputValue) - if structFieldKind == reflect.Slice && numElems > 0 { + if ok, err := unmarshalInputToField(typeField.Type.Kind(), inputValue[0], structField); ok { + if err != nil { + return err + } + continue + } + + // we could be dealing with pointer to slice `*[]string` so dereference it. There are wierd OpenAPI generators + // that could create struct fields like that. 
+ if structFieldKind == reflect.Pointer { + structFieldKind = structField.Elem().Kind() + structField = structField.Elem() + } + + if structFieldKind == reflect.Slice { sliceOf := structField.Type().Elem().Kind() + numElems := len(inputValue) slice := reflect.MakeSlice(structField.Type(), numElems, numElems) for j := 0; j < numElems; j++ { if err := setWithProperType(sliceOf, inputValue[j], slice.Index(j)); err != nil { return err } } - val.Field(i).Set(slice) - } else if err := setWithProperType(typeField.Type.Kind(), inputValue[0], structField); err != nil { - return err + structField.Set(slice) + continue + } + if err := setWithProperType(structFieldKind, inputValue[0], structField); err != nil { + return err } } return nil @@ -225,7 +271,7 @@ func (b *DefaultBinder) bindData(destination interface{}, data map[string][]stri func setWithProperType(valueKind reflect.Kind, val string, structField reflect.Value) error { // But also call it here, in case we're dealing with an array of BindUnmarshalers - if ok, err := unmarshalField(valueKind, val, structField); ok { + if ok, err := unmarshalInputToField(valueKind, val, structField); ok { return err } @@ -266,35 +312,41 @@ func setWithProperType(valueKind reflect.Kind, val string, structField reflect.V return nil } -func unmarshalField(valueKind reflect.Kind, val string, field reflect.Value) (bool, error) { - switch valueKind { - case reflect.Ptr: - return unmarshalFieldPtr(val, field) - default: - return unmarshalFieldNonPtr(val, field) +func unmarshalInputsToField(valueKind reflect.Kind, values []string, field reflect.Value) (bool, error) { + if valueKind == reflect.Ptr { + if field.IsNil() { + field.Set(reflect.New(field.Type().Elem())) + } + field = field.Elem() } + + fieldIValue := field.Addr().Interface() + unmarshaler, ok := fieldIValue.(bindMultipleUnmarshaler) + if !ok { + return false, nil + } + return true, unmarshaler.UnmarshalParams(values) } -func unmarshalFieldNonPtr(value string, field reflect.Value) (bool, error) { - fieldIValue := field.Addr().Interface() - if unmarshaler, ok := fieldIValue.(BindUnmarshaler); ok { - return true, unmarshaler.UnmarshalParam(value) +func unmarshalInputToField(valueKind reflect.Kind, val string, field reflect.Value) (bool, error) { + if valueKind == reflect.Ptr { + if field.IsNil() { + field.Set(reflect.New(field.Type().Elem())) + } + field = field.Elem() } - if unmarshaler, ok := fieldIValue.(encoding.TextUnmarshaler); ok { - return true, unmarshaler.UnmarshalText([]byte(value)) + + fieldIValue := field.Addr().Interface() + switch unmarshaler := fieldIValue.(type) { + case BindUnmarshaler: + return true, unmarshaler.UnmarshalParam(val) + case encoding.TextUnmarshaler: + return true, unmarshaler.UnmarshalText([]byte(val)) } return false, nil } -func unmarshalFieldPtr(value string, field reflect.Value) (bool, error) { - if field.IsNil() { - // Initialize the pointer to a nil value - field.Set(reflect.New(field.Type().Elem())) - } - return unmarshalFieldNonPtr(value, field.Elem()) -} - func setIntField(value string, bitSize int, field reflect.Value) error { if value == "" { value = "0" diff --git a/vendor/github.com/labstack/echo/v4/binder.go b/vendor/github.com/labstack/echo/v4/binder.go index 8e7b814..ebabeaf 100644 --- a/vendor/github.com/labstack/echo/v4/binder.go +++ b/vendor/github.com/labstack/echo/v4/binder.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package echo import ( diff --git 
a/vendor/github.com/labstack/echo/v4/context.go b/vendor/github.com/labstack/echo/v4/context.go index 6a18116..4edaa2e 100644 --- a/vendor/github.com/labstack/echo/v4/context.go +++ b/vendor/github.com/labstack/echo/v4/context.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package echo import ( @@ -13,204 +16,216 @@ import ( "sync" ) -type ( - // Context represents the context of the current HTTP request. It holds request and - // response objects, path, path parameters, data and registered handler. - Context interface { - // Request returns `*http.Request`. - Request() *http.Request +// Context represents the context of the current HTTP request. It holds request and +// response objects, path, path parameters, data and registered handler. +type Context interface { + // Request returns `*http.Request`. + Request() *http.Request - // SetRequest sets `*http.Request`. - SetRequest(r *http.Request) + // SetRequest sets `*http.Request`. + SetRequest(r *http.Request) - // SetResponse sets `*Response`. - SetResponse(r *Response) + // SetResponse sets `*Response`. + SetResponse(r *Response) - // Response returns `*Response`. - Response() *Response + // Response returns `*Response`. + Response() *Response - // IsTLS returns true if HTTP connection is TLS otherwise false. - IsTLS() bool + // IsTLS returns true if HTTP connection is TLS otherwise false. + IsTLS() bool - // IsWebSocket returns true if HTTP connection is WebSocket otherwise false. - IsWebSocket() bool + // IsWebSocket returns true if HTTP connection is WebSocket otherwise false. + IsWebSocket() bool - // Scheme returns the HTTP protocol scheme, `http` or `https`. - Scheme() string + // Scheme returns the HTTP protocol scheme, `http` or `https`. + Scheme() string - // RealIP returns the client's network address based on `X-Forwarded-For` - // or `X-Real-IP` request header. - // The behavior can be configured using `Echo#IPExtractor`. - RealIP() string + // RealIP returns the client's network address based on `X-Forwarded-For` + // or `X-Real-IP` request header. + // The behavior can be configured using `Echo#IPExtractor`. + RealIP() string - // Path returns the registered path for the handler. - Path() string + // Path returns the registered path for the handler. + Path() string - // SetPath sets the registered path for the handler. - SetPath(p string) + // SetPath sets the registered path for the handler. + SetPath(p string) - // Param returns path parameter by name. - Param(name string) string + // Param returns path parameter by name. + Param(name string) string - // ParamNames returns path parameter names. - ParamNames() []string + // ParamNames returns path parameter names. + ParamNames() []string - // SetParamNames sets path parameter names. - SetParamNames(names ...string) + // SetParamNames sets path parameter names. + SetParamNames(names ...string) - // ParamValues returns path parameter values. - ParamValues() []string + // ParamValues returns path parameter values. + ParamValues() []string - // SetParamValues sets path parameter values. - SetParamValues(values ...string) + // SetParamValues sets path parameter values. + SetParamValues(values ...string) - // QueryParam returns the query param for the provided name. - QueryParam(name string) string + // QueryParam returns the query param for the provided name. + QueryParam(name string) string - // QueryParams returns the query parameters as `url.Values`. 
- QueryParams() url.Values + // QueryParams returns the query parameters as `url.Values`. + QueryParams() url.Values - // QueryString returns the URL query string. - QueryString() string + // QueryString returns the URL query string. + QueryString() string - // FormValue returns the form field value for the provided name. - FormValue(name string) string + // FormValue returns the form field value for the provided name. + FormValue(name string) string - // FormParams returns the form parameters as `url.Values`. - FormParams() (url.Values, error) + // FormParams returns the form parameters as `url.Values`. + FormParams() (url.Values, error) - // FormFile returns the multipart form file for the provided name. - FormFile(name string) (*multipart.FileHeader, error) + // FormFile returns the multipart form file for the provided name. + FormFile(name string) (*multipart.FileHeader, error) - // MultipartForm returns the multipart form. - MultipartForm() (*multipart.Form, error) + // MultipartForm returns the multipart form. + MultipartForm() (*multipart.Form, error) - // Cookie returns the named cookie provided in the request. - Cookie(name string) (*http.Cookie, error) + // Cookie returns the named cookie provided in the request. + Cookie(name string) (*http.Cookie, error) - // SetCookie adds a `Set-Cookie` header in HTTP response. - SetCookie(cookie *http.Cookie) + // SetCookie adds a `Set-Cookie` header in HTTP response. + SetCookie(cookie *http.Cookie) - // Cookies returns the HTTP cookies sent with the request. - Cookies() []*http.Cookie + // Cookies returns the HTTP cookies sent with the request. + Cookies() []*http.Cookie - // Get retrieves data from the context. - Get(key string) interface{} + // Get retrieves data from the context. + Get(key string) interface{} - // Set saves data in the context. - Set(key string, val interface{}) + // Set saves data in the context. + Set(key string, val interface{}) - // Bind binds path params, query params and the request body into provided type `i`. The default binder - // binds body based on Content-Type header. - Bind(i interface{}) error + // Bind binds path params, query params and the request body into provided type `i`. The default binder + // binds body based on Content-Type header. + Bind(i interface{}) error - // Validate validates provided `i`. It is usually called after `Context#Bind()`. - // Validator must be registered using `Echo#Validator`. - Validate(i interface{}) error + // Validate validates provided `i`. It is usually called after `Context#Bind()`. + // Validator must be registered using `Echo#Validator`. + Validate(i interface{}) error - // Render renders a template with data and sends a text/html response with status - // code. Renderer must be registered using `Echo.Renderer`. - Render(code int, name string, data interface{}) error + // Render renders a template with data and sends a text/html response with status + // code. Renderer must be registered using `Echo.Renderer`. + Render(code int, name string, data interface{}) error - // HTML sends an HTTP response with status code. - HTML(code int, html string) error + // HTML sends an HTTP response with status code. + HTML(code int, html string) error - // HTMLBlob sends an HTTP blob response with status code. - HTMLBlob(code int, b []byte) error + // HTMLBlob sends an HTTP blob response with status code. + HTMLBlob(code int, b []byte) error - // String sends a string response with status code. 
- String(code int, s string) error + // String sends a string response with status code. + String(code int, s string) error - // JSON sends a JSON response with status code. - JSON(code int, i interface{}) error + // JSON sends a JSON response with status code. + JSON(code int, i interface{}) error - // JSONPretty sends a pretty-print JSON with status code. - JSONPretty(code int, i interface{}, indent string) error + // JSONPretty sends a pretty-print JSON with status code. + JSONPretty(code int, i interface{}, indent string) error - // JSONBlob sends a JSON blob response with status code. - JSONBlob(code int, b []byte) error + // JSONBlob sends a JSON blob response with status code. + JSONBlob(code int, b []byte) error - // JSONP sends a JSONP response with status code. It uses `callback` to construct - // the JSONP payload. - JSONP(code int, callback string, i interface{}) error + // JSONP sends a JSONP response with status code. It uses `callback` to construct + // the JSONP payload. + JSONP(code int, callback string, i interface{}) error - // JSONPBlob sends a JSONP blob response with status code. It uses `callback` - // to construct the JSONP payload. - JSONPBlob(code int, callback string, b []byte) error + // JSONPBlob sends a JSONP blob response with status code. It uses `callback` + // to construct the JSONP payload. + JSONPBlob(code int, callback string, b []byte) error - // XML sends an XML response with status code. - XML(code int, i interface{}) error + // XML sends an XML response with status code. + XML(code int, i interface{}) error - // XMLPretty sends a pretty-print XML with status code. - XMLPretty(code int, i interface{}, indent string) error + // XMLPretty sends a pretty-print XML with status code. + XMLPretty(code int, i interface{}, indent string) error - // XMLBlob sends an XML blob response with status code. - XMLBlob(code int, b []byte) error + // XMLBlob sends an XML blob response with status code. + XMLBlob(code int, b []byte) error - // Blob sends a blob response with status code and content type. - Blob(code int, contentType string, b []byte) error + // Blob sends a blob response with status code and content type. + Blob(code int, contentType string, b []byte) error - // Stream sends a streaming response with status code and content type. - Stream(code int, contentType string, r io.Reader) error + // Stream sends a streaming response with status code and content type. + Stream(code int, contentType string, r io.Reader) error - // File sends a response with the content of the file. - File(file string) error + // File sends a response with the content of the file. + File(file string) error - // Attachment sends a response as attachment, prompting client to save the - // file. - Attachment(file string, name string) error + // Attachment sends a response as attachment, prompting client to save the + // file. + Attachment(file string, name string) error - // Inline sends a response as inline, opening the file in the browser. - Inline(file string, name string) error + // Inline sends a response as inline, opening the file in the browser. + Inline(file string, name string) error - // NoContent sends a response with no body and a status code. - NoContent(code int) error + // NoContent sends a response with no body and a status code. + NoContent(code int) error - // Redirect redirects the request to a provided URL with status code. - Redirect(code int, url string) error + // Redirect redirects the request to a provided URL with status code. 
+ Redirect(code int, url string) error - // Error invokes the registered global HTTP error handler. Generally used by middleware. - // A side-effect of calling global error handler is that now Response has been committed (sent to the client) and - // middlewares up in chain can not change Response status code or Response body anymore. - // - // Avoid using this method in handlers as no middleware will be able to effectively handle errors after that. - Error(err error) + // Error invokes the registered global HTTP error handler. Generally used by middleware. + // A side-effect of calling global error handler is that now Response has been committed (sent to the client) and + // middlewares up in chain can not change Response status code or Response body anymore. + // + // Avoid using this method in handlers as no middleware will be able to effectively handle errors after that. + Error(err error) - // Handler returns the matched handler by router. - Handler() HandlerFunc + // Handler returns the matched handler by router. + Handler() HandlerFunc - // SetHandler sets the matched handler by router. - SetHandler(h HandlerFunc) + // SetHandler sets the matched handler by router. + SetHandler(h HandlerFunc) - // Logger returns the `Logger` instance. - Logger() Logger + // Logger returns the `Logger` instance. + Logger() Logger - // SetLogger Set the logger - SetLogger(l Logger) + // SetLogger Set the logger + SetLogger(l Logger) - // Echo returns the `Echo` instance. - Echo() *Echo + // Echo returns the `Echo` instance. + Echo() *Echo - // Reset resets the context after request completes. It must be called along - // with `Echo#AcquireContext()` and `Echo#ReleaseContext()`. - // See `Echo#ServeHTTP()` - Reset(r *http.Request, w http.ResponseWriter) - } + // Reset resets the context after request completes. It must be called along + // with `Echo#AcquireContext()` and `Echo#ReleaseContext()`. + // See `Echo#ServeHTTP()` + Reset(r *http.Request, w http.ResponseWriter) +} - context struct { - request *http.Request - response *Response - path string - pnames []string - pvalues []string - query url.Values - handler HandlerFunc - store Map - echo *Echo - logger Logger - lock sync.RWMutex - } -) +type context struct { + request *http.Request + response *Response + query url.Values + echo *Echo + logger Logger + + store Map + lock sync.RWMutex + + // following fields are set by Router + + // path is route path that Router matched. It is empty string where there is no route match. + // Route registered with RouteNotFound is considered as a match and path therefore is not empty. + path string + + // pnames length is tied to param count for the matched route + pnames []string + + // Usually echo.Echo is sizing pvalues but there could be user created middlewares that decide to + // overwrite parameter by calling SetParamNames + SetParamValues. + // When echo.Echo allocated that slice it length/capacity is tied to echo.Echo.maxParam value. + // + // It is important that pvalues size is always equal or bigger to pnames length. + pvalues []string + handler HandlerFunc +} const ( // ContextKeyHeaderAllow is set by Router for getting value for `Allow` header in later stages of handler call chain. 
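The reworked `bindData` in `bind.go` above adds two behaviours worth illustrating: binding request values into string-keyed map destinations, and delivering repeated values in one call to a field type that implements `UnmarshalParams(params []string) error`. The following is a minimal sketch of both, assuming the vendored echo v4 API; the `CSV` type, route paths, and port are illustrative and not part of this repository:

```go
package main

import (
	"net/http"
	"strings"

	"github.com/labstack/echo/v4"
)

// CSV is an illustrative field type implementing the UnmarshalParams method
// that the updated binder looks for: repeated query values such as
// ?tag=a&tag=b,c arrive as a []string in a single call.
type CSV []string

func (c *CSV) UnmarshalParams(params []string) error {
	for _, p := range params {
		*c = append(*c, strings.Split(p, ",")...)
	}
	return nil
}

type filter struct {
	Tags CSV `query:"tag"`
}

func main() {
	e := echo.New()

	// Bind query parameters into a string-keyed map; per the comment in
	// bindData, only map[string]string, map[string][]string and
	// map[string]interface{} destinations are supported.
	e.GET("/raw", func(c echo.Context) error {
		params := map[string][]string{}
		if err := (&echo.DefaultBinder{}).BindQueryParams(c, &params); err != nil {
			return echo.NewHTTPError(http.StatusBadRequest, err.Error())
		}
		return c.JSON(http.StatusOK, params)
	})

	// Bind into a struct whose field type implements UnmarshalParams.
	e.GET("/filter", func(c echo.Context) error {
		f := filter{}
		if err := c.Bind(&f); err != nil {
			return echo.NewHTTPError(http.StatusBadRequest, err.Error())
		}
		return c.JSON(http.StatusOK, f.Tags)
	})

	e.Logger.Fatal(e.Start(":8080"))
}
```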
@@ -329,13 +344,9 @@ func (c *context) SetParamNames(names ...string) { c.pnames = names l := len(names) - if *c.echo.maxParam < l { - *c.echo.maxParam = l - } - if len(c.pvalues) < l { // Keeping the old pvalues just for backward compatibility, but it sounds that doesn't make sense to keep them, - // probably those values will be overriden in a Context#SetParamValues + // probably those values will be overridden in a Context#SetParamValues newPvalues := make([]string, l) copy(newPvalues, c.pvalues) c.pvalues = newPvalues @@ -347,11 +358,11 @@ func (c *context) ParamValues() []string { } func (c *context) SetParamValues(values ...string) { - // NOTE: Don't just set c.pvalues = values, because it has to have length c.echo.maxParam at all times + // NOTE: Don't just set c.pvalues = values, because it has to have length c.echo.maxParam (or bigger) at all times // It will brake the Router#Find code limit := len(values) - if limit > *c.echo.maxParam { - limit = *c.echo.maxParam + if limit > len(c.pvalues) { + c.pvalues = make([]string, limit) } for i := 0; i < limit; i++ { c.pvalues[i] = values[i] @@ -489,7 +500,7 @@ func (c *context) jsonPBlob(code int, callback string, i interface{}) (err error } func (c *context) json(code int, i interface{}, indent string) error { - c.writeContentType(MIMEApplicationJSONCharsetUTF8) + c.writeContentType(MIMEApplicationJSON) c.response.Status = code return c.echo.JSONSerializer.Serialize(c, i, indent) } @@ -507,7 +518,7 @@ func (c *context) JSONPretty(code int, i interface{}, indent string) (err error) } func (c *context) JSONBlob(code int, b []byte) (err error) { - return c.Blob(code, MIMEApplicationJSONCharsetUTF8, b) + return c.Blob(code, MIMEApplicationJSON, b) } func (c *context) JSONP(code int, callback string, i interface{}) (err error) { @@ -642,8 +653,8 @@ func (c *context) Reset(r *http.Request, w http.ResponseWriter) { c.path = "" c.pnames = nil c.logger = nil - // NOTE: Don't reset because it has to have length c.echo.maxParam at all times - for i := 0; i < *c.echo.maxParam; i++ { + // NOTE: Don't reset because it has to have length c.echo.maxParam (or bigger) at all times + for i := 0; i < len(c.pvalues); i++ { c.pvalues[i] = "" } } diff --git a/vendor/github.com/labstack/echo/v4/context_fs.go b/vendor/github.com/labstack/echo/v4/context_fs.go index 1038f89..1c25baf 100644 --- a/vendor/github.com/labstack/echo/v4/context_fs.go +++ b/vendor/github.com/labstack/echo/v4/context_fs.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package echo import ( diff --git a/vendor/github.com/labstack/echo/v4/echo.go b/vendor/github.com/labstack/echo/v4/echo.go index 9924ac8..ab66b0d 100644 --- a/vendor/github.com/labstack/echo/v4/echo.go +++ b/vendor/github.com/labstack/echo/v4/echo.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + /* Package echo implements high performance, minimalist Go web framework. @@ -60,97 +63,95 @@ import ( "golang.org/x/net/http2/h2c" ) -type ( - // Echo is the top-level framework instance. - // - // Goroutine safety: Do not mutate Echo instance fields after server has started. Accessing these - // fields from handlers/middlewares and changing field values at the same time leads to data-races. - // Adding new routes after the server has been started is also not safe! 
- Echo struct { - filesystem - common - // startupMutex is mutex to lock Echo instance access during server configuration and startup. Useful for to get - // listener address info (on which interface/port was listener binded) without having data races. - startupMutex sync.RWMutex - colorer *color.Color +// Echo is the top-level framework instance. +// +// Goroutine safety: Do not mutate Echo instance fields after server has started. Accessing these +// fields from handlers/middlewares and changing field values at the same time leads to data-races. +// Adding new routes after the server has been started is also not safe! +type Echo struct { + filesystem + common + // startupMutex is mutex to lock Echo instance access during server configuration and startup. Useful for to get + // listener address info (on which interface/port was listener bound) without having data races. + startupMutex sync.RWMutex + colorer *color.Color - // premiddleware are middlewares that are run before routing is done. In case a pre-middleware returns - // an error the router is not executed and the request will end up in the global error handler. - premiddleware []MiddlewareFunc - middleware []MiddlewareFunc - maxParam *int - router *Router - routers map[string]*Router - pool sync.Pool + // premiddleware are middlewares that are run before routing is done. In case a pre-middleware returns + // an error the router is not executed and the request will end up in the global error handler. + premiddleware []MiddlewareFunc + middleware []MiddlewareFunc + maxParam *int + router *Router + routers map[string]*Router + pool sync.Pool - StdLogger *stdLog.Logger - Server *http.Server - TLSServer *http.Server - Listener net.Listener - TLSListener net.Listener - AutoTLSManager autocert.Manager - DisableHTTP2 bool - Debug bool - HideBanner bool - HidePort bool - HTTPErrorHandler HTTPErrorHandler - Binder Binder - JSONSerializer JSONSerializer - Validator Validator - Renderer Renderer - Logger Logger - IPExtractor IPExtractor - ListenerNetwork string + StdLogger *stdLog.Logger + Server *http.Server + TLSServer *http.Server + Listener net.Listener + TLSListener net.Listener + AutoTLSManager autocert.Manager + DisableHTTP2 bool + Debug bool + HideBanner bool + HidePort bool + HTTPErrorHandler HTTPErrorHandler + Binder Binder + JSONSerializer JSONSerializer + Validator Validator + Renderer Renderer + Logger Logger + IPExtractor IPExtractor + ListenerNetwork string - // OnAddRouteHandler is called when Echo adds new route to specific host router. - OnAddRouteHandler func(host string, route Route, handler HandlerFunc, middleware []MiddlewareFunc) - } + // OnAddRouteHandler is called when Echo adds new route to specific host router. + OnAddRouteHandler func(host string, route Route, handler HandlerFunc, middleware []MiddlewareFunc) +} - // Route contains a handler and information for matching against requests. - Route struct { - Method string `json:"method"` - Path string `json:"path"` - Name string `json:"name"` - } +// Route contains a handler and information for matching against requests. +type Route struct { + Method string `json:"method"` + Path string `json:"path"` + Name string `json:"name"` +} - // HTTPError represents an error that occurred while handling a request. - HTTPError struct { - Code int `json:"-"` - Message interface{} `json:"message"` - Internal error `json:"-"` // Stores the error returned by an external dependency - } +// HTTPError represents an error that occurred while handling a request. 
+type HTTPError struct { + Code int `json:"-"` + Message interface{} `json:"message"` + Internal error `json:"-"` // Stores the error returned by an external dependency +} - // MiddlewareFunc defines a function to process middleware. - MiddlewareFunc func(next HandlerFunc) HandlerFunc +// MiddlewareFunc defines a function to process middleware. +type MiddlewareFunc func(next HandlerFunc) HandlerFunc - // HandlerFunc defines a function to serve HTTP requests. - HandlerFunc func(c Context) error +// HandlerFunc defines a function to serve HTTP requests. +type HandlerFunc func(c Context) error - // HTTPErrorHandler is a centralized HTTP error handler. - HTTPErrorHandler func(err error, c Context) +// HTTPErrorHandler is a centralized HTTP error handler. +type HTTPErrorHandler func(err error, c Context) - // Validator is the interface that wraps the Validate function. - Validator interface { - Validate(i interface{}) error - } +// Validator is the interface that wraps the Validate function. +type Validator interface { + Validate(i interface{}) error +} - // JSONSerializer is the interface that encodes and decodes JSON to and from interfaces. - JSONSerializer interface { - Serialize(c Context, i interface{}, indent string) error - Deserialize(c Context, i interface{}) error - } +// JSONSerializer is the interface that encodes and decodes JSON to and from interfaces. +type JSONSerializer interface { + Serialize(c Context, i interface{}, indent string) error + Deserialize(c Context, i interface{}) error +} - // Renderer is the interface that wraps the Render function. - Renderer interface { - Render(io.Writer, string, interface{}, Context) error - } +// Renderer is the interface that wraps the Render function. +type Renderer interface { + Render(io.Writer, string, interface{}, Context) error +} - // Map defines a generic map of type `map[string]interface{}`. - Map map[string]interface{} +// Map defines a generic map of type `map[string]interface{}`. +type Map map[string]interface{} - // Common struct for Echo & Group. - common struct{} -) +// Common struct for Echo & Group. +type common struct{} // HTTP methods // NOTE: Deprecated, please use the stdlib constants directly instead. @@ -169,7 +170,12 @@ const ( // MIME types const ( - MIMEApplicationJSON = "application/json" + // MIMEApplicationJSON JavaScript Object Notation (JSON) https://www.rfc-editor.org/rfc/rfc8259 + MIMEApplicationJSON = "application/json" + // Deprecated: Please use MIMEApplicationJSON instead. JSON should be encoded using UTF-8 by default. + // No "charset" parameter is defined for this registration. + // Adding one really has no effect on compliant recipients. + // See RFC 8259, section 8.1. 
https://datatracker.ietf.org/doc/html/rfc8259#section-8.1 MIMEApplicationJSONCharsetUTF8 = MIMEApplicationJSON + "; " + charsetUTF8 MIMEApplicationJavaScript = "application/javascript" MIMEApplicationJavaScriptCharsetUTF8 = MIMEApplicationJavaScript + "; " + charsetUTF8 @@ -259,7 +265,7 @@ const ( const ( // Version of Echo - Version = "4.11.4" + Version = "4.12.0" website = "https://echo.labstack.com" // http://patorjk.com/software/taag/#p=display&f=Small%20Slant&t=Echo banner = ` @@ -274,21 +280,19 @@ ____________________________________O/_______ ` ) -var ( - methods = [...]string{ - http.MethodConnect, - http.MethodDelete, - http.MethodGet, - http.MethodHead, - http.MethodOptions, - http.MethodPatch, - http.MethodPost, - PROPFIND, - http.MethodPut, - http.MethodTrace, - REPORT, - } -) +var methods = [...]string{ + http.MethodConnect, + http.MethodDelete, + http.MethodGet, + http.MethodHead, + http.MethodOptions, + http.MethodPatch, + http.MethodPost, + PROPFIND, + http.MethodPut, + http.MethodTrace, + REPORT, +} // Errors var ( @@ -341,22 +345,23 @@ var ( ErrInvalidListenerNetwork = errors.New("invalid listener network") ) -// Error handlers -var ( - NotFoundHandler = func(c Context) error { - return ErrNotFound - } +// NotFoundHandler is the handler that router uses in case there was no matching route found. Returns an error that results +// HTTP 404 status code. +var NotFoundHandler = func(c Context) error { + return ErrNotFound +} - MethodNotAllowedHandler = func(c Context) error { - // See RFC 7231 section 7.4.1: An origin server MUST generate an Allow field in a 405 (Method Not Allowed) - // response and MAY do so in any other response. For disabled resources an empty Allow header may be returned - routerAllowMethods, ok := c.Get(ContextKeyHeaderAllow).(string) - if ok && routerAllowMethods != "" { - c.Response().Header().Set(HeaderAllow, routerAllowMethods) - } - return ErrMethodNotAllowed +// MethodNotAllowedHandler is the handler thar router uses in case there was no matching route found but there was +// another matching routes for that requested URL. Returns an error that results HTTP 405 Method Not Allowed status code. +var MethodNotAllowedHandler = func(c Context) error { + // See RFC 7231 section 7.4.1: An origin server MUST generate an Allow field in a 405 (Method Not Allowed) + // response and MAY do so in any other response. For disabled resources an empty Allow header may be returned + routerAllowMethods, ok := c.Get(ContextKeyHeaderAllow).(string) + if ok && routerAllowMethods != "" { + c.Response().Header().Set(HeaderAllow, routerAllowMethods) } -) + return ErrMethodNotAllowed +} // New creates an instance of Echo. func New() (e *Echo) { @@ -414,7 +419,7 @@ func (e *Echo) Routers() map[string]*Router { // // NOTE: In case errors happens in middleware call-chain that is returning from handler (which did not return an error). // When handler has already sent response (ala c.JSON()) and there is error in middleware that is returning from -// handler. Then the error that global error handler received will be ignored because we have already "commited" the +// handler. Then the error that global error handler received will be ignored because we have already "committed" the // response and status code header has been sent to the client. 
func (e *Echo) DefaultHTTPErrorHandler(err error, c Context) { diff --git a/vendor/github.com/labstack/echo/v4/echo_fs.go b/vendor/github.com/labstack/echo/v4/echo_fs.go index 9f83a03..a7b231f 100644 --- a/vendor/github.com/labstack/echo/v4/echo_fs.go +++ b/vendor/github.com/labstack/echo/v4/echo_fs.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package echo import ( diff --git a/vendor/github.com/labstack/echo/v4/group.go b/vendor/github.com/labstack/echo/v4/group.go index 749a5ca..eca25c9 100644 --- a/vendor/github.com/labstack/echo/v4/group.go +++ b/vendor/github.com/labstack/echo/v4/group.go @@ -1,21 +1,22 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package echo import ( "net/http" ) -type ( - // Group is a set of sub-routes for a specified route. It can be used for inner - // routes that share a common middleware or functionality that should be separate - // from the parent echo instance while still inheriting from it. - Group struct { - common - host string - prefix string - middleware []MiddlewareFunc - echo *Echo - } -) +// Group is a set of sub-routes for a specified route. It can be used for inner +// routes that share a common middleware or functionality that should be separate +// from the parent echo instance while still inheriting from it. +type Group struct { + common + host string + prefix string + middleware []MiddlewareFunc + echo *Echo +} // Use implements `Echo#Use()` for sub-routes within the Group. func (g *Group) Use(middleware ...MiddlewareFunc) { diff --git a/vendor/github.com/labstack/echo/v4/group_fs.go b/vendor/github.com/labstack/echo/v4/group_fs.go index aedc4c6..c1b7ec2 100644 --- a/vendor/github.com/labstack/echo/v4/group_fs.go +++ b/vendor/github.com/labstack/echo/v4/group_fs.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package echo import ( diff --git a/vendor/github.com/labstack/echo/v4/ip.go b/vendor/github.com/labstack/echo/v4/ip.go index 1bcd756..6aed8d6 100644 --- a/vendor/github.com/labstack/echo/v4/ip.go +++ b/vendor/github.com/labstack/echo/v4/ip.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package echo import ( @@ -64,7 +67,7 @@ XFF: "x" "x, a" "x, a, b" ``` In this case, use **first _untrustable_ IP reading from right**. Never use first one reading from left, as it is -configurable by client. Here "trustable" means "you are sure the IP address belongs to your infrastructre". +configurable by client. Here "trustable" means "you are sure the IP address belongs to your infrastructure". In above example, if `b` and `c` are trustable, the IP address of the client is `a` for both cases, never be `x`. In Echo, use `ExtractIPFromXFFHeader(...TrustOption)`. 
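The `ip.go` documentation above (and the hunk that follows, which tightens `ExtractIPFromRealIPHeader`) describes how the client IP should be picked from proxy headers by walking the X-Forwarded-For chain from the right and trusting only known infrastructure. A minimal sketch of wiring such an extractor, assuming the standard echo v4 API; the CIDR range and port are illustrative:

```go
package main

import (
	"net"
	"net/http"

	"github.com/labstack/echo/v4"
)

func main() {
	e := echo.New()

	// Trust loopback plus an illustrative internal proxy range, then take the
	// first untrusted hop reading the X-Forwarded-For header from the right.
	_, proxyNet, _ := net.ParseCIDR("10.0.0.0/8")
	e.IPExtractor = echo.ExtractIPFromXFFHeader(
		echo.TrustLoopback(true),
		echo.TrustIPRange(proxyNet),
	)

	e.GET("/ip", func(c echo.Context) error {
		// RealIP now reflects the extractor configured above.
		return c.String(http.StatusOK, c.RealIP())
	})

	e.Logger.Fatal(e.Start(":8080"))
}
```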
@@ -225,15 +228,21 @@ func extractIP(req *http.Request) string { func ExtractIPFromRealIPHeader(options ...TrustOption) IPExtractor { checker := newIPChecker(options) return func(req *http.Request) string { + directIP := extractIP(req) realIP := req.Header.Get(HeaderXRealIP) - if realIP != "" { + if realIP == "" { + return directIP + } + + if checker.trust(net.ParseIP(directIP)) { realIP = strings.TrimPrefix(realIP, "[") realIP = strings.TrimSuffix(realIP, "]") - if ip := net.ParseIP(realIP); ip != nil && checker.trust(ip) { + if rIP := net.ParseIP(realIP); rIP != nil { return realIP } } - return extractIP(req) + + return directIP } } diff --git a/vendor/github.com/labstack/echo/v4/json.go b/vendor/github.com/labstack/echo/v4/json.go index 16b2d05..6da0aaf 100644 --- a/vendor/github.com/labstack/echo/v4/json.go +++ b/vendor/github.com/labstack/echo/v4/json.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package echo import ( diff --git a/vendor/github.com/labstack/echo/v4/log.go b/vendor/github.com/labstack/echo/v4/log.go index 3f8de59..0acd9ff 100644 --- a/vendor/github.com/labstack/echo/v4/log.go +++ b/vendor/github.com/labstack/echo/v4/log.go @@ -1,41 +1,41 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package echo import ( - "io" - "github.com/labstack/gommon/log" + "io" ) -type ( - // Logger defines the logging interface. - Logger interface { - Output() io.Writer - SetOutput(w io.Writer) - Prefix() string - SetPrefix(p string) - Level() log.Lvl - SetLevel(v log.Lvl) - SetHeader(h string) - Print(i ...interface{}) - Printf(format string, args ...interface{}) - Printj(j log.JSON) - Debug(i ...interface{}) - Debugf(format string, args ...interface{}) - Debugj(j log.JSON) - Info(i ...interface{}) - Infof(format string, args ...interface{}) - Infoj(j log.JSON) - Warn(i ...interface{}) - Warnf(format string, args ...interface{}) - Warnj(j log.JSON) - Error(i ...interface{}) - Errorf(format string, args ...interface{}) - Errorj(j log.JSON) - Fatal(i ...interface{}) - Fatalj(j log.JSON) - Fatalf(format string, args ...interface{}) - Panic(i ...interface{}) - Panicj(j log.JSON) - Panicf(format string, args ...interface{}) - } -) +// Logger defines the logging interface. 
+type Logger interface { + Output() io.Writer + SetOutput(w io.Writer) + Prefix() string + SetPrefix(p string) + Level() log.Lvl + SetLevel(v log.Lvl) + SetHeader(h string) + Print(i ...interface{}) + Printf(format string, args ...interface{}) + Printj(j log.JSON) + Debug(i ...interface{}) + Debugf(format string, args ...interface{}) + Debugj(j log.JSON) + Info(i ...interface{}) + Infof(format string, args ...interface{}) + Infoj(j log.JSON) + Warn(i ...interface{}) + Warnf(format string, args ...interface{}) + Warnj(j log.JSON) + Error(i ...interface{}) + Errorf(format string, args ...interface{}) + Errorj(j log.JSON) + Fatal(i ...interface{}) + Fatalj(j log.JSON) + Fatalf(format string, args ...interface{}) + Panic(i ...interface{}) + Panicj(j log.JSON) + Panicf(format string, args ...interface{}) +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/basic_auth.go b/vendor/github.com/labstack/echo/v4/middleware/basic_auth.go index f9e8caa..9285f29 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/basic_auth.go +++ b/vendor/github.com/labstack/echo/v4/middleware/basic_auth.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( @@ -9,37 +12,35 @@ import ( "github.com/labstack/echo/v4" ) -type ( - // BasicAuthConfig defines the config for BasicAuth middleware. - BasicAuthConfig struct { - // Skipper defines a function to skip middleware. - Skipper Skipper +// BasicAuthConfig defines the config for BasicAuth middleware. +type BasicAuthConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper - // Validator is a function to validate BasicAuth credentials. - // Required. - Validator BasicAuthValidator + // Validator is a function to validate BasicAuth credentials. + // Required. + Validator BasicAuthValidator - // Realm is a string to define realm attribute of BasicAuth. - // Default value "Restricted". - Realm string - } + // Realm is a string to define realm attribute of BasicAuth. + // Default value "Restricted". + Realm string +} - // BasicAuthValidator defines a function to validate BasicAuth credentials. - BasicAuthValidator func(string, string, echo.Context) (bool, error) -) +// BasicAuthValidator defines a function to validate BasicAuth credentials. +// The function should return a boolean indicating whether the credentials are valid, +// and an error if any error occurs during the validation process. +type BasicAuthValidator func(string, string, echo.Context) (bool, error) const ( basic = "basic" defaultRealm = "Restricted" ) -var ( - // DefaultBasicAuthConfig is the default BasicAuth middleware config. - DefaultBasicAuthConfig = BasicAuthConfig{ - Skipper: DefaultSkipper, - Realm: defaultRealm, - } -) +// DefaultBasicAuthConfig is the default BasicAuth middleware config. +var DefaultBasicAuthConfig = BasicAuthConfig{ + Skipper: DefaultSkipper, + Realm: defaultRealm, +} // BasicAuth returns an BasicAuth middleware. 
// diff --git a/vendor/github.com/labstack/echo/v4/middleware/body_dump.go b/vendor/github.com/labstack/echo/v4/middleware/body_dump.go index fa7891b..b06f762 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/body_dump.go +++ b/vendor/github.com/labstack/echo/v4/middleware/body_dump.go @@ -1,8 +1,12 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( "bufio" "bytes" + "errors" "io" "net" "net/http" @@ -10,32 +14,28 @@ import ( "github.com/labstack/echo/v4" ) -type ( - // BodyDumpConfig defines the config for BodyDump middleware. - BodyDumpConfig struct { - // Skipper defines a function to skip middleware. - Skipper Skipper +// BodyDumpConfig defines the config for BodyDump middleware. +type BodyDumpConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper - // Handler receives request and response payload. - // Required. - Handler BodyDumpHandler - } + // Handler receives request and response payload. + // Required. + Handler BodyDumpHandler +} - // BodyDumpHandler receives the request and response payload. - BodyDumpHandler func(echo.Context, []byte, []byte) +// BodyDumpHandler receives the request and response payload. +type BodyDumpHandler func(echo.Context, []byte, []byte) - bodyDumpResponseWriter struct { - io.Writer - http.ResponseWriter - } -) +type bodyDumpResponseWriter struct { + io.Writer + http.ResponseWriter +} -var ( - // DefaultBodyDumpConfig is the default BodyDump middleware config. - DefaultBodyDumpConfig = BodyDumpConfig{ - Skipper: DefaultSkipper, - } -) +// DefaultBodyDumpConfig is the default BodyDump middleware config. +var DefaultBodyDumpConfig = BodyDumpConfig{ + Skipper: DefaultSkipper, +} // BodyDump returns a BodyDump middleware. // @@ -98,9 +98,16 @@ func (w *bodyDumpResponseWriter) Write(b []byte) (int, error) { } func (w *bodyDumpResponseWriter) Flush() { - w.ResponseWriter.(http.Flusher).Flush() + err := responseControllerFlush(w.ResponseWriter) + if err != nil && errors.Is(err, http.ErrNotSupported) { + panic(errors.New("response writer flushing is not supported")) + } } func (w *bodyDumpResponseWriter) Hijack() (net.Conn, *bufio.ReadWriter, error) { - return w.ResponseWriter.(http.Hijacker).Hijack() + return responseControllerHijack(w.ResponseWriter) +} + +func (w *bodyDumpResponseWriter) Unwrap() http.ResponseWriter { + return w.ResponseWriter } diff --git a/vendor/github.com/labstack/echo/v4/middleware/body_limit.go b/vendor/github.com/labstack/echo/v4/middleware/body_limit.go index 99e3ac5..7d3c665 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/body_limit.go +++ b/vendor/github.com/labstack/echo/v4/middleware/body_limit.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( @@ -9,31 +12,27 @@ import ( "github.com/labstack/gommon/bytes" ) -type ( - // BodyLimitConfig defines the config for BodyLimit middleware. - BodyLimitConfig struct { - // Skipper defines a function to skip middleware. - Skipper Skipper +// BodyLimitConfig defines the config for BodyLimit middleware. +type BodyLimitConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper - // Maximum allowed size for a request body, it can be specified - // as `4x` or `4xB`, where x is one of the multiple from K, M, G, T or P. 
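Besides the type reshuffling, the body_dump.go hunk routes Flush and Hijack through the responseController helpers and adds Unwrap, so the wrapped writer keeps working behind http.ResponseController. A sketch of registering the middleware itself, assuming the BodyDumpConfig shown above (the logging handler is illustrative only):

```go
package main

import (
	"github.com/labstack/echo/v4"
	"github.com/labstack/echo/v4/middleware"
)

// registerBodyDump logs request and response payload sizes for every call.
func registerBodyDump(e *echo.Echo) {
	e.Use(middleware.BodyDumpWithConfig(middleware.BodyDumpConfig{
		Handler: func(c echo.Context, reqBody, resBody []byte) {
			c.Logger().Infof("body dump: request=%dB response=%dB", len(reqBody), len(resBody))
		},
	}))
}
```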
- Limit string `yaml:"limit"` - limit int64 - } + // Maximum allowed size for a request body, it can be specified + // as `4x` or `4xB`, where x is one of the multiple from K, M, G, T or P. + Limit string `yaml:"limit"` + limit int64 +} - limitedReader struct { - BodyLimitConfig - reader io.ReadCloser - read int64 - } -) +type limitedReader struct { + BodyLimitConfig + reader io.ReadCloser + read int64 +} -var ( - // DefaultBodyLimitConfig is the default BodyLimit middleware config. - DefaultBodyLimitConfig = BodyLimitConfig{ - Skipper: DefaultSkipper, - } -) +// DefaultBodyLimitConfig is the default BodyLimit middleware config. +var DefaultBodyLimitConfig = BodyLimitConfig{ + Skipper: DefaultSkipper, +} // BodyLimit returns a BodyLimit middleware. // diff --git a/vendor/github.com/labstack/echo/v4/middleware/compress.go b/vendor/github.com/labstack/echo/v4/middleware/compress.go index 3e9bd32..557bdc8 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/compress.go +++ b/vendor/github.com/labstack/echo/v4/middleware/compress.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( @@ -13,54 +16,50 @@ import ( "github.com/labstack/echo/v4" ) -type ( - // GzipConfig defines the config for Gzip middleware. - GzipConfig struct { - // Skipper defines a function to skip middleware. - Skipper Skipper +// GzipConfig defines the config for Gzip middleware. +type GzipConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper - // Gzip compression level. - // Optional. Default value -1. - Level int `yaml:"level"` + // Gzip compression level. + // Optional. Default value -1. + Level int `yaml:"level"` - // Length threshold before gzip compression is applied. - // Optional. Default value 0. - // - // Most of the time you will not need to change the default. Compressing - // a short response might increase the transmitted data because of the - // gzip format overhead. Compressing the response will also consume CPU - // and time on the server and the client (for decompressing). Depending on - // your use case such a threshold might be useful. - // - // See also: - // https://webmasters.stackexchange.com/questions/31750/what-is-recommended-minimum-object-size-for-gzip-performance-benefits - MinLength int - } + // Length threshold before gzip compression is applied. + // Optional. Default value 0. + // + // Most of the time you will not need to change the default. Compressing + // a short response might increase the transmitted data because of the + // gzip format overhead. Compressing the response will also consume CPU + // and time on the server and the client (for decompressing). Depending on + // your use case such a threshold might be useful. + // + // See also: + // https://webmasters.stackexchange.com/questions/31750/what-is-recommended-minimum-object-size-for-gzip-performance-benefits + MinLength int +} - gzipResponseWriter struct { - io.Writer - http.ResponseWriter - wroteHeader bool - wroteBody bool - minLength int - minLengthExceeded bool - buffer *bytes.Buffer - code int - } -) +type gzipResponseWriter struct { + io.Writer + http.ResponseWriter + wroteHeader bool + wroteBody bool + minLength int + minLengthExceeded bool + buffer *bytes.Buffer + code int +} const ( gzipScheme = "gzip" ) -var ( - // DefaultGzipConfig is the default Gzip middleware config. 
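In the BodyLimitConfig above, the exported Limit string accepts humanised sizes while the unexported limit holds the parsed byte count. A one-line usage sketch, assuming the vendored helper (the 2M cap is an arbitrary example):

```go
package main

import (
	"github.com/labstack/echo/v4"
	"github.com/labstack/echo/v4/middleware"
)

// registerBodyLimit rejects request bodies larger than 2 MB with 413.
func registerBodyLimit(e *echo.Echo) {
	e.Use(middleware.BodyLimit("2M"))
}
```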
- DefaultGzipConfig = GzipConfig{ - Skipper: DefaultSkipper, - Level: -1, - MinLength: 0, - } -) +// DefaultGzipConfig is the default Gzip middleware config. +var DefaultGzipConfig = GzipConfig{ + Skipper: DefaultSkipper, + Level: -1, + MinLength: 0, +} // Gzip returns a middleware which compresses HTTP response using gzip compression // scheme. @@ -191,13 +190,15 @@ func (w *gzipResponseWriter) Flush() { } w.Writer.(*gzip.Writer).Flush() - if flusher, ok := w.ResponseWriter.(http.Flusher); ok { - flusher.Flush() - } + _ = responseControllerFlush(w.ResponseWriter) +} + +func (w *gzipResponseWriter) Unwrap() http.ResponseWriter { + return w.ResponseWriter } func (w *gzipResponseWriter) Hijack() (net.Conn, *bufio.ReadWriter, error) { - return w.ResponseWriter.(http.Hijacker).Hijack() + return responseControllerHijack(w.ResponseWriter) } func (w *gzipResponseWriter) Push(target string, opts *http.PushOptions) error { diff --git a/vendor/github.com/labstack/echo/v4/middleware/context_timeout.go b/vendor/github.com/labstack/echo/v4/middleware/context_timeout.go index 1937693..e67173f 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/context_timeout.go +++ b/vendor/github.com/labstack/echo/v4/middleware/context_timeout.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( diff --git a/vendor/github.com/labstack/echo/v4/middleware/cors.go b/vendor/github.com/labstack/echo/v4/middleware/cors.go index 7ace2f2..7af6a76 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/cors.go +++ b/vendor/github.com/labstack/echo/v4/middleware/cors.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( @@ -9,113 +12,109 @@ import ( "github.com/labstack/echo/v4" ) -type ( - // CORSConfig defines the config for CORS middleware. - CORSConfig struct { - // Skipper defines a function to skip middleware. - Skipper Skipper +// CORSConfig defines the config for CORS middleware. +type CORSConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper - // AllowOrigins determines the value of the Access-Control-Allow-Origin - // response header. This header defines a list of origins that may access the - // resource. The wildcard characters '*' and '?' are supported and are - // converted to regex fragments '.*' and '.' accordingly. - // - // Security: use extreme caution when handling the origin, and carefully - // validate any logic. Remember that attackers may register hostile domain names. - // See https://blog.portswigger.net/2016/10/exploiting-cors-misconfigurations-for.html - // - // Optional. Default value []string{"*"}. - // - // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin - AllowOrigins []string `yaml:"allow_origins"` + // AllowOrigins determines the value of the Access-Control-Allow-Origin + // response header. This header defines a list of origins that may access the + // resource. The wildcard characters '*' and '?' are supported and are + // converted to regex fragments '.*' and '.' accordingly. + // + // Security: use extreme caution when handling the origin, and carefully + // validate any logic. Remember that attackers may register hostile domain names. + // See https://blog.portswigger.net/2016/10/exploiting-cors-misconfigurations-for.html + // + // Optional. Default value []string{"*"}. 
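The compress.go hunk keeps DefaultGzipConfig's values (Level -1, MinLength 0) and, as with body_dump.go, delegates Flush and Hijack to the responseController helpers and adds Unwrap. A configuration sketch over the fields shown above (level and threshold are illustrative choices):

```go
package main

import (
	"github.com/labstack/echo/v4"
	"github.com/labstack/echo/v4/middleware"
)

// registerGzip compresses responses, skipping payloads under 1 KiB.
func registerGzip(e *echo.Echo) {
	e.Use(middleware.GzipWithConfig(middleware.GzipConfig{
		Level:     5,
		MinLength: 1024,
	}))
}
```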
+ // + // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin + AllowOrigins []string `yaml:"allow_origins"` - // AllowOriginFunc is a custom function to validate the origin. It takes the - // origin as an argument and returns true if allowed or false otherwise. If - // an error is returned, it is returned by the handler. If this option is - // set, AllowOrigins is ignored. - // - // Security: use extreme caution when handling the origin, and carefully - // validate any logic. Remember that attackers may register hostile domain names. - // See https://blog.portswigger.net/2016/10/exploiting-cors-misconfigurations-for.html - // - // Optional. - AllowOriginFunc func(origin string) (bool, error) `yaml:"-"` + // AllowOriginFunc is a custom function to validate the origin. It takes the + // origin as an argument and returns true if allowed or false otherwise. If + // an error is returned, it is returned by the handler. If this option is + // set, AllowOrigins is ignored. + // + // Security: use extreme caution when handling the origin, and carefully + // validate any logic. Remember that attackers may register hostile domain names. + // See https://blog.portswigger.net/2016/10/exploiting-cors-misconfigurations-for.html + // + // Optional. + AllowOriginFunc func(origin string) (bool, error) `yaml:"-"` - // AllowMethods determines the value of the Access-Control-Allow-Methods - // response header. This header specified the list of methods allowed when - // accessing the resource. This is used in response to a preflight request. - // - // Optional. Default value DefaultCORSConfig.AllowMethods. - // If `allowMethods` is left empty, this middleware will fill for preflight - // request `Access-Control-Allow-Methods` header value - // from `Allow` header that echo.Router set into context. - // - // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Methods - AllowMethods []string `yaml:"allow_methods"` + // AllowMethods determines the value of the Access-Control-Allow-Methods + // response header. This header specified the list of methods allowed when + // accessing the resource. This is used in response to a preflight request. + // + // Optional. Default value DefaultCORSConfig.AllowMethods. + // If `allowMethods` is left empty, this middleware will fill for preflight + // request `Access-Control-Allow-Methods` header value + // from `Allow` header that echo.Router set into context. + // + // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Methods + AllowMethods []string `yaml:"allow_methods"` - // AllowHeaders determines the value of the Access-Control-Allow-Headers - // response header. This header is used in response to a preflight request to - // indicate which HTTP headers can be used when making the actual request. - // - // Optional. Default value []string{}. - // - // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Headers - AllowHeaders []string `yaml:"allow_headers"` + // AllowHeaders determines the value of the Access-Control-Allow-Headers + // response header. This header is used in response to a preflight request to + // indicate which HTTP headers can be used when making the actual request. + // + // Optional. Default value []string{}. 
+ // + // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Headers + AllowHeaders []string `yaml:"allow_headers"` - // AllowCredentials determines the value of the - // Access-Control-Allow-Credentials response header. This header indicates - // whether or not the response to the request can be exposed when the - // credentials mode (Request.credentials) is true. When used as part of a - // response to a preflight request, this indicates whether or not the actual - // request can be made using credentials. See also - // [MDN: Access-Control-Allow-Credentials]. - // - // Optional. Default value false, in which case the header is not set. - // - // Security: avoid using `AllowCredentials = true` with `AllowOrigins = *`. - // See "Exploiting CORS misconfigurations for Bitcoins and bounties", - // https://blog.portswigger.net/2016/10/exploiting-cors-misconfigurations-for.html - // - // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Credentials - AllowCredentials bool `yaml:"allow_credentials"` + // AllowCredentials determines the value of the + // Access-Control-Allow-Credentials response header. This header indicates + // whether or not the response to the request can be exposed when the + // credentials mode (Request.credentials) is true. When used as part of a + // response to a preflight request, this indicates whether or not the actual + // request can be made using credentials. See also + // [MDN: Access-Control-Allow-Credentials]. + // + // Optional. Default value false, in which case the header is not set. + // + // Security: avoid using `AllowCredentials = true` with `AllowOrigins = *`. + // See "Exploiting CORS misconfigurations for Bitcoins and bounties", + // https://blog.portswigger.net/2016/10/exploiting-cors-misconfigurations-for.html + // + // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Credentials + AllowCredentials bool `yaml:"allow_credentials"` - // UnsafeWildcardOriginWithAllowCredentials UNSAFE/INSECURE: allows wildcard '*' origin to be used with AllowCredentials - // flag. In that case we consider any origin allowed and send it back to the client with `Access-Control-Allow-Origin` header. - // - // This is INSECURE and potentially leads to [cross-origin](https://portswigger.net/research/exploiting-cors-misconfigurations-for-bitcoins-and-bounties) - // attacks. See: https://github.com/labstack/echo/issues/2400 for discussion on the subject. - // - // Optional. Default value is false. - UnsafeWildcardOriginWithAllowCredentials bool `yaml:"unsafe_wildcard_origin_with_allow_credentials"` + // UnsafeWildcardOriginWithAllowCredentials UNSAFE/INSECURE: allows wildcard '*' origin to be used with AllowCredentials + // flag. In that case we consider any origin allowed and send it back to the client with `Access-Control-Allow-Origin` header. + // + // This is INSECURE and potentially leads to [cross-origin](https://portswigger.net/research/exploiting-cors-misconfigurations-for-bitcoins-and-bounties) + // attacks. See: https://github.com/labstack/echo/issues/2400 for discussion on the subject. + // + // Optional. Default value is false. + UnsafeWildcardOriginWithAllowCredentials bool `yaml:"unsafe_wildcard_origin_with_allow_credentials"` - // ExposeHeaders determines the value of Access-Control-Expose-Headers, which - // defines a list of headers that clients are allowed to access. - // - // Optional. 
Default value []string{}, in which case the header is not set. - // - // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Expose-Header - ExposeHeaders []string `yaml:"expose_headers"` + // ExposeHeaders determines the value of Access-Control-Expose-Headers, which + // defines a list of headers that clients are allowed to access. + // + // Optional. Default value []string{}, in which case the header is not set. + // + // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Expose-Header + ExposeHeaders []string `yaml:"expose_headers"` - // MaxAge determines the value of the Access-Control-Max-Age response header. - // This header indicates how long (in seconds) the results of a preflight - // request can be cached. - // The header is set only if MaxAge != 0, negative value sends "0" which instructs browsers not to cache that response. - // - // Optional. Default value 0 - meaning header is not sent. - // - // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Max-Age - MaxAge int `yaml:"max_age"` - } -) + // MaxAge determines the value of the Access-Control-Max-Age response header. + // This header indicates how long (in seconds) the results of a preflight + // request can be cached. + // The header is set only if MaxAge != 0, negative value sends "0" which instructs browsers not to cache that response. + // + // Optional. Default value 0 - meaning header is not sent. + // + // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Max-Age + MaxAge int `yaml:"max_age"` +} -var ( - // DefaultCORSConfig is the default CORS middleware config. - DefaultCORSConfig = CORSConfig{ - Skipper: DefaultSkipper, - AllowOrigins: []string{"*"}, - AllowMethods: []string{http.MethodGet, http.MethodHead, http.MethodPut, http.MethodPatch, http.MethodPost, http.MethodDelete}, - } -) +// DefaultCORSConfig is the default CORS middleware config. +var DefaultCORSConfig = CORSConfig{ + Skipper: DefaultSkipper, + AllowOrigins: []string{"*"}, + AllowMethods: []string{http.MethodGet, http.MethodHead, http.MethodPut, http.MethodPatch, http.MethodPost, http.MethodDelete}, +} // CORS returns a Cross-Origin Resource Sharing (CORS) middleware. // See also [MDN: Cross-Origin Resource Sharing (CORS)]. diff --git a/vendor/github.com/labstack/echo/v4/middleware/csrf.go b/vendor/github.com/labstack/echo/v4/middleware/csrf.go index adf1221..92f4019 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/csrf.go +++ b/vendor/github.com/labstack/echo/v4/middleware/csrf.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( @@ -8,82 +11,78 @@ import ( "github.com/labstack/echo/v4" ) -type ( - // CSRFConfig defines the config for CSRF middleware. - CSRFConfig struct { - // Skipper defines a function to skip middleware. - Skipper Skipper +// CSRFConfig defines the config for CSRF middleware. +type CSRFConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper - // TokenLength is the length of the generated token. - TokenLength uint8 `yaml:"token_length"` - // Optional. Default value 32. + // TokenLength is the length of the generated token. + TokenLength uint8 `yaml:"token_length"` + // Optional. Default value 32. - // TokenLookup is a string in the form of ":" or ":,:" that is used - // to extract token from the request. - // Optional. Default value "header:X-CSRF-Token". 
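The cors.go hunk above is likewise a pure de-grouping of CORSConfig and DefaultCORSConfig. A configuration sketch exercising the documented fields, with a hypothetical allowed origin; note the field comments' warning against combining AllowCredentials with a wildcard origin:

```go
package main

import (
	"net/http"

	"github.com/labstack/echo/v4"
	"github.com/labstack/echo/v4/middleware"
)

// registerCORS allows a single trusted origin with credentials.
func registerCORS(e *echo.Echo) {
	e.Use(middleware.CORSWithConfig(middleware.CORSConfig{
		AllowOrigins:     []string{"https://app.example.com"}, // placeholder origin
		AllowMethods:     []string{http.MethodGet, http.MethodPost, http.MethodPut, http.MethodDelete},
		AllowHeaders:     []string{echo.HeaderAuthorization, echo.HeaderContentType},
		AllowCredentials: true, // acceptable here because AllowOrigins is not "*"
		MaxAge:           3600, // cache preflight results for an hour
	}))
}
```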
- // Possible values: - // - "header:" or "header::" - // - "query:" - // - "form:" - // Multiple sources example: - // - "header:X-CSRF-Token,query:csrf" - TokenLookup string `yaml:"token_lookup"` + // TokenLookup is a string in the form of ":" or ":,:" that is used + // to extract token from the request. + // Optional. Default value "header:X-CSRF-Token". + // Possible values: + // - "header:" or "header::" + // - "query:" + // - "form:" + // Multiple sources example: + // - "header:X-CSRF-Token,query:csrf" + TokenLookup string `yaml:"token_lookup"` - // Context key to store generated CSRF token into context. - // Optional. Default value "csrf". - ContextKey string `yaml:"context_key"` + // Context key to store generated CSRF token into context. + // Optional. Default value "csrf". + ContextKey string `yaml:"context_key"` - // Name of the CSRF cookie. This cookie will store CSRF token. - // Optional. Default value "csrf". - CookieName string `yaml:"cookie_name"` + // Name of the CSRF cookie. This cookie will store CSRF token. + // Optional. Default value "csrf". + CookieName string `yaml:"cookie_name"` - // Domain of the CSRF cookie. - // Optional. Default value none. - CookieDomain string `yaml:"cookie_domain"` + // Domain of the CSRF cookie. + // Optional. Default value none. + CookieDomain string `yaml:"cookie_domain"` - // Path of the CSRF cookie. - // Optional. Default value none. - CookiePath string `yaml:"cookie_path"` + // Path of the CSRF cookie. + // Optional. Default value none. + CookiePath string `yaml:"cookie_path"` - // Max age (in seconds) of the CSRF cookie. - // Optional. Default value 86400 (24hr). - CookieMaxAge int `yaml:"cookie_max_age"` + // Max age (in seconds) of the CSRF cookie. + // Optional. Default value 86400 (24hr). + CookieMaxAge int `yaml:"cookie_max_age"` - // Indicates if CSRF cookie is secure. - // Optional. Default value false. - CookieSecure bool `yaml:"cookie_secure"` + // Indicates if CSRF cookie is secure. + // Optional. Default value false. + CookieSecure bool `yaml:"cookie_secure"` - // Indicates if CSRF cookie is HTTP only. - // Optional. Default value false. - CookieHTTPOnly bool `yaml:"cookie_http_only"` + // Indicates if CSRF cookie is HTTP only. + // Optional. Default value false. + CookieHTTPOnly bool `yaml:"cookie_http_only"` - // Indicates SameSite mode of the CSRF cookie. - // Optional. Default value SameSiteDefaultMode. - CookieSameSite http.SameSite `yaml:"cookie_same_site"` + // Indicates SameSite mode of the CSRF cookie. + // Optional. Default value SameSiteDefaultMode. + CookieSameSite http.SameSite `yaml:"cookie_same_site"` - // ErrorHandler defines a function which is executed for returning custom errors. - ErrorHandler CSRFErrorHandler - } + // ErrorHandler defines a function which is executed for returning custom errors. + ErrorHandler CSRFErrorHandler +} - // CSRFErrorHandler is a function which is executed for creating custom errors. - CSRFErrorHandler func(err error, c echo.Context) error -) +// CSRFErrorHandler is a function which is executed for creating custom errors. +type CSRFErrorHandler func(err error, c echo.Context) error // ErrCSRFInvalid is returned when CSRF check fails var ErrCSRFInvalid = echo.NewHTTPError(http.StatusForbidden, "invalid csrf token") -var ( - // DefaultCSRFConfig is the default CSRF middleware config. 
- DefaultCSRFConfig = CSRFConfig{ - Skipper: DefaultSkipper, - TokenLength: 32, - TokenLookup: "header:" + echo.HeaderXCSRFToken, - ContextKey: "csrf", - CookieName: "_csrf", - CookieMaxAge: 86400, - CookieSameSite: http.SameSiteDefaultMode, - } -) +// DefaultCSRFConfig is the default CSRF middleware config. +var DefaultCSRFConfig = CSRFConfig{ + Skipper: DefaultSkipper, + TokenLength: 32, + TokenLookup: "header:" + echo.HeaderXCSRFToken, + ContextKey: "csrf", + CookieName: "_csrf", + CookieMaxAge: 86400, + CookieSameSite: http.SameSiteDefaultMode, +} // CSRF returns a Cross-Site Request Forgery (CSRF) middleware. // See: https://en.wikipedia.org/wiki/Cross-site_request_forgery diff --git a/vendor/github.com/labstack/echo/v4/middleware/decompress.go b/vendor/github.com/labstack/echo/v4/middleware/decompress.go index a73c973..0c56176 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/decompress.go +++ b/vendor/github.com/labstack/echo/v4/middleware/decompress.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( @@ -9,16 +12,14 @@ import ( "github.com/labstack/echo/v4" ) -type ( - // DecompressConfig defines the config for Decompress middleware. - DecompressConfig struct { - // Skipper defines a function to skip middleware. - Skipper Skipper +// DecompressConfig defines the config for Decompress middleware. +type DecompressConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper - // GzipDecompressPool defines an interface to provide the sync.Pool used to create/store Gzip readers - GzipDecompressPool Decompressor - } -) + // GzipDecompressPool defines an interface to provide the sync.Pool used to create/store Gzip readers + GzipDecompressPool Decompressor +} // GZIPEncoding content-encoding header if set to "gzip", decompress body contents. 
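The csrf.go hunk keeps DefaultCSRFConfig intact (32-byte tokens, X-CSRF-Token header lookup, _csrf cookie, 24 h max age). A configuration sketch built on those fields; the form lookup and cookie attributes are illustrative, not project defaults:

```go
package main

import (
	"net/http"

	"github.com/labstack/echo/v4"
	"github.com/labstack/echo/v4/middleware"
)

// registerCSRF issues a CSRF cookie and checks the token from header or form.
func registerCSRF(e *echo.Echo) {
	e.Use(middleware.CSRFWithConfig(middleware.CSRFConfig{
		TokenLookup:    "header:" + echo.HeaderXCSRFToken + ",form:csrf",
		CookieName:     "_csrf",
		CookieHTTPOnly: true,
		CookieSameSite: http.SameSiteStrictMode,
	}))
}
```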
const GZIPEncoding string = "gzip" @@ -28,13 +29,11 @@ type Decompressor interface { gzipDecompressPool() sync.Pool } -var ( - //DefaultDecompressConfig defines the config for decompress middleware - DefaultDecompressConfig = DecompressConfig{ - Skipper: DefaultSkipper, - GzipDecompressPool: &DefaultGzipDecompressPool{}, - } -) +// DefaultDecompressConfig defines the config for decompress middleware +var DefaultDecompressConfig = DecompressConfig{ + Skipper: DefaultSkipper, + GzipDecompressPool: &DefaultGzipDecompressPool{}, +} // DefaultGzipDecompressPool is the default implementation of Decompressor interface type DefaultGzipDecompressPool struct { diff --git a/vendor/github.com/labstack/echo/v4/middleware/extractor.go b/vendor/github.com/labstack/echo/v4/middleware/extractor.go index ade7a48..3f27414 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/extractor.go +++ b/vendor/github.com/labstack/echo/v4/middleware/extractor.go @@ -1,12 +1,14 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( "errors" "fmt" + "github.com/labstack/echo/v4" "net/textproto" "strings" - - "github.com/labstack/echo/v4" ) const ( diff --git a/vendor/github.com/labstack/echo/v4/middleware/jwt.go b/vendor/github.com/labstack/echo/v4/middleware/jwt.go index 8933528..a6bf16f 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/jwt.go +++ b/vendor/github.com/labstack/echo/v4/middleware/jwt.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + //go:build go1.15 // +build go1.15 @@ -6,146 +9,141 @@ package middleware import ( "errors" "fmt" - "net/http" - "reflect" - "github.com/golang-jwt/jwt" "github.com/labstack/echo/v4" + "net/http" + "reflect" ) -type ( - // JWTConfig defines the config for JWT middleware. - JWTConfig struct { - // Skipper defines a function to skip middleware. - Skipper Skipper +// JWTConfig defines the config for JWT middleware. +type JWTConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper - // BeforeFunc defines a function which is executed just before the middleware. - BeforeFunc BeforeFunc + // BeforeFunc defines a function which is executed just before the middleware. + BeforeFunc BeforeFunc - // SuccessHandler defines a function which is executed for a valid token before middleware chain continues with next - // middleware or handler. - SuccessHandler JWTSuccessHandler + // SuccessHandler defines a function which is executed for a valid token before middleware chain continues with next + // middleware or handler. + SuccessHandler JWTSuccessHandler - // ErrorHandler defines a function which is executed for an invalid token. - // It may be used to define a custom JWT error. - ErrorHandler JWTErrorHandler + // ErrorHandler defines a function which is executed for an invalid token. + // It may be used to define a custom JWT error. + ErrorHandler JWTErrorHandler - // ErrorHandlerWithContext is almost identical to ErrorHandler, but it's passed the current context. - ErrorHandlerWithContext JWTErrorHandlerWithContext + // ErrorHandlerWithContext is almost identical to ErrorHandler, but it's passed the current context. + ErrorHandlerWithContext JWTErrorHandlerWithContext - // ContinueOnIgnoredError allows the next middleware/handler to be called when ErrorHandlerWithContext decides to - // ignore the error (by returning `nil`). 
- // This is useful when parts of your site/api allow public access and some authorized routes provide extra functionality. - // In that case you can use ErrorHandlerWithContext to set a default public JWT token value in the request context - // and continue. Some logic down the remaining execution chain needs to check that (public) token value then. - ContinueOnIgnoredError bool + // ContinueOnIgnoredError allows the next middleware/handler to be called when ErrorHandlerWithContext decides to + // ignore the error (by returning `nil`). + // This is useful when parts of your site/api allow public access and some authorized routes provide extra functionality. + // In that case you can use ErrorHandlerWithContext to set a default public JWT token value in the request context + // and continue. Some logic down the remaining execution chain needs to check that (public) token value then. + ContinueOnIgnoredError bool - // Signing key to validate token. - // This is one of the three options to provide a token validation key. - // The order of precedence is a user-defined KeyFunc, SigningKeys and SigningKey. - // Required if neither user-defined KeyFunc nor SigningKeys is provided. - SigningKey interface{} + // Signing key to validate token. + // This is one of the three options to provide a token validation key. + // The order of precedence is a user-defined KeyFunc, SigningKeys and SigningKey. + // Required if neither user-defined KeyFunc nor SigningKeys is provided. + SigningKey interface{} - // Map of signing keys to validate token with kid field usage. - // This is one of the three options to provide a token validation key. - // The order of precedence is a user-defined KeyFunc, SigningKeys and SigningKey. - // Required if neither user-defined KeyFunc nor SigningKey is provided. - SigningKeys map[string]interface{} + // Map of signing keys to validate token with kid field usage. + // This is one of the three options to provide a token validation key. + // The order of precedence is a user-defined KeyFunc, SigningKeys and SigningKey. + // Required if neither user-defined KeyFunc nor SigningKey is provided. + SigningKeys map[string]interface{} - // Signing method used to check the token's signing algorithm. - // Optional. Default value HS256. - SigningMethod string + // Signing method used to check the token's signing algorithm. + // Optional. Default value HS256. + SigningMethod string - // Context key to store user information from the token into context. - // Optional. Default value "user". - ContextKey string + // Context key to store user information from the token into context. + // Optional. Default value "user". + ContextKey string - // Claims are extendable claims data defining token content. Used by default ParseTokenFunc implementation. - // Not used if custom ParseTokenFunc is set. - // Optional. Default value jwt.MapClaims - Claims jwt.Claims + // Claims are extendable claims data defining token content. Used by default ParseTokenFunc implementation. + // Not used if custom ParseTokenFunc is set. + // Optional. Default value jwt.MapClaims + Claims jwt.Claims - // TokenLookup is a string in the form of ":" or ":,:" that is used - // to extract token from the request. - // Optional. Default value "header:Authorization". - // Possible values: - // - "header:" or "header::" - // `` is argument value to cut/trim prefix of the extracted value. 
This is useful if header - // value has static prefix like `Authorization: ` where part that we - // want to cut is ` ` note the space at the end. - // In case of JWT tokens `Authorization: Bearer ` prefix we cut is `Bearer `. - // If prefix is left empty the whole value is returned. - // - "query:" - // - "param:" - // - "cookie:" - // - "form:" - // Multiple sources example: - // - "header:Authorization,cookie:myowncookie" - TokenLookup string + // TokenLookup is a string in the form of ":" or ":,:" that is used + // to extract token from the request. + // Optional. Default value "header:Authorization". + // Possible values: + // - "header:" or "header::" + // `` is argument value to cut/trim prefix of the extracted value. This is useful if header + // value has static prefix like `Authorization: ` where part that we + // want to cut is ` ` note the space at the end. + // In case of JWT tokens `Authorization: Bearer ` prefix we cut is `Bearer `. + // If prefix is left empty the whole value is returned. + // - "query:" + // - "param:" + // - "cookie:" + // - "form:" + // Multiple sources example: + // - "header:Authorization,cookie:myowncookie" + TokenLookup string - // TokenLookupFuncs defines a list of user-defined functions that extract JWT token from the given context. - // This is one of the two options to provide a token extractor. - // The order of precedence is user-defined TokenLookupFuncs, and TokenLookup. - // You can also provide both if you want. - TokenLookupFuncs []ValuesExtractor + // TokenLookupFuncs defines a list of user-defined functions that extract JWT token from the given context. + // This is one of the two options to provide a token extractor. + // The order of precedence is user-defined TokenLookupFuncs, and TokenLookup. + // You can also provide both if you want. + TokenLookupFuncs []ValuesExtractor - // AuthScheme to be used in the Authorization header. - // Optional. Default value "Bearer". - AuthScheme string + // AuthScheme to be used in the Authorization header. + // Optional. Default value "Bearer". + AuthScheme string - // KeyFunc defines a user-defined function that supplies the public key for a token validation. - // The function shall take care of verifying the signing algorithm and selecting the proper key. - // A user-defined KeyFunc can be useful if tokens are issued by an external party. - // Used by default ParseTokenFunc implementation. - // - // When a user-defined KeyFunc is provided, SigningKey, SigningKeys, and SigningMethod are ignored. - // This is one of the three options to provide a token validation key. - // The order of precedence is a user-defined KeyFunc, SigningKeys and SigningKey. - // Required if neither SigningKeys nor SigningKey is provided. - // Not used if custom ParseTokenFunc is set. - // Default to an internal implementation verifying the signing algorithm and selecting the proper key. - KeyFunc jwt.Keyfunc + // KeyFunc defines a user-defined function that supplies the public key for a token validation. + // The function shall take care of verifying the signing algorithm and selecting the proper key. + // A user-defined KeyFunc can be useful if tokens are issued by an external party. + // Used by default ParseTokenFunc implementation. + // + // When a user-defined KeyFunc is provided, SigningKey, SigningKeys, and SigningMethod are ignored. + // This is one of the three options to provide a token validation key. + // The order of precedence is a user-defined KeyFunc, SigningKeys and SigningKey. 
+ // Required if neither SigningKeys nor SigningKey is provided. + // Not used if custom ParseTokenFunc is set. + // Default to an internal implementation verifying the signing algorithm and selecting the proper key. + KeyFunc jwt.Keyfunc - // ParseTokenFunc defines a user-defined function that parses token from given auth. Returns an error when token - // parsing fails or parsed token is invalid. - // Defaults to implementation using `github.com/golang-jwt/jwt` as JWT implementation library - ParseTokenFunc func(auth string, c echo.Context) (interface{}, error) - } + // ParseTokenFunc defines a user-defined function that parses token from given auth. Returns an error when token + // parsing fails or parsed token is invalid. + // Defaults to implementation using `github.com/golang-jwt/jwt` as JWT implementation library + ParseTokenFunc func(auth string, c echo.Context) (interface{}, error) +} - // JWTSuccessHandler defines a function which is executed for a valid token. - JWTSuccessHandler func(c echo.Context) +// JWTSuccessHandler defines a function which is executed for a valid token. +type JWTSuccessHandler func(c echo.Context) - // JWTErrorHandler defines a function which is executed for an invalid token. - JWTErrorHandler func(err error) error +// JWTErrorHandler defines a function which is executed for an invalid token. +type JWTErrorHandler func(err error) error - // JWTErrorHandlerWithContext is almost identical to JWTErrorHandler, but it's passed the current context. - JWTErrorHandlerWithContext func(err error, c echo.Context) error -) +// JWTErrorHandlerWithContext is almost identical to JWTErrorHandler, but it's passed the current context. +type JWTErrorHandlerWithContext func(err error, c echo.Context) error // Algorithms const ( AlgorithmHS256 = "HS256" ) -// Errors -var ( - ErrJWTMissing = echo.NewHTTPError(http.StatusBadRequest, "missing or malformed jwt") - ErrJWTInvalid = echo.NewHTTPError(http.StatusUnauthorized, "invalid or expired jwt") -) +// ErrJWTMissing is error that is returned when no JWToken was extracted from the request. +var ErrJWTMissing = echo.NewHTTPError(http.StatusBadRequest, "missing or malformed jwt") -var ( - // DefaultJWTConfig is the default JWT auth middleware config. - DefaultJWTConfig = JWTConfig{ - Skipper: DefaultSkipper, - SigningMethod: AlgorithmHS256, - ContextKey: "user", - TokenLookup: "header:" + echo.HeaderAuthorization, - TokenLookupFuncs: nil, - AuthScheme: "Bearer", - Claims: jwt.MapClaims{}, - KeyFunc: nil, - } -) +// ErrJWTInvalid is error that is returned when middleware could not parse JWT correctly. +var ErrJWTInvalid = echo.NewHTTPError(http.StatusUnauthorized, "invalid or expired jwt") + +// DefaultJWTConfig is the default JWT auth middleware config. +var DefaultJWTConfig = JWTConfig{ + Skipper: DefaultSkipper, + SigningMethod: AlgorithmHS256, + ContextKey: "user", + TokenLookup: "header:" + echo.HeaderAuthorization, + TokenLookupFuncs: nil, + AuthScheme: "Bearer", + Claims: jwt.MapClaims{}, + KeyFunc: nil, +} // JWT returns a JSON Web Token (JWT) auth middleware. 
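With the JWTConfig above unchanged apart from layout and the split error doc comments, the middleware is still configured the usual way. A minimal sketch assuming an HS256 signing secret (placeholder value) together with the default "user" context key and jwt.MapClaims claims type:

```go
package main

import (
	"net/http"

	"github.com/golang-jwt/jwt"
	"github.com/labstack/echo/v4"
	"github.com/labstack/echo/v4/middleware"
)

// registerJWT guards the /api group and exposes the parsed claims on /api/me.
func registerJWT(e *echo.Echo) {
	api := e.Group("/api")
	api.Use(middleware.JWTWithConfig(middleware.JWTConfig{
		SigningKey: []byte("placeholder-hs256-secret"),
	}))

	api.GET("/me", func(c echo.Context) error {
		token := c.Get("user").(*jwt.Token)    // ContextKey defaults to "user"
		claims := token.Claims.(jwt.MapClaims) // Claims defaults to jwt.MapClaims
		return c.JSON(http.StatusOK, claims)
	})
}
```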
// diff --git a/vendor/github.com/labstack/echo/v4/middleware/key_auth.go b/vendor/github.com/labstack/echo/v4/middleware/key_auth.go index caaa465..79bee20 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/key_auth.go +++ b/vendor/github.com/labstack/echo/v4/middleware/key_auth.go @@ -1,75 +1,73 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( "errors" - "net/http" - "github.com/labstack/echo/v4" + "net/http" ) -type ( - // KeyAuthConfig defines the config for KeyAuth middleware. - KeyAuthConfig struct { - // Skipper defines a function to skip middleware. - Skipper Skipper +// KeyAuthConfig defines the config for KeyAuth middleware. +type KeyAuthConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper - // KeyLookup is a string in the form of ":" or ":,:" that is used - // to extract key from the request. - // Optional. Default value "header:Authorization". - // Possible values: - // - "header:" or "header::" - // `` is argument value to cut/trim prefix of the extracted value. This is useful if header - // value has static prefix like `Authorization: ` where part that we - // want to cut is ` ` note the space at the end. - // In case of basic authentication `Authorization: Basic ` prefix we want to remove is `Basic `. - // - "query:" - // - "form:" - // - "cookie:" - // Multiple sources example: - // - "header:Authorization,header:X-Api-Key" - KeyLookup string + // KeyLookup is a string in the form of ":" or ":,:" that is used + // to extract key from the request. + // Optional. Default value "header:Authorization". + // Possible values: + // - "header:" or "header::" + // `` is argument value to cut/trim prefix of the extracted value. This is useful if header + // value has static prefix like `Authorization: ` where part that we + // want to cut is ` ` note the space at the end. + // In case of basic authentication `Authorization: Basic ` prefix we want to remove is `Basic `. + // - "query:" + // - "form:" + // - "cookie:" + // Multiple sources example: + // - "header:Authorization,header:X-Api-Key" + KeyLookup string - // AuthScheme to be used in the Authorization header. - // Optional. Default value "Bearer". - AuthScheme string + // AuthScheme to be used in the Authorization header. + // Optional. Default value "Bearer". + AuthScheme string - // Validator is a function to validate key. - // Required. - Validator KeyAuthValidator + // Validator is a function to validate key. + // Required. + Validator KeyAuthValidator - // ErrorHandler defines a function which is executed for an invalid key. - // It may be used to define a custom error. - ErrorHandler KeyAuthErrorHandler + // ErrorHandler defines a function which is executed for an invalid key. + // It may be used to define a custom error. + ErrorHandler KeyAuthErrorHandler - // ContinueOnIgnoredError allows the next middleware/handler to be called when ErrorHandler decides to - // ignore the error (by returning `nil`). - // This is useful when parts of your site/api allow public access and some authorized routes provide extra functionality. - // In that case you can use ErrorHandler to set a default public key auth value in the request context - // and continue. Some logic down the remaining execution chain needs to check that (public) key auth value then. 
- ContinueOnIgnoredError bool - } + // ContinueOnIgnoredError allows the next middleware/handler to be called when ErrorHandler decides to + // ignore the error (by returning `nil`). + // This is useful when parts of your site/api allow public access and some authorized routes provide extra functionality. + // In that case you can use ErrorHandler to set a default public key auth value in the request context + // and continue. Some logic down the remaining execution chain needs to check that (public) key auth value then. + ContinueOnIgnoredError bool +} - // KeyAuthValidator defines a function to validate KeyAuth credentials. - KeyAuthValidator func(auth string, c echo.Context) (bool, error) +// KeyAuthValidator defines a function to validate KeyAuth credentials. +type KeyAuthValidator func(auth string, c echo.Context) (bool, error) - // KeyAuthErrorHandler defines a function which is executed for an invalid key. - KeyAuthErrorHandler func(err error, c echo.Context) error -) - -var ( - // DefaultKeyAuthConfig is the default KeyAuth middleware config. - DefaultKeyAuthConfig = KeyAuthConfig{ - Skipper: DefaultSkipper, - KeyLookup: "header:" + echo.HeaderAuthorization, - AuthScheme: "Bearer", - } -) +// KeyAuthErrorHandler defines a function which is executed for an invalid key. +type KeyAuthErrorHandler func(err error, c echo.Context) error // ErrKeyAuthMissing is error type when KeyAuth middleware is unable to extract value from lookups type ErrKeyAuthMissing struct { Err error } +// DefaultKeyAuthConfig is the default KeyAuth middleware config. +var DefaultKeyAuthConfig = KeyAuthConfig{ + Skipper: DefaultSkipper, + KeyLookup: "header:" + echo.HeaderAuthorization, + AuthScheme: "Bearer", +} + // Error returns errors text func (e *ErrKeyAuthMissing) Error() string { return e.Err.Error() diff --git a/vendor/github.com/labstack/echo/v4/middleware/logger.go b/vendor/github.com/labstack/echo/v4/middleware/logger.go index 7958d87..910fce8 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/logger.go +++ b/vendor/github.com/labstack/echo/v4/middleware/logger.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( @@ -14,77 +17,73 @@ import ( "github.com/valyala/fasttemplate" ) -type ( - // LoggerConfig defines the config for Logger middleware. - LoggerConfig struct { - // Skipper defines a function to skip middleware. - Skipper Skipper +// LoggerConfig defines the config for Logger middleware. +type LoggerConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper - // Tags to construct the logger format. - // - // - time_unix - // - time_unix_milli - // - time_unix_micro - // - time_unix_nano - // - time_rfc3339 - // - time_rfc3339_nano - // - time_custom - // - id (Request ID) - // - remote_ip - // - uri - // - host - // - method - // - path - // - route - // - protocol - // - referer - // - user_agent - // - status - // - error - // - latency (In nanoseconds) - // - latency_human (Human readable) - // - bytes_in (Bytes received) - // - bytes_out (Bytes sent) - // - header: - // - query: - // - form: - // - custom (see CustomTagFunc field) - // - // Example "${remote_ip} ${status}" - // - // Optional. Default value DefaultLoggerConfig.Format. - Format string `yaml:"format"` + // Tags to construct the logger format. 
+ // + // - time_unix + // - time_unix_milli + // - time_unix_micro + // - time_unix_nano + // - time_rfc3339 + // - time_rfc3339_nano + // - time_custom + // - id (Request ID) + // - remote_ip + // - uri + // - host + // - method + // - path + // - route + // - protocol + // - referer + // - user_agent + // - status + // - error + // - latency (In nanoseconds) + // - latency_human (Human readable) + // - bytes_in (Bytes received) + // - bytes_out (Bytes sent) + // - header: + // - query: + // - form: + // - custom (see CustomTagFunc field) + // + // Example "${remote_ip} ${status}" + // + // Optional. Default value DefaultLoggerConfig.Format. + Format string `yaml:"format"` - // Optional. Default value DefaultLoggerConfig.CustomTimeFormat. - CustomTimeFormat string `yaml:"custom_time_format"` + // Optional. Default value DefaultLoggerConfig.CustomTimeFormat. + CustomTimeFormat string `yaml:"custom_time_format"` - // CustomTagFunc is function called for `${custom}` tag to output user implemented text by writing it to buf. - // Make sure that outputted text creates valid JSON string with other logged tags. - // Optional. - CustomTagFunc func(c echo.Context, buf *bytes.Buffer) (int, error) + // CustomTagFunc is function called for `${custom}` tag to output user implemented text by writing it to buf. + // Make sure that outputted text creates valid JSON string with other logged tags. + // Optional. + CustomTagFunc func(c echo.Context, buf *bytes.Buffer) (int, error) - // Output is a writer where logs in JSON format are written. - // Optional. Default value os.Stdout. - Output io.Writer + // Output is a writer where logs in JSON format are written. + // Optional. Default value os.Stdout. + Output io.Writer - template *fasttemplate.Template - colorer *color.Color - pool *sync.Pool - } -) + template *fasttemplate.Template + colorer *color.Color + pool *sync.Pool +} -var ( - // DefaultLoggerConfig is the default Logger middleware config. - DefaultLoggerConfig = LoggerConfig{ - Skipper: DefaultSkipper, - Format: `{"time":"${time_rfc3339_nano}","id":"${id}","remote_ip":"${remote_ip}",` + - `"host":"${host}","method":"${method}","uri":"${uri}","user_agent":"${user_agent}",` + - `"status":${status},"error":"${error}","latency":${latency},"latency_human":"${latency_human}"` + - `,"bytes_in":${bytes_in},"bytes_out":${bytes_out}}` + "\n", - CustomTimeFormat: "2006-01-02 15:04:05.00000", - colorer: color.New(), - } -) +// DefaultLoggerConfig is the default Logger middleware config. +var DefaultLoggerConfig = LoggerConfig{ + Skipper: DefaultSkipper, + Format: `{"time":"${time_rfc3339_nano}","id":"${id}","remote_ip":"${remote_ip}",` + + `"host":"${host}","method":"${method}","uri":"${uri}","user_agent":"${user_agent}",` + + `"status":${status},"error":"${error}","latency":${latency},"latency_human":"${latency_human}"` + + `,"bytes_in":${bytes_in},"bytes_out":${bytes_out}}` + "\n", + CustomTimeFormat: "2006-01-02 15:04:05.00000", + colorer: color.New(), +} // Logger returns a middleware that logs HTTP requests. 
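The logger.go hunk leaves DefaultLoggerConfig's JSON format and custom time layout as they were. A sketch that narrows the format to a few of the documented tags and writes to stdout (the tag selection is arbitrary):

```go
package main

import (
	"os"

	"github.com/labstack/echo/v4"
	"github.com/labstack/echo/v4/middleware"
)

// registerLogger emits one compact JSON line per request.
func registerLogger(e *echo.Echo) {
	e.Use(middleware.LoggerWithConfig(middleware.LoggerConfig{
		Format: `{"time":"${time_rfc3339}","method":"${method}","uri":"${uri}",` +
			`"status":${status},"latency_human":"${latency_human}"}` + "\n",
		Output: os.Stdout,
	}))
}
```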
func Logger() echo.MiddlewareFunc { diff --git a/vendor/github.com/labstack/echo/v4/middleware/method_override.go b/vendor/github.com/labstack/echo/v4/middleware/method_override.go index 92b14d2..3991e10 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/method_override.go +++ b/vendor/github.com/labstack/echo/v4/middleware/method_override.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( @@ -6,28 +9,24 @@ import ( "github.com/labstack/echo/v4" ) -type ( - // MethodOverrideConfig defines the config for MethodOverride middleware. - MethodOverrideConfig struct { - // Skipper defines a function to skip middleware. - Skipper Skipper +// MethodOverrideConfig defines the config for MethodOverride middleware. +type MethodOverrideConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper - // Getter is a function that gets overridden method from the request. - // Optional. Default values MethodFromHeader(echo.HeaderXHTTPMethodOverride). - Getter MethodOverrideGetter - } + // Getter is a function that gets overridden method from the request. + // Optional. Default values MethodFromHeader(echo.HeaderXHTTPMethodOverride). + Getter MethodOverrideGetter +} - // MethodOverrideGetter is a function that gets overridden method from the request - MethodOverrideGetter func(echo.Context) string -) +// MethodOverrideGetter is a function that gets overridden method from the request +type MethodOverrideGetter func(echo.Context) string -var ( - // DefaultMethodOverrideConfig is the default MethodOverride middleware config. - DefaultMethodOverrideConfig = MethodOverrideConfig{ - Skipper: DefaultSkipper, - Getter: MethodFromHeader(echo.HeaderXHTTPMethodOverride), - } -) +// DefaultMethodOverrideConfig is the default MethodOverride middleware config. +var DefaultMethodOverrideConfig = MethodOverrideConfig{ + Skipper: DefaultSkipper, + Getter: MethodFromHeader(echo.HeaderXHTTPMethodOverride), +} // MethodOverride returns a MethodOverride middleware. // MethodOverride middleware checks for the overridden method from the request and diff --git a/vendor/github.com/labstack/echo/v4/middleware/middleware.go b/vendor/github.com/labstack/echo/v4/middleware/middleware.go index 664f71f..6f33cc5 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/middleware.go +++ b/vendor/github.com/labstack/echo/v4/middleware/middleware.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( @@ -9,14 +12,12 @@ import ( "github.com/labstack/echo/v4" ) -type ( - // Skipper defines a function to skip middleware. Returning true skips processing - // the middleware. - Skipper func(c echo.Context) bool +// Skipper defines a function to skip middleware. Returning true skips processing +// the middleware. +type Skipper func(c echo.Context) bool - // BeforeFunc defines a function which is executed just before the middleware. - BeforeFunc func(c echo.Context) -) +// BeforeFunc defines a function which is executed just before the middleware. +type BeforeFunc func(c echo.Context) func captureTokens(pattern *regexp.Regexp, input string) *strings.Replacer { groups := pattern.FindAllStringSubmatch(input, -1) @@ -53,7 +54,7 @@ func rewriteURL(rewriteRegex map[*regexp.Regexp]string, req *http.Request) error return nil } - // Depending how HTTP request is sent RequestURI could contain Scheme://Host/path or be just /path. 
+ // Depending on how HTTP request is sent RequestURI could contain Scheme://Host/path or be just /path. // We only want to use path part for rewriting and therefore trim prefix if it exists rawURI := req.RequestURI if rawURI != "" && rawURI[0] != '/' { diff --git a/vendor/github.com/labstack/echo/v4/middleware/proxy.go b/vendor/github.com/labstack/echo/v4/middleware/proxy.go index 16b00d6..495970a 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/proxy.go +++ b/vendor/github.com/labstack/echo/v4/middleware/proxy.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( @@ -19,117 +22,113 @@ import ( // TODO: Handle TLS proxy -type ( - // ProxyConfig defines the config for Proxy middleware. - ProxyConfig struct { - // Skipper defines a function to skip middleware. - Skipper Skipper +// ProxyConfig defines the config for Proxy middleware. +type ProxyConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper - // Balancer defines a load balancing technique. - // Required. - Balancer ProxyBalancer + // Balancer defines a load balancing technique. + // Required. + Balancer ProxyBalancer - // RetryCount defines the number of times a failed proxied request should be retried - // using the next available ProxyTarget. Defaults to 0, meaning requests are never retried. - RetryCount int + // RetryCount defines the number of times a failed proxied request should be retried + // using the next available ProxyTarget. Defaults to 0, meaning requests are never retried. + RetryCount int - // RetryFilter defines a function used to determine if a failed request to a - // ProxyTarget should be retried. The RetryFilter will only be called when the number - // of previous retries is less than RetryCount. If the function returns true, the - // request will be retried. The provided error indicates the reason for the request - // failure. When the ProxyTarget is unavailable, the error will be an instance of - // echo.HTTPError with a Code of http.StatusBadGateway. In all other cases, the error - // will indicate an internal error in the Proxy middleware. When a RetryFilter is not - // specified, all requests that fail with http.StatusBadGateway will be retried. A custom - // RetryFilter can be provided to only retry specific requests. Note that RetryFilter is - // only called when the request to the target fails, or an internal error in the Proxy - // middleware has occurred. Successful requests that return a non-200 response code cannot - // be retried. - RetryFilter func(c echo.Context, e error) bool + // RetryFilter defines a function used to determine if a failed request to a + // ProxyTarget should be retried. The RetryFilter will only be called when the number + // of previous retries is less than RetryCount. If the function returns true, the + // request will be retried. The provided error indicates the reason for the request + // failure. When the ProxyTarget is unavailable, the error will be an instance of + // echo.HTTPError with a Code of http.StatusBadGateway. In all other cases, the error + // will indicate an internal error in the Proxy middleware. When a RetryFilter is not + // specified, all requests that fail with http.StatusBadGateway will be retried. A custom + // RetryFilter can be provided to only retry specific requests. Note that RetryFilter is + // only called when the request to the target fails, or an internal error in the Proxy + // middleware has occurred. 
Successful requests that return a non-200 response code cannot + // be retried. + RetryFilter func(c echo.Context, e error) bool - // ErrorHandler defines a function which can be used to return custom errors from - // the Proxy middleware. ErrorHandler is only invoked when there has been - // either an internal error in the Proxy middleware or the ProxyTarget is - // unavailable. Due to the way requests are proxied, ErrorHandler is not invoked - // when a ProxyTarget returns a non-200 response. In these cases, the response - // is already written so errors cannot be modified. ErrorHandler is only - // invoked after all retry attempts have been exhausted. - ErrorHandler func(c echo.Context, err error) error + // ErrorHandler defines a function which can be used to return custom errors from + // the Proxy middleware. ErrorHandler is only invoked when there has been + // either an internal error in the Proxy middleware or the ProxyTarget is + // unavailable. Due to the way requests are proxied, ErrorHandler is not invoked + // when a ProxyTarget returns a non-200 response. In these cases, the response + // is already written so errors cannot be modified. ErrorHandler is only + // invoked after all retry attempts have been exhausted. + ErrorHandler func(c echo.Context, err error) error - // Rewrite defines URL path rewrite rules. The values captured in asterisk can be - // retrieved by index e.g. $1, $2 and so on. - // Examples: - // "/old": "/new", - // "/api/*": "/$1", - // "/js/*": "/public/javascripts/$1", - // "/users/*/orders/*": "/user/$1/order/$2", - Rewrite map[string]string + // Rewrite defines URL path rewrite rules. The values captured in asterisk can be + // retrieved by index e.g. $1, $2 and so on. + // Examples: + // "/old": "/new", + // "/api/*": "/$1", + // "/js/*": "/public/javascripts/$1", + // "/users/*/orders/*": "/user/$1/order/$2", + Rewrite map[string]string - // RegexRewrite defines rewrite rules using regexp.Rexexp with captures - // Every capture group in the values can be retrieved by index e.g. $1, $2 and so on. - // Example: - // "^/old/[0.9]+/": "/new", - // "^/api/.+?/(.*)": "/v2/$1", - RegexRewrite map[*regexp.Regexp]string + // RegexRewrite defines rewrite rules using regexp.Rexexp with captures + // Every capture group in the values can be retrieved by index e.g. $1, $2 and so on. + // Example: + // "^/old/[0.9]+/": "/new", + // "^/api/.+?/(.*)": "/v2/$1", + RegexRewrite map[*regexp.Regexp]string - // Context key to store selected ProxyTarget into context. - // Optional. Default value "target". - ContextKey string + // Context key to store selected ProxyTarget into context. + // Optional. Default value "target". + ContextKey string - // To customize the transport to remote. - // Examples: If custom TLS certificates are required. - Transport http.RoundTripper + // To customize the transport to remote. + // Examples: If custom TLS certificates are required. + Transport http.RoundTripper - // ModifyResponse defines function to modify response from ProxyTarget. - ModifyResponse func(*http.Response) error - } + // ModifyResponse defines function to modify response from ProxyTarget. + ModifyResponse func(*http.Response) error +} - // ProxyTarget defines the upstream target. - ProxyTarget struct { - Name string - URL *url.URL - Meta echo.Map - } +// ProxyTarget defines the upstream target. +type ProxyTarget struct { + Name string + URL *url.URL + Meta echo.Map +} - // ProxyBalancer defines an interface to implement a load balancing technique. 
- ProxyBalancer interface { - AddTarget(*ProxyTarget) bool - RemoveTarget(string) bool - Next(echo.Context) *ProxyTarget - } +// ProxyBalancer defines an interface to implement a load balancing technique. +type ProxyBalancer interface { + AddTarget(*ProxyTarget) bool + RemoveTarget(string) bool + Next(echo.Context) *ProxyTarget +} - // TargetProvider defines an interface that gives the opportunity for balancer - // to return custom errors when selecting target. - TargetProvider interface { - NextTarget(echo.Context) (*ProxyTarget, error) - } +// TargetProvider defines an interface that gives the opportunity for balancer +// to return custom errors when selecting target. +type TargetProvider interface { + NextTarget(echo.Context) (*ProxyTarget, error) +} - commonBalancer struct { - targets []*ProxyTarget - mutex sync.Mutex - } +type commonBalancer struct { + targets []*ProxyTarget + mutex sync.Mutex +} - // RandomBalancer implements a random load balancing technique. - randomBalancer struct { - commonBalancer - random *rand.Rand - } +// RandomBalancer implements a random load balancing technique. +type randomBalancer struct { + commonBalancer + random *rand.Rand +} - // RoundRobinBalancer implements a round-robin load balancing technique. - roundRobinBalancer struct { - commonBalancer - // tracking the index on `targets` slice for the next `*ProxyTarget` to be used - i int - } -) +// RoundRobinBalancer implements a round-robin load balancing technique. +type roundRobinBalancer struct { + commonBalancer + // tracking the index on `targets` slice for the next `*ProxyTarget` to be used + i int +} -var ( - // DefaultProxyConfig is the default Proxy middleware config. - DefaultProxyConfig = ProxyConfig{ - Skipper: DefaultSkipper, - ContextKey: "target", - } -) +// DefaultProxyConfig is the default Proxy middleware config. +var DefaultProxyConfig = ProxyConfig{ + Skipper: DefaultSkipper, + ContextKey: "target", +} func proxyRaw(t *ProxyTarget, c echo.Context) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { @@ -367,8 +366,7 @@ func ProxyWithConfig(config ProxyConfig) echo.MiddlewareFunc { switch { case c.IsWebSocket(): proxyRaw(tgt, c).ServeHTTP(res, req) - case req.Header.Get(echo.HeaderAccept) == "text/event-stream": - default: + default: // even SSE requests proxyHTTP(tgt, c, config).ServeHTTP(res, req) } diff --git a/vendor/github.com/labstack/echo/v4/middleware/rate_limiter.go b/vendor/github.com/labstack/echo/v4/middleware/rate_limiter.go index 1d24df5..d4724fd 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/rate_limiter.go +++ b/vendor/github.com/labstack/echo/v4/middleware/rate_limiter.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( @@ -9,39 +12,34 @@ import ( "golang.org/x/time/rate" ) -type ( - // RateLimiterStore is the interface to be implemented by custom stores. - RateLimiterStore interface { - // Stores for the rate limiter have to implement the Allow method - Allow(identifier string) (bool, error) - } -) +// RateLimiterStore is the interface to be implemented by custom stores. 
+type RateLimiterStore interface { + // Stores for the rate limiter have to implement the Allow method + Allow(identifier string) (bool, error) +} -type ( - // RateLimiterConfig defines the configuration for the rate limiter - RateLimiterConfig struct { - Skipper Skipper - BeforeFunc BeforeFunc - // IdentifierExtractor uses echo.Context to extract the identifier for a visitor - IdentifierExtractor Extractor - // Store defines a store for the rate limiter - Store RateLimiterStore - // ErrorHandler provides a handler to be called when IdentifierExtractor returns an error - ErrorHandler func(context echo.Context, err error) error - // DenyHandler provides a handler to be called when RateLimiter denies access - DenyHandler func(context echo.Context, identifier string, err error) error - } - // Extractor is used to extract data from echo.Context - Extractor func(context echo.Context) (string, error) -) +// RateLimiterConfig defines the configuration for the rate limiter +type RateLimiterConfig struct { + Skipper Skipper + BeforeFunc BeforeFunc + // IdentifierExtractor uses echo.Context to extract the identifier for a visitor + IdentifierExtractor Extractor + // Store defines a store for the rate limiter + Store RateLimiterStore + // ErrorHandler provides a handler to be called when IdentifierExtractor returns an error + ErrorHandler func(context echo.Context, err error) error + // DenyHandler provides a handler to be called when RateLimiter denies access + DenyHandler func(context echo.Context, identifier string, err error) error +} -// errors -var ( - // ErrRateLimitExceeded denotes an error raised when rate limit is exceeded - ErrRateLimitExceeded = echo.NewHTTPError(http.StatusTooManyRequests, "rate limit exceeded") - // ErrExtractorError denotes an error raised when extractor function is unsuccessful - ErrExtractorError = echo.NewHTTPError(http.StatusForbidden, "error while extracting identifier") -) +// Extractor is used to extract data from echo.Context +type Extractor func(context echo.Context) (string, error) + +// ErrRateLimitExceeded denotes an error raised when rate limit is exceeded +var ErrRateLimitExceeded = echo.NewHTTPError(http.StatusTooManyRequests, "rate limit exceeded") + +// ErrExtractorError denotes an error raised when extractor function is unsuccessful +var ErrExtractorError = echo.NewHTTPError(http.StatusForbidden, "error while extracting identifier") // DefaultRateLimiterConfig defines default values for RateLimiterConfig var DefaultRateLimiterConfig = RateLimiterConfig{ @@ -150,25 +148,24 @@ func RateLimiterWithConfig(config RateLimiterConfig) echo.MiddlewareFunc { } } -type ( - // RateLimiterMemoryStore is the built-in store implementation for RateLimiter - RateLimiterMemoryStore struct { - visitors map[string]*Visitor - mutex sync.Mutex - rate rate.Limit // for more info check out Limiter docs - https://pkg.go.dev/golang.org/x/time/rate#Limit. +// RateLimiterMemoryStore is the built-in store implementation for RateLimiter +type RateLimiterMemoryStore struct { + visitors map[string]*Visitor + mutex sync.Mutex + rate rate.Limit // for more info check out Limiter docs - https://pkg.go.dev/golang.org/x/time/rate#Limit. 
- burst int - expiresIn time.Duration - lastCleanup time.Time + burst int + expiresIn time.Duration + lastCleanup time.Time - timeNow func() time.Time - } - // Visitor signifies a unique user's limiter details - Visitor struct { - *rate.Limiter - lastSeen time.Time - } -) + timeNow func() time.Time +} + +// Visitor signifies a unique user's limiter details +type Visitor struct { + *rate.Limiter + lastSeen time.Time +} /* NewRateLimiterMemoryStore returns an instance of RateLimiterMemoryStore with diff --git a/vendor/github.com/labstack/echo/v4/middleware/recover.go b/vendor/github.com/labstack/echo/v4/middleware/recover.go index 0466cfe..e6a5940 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/recover.go +++ b/vendor/github.com/labstack/echo/v4/middleware/recover.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( @@ -9,56 +12,52 @@ import ( "github.com/labstack/gommon/log" ) -type ( +// LogErrorFunc defines a function for custom logging in the middleware. +type LogErrorFunc func(c echo.Context, err error, stack []byte) error + +// RecoverConfig defines the config for Recover middleware. +type RecoverConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // Size of the stack to be printed. + // Optional. Default value 4KB. + StackSize int `yaml:"stack_size"` + + // DisableStackAll disables formatting stack traces of all other goroutines + // into buffer after the trace for the current goroutine. + // Optional. Default value false. + DisableStackAll bool `yaml:"disable_stack_all"` + + // DisablePrintStack disables printing stack trace. + // Optional. Default value as false. + DisablePrintStack bool `yaml:"disable_print_stack"` + + // LogLevel is log level to printing stack trace. + // Optional. Default value 0 (Print). + LogLevel log.Lvl + // LogErrorFunc defines a function for custom logging in the middleware. - LogErrorFunc func(c echo.Context, err error, stack []byte) error + // If it's set you don't need to provide LogLevel for config. + // If this function returns nil, the centralized HTTPErrorHandler will not be called. + LogErrorFunc LogErrorFunc - // RecoverConfig defines the config for Recover middleware. - RecoverConfig struct { - // Skipper defines a function to skip middleware. - Skipper Skipper + // DisableErrorHandler disables the call to centralized HTTPErrorHandler. + // The recovered error is then passed back to upstream middleware, instead of swallowing the error. + // Optional. Default value false. + DisableErrorHandler bool `yaml:"disable_error_handler"` +} - // Size of the stack to be printed. - // Optional. Default value 4KB. - StackSize int `yaml:"stack_size"` - - // DisableStackAll disables formatting stack traces of all other goroutines - // into buffer after the trace for the current goroutine. - // Optional. Default value false. - DisableStackAll bool `yaml:"disable_stack_all"` - - // DisablePrintStack disables printing stack trace. - // Optional. Default value as false. - DisablePrintStack bool `yaml:"disable_print_stack"` - - // LogLevel is log level to printing stack trace. - // Optional. Default value 0 (Print). - LogLevel log.Lvl - - // LogErrorFunc defines a function for custom logging in the middleware. - // If it's set you don't need to provide LogLevel for config. - // If this function returns nil, the centralized HTTPErrorHandler will not be called. 
- LogErrorFunc LogErrorFunc - - // DisableErrorHandler disables the call to centralized HTTPErrorHandler. - // The recovered error is then passed back to upstream middleware, instead of swallowing the error. - // Optional. Default value false. - DisableErrorHandler bool `yaml:"disable_error_handler"` - } -) - -var ( - // DefaultRecoverConfig is the default Recover middleware config. - DefaultRecoverConfig = RecoverConfig{ - Skipper: DefaultSkipper, - StackSize: 4 << 10, // 4 KB - DisableStackAll: false, - DisablePrintStack: false, - LogLevel: 0, - LogErrorFunc: nil, - DisableErrorHandler: false, - } -) +// DefaultRecoverConfig is the default Recover middleware config. +var DefaultRecoverConfig = RecoverConfig{ + Skipper: DefaultSkipper, + StackSize: 4 << 10, // 4 KB + DisableStackAll: false, + DisablePrintStack: false, + LogLevel: 0, + LogErrorFunc: nil, + DisableErrorHandler: false, +} // Recover returns a middleware which recovers from panics anywhere in the chain // and handles the control to the centralized HTTPErrorHandler. diff --git a/vendor/github.com/labstack/echo/v4/middleware/redirect.go b/vendor/github.com/labstack/echo/v4/middleware/redirect.go index 13877db..b772ac1 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/redirect.go +++ b/vendor/github.com/labstack/echo/v4/middleware/redirect.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( diff --git a/vendor/github.com/labstack/echo/v4/middleware/request_id.go b/vendor/github.com/labstack/echo/v4/middleware/request_id.go index e29c8f5..14bd4fd 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/request_id.go +++ b/vendor/github.com/labstack/echo/v4/middleware/request_id.go @@ -1,35 +1,34 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( "github.com/labstack/echo/v4" ) -type ( - // RequestIDConfig defines the config for RequestID middleware. - RequestIDConfig struct { - // Skipper defines a function to skip middleware. - Skipper Skipper +// RequestIDConfig defines the config for RequestID middleware. +type RequestIDConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper - // Generator defines a function to generate an ID. - // Optional. Defaults to generator for random string of length 32. - Generator func() string + // Generator defines a function to generate an ID. + // Optional. Defaults to generator for random string of length 32. + Generator func() string - // RequestIDHandler defines a function which is executed for a request id. - RequestIDHandler func(echo.Context, string) + // RequestIDHandler defines a function which is executed for a request id. + RequestIDHandler func(echo.Context, string) - // TargetHeader defines what header to look for to populate the id - TargetHeader string - } -) + // TargetHeader defines what header to look for to populate the id + TargetHeader string +} -var ( - // DefaultRequestIDConfig is the default RequestID middleware config. - DefaultRequestIDConfig = RequestIDConfig{ - Skipper: DefaultSkipper, - Generator: generator, - TargetHeader: echo.HeaderXRequestID, - } -) +// DefaultRequestIDConfig is the default RequestID middleware config. +var DefaultRequestIDConfig = RequestIDConfig{ + Skipper: DefaultSkipper, + Generator: generator, + TargetHeader: echo.HeaderXRequestID, +} // RequestID returns a X-Request-ID middleware. 
func RequestID() echo.MiddlewareFunc { diff --git a/vendor/github.com/labstack/echo/v4/middleware/request_logger.go b/vendor/github.com/labstack/echo/v4/middleware/request_logger.go index f82f6b6..7c18200 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/request_logger.go +++ b/vendor/github.com/labstack/echo/v4/middleware/request_logger.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( diff --git a/vendor/github.com/labstack/echo/v4/middleware/responsecontroller_1.19.go b/vendor/github.com/labstack/echo/v4/middleware/responsecontroller_1.19.go new file mode 100644 index 0000000..ddf6b64 --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/responsecontroller_1.19.go @@ -0,0 +1,44 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + +//go:build !go1.20 + +package middleware + +import ( + "bufio" + "fmt" + "net" + "net/http" +) + +// TODO: remove when Go 1.23 is released and we do not support 1.19 anymore +func responseControllerFlush(rw http.ResponseWriter) error { + for { + switch t := rw.(type) { + case interface{ FlushError() error }: + return t.FlushError() + case http.Flusher: + t.Flush() + return nil + case interface{ Unwrap() http.ResponseWriter }: + rw = t.Unwrap() + default: + return fmt.Errorf("%w", http.ErrNotSupported) + } + } +} + +// TODO: remove when Go 1.23 is released and we do not support 1.19 anymore +func responseControllerHijack(rw http.ResponseWriter) (net.Conn, *bufio.ReadWriter, error) { + for { + switch t := rw.(type) { + case http.Hijacker: + return t.Hijack() + case interface{ Unwrap() http.ResponseWriter }: + rw = t.Unwrap() + default: + return nil, nil, fmt.Errorf("%w", http.ErrNotSupported) + } + } +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/responsecontroller_1.20.go b/vendor/github.com/labstack/echo/v4/middleware/responsecontroller_1.20.go new file mode 100644 index 0000000..bc03059 --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/responsecontroller_1.20.go @@ -0,0 +1,20 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + +//go:build go1.20 + +package middleware + +import ( + "bufio" + "net" + "net/http" +) + +func responseControllerFlush(rw http.ResponseWriter) error { + return http.NewResponseController(rw).Flush() +} + +func responseControllerHijack(rw http.ResponseWriter) (net.Conn, *bufio.ReadWriter, error) { + return http.NewResponseController(rw).Hijack() +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/rewrite.go b/vendor/github.com/labstack/echo/v4/middleware/rewrite.go index 2090eac..4c19cc1 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/rewrite.go +++ b/vendor/github.com/labstack/echo/v4/middleware/rewrite.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( @@ -6,37 +9,33 @@ import ( "github.com/labstack/echo/v4" ) -type ( - // RewriteConfig defines the config for Rewrite middleware. - RewriteConfig struct { - // Skipper defines a function to skip middleware. - Skipper Skipper +// RewriteConfig defines the config for Rewrite middleware. +type RewriteConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper - // Rules defines the URL path rewrite rules. The values captured in asterisk can be - // retrieved by index e.g. 
$1, $2 and so on. - // Example: - // "/old": "/new", - // "/api/*": "/$1", - // "/js/*": "/public/javascripts/$1", - // "/users/*/orders/*": "/user/$1/order/$2", - // Required. - Rules map[string]string `yaml:"rules"` + // Rules defines the URL path rewrite rules. The values captured in asterisk can be + // retrieved by index e.g. $1, $2 and so on. + // Example: + // "/old": "/new", + // "/api/*": "/$1", + // "/js/*": "/public/javascripts/$1", + // "/users/*/orders/*": "/user/$1/order/$2", + // Required. + Rules map[string]string `yaml:"rules"` - // RegexRules defines the URL path rewrite rules using regexp.Rexexp with captures - // Every capture group in the values can be retrieved by index e.g. $1, $2 and so on. - // Example: - // "^/old/[0.9]+/": "/new", - // "^/api/.+?/(.*)": "/v2/$1", - RegexRules map[*regexp.Regexp]string `yaml:"-"` - } -) + // RegexRules defines the URL path rewrite rules using regexp.Rexexp with captures + // Every capture group in the values can be retrieved by index e.g. $1, $2 and so on. + // Example: + // "^/old/[0.9]+/": "/new", + // "^/api/.+?/(.*)": "/v2/$1", + RegexRules map[*regexp.Regexp]string `yaml:"-"` +} -var ( - // DefaultRewriteConfig is the default Rewrite middleware config. - DefaultRewriteConfig = RewriteConfig{ - Skipper: DefaultSkipper, - } -) +// DefaultRewriteConfig is the default Rewrite middleware config. +var DefaultRewriteConfig = RewriteConfig{ + Skipper: DefaultSkipper, +} // Rewrite returns a Rewrite middleware. // diff --git a/vendor/github.com/labstack/echo/v4/middleware/secure.go b/vendor/github.com/labstack/echo/v4/middleware/secure.go index 6c40517..c904abf 100644 --- a/vendor/github.com/labstack/echo/v4/middleware/secure.go +++ b/vendor/github.com/labstack/echo/v4/middleware/secure.go @@ -1,3 +1,6 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: © 2015 LabStack LLC and Echo contributors + package middleware import ( @@ -6,84 +9,80 @@ import ( "github.com/labstack/echo/v4" ) -type ( - // SecureConfig defines the config for Secure middleware. - SecureConfig struct { - // Skipper defines a function to skip middleware. - Skipper Skipper +// SecureConfig defines the config for Secure middleware. +type SecureConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper - // XSSProtection provides protection against cross-site scripting attack (XSS) - // by setting the `X-XSS-Protection` header. - // Optional. Default value "1; mode=block". - XSSProtection string `yaml:"xss_protection"` + // XSSProtection provides protection against cross-site scripting attack (XSS) + // by setting the `X-XSS-Protection` header. + // Optional. Default value "1; mode=block". + XSSProtection string `yaml:"xss_protection"` - // ContentTypeNosniff provides protection against overriding Content-Type - // header by setting the `X-Content-Type-Options` header. - // Optional. Default value "nosniff". - ContentTypeNosniff string `yaml:"content_type_nosniff"` + // ContentTypeNosniff provides protection against overriding Content-Type + // header by setting the `X-Content-Type-Options` header. + // Optional. Default value "nosniff". 
+ ContentTypeNosniff string `yaml:"content_type_nosniff"` - // XFrameOptions can be used to indicate whether or not a browser should - // be allowed to render a page in a , \n * \n *\n * */\n\n.aspect-ratio {\n height: 0;\n position: relative;\n}\n\n.aspect-ratio--16x9 { padding-bottom: 56.25%; }\n.aspect-ratio--9x16 { padding-bottom: 177.77%; }\n\n.aspect-ratio--4x3 { padding-bottom: 75%; }\n.aspect-ratio--3x4 { padding-bottom: 133.33%; }\n\n.aspect-ratio--6x4 { padding-bottom: 66.6%; }\n.aspect-ratio--4x6 { padding-bottom: 150%; }\n\n.aspect-ratio--8x5 { padding-bottom: 62.5%; }\n.aspect-ratio--5x8 { padding-bottom: 160%; }\n\n.aspect-ratio--7x5 { padding-bottom: 71.42%; }\n.aspect-ratio--5x7 { padding-bottom: 140%; }\n\n.aspect-ratio--1x1 { padding-bottom: 100%; }\n\n.aspect-ratio--object {\n position: absolute;\n top: 0;\n right: 0;\n bottom: 0;\n left: 0;\n width: 100%;\n height: 100%;\n z-index: 100;\n}\n\n@media #{$breakpoint-not-small}{\n .aspect-ratio-ns {\n height: 0;\n position: relative;\n }\n .aspect-ratio--16x9-ns { padding-bottom: 56.25%; }\n .aspect-ratio--9x16-ns { padding-bottom: 177.77%; }\n .aspect-ratio--4x3-ns { padding-bottom: 75%; }\n .aspect-ratio--3x4-ns { padding-bottom: 133.33%; }\n .aspect-ratio--6x4-ns { padding-bottom: 66.6%; }\n .aspect-ratio--4x6-ns { padding-bottom: 150%; }\n .aspect-ratio--8x5-ns { padding-bottom: 62.5%; }\n .aspect-ratio--5x8-ns { padding-bottom: 160%; }\n .aspect-ratio--7x5-ns { padding-bottom: 71.42%; }\n .aspect-ratio--5x7-ns { padding-bottom: 140%; }\n .aspect-ratio--1x1-ns { padding-bottom: 100%; }\n .aspect-ratio--object-ns {\n position: absolute;\n top: 0;\n right: 0;\n bottom: 0;\n left: 0;\n width: 100%;\n height: 100%;\n z-index: 100;\n }\n}\n\n@media #{$breakpoint-medium}{\n .aspect-ratio-m {\n height: 0;\n position: relative;\n }\n .aspect-ratio--16x9-m { padding-bottom: 56.25%; }\n .aspect-ratio--9x16-m { padding-bottom: 177.77%; }\n .aspect-ratio--4x3-m { padding-bottom: 75%; }\n .aspect-ratio--3x4-m { padding-bottom: 133.33%; }\n .aspect-ratio--6x4-m { padding-bottom: 66.6%; }\n .aspect-ratio--4x6-m { padding-bottom: 150%; }\n .aspect-ratio--8x5-m { padding-bottom: 62.5%; }\n .aspect-ratio--5x8-m { padding-bottom: 160%; }\n .aspect-ratio--7x5-m { padding-bottom: 71.42%; }\n .aspect-ratio--5x7-m { padding-bottom: 140%; }\n .aspect-ratio--1x1-m { padding-bottom: 100%; }\n .aspect-ratio--object-m {\n position: absolute;\n top: 0;\n right: 0;\n bottom: 0;\n left: 0;\n width: 100%;\n height: 100%;\n z-index: 100;\n }\n}\n\n@media #{$breakpoint-large}{\n .aspect-ratio-l {\n height: 0;\n position: relative;\n }\n .aspect-ratio--16x9-l { padding-bottom: 56.25%; }\n .aspect-ratio--9x16-l { padding-bottom: 177.77%; }\n .aspect-ratio--4x3-l { padding-bottom: 75%; }\n .aspect-ratio--3x4-l { padding-bottom: 133.33%; }\n .aspect-ratio--6x4-l { padding-bottom: 66.6%; }\n .aspect-ratio--4x6-l { padding-bottom: 150%; }\n .aspect-ratio--8x5-l { padding-bottom: 62.5%; }\n .aspect-ratio--5x8-l { padding-bottom: 160%; }\n .aspect-ratio--7x5-l { padding-bottom: 71.42%; }\n .aspect-ratio--5x7-l { padding-bottom: 140%; }\n .aspect-ratio--1x1-l { padding-bottom: 100%; }\n .aspect-ratio--object-l {\n position: absolute;\n top: 0;\n right: 0;\n bottom: 0;\n left: 0;\n width: 100%;\n height: 100%;\n z-index: 100;\n }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n IMAGES\n Docs: http://tachyons.io/docs/elements/images/\n\n*/\n\n/* Responsive images! 
*/\n\nimg { max-width: 100%; }\n\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n BACKGROUND SIZE\n Docs: http://tachyons.io/docs/themes/background-size/\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n/*\n Often used in combination with background image set as an inline style\n on an html element.\n*/\n\n .cover { background-size: cover!important; }\n .contain { background-size: contain!important; }\n\n@media #{$breakpoint-not-small} {\n .cover-ns { background-size: cover!important; }\n .contain-ns { background-size: contain!important; }\n}\n\n@media #{$breakpoint-medium} {\n .cover-m { background-size: cover!important; }\n .contain-m { background-size: contain!important; }\n}\n\n@media #{$breakpoint-large} {\n .cover-l { background-size: cover!important; }\n .contain-l { background-size: contain!important; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n BACKGROUND POSITION\n\n Base:\n bg = background\n\n Modifiers:\n -center = center center\n -top = top center\n -right = center right\n -bottom = bottom center\n -left = center left\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n */\n\n.bg-center { \n background-repeat: no-repeat;\n background-position: center center; \n}\n\n.bg-top { \n background-repeat: no-repeat; \n background-position: top center; \n}\n\n.bg-right { \n background-repeat: no-repeat; \n background-position: center right; \n}\n\n.bg-bottom { \n background-repeat: no-repeat; \n background-position: bottom center; \n}\n\n.bg-left { \n background-repeat: no-repeat; \n background-position: center left; \n}\n\n@media #{$breakpoint-not-small} {\n .bg-center-ns { \n background-repeat: no-repeat;\n background-position: center center; \n }\n\n .bg-top-ns { \n background-repeat: no-repeat; \n background-position: top center; \n }\n\n .bg-right-ns { \n background-repeat: no-repeat; \n background-position: center right; \n }\n\n .bg-bottom-ns { \n background-repeat: no-repeat; \n background-position: bottom center; \n }\n\n .bg-left-ns { \n background-repeat: no-repeat; \n background-position: center left; \n }\n}\n\n@media #{$breakpoint-medium} {\n .bg-center-m { \n background-repeat: no-repeat;\n background-position: center center; \n }\n\n .bg-top-m { \n background-repeat: no-repeat; \n background-position: top center; \n }\n\n .bg-right-m { \n background-repeat: no-repeat; \n background-position: center right; \n }\n\n .bg-bottom-m { \n background-repeat: no-repeat; \n background-position: bottom center; \n }\n\n .bg-left-m { \n background-repeat: no-repeat; \n background-position: center left; \n }\n}\n\n@media #{$breakpoint-large} {\n .bg-center-l { \n background-repeat: no-repeat;\n background-position: center center; \n }\n\n .bg-top-l { \n background-repeat: no-repeat; \n background-position: top center; \n }\n\n .bg-right-l { \n background-repeat: no-repeat; \n background-position: center right; \n }\n\n .bg-bottom-l { \n background-repeat: no-repeat; \n background-position: bottom center; \n }\n\n .bg-left-l { \n background-repeat: no-repeat; \n background-position: center left; \n }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n OUTLINES\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n.outline { outline: 1px solid; }\n.outline-transparent { outline: 1px solid transparent; }\n.outline-0 { outline: 0; }\n\n@media #{$breakpoint-not-small} {\n .outline-ns { outline: 1px solid; }\n 
.outline-transparent-ns { outline: 1px solid transparent; }\n .outline-0-ns { outline: 0; }\n}\n\n@media #{$breakpoint-medium} {\n .outline-m { outline: 1px solid; }\n .outline-transparent-m { outline: 1px solid transparent; }\n .outline-0-m { outline: 0; }\n}\n\n@media #{$breakpoint-large} {\n .outline-l { outline: 1px solid; }\n .outline-transparent-l { outline: 1px solid transparent; }\n .outline-0-l { outline: 0; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n BORDERS\n Docs: http://tachyons.io/docs/themes/borders/\n\n Base:\n b = border\n\n Modifiers:\n a = all\n t = top\n r = right\n b = bottom\n l = left\n n = none\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n .ba { border-style: solid; border-width: 1px; }\n .bt { border-top-style: solid; border-top-width: 1px; }\n .br { border-right-style: solid; border-right-width: 1px; }\n .bb { border-bottom-style: solid; border-bottom-width: 1px; }\n .bl { border-left-style: solid; border-left-width: 1px; }\n .bn { border-style: none; border-width: 0; }\n\n\n@media #{$breakpoint-not-small} {\n .ba-ns { border-style: solid; border-width: 1px; }\n .bt-ns { border-top-style: solid; border-top-width: 1px; }\n .br-ns { border-right-style: solid; border-right-width: 1px; }\n .bb-ns { border-bottom-style: solid; border-bottom-width: 1px; }\n .bl-ns { border-left-style: solid; border-left-width: 1px; }\n .bn-ns { border-style: none; border-width: 0; }\n}\n\n@media #{$breakpoint-medium} {\n .ba-m { border-style: solid; border-width: 1px; }\n .bt-m { border-top-style: solid; border-top-width: 1px; }\n .br-m { border-right-style: solid; border-right-width: 1px; }\n .bb-m { border-bottom-style: solid; border-bottom-width: 1px; }\n .bl-m { border-left-style: solid; border-left-width: 1px; }\n .bn-m { border-style: none; border-width: 0; }\n}\n\n@media #{$breakpoint-large} {\n .ba-l { border-style: solid; border-width: 1px; }\n .bt-l { border-top-style: solid; border-top-width: 1px; }\n .br-l { border-right-style: solid; border-right-width: 1px; }\n .bb-l { border-bottom-style: solid; border-bottom-width: 1px; }\n .bl-l { border-left-style: solid; border-left-width: 1px; }\n .bn-l { border-style: none; border-width: 0; }\n}\n\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n BORDER COLORS\n Docs: http://tachyons.io/docs/themes/borders/\n\n Border colors can be used to extend the base\n border classes ba,bt,bb,br,bl found in the _borders.css file.\n\n The base border class by default will set the color of the border\n to that of the current text color. 
These classes are for the cases\n where you desire for the text and border colors to be different.\n\n Base:\n b = border\n\n Modifiers:\n --color-name = each color variable name is also a border color name\n\n*/\n\n.b--black { border-color: $black; }\n.b--near-black { border-color: $near-black; }\n.b--dark-gray { border-color: $dark-gray; }\n.b--mid-gray { border-color: $mid-gray; }\n.b--gray { border-color: $gray; }\n.b--silver { border-color: $silver; }\n.b--light-silver { border-color: $light-silver; }\n.b--moon-gray { border-color: $moon-gray; }\n.b--light-gray { border-color: $light-gray; }\n.b--near-white { border-color: $near-white; }\n.b--white { border-color: $white; }\n\n.b--white-90 { border-color: $white-90; }\n.b--white-80 { border-color: $white-80; }\n.b--white-70 { border-color: $white-70; }\n.b--white-60 { border-color: $white-60; }\n.b--white-50 { border-color: $white-50; }\n.b--white-40 { border-color: $white-40; }\n.b--white-30 { border-color: $white-30; }\n.b--white-20 { border-color: $white-20; }\n.b--white-10 { border-color: $white-10; }\n.b--white-05 { border-color: $white-05; }\n.b--white-025 { border-color: $white-025; }\n.b--white-0125 { border-color: $white-0125; }\n\n.b--black-90 { border-color: $black-90; }\n.b--black-80 { border-color: $black-80; }\n.b--black-70 { border-color: $black-70; }\n.b--black-60 { border-color: $black-60; }\n.b--black-50 { border-color: $black-50; }\n.b--black-40 { border-color: $black-40; }\n.b--black-30 { border-color: $black-30; }\n.b--black-20 { border-color: $black-20; }\n.b--black-10 { border-color: $black-10; }\n.b--black-05 { border-color: $black-05; }\n.b--black-025 { border-color: $black-025; }\n.b--black-0125 { border-color: $black-0125; }\n\n.b--dark-red { border-color: $dark-red; }\n.b--red { border-color: $red; }\n.b--light-red { border-color: $light-red; }\n.b--orange { border-color: $orange; }\n.b--gold { border-color: $gold; }\n.b--yellow { border-color: $yellow; }\n.b--light-yellow { border-color: $light-yellow; }\n.b--purple { border-color: $purple; }\n.b--light-purple { border-color: $light-purple; }\n.b--dark-pink { border-color: $dark-pink; }\n.b--hot-pink { border-color: $hot-pink; }\n.b--pink { border-color: $pink; }\n.b--light-pink { border-color: $light-pink; }\n.b--dark-green { border-color: $dark-green; }\n.b--green { border-color: $green; }\n.b--light-green { border-color: $light-green; }\n.b--navy { border-color: $navy; }\n.b--dark-blue { border-color: $dark-blue; }\n.b--blue { border-color: $blue; }\n.b--light-blue { border-color: $light-blue; }\n.b--lightest-blue { border-color: $lightest-blue; }\n.b--washed-blue { border-color: $washed-blue; }\n.b--washed-green { border-color: $washed-green; }\n.b--washed-yellow { border-color: $washed-yellow; }\n.b--washed-red { border-color: $washed-red; }\n\n.b--transparent { border-color: $transparent; }\n.b--inherit { border-color: inherit; }\n","\n// Converted Variables\n\n$sans-serif: -apple-system, BlinkMacSystemFont, 'avenir next', avenir, helvetica, 'helvetica neue', ubuntu, roboto, noto, 'segoe ui', arial, sans-serif !default;\n$serif: georgia, serif !default;\n$code: consolas, monaco, monospace !default;\n$font-size-headline: 6rem !default;\n$font-size-subheadline: 5rem !default;\n$font-size-1: 3rem !default;\n$font-size-2: 2.25rem !default;\n$font-size-3: 1.5rem !default;\n$font-size-4: 1.25rem !default;\n$font-size-5: 1rem !default;\n$font-size-6: .875rem !default;\n$font-size-7: .75rem !default;\n$letter-spacing-tight: -.05em 
!default;\n$letter-spacing-1: .1em !default;\n$letter-spacing-2: .25em !default;\n$line-height-solid: 1 !default;\n$line-height-title: 1.25 !default;\n$line-height-copy: 1.5 !default;\n$measure: 30em !default;\n$measure-narrow: 20em !default;\n$measure-wide: 34em !default;\n$spacing-none: 0 !default;\n$spacing-extra-small: .25rem !default;\n$spacing-small: .5rem !default;\n$spacing-medium: 1rem !default;\n$spacing-large: 2rem !default;\n$spacing-extra-large: 4rem !default;\n$spacing-extra-extra-large: 8rem !default;\n$spacing-extra-extra-extra-large: 16rem !default;\n$spacing-copy-separator: 1.5em !default;\n$height-1: 1rem !default;\n$height-2: 2rem !default;\n$height-3: 4rem !default;\n$height-4: 8rem !default;\n$height-5: 16rem !default;\n$width-1: 1rem !default;\n$width-2: 2rem !default;\n$width-3: 4rem !default;\n$width-4: 8rem !default;\n$width-5: 16rem !default;\n$max-width-1: 1rem !default;\n$max-width-2: 2rem !default;\n$max-width-3: 4rem !default;\n$max-width-4: 8rem !default;\n$max-width-5: 16rem !default;\n$max-width-6: 32rem !default;\n$max-width-7: 48rem !default;\n$max-width-8: 64rem !default;\n$max-width-9: 96rem !default;\n$border-radius-none: 0 !default;\n$border-radius-1: .125rem !default;\n$border-radius-2: .25rem !default;\n$border-radius-3: .5rem !default;\n$border-radius-4: 1rem !default;\n$border-radius-circle: 100% !default;\n$border-radius-pill: 9999px !default;\n$border-width-none: 0 !default;\n$border-width-1: .125rem !default;\n$border-width-2: .25rem !default;\n$border-width-3: .5rem !default;\n$border-width-4: 1rem !default;\n$border-width-5: 2rem !default;\n$box-shadow-1: 0px 0px 4px 2px rgba( 0, 0, 0, 0.2 ) !default;\n$box-shadow-2: 0px 0px 8px 2px rgba( 0, 0, 0, 0.2 ) !default;\n$box-shadow-3: 2px 2px 4px 2px rgba( 0, 0, 0, 0.2 ) !default;\n$box-shadow-4: 2px 2px 8px 0px rgba( 0, 0, 0, 0.2 ) !default;\n$box-shadow-5: 4px 4px 8px 0px rgba( 0, 0, 0, 0.2 ) !default;\n$black: #000 !default;\n$near-black: #111 !default;\n$dark-gray: #333 !default;\n$mid-gray: #555 !default;\n$gray: #777 !default;\n$silver: #999 !default;\n$light-silver: #aaa !default;\n$moon-gray: #ccc !default;\n$light-gray: #eee !default;\n$near-white: #f4f4f4 !default;\n$white: #fff !default;\n$transparent: transparent !default;\n$black-90: rgba(0,0,0,.9) !default;\n$black-80: rgba(0,0,0,.8) !default;\n$black-70: rgba(0,0,0,.7) !default;\n$black-60: rgba(0,0,0,.6) !default;\n$black-50: rgba(0,0,0,.5) !default;\n$black-40: rgba(0,0,0,.4) !default;\n$black-30: rgba(0,0,0,.3) !default;\n$black-20: rgba(0,0,0,.2) !default;\n$black-10: rgba(0,0,0,.1) !default;\n$black-05: rgba(0,0,0,.05) !default;\n$black-025: rgba(0,0,0,.025) !default;\n$black-0125: rgba(0,0,0,.0125) !default;\n$white-90: rgba(255,255,255,.9) !default;\n$white-80: rgba(255,255,255,.8) !default;\n$white-70: rgba(255,255,255,.7) !default;\n$white-60: rgba(255,255,255,.6) !default;\n$white-50: rgba(255,255,255,.5) !default;\n$white-40: rgba(255,255,255,.4) !default;\n$white-30: rgba(255,255,255,.3) !default;\n$white-20: rgba(255,255,255,.2) !default;\n$white-10: rgba(255,255,255,.1) !default;\n$white-05: rgba(255,255,255,.05) !default;\n$white-025: rgba(255,255,255,.025) !default;\n$white-0125: rgba(255,255,255,.0125) !default;\n$dark-red: #e7040f !default;\n$red: #ff4136 !default;\n$light-red: #ff725c !default;\n$orange: #ff6300 !default;\n$gold: #ffb700 !default;\n$yellow: #ffd700 !default;\n$light-yellow: #fbf1a9 !default;\n$purple: #5e2ca5 !default;\n$light-purple: #a463f2 !default;\n$dark-pink: #d5008f !default;\n$hot-pink: 
#ff41b4 !default;\n$pink: #ff80cc !default;\n$light-pink: #ffa3d7 !default;\n$dark-green: #137752 !default;\n$green: #19a974 !default;\n$light-green: #9eebcf !default;\n$navy: #001b44 !default;\n$dark-blue: #00449e !default;\n$blue: #357edd !default;\n$light-blue: #96ccff !default;\n$lightest-blue: #cdecff !default;\n$washed-blue: #f6fffe !default;\n$washed-green: #e8fdf5 !default;\n$washed-yellow: #fffceb !default;\n$washed-red: #ffdfdf !default;\n\n// Custom Media Query Variables\n\n$breakpoint-not-small: 'screen and (min-width: 30em)' !default;\n$breakpoint-medium: 'screen and (min-width: 30em) and (max-width: 60em)' !default;\n$breakpoint-large: 'screen and (min-width: 60em)' !default;\n\n/*\n\n VARIABLES\n\n*/\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n BORDER RADIUS\n Docs: http://tachyons.io/docs/themes/border-radius/\n\n Base:\n br = border-radius\n\n Modifiers:\n 0 = 0/none\n 1 = 1st step in scale\n 2 = 2nd step in scale\n 3 = 3rd step in scale\n 4 = 4th step in scale\n\n Literal values:\n -100 = 100%\n -pill = 9999px\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n .br0 { border-radius: $border-radius-none }\n .br1 { border-radius: $border-radius-1; }\n .br2 { border-radius: $border-radius-2; }\n .br3 { border-radius: $border-radius-3; }\n .br4 { border-radius: $border-radius-4; }\n .br-100 { border-radius: $border-radius-circle; }\n .br-pill { border-radius: $border-radius-pill; }\n .br--bottom {\n border-top-left-radius: 0;\n border-top-right-radius: 0;\n }\n .br--top {\n border-bottom-left-radius: 0;\n border-bottom-right-radius: 0;\n }\n .br--right {\n border-top-left-radius: 0;\n border-bottom-left-radius: 0;\n }\n .br--left {\n border-top-right-radius: 0;\n border-bottom-right-radius: 0;\n }\n\n@media #{$breakpoint-not-small} {\n .br0-ns { border-radius: $border-radius-none }\n .br1-ns { border-radius: $border-radius-1; }\n .br2-ns { border-radius: $border-radius-2; }\n .br3-ns { border-radius: $border-radius-3; }\n .br4-ns { border-radius: $border-radius-4; }\n .br-100-ns { border-radius: $border-radius-circle; }\n .br-pill-ns { border-radius: $border-radius-pill; }\n .br--bottom-ns {\n border-top-left-radius: 0;\n border-top-right-radius: 0;\n }\n .br--top-ns {\n border-bottom-left-radius: 0;\n border-bottom-right-radius: 0;\n }\n .br--right-ns {\n border-top-left-radius: 0;\n border-bottom-left-radius: 0;\n }\n .br--left-ns {\n border-top-right-radius: 0;\n border-bottom-right-radius: 0;\n }\n}\n\n@media #{$breakpoint-medium} {\n .br0-m { border-radius: $border-radius-none }\n .br1-m { border-radius: $border-radius-1; }\n .br2-m { border-radius: $border-radius-2; }\n .br3-m { border-radius: $border-radius-3; }\n .br4-m { border-radius: $border-radius-4; }\n .br-100-m { border-radius: $border-radius-circle; }\n .br-pill-m { border-radius: $border-radius-pill; }\n .br--bottom-m {\n border-top-left-radius: 0;\n border-top-right-radius: 0;\n }\n .br--top-m {\n border-bottom-left-radius: 0;\n border-bottom-right-radius: 0;\n }\n .br--right-m {\n border-top-left-radius: 0;\n border-bottom-left-radius: 0;\n }\n .br--left-m {\n border-top-right-radius: 0;\n border-bottom-right-radius: 0;\n }\n}\n\n@media #{$breakpoint-large} {\n .br0-l { border-radius: $border-radius-none }\n .br1-l { border-radius: $border-radius-1; }\n .br2-l { border-radius: $border-radius-2; }\n .br3-l { border-radius: $border-radius-3; }\n .br4-l { border-radius: $border-radius-4; }\n .br-100-l { border-radius: $border-radius-circle; }\n 
.br-pill-l { border-radius: $border-radius-pill; }\n .br--bottom-l {\n border-top-left-radius: 0;\n border-top-right-radius: 0;\n }\n .br--top-l {\n border-bottom-left-radius: 0;\n border-bottom-right-radius: 0;\n }\n .br--right-l {\n border-top-left-radius: 0;\n border-bottom-left-radius: 0;\n }\n .br--left-l {\n border-top-right-radius: 0;\n border-bottom-right-radius: 0;\n }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n BORDER STYLES\n Docs: http://tachyons.io/docs/themes/borders/\n\n Depends on base border module in _borders.css\n\n Base:\n b = border-style\n\n Modifiers:\n --none = none\n --dotted = dotted\n --dashed = dashed\n --solid = solid\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n */\n\n.b--dotted { border-style: dotted; }\n.b--dashed { border-style: dashed; }\n.b--solid { border-style: solid; }\n.b--none { border-style: none; }\n\n@media #{$breakpoint-not-small} {\n .b--dotted-ns { border-style: dotted; }\n .b--dashed-ns { border-style: dashed; }\n .b--solid-ns { border-style: solid; }\n .b--none-ns { border-style: none; }\n}\n\n@media #{$breakpoint-medium} {\n .b--dotted-m { border-style: dotted; }\n .b--dashed-m { border-style: dashed; }\n .b--solid-m { border-style: solid; }\n .b--none-m { border-style: none; }\n}\n\n@media #{$breakpoint-large} {\n .b--dotted-l { border-style: dotted; }\n .b--dashed-l { border-style: dashed; }\n .b--solid-l { border-style: solid; }\n .b--none-l { border-style: none; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n BORDER WIDTHS\n Docs: http://tachyons.io/docs/themes/borders/\n\n Base:\n bw = border-width\n\n Modifiers:\n 0 = 0 width border\n 1 = 1st step in border-width scale\n 2 = 2nd step in border-width scale\n 3 = 3rd step in border-width scale\n 4 = 4th step in border-width scale\n 5 = 5th step in border-width scale\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n.bw0 { border-width: $border-width-none; }\n.bw1 { border-width: $border-width-1; }\n.bw2 { border-width: $border-width-2; }\n.bw3 { border-width: $border-width-3; }\n.bw4 { border-width: $border-width-4; }\n.bw5 { border-width: $border-width-5; }\n\n/* Resets */\n.bt-0 { border-top-width: $border-width-none }\n.br-0 { border-right-width: $border-width-none }\n.bb-0 { border-bottom-width: $border-width-none }\n.bl-0 { border-left-width: $border-width-none }\n\n@media #{$breakpoint-not-small} {\n .bw0-ns { border-width: $border-width-none; }\n .bw1-ns { border-width: $border-width-1; }\n .bw2-ns { border-width: $border-width-2; }\n .bw3-ns { border-width: $border-width-3; }\n .bw4-ns { border-width: $border-width-4; }\n .bw5-ns { border-width: $border-width-5; }\n .bt-0-ns { border-top-width: $border-width-none }\n .br-0-ns { border-right-width: $border-width-none }\n .bb-0-ns { border-bottom-width: $border-width-none }\n .bl-0-ns { border-left-width: $border-width-none }\n}\n\n@media #{$breakpoint-medium} {\n .bw0-m { border-width: $border-width-none; }\n .bw1-m { border-width: $border-width-1; }\n .bw2-m { border-width: $border-width-2; }\n .bw3-m { border-width: $border-width-3; }\n .bw4-m { border-width: $border-width-4; }\n .bw5-m { border-width: $border-width-5; }\n .bt-0-m { border-top-width: $border-width-none }\n .br-0-m { border-right-width: $border-width-none }\n .bb-0-m { border-bottom-width: $border-width-none }\n .bl-0-m { border-left-width: $border-width-none }\n}\n\n@media #{$breakpoint-large} {\n .bw0-l { border-width: 
$border-width-none; }\n .bw1-l { border-width: $border-width-1; }\n .bw2-l { border-width: $border-width-2; }\n .bw3-l { border-width: $border-width-3; }\n .bw4-l { border-width: $border-width-4; }\n .bw5-l { border-width: $border-width-5; }\n .bt-0-l { border-top-width: $border-width-none }\n .br-0-l { border-right-width: $border-width-none }\n .bb-0-l { border-bottom-width: $border-width-none }\n .bl-0-l { border-left-width: $border-width-none }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n BOX-SHADOW\n Docs: http://tachyons.io/docs/themes/box-shadow/\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n */\n\n.shadow-1 { box-shadow: $box-shadow-1; }\n.shadow-2 { box-shadow: $box-shadow-2; }\n.shadow-3 { box-shadow: $box-shadow-3; }\n.shadow-4 { box-shadow: $box-shadow-4; }\n.shadow-5 { box-shadow: $box-shadow-5; }\n\n@media #{$breakpoint-not-small} {\n .shadow-1-ns { box-shadow: $box-shadow-1; }\n .shadow-2-ns { box-shadow: $box-shadow-2; }\n .shadow-3-ns { box-shadow: $box-shadow-3; }\n .shadow-4-ns { box-shadow: $box-shadow-4; }\n .shadow-5-ns { box-shadow: $box-shadow-5; }\n}\n\n@media #{$breakpoint-medium} {\n .shadow-1-m { box-shadow: $box-shadow-1; }\n .shadow-2-m { box-shadow: $box-shadow-2; }\n .shadow-3-m { box-shadow: $box-shadow-3; }\n .shadow-4-m { box-shadow: $box-shadow-4; }\n .shadow-5-m { box-shadow: $box-shadow-5; }\n}\n\n@media #{$breakpoint-large} {\n .shadow-1-l { box-shadow: $box-shadow-1; }\n .shadow-2-l { box-shadow: $box-shadow-2; }\n .shadow-3-l { box-shadow: $box-shadow-3; }\n .shadow-4-l { box-shadow: $box-shadow-4; }\n .shadow-5-l { box-shadow: $box-shadow-5; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n CODE\n\n*/\n\n.pre {\n overflow-x: auto;\n overflow-y: hidden;\n overflow: scroll;\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n COORDINATES\n Docs: http://tachyons.io/docs/layout/position/\n\n Use in combination with the position module.\n\n Base:\n top\n bottom\n right\n left\n\n Modifiers:\n -0 = literal value 0\n -1 = literal value 1\n -2 = literal value 2\n --1 = literal value -1\n --2 = literal value -2\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n.top-0 { top: 0; }\n.right-0 { right: 0; }\n.bottom-0 { bottom: 0; }\n.left-0 { left: 0; }\n\n.top-1 { top: 1rem; }\n.right-1 { right: 1rem; }\n.bottom-1 { bottom: 1rem; }\n.left-1 { left: 1rem; }\n\n.top-2 { top: 2rem; }\n.right-2 { right: 2rem; }\n.bottom-2 { bottom: 2rem; }\n.left-2 { left: 2rem; }\n\n.top--1 { top: -1rem; }\n.right--1 { right: -1rem; }\n.bottom--1 { bottom: -1rem; }\n.left--1 { left: -1rem; }\n\n.top--2 { top: -2rem; }\n.right--2 { right: -2rem; }\n.bottom--2 { bottom: -2rem; }\n.left--2 { left: -2rem; }\n\n\n.absolute--fill {\n top: 0;\n right: 0;\n bottom: 0;\n left: 0;\n}\n\n@media #{$breakpoint-not-small} {\n .top-0-ns { top: 0; }\n .left-0-ns { left: 0; }\n .right-0-ns { right: 0; }\n .bottom-0-ns { bottom: 0; }\n .top-1-ns { top: 1rem; }\n .left-1-ns { left: 1rem; }\n .right-1-ns { right: 1rem; }\n .bottom-1-ns { bottom: 1rem; }\n .top-2-ns { top: 2rem; }\n .left-2-ns { left: 2rem; }\n .right-2-ns { right: 2rem; }\n .bottom-2-ns { bottom: 2rem; }\n .top--1-ns { top: -1rem; }\n .right--1-ns { right: -1rem; }\n .bottom--1-ns { bottom: -1rem; }\n .left--1-ns { left: -1rem; }\n .top--2-ns { top: -2rem; }\n .right--2-ns { right: -2rem; }\n .bottom--2-ns { bottom: -2rem; }\n .left--2-ns { left: -2rem; }\n .absolute--fill-ns 
{\n top: 0;\n right: 0;\n bottom: 0;\n left: 0;\n }\n}\n\n@media #{$breakpoint-medium} {\n .top-0-m { top: 0; }\n .left-0-m { left: 0; }\n .right-0-m { right: 0; }\n .bottom-0-m { bottom: 0; }\n .top-1-m { top: 1rem; }\n .left-1-m { left: 1rem; }\n .right-1-m { right: 1rem; }\n .bottom-1-m { bottom: 1rem; }\n .top-2-m { top: 2rem; }\n .left-2-m { left: 2rem; }\n .right-2-m { right: 2rem; }\n .bottom-2-m { bottom: 2rem; }\n .top--1-m { top: -1rem; }\n .right--1-m { right: -1rem; }\n .bottom--1-m { bottom: -1rem; }\n .left--1-m { left: -1rem; }\n .top--2-m { top: -2rem; }\n .right--2-m { right: -2rem; }\n .bottom--2-m { bottom: -2rem; }\n .left--2-m { left: -2rem; }\n .absolute--fill-m {\n top: 0;\n right: 0;\n bottom: 0;\n left: 0;\n }\n}\n\n@media #{$breakpoint-large} {\n .top-0-l { top: 0; }\n .left-0-l { left: 0; }\n .right-0-l { right: 0; }\n .bottom-0-l { bottom: 0; }\n .top-1-l { top: 1rem; }\n .left-1-l { left: 1rem; }\n .right-1-l { right: 1rem; }\n .bottom-1-l { bottom: 1rem; }\n .top-2-l { top: 2rem; }\n .left-2-l { left: 2rem; }\n .right-2-l { right: 2rem; }\n .bottom-2-l { bottom: 2rem; }\n .top--1-l { top: -1rem; }\n .right--1-l { right: -1rem; }\n .bottom--1-l { bottom: -1rem; }\n .left--1-l { left: -1rem; }\n .top--2-l { top: -2rem; }\n .right--2-l { right: -2rem; }\n .bottom--2-l { bottom: -2rem; }\n .left--2-l { left: -2rem; }\n .absolute--fill-l {\n top: 0;\n right: 0;\n bottom: 0;\n left: 0;\n }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n CLEARFIX\n http://tachyons.io/docs/layout/clearfix/\n\n*/\n\n/* Nicolas Gallaghers Clearfix solution\n Ref: http://nicolasgallagher.com/micro-clearfix-hack/ */\n\n.cf:before,\n.cf:after { content: \" \"; display: table; }\n.cf:after { clear: both; }\n.cf { zoom: 1; }\n\n.cl { clear: left; }\n.cr { clear: right; }\n.cb { clear: both; }\n.cn { clear: none; }\n\n@media #{$breakpoint-not-small} {\n .cl-ns { clear: left; }\n .cr-ns { clear: right; }\n .cb-ns { clear: both; }\n .cn-ns { clear: none; }\n}\n\n@media #{$breakpoint-medium} {\n .cl-m { clear: left; }\n .cr-m { clear: right; }\n .cb-m { clear: both; }\n .cn-m { clear: none; }\n}\n\n@media #{$breakpoint-large} {\n .cl-l { clear: left; }\n .cr-l { clear: right; }\n .cb-l { clear: both; }\n .cn-l { clear: none; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n FLEXBOX\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n.flex { display: flex; }\n.inline-flex { display: inline-flex; }\n\n/* 1. 
Fix for Chrome 44 bug.\n * https://code.google.com/p/chromium/issues/detail?id=506893 */\n.flex-auto {\n flex: 1 1 auto;\n min-width: 0; /* 1 */\n min-height: 0; /* 1 */\n}\n\n.flex-none { flex: none; }\n\n.flex-column { flex-direction: column; }\n.flex-row { flex-direction: row; }\n.flex-wrap { flex-wrap: wrap; }\n.flex-nowrap { flex-wrap: nowrap; }\n.flex-wrap-reverse { flex-wrap: wrap-reverse; }\n.flex-column-reverse { flex-direction: column-reverse; }\n.flex-row-reverse { flex-direction: row-reverse; }\n\n.items-start { align-items: flex-start; }\n.items-end { align-items: flex-end; }\n.items-center { align-items: center; }\n.items-baseline { align-items: baseline; }\n.items-stretch { align-items: stretch; }\n\n.self-start { align-self: flex-start; }\n.self-end { align-self: flex-end; }\n.self-center { align-self: center; }\n.self-baseline { align-self: baseline; }\n.self-stretch { align-self: stretch; }\n\n.justify-start { justify-content: flex-start; }\n.justify-end { justify-content: flex-end; }\n.justify-center { justify-content: center; }\n.justify-between { justify-content: space-between; }\n.justify-around { justify-content: space-around; }\n\n.content-start { align-content: flex-start; }\n.content-end { align-content: flex-end; }\n.content-center { align-content: center; }\n.content-between { align-content: space-between; }\n.content-around { align-content: space-around; }\n.content-stretch { align-content: stretch; }\n\n.order-0 { order: 0; }\n.order-1 { order: 1; }\n.order-2 { order: 2; }\n.order-3 { order: 3; }\n.order-4 { order: 4; }\n.order-5 { order: 5; }\n.order-6 { order: 6; }\n.order-7 { order: 7; }\n.order-8 { order: 8; }\n.order-last { order: 99999; }\n\n.flex-grow-0 { flex-grow: 0; }\n.flex-grow-1 { flex-grow: 1; }\n\n.flex-shrink-0 { flex-shrink: 0; }\n.flex-shrink-1 { flex-shrink: 1; }\n\n@media #{$breakpoint-not-small} {\n .flex-ns { display: flex; }\n .inline-flex-ns { display: inline-flex; }\n .flex-auto-ns {\n flex: 1 1 auto;\n min-width: 0; /* 1 */\n min-height: 0; /* 1 */\n }\n .flex-none-ns { flex: none; }\n .flex-column-ns { flex-direction: column; }\n .flex-row-ns { flex-direction: row; }\n .flex-wrap-ns { flex-wrap: wrap; }\n .flex-nowrap-ns { flex-wrap: nowrap; }\n .flex-wrap-reverse-ns { flex-wrap: wrap-reverse; }\n .flex-column-reverse-ns { flex-direction: column-reverse; }\n .flex-row-reverse-ns { flex-direction: row-reverse; }\n .items-start-ns { align-items: flex-start; }\n .items-end-ns { align-items: flex-end; }\n .items-center-ns { align-items: center; }\n .items-baseline-ns { align-items: baseline; }\n .items-stretch-ns { align-items: stretch; }\n\n .self-start-ns { align-self: flex-start; }\n .self-end-ns { align-self: flex-end; }\n .self-center-ns { align-self: center; }\n .self-baseline-ns { align-self: baseline; }\n .self-stretch-ns { align-self: stretch; }\n\n .justify-start-ns { justify-content: flex-start; }\n .justify-end-ns { justify-content: flex-end; }\n .justify-center-ns { justify-content: center; }\n .justify-between-ns { justify-content: space-between; }\n .justify-around-ns { justify-content: space-around; }\n\n .content-start-ns { align-content: flex-start; }\n .content-end-ns { align-content: flex-end; }\n .content-center-ns { align-content: center; }\n .content-between-ns { align-content: space-between; }\n .content-around-ns { align-content: space-around; }\n .content-stretch-ns { align-content: stretch; }\n\n .order-0-ns { order: 0; }\n .order-1-ns { order: 1; }\n .order-2-ns { order: 2; }\n .order-3-ns { order: 3; }\n 
.order-4-ns { order: 4; }\n .order-5-ns { order: 5; }\n .order-6-ns { order: 6; }\n .order-7-ns { order: 7; }\n .order-8-ns { order: 8; }\n .order-last-ns { order: 99999; }\n\n .flex-grow-0-ns { flex-grow: 0; }\n .flex-grow-1-ns { flex-grow: 1; }\n\n .flex-shrink-0-ns { flex-shrink: 0; }\n .flex-shrink-1-ns { flex-shrink: 1; }\n}\n@media #{$breakpoint-medium} {\n .flex-m { display: flex; }\n .inline-flex-m { display: inline-flex; }\n .flex-auto-m {\n flex: 1 1 auto;\n min-width: 0; /* 1 */\n min-height: 0; /* 1 */\n }\n .flex-none-m { flex: none; }\n .flex-column-m { flex-direction: column; }\n .flex-row-m { flex-direction: row; }\n .flex-wrap-m { flex-wrap: wrap; }\n .flex-nowrap-m { flex-wrap: nowrap; }\n .flex-wrap-reverse-m { flex-wrap: wrap-reverse; }\n .flex-column-reverse-m { flex-direction: column-reverse; }\n .flex-row-reverse-m { flex-direction: row-reverse; }\n .items-start-m { align-items: flex-start; }\n .items-end-m { align-items: flex-end; }\n .items-center-m { align-items: center; }\n .items-baseline-m { align-items: baseline; }\n .items-stretch-m { align-items: stretch; }\n\n .self-start-m { align-self: flex-start; }\n .self-end-m { align-self: flex-end; }\n .self-center-m { align-self: center; }\n .self-baseline-m { align-self: baseline; }\n .self-stretch-m { align-self: stretch; }\n\n .justify-start-m { justify-content: flex-start; }\n .justify-end-m { justify-content: flex-end; }\n .justify-center-m { justify-content: center; }\n .justify-between-m { justify-content: space-between; }\n .justify-around-m { justify-content: space-around; }\n\n .content-start-m { align-content: flex-start; }\n .content-end-m { align-content: flex-end; }\n .content-center-m { align-content: center; }\n .content-between-m { align-content: space-between; }\n .content-around-m { align-content: space-around; }\n .content-stretch-m { align-content: stretch; }\n\n .order-0-m { order: 0; }\n .order-1-m { order: 1; }\n .order-2-m { order: 2; }\n .order-3-m { order: 3; }\n .order-4-m { order: 4; }\n .order-5-m { order: 5; }\n .order-6-m { order: 6; }\n .order-7-m { order: 7; }\n .order-8-m { order: 8; }\n .order-last-m { order: 99999; }\n\n .flex-grow-0-m { flex-grow: 0; }\n .flex-grow-1-m { flex-grow: 1; }\n\n .flex-shrink-0-m { flex-shrink: 0; }\n .flex-shrink-1-m { flex-shrink: 1; }\n}\n\n@media #{$breakpoint-large} {\n .flex-l { display: flex; }\n .inline-flex-l { display: inline-flex; }\n .flex-auto-l {\n flex: 1 1 auto;\n min-width: 0; /* 1 */\n min-height: 0; /* 1 */\n }\n .flex-none-l { flex: none; }\n .flex-column-l { flex-direction: column; }\n .flex-row-l { flex-direction: row; }\n .flex-wrap-l { flex-wrap: wrap; }\n .flex-nowrap-l { flex-wrap: nowrap; }\n .flex-wrap-reverse-l { flex-wrap: wrap-reverse; }\n .flex-column-reverse-l { flex-direction: column-reverse; }\n .flex-row-reverse-l { flex-direction: row-reverse; }\n\n .items-start-l { align-items: flex-start; }\n .items-end-l { align-items: flex-end; }\n .items-center-l { align-items: center; }\n .items-baseline-l { align-items: baseline; }\n .items-stretch-l { align-items: stretch; }\n\n .self-start-l { align-self: flex-start; }\n .self-end-l { align-self: flex-end; }\n .self-center-l { align-self: center; }\n .self-baseline-l { align-self: baseline; }\n .self-stretch-l { align-self: stretch; }\n\n .justify-start-l { justify-content: flex-start; }\n .justify-end-l { justify-content: flex-end; }\n .justify-center-l { justify-content: center; }\n .justify-between-l { justify-content: space-between; }\n .justify-around-l { 
justify-content: space-around; }\n\n .content-start-l { align-content: flex-start; }\n .content-end-l { align-content: flex-end; }\n .content-center-l { align-content: center; }\n .content-between-l { align-content: space-between; }\n .content-around-l { align-content: space-around; }\n .content-stretch-l { align-content: stretch; }\n\n .order-0-l { order: 0; }\n .order-1-l { order: 1; }\n .order-2-l { order: 2; }\n .order-3-l { order: 3; }\n .order-4-l { order: 4; }\n .order-5-l { order: 5; }\n .order-6-l { order: 6; }\n .order-7-l { order: 7; }\n .order-8-l { order: 8; }\n .order-last-l { order: 99999; }\n\n .flex-grow-0-l { flex-grow: 0; }\n .flex-grow-1-l { flex-grow: 1; }\n\n .flex-shrink-0-l { flex-shrink: 0; }\n .flex-shrink-1-l { flex-shrink: 1; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n DISPLAY\n Docs: http://tachyons.io/docs/layout/display\n\n Base:\n d = display\n\n Modifiers:\n n = none\n b = block\n ib = inline-block\n it = inline-table\n t = table\n tc = table-cell\n tr = table-row\n tcol = table-column\n tcolg = table-column-group\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n.dn { display: none; }\n.di { display: inline; }\n.db { display: block; }\n.dib { display: inline-block; }\n.dit { display: inline-table; }\n.dt { display: table; }\n.dtc { display: table-cell; }\n.dt-row { display: table-row; }\n.dt-row-group { display: table-row-group; }\n.dt-column { display: table-column; }\n.dt-column-group { display: table-column-group; }\n\n/*\n This will set table to full width and then\n all cells will be equal width\n*/\n.dt--fixed {\n table-layout: fixed;\n width: 100%;\n}\n\n@media #{$breakpoint-not-small} {\n .dn-ns { display: none; }\n .di-ns { display: inline; }\n .db-ns { display: block; }\n .dib-ns { display: inline-block; }\n .dit-ns { display: inline-table; }\n .dt-ns { display: table; }\n .dtc-ns { display: table-cell; }\n .dt-row-ns { display: table-row; }\n .dt-row-group-ns { display: table-row-group; }\n .dt-column-ns { display: table-column; }\n .dt-column-group-ns { display: table-column-group; }\n\n .dt--fixed-ns {\n table-layout: fixed;\n width: 100%;\n }\n}\n\n@media #{$breakpoint-medium} {\n .dn-m { display: none; }\n .di-m { display: inline; }\n .db-m { display: block; }\n .dib-m { display: inline-block; }\n .dit-m { display: inline-table; }\n .dt-m { display: table; }\n .dtc-m { display: table-cell; }\n .dt-row-m { display: table-row; }\n .dt-row-group-m { display: table-row-group; }\n .dt-column-m { display: table-column; }\n .dt-column-group-m { display: table-column-group; }\n\n .dt--fixed-m {\n table-layout: fixed;\n width: 100%;\n }\n}\n\n@media #{$breakpoint-large} {\n .dn-l { display: none; }\n .di-l { display: inline; }\n .db-l { display: block; }\n .dib-l { display: inline-block; }\n .dit-l { display: inline-table; }\n .dt-l { display: table; }\n .dtc-l { display: table-cell; }\n .dt-row-l { display: table-row; }\n .dt-row-group-l { display: table-row-group; }\n .dt-column-l { display: table-column; }\n .dt-column-group-l { display: table-column-group; }\n\n .dt--fixed-l {\n table-layout: fixed;\n width: 100%;\n }\n}\n\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n FLOATS\n http://tachyons.io/docs/layout/floats/\n\n 1. Floated elements are automatically rendered as block level elements.\n Setting floats to display inline will fix the double margin bug in\n ie6. You know... just in case.\n\n 2. 
Don't forget to clearfix your floats with .cf\n\n Base:\n f = float\n\n Modifiers:\n l = left\n r = right\n n = none\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n\n\n.fl { float: left; _display: inline; }\n.fr { float: right; _display: inline; }\n.fn { float: none; }\n\n@media #{$breakpoint-not-small} {\n .fl-ns { float: left; _display: inline; }\n .fr-ns { float: right; _display: inline; }\n .fn-ns { float: none; }\n}\n\n@media #{$breakpoint-medium} {\n .fl-m { float: left; _display: inline; }\n .fr-m { float: right; _display: inline; }\n .fn-m { float: none; }\n}\n\n@media #{$breakpoint-large} {\n .fl-l { float: left; _display: inline; }\n .fr-l { float: right; _display: inline; }\n .fn-l { float: none; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n FONT FAMILY GROUPS\n Docs: http://tachyons.io/docs/typography/font-family/\n\n*/\n\n\n.sans-serif {\n font-family: $sans-serif;\n}\n\n.serif {\n font-family: $serif;\n}\n\n.system-sans-serif {\n font-family: sans-serif;\n}\n\n.system-serif {\n font-family: serif;\n}\n\n\n/* Monospaced Typefaces (for code) */\n\n/* From http://cssfontstack.com */\ncode, .code {\n font-family: Consolas,\n monaco,\n monospace;\n}\n\n.courier {\n font-family: 'Courier Next',\n courier,\n monospace;\n}\n\n\n/* Sans-Serif Typefaces */\n\n.helvetica {\n font-family: 'helvetica neue', helvetica,\n sans-serif;\n}\n\n.avenir {\n font-family: 'avenir next', avenir,\n sans-serif;\n}\n\n\n/* Serif Typefaces */\n\n.athelas {\n font-family: athelas,\n georgia,\n serif;\n}\n\n.georgia {\n font-family: georgia,\n serif;\n}\n\n.times {\n font-family: times,\n serif;\n}\n\n.bodoni {\n font-family: \"Bodoni MT\",\n serif;\n}\n\n.calisto {\n font-family: \"Calisto MT\",\n serif;\n}\n\n.garamond {\n font-family: garamond,\n serif;\n}\n\n.baskerville {\n font-family: baskerville,\n serif;\n}\n\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n FONT STYLE\n Docs: http://tachyons.io/docs/typography/font-style/\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n.i { font-style: italic; }\n.fs-normal { font-style: normal; }\n\n@media #{$breakpoint-not-small} {\n .i-ns { font-style: italic; }\n .fs-normal-ns { font-style: normal; }\n}\n\n@media #{$breakpoint-medium} {\n .i-m { font-style: italic; }\n .fs-normal-m { font-style: normal; }\n}\n\n@media #{$breakpoint-large} {\n .i-l { font-style: italic; }\n .fs-normal-l { font-style: normal; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n FONT WEIGHT\n Docs: http://tachyons.io/docs/typography/font-weight/\n\n Base\n fw = font-weight\n\n Modifiers:\n 1 = literal value 100\n 2 = literal value 200\n 3 = literal value 300\n 4 = literal value 400\n 5 = literal value 500\n 6 = literal value 600\n 7 = literal value 700\n 8 = literal value 800\n 9 = literal value 900\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n.normal { font-weight: normal; }\n.b { font-weight: bold; }\n.fw1 { font-weight: 100; }\n.fw2 { font-weight: 200; }\n.fw3 { font-weight: 300; }\n.fw4 { font-weight: 400; }\n.fw5 { font-weight: 500; }\n.fw6 { font-weight: 600; }\n.fw7 { font-weight: 700; }\n.fw8 { font-weight: 800; }\n.fw9 { font-weight: 900; }\n\n\n@media #{$breakpoint-not-small} {\n .normal-ns { font-weight: normal; }\n .b-ns { font-weight: bold; }\n .fw1-ns { font-weight: 100; }\n .fw2-ns { font-weight: 200; }\n .fw3-ns { font-weight: 300; }\n .fw4-ns { font-weight: 400; 
}\n .fw5-ns { font-weight: 500; }\n .fw6-ns { font-weight: 600; }\n .fw7-ns { font-weight: 700; }\n .fw8-ns { font-weight: 800; }\n .fw9-ns { font-weight: 900; }\n}\n\n@media #{$breakpoint-medium} {\n .normal-m { font-weight: normal; }\n .b-m { font-weight: bold; }\n .fw1-m { font-weight: 100; }\n .fw2-m { font-weight: 200; }\n .fw3-m { font-weight: 300; }\n .fw4-m { font-weight: 400; }\n .fw5-m { font-weight: 500; }\n .fw6-m { font-weight: 600; }\n .fw7-m { font-weight: 700; }\n .fw8-m { font-weight: 800; }\n .fw9-m { font-weight: 900; }\n}\n\n@media #{$breakpoint-large} {\n .normal-l { font-weight: normal; }\n .b-l { font-weight: bold; }\n .fw1-l { font-weight: 100; }\n .fw2-l { font-weight: 200; }\n .fw3-l { font-weight: 300; }\n .fw4-l { font-weight: 400; }\n .fw5-l { font-weight: 500; }\n .fw6-l { font-weight: 600; }\n .fw7-l { font-weight: 700; }\n .fw8-l { font-weight: 800; }\n .fw9-l { font-weight: 900; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n FORMS\n \n*/\n\n.input-reset {\n -webkit-appearance: none;\n -moz-appearance: none;\n}\n\n.button-reset::-moz-focus-inner,\n.input-reset::-moz-focus-inner {\n border: 0;\n padding: 0;\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n HEIGHTS\n Docs: http://tachyons.io/docs/layout/heights/\n\n Base:\n h = height\n min-h = min-height\n min-vh = min-height vertical screen height\n vh = vertical screen height\n\n Modifiers\n 1 = 1st step in height scale\n 2 = 2nd step in height scale\n 3 = 3rd step in height scale\n 4 = 4th step in height scale\n 5 = 5th step in height scale\n\n -25 = literal value 25%\n -50 = literal value 50%\n -75 = literal value 75%\n -100 = literal value 100%\n\n -auto = string value of auto\n -inherit = string value of inherit\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n/* Height Scale */\n\n.h1 { height: $height-1; }\n.h2 { height: $height-2; }\n.h3 { height: $height-3; }\n.h4 { height: $height-4; }\n.h5 { height: $height-5; }\n\n/* Height Percentages - Based off of height of parent */\n\n.h-25 { height: 25%; }\n.h-50 { height: 50%; }\n.h-75 { height: 75%; }\n.h-100 { height: 100%; }\n\n.min-h-100 { min-height: 100%; }\n\n/* Screen Height Percentage */\n\n.vh-25 { height: 25vh; }\n.vh-50 { height: 50vh; }\n.vh-75 { height: 75vh; }\n.vh-100 { height: 100vh; }\n\n.min-vh-100 { min-height: 100vh; }\n\n\n/* String Properties */\n\n.h-auto { height: auto; }\n.h-inherit { height: inherit; }\n\n@media #{$breakpoint-not-small} {\n .h1-ns { height: $height-1; }\n .h2-ns { height: $height-2; }\n .h3-ns { height: $height-3; }\n .h4-ns { height: $height-4; }\n .h5-ns { height: $height-5; }\n .h-25-ns { height: 25%; }\n .h-50-ns { height: 50%; }\n .h-75-ns { height: 75%; }\n .h-100-ns { height: 100%; }\n .min-h-100-ns { min-height: 100%; }\n .vh-25-ns { height: 25vh; }\n .vh-50-ns { height: 50vh; }\n .vh-75-ns { height: 75vh; }\n .vh-100-ns { height: 100vh; }\n .min-vh-100-ns { min-height: 100vh; }\n .h-auto-ns { height: auto; }\n .h-inherit-ns { height: inherit; }\n}\n\n@media #{$breakpoint-medium} {\n .h1-m { height: $height-1; }\n .h2-m { height: $height-2; }\n .h3-m { height: $height-3; }\n .h4-m { height: $height-4; }\n .h5-m { height: $height-5; }\n .h-25-m { height: 25%; }\n .h-50-m { height: 50%; }\n .h-75-m { height: 75%; }\n .h-100-m { height: 100%; }\n .min-h-100-m { min-height: 100%; }\n .vh-25-m { height: 25vh; }\n .vh-50-m { height: 50vh; }\n .vh-75-m { height: 75vh; }\n .vh-100-m { height: 100vh; }\n 
.min-vh-100-m { min-height: 100vh; }\n .h-auto-m { height: auto; }\n .h-inherit-m { height: inherit; }\n}\n\n@media #{$breakpoint-large} {\n .h1-l { height: $height-1; }\n .h2-l { height: $height-2; }\n .h3-l { height: $height-3; }\n .h4-l { height: $height-4; }\n .h5-l { height: $height-5; }\n .h-25-l { height: 25%; }\n .h-50-l { height: 50%; }\n .h-75-l { height: 75%; }\n .h-100-l { height: 100%; }\n .min-h-100-l { min-height: 100%; }\n .vh-25-l { height: 25vh; }\n .vh-50-l { height: 50vh; }\n .vh-75-l { height: 75vh; }\n .vh-100-l { height: 100vh; }\n .min-vh-100-l { min-height: 100vh; }\n .h-auto-l { height: auto; }\n .h-inherit-l { height: inherit; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n LETTER SPACING\n Docs: http://tachyons.io/docs/typography/tracking/\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n.tracked { letter-spacing: $letter-spacing-1; }\n.tracked-tight { letter-spacing: $letter-spacing-tight; }\n.tracked-mega { letter-spacing: $letter-spacing-2; }\n\n@media #{$breakpoint-not-small} {\n .tracked-ns { letter-spacing: $letter-spacing-1; }\n .tracked-tight-ns { letter-spacing: $letter-spacing-tight; }\n .tracked-mega-ns { letter-spacing: $letter-spacing-2; }\n}\n\n@media #{$breakpoint-medium} {\n .tracked-m { letter-spacing: $letter-spacing-1; }\n .tracked-tight-m { letter-spacing: $letter-spacing-tight; }\n .tracked-mega-m { letter-spacing: $letter-spacing-2; }\n}\n\n@media #{$breakpoint-large} {\n .tracked-l { letter-spacing: $letter-spacing-1; }\n .tracked-tight-l { letter-spacing: $letter-spacing-tight; }\n .tracked-mega-l { letter-spacing: $letter-spacing-2; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n LINE HEIGHT / LEADING\n Docs: http://tachyons.io/docs/typography/line-height\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n .lh-solid { line-height: $line-height-solid; }\n .lh-title { line-height: $line-height-title; }\n .lh-copy { line-height: $line-height-copy; }\n\n@media #{$breakpoint-not-small} {\n .lh-solid-ns { line-height: $line-height-solid; }\n .lh-title-ns { line-height: $line-height-title; }\n .lh-copy-ns { line-height: $line-height-copy; }\n}\n\n@media #{$breakpoint-medium} {\n .lh-solid-m { line-height: $line-height-solid; }\n .lh-title-m { line-height: $line-height-title; }\n .lh-copy-m { line-height: $line-height-copy; }\n}\n\n@media #{$breakpoint-large} {\n .lh-solid-l { line-height: $line-height-solid; }\n .lh-title-l { line-height: $line-height-title; }\n .lh-copy-l { line-height: $line-height-copy; }\n}\n\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n LINKS\n Docs: http://tachyons.io/docs/elements/links/\n\n*/\n\n.link {\n text-decoration: none;\n transition: color .15s ease-in;\n}\n\n.link:link,\n.link:visited {\n transition: color .15s ease-in;\n}\n.link:hover {\n transition: color .15s ease-in;\n}\n.link:active {\n transition: color .15s ease-in;\n}\n.link:focus {\n transition: color .15s ease-in;\n outline: 1px dotted currentColor;\n}\n\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n LISTS\n http://tachyons.io/docs/elements/lists/\n\n*/\n\n.list { list-style-type: none; }\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n MAX WIDTHS\n Docs: http://tachyons.io/docs/layout/max-widths/\n\n Base:\n mw = max-width\n\n Modifiers\n 1 = 1st step in width scale\n 2 = 2nd step in width scale\n 3 = 3rd step in width scale\n 4 
= 4th step in width scale\n 5 = 5th step in width scale\n 6 = 6st step in width scale\n 7 = 7nd step in width scale\n 8 = 8rd step in width scale\n 9 = 9th step in width scale\n\n -100 = literal value 100%\n\n -none = string value none\n\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n/* Max Width Percentages */\n\n.mw-100 { max-width: 100%; }\n\n/* Max Width Scale */\n\n.mw1 { max-width: $max-width-1; }\n.mw2 { max-width: $max-width-2; }\n.mw3 { max-width: $max-width-3; }\n.mw4 { max-width: $max-width-4; }\n.mw5 { max-width: $max-width-5; }\n.mw6 { max-width: $max-width-6; }\n.mw7 { max-width: $max-width-7; }\n.mw8 { max-width: $max-width-8; }\n.mw9 { max-width: $max-width-9; }\n\n/* Max Width String Properties */\n\n.mw-none { max-width: none; }\n\n@media #{$breakpoint-not-small} {\n .mw-100-ns { max-width: 100%; }\n\n .mw1-ns { max-width: $max-width-1; }\n .mw2-ns { max-width: $max-width-2; }\n .mw3-ns { max-width: $max-width-3; }\n .mw4-ns { max-width: $max-width-4; }\n .mw5-ns { max-width: $max-width-5; }\n .mw6-ns { max-width: $max-width-6; }\n .mw7-ns { max-width: $max-width-7; }\n .mw8-ns { max-width: $max-width-8; }\n .mw9-ns { max-width: $max-width-9; }\n\n .mw-none-ns { max-width: none; }\n}\n\n@media #{$breakpoint-medium} {\n .mw-100-m { max-width: 100%; }\n\n .mw1-m { max-width: $max-width-1; }\n .mw2-m { max-width: $max-width-2; }\n .mw3-m { max-width: $max-width-3; }\n .mw4-m { max-width: $max-width-4; }\n .mw5-m { max-width: $max-width-5; }\n .mw6-m { max-width: $max-width-6; }\n .mw7-m { max-width: $max-width-7; }\n .mw8-m { max-width: $max-width-8; }\n .mw9-m { max-width: $max-width-9; }\n\n .mw-none-m { max-width: none; }\n}\n\n@media #{$breakpoint-large} {\n .mw-100-l { max-width: 100%; }\n\n .mw1-l { max-width: $max-width-1; }\n .mw2-l { max-width: $max-width-2; }\n .mw3-l { max-width: $max-width-3; }\n .mw4-l { max-width: $max-width-4; }\n .mw5-l { max-width: $max-width-5; }\n .mw6-l { max-width: $max-width-6; }\n .mw7-l { max-width: $max-width-7; }\n .mw8-l { max-width: $max-width-8; }\n .mw9-l { max-width: $max-width-9; }\n\n .mw-none-l { max-width: none; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n WIDTHS\n Docs: http://tachyons.io/docs/layout/widths/\n\n Base:\n w = width\n\n Modifiers\n 1 = 1st step in width scale\n 2 = 2nd step in width scale\n 3 = 3rd step in width scale\n 4 = 4th step in width scale\n 5 = 5th step in width scale\n\n -10 = literal value 10%\n -20 = literal value 20%\n -25 = literal value 25%\n -30 = literal value 30%\n -33 = literal value 33%\n -34 = literal value 34%\n -40 = literal value 40%\n -50 = literal value 50%\n -60 = literal value 60%\n -70 = literal value 70%\n -75 = literal value 75%\n -80 = literal value 80%\n -90 = literal value 90%\n -100 = literal value 100%\n\n -third = 100% / 3 (Not supported in opera mini or IE8)\n -two-thirds = 100% / 1.5 (Not supported in opera mini or IE8)\n -auto = string value auto\n\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n */\n\n/* Width Scale */\n\n.w1 { width: $width-1; }\n.w2 { width: $width-2; }\n.w3 { width: $width-3; }\n.w4 { width: $width-4; }\n.w5 { width: $width-5; }\n\n.w-10 { width: 10%; }\n.w-20 { width: 20%; }\n.w-25 { width: 25%; }\n.w-30 { width: 30%; }\n.w-33 { width: 33%; }\n.w-34 { width: 34%; }\n.w-40 { width: 40%; }\n.w-50 { width: 50%; }\n.w-60 { width: 60%; }\n.w-70 { width: 70%; }\n.w-75 { width: 75%; }\n.w-80 { width: 80%; }\n.w-90 { width: 90%; }\n.w-100 { width: 100%; 
}\n\n.w-third { width: (100% / 3); }\n.w-two-thirds { width: (100% / 1.5); }\n.w-auto { width: auto; }\n\n@media #{$breakpoint-not-small} {\n .w1-ns { width: $width-1; }\n .w2-ns { width: $width-2; }\n .w3-ns { width: $width-3; }\n .w4-ns { width: $width-4; }\n .w5-ns { width: $width-5; }\n .w-10-ns { width: 10%; }\n .w-20-ns { width: 20%; }\n .w-25-ns { width: 25%; }\n .w-30-ns { width: 30%; }\n .w-33-ns { width: 33%; }\n .w-34-ns { width: 34%; }\n .w-40-ns { width: 40%; }\n .w-50-ns { width: 50%; }\n .w-60-ns { width: 60%; }\n .w-70-ns { width: 70%; }\n .w-75-ns { width: 75%; }\n .w-80-ns { width: 80%; }\n .w-90-ns { width: 90%; }\n .w-100-ns { width: 100%; }\n .w-third-ns { width: (100% / 3); }\n .w-two-thirds-ns { width: (100% / 1.5); }\n .w-auto-ns { width: auto; }\n}\n\n@media #{$breakpoint-medium} {\n .w1-m { width: $width-1; }\n .w2-m { width: $width-2; }\n .w3-m { width: $width-3; }\n .w4-m { width: $width-4; }\n .w5-m { width: $width-5; }\n .w-10-m { width: 10%; }\n .w-20-m { width: 20%; }\n .w-25-m { width: 25%; }\n .w-30-m { width: 30%; }\n .w-33-m { width: 33%; }\n .w-34-m { width: 34%; }\n .w-40-m { width: 40%; }\n .w-50-m { width: 50%; }\n .w-60-m { width: 60%; }\n .w-70-m { width: 70%; }\n .w-75-m { width: 75%; }\n .w-80-m { width: 80%; }\n .w-90-m { width: 90%; }\n .w-100-m { width: 100%; }\n .w-third-m { width: (100% / 3); }\n .w-two-thirds-m { width: (100% / 1.5); }\n .w-auto-m { width: auto; }\n}\n\n@media #{$breakpoint-large} {\n .w1-l { width: $width-1; }\n .w2-l { width: $width-2; }\n .w3-l { width: $width-3; }\n .w4-l { width: $width-4; }\n .w5-l { width: $width-5; }\n .w-10-l { width: 10%; }\n .w-20-l { width: 20%; }\n .w-25-l { width: 25%; }\n .w-30-l { width: 30%; }\n .w-33-l { width: 33%; }\n .w-34-l { width: 34%; }\n .w-40-l { width: 40%; }\n .w-50-l { width: 50%; }\n .w-60-l { width: 60%; }\n .w-70-l { width: 70%; }\n .w-75-l { width: 75%; }\n .w-80-l { width: 80%; }\n .w-90-l { width: 90%; }\n .w-100-l { width: 100%; }\n .w-third-l { width: (100% / 3); }\n .w-two-thirds-l { width: (100% / 1.5); }\n .w-auto-l { width: auto; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n OVERFLOW\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n */\n\n.overflow-visible { overflow: visible; }\n.overflow-hidden { overflow: hidden; }\n.overflow-scroll { overflow: scroll; }\n.overflow-auto { overflow: auto; }\n\n.overflow-x-visible { overflow-x: visible; }\n.overflow-x-hidden { overflow-x: hidden; }\n.overflow-x-scroll { overflow-x: scroll; }\n.overflow-x-auto { overflow-x: auto; }\n\n.overflow-y-visible { overflow-y: visible; }\n.overflow-y-hidden { overflow-y: hidden; }\n.overflow-y-scroll { overflow-y: scroll; }\n.overflow-y-auto { overflow-y: auto; }\n\n@media #{$breakpoint-not-small} {\n .overflow-visible-ns { overflow: visible; }\n .overflow-hidden-ns { overflow: hidden; }\n .overflow-scroll-ns { overflow: scroll; }\n .overflow-auto-ns { overflow: auto; }\n .overflow-x-visible-ns { overflow-x: visible; }\n .overflow-x-hidden-ns { overflow-x: hidden; }\n .overflow-x-scroll-ns { overflow-x: scroll; }\n .overflow-x-auto-ns { overflow-x: auto; }\n\n .overflow-y-visible-ns { overflow-y: visible; }\n .overflow-y-hidden-ns { overflow-y: hidden; }\n .overflow-y-scroll-ns { overflow-y: scroll; }\n .overflow-y-auto-ns { overflow-y: auto; }\n}\n\n@media #{$breakpoint-medium} {\n .overflow-visible-m { overflow: visible; }\n .overflow-hidden-m { overflow: hidden; }\n .overflow-scroll-m { overflow: scroll; }\n .overflow-auto-m 
{ overflow: auto; }\n\n .overflow-x-visible-m { overflow-x: visible; }\n .overflow-x-hidden-m { overflow-x: hidden; }\n .overflow-x-scroll-m { overflow-x: scroll; }\n .overflow-x-auto-m { overflow-x: auto; }\n\n .overflow-y-visible-m { overflow-y: visible; }\n .overflow-y-hidden-m { overflow-y: hidden; }\n .overflow-y-scroll-m { overflow-y: scroll; }\n .overflow-y-auto-m { overflow-y: auto; }\n}\n\n@media #{$breakpoint-large} {\n .overflow-visible-l { overflow: visible; }\n .overflow-hidden-l { overflow: hidden; }\n .overflow-scroll-l { overflow: scroll; }\n .overflow-auto-l { overflow: auto; }\n\n .overflow-x-visible-l { overflow-x: visible; }\n .overflow-x-hidden-l { overflow-x: hidden; }\n .overflow-x-scroll-l { overflow-x: scroll; }\n .overflow-x-auto-l { overflow-x: auto; }\n\n .overflow-y-visible-l { overflow-y: visible; }\n .overflow-y-hidden-l { overflow-y: hidden; }\n .overflow-y-scroll-l { overflow-y: scroll; }\n .overflow-y-auto-l { overflow-y: auto; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n POSITIONING\n Docs: http://tachyons.io/docs/layout/position/\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n.static { position: static; }\n.relative { position: relative; }\n.absolute { position: absolute; }\n.fixed { position: fixed; }\n\n@media #{$breakpoint-not-small} {\n .static-ns { position: static; }\n .relative-ns { position: relative; }\n .absolute-ns { position: absolute; }\n .fixed-ns { position: fixed; }\n}\n\n@media #{$breakpoint-medium} {\n .static-m { position: static; }\n .relative-m { position: relative; }\n .absolute-m { position: absolute; }\n .fixed-m { position: fixed; }\n}\n\n@media #{$breakpoint-large} {\n .static-l { position: static; }\n .relative-l { position: relative; }\n .absolute-l { position: absolute; }\n .fixed-l { position: fixed; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n OPACITY\n Docs: http://tachyons.io/docs/themes/opacity/\n\n*/\n\n.o-100 { opacity: 1; }\n.o-90 { opacity: .9; }\n.o-80 { opacity: .8; }\n.o-70 { opacity: .7; }\n.o-60 { opacity: .6; }\n.o-50 { opacity: .5; }\n.o-40 { opacity: .4; }\n.o-30 { opacity: .3; }\n.o-20 { opacity: .2; }\n.o-10 { opacity: .1; }\n.o-05 { opacity: .05; }\n.o-025 { opacity: .025; }\n.o-0 { opacity: 0; }\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n ROTATIONS\n\n*/\n\n.rotate-45 { transform: rotate(45deg); }\n.rotate-90 { transform: rotate(90deg); }\n.rotate-135 { transform: rotate(135deg); }\n.rotate-180 { transform: rotate(180deg); }\n.rotate-225 { transform: rotate(225deg); }\n.rotate-270 { transform: rotate(270deg); }\n.rotate-315 { transform: rotate(315deg); }\n\n@media #{$breakpoint-not-small}{\n .rotate-45-ns { transform: rotate(45deg); }\n .rotate-90-ns { transform: rotate(90deg); }\n .rotate-135-ns { transform: rotate(135deg); }\n .rotate-180-ns { transform: rotate(180deg); }\n .rotate-225-ns { transform: rotate(225deg); }\n .rotate-270-ns { transform: rotate(270deg); }\n .rotate-315-ns { transform: rotate(315deg); }\n}\n\n@media #{$breakpoint-medium}{\n .rotate-45-m { transform: rotate(45deg); }\n .rotate-90-m { transform: rotate(90deg); }\n .rotate-135-m { transform: rotate(135deg); }\n .rotate-180-m { transform: rotate(180deg); }\n .rotate-225-m { transform: rotate(225deg); }\n .rotate-270-m { transform: rotate(270deg); }\n .rotate-315-m { transform: rotate(315deg); }\n}\n\n@media #{$breakpoint-large}{\n .rotate-45-l { transform: rotate(45deg); }\n .rotate-90-l 
{ transform: rotate(90deg); }\n .rotate-135-l { transform: rotate(135deg); }\n .rotate-180-l { transform: rotate(180deg); }\n .rotate-225-l { transform: rotate(225deg); }\n .rotate-270-l { transform: rotate(270deg); }\n .rotate-315-l { transform: rotate(315deg); }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n SKINS\n Docs: http://tachyons.io/docs/themes/skins/\n\n Classes for setting foreground and background colors on elements.\n If you haven't declared a border color, but set border on an element, it will\n be set to the current text color.\n\n*/\n\n/* Text colors */\n\n.black-90 { color: $black-90; }\n.black-80 { color: $black-80; }\n.black-70 { color: $black-70; }\n.black-60 { color: $black-60; }\n.black-50 { color: $black-50; }\n.black-40 { color: $black-40; }\n.black-30 { color: $black-30; }\n.black-20 { color: $black-20; }\n.black-10 { color: $black-10; }\n.black-05 { color: $black-05; }\n\n.white-90 { color: $white-90; }\n.white-80 { color: $white-80; }\n.white-70 { color: $white-70; }\n.white-60 { color: $white-60; }\n.white-50 { color: $white-50; }\n.white-40 { color: $white-40; }\n.white-30 { color: $white-30; }\n.white-20 { color: $white-20; }\n.white-10 { color: $white-10; }\n\n.black { color: $black; }\n.near-black { color: $near-black; }\n.dark-gray { color: $dark-gray; }\n.mid-gray { color: $mid-gray; }\n.gray { color: $gray; }\n.silver { color: $silver; }\n.light-silver { color: $light-silver; }\n.moon-gray { color: $moon-gray; }\n.light-gray { color: $light-gray; }\n.near-white { color: $near-white; }\n.white { color: $white; }\n\n.dark-red { color: $dark-red; }\n.red { color: $red; }\n.light-red { color: $light-red; }\n.orange { color: $orange; }\n.gold { color: $gold; }\n.yellow { color: $yellow; }\n.light-yellow { color: $light-yellow; }\n.purple { color: $purple; }\n.light-purple { color: $light-purple; }\n.dark-pink { color: $dark-pink; }\n.hot-pink { color: $hot-pink; }\n.pink { color: $pink; }\n.light-pink { color: $light-pink; }\n.dark-green { color: $dark-green; }\n.green { color: $green; }\n.light-green { color: $light-green; }\n.navy { color: $navy; }\n.dark-blue { color: $dark-blue; }\n.blue { color: $blue; }\n.light-blue { color: $light-blue; }\n.lightest-blue { color: $lightest-blue; }\n.washed-blue { color: $washed-blue; }\n.washed-green { color: $washed-green; }\n.washed-yellow { color: $washed-yellow; }\n.washed-red { color: $washed-red; }\n.color-inherit { color: inherit; }\n\n.bg-black-90 { background-color: $black-90; }\n.bg-black-80 { background-color: $black-80; }\n.bg-black-70 { background-color: $black-70; }\n.bg-black-60 { background-color: $black-60; }\n.bg-black-50 { background-color: $black-50; }\n.bg-black-40 { background-color: $black-40; }\n.bg-black-30 { background-color: $black-30; }\n.bg-black-20 { background-color: $black-20; }\n.bg-black-10 { background-color: $black-10; }\n.bg-black-05 { background-color: $black-05; }\n.bg-white-90 { background-color: $white-90; }\n.bg-white-80 { background-color: $white-80; }\n.bg-white-70 { background-color: $white-70; }\n.bg-white-60 { background-color: $white-60; }\n.bg-white-50 { background-color: $white-50; }\n.bg-white-40 { background-color: $white-40; }\n.bg-white-30 { background-color: $white-30; }\n.bg-white-20 { background-color: $white-20; }\n.bg-white-10 { background-color: $white-10; }\n\n\n\n/* Background colors */\n\n.bg-black { background-color: $black; }\n.bg-near-black { background-color: $near-black; }\n.bg-dark-gray { background-color: $dark-gray; 
}\n.bg-mid-gray { background-color: $mid-gray; }\n.bg-gray { background-color: $gray; }\n.bg-silver { background-color: $silver; }\n.bg-light-silver { background-color: $light-silver; }\n.bg-moon-gray { background-color: $moon-gray; }\n.bg-light-gray { background-color: $light-gray; }\n.bg-near-white { background-color: $near-white; }\n.bg-white { background-color: $white; }\n.bg-transparent { background-color: $transparent; }\n\n.bg-dark-red { background-color: $dark-red; }\n.bg-red { background-color: $red; }\n.bg-light-red { background-color: $light-red; }\n.bg-orange { background-color: $orange; }\n.bg-gold { background-color: $gold; }\n.bg-yellow { background-color: $yellow; }\n.bg-light-yellow { background-color: $light-yellow; }\n.bg-purple { background-color: $purple; }\n.bg-light-purple { background-color: $light-purple; }\n.bg-dark-pink { background-color: $dark-pink; }\n.bg-hot-pink { background-color: $hot-pink; }\n.bg-pink { background-color: $pink; }\n.bg-light-pink { background-color: $light-pink; }\n.bg-dark-green { background-color: $dark-green; }\n.bg-green { background-color: $green; }\n.bg-light-green { background-color: $light-green; }\n.bg-navy { background-color: $navy; }\n.bg-dark-blue { background-color: $dark-blue; }\n.bg-blue { background-color: $blue; }\n.bg-light-blue { background-color: $light-blue; }\n.bg-lightest-blue { background-color: $lightest-blue; }\n.bg-washed-blue { background-color: $washed-blue; }\n.bg-washed-green { background-color: $washed-green; }\n.bg-washed-yellow { background-color: $washed-yellow; }\n.bg-washed-red { background-color: $washed-red; }\n.bg-inherit { background-color: inherit; }\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n SKINS:PSEUDO\n\n Customize the color of an element when\n it is focused or hovered over.\n\n */\n\n.hover-black:hover,\n.hover-black:focus { color: $black; }\n.hover-near-black:hover,\n.hover-near-black:focus { color: $near-black; }\n.hover-dark-gray:hover,\n.hover-dark-gray:focus { color: $dark-gray; }\n.hover-mid-gray:hover,\n.hover-mid-gray:focus { color: $mid-gray; }\n.hover-gray:hover,\n.hover-gray:focus { color: $gray; }\n.hover-silver:hover,\n.hover-silver:focus { color: $silver; }\n.hover-light-silver:hover,\n.hover-light-silver:focus { color: $light-silver; }\n.hover-moon-gray:hover,\n.hover-moon-gray:focus { color: $moon-gray; }\n.hover-light-gray:hover,\n.hover-light-gray:focus { color: $light-gray; }\n.hover-near-white:hover,\n.hover-near-white:focus { color: $near-white; }\n.hover-white:hover,\n.hover-white:focus { color: $white; }\n\n.hover-black-90:hover,\n.hover-black-90:focus { color: $black-90; }\n.hover-black-80:hover,\n.hover-black-80:focus { color: $black-80; }\n.hover-black-70:hover,\n.hover-black-70:focus { color: $black-70; }\n.hover-black-60:hover,\n.hover-black-60:focus { color: $black-60; }\n.hover-black-50:hover,\n.hover-black-50:focus { color: $black-50; }\n.hover-black-40:hover,\n.hover-black-40:focus { color: $black-40; }\n.hover-black-30:hover,\n.hover-black-30:focus { color: $black-30; }\n.hover-black-20:hover,\n.hover-black-20:focus { color: $black-20; }\n.hover-black-10:hover,\n.hover-black-10:focus { color: $black-10; }\n.hover-white-90:hover,\n.hover-white-90:focus { color: $white-90; }\n.hover-white-80:hover,\n.hover-white-80:focus { color: $white-80; }\n.hover-white-70:hover,\n.hover-white-70:focus { color: $white-70; }\n.hover-white-60:hover,\n.hover-white-60:focus { color: $white-60; }\n.hover-white-50:hover,\n.hover-white-50:focus { 
color: $white-50; }\n.hover-white-40:hover,\n.hover-white-40:focus { color: $white-40; }\n.hover-white-30:hover,\n.hover-white-30:focus { color: $white-30; }\n.hover-white-20:hover,\n.hover-white-20:focus { color: $white-20; }\n.hover-white-10:hover,\n.hover-white-10:focus { color: $white-10; }\n.hover-inherit:hover,\n.hover-inherit:focus { color: inherit; }\n\n.hover-bg-black:hover,\n.hover-bg-black:focus { background-color: $black; }\n.hover-bg-near-black:hover,\n.hover-bg-near-black:focus { background-color: $near-black; }\n.hover-bg-dark-gray:hover,\n.hover-bg-dark-gray:focus { background-color: $dark-gray; }\n.hover-bg-mid-gray:hover,\n.hover-bg-mid-gray:focus { background-color: $mid-gray; }\n.hover-bg-gray:hover,\n.hover-bg-gray:focus { background-color: $gray; }\n.hover-bg-silver:hover,\n.hover-bg-silver:focus { background-color: $silver; }\n.hover-bg-light-silver:hover,\n.hover-bg-light-silver:focus { background-color: $light-silver; }\n.hover-bg-moon-gray:hover,\n.hover-bg-moon-gray:focus { background-color: $moon-gray; }\n.hover-bg-light-gray:hover,\n.hover-bg-light-gray:focus { background-color: $light-gray; }\n.hover-bg-near-white:hover,\n.hover-bg-near-white:focus { background-color: $near-white; }\n.hover-bg-white:hover,\n.hover-bg-white:focus { background-color: $white; }\n.hover-bg-transparent:hover,\n.hover-bg-transparent:focus { background-color: $transparent; }\n\n.hover-bg-black-90:hover,\n.hover-bg-black-90:focus { background-color: $black-90; }\n.hover-bg-black-80:hover,\n.hover-bg-black-80:focus { background-color: $black-80; }\n.hover-bg-black-70:hover,\n.hover-bg-black-70:focus { background-color: $black-70; }\n.hover-bg-black-60:hover,\n.hover-bg-black-60:focus { background-color: $black-60; }\n.hover-bg-black-50:hover,\n.hover-bg-black-50:focus { background-color: $black-50; }\n.hover-bg-black-40:hover,\n.hover-bg-black-40:focus { background-color: $black-40; }\n.hover-bg-black-30:hover,\n.hover-bg-black-30:focus { background-color: $black-30; }\n.hover-bg-black-20:hover,\n.hover-bg-black-20:focus { background-color: $black-20; }\n.hover-bg-black-10:hover,\n.hover-bg-black-10:focus { background-color: $black-10; }\n.hover-bg-white-90:hover,\n.hover-bg-white-90:focus { background-color: $white-90; }\n.hover-bg-white-80:hover,\n.hover-bg-white-80:focus { background-color: $white-80; }\n.hover-bg-white-70:hover,\n.hover-bg-white-70:focus { background-color: $white-70; }\n.hover-bg-white-60:hover,\n.hover-bg-white-60:focus { background-color: $white-60; }\n.hover-bg-white-50:hover,\n.hover-bg-white-50:focus { background-color: $white-50; }\n.hover-bg-white-40:hover,\n.hover-bg-white-40:focus { background-color: $white-40; }\n.hover-bg-white-30:hover,\n.hover-bg-white-30:focus { background-color: $white-30; }\n.hover-bg-white-20:hover,\n.hover-bg-white-20:focus { background-color: $white-20; }\n.hover-bg-white-10:hover,\n.hover-bg-white-10:focus { background-color: $white-10; }\n\n.hover-dark-red:hover,\n.hover-dark-red:focus { color: $dark-red; }\n.hover-red:hover,\n.hover-red:focus { color: $red; }\n.hover-light-red:hover,\n.hover-light-red:focus { color: $light-red; }\n.hover-orange:hover,\n.hover-orange:focus { color: $orange; }\n.hover-gold:hover,\n.hover-gold:focus { color: $gold; }\n.hover-yellow:hover,\n.hover-yellow:focus { color: $yellow; }\n.hover-light-yellow:hover,\n.hover-light-yellow:focus { color: $light-yellow; }\n.hover-purple:hover,\n.hover-purple:focus { color: $purple; }\n.hover-light-purple:hover,\n.hover-light-purple:focus { color: 
$light-purple; }\n.hover-dark-pink:hover,\n.hover-dark-pink:focus { color: $dark-pink; }\n.hover-hot-pink:hover,\n.hover-hot-pink:focus { color: $hot-pink; }\n.hover-pink:hover,\n.hover-pink:focus { color: $pink; }\n.hover-light-pink:hover,\n.hover-light-pink:focus { color: $light-pink; }\n.hover-dark-green:hover,\n.hover-dark-green:focus { color: $dark-green; }\n.hover-green:hover,\n.hover-green:focus { color: $green; }\n.hover-light-green:hover,\n.hover-light-green:focus { color: $light-green; }\n.hover-navy:hover,\n.hover-navy:focus { color: $navy; }\n.hover-dark-blue:hover,\n.hover-dark-blue:focus { color: $dark-blue; }\n.hover-blue:hover,\n.hover-blue:focus { color: $blue; }\n.hover-light-blue:hover,\n.hover-light-blue:focus { color: $light-blue; }\n.hover-lightest-blue:hover,\n.hover-lightest-blue:focus { color: $lightest-blue; }\n.hover-washed-blue:hover,\n.hover-washed-blue:focus { color: $washed-blue; }\n.hover-washed-green:hover,\n.hover-washed-green:focus { color: $washed-green; }\n.hover-washed-yellow:hover,\n.hover-washed-yellow:focus { color: $washed-yellow; }\n.hover-washed-red:hover,\n.hover-washed-red:focus { color: $washed-red; }\n\n.hover-bg-dark-red:hover,\n.hover-bg-dark-red:focus { background-color: $dark-red; }\n.hover-bg-red:hover,\n.hover-bg-red:focus { background-color: $red; }\n.hover-bg-light-red:hover,\n.hover-bg-light-red:focus { background-color: $light-red; }\n.hover-bg-orange:hover,\n.hover-bg-orange:focus { background-color: $orange; }\n.hover-bg-gold:hover,\n.hover-bg-gold:focus { background-color: $gold; }\n.hover-bg-yellow:hover,\n.hover-bg-yellow:focus { background-color: $yellow; }\n.hover-bg-light-yellow:hover,\n.hover-bg-light-yellow:focus { background-color: $light-yellow; }\n.hover-bg-purple:hover,\n.hover-bg-purple:focus { background-color: $purple; }\n.hover-bg-light-purple:hover,\n.hover-bg-light-purple:focus { background-color: $light-purple; }\n.hover-bg-dark-pink:hover,\n.hover-bg-dark-pink:focus { background-color: $dark-pink; }\n.hover-bg-hot-pink:hover,\n.hover-bg-hot-pink:focus { background-color: $hot-pink; }\n.hover-bg-pink:hover,\n.hover-bg-pink:focus { background-color: $pink; }\n.hover-bg-light-pink:hover,\n.hover-bg-light-pink:focus { background-color: $light-pink; }\n.hover-bg-dark-green:hover,\n.hover-bg-dark-green:focus { background-color: $dark-green; }\n.hover-bg-green:hover,\n.hover-bg-green:focus { background-color: $green; }\n.hover-bg-light-green:hover,\n.hover-bg-light-green:focus { background-color: $light-green; }\n.hover-bg-navy:hover,\n.hover-bg-navy:focus { background-color: $navy; }\n.hover-bg-dark-blue:hover,\n.hover-bg-dark-blue:focus { background-color: $dark-blue; }\n.hover-bg-blue:hover,\n.hover-bg-blue:focus { background-color: $blue; }\n.hover-bg-light-blue:hover,\n.hover-bg-light-blue:focus { background-color: $light-blue; }\n.hover-bg-lightest-blue:hover,\n.hover-bg-lightest-blue:focus { background-color: $lightest-blue; }\n.hover-bg-washed-blue:hover,\n.hover-bg-washed-blue:focus { background-color: $washed-blue; }\n.hover-bg-washed-green:hover,\n.hover-bg-washed-green:focus { background-color: $washed-green; }\n.hover-bg-washed-yellow:hover,\n.hover-bg-washed-yellow:focus { background-color: $washed-yellow; }\n.hover-bg-washed-red:hover,\n.hover-bg-washed-red:focus { background-color: $washed-red; }\n.hover-bg-inherit:hover,\n.hover-bg-inherit:focus { background-color: inherit; }\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/* Variables */\n\n/*\n SPACING\n Docs: 
http://tachyons.io/docs/layout/spacing/\n\n An eight step powers of two scale ranging from 0 to 16rem.\n\n Base:\n p = padding\n m = margin\n\n Modifiers:\n a = all\n h = horizontal\n v = vertical\n t = top\n r = right\n b = bottom\n l = left\n\n 0 = none\n 1 = 1st step in spacing scale\n 2 = 2nd step in spacing scale\n 3 = 3rd step in spacing scale\n 4 = 4th step in spacing scale\n 5 = 5th step in spacing scale\n 6 = 6th step in spacing scale\n 7 = 7th step in spacing scale\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n\n.pa0 { padding: $spacing-none; }\n.pa1 { padding: $spacing-extra-small; }\n.pa2 { padding: $spacing-small; }\n.pa3 { padding: $spacing-medium; }\n.pa4 { padding: $spacing-large; }\n.pa5 { padding: $spacing-extra-large; }\n.pa6 { padding: $spacing-extra-extra-large; }\n.pa7 { padding: $spacing-extra-extra-extra-large; }\n\n.pl0 { padding-left: $spacing-none; }\n.pl1 { padding-left: $spacing-extra-small; }\n.pl2 { padding-left: $spacing-small; }\n.pl3 { padding-left: $spacing-medium; }\n.pl4 { padding-left: $spacing-large; }\n.pl5 { padding-left: $spacing-extra-large; }\n.pl6 { padding-left: $spacing-extra-extra-large; }\n.pl7 { padding-left: $spacing-extra-extra-extra-large; }\n\n.pr0 { padding-right: $spacing-none; }\n.pr1 { padding-right: $spacing-extra-small; }\n.pr2 { padding-right: $spacing-small; }\n.pr3 { padding-right: $spacing-medium; }\n.pr4 { padding-right: $spacing-large; }\n.pr5 { padding-right: $spacing-extra-large; }\n.pr6 { padding-right: $spacing-extra-extra-large; }\n.pr7 { padding-right: $spacing-extra-extra-extra-large; }\n\n.pb0 { padding-bottom: $spacing-none; }\n.pb1 { padding-bottom: $spacing-extra-small; }\n.pb2 { padding-bottom: $spacing-small; }\n.pb3 { padding-bottom: $spacing-medium; }\n.pb4 { padding-bottom: $spacing-large; }\n.pb5 { padding-bottom: $spacing-extra-large; }\n.pb6 { padding-bottom: $spacing-extra-extra-large; }\n.pb7 { padding-bottom: $spacing-extra-extra-extra-large; }\n\n.pt0 { padding-top: $spacing-none; }\n.pt1 { padding-top: $spacing-extra-small; }\n.pt2 { padding-top: $spacing-small; }\n.pt3 { padding-top: $spacing-medium; }\n.pt4 { padding-top: $spacing-large; }\n.pt5 { padding-top: $spacing-extra-large; }\n.pt6 { padding-top: $spacing-extra-extra-large; }\n.pt7 { padding-top: $spacing-extra-extra-extra-large; }\n\n.pv0 {\n padding-top: $spacing-none;\n padding-bottom: $spacing-none;\n}\n.pv1 {\n padding-top: $spacing-extra-small;\n padding-bottom: $spacing-extra-small;\n}\n.pv2 {\n padding-top: $spacing-small;\n padding-bottom: $spacing-small;\n}\n.pv3 {\n padding-top: $spacing-medium;\n padding-bottom: $spacing-medium;\n}\n.pv4 {\n padding-top: $spacing-large;\n padding-bottom: $spacing-large;\n}\n.pv5 {\n padding-top: $spacing-extra-large;\n padding-bottom: $spacing-extra-large;\n}\n.pv6 {\n padding-top: $spacing-extra-extra-large;\n padding-bottom: $spacing-extra-extra-large;\n}\n\n.pv7 {\n padding-top: $spacing-extra-extra-extra-large;\n padding-bottom: $spacing-extra-extra-extra-large;\n}\n\n.ph0 {\n padding-left: $spacing-none;\n padding-right: $spacing-none;\n}\n\n.ph1 {\n padding-left: $spacing-extra-small;\n padding-right: $spacing-extra-small;\n}\n\n.ph2 {\n padding-left: $spacing-small;\n padding-right: $spacing-small;\n}\n\n.ph3 {\n padding-left: $spacing-medium;\n padding-right: $spacing-medium;\n}\n\n.ph4 {\n padding-left: $spacing-large;\n padding-right: $spacing-large;\n}\n\n.ph5 {\n padding-left: $spacing-extra-large;\n padding-right: $spacing-extra-large;\n}\n\n.ph6 
{\n padding-left: $spacing-extra-extra-large;\n padding-right: $spacing-extra-extra-large;\n}\n\n.ph7 {\n padding-left: $spacing-extra-extra-extra-large;\n padding-right: $spacing-extra-extra-extra-large;\n}\n\n.ma0 { margin: $spacing-none; }\n.ma1 { margin: $spacing-extra-small; }\n.ma2 { margin: $spacing-small; }\n.ma3 { margin: $spacing-medium; }\n.ma4 { margin: $spacing-large; }\n.ma5 { margin: $spacing-extra-large; }\n.ma6 { margin: $spacing-extra-extra-large; }\n.ma7 { margin: $spacing-extra-extra-extra-large; }\n\n.ml0 { margin-left: $spacing-none; }\n.ml1 { margin-left: $spacing-extra-small; }\n.ml2 { margin-left: $spacing-small; }\n.ml3 { margin-left: $spacing-medium; }\n.ml4 { margin-left: $spacing-large; }\n.ml5 { margin-left: $spacing-extra-large; }\n.ml6 { margin-left: $spacing-extra-extra-large; }\n.ml7 { margin-left: $spacing-extra-extra-extra-large; }\n\n.mr0 { margin-right: $spacing-none; }\n.mr1 { margin-right: $spacing-extra-small; }\n.mr2 { margin-right: $spacing-small; }\n.mr3 { margin-right: $spacing-medium; }\n.mr4 { margin-right: $spacing-large; }\n.mr5 { margin-right: $spacing-extra-large; }\n.mr6 { margin-right: $spacing-extra-extra-large; }\n.mr7 { margin-right: $spacing-extra-extra-extra-large; }\n\n.mb0 { margin-bottom: $spacing-none; }\n.mb1 { margin-bottom: $spacing-extra-small; }\n.mb2 { margin-bottom: $spacing-small; }\n.mb3 { margin-bottom: $spacing-medium; }\n.mb4 { margin-bottom: $spacing-large; }\n.mb5 { margin-bottom: $spacing-extra-large; }\n.mb6 { margin-bottom: $spacing-extra-extra-large; }\n.mb7 { margin-bottom: $spacing-extra-extra-extra-large; }\n\n.mt0 { margin-top: $spacing-none; }\n.mt1 { margin-top: $spacing-extra-small; }\n.mt2 { margin-top: $spacing-small; }\n.mt3 { margin-top: $spacing-medium; }\n.mt4 { margin-top: $spacing-large; }\n.mt5 { margin-top: $spacing-extra-large; }\n.mt6 { margin-top: $spacing-extra-extra-large; }\n.mt7 { margin-top: $spacing-extra-extra-extra-large; }\n\n.mv0 {\n margin-top: $spacing-none;\n margin-bottom: $spacing-none;\n}\n.mv1 {\n margin-top: $spacing-extra-small;\n margin-bottom: $spacing-extra-small;\n}\n.mv2 {\n margin-top: $spacing-small;\n margin-bottom: $spacing-small;\n}\n.mv3 {\n margin-top: $spacing-medium;\n margin-bottom: $spacing-medium;\n}\n.mv4 {\n margin-top: $spacing-large;\n margin-bottom: $spacing-large;\n}\n.mv5 {\n margin-top: $spacing-extra-large;\n margin-bottom: $spacing-extra-large;\n}\n.mv6 {\n margin-top: $spacing-extra-extra-large;\n margin-bottom: $spacing-extra-extra-large;\n}\n.mv7 {\n margin-top: $spacing-extra-extra-extra-large;\n margin-bottom: $spacing-extra-extra-extra-large;\n}\n\n.mh0 {\n margin-left: $spacing-none;\n margin-right: $spacing-none;\n}\n.mh1 {\n margin-left: $spacing-extra-small;\n margin-right: $spacing-extra-small;\n}\n.mh2 {\n margin-left: $spacing-small;\n margin-right: $spacing-small;\n}\n.mh3 {\n margin-left: $spacing-medium;\n margin-right: $spacing-medium;\n}\n.mh4 {\n margin-left: $spacing-large;\n margin-right: $spacing-large;\n}\n.mh5 {\n margin-left: $spacing-extra-large;\n margin-right: $spacing-extra-large;\n}\n.mh6 {\n margin-left: $spacing-extra-extra-large;\n margin-right: $spacing-extra-extra-large;\n}\n.mh7 {\n margin-left: $spacing-extra-extra-extra-large;\n margin-right: $spacing-extra-extra-extra-large;\n}\n\n@media #{$breakpoint-not-small} {\n .pa0-ns { padding: $spacing-none; }\n .pa1-ns { padding: $spacing-extra-small; }\n .pa2-ns { padding: $spacing-small; }\n .pa3-ns { padding: $spacing-medium; }\n .pa4-ns { padding: 
$spacing-large; }\n .pa5-ns { padding: $spacing-extra-large; }\n .pa6-ns { padding: $spacing-extra-extra-large; }\n .pa7-ns { padding: $spacing-extra-extra-extra-large; }\n\n .pl0-ns { padding-left: $spacing-none; }\n .pl1-ns { padding-left: $spacing-extra-small; }\n .pl2-ns { padding-left: $spacing-small; }\n .pl3-ns { padding-left: $spacing-medium; }\n .pl4-ns { padding-left: $spacing-large; }\n .pl5-ns { padding-left: $spacing-extra-large; }\n .pl6-ns { padding-left: $spacing-extra-extra-large; }\n .pl7-ns { padding-left: $spacing-extra-extra-extra-large; }\n\n .pr0-ns { padding-right: $spacing-none; }\n .pr1-ns { padding-right: $spacing-extra-small; }\n .pr2-ns { padding-right: $spacing-small; }\n .pr3-ns { padding-right: $spacing-medium; }\n .pr4-ns { padding-right: $spacing-large; }\n .pr5-ns { padding-right: $spacing-extra-large; }\n .pr6-ns { padding-right: $spacing-extra-extra-large; }\n .pr7-ns { padding-right: $spacing-extra-extra-extra-large; }\n\n .pb0-ns { padding-bottom: $spacing-none; }\n .pb1-ns { padding-bottom: $spacing-extra-small; }\n .pb2-ns { padding-bottom: $spacing-small; }\n .pb3-ns { padding-bottom: $spacing-medium; }\n .pb4-ns { padding-bottom: $spacing-large; }\n .pb5-ns { padding-bottom: $spacing-extra-large; }\n .pb6-ns { padding-bottom: $spacing-extra-extra-large; }\n .pb7-ns { padding-bottom: $spacing-extra-extra-extra-large; }\n\n .pt0-ns { padding-top: $spacing-none; }\n .pt1-ns { padding-top: $spacing-extra-small; }\n .pt2-ns { padding-top: $spacing-small; }\n .pt3-ns { padding-top: $spacing-medium; }\n .pt4-ns { padding-top: $spacing-large; }\n .pt5-ns { padding-top: $spacing-extra-large; }\n .pt6-ns { padding-top: $spacing-extra-extra-large; }\n .pt7-ns { padding-top: $spacing-extra-extra-extra-large; }\n\n .pv0-ns {\n padding-top: $spacing-none;\n padding-bottom: $spacing-none;\n }\n .pv1-ns {\n padding-top: $spacing-extra-small;\n padding-bottom: $spacing-extra-small;\n }\n .pv2-ns {\n padding-top: $spacing-small;\n padding-bottom: $spacing-small;\n }\n .pv3-ns {\n padding-top: $spacing-medium;\n padding-bottom: $spacing-medium;\n }\n .pv4-ns {\n padding-top: $spacing-large;\n padding-bottom: $spacing-large;\n }\n .pv5-ns {\n padding-top: $spacing-extra-large;\n padding-bottom: $spacing-extra-large;\n }\n .pv6-ns {\n padding-top: $spacing-extra-extra-large;\n padding-bottom: $spacing-extra-extra-large;\n }\n .pv7-ns {\n padding-top: $spacing-extra-extra-extra-large;\n padding-bottom: $spacing-extra-extra-extra-large;\n }\n .ph0-ns {\n padding-left: $spacing-none;\n padding-right: $spacing-none;\n }\n .ph1-ns {\n padding-left: $spacing-extra-small;\n padding-right: $spacing-extra-small;\n }\n .ph2-ns {\n padding-left: $spacing-small;\n padding-right: $spacing-small;\n }\n .ph3-ns {\n padding-left: $spacing-medium;\n padding-right: $spacing-medium;\n }\n .ph4-ns {\n padding-left: $spacing-large;\n padding-right: $spacing-large;\n }\n .ph5-ns {\n padding-left: $spacing-extra-large;\n padding-right: $spacing-extra-large;\n }\n .ph6-ns {\n padding-left: $spacing-extra-extra-large;\n padding-right: $spacing-extra-extra-large;\n }\n .ph7-ns {\n padding-left: $spacing-extra-extra-extra-large;\n padding-right: $spacing-extra-extra-extra-large;\n }\n\n .ma0-ns { margin: $spacing-none; }\n .ma1-ns { margin: $spacing-extra-small; }\n .ma2-ns { margin: $spacing-small; }\n .ma3-ns { margin: $spacing-medium; }\n .ma4-ns { margin: $spacing-large; }\n .ma5-ns { margin: $spacing-extra-large; }\n .ma6-ns { margin: $spacing-extra-extra-large; }\n .ma7-ns { margin: 
$spacing-extra-extra-extra-large; }\n\n .ml0-ns { margin-left: $spacing-none; }\n .ml1-ns { margin-left: $spacing-extra-small; }\n .ml2-ns { margin-left: $spacing-small; }\n .ml3-ns { margin-left: $spacing-medium; }\n .ml4-ns { margin-left: $spacing-large; }\n .ml5-ns { margin-left: $spacing-extra-large; }\n .ml6-ns { margin-left: $spacing-extra-extra-large; }\n .ml7-ns { margin-left: $spacing-extra-extra-extra-large; }\n\n .mr0-ns { margin-right: $spacing-none; }\n .mr1-ns { margin-right: $spacing-extra-small; }\n .mr2-ns { margin-right: $spacing-small; }\n .mr3-ns { margin-right: $spacing-medium; }\n .mr4-ns { margin-right: $spacing-large; }\n .mr5-ns { margin-right: $spacing-extra-large; }\n .mr6-ns { margin-right: $spacing-extra-extra-large; }\n .mr7-ns { margin-right: $spacing-extra-extra-extra-large; }\n\n .mb0-ns { margin-bottom: $spacing-none; }\n .mb1-ns { margin-bottom: $spacing-extra-small; }\n .mb2-ns { margin-bottom: $spacing-small; }\n .mb3-ns { margin-bottom: $spacing-medium; }\n .mb4-ns { margin-bottom: $spacing-large; }\n .mb5-ns { margin-bottom: $spacing-extra-large; }\n .mb6-ns { margin-bottom: $spacing-extra-extra-large; }\n .mb7-ns { margin-bottom: $spacing-extra-extra-extra-large; }\n\n .mt0-ns { margin-top: $spacing-none; }\n .mt1-ns { margin-top: $spacing-extra-small; }\n .mt2-ns { margin-top: $spacing-small; }\n .mt3-ns { margin-top: $spacing-medium; }\n .mt4-ns { margin-top: $spacing-large; }\n .mt5-ns { margin-top: $spacing-extra-large; }\n .mt6-ns { margin-top: $spacing-extra-extra-large; }\n .mt7-ns { margin-top: $spacing-extra-extra-extra-large; }\n\n .mv0-ns {\n margin-top: $spacing-none;\n margin-bottom: $spacing-none;\n }\n .mv1-ns {\n margin-top: $spacing-extra-small;\n margin-bottom: $spacing-extra-small;\n }\n .mv2-ns {\n margin-top: $spacing-small;\n margin-bottom: $spacing-small;\n }\n .mv3-ns {\n margin-top: $spacing-medium;\n margin-bottom: $spacing-medium;\n }\n .mv4-ns {\n margin-top: $spacing-large;\n margin-bottom: $spacing-large;\n }\n .mv5-ns {\n margin-top: $spacing-extra-large;\n margin-bottom: $spacing-extra-large;\n }\n .mv6-ns {\n margin-top: $spacing-extra-extra-large;\n margin-bottom: $spacing-extra-extra-large;\n }\n .mv7-ns {\n margin-top: $spacing-extra-extra-extra-large;\n margin-bottom: $spacing-extra-extra-extra-large;\n }\n\n .mh0-ns {\n margin-left: $spacing-none;\n margin-right: $spacing-none;\n }\n .mh1-ns {\n margin-left: $spacing-extra-small;\n margin-right: $spacing-extra-small;\n }\n .mh2-ns {\n margin-left: $spacing-small;\n margin-right: $spacing-small;\n }\n .mh3-ns {\n margin-left: $spacing-medium;\n margin-right: $spacing-medium;\n }\n .mh4-ns {\n margin-left: $spacing-large;\n margin-right: $spacing-large;\n }\n .mh5-ns {\n margin-left: $spacing-extra-large;\n margin-right: $spacing-extra-large;\n }\n .mh6-ns {\n margin-left: $spacing-extra-extra-large;\n margin-right: $spacing-extra-extra-large;\n }\n .mh7-ns {\n margin-left: $spacing-extra-extra-extra-large;\n margin-right: $spacing-extra-extra-extra-large;\n }\n\n}\n\n@media #{$breakpoint-medium} {\n .pa0-m { padding: $spacing-none; }\n .pa1-m { padding: $spacing-extra-small; }\n .pa2-m { padding: $spacing-small; }\n .pa3-m { padding: $spacing-medium; }\n .pa4-m { padding: $spacing-large; }\n .pa5-m { padding: $spacing-extra-large; }\n .pa6-m { padding: $spacing-extra-extra-large; }\n .pa7-m { padding: $spacing-extra-extra-extra-large; }\n\n .pl0-m { padding-left: $spacing-none; }\n .pl1-m { padding-left: $spacing-extra-small; }\n .pl2-m { padding-left: 
$spacing-small; }\n .pl3-m { padding-left: $spacing-medium; }\n .pl4-m { padding-left: $spacing-large; }\n .pl5-m { padding-left: $spacing-extra-large; }\n .pl6-m { padding-left: $spacing-extra-extra-large; }\n .pl7-m { padding-left: $spacing-extra-extra-extra-large; }\n\n .pr0-m { padding-right: $spacing-none; }\n .pr1-m { padding-right: $spacing-extra-small; }\n .pr2-m { padding-right: $spacing-small; }\n .pr3-m { padding-right: $spacing-medium; }\n .pr4-m { padding-right: $spacing-large; }\n .pr5-m { padding-right: $spacing-extra-large; }\n .pr6-m { padding-right: $spacing-extra-extra-large; }\n .pr7-m { padding-right: $spacing-extra-extra-extra-large; }\n\n .pb0-m { padding-bottom: $spacing-none; }\n .pb1-m { padding-bottom: $spacing-extra-small; }\n .pb2-m { padding-bottom: $spacing-small; }\n .pb3-m { padding-bottom: $spacing-medium; }\n .pb4-m { padding-bottom: $spacing-large; }\n .pb5-m { padding-bottom: $spacing-extra-large; }\n .pb6-m { padding-bottom: $spacing-extra-extra-large; }\n .pb7-m { padding-bottom: $spacing-extra-extra-extra-large; }\n\n .pt0-m { padding-top: $spacing-none; }\n .pt1-m { padding-top: $spacing-extra-small; }\n .pt2-m { padding-top: $spacing-small; }\n .pt3-m { padding-top: $spacing-medium; }\n .pt4-m { padding-top: $spacing-large; }\n .pt5-m { padding-top: $spacing-extra-large; }\n .pt6-m { padding-top: $spacing-extra-extra-large; }\n .pt7-m { padding-top: $spacing-extra-extra-extra-large; }\n\n .pv0-m {\n padding-top: $spacing-none;\n padding-bottom: $spacing-none;\n }\n .pv1-m {\n padding-top: $spacing-extra-small;\n padding-bottom: $spacing-extra-small;\n }\n .pv2-m {\n padding-top: $spacing-small;\n padding-bottom: $spacing-small;\n }\n .pv3-m {\n padding-top: $spacing-medium;\n padding-bottom: $spacing-medium;\n }\n .pv4-m {\n padding-top: $spacing-large;\n padding-bottom: $spacing-large;\n }\n .pv5-m {\n padding-top: $spacing-extra-large;\n padding-bottom: $spacing-extra-large;\n }\n .pv6-m {\n padding-top: $spacing-extra-extra-large;\n padding-bottom: $spacing-extra-extra-large;\n }\n .pv7-m {\n padding-top: $spacing-extra-extra-extra-large;\n padding-bottom: $spacing-extra-extra-extra-large;\n }\n\n .ph0-m {\n padding-left: $spacing-none;\n padding-right: $spacing-none;\n }\n .ph1-m {\n padding-left: $spacing-extra-small;\n padding-right: $spacing-extra-small;\n }\n .ph2-m {\n padding-left: $spacing-small;\n padding-right: $spacing-small;\n }\n .ph3-m {\n padding-left: $spacing-medium;\n padding-right: $spacing-medium;\n }\n .ph4-m {\n padding-left: $spacing-large;\n padding-right: $spacing-large;\n }\n .ph5-m {\n padding-left: $spacing-extra-large;\n padding-right: $spacing-extra-large;\n }\n .ph6-m {\n padding-left: $spacing-extra-extra-large;\n padding-right: $spacing-extra-extra-large;\n }\n .ph7-m {\n padding-left: $spacing-extra-extra-extra-large;\n padding-right: $spacing-extra-extra-extra-large;\n }\n\n .ma0-m { margin: $spacing-none; }\n .ma1-m { margin: $spacing-extra-small; }\n .ma2-m { margin: $spacing-small; }\n .ma3-m { margin: $spacing-medium; }\n .ma4-m { margin: $spacing-large; }\n .ma5-m { margin: $spacing-extra-large; }\n .ma6-m { margin: $spacing-extra-extra-large; }\n .ma7-m { margin: $spacing-extra-extra-extra-large; }\n\n .ml0-m { margin-left: $spacing-none; }\n .ml1-m { margin-left: $spacing-extra-small; }\n .ml2-m { margin-left: $spacing-small; }\n .ml3-m { margin-left: $spacing-medium; }\n .ml4-m { margin-left: $spacing-large; }\n .ml5-m { margin-left: $spacing-extra-large; }\n .ml6-m { margin-left: 
$spacing-extra-extra-large; }\n .ml7-m { margin-left: $spacing-extra-extra-extra-large; }\n\n .mr0-m { margin-right: $spacing-none; }\n .mr1-m { margin-right: $spacing-extra-small; }\n .mr2-m { margin-right: $spacing-small; }\n .mr3-m { margin-right: $spacing-medium; }\n .mr4-m { margin-right: $spacing-large; }\n .mr5-m { margin-right: $spacing-extra-large; }\n .mr6-m { margin-right: $spacing-extra-extra-large; }\n .mr7-m { margin-right: $spacing-extra-extra-extra-large; }\n\n .mb0-m { margin-bottom: $spacing-none; }\n .mb1-m { margin-bottom: $spacing-extra-small; }\n .mb2-m { margin-bottom: $spacing-small; }\n .mb3-m { margin-bottom: $spacing-medium; }\n .mb4-m { margin-bottom: $spacing-large; }\n .mb5-m { margin-bottom: $spacing-extra-large; }\n .mb6-m { margin-bottom: $spacing-extra-extra-large; }\n .mb7-m { margin-bottom: $spacing-extra-extra-extra-large; }\n\n .mt0-m { margin-top: $spacing-none; }\n .mt1-m { margin-top: $spacing-extra-small; }\n .mt2-m { margin-top: $spacing-small; }\n .mt3-m { margin-top: $spacing-medium; }\n .mt4-m { margin-top: $spacing-large; }\n .mt5-m { margin-top: $spacing-extra-large; }\n .mt6-m { margin-top: $spacing-extra-extra-large; }\n .mt7-m { margin-top: $spacing-extra-extra-extra-large; }\n\n .mv0-m {\n margin-top: $spacing-none;\n margin-bottom: $spacing-none;\n }\n .mv1-m {\n margin-top: $spacing-extra-small;\n margin-bottom: $spacing-extra-small;\n }\n .mv2-m {\n margin-top: $spacing-small;\n margin-bottom: $spacing-small;\n }\n .mv3-m {\n margin-top: $spacing-medium;\n margin-bottom: $spacing-medium;\n }\n .mv4-m {\n margin-top: $spacing-large;\n margin-bottom: $spacing-large;\n }\n .mv5-m {\n margin-top: $spacing-extra-large;\n margin-bottom: $spacing-extra-large;\n }\n .mv6-m {\n margin-top: $spacing-extra-extra-large;\n margin-bottom: $spacing-extra-extra-large;\n }\n .mv7-m {\n margin-top: $spacing-extra-extra-extra-large;\n margin-bottom: $spacing-extra-extra-extra-large;\n }\n\n .mh0-m {\n margin-left: $spacing-none;\n margin-right: $spacing-none;\n }\n .mh1-m {\n margin-left: $spacing-extra-small;\n margin-right: $spacing-extra-small;\n }\n .mh2-m {\n margin-left: $spacing-small;\n margin-right: $spacing-small;\n }\n .mh3-m {\n margin-left: $spacing-medium;\n margin-right: $spacing-medium;\n }\n .mh4-m {\n margin-left: $spacing-large;\n margin-right: $spacing-large;\n }\n .mh5-m {\n margin-left: $spacing-extra-large;\n margin-right: $spacing-extra-large;\n }\n .mh6-m {\n margin-left: $spacing-extra-extra-large;\n margin-right: $spacing-extra-extra-large;\n }\n .mh7-m {\n margin-left: $spacing-extra-extra-extra-large;\n margin-right: $spacing-extra-extra-extra-large;\n }\n\n}\n\n@media #{$breakpoint-large} {\n .pa0-l { padding: $spacing-none; }\n .pa1-l { padding: $spacing-extra-small; }\n .pa2-l { padding: $spacing-small; }\n .pa3-l { padding: $spacing-medium; }\n .pa4-l { padding: $spacing-large; }\n .pa5-l { padding: $spacing-extra-large; }\n .pa6-l { padding: $spacing-extra-extra-large; }\n .pa7-l { padding: $spacing-extra-extra-extra-large; }\n\n .pl0-l { padding-left: $spacing-none; }\n .pl1-l { padding-left: $spacing-extra-small; }\n .pl2-l { padding-left: $spacing-small; }\n .pl3-l { padding-left: $spacing-medium; }\n .pl4-l { padding-left: $spacing-large; }\n .pl5-l { padding-left: $spacing-extra-large; }\n .pl6-l { padding-left: $spacing-extra-extra-large; }\n .pl7-l { padding-left: $spacing-extra-extra-extra-large; }\n\n .pr0-l { padding-right: $spacing-none; }\n .pr1-l { padding-right: $spacing-extra-small; }\n .pr2-l { 
padding-right: $spacing-small; }\n .pr3-l { padding-right: $spacing-medium; }\n .pr4-l { padding-right: $spacing-large; }\n .pr5-l { padding-right: $spacing-extra-large; }\n .pr6-l { padding-right: $spacing-extra-extra-large; }\n .pr7-l { padding-right: $spacing-extra-extra-extra-large; }\n\n .pb0-l { padding-bottom: $spacing-none; }\n .pb1-l { padding-bottom: $spacing-extra-small; }\n .pb2-l { padding-bottom: $spacing-small; }\n .pb3-l { padding-bottom: $spacing-medium; }\n .pb4-l { padding-bottom: $spacing-large; }\n .pb5-l { padding-bottom: $spacing-extra-large; }\n .pb6-l { padding-bottom: $spacing-extra-extra-large; }\n .pb7-l { padding-bottom: $spacing-extra-extra-extra-large; }\n\n .pt0-l { padding-top: $spacing-none; }\n .pt1-l { padding-top: $spacing-extra-small; }\n .pt2-l { padding-top: $spacing-small; }\n .pt3-l { padding-top: $spacing-medium; }\n .pt4-l { padding-top: $spacing-large; }\n .pt5-l { padding-top: $spacing-extra-large; }\n .pt6-l { padding-top: $spacing-extra-extra-large; }\n .pt7-l { padding-top: $spacing-extra-extra-extra-large; }\n\n .pv0-l {\n padding-top: $spacing-none;\n padding-bottom: $spacing-none;\n }\n .pv1-l {\n padding-top: $spacing-extra-small;\n padding-bottom: $spacing-extra-small;\n }\n .pv2-l {\n padding-top: $spacing-small;\n padding-bottom: $spacing-small;\n }\n .pv3-l {\n padding-top: $spacing-medium;\n padding-bottom: $spacing-medium;\n }\n .pv4-l {\n padding-top: $spacing-large;\n padding-bottom: $spacing-large;\n }\n .pv5-l {\n padding-top: $spacing-extra-large;\n padding-bottom: $spacing-extra-large;\n }\n .pv6-l {\n padding-top: $spacing-extra-extra-large;\n padding-bottom: $spacing-extra-extra-large;\n }\n .pv7-l {\n padding-top: $spacing-extra-extra-extra-large;\n padding-bottom: $spacing-extra-extra-extra-large;\n }\n\n .ph0-l {\n padding-left: $spacing-none;\n padding-right: $spacing-none;\n }\n .ph1-l {\n padding-left: $spacing-extra-small;\n padding-right: $spacing-extra-small;\n }\n .ph2-l {\n padding-left: $spacing-small;\n padding-right: $spacing-small;\n }\n .ph3-l {\n padding-left: $spacing-medium;\n padding-right: $spacing-medium;\n }\n .ph4-l {\n padding-left: $spacing-large;\n padding-right: $spacing-large;\n }\n .ph5-l {\n padding-left: $spacing-extra-large;\n padding-right: $spacing-extra-large;\n }\n .ph6-l {\n padding-left: $spacing-extra-extra-large;\n padding-right: $spacing-extra-extra-large;\n }\n .ph7-l {\n padding-left: $spacing-extra-extra-extra-large;\n padding-right: $spacing-extra-extra-extra-large;\n }\n\n .ma0-l { margin: $spacing-none; }\n .ma1-l { margin: $spacing-extra-small; }\n .ma2-l { margin: $spacing-small; }\n .ma3-l { margin: $spacing-medium; }\n .ma4-l { margin: $spacing-large; }\n .ma5-l { margin: $spacing-extra-large; }\n .ma6-l { margin: $spacing-extra-extra-large; }\n .ma7-l { margin: $spacing-extra-extra-extra-large; }\n\n .ml0-l { margin-left: $spacing-none; }\n .ml1-l { margin-left: $spacing-extra-small; }\n .ml2-l { margin-left: $spacing-small; }\n .ml3-l { margin-left: $spacing-medium; }\n .ml4-l { margin-left: $spacing-large; }\n .ml5-l { margin-left: $spacing-extra-large; }\n .ml6-l { margin-left: $spacing-extra-extra-large; }\n .ml7-l { margin-left: $spacing-extra-extra-extra-large; }\n\n .mr0-l { margin-right: $spacing-none; }\n .mr1-l { margin-right: $spacing-extra-small; }\n .mr2-l { margin-right: $spacing-small; }\n .mr3-l { margin-right: $spacing-medium; }\n .mr4-l { margin-right: $spacing-large; }\n .mr5-l { margin-right: $spacing-extra-large; }\n .mr6-l { margin-right: 
$spacing-extra-extra-large; }\n .mr7-l { margin-right: $spacing-extra-extra-extra-large; }\n\n .mb0-l { margin-bottom: $spacing-none; }\n .mb1-l { margin-bottom: $spacing-extra-small; }\n .mb2-l { margin-bottom: $spacing-small; }\n .mb3-l { margin-bottom: $spacing-medium; }\n .mb4-l { margin-bottom: $spacing-large; }\n .mb5-l { margin-bottom: $spacing-extra-large; }\n .mb6-l { margin-bottom: $spacing-extra-extra-large; }\n .mb7-l { margin-bottom: $spacing-extra-extra-extra-large; }\n\n .mt0-l { margin-top: $spacing-none; }\n .mt1-l { margin-top: $spacing-extra-small; }\n .mt2-l { margin-top: $spacing-small; }\n .mt3-l { margin-top: $spacing-medium; }\n .mt4-l { margin-top: $spacing-large; }\n .mt5-l { margin-top: $spacing-extra-large; }\n .mt6-l { margin-top: $spacing-extra-extra-large; }\n .mt7-l { margin-top: $spacing-extra-extra-extra-large; }\n\n .mv0-l {\n margin-top: $spacing-none;\n margin-bottom: $spacing-none;\n }\n .mv1-l {\n margin-top: $spacing-extra-small;\n margin-bottom: $spacing-extra-small;\n }\n .mv2-l {\n margin-top: $spacing-small;\n margin-bottom: $spacing-small;\n }\n .mv3-l {\n margin-top: $spacing-medium;\n margin-bottom: $spacing-medium;\n }\n .mv4-l {\n margin-top: $spacing-large;\n margin-bottom: $spacing-large;\n }\n .mv5-l {\n margin-top: $spacing-extra-large;\n margin-bottom: $spacing-extra-large;\n }\n .mv6-l {\n margin-top: $spacing-extra-extra-large;\n margin-bottom: $spacing-extra-extra-large;\n }\n .mv7-l {\n margin-top: $spacing-extra-extra-extra-large;\n margin-bottom: $spacing-extra-extra-extra-large;\n }\n\n .mh0-l {\n margin-left: $spacing-none;\n margin-right: $spacing-none;\n }\n .mh1-l {\n margin-left: $spacing-extra-small;\n margin-right: $spacing-extra-small;\n }\n .mh2-l {\n margin-left: $spacing-small;\n margin-right: $spacing-small;\n }\n .mh3-l {\n margin-left: $spacing-medium;\n margin-right: $spacing-medium;\n }\n .mh4-l {\n margin-left: $spacing-large;\n margin-right: $spacing-large;\n }\n .mh5-l {\n margin-left: $spacing-extra-large;\n margin-right: $spacing-extra-large;\n }\n .mh6-l {\n margin-left: $spacing-extra-extra-large;\n margin-right: $spacing-extra-extra-large;\n }\n .mh7-l {\n margin-left: $spacing-extra-extra-extra-large;\n margin-right: $spacing-extra-extra-extra-large;\n }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n NEGATIVE MARGINS\n\n Base:\n n = negative\n\n Modifiers:\n a = all\n t = top\n r = right\n b = bottom\n l = left\n\n 1 = 1st step in spacing scale\n 2 = 2nd step in spacing scale\n 3 = 3rd step in spacing scale\n 4 = 4th step in spacing scale\n 5 = 5th step in spacing scale\n 6 = 6th step in spacing scale\n 7 = 7th step in spacing scale\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n\n\n.na1 { margin: -$spacing-extra-small; }\n.na2 { margin: -$spacing-small; }\n.na3 { margin: -$spacing-medium; }\n.na4 { margin: -$spacing-large; }\n.na5 { margin: -$spacing-extra-large; }\n.na6 { margin: -$spacing-extra-extra-large; }\n.na7 { margin: -$spacing-extra-extra-extra-large; }\n\n.nl1 { margin-left: -$spacing-extra-small; }\n.nl2 { margin-left: -$spacing-small; }\n.nl3 { margin-left: -$spacing-medium; }\n.nl4 { margin-left: -$spacing-large; }\n.nl5 { margin-left: -$spacing-extra-large; }\n.nl6 { margin-left: -$spacing-extra-extra-large; }\n.nl7 { margin-left: -$spacing-extra-extra-extra-large; }\n\n.nr1 { margin-right: -$spacing-extra-small; }\n.nr2 { margin-right: -$spacing-small; }\n.nr3 { margin-right: -$spacing-medium; }\n.nr4 { margin-right: 
-$spacing-large; }\n.nr5 { margin-right: -$spacing-extra-large; }\n.nr6 { margin-right: -$spacing-extra-extra-large; }\n.nr7 { margin-right: -$spacing-extra-extra-extra-large; }\n\n.nb1 { margin-bottom: -$spacing-extra-small; }\n.nb2 { margin-bottom: -$spacing-small; }\n.nb3 { margin-bottom: -$spacing-medium; }\n.nb4 { margin-bottom: -$spacing-large; }\n.nb5 { margin-bottom: -$spacing-extra-large; }\n.nb6 { margin-bottom: -$spacing-extra-extra-large; }\n.nb7 { margin-bottom: -$spacing-extra-extra-extra-large; }\n\n.nt1 { margin-top: -$spacing-extra-small; }\n.nt2 { margin-top: -$spacing-small; }\n.nt3 { margin-top: -$spacing-medium; }\n.nt4 { margin-top: -$spacing-large; }\n.nt5 { margin-top: -$spacing-extra-large; }\n.nt6 { margin-top: -$spacing-extra-extra-large; }\n.nt7 { margin-top: -$spacing-extra-extra-extra-large; }\n\n@media #{$breakpoint-not-small} {\n\n .na1-ns { margin: -$spacing-extra-small; }\n .na2-ns { margin: -$spacing-small; }\n .na3-ns { margin: -$spacing-medium; }\n .na4-ns { margin: -$spacing-large; }\n .na5-ns { margin: -$spacing-extra-large; }\n .na6-ns { margin: -$spacing-extra-extra-large; }\n .na7-ns { margin: -$spacing-extra-extra-extra-large; }\n\n .nl1-ns { margin-left: -$spacing-extra-small; }\n .nl2-ns { margin-left: -$spacing-small; }\n .nl3-ns { margin-left: -$spacing-medium; }\n .nl4-ns { margin-left: -$spacing-large; }\n .nl5-ns { margin-left: -$spacing-extra-large; }\n .nl6-ns { margin-left: -$spacing-extra-extra-large; }\n .nl7-ns { margin-left: -$spacing-extra-extra-extra-large; }\n\n .nr1-ns { margin-right: -$spacing-extra-small; }\n .nr2-ns { margin-right: -$spacing-small; }\n .nr3-ns { margin-right: -$spacing-medium; }\n .nr4-ns { margin-right: -$spacing-large; }\n .nr5-ns { margin-right: -$spacing-extra-large; }\n .nr6-ns { margin-right: -$spacing-extra-extra-large; }\n .nr7-ns { margin-right: -$spacing-extra-extra-extra-large; }\n\n .nb1-ns { margin-bottom: -$spacing-extra-small; }\n .nb2-ns { margin-bottom: -$spacing-small; }\n .nb3-ns { margin-bottom: -$spacing-medium; }\n .nb4-ns { margin-bottom: -$spacing-large; }\n .nb5-ns { margin-bottom: -$spacing-extra-large; }\n .nb6-ns { margin-bottom: -$spacing-extra-extra-large; }\n .nb7-ns { margin-bottom: -$spacing-extra-extra-extra-large; }\n\n .nt1-ns { margin-top: -$spacing-extra-small; }\n .nt2-ns { margin-top: -$spacing-small; }\n .nt3-ns { margin-top: -$spacing-medium; }\n .nt4-ns { margin-top: -$spacing-large; }\n .nt5-ns { margin-top: -$spacing-extra-large; }\n .nt6-ns { margin-top: -$spacing-extra-extra-large; }\n .nt7-ns { margin-top: -$spacing-extra-extra-extra-large; }\n\n}\n\n@media #{$breakpoint-medium} {\n .na1-m { margin: -$spacing-extra-small; }\n .na2-m { margin: -$spacing-small; }\n .na3-m { margin: -$spacing-medium; }\n .na4-m { margin: -$spacing-large; }\n .na5-m { margin: -$spacing-extra-large; }\n .na6-m { margin: -$spacing-extra-extra-large; }\n .na7-m { margin: -$spacing-extra-extra-extra-large; }\n\n .nl1-m { margin-left: -$spacing-extra-small; }\n .nl2-m { margin-left: -$spacing-small; }\n .nl3-m { margin-left: -$spacing-medium; }\n .nl4-m { margin-left: -$spacing-large; }\n .nl5-m { margin-left: -$spacing-extra-large; }\n .nl6-m { margin-left: -$spacing-extra-extra-large; }\n .nl7-m { margin-left: -$spacing-extra-extra-extra-large; }\n\n .nr1-m { margin-right: -$spacing-extra-small; }\n .nr2-m { margin-right: -$spacing-small; }\n .nr3-m { margin-right: -$spacing-medium; }\n .nr4-m { margin-right: -$spacing-large; }\n .nr5-m { margin-right: -$spacing-extra-large; }\n .nr6-m 
{ margin-right: -$spacing-extra-extra-large; }\n .nr7-m { margin-right: -$spacing-extra-extra-extra-large; }\n\n .nb1-m { margin-bottom: -$spacing-extra-small; }\n .nb2-m { margin-bottom: -$spacing-small; }\n .nb3-m { margin-bottom: -$spacing-medium; }\n .nb4-m { margin-bottom: -$spacing-large; }\n .nb5-m { margin-bottom: -$spacing-extra-large; }\n .nb6-m { margin-bottom: -$spacing-extra-extra-large; }\n .nb7-m { margin-bottom: -$spacing-extra-extra-extra-large; }\n\n .nt1-m { margin-top: -$spacing-extra-small; }\n .nt2-m { margin-top: -$spacing-small; }\n .nt3-m { margin-top: -$spacing-medium; }\n .nt4-m { margin-top: -$spacing-large; }\n .nt5-m { margin-top: -$spacing-extra-large; }\n .nt6-m { margin-top: -$spacing-extra-extra-large; }\n .nt7-m { margin-top: -$spacing-extra-extra-extra-large; }\n\n}\n\n@media #{$breakpoint-large} {\n .na1-l { margin: -$spacing-extra-small; }\n .na2-l { margin: -$spacing-small; }\n .na3-l { margin: -$spacing-medium; }\n .na4-l { margin: -$spacing-large; }\n .na5-l { margin: -$spacing-extra-large; }\n .na6-l { margin: -$spacing-extra-extra-large; }\n .na7-l { margin: -$spacing-extra-extra-extra-large; }\n\n .nl1-l { margin-left: -$spacing-extra-small; }\n .nl2-l { margin-left: -$spacing-small; }\n .nl3-l { margin-left: -$spacing-medium; }\n .nl4-l { margin-left: -$spacing-large; }\n .nl5-l { margin-left: -$spacing-extra-large; }\n .nl6-l { margin-left: -$spacing-extra-extra-large; }\n .nl7-l { margin-left: -$spacing-extra-extra-extra-large; }\n\n .nr1-l { margin-right: -$spacing-extra-small; }\n .nr2-l { margin-right: -$spacing-small; }\n .nr3-l { margin-right: -$spacing-medium; }\n .nr4-l { margin-right: -$spacing-large; }\n .nr5-l { margin-right: -$spacing-extra-large; }\n .nr6-l { margin-right: -$spacing-extra-extra-large; }\n .nr7-l { margin-right: -$spacing-extra-extra-extra-large; }\n\n .nb1-l { margin-bottom: -$spacing-extra-small; }\n .nb2-l { margin-bottom: -$spacing-small; }\n .nb3-l { margin-bottom: -$spacing-medium; }\n .nb4-l { margin-bottom: -$spacing-large; }\n .nb5-l { margin-bottom: -$spacing-extra-large; }\n .nb6-l { margin-bottom: -$spacing-extra-extra-large; }\n .nb7-l { margin-bottom: -$spacing-extra-extra-extra-large; }\n\n .nt1-l { margin-top: -$spacing-extra-small; }\n .nt2-l { margin-top: -$spacing-small; }\n .nt3-l { margin-top: -$spacing-medium; }\n .nt4-l { margin-top: -$spacing-large; }\n .nt5-l { margin-top: -$spacing-extra-large; }\n .nt6-l { margin-top: -$spacing-extra-extra-large; }\n .nt7-l { margin-top: -$spacing-extra-extra-extra-large; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n TABLES\n Docs: http://tachyons.io/docs/elements/tables/\n\n*/\n\n.collapse {\n border-collapse: collapse;\n border-spacing: 0;\n}\n\n.striped--light-silver:nth-child(odd) {\n background-color: $light-silver;\n}\n\n.striped--moon-gray:nth-child(odd) {\n background-color: $moon-gray;\n}\n\n.striped--light-gray:nth-child(odd) {\n background-color: $light-gray;\n}\n\n.striped--near-white:nth-child(odd) {\n background-color: $near-white;\n}\n\n.stripe-light:nth-child(odd) {\n background-color: $white-10;\n}\n\n.stripe-dark:nth-child(odd) {\n background-color: $black-10;\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n TEXT DECORATION\n Docs: http://tachyons.io/docs/typography/text-decoration/\n\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n.strike { text-decoration: line-through; }\n.underline { text-decoration: underline; }\n.no-underline { 
text-decoration: none; }\n\n\n@media #{$breakpoint-not-small} {\n .strike-ns { text-decoration: line-through; }\n .underline-ns { text-decoration: underline; }\n .no-underline-ns { text-decoration: none; }\n}\n\n@media #{$breakpoint-medium} {\n .strike-m { text-decoration: line-through; }\n .underline-m { text-decoration: underline; }\n .no-underline-m { text-decoration: none; }\n}\n\n@media #{$breakpoint-large} {\n .strike-l { text-decoration: line-through; }\n .underline-l { text-decoration: underline; }\n .no-underline-l { text-decoration: none; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n TEXT ALIGN\n Docs: http://tachyons.io/docs/typography/text-align/\n\n Base\n t = text-align\n\n Modifiers\n l = left\n r = right\n c = center\n j = justify\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n.tl { text-align: left; }\n.tr { text-align: right; }\n.tc { text-align: center; }\n.tj { text-align: justify; }\n\n@media #{$breakpoint-not-small} {\n .tl-ns { text-align: left; }\n .tr-ns { text-align: right; }\n .tc-ns { text-align: center; }\n .tj-ns { text-align: justify; }\n}\n\n@media #{$breakpoint-medium} {\n .tl-m { text-align: left; }\n .tr-m { text-align: right; }\n .tc-m { text-align: center; }\n .tj-m { text-align: justify; }\n}\n\n@media #{$breakpoint-large} {\n .tl-l { text-align: left; }\n .tr-l { text-align: right; }\n .tc-l { text-align: center; }\n .tj-l { text-align: justify; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n TEXT TRANSFORM\n Docs: http://tachyons.io/docs/typography/text-transform/\n\n Base:\n tt = text-transform\n\n Modifiers\n c = capitalize\n l = lowercase\n u = uppercase\n n = none\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n.ttc { text-transform: capitalize; }\n.ttl { text-transform: lowercase; }\n.ttu { text-transform: uppercase; }\n.ttn { text-transform: none; }\n\n@media #{$breakpoint-not-small} {\n .ttc-ns { text-transform: capitalize; }\n .ttl-ns { text-transform: lowercase; }\n .ttu-ns { text-transform: uppercase; }\n .ttn-ns { text-transform: none; }\n}\n\n@media #{$breakpoint-medium} {\n .ttc-m { text-transform: capitalize; }\n .ttl-m { text-transform: lowercase; }\n .ttu-m { text-transform: uppercase; }\n .ttn-m { text-transform: none; }\n}\n\n@media #{$breakpoint-large} {\n .ttc-l { text-transform: capitalize; }\n .ttl-l { text-transform: lowercase; }\n .ttu-l { text-transform: uppercase; }\n .ttn-l { text-transform: none; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n TYPE SCALE\n Docs: http://tachyons.io/docs/typography/scale/\n\n Base:\n f = font-size\n\n Modifiers\n 1 = 1st step in size scale\n 2 = 2nd step in size scale\n 3 = 3rd step in size scale\n 4 = 4th step in size scale\n 5 = 5th step in size scale\n 6 = 6th step in size scale\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n*/\n\n/*\n * For Hero/Marketing Titles\n *\n * These generally are too large for mobile\n * so be careful using them on smaller screens.\n * */\n\n.f-6,\n.f-headline {\n font-size: $font-size-headline;\n}\n.f-5,\n.f-subheadline {\n font-size: $font-size-subheadline;\n}\n\n\n/* Type Scale */\n\n\n.f1 { font-size: $font-size-1; }\n.f2 { font-size: $font-size-2; }\n.f3 { font-size: $font-size-3; }\n.f4 { font-size: $font-size-4; }\n.f5 { font-size: $font-size-5; }\n.f6 { font-size: $font-size-6; }\n.f7 { font-size: $font-size-7; }\n\n@media #{$breakpoint-not-small}{\n 
.f-6-ns,\n .f-headline-ns { font-size: $font-size-headline; }\n .f-5-ns,\n .f-subheadline-ns { font-size: $font-size-subheadline; }\n .f1-ns { font-size: $font-size-1; }\n .f2-ns { font-size: $font-size-2; }\n .f3-ns { font-size: $font-size-3; }\n .f4-ns { font-size: $font-size-4; }\n .f5-ns { font-size: $font-size-5; }\n .f6-ns { font-size: $font-size-6; }\n .f7-ns { font-size: $font-size-7; }\n}\n\n@media #{$breakpoint-medium} {\n .f-6-m,\n .f-headline-m { font-size: $font-size-headline; }\n .f-5-m,\n .f-subheadline-m { font-size: $font-size-subheadline; }\n .f1-m { font-size: $font-size-1; }\n .f2-m { font-size: $font-size-2; }\n .f3-m { font-size: $font-size-3; }\n .f4-m { font-size: $font-size-4; }\n .f5-m { font-size: $font-size-5; }\n .f6-m { font-size: $font-size-6; }\n .f7-m { font-size: $font-size-7; }\n}\n\n@media #{$breakpoint-large} {\n .f-6-l,\n .f-headline-l {\n font-size: $font-size-headline;\n }\n .f-5-l,\n .f-subheadline-l {\n font-size: $font-size-subheadline;\n }\n .f1-l { font-size: $font-size-1; }\n .f2-l { font-size: $font-size-2; }\n .f3-l { font-size: $font-size-3; }\n .f4-l { font-size: $font-size-4; }\n .f5-l { font-size: $font-size-5; }\n .f6-l { font-size: $font-size-6; }\n .f7-l { font-size: $font-size-7; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n TYPOGRAPHY\n http://tachyons.io/docs/typography/measure/\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n\n\n/* Measure is limited to ~66 characters */\n.measure {\n max-width: $measure;\n}\n\n/* Measure is limited to ~80 characters */\n.measure-wide {\n max-width: $measure-wide;\n}\n\n/* Measure is limited to ~45 characters */\n.measure-narrow {\n max-width: $measure-narrow;\n}\n\n/* Book paragraph style - paragraphs are indented with no vertical spacing. */\n.indent {\n text-indent: 1em;\n margin-top: 0;\n margin-bottom: 0;\n}\n\n.small-caps {\n font-variant: small-caps;\n}\n\n/* Combine this class with a width to truncate text (or just leave as is to truncate at width of containing element. 
*/\n\n.truncate {\n white-space: nowrap;\n overflow: hidden;\n text-overflow: ellipsis;\n}\n\n@media #{$breakpoint-not-small} {\n .measure-ns {\n max-width: $measure;\n }\n .measure-wide-ns {\n max-width: $measure-wide;\n }\n .measure-narrow-ns {\n max-width: $measure-narrow;\n }\n .indent-ns {\n text-indent: 1em;\n margin-top: 0;\n margin-bottom: 0;\n }\n .small-caps-ns {\n font-variant: small-caps;\n }\n .truncate-ns {\n white-space: nowrap;\n overflow: hidden;\n text-overflow: ellipsis;\n }\n}\n\n@media #{$breakpoint-medium} {\n .measure-m {\n max-width: $measure;\n }\n .measure-wide-m {\n max-width: $measure-wide;\n }\n .measure-narrow-m {\n max-width: $measure-narrow;\n }\n .indent-m {\n text-indent: 1em;\n margin-top: 0;\n margin-bottom: 0;\n }\n .small-caps-m {\n font-variant: small-caps;\n }\n .truncate-m {\n white-space: nowrap;\n overflow: hidden;\n text-overflow: ellipsis;\n }\n}\n\n@media #{$breakpoint-large} {\n .measure-l {\n max-width: $measure;\n }\n .measure-wide-l {\n max-width: $measure-wide;\n }\n .measure-narrow-l {\n max-width: $measure-narrow;\n }\n .indent-l {\n text-indent: 1em;\n margin-top: 0;\n margin-bottom: 0;\n }\n .small-caps-l {\n font-variant: small-caps;\n }\n .truncate-l {\n white-space: nowrap;\n overflow: hidden;\n text-overflow: ellipsis;\n }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n UTILITIES\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n/* Equivalent to .overflow-y-scroll */\n.overflow-container {\n overflow-y: scroll;\n}\n\n.center {\n margin-right: auto;\n margin-left: auto;\n}\n\n.mr-auto { margin-right: auto; }\n.ml-auto { margin-left: auto; }\n\n@media #{$breakpoint-not-small}{\n .center-ns {\n margin-right: auto;\n margin-left: auto;\n }\n .mr-auto-ns { margin-right: auto; }\n .ml-auto-ns { margin-left: auto; }\n}\n\n@media #{$breakpoint-medium}{\n .center-m {\n margin-right: auto;\n margin-left: auto;\n }\n .mr-auto-m { margin-right: auto; }\n .ml-auto-m { margin-left: auto; }\n}\n\n@media #{$breakpoint-large}{\n .center-l {\n margin-right: auto;\n margin-left: auto;\n }\n .mr-auto-l { margin-right: auto; }\n .ml-auto-l { margin-left: auto; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n VISIBILITY\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n\n/*\n Text that is hidden but accessible\n Ref: http://snook.ca/archives/html_and_css/hiding-content-for-accessibility\n*/\n\n.clip {\n position: fixed !important;\n _position: absolute !important;\n clip: rect(1px 1px 1px 1px); /* IE6, IE7 */\n clip: rect(1px, 1px, 1px, 1px);\n}\n\n@media #{$breakpoint-not-small} {\n .clip-ns {\n position: fixed !important;\n _position: absolute !important;\n clip: rect(1px 1px 1px 1px); /* IE6, IE7 */\n clip: rect(1px, 1px, 1px, 1px);\n }\n}\n\n@media #{$breakpoint-medium} {\n .clip-m {\n position: fixed !important;\n _position: absolute !important;\n clip: rect(1px 1px 1px 1px); /* IE6, IE7 */\n clip: rect(1px, 1px, 1px, 1px);\n }\n}\n\n@media #{$breakpoint-large} {\n .clip-l {\n position: fixed !important;\n _position: absolute !important;\n clip: rect(1px 1px 1px 1px); /* IE6, IE7 */\n clip: rect(1px, 1px, 1px, 1px);\n }\n}\n\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n WHITE SPACE\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n\n.ws-normal { white-space: normal; }\n.nowrap { white-space: nowrap; }\n.pre { white-space: pre; }\n\n@media 
#{$breakpoint-not-small} {\n .ws-normal-ns { white-space: normal; }\n .nowrap-ns { white-space: nowrap; }\n .pre-ns { white-space: pre; }\n}\n\n@media #{$breakpoint-medium} {\n .ws-normal-m { white-space: normal; }\n .nowrap-m { white-space: nowrap; }\n .pre-m { white-space: pre; }\n}\n\n@media #{$breakpoint-large} {\n .ws-normal-l { white-space: normal; }\n .nowrap-l { white-space: nowrap; }\n .pre-l { white-space: pre; }\n}\n\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n VERTICAL ALIGN\n\n Media Query Extensions:\n -ns = not-small\n -m = medium\n -l = large\n\n*/\n\n.v-base { vertical-align: baseline; }\n.v-mid { vertical-align: middle; }\n.v-top { vertical-align: top; }\n.v-btm { vertical-align: bottom; }\n\n@media #{$breakpoint-not-small} {\n .v-base-ns { vertical-align: baseline; }\n .v-mid-ns { vertical-align: middle; }\n .v-top-ns { vertical-align: top; }\n .v-btm-ns { vertical-align: bottom; }\n}\n\n@media #{$breakpoint-medium} {\n .v-base-m { vertical-align: baseline; }\n .v-mid-m { vertical-align: middle; }\n .v-top-m { vertical-align: top; }\n .v-btm-m { vertical-align: bottom; }\n}\n\n@media #{$breakpoint-large} {\n .v-base-l { vertical-align: baseline; }\n .v-mid-l { vertical-align: middle; }\n .v-top-l { vertical-align: top; }\n .v-btm-l { vertical-align: bottom; }\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n HOVER EFFECTS\n Docs: http://tachyons.io/docs/themes/hovers/\n\n - Dim\n - Glow\n - Hide Child\n - Underline text\n - Grow\n - Pointer\n - Shadow\n\n*/\n\n/*\n\n Dim element on hover by adding the dim class.\n\n*/\n.dim {\n opacity: 1;\n transition: opacity .15s ease-in;\n}\n.dim:hover,\n.dim:focus {\n opacity: .5;\n transition: opacity .15s ease-in;\n}\n.dim:active {\n opacity: .8; transition: opacity .15s ease-out;\n}\n\n/*\n\n Animate opacity to 100% on hover by adding the glow class.\n\n*/\n.glow {\n transition: opacity .15s ease-in;\n}\n.glow:hover,\n.glow:focus {\n opacity: 1;\n transition: opacity .15s ease-in;\n}\n\n/*\n\n Hide child & reveal on hover:\n\n Put the hide-child class on a parent element and any nested element with the\n child class will be hidden and displayed on hover or focus.\n\n
  <div class=\"hide-child\">\n     <div class=\"child\"> Hidden until hover or focus </div>\n     <div class=\"child\"> Hidden until hover or focus </div>\n     <div class=\"child\"> Hidden until hover or focus </div>\n     <div class=\"child\"> Hidden until hover or focus </div>\n  </div>
\n*/\n\n.hide-child .child {\n opacity: 0;\n transition: opacity .15s ease-in;\n}\n.hide-child:hover .child,\n.hide-child:focus .child,\n.hide-child:active .child {\n opacity: 1;\n transition: opacity .15s ease-in;\n}\n\n.underline-hover:hover,\n.underline-hover:focus {\n text-decoration: underline;\n}\n\n/* Can combine this with overflow-hidden to make background images grow on hover\n * even if you are using background-size: cover */\n\n.grow {\n -moz-osx-font-smoothing: grayscale;\n backface-visibility: hidden;\n transform: translateZ(0);\n transition: transform 0.25s ease-out;\n}\n\n.grow:hover,\n.grow:focus {\n transform: scale(1.05);\n}\n\n.grow:active {\n transform: scale(.90);\n}\n\n.grow-large {\n -moz-osx-font-smoothing: grayscale;\n backface-visibility: hidden;\n transform: translateZ(0);\n transition: transform .25s ease-in-out;\n}\n\n.grow-large:hover,\n.grow-large:focus {\n transform: scale(1.2);\n}\n\n.grow-large:active {\n transform: scale(.95);\n}\n\n/* Add pointer on hover */\n\n.pointer:hover {\n cursor: pointer;\n}\n\n/*\n Add shadow on hover.\n\n Performant box-shadow animation pattern from\n http://tobiasahlin.com/blog/how-to-animate-box-shadow/\n*/\n\n.shadow-hover {\n cursor: pointer;\n position: relative;\n transition: all 0.5s cubic-bezier(0.165, 0.84, 0.44, 1);\n}\n\n.shadow-hover::after {\n content: '';\n box-shadow: 0px 0px 16px 2px rgba( 0, 0, 0, .2 );\n border-radius: inherit;\n opacity: 0;\n position: absolute;\n top: 0;\n left: 0;\n width: 100%;\n height: 100%;\n z-index: -1;\n transition: opacity 0.5s cubic-bezier(0.165, 0.84, 0.44, 1);\n}\n\n.shadow-hover:hover::after,\n.shadow-hover:focus::after {\n opacity: 1;\n}\n\n/* Combine with classes in skins and skins-pseudo for\n * many different transition possibilities. 
*/\n\n.bg-animate,\n.bg-animate:hover,\n.bg-animate:focus {\n transition: background-color .15s ease-in-out;\n}\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n Z-INDEX\n\n Base\n z = z-index\n\n Modifiers\n -0 = literal value 0\n -1 = literal value 1\n -2 = literal value 2\n -3 = literal value 3\n -4 = literal value 4\n -5 = literal value 5\n -999 = literal value 999\n -9999 = literal value 9999\n\n -max = largest accepted z-index value as integer\n\n -inherit = string value inherit\n -initial = string value initial\n -unset = string value unset\n\n MDN: https://developer.mozilla.org/en/docs/Web/CSS/z-index\n Spec: http://www.w3.org/TR/CSS2/zindex.html\n Articles:\n https://philipwalton.com/articles/what-no-one-told-you-about-z-index/\n\n Tips on extending:\n There might be a time worth using negative z-index values.\n Or if you are using tachyons with another project, you might need to\n adjust these values to suit your needs.\n\n*/\n\n.z-0 { z-index: 0; }\n.z-1 { z-index: 1; }\n.z-2 { z-index: 2; }\n.z-3 { z-index: 3; }\n.z-4 { z-index: 4; }\n.z-5 { z-index: 5; }\n\n.z-999 { z-index: 999; }\n.z-9999 { z-index: 9999; }\n\n.z-max {\n z-index: 2147483647;\n}\n\n.z-inherit { z-index: inherit; }\n.z-initial { z-index: initial; }\n.z-unset { z-index: unset; }\n\n","\n// Converted Variables\n\n\n// Custom Media Query Variables\n\n\n/*\n\n NESTED\n Tachyons module for styling nested elements\n that are generated by a cms.\n\n*/\n\n.nested-copy-line-height p,\n.nested-copy-line-height ul,\n.nested-copy-line-height ol {\n line-height: $line-height-copy;\n}\n\n.nested-headline-line-height h1,\n.nested-headline-line-height h2,\n.nested-headline-line-height h3,\n.nested-headline-line-height h4,\n.nested-headline-line-height h5,\n.nested-headline-line-height h6 {\n line-height: $line-height-title;\n}\n\n.nested-list-reset ul,\n.nested-list-reset ol {\n padding-left: 0;\n margin-left: 0;\n list-style-type: none;\n}\n\n.nested-copy-indent p+p {\n text-indent: $letter-spacing-1;\n margin-top: $spacing-none;\n margin-bottom: $spacing-none;\n}\n\n.nested-copy-seperator p+p {\n margin-top: $spacing-copy-separator;\n}\n\n.nested-img img {\n width: 100%;\n max-width: 100%;\n display: block;\n}\n\n.nested-links a {\n color: $blue;\n transition: color .15s ease-in;\n}\n\n.nested-links a:hover,\n.nested-links a:focus {\n color: $light-blue;\n transition: color .15s ease-in;\n}\n",".wrapper\n{\n width: 100%;\n max-width: 1460px;\n margin: 0 auto;\n padding: 0 20px;\n box-sizing: border-box;\n}\n\n.opblock-tag-section\n{\n display: flex;\n flex-direction: column;\n}\n\n.try-out.btn-group {\n padding: 0;\n display: flex;\n flex: 0.1 2 auto;\n}\n\n.try-out__btn {\n margin-left: 1.25rem;\n}\n\n.opblock-tag\n{\n display: flex;\n align-items: center;\n\n padding: 10px 20px 10px 10px;\n\n cursor: pointer;\n transition: all .2s;\n\n border-bottom: 1px solid rgba($opblock-tag-border-bottom-color, .3);\n\n &:hover\n {\n background: rgba($opblock-tag-background-color-hover,.02);\n }\n}\n\n@mixin method($color)\n{\n border-color: $color;\n background: rgba($color, .1);\n\n .opblock-summary-method\n {\n background: $color;\n }\n\n .opblock-summary\n {\n border-color: $color;\n }\n\n .tab-header .tab-item.active h4 span:after\n {\n background: $color;\n }\n}\n\n\n\n\n.opblock-tag\n{\n font-size: 24px;\n\n margin: 0 0 5px 0;\n\n @include text_headline();\n\n &.no-desc\n {\n span\n {\n flex: 1;\n }\n }\n\n svg\n {\n transition: all .4s;\n }\n\n small\n {\n font-size: 14px;\n font-weight: normal;\n\n 
flex: 2;\n\n padding: 0 10px;\n\n @include text_body();\n }\n\n >div\n {\n overflow: hidden;\n white-space: nowrap;\n text-overflow: ellipsis;\n flex: 1 1 150px;\n font-weight: 400;\n }\n\n @media (max-width: 640px) {\n small\n {\n flex: 1;\n }\n\n >div\n {\n flex: 1;\n }\n }\n\n .info__externaldocs\n {\n text-align: right;\n }\n}\n\n.parameter__type\n{\n font-size: 12px;\n\n padding: 5px 0;\n\n @include text_code();\n}\n\n.parameter-controls {\n margin-top: 0.75em;\n}\n\n.examples {\n &__title {\n display: block;\n font-size: 1.1em;\n font-weight: bold;\n margin-bottom: 0.75em;\n }\n\n &__section {\n margin-top: 1.5em;\n }\n &__section-header {\n font-weight: bold;\n font-size: .9rem;\n margin-bottom: .5rem;\n // color: #555;\n }\n}\n\n.examples-select {\n margin-bottom: .75em;\n display: inline-block;\n .examples-select-element {\n width: 100%;\n }\n &__section-label {\n font-weight: bold;\n font-size: .9rem;\n margin-right: .5rem;\n }\n}\n\n.example {\n &__section {\n margin-top: 1.5em;\n }\n &__section-header {\n font-weight: bold;\n font-size: .9rem;\n margin-bottom: .5rem;\n // color: #555;\n }\n}\n\n.view-line-link\n{\n position: relative;\n top: 3px;\n\n width: 20px;\n margin: 0 5px;\n\n cursor: pointer;\n transition: all .5s;\n}\n\n\n\n.opblock\n{\n margin: 0 0 15px 0;\n\n border: 1px solid $opblock-border-color;\n border-radius: 4px;\n box-shadow: 0 0 3px rgba($opblock-box-shadow-color,.19);\n\n .tab-header\n {\n display: flex;\n\n flex: 1;\n\n .tab-item\n {\n padding: 0 40px;\n\n cursor: pointer;\n\n &:first-of-type\n {\n padding: 0 40px 0 0;\n }\n &.active\n {\n h4\n {\n span\n {\n position: relative;\n\n\n &:after\n {\n position: absolute;\n bottom: -15px;\n left: 50%;\n\n width: 120%;\n height: 4px;\n\n content: '';\n transform: translateX(-50%);\n\n background: $opblock-tab-header-tab-item-active-h4-span-after-background-color;\n }\n }\n }\n }\n }\n }\n\n\n &.is-open\n {\n .opblock-summary\n {\n border-bottom: 1px solid $opblock-isopen-summary-border-bottom-color;\n }\n }\n\n .opblock-section-header\n {\n display: flex;\n align-items: center;\n\n padding: 8px 20px;\n\n min-height: 50px;\n\n background: rgba($opblock-isopen-section-header-background-color,.8);\n box-shadow: 0 1px 2px rgba($opblock-isopen-section-header-box-shadow-color,.1);\n\n >label\n {\n font-size: 12px;\n font-weight: bold;\n\n display: flex;\n align-items: center;\n\n margin: 0;\n margin-left: auto;\n\n @include text_headline();\n\n >span\n {\n padding: 0 10px 0 0;\n }\n }\n\n h4\n {\n font-size: 14px;\n\n flex: 1;\n\n margin: 0;\n\n @include text_headline();\n }\n }\n\n .opblock-summary-method\n {\n font-size: 14px;\n font-weight: bold;\n\n min-width: 80px;\n padding: 6px 0;\n\n text-align: center;\n\n border-radius: 3px;\n background: $opblock-summary-method-background-color;\n text-shadow: 0 1px 0 rgba($opblock-summary-method-text-shadow-color,.1);\n\n @include text_headline($opblock-summary-method-font-color);\n }\n\n .opblock-summary-path,\n .opblock-summary-operation-id,\n .opblock-summary-path__deprecated\n {\n font-size: 16px;\n @media (max-width: 768px) {\n font-size: 12px;\n }\n\n\n display: flex;\n align-items: center;\n\n word-break: break-word;\n\n padding: 0 10px;\n\n @include text_code();\n\n }\n\n .opblock-summary-path\n {\n flex-shrink: 0;\n max-width: calc(100% - 110px - 15rem);\n }\n\n @media (max-width: 640px) {\n .opblock-summary-path\n {\n flex-shrink: 1;\n max-width: 100%;\n }\n }\n\n .opblock-summary-path__deprecated\n {\n text-decoration: line-through;\n }\n\n 
.opblock-summary-operation-id\n {\n font-size: 14px;\n }\n\n .opblock-summary-description\n {\n font-size: 13px;\n\n flex: 1 1 auto;\n\n word-break: break-word;\n\n @include text_body();\n }\n\n .opblock-summary\n {\n display: flex;\n align-items: center;\n\n padding: 5px;\n\n cursor: pointer;\n\n .view-line-link\n {\n position: relative;\n top: 2px;\n\n width: 0;\n margin: 0;\n\n cursor: pointer;\n transition: all .5s;\n }\n\n &:hover\n {\n .view-line-link\n {\n width: 18px;\n margin: 0 5px;\n\n &.copy-to-clipboard {\n width: 24px;\n }\n }\n }\n }\n\n\n\n &.opblock-post\n {\n @include method($_color-post);\n }\n\n &.opblock-put\n {\n @include method($_color-put);\n }\n\n &.opblock-delete\n {\n @include method($_color-delete);\n }\n\n &.opblock-get\n {\n @include method($_color-get);\n }\n\n &.opblock-patch\n {\n @include method($_color-patch);\n }\n\n &.opblock-head\n {\n @include method($_color-head);\n }\n\n &.opblock-options\n {\n @include method($_color-options);\n }\n\n &.opblock-deprecated\n {\n opacity: .6;\n\n @include method($_color-disabled);\n }\n\n .opblock-schemes\n {\n padding: 8px 20px;\n\n .schemes-title\n {\n padding: 0 10px 0 0;\n }\n }\n}\n\n.filter\n{\n .operation-filter-input\n {\n width: 100%;\n margin: 20px 0;\n padding: 10px 10px;\n\n border: 2px solid $operational-filter-input-border-color;\n }\n}\n\n.filter, .download-url-wrapper\n{\n .failed\n {\n color: red;\n }\n\n .loading\n {\n color: #aaa;\n }\n}\n\n.model-example {\n margin-top: 1em;\n}\n\n.tab\n{\n display: flex;\n\n padding: 0;\n\n list-style: none;\n\n li\n {\n font-size: 12px;\n\n min-width: 60px;\n padding: 0;\n\n cursor: pointer;\n\n @include text_headline();\n\n &:first-of-type\n {\n position: relative;\n\n padding-left: 0;\n padding-right: 12px;\n\n &:after\n {\n position: absolute;\n top: 0;\n right: 6px;\n\n width: 1px;\n height: 100%;\n\n content: '';\n\n background: rgba($tab-list-item-first-background-color,.2);\n }\n }\n\n &.active\n {\n font-weight: bold;\n }\n\n button.tablinks\n {\n background: none;\n border: 0;\n padding: 0;\n\n color: inherit;\n font-family: inherit;\n font-weight: inherit;\n }\n }\n}\n\n.opblock-description-wrapper,\n.opblock-external-docs-wrapper,\n.opblock-title_normal\n{\n font-size: 12px;\n\n margin: 0 0 5px 0;\n padding: 15px 20px;\n\n @include text_body();\n\n h4\n {\n font-size: 12px;\n\n margin: 0 0 5px 0;\n\n @include text_body();\n }\n\n p\n {\n font-size: 14px;\n\n margin: 0;\n\n @include text_body();\n }\n}\n\n.opblock-external-docs-wrapper {\n h4 {\n padding-left: 0px;\n }\n}\n\n.execute-wrapper\n{\n padding: 20px;\n\n text-align: right;\n\n .btn\n {\n width: 100%;\n padding: 8px 40px;\n }\n}\n\n.body-param-options\n{\n display: flex;\n flex-direction: column;\n\n .body-param-edit\n {\n padding: 10px 0;\n }\n\n label\n {\n padding: 8px 0;\n select\n {\n margin: 3px 0 0 0;\n }\n }\n}\n\n.responses-inner\n{\n padding: 20px;\n\n h5,\n h4\n {\n font-size: 12px;\n\n margin: 10px 0 5px 0;\n\n @include text_body();\n }\n\n .curl\n {\n white-space: normal;\n }\n}\n\n.response-col_status\n{\n font-size: 14px;\n\n @include text_body();\n\n .response-undocumented\n {\n font-size: 11px;\n\n @include text_code($response-col-status-undocumented-font-color);\n }\n}\n\n.response-col_links\n{\n padding-left: 2em;\n max-width: 40em;\n font-size: 14px;\n\n @include text_body();\n\n .response-undocumented\n {\n font-size: 11px;\n\n @include text_code($response-col-links-font-color);\n }\n\n .operation-link\n {\n margin-bottom: 1.5em;\n\n .description\n {\n margin-bottom: 
0.5em;\n }\n }\n}\n\n.opblock-body\n{\n .opblock-loading-animation\n {\n display: block;\n margin: 3em;\n margin-left: auto;\n margin-right: auto;\n }\n}\n\n.opblock-body pre.microlight\n{\n font-size: 12px;\n\n margin: 0;\n padding: 10px;\n\n white-space: pre-wrap;\n word-wrap: break-word;\n word-break: break-all;\n word-break: break-word;\n hyphens: auto;\n\n border-radius: 4px;\n background: $opblock-body-background-color;\n\n overflow-wrap: break-word;\n @include text_code($opblock-body-font-color);\n\n // disabled to have syntax highliting with react-syntax-highlight\n // span\n // {\n // color: $opblock-body-font-color !important;\n // }\n\n .headerline\n {\n display: block;\n }\n}\n\n.highlight-code {\n position: relative;\n\n > .microlight {\n overflow-y: auto;\n max-height: 400px;\n min-height: 6em;\n\n code {\n white-space: pre-wrap !important;\n word-break: break-all;\n }\n }\n}\n.curl-command {\n position: relative;\n}\n\n.download-contents {\n position: absolute;\n bottom: 10px;\n right: 10px;\n cursor: pointer;\n background: #7d8293;\n text-align: center;\n padding: 5px;\n border-radius: 4px;\n font-family: sans-serif;\n font-weight: 600;\n color: white;\n font-size: 14px;\n height: 30px;\n justify-content: center;\n align-items: center;\n display: flex;\n}\n\n.scheme-container\n{\n margin: 0 0 20px 0;\n padding: 30px 0;\n\n background: $scheme-container-background-color;\n box-shadow: 0 1px 2px 0 rgba($scheme-container-box-shadow-color,.15);\n\n .schemes\n {\n display: flex;\n align-items: flex-end;\n\n > label\n {\n font-size: 12px;\n font-weight: bold;\n\n display: flex;\n flex-direction: column;\n\n margin: -20px 15px 0 0;\n\n @include text_headline();\n\n select\n {\n min-width: 130px;\n\n text-transform: uppercase;\n }\n }\n }\n}\n\n.loading-container\n{\n padding: 40px 0 60px;\n margin-top: 1em;\n min-height: 1px;\n display: flex;\n justify-content: center;\n align-items: center;\n flex-direction: column;\n\n .loading\n {\n position: relative;\n\n\n &:after\n {\n font-size: 10px;\n font-weight: bold;\n\n position: absolute;\n top: 50%;\n left: 50%;\n\n content: 'loading';\n transform: translate(-50%,-50%);\n text-transform: uppercase;\n\n @include text_headline();\n }\n\n &:before\n {\n position: absolute;\n top: 50%;\n left: 50%;\n\n display: block;\n\n width: 60px;\n height: 60px;\n margin: -30px -30px;\n\n content: '';\n animation: rotation 1s infinite linear, opacity .5s;\n\n opacity: 1;\n border: 2px solid rgba($loading-container-before-border-color, .1);\n border-top-color: rgba($loading-container-before-border-top-color, .6);\n border-radius: 100%;\n\n backface-visibility: hidden;\n\n @keyframes rotation\n {\n to\n {\n transform: rotate(360deg);\n }\n }\n }\n }\n}\n\n.response-controls {\n padding-top: 1em;\n display: flex;\n}\n\n.response-control-media-type {\n margin-right: 1em;\n\n &--accept-controller {\n select {\n border-color: $response-content-type-controls-accept-header-select-border-color;\n }\n }\n\n &__accept-message {\n color: $response-content-type-controls-accept-header-small-font-color;\n font-size: .7em;\n }\n\n &__title {\n display: block;\n margin-bottom: 0.2em;\n font-size: .7em;\n }\n}\n\n.response-control-examples {\n &__title {\n display: block;\n margin-bottom: 0.2em;\n font-size: .7em;\n }\n}\n\n@keyframes blinker\n{\n 50%\n {\n opacity: 0;\n }\n}\n\n.hidden\n{\n display: none;\n}\n\n.no-margin\n{\n height: auto;\n border: none;\n margin: 0;\n padding: 0;\n}\n\n.float-right\n{\n float: right;\n}\n\n.svg-assets\n{\n position: absolute;\n 
width: 0;\n height: 0;\n}\n\nsection\n{\n h3\n {\n @include text_headline();\n }\n}\n\na.nostyle {\n text-decoration: inherit;\n color: inherit;\n cursor: pointer;\n display: inline;\n\n &:visited {\n text-decoration: inherit;\n color: inherit;\n cursor: pointer;\n }\n}\n\n.fallback\n{\n padding: 1em;\n color: #aaa;\n}\n\n.version-pragma {\n height: 100%;\n padding: 5em 0px;\n\n &__message {\n display: flex;\n justify-content: center;\n height: 100%;\n font-size: 1.2em;\n text-align: center;\n line-height: 1.5em;\n\n padding: 0px .6em;\n\n > div {\n max-width: 55ch;\n flex: 1;\n }\n\n code {\n background-color: #dedede;\n padding: 4px 4px 2px;\n white-space: pre;\n }\n }\n}\n\n.opblock-link\n{\n font-weight: normal;\n\n &.shown\n {\n font-weight: bold;\n }\n}\n\nspan\n{\n &.token-string\n {\n color: #555;\n }\n\n &.token-not-formatted\n {\n color: #555;\n font-weight: bold;\n }\n}\n",".btn\n{\n font-size: 14px;\n font-weight: bold;\n\n padding: 5px 23px;\n\n transition: all .3s;\n\n border: 2px solid $btn-border-color;\n border-radius: 4px;\n background: transparent;\n box-shadow: 0 1px 2px rgba($btn-box-shadow-color,.1);\n\n @include text_headline();\n\n &.btn-sm\n {\n font-size: 12px;\n padding: 4px 23px;\n }\n\n &[disabled]\n {\n cursor: not-allowed;\n\n opacity: .3;\n }\n\n &:hover\n {\n box-shadow: 0 0 5px rgba($btn-box-shadow-color,.3);\n }\n\n &.cancel\n {\n border-color: $btn-cancel-border-color;\n background-color: $btn-cancel-background-color;\n @include text_headline($btn-cancel-font-color);\n }\n\n &.authorize\n {\n line-height: 1;\n\n display: inline;\n\n color: $btn-authorize-font-color;\n border-color: $btn-authorize-border-color;\n background-color: $btn-authorize-background-color;\n\n span\n {\n float: left;\n\n padding: 4px 20px 0 0;\n }\n\n svg\n {\n fill: $btn-authorize-svg-fill-color;\n }\n }\n\n &.execute\n {\n background-color: $btn-execute-background-color-alt;\n color: $btn-execute-font-color;\n border-color: $btn-execute-border-color;\n }\n}\n\n.btn-group\n{\n display: flex;\n\n padding: 30px;\n\n .btn\n {\n flex: 1;\n\n &:first-child\n {\n border-radius: 4px 0 0 4px;\n }\n\n &:last-child\n {\n border-radius: 0 4px 4px 0;\n }\n }\n}\n\n.authorization__btn\n{\n padding: 0 0 0 10px;\n\n border: none;\n background: none;\n\n &.locked\n {\n opacity: 1;\n }\n\n &.unlocked\n {\n opacity: .4;\n }\n}\n\n.opblock-summary-control,\n.models-control,\n.model-box-control\n{\n all: inherit;\n flex: 1;\n border-bottom: 0;\n padding: 0;\n cursor: pointer;\n\n &:focus {\n outline: auto;\n }\n}\n\n.expand-methods,\n.expand-operation\n{\n border: none;\n background: none;\n\n svg\n {\n width: 20px;\n height: 20px;\n }\n}\n\n.expand-methods\n{\n padding: 0 10px;\n\n &:hover\n {\n svg\n {\n fill: $expand-methods-svg-fill-color-hover;\n }\n }\n\n svg\n {\n transition: all .3s;\n\n fill: $expand-methods-svg-fill-color;\n }\n}\n\nbutton\n{\n cursor: pointer;\n\n &.invalid\n {\n @include invalidFormElement();\n }\n}\n\n.copy-to-clipboard\n{\n position: absolute;\n display: flex;\n justify-content: center;\n align-items: center;\n bottom: 10px;\n right: 100px;\n width: 30px;\n height: 30px;\n background: #7d8293;\n border-radius: 4px;\n border: none;\n\n button\n {\n flex-grow: 1;\n flex-shrink: 1;\n border: none;\n height: 25px;\n background: url(\"data:image/svg+xml, \") center center no-repeat;\n }\n}\n\n// overrides for smaller copy button for curl command\n.curl-command .copy-to-clipboard\n{\n bottom: 5px;\n right: 10px;\n width: 20px;\n height: 20px;\n\n button\n {\n height: 18px;\n 
}\n}\n\n// overrides for copy to clipboard button\n.opblock .opblock-summary .view-line-link.copy-to-clipboard\n{\n height: 26px;\n position: unset;\n}","// - - - - - - - - - - - - - - - - - - -\n// - - _mixins.scss module\n// styles for the _mixins.scss module\n@function calculateRem($size)\n{\n $remSize: $size / 16px;\n @return $remSize * 1rem;\n}\n\n@mixin font-size($size)\n{\n font-size: $size;\n font-size: calculateRem($size);\n}\n\n%clearfix\n{\n &:before,\n &:after\n {\n display: table;\n\n content: ' ';\n }\n &:after\n {\n clear: both;\n }\n}\n\n@mixin size($width, $height: $width)\n{\n width: $width;\n height: $height;\n}\n\n$ease: (\n in-quad: cubic-bezier(.550, .085, .680, .530),\n in-cubic: cubic-bezier(.550, .055, .675, .190),\n in-quart: cubic-bezier(.895, .030, .685, .220),\n in-quint: cubic-bezier(.755, .050, .855, .060),\n in-sine: cubic-bezier(.470, .000, .745, .715),\n in-expo: cubic-bezier(.950, .050, .795, .035),\n in-circ: cubic-bezier(.600, .040, .980, .335),\n in-back: cubic-bezier(.600, -.280, .735, .045),\n out-quad: cubic-bezier(.250, .460, .450, .940),\n out-cubic: cubic-bezier(.215, .610, .355, 1.000),\n out-quart: cubic-bezier(.165, .840, .440, 1.000),\n out-quint: cubic-bezier(.230, 1.000, .320, 1.000),\n out-sine: cubic-bezier(.390, .575, .565, 1.000),\n out-expo: cubic-bezier(.190, 1.000, .220, 1.000),\n out-circ: cubic-bezier(.075, .820, .165, 1.000),\n out-back: cubic-bezier(.175, .885, .320, 1.275),\n in-out-quad: cubic-bezier(.455, .030, .515, .955),\n in-out-cubic: cubic-bezier(.645, .045, .355, 1.000),\n in-out-quart: cubic-bezier(.770, .000, .175, 1.000),\n in-out-quint: cubic-bezier(.860, .000, .070, 1.000),\n in-out-sine: cubic-bezier(.445, .050, .550, .950),\n in-out-expo: cubic-bezier(1.000, .000, .000, 1.000),\n in-out-circ: cubic-bezier(.785, .135, .150, .860),\n in-out-back: cubic-bezier(.680, -.550, .265, 1.550)\n);\n\n@function ease($key)\n{\n @if map-has-key($ease, $key)\n {\n @return map-get($ease, $key);\n }\n\n @warn 'Unkown \\'#{$key}\\' in $ease.';\n @return null;\n}\n\n\n@mixin ease($key)\n{\n transition-timing-function: ease($key);\n}\n\n@mixin text-truncate\n{\n overflow: hidden;\n\n white-space: nowrap;\n text-overflow: ellipsis;\n}\n\n@mixin aspect-ratio($width, $height)\n{\n position: relative;\n &:before\n {\n display: block;\n\n width: 100%;\n padding-top: ($height / $width) * 100%;\n\n content: '';\n }\n > iframe\n {\n position: absolute;\n top: 0;\n right: 0;\n bottom: 0;\n left: 0;\n }\n}\n\n$browser-context: 16;\n\n@function em($pixels, $context: $browser-context)\n{\n @if (unitless($pixels))\n {\n $pixels: $pixels * 1px;\n }\n\n @if (unitless($context))\n {\n $context: $context * 1px;\n }\n\n @return $pixels / $context * 1em;\n}\n\n@mixin maxHeight($height)\n{\n @media (max-height: $height)\n {\n @content;\n }\n}\n\n\n@mixin breakpoint($class)\n{\n @if $class == tablet\n {\n @media (min-width: 768px) and (max-width: 1024px)\n {\n @content;\n }\n }\n\n @else if $class == mobile\n {\n @media (min-width: 320px) and (max-width : 736px)\n {\n @content;\n }\n }\n\n @else if $class == desktop\n {\n @media (min-width: 1400px)\n {\n @content;\n }\n }\n\n @else\n {\n @warn 'Breakpoint mixin supports: tablet, mobile, desktop';\n }\n}\n\n@mixin invalidFormElement() {\n animation: shake .4s 1;\n border-color: $_color-delete;\n background: lighten($_color-delete, 35%);\n}\n","select\n{\n font-size: 14px;\n font-weight: bold;\n\n padding: 5px 40px 5px 10px;\n\n border: 2px solid $form-select-border-color;\n border-radius: 4px;\n 
background: $form-select-background-color url('data:image/svg+xml, ') right 10px center no-repeat;\n background-size: 20px;\n box-shadow: 0 1px 2px 0 rgba($form-select-box-shadow-color, .25);\n\n @include text_headline();\n appearance: none;\n\n &[multiple]\n {\n margin: 5px 0;\n padding: 5px;\n\n background: $form-select-background-color;\n }\n\n &.invalid {\n @include invalidFormElement();\n }\n}\n\n.opblock-body select\n{\n min-width: 230px;\n @media (max-width: 768px)\n {\n min-width: 180px;\n }\n @media (max-width: 640px)\n {\n width: 100%;\n min-width: 100%;\n }\n}\n\nlabel\n{\n font-size: 12px;\n font-weight: bold;\n\n margin: 0 0 5px 0;\n\n @include text_headline();\n}\n\ninput[type=text],\ninput[type=password],\ninput[type=search],\ninput[type=email],\ninput[type=file]\n{\n line-height: 1;\n\n @media (max-width: 768px) {\n max-width: 175px;\n }\n}\n\n\ninput[type=text],\ninput[type=password],\ninput[type=search],\ninput[type=email],\ninput[type=file],\ntextarea\n{\n min-width: 100px;\n margin: 5px 0;\n padding: 8px 10px;\n\n border: 1px solid $form-input-border-color;\n border-radius: 4px;\n background: $form-input-background-color;\n\n\n &.invalid\n {\n @include invalidFormElement();\n }\n\n}\n\ninput,\ntextarea,\nselect {\n &[disabled] {\n // opacity: 0.85;\n background-color: #fafafa;\n color: #888;\n cursor: not-allowed;\n }\n}\n\nselect[disabled] {\n border-color: #888;\n}\n\ntextarea[disabled] {\n background-color: #41444e;\n color: #fff;\n}\n\n@keyframes shake\n{\n 10%,\n 90%\n {\n transform: translate3d(-1px, 0, 0);\n }\n\n 20%,\n 80%\n {\n transform: translate3d(2px, 0, 0);\n }\n\n 30%,\n 50%,\n 70%\n {\n transform: translate3d(-4px, 0, 0);\n }\n\n 40%,\n 60%\n {\n transform: translate3d(4px, 0, 0);\n }\n}\n\ntextarea\n{\n font-size: 12px;\n\n width: 100%;\n min-height: 280px;\n padding: 10px;\n\n border: none;\n border-radius: 4px;\n outline: none;\n background: rgba($form-textarea-background-color,.8);\n\n @include text_code();\n\n &:focus\n {\n border: 2px solid $form-textarea-focus-border-color;\n }\n\n &.curl\n {\n font-size: 12px;\n\n min-height: 100px;\n margin: 0;\n padding: 10px;\n\n resize: none;\n\n border-radius: 4px;\n background: $form-textarea-curl-background-color;\n\n @include text_code($form-textarea-curl-font-color);\n }\n}\n\n\n.checkbox\n{\n padding: 5px 0 10px;\n\n transition: opacity .5s;\n\n color: $form-checkbox-label-font-color;\n\n label\n {\n display: flex;\n }\n\n p\n {\n font-weight: normal !important;\n font-style: italic;\n\n margin: 0 !important;\n\n @include text_code();\n }\n\n input[type=checkbox]\n {\n display: none;\n\n & + label > .item\n {\n position: relative;\n top: 3px;\n\n display: inline-block;\n\n width: 16px;\n height: 16px;\n margin: 0 8px 0 0;\n padding: 5px;\n\n cursor: pointer;\n\n border-radius: 1px;\n background: $form-checkbox-background-color;\n box-shadow: 0 0 0 2px $form-checkbox-box-shadow-color;\n\n flex: none;\n\n &:active\n {\n transform: scale(.9);\n }\n }\n\n &:checked + label > .item\n {\n background: $form-checkbox-background-color url('data:image/svg+xml, ') center center no-repeat;\n }\n }\n}\n",".dialog-ux\n{\n position: fixed;\n z-index: 9999;\n top: 0;\n right: 0;\n bottom: 0;\n left: 0;\n\n .backdrop-ux\n {\n position: fixed;\n top: 0;\n right: 0;\n bottom: 0;\n left: 0;\n\n background: rgba($dialog-ux-backdrop-background-color,.8);\n }\n\n .modal-ux\n {\n position: absolute;\n z-index: 9999;\n top: 50%;\n left: 50%;\n\n width: 100%;\n min-width: 300px;\n max-width: 650px;\n\n transform: 
translate(-50%,-50%);\n\n border: 1px solid $dialog-ux-modal-border-color;\n border-radius: 4px;\n background: $dialog-ux-modal-background-color;\n box-shadow: 0 10px 30px 0 rgba($dialog-ux-modal-box-shadow-color,.20);\n }\n\n .modal-ux-content\n {\n overflow-y: auto;\n\n max-height: 540px;\n padding: 20px;\n\n p\n {\n font-size: 12px;\n\n margin: 0 0 5px 0;\n\n color: $dialog-ux-modal-content-font-color;\n\n @include text_body();\n }\n\n h4\n {\n font-size: 18px;\n font-weight: 600;\n\n margin: 15px 0 0 0;\n\n @include text_headline();\n }\n }\n\n .modal-ux-header\n {\n display: flex;\n\n padding: 12px 0;\n\n border-bottom: 1px solid $dialog-ux-modal-header-border-bottom-color;\n\n align-items: center;\n\n .close-modal\n {\n padding: 0 10px;\n\n border: none;\n background: none;\n\n appearance: none;\n }\n\n\n h3\n {\n font-size: 20px;\n font-weight: 600;\n\n margin: 0;\n padding: 0 20px;\n\n flex: 1;\n @include text_headline();\n }\n }\n}\n",".model\n{\n font-size: 12px;\n font-weight: 300;\n\n @include text_code();\n\n .deprecated\n {\n span,\n td\n {\n color: $model-deprecated-font-color !important;\n }\n\n > td:first-of-type {\n text-decoration: line-through;\n }\n }\n &-toggle\n {\n font-size: 10px;\n\n position: relative;\n top: 6px;\n\n display: inline-block;\n\n margin: auto .3em;\n\n cursor: pointer;\n transition: transform .15s ease-in;\n transform: rotate(90deg);\n transform-origin: 50% 50%;\n\n &.collapsed\n {\n transform: rotate(0deg);\n }\n\n &:after\n {\n display: block;\n\n width: 20px;\n height: 20px;\n\n content: '';\n\n background: url('data:image/svg+xml, ') center no-repeat;\n background-size: 100%;\n }\n }\n\n &-jump-to-path\n {\n position: relative;\n\n cursor: pointer;\n\n .view-line-link\n {\n position: absolute;\n top: -.4em;\n\n cursor: pointer;\n }\n }\n\n &-title\n {\n position: relative;\n\n &:hover .model-hint\n {\n visibility: visible;\n }\n }\n\n &-hint\n {\n position: absolute;\n top: -1.8em;\n\n visibility: hidden;\n\n padding: .1em .5em;\n\n white-space: nowrap;\n\n color: $model-hint-font-color;\n border-radius: 4px;\n background: rgba($model-hint-background-color,.7);\n }\n\n p\n {\n margin: 0 0 1em 0;\n }\n\n .property\n {\n color: #999;\n font-style: italic;\n\n &.primitive\n {\n color: #6b6b6b;\n }\n }\n\n .external-docs\n {\n color: #666;\n font-weight: normal;\n }\n}\n\ntable.model\n{\n tr\n {\n &.description\n {\n color: #666;\n font-weight: normal;\n \n td:first-child\n {\n font-weight: bold;\n }\n }\n\n &.property-row\n {\n &.required td:first-child\n {\n font-weight: bold;\n }\n\n td\n {\n vertical-align: top;\n\n &:first-child\n {\n padding-right: 0.2em;\n }\n }\n\n .star\n {\n color: red;\n }\n }\n\n &.extension\n {\n color: #777;\n\n td:last-child\n {\n vertical-align: top;\n }\n }\n\n &.external-docs\n {\n td:first-child\n {\n font-weight: bold;\n }\n }\n\n .renderedMarkdown p:first-child\n {\n margin-top: 0;\n } \n }\n}\n\nsection.models\n{\n margin: 30px 0;\n\n border: 1px solid rgba($section-models-border-color, .3);\n border-radius: 4px;\n\n .pointer\n {\n cursor: pointer;\n }\n\n &.is-open\n {\n padding: 0 0 20px;\n h4\n {\n margin: 0 0 5px 0;\n\n border-bottom: 1px solid rgba($section-models-isopen-h4-border-bottom-color, .3);\n }\n }\n h4\n {\n font-size: 16px;\n\n display: flex;\n align-items: center;\n\n margin: 0;\n padding: 10px 20px 10px 10px;\n\n cursor: pointer;\n transition: all .2s;\n\n @include text_headline($section-models-h4-font-color);\n\n svg\n {\n transition: all .4s;\n }\n\n span\n {\n flex: 1;\n }\n\n &:hover\n 
{\n background: rgba($section-models-h4-background-color-hover,.02);\n }\n }\n\n h5\n {\n font-size: 16px;\n\n margin: 0 0 10px 0;\n\n @include text_headline($section-models-h5-font-color);\n }\n\n .model-jump-to-path\n {\n position: relative;\n top: 5px;\n }\n\n .model-container\n {\n margin: 0 20px 15px;\n position: relative;\n\n transition: all .5s;\n\n border-radius: 4px;\n background: rgba($section-models-model-container-background-color,.05);\n\n &:hover\n {\n background: rgba($section-models-model-container-background-color,.07);\n }\n\n &:first-of-type\n {\n margin: 20px;\n }\n\n &:last-of-type\n {\n margin: 0 20px;\n }\n\n .models-jump-to-path {\n position: absolute;\n top: 8px;\n right: 5px;\n opacity: 0.65;\n }\n }\n\n .model-box\n {\n background: none;\n }\n}\n\n\n.model-box\n{\n padding: 10px;\n display: inline-block;\n\n border-radius: 4px;\n background: rgba($section-models-model-box-background-color,.1);\n\n .model-jump-to-path\n {\n position: relative;\n top: 4px;\n }\n\n &.deprecated\n {\n opacity: .5;\n }\n}\n\n\n.model-title\n{\n font-size: 16px;\n\n @include text_headline($section-models-model-title-font-color);\n\n img\n {\n margin-left: 1em;\n position: relative;\n bottom: 0px;\n }\n}\n\n.model-deprecated-warning\n{\n font-size: 16px;\n font-weight: 600;\n\n margin-right: 1em;\n\n @include text_headline($_color-delete);\n}\n\n\nspan\n{\n > span.model\n {\n .brace-close\n {\n padding: 0 0 0 10px;\n }\n }\n}\n\n.prop-name\n{\n display: inline-block;\n\n margin-right: 1em;\n}\n\n.prop-type\n{\n color: $prop-type-font-color;\n}\n\n.prop-enum\n{\n display: block;\n}\n.prop-format\n{\n color: $prop-format-font-color;\n}\n",".servers\n{\n > label\n {\n font-size: 12px;\n\n margin: -20px 15px 0 0;\n\n @include text_headline();\n\n select\n {\n min-width: 130px;\n max-width: 100%;\n width: 100%;\n }\n }\n\n h4.message {\n padding-bottom: 2em;\n }\n\n table {\n tr {\n width: 30em;\n }\n td {\n display: inline-block;\n max-width: 15em;\n vertical-align: middle;\n padding-top: 10px;\n padding-bottom: 10px;\n\n &:first-of-type {\n padding-right: 1em;\n }\n\n input {\n width: 100%;\n height: 100%;\n }\n }\n }\n\n .computed-url {\n margin: 2em 0;\n\n code {\n display: inline-block;\n padding: 4px;\n font-size: 16px;\n margin: 0 1em;\n }\n }\n}\n\n.servers-title {\n font-size: 12px;\n font-weight: bold;\n}\n\n.operation-servers {\n h4.message {\n margin-bottom: 2em;\n }\n}\n","table\n{\n width: 100%;\n padding: 0 10px;\n\n border-collapse: collapse;\n\n &.model\n {\n tbody\n {\n tr\n {\n td\n {\n padding: 0;\n\n vertical-align: top;\n\n &:first-of-type\n {\n width: 174px;\n padding: 0 0 0 2em;\n }\n }\n }\n }\n }\n\n &.headers\n {\n td\n {\n font-size: 12px;\n font-weight: 300;\n\n vertical-align: middle;\n\n @include text_code();\n }\n\n .header-example\n {\n color: #999;\n font-style: italic;\n }\n }\n\n tbody\n {\n tr\n {\n td\n {\n padding: 10px 0 0 0;\n\n vertical-align: top;\n\n &:first-of-type\n {\n min-width: 6em;\n padding: 10px 0;\n }\n }\n }\n }\n\n thead\n {\n tr\n {\n th,\n td\n {\n font-size: 12px;\n font-weight: bold;\n\n padding: 12px 0;\n\n text-align: left;\n\n border-bottom: 1px solid rgba($table-thead-td-border-bottom-color, .2);\n\n @include text_body();\n }\n }\n }\n}\n\n.parameters-col_description\n{\n width: 99%; // forces other columns to shrink to their content widths\n margin-bottom: 2em;\n input\n {\n width: 100%;\n max-width: 340px;\n }\n\n select {\n border-width: 1px;\n }\n\n .markdown {\n p {\n margin: 0;\n }\n }\n}\n\n.parameter__name\n{\n font-size: 
16px;\n font-weight: normal;\n\n // hack to give breathing room to the name column\n // TODO: refactor all of this to flexbox\n margin-right: .75em;\n\n @include text_headline();\n\n &.required\n {\n font-weight: bold;\n\n span\n {\n color: red;\n }\n\n &:after\n {\n font-size: 10px;\n\n position: relative;\n top: -6px;\n\n padding: 5px;\n\n content: 'required';\n\n color: rgba($table-parameter-name-required-font-color, .6);\n }\n }\n}\n\n.parameter__in,\n.parameter__extension\n{\n font-size: 12px;\n font-style: italic;\n\n @include text_code($table-parameter-in-font-color);\n}\n\n.parameter__deprecated\n{\n font-size: 12px;\n font-style: italic;\n\n @include text_code($table-parameter-deprecated-font-color);\n}\n\n.parameter__empty_value_toggle {\n display: block;\n font-size: 13px;\n padding-top: 5px;\n padding-bottom: 12px;\n\n input {\n margin-right: 7px;\n }\n\n &.disabled {\n opacity: 0.7;\n }\n}\n\n\n.table-container\n{\n padding: 20px;\n}\n\n\n.response-col_description {\n width: 99%; // forces other columns to shrink to their content widths\n\n .markdown {\n p {\n margin: 0;\n }\n }\n}\n\n.response-col_links {\n min-width: 6em;\n}\n\n.response__extension\n{\n font-size: 12px;\n font-style: italic;\n\n @include text_code($table-parameter-in-font-color);\n}\n",".topbar\n{\n padding: 10px 0;\n\n background-color: $topbar-background-color;\n .topbar-wrapper\n {\n display: flex;\n align-items: center;\n }\n a\n {\n font-size: 1.5em;\n font-weight: bold;\n\n display: flex;\n align-items: center;\n flex: 1;\n\n max-width: 300px;\n\n text-decoration: none;\n\n @include text_headline($topbar-link-font-color);\n\n span\n {\n margin: 0;\n padding: 0 10px;\n }\n }\n\n .download-url-wrapper\n {\n display: flex;\n flex: 3;\n justify-content: flex-end;\n\n input[type=text]\n {\n width: 100%;\n margin: 0;\n\n border: 2px solid $topbar-download-url-wrapper-element-border-color;\n border-radius: 4px 0 0 4px;\n outline: none;\n }\n\n .select-label\n {\n display: flex;\n align-items: center;\n\n width: 100%;\n max-width: 600px;\n margin: 0;\n color: #f0f0f0;\n span\n {\n font-size: 16px;\n\n flex: 1;\n\n padding: 0 10px 0 0;\n\n text-align: right;\n }\n\n select\n {\n flex: 2;\n\n width: 100%;\n\n border: 2px solid $topbar-download-url-wrapper-element-border-color;\n outline: none;\n box-shadow: none;\n }\n }\n\n\n .download-url-button\n {\n font-size: 16px;\n font-weight: bold;\n\n padding: 4px 30px;\n\n border: none;\n border-radius: 0 4px 4px 0;\n background: $topbar-download-url-button-background-color;\n\n @include text_headline($topbar-download-url-button-font-color);\n }\n }\n}\n",".info\n{\n margin: 50px 0;\n\n &.failed-config\n { \n max-width: 880px;\n margin-left: auto;\n margin-right: auto;\n text-align: center\n }\n\n hgroup.main\n {\n margin: 0 0 20px 0;\n a\n {\n font-size: 12px;\n }\n }\n pre \n {\n font-size: 14px;\n }\n p, li, table\n {\n font-size: 14px;\n\n @include text_body();\n }\n\n h1, h2, h3, h4, h5\n {\n @include text_body();\n }\n\n a\n {\n font-size: 14px;\n\n transition: all .4s;\n\n @include text_body($info-link-font-color);\n\n &:hover\n {\n color: darken($info-link-font-color-hover, 15%);\n }\n }\n > div\n {\n margin: 0 0 5px 0;\n }\n\n .base-url\n {\n font-size: 12px;\n font-weight: 300 !important;\n\n margin: 0;\n\n @include text_code();\n }\n\n .title\n {\n font-size: 36px;\n\n margin: 0;\n\n @include text_body();\n\n small\n {\n font-size: 10px;\n\n position: relative;\n top: -5px;\n\n display: inline-block;\n\n margin: 0 0 0 5px;\n padding: 2px 4px;\n\n 
vertical-align: super;\n\n border-radius: 57px;\n background: $info-title-small-background-color;\n \n &.version-stamp\n {\n background-color: #89bf04;\n }\n\n pre\n {\n margin: 0;\n padding: 0;\n\n @include text_headline($info-title-small-pre-font-color);\n }\n }\n }\n}\n",".auth-btn-wrapper\n{\n display: flex;\n\n padding: 10px 0;\n\n justify-content: center;\n\n .btn-done {\n margin-right: 1em;\n }\n}\n\n.auth-wrapper\n{\n display: flex;\n\n flex: 1;\n justify-content: flex-end;\n\n .authorize\n {\n padding-right: 20px;\n margin-left: 10px;\n margin-right: 10px;\n }\n}\n\n.auth-container\n{\n margin: 0 0 10px 0;\n padding: 10px 20px;\n\n border-bottom: 1px solid $auth-container-border-color;\n\n &:last-of-type\n {\n margin: 0;\n padding: 10px 20px;\n\n border: 0;\n }\n\n h4\n {\n margin: 5px 0 15px 0 !important;\n }\n\n .wrapper\n {\n margin: 0;\n padding: 0;\n }\n\n input[type=text],\n input[type=password]\n {\n min-width: 230px;\n }\n\n .errors\n {\n font-size: 12px;\n\n padding: 10px;\n\n border-radius: 4px;\n\n background-color: #ffeeee;\n\n color: red;\n\n margin: 1em;\n\n @include text_code();\n\n b\n {\n text-transform: capitalize;\n margin-right: 1em;\n }\n }\n}\n\n.scopes\n{\n h2\n {\n font-size: 14px;\n\n @include text_headline();\n\n a\n {\n font-size: 12px;\n color: $auth-select-all-none-link-font-color;\n cursor: pointer;\n padding-left: 10px;\n text-decoration: underline;\n }\n }\n}\n\n.scope-def\n{\n padding: 0 0 20px 0;\n}\n",".errors-wrapper\n{\n margin: 20px;\n padding: 10px 20px;\n\n animation: scaleUp .5s;\n\n border: 2px solid $_color-delete;\n border-radius: 4px;\n background: rgba($_color-delete, .1);\n\n .error-wrapper\n {\n margin: 0 0 10px 0;\n }\n\n .errors\n {\n h4\n {\n font-size: 14px;\n\n margin: 0;\n\n @include text_code();\n }\n\n small\n {\n color: $errors-wrapper-errors-small-font-color;\n }\n\n .message\n { \n white-space: pre-line;\n \n &.thrown\n {\n max-width: 100%;\n }\n }\n\n .error-line\n {\n text-decoration: underline;\n cursor: pointer;\n }\n }\n\n hgroup\n {\n display: flex;\n\n align-items: center;\n\n h4\n {\n font-size: 20px;\n\n margin: 0;\n\n flex: 1;\n @include text_headline();\n }\n }\n}\n\n\n@keyframes scaleUp\n{\n 0%\n {\n transform: scale(.8);\n\n opacity: 0;\n }\n 100%\n {\n transform: scale(1);\n\n opacity: 1;\n }\n}\n",".Resizer.vertical.disabled {\n display: none;\n}",".markdown, .renderedMarkdown {\n p, pre {\n margin: 1em auto;\n\n word-break: break-all; /* Fallback trick */\n word-break: break-word;\n }\n pre {\n color: black;\n font-weight: normal;\n white-space: pre-wrap;\n background: none;\n padding: 0px;\n }\n\n code {\n font-size: 14px;\n padding: 5px 7px;\n\n border-radius: 4px;\n background: rgba($info-code-background-color,.05);\n\n @include text_code($info-code-font-color);\n }\n\n pre > code {\n display: block;\n }\n}\n"],"names":[],"sourceRoot":""} \ No newline at end of file diff --git a/vendor/github.com/swaggo/files/v2/dist/swagger-ui.js b/vendor/github.com/swaggo/files/v2/dist/swagger-ui.js new file mode 100644 index 0000000..883c169 --- /dev/null +++ b/vendor/github.com/swaggo/files/v2/dist/swagger-ui.js @@ -0,0 +1,2 @@ +!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.SwaggerUICore=t():e.SwaggerUICore=t()}(this,(function(){return(()=>{var e={6024:(e,t,r)=>{"use strict";r.d(t,{Z:()=>E});var n=r(4250),s=r.n(n),a=r(1093),o=r.n(a),l=r(8493),i=r.n(l),c=r(3942),p=r.n(c),u=r(6689),d=r.n(u);const 
m=require("react-immutable-pure-component");var h=r.n(m),g=r(8082),f=r.n(g),y=r(580),v=r.n(y);class E extends(h()){constructor(){super(...arguments),o()(this,"getModelName",(e=>-1!==i()(e).call(e,"#/definitions/")?e.replace(/^.*#\/definitions\//,""):-1!==i()(e).call(e,"#/components/schemas/")?e.replace(/^.*#\/components\/schemas\//,""):void 0)),o()(this,"getRefSchema",(e=>{let{specSelectors:t}=this.props;return t.findDefinition(e)}))}render(){let{getComponent:e,getConfigs:t,specSelectors:n,schema:a,required:o,name:l,isRef:i,specPath:c,displayName:p,includeReadOnly:u,includeWriteOnly:m}=this.props;const h=e("ObjectModel"),g=e("ArrayModel"),f=e("PrimitiveModel");let y="object",v=a&&a.get("$$ref");if(!l&&v&&(l=this.getModelName(v)),!a&&v&&(a=this.getRefSchema(l)),!a)return d().createElement("span",{className:"model model-title"},d().createElement("span",{className:"model-title__text"},p||l),d().createElement("img",{src:r(2517),height:"20px",width:"20px"}));const E=n.isOAS3()&&a.get("deprecated");switch(i=void 0!==i?i:!!v,y=a&&a.get("type")||y,y){case"object":return d().createElement(h,s()({className:"object"},this.props,{specPath:c,getConfigs:t,schema:a,name:l,deprecated:E,isRef:i,includeReadOnly:u,includeWriteOnly:m}));case"array":return d().createElement(g,s()({className:"array"},this.props,{getConfigs:t,schema:a,name:l,deprecated:E,required:o,includeReadOnly:u,includeWriteOnly:m}));default:return d().createElement(f,s()({},this.props,{getComponent:e,getConfigs:t,schema:a,name:l,deprecated:E,required:o}))}}}o()(E,"propTypes",{schema:p()(f()).isRequired,getComponent:v().func.isRequired,getConfigs:v().func.isRequired,specSelectors:v().object.isRequired,name:v().string,displayName:v().string,isRef:v().bool,required:v().bool,expandDepth:v().number,depth:v().number,specPath:f().list.isRequired,includeReadOnly:v().bool,includeWriteOnly:v().bool})},5623:(e,t,r)=>{"use strict";r.d(t,{Z:()=>m});var n=r(1093),s=r.n(n),a=r(7252),o=r.n(a),l=r(6689),i=r.n(l),c=r(3883),p=r.n(c),u=(r(580),r(1890)),d=r(7504);class m extends i().Component{constructor(e,t){super(e,t),s()(this,"getDefinitionUrl",(()=>{let{specSelectors:e}=this.props;return new(p())(e.url(),d.Z.location).toString()}));let{getConfigs:r}=e,{validatorUrl:n}=r();this.state={url:this.getDefinitionUrl(),validatorUrl:void 0===n?"https://validator.swagger.io/validator":n}}UNSAFE_componentWillReceiveProps(e){let{getConfigs:t}=e,{validatorUrl:r}=t();this.setState({url:this.getDefinitionUrl(),validatorUrl:void 0===r?"https://validator.swagger.io/validator":r})}render(){let{getConfigs:e}=this.props,{spec:t}=e(),r=(0,u.Nm)(this.state.validatorUrl);return"object"==typeof t&&o()(t).length?null:this.state.url&&(0,u.hW)(this.state.validatorUrl)&&(0,u.hW)(this.state.url)?i().createElement("span",{className:"float-right"},i().createElement("a",{target:"_blank",rel:"noopener noreferrer",href:`${r}/debug?url=${encodeURIComponent(this.state.url)}`},i().createElement(h,{src:`${r}?url=${encodeURIComponent(this.state.url)}`,alt:"Online validator badge"}))):null}}class h extends i().Component{constructor(e){super(e),this.state={loaded:!1,error:!1}}componentDidMount(){const e=new Image;e.onload=()=>{this.setState({loaded:!0})},e.onerror=()=>{this.setState({error:!0})},e.src=this.props.src}UNSAFE_componentWillReceiveProps(e){if(e.src!==this.props.src){const t=new Image;t.onload=()=>{this.setState({loaded:!0})},t.onerror=()=>{this.setState({error:!0})},t.src=e.src}}render(){return 
this.state.error?i().createElement("img",{alt:"Error"}):this.state.loaded?i().createElement("img",{src:this.props.src,alt:this.props.alt}):null}}},2552:(e,t,r)=>{"use strict";r.d(t,{Z:()=>d,s:()=>m});var n=r(6689),s=r.n(n),a=(r(580),r(963));const o=require("remarkable/linkify"),l=require("dompurify");var i=r.n(l),c=r(9003),p=r.n(c);function u(e){let{source:t,className:r="",getConfigs:n}=e;if("string"!=typeof t)return null;const l=new a.Remarkable({html:!0,typographer:!0,breaks:!0,linkTarget:"_blank"}).use(o.linkify);l.core.ruler.disable(["replacements","smartquotes"]);const{useUnsafeMarkdown:i}=n(),c=l.render(t),u=m(c,{useUnsafeMarkdown:i});return t&&c&&u?s().createElement("div",{className:p()(r,"markdown"),dangerouslySetInnerHTML:{__html:u}}):null}i().addHook&&i().addHook("beforeSanitizeElements",(function(e){return e.href&&e.setAttribute("rel","noopener noreferrer"),e})),u.defaultProps={getConfigs:()=>({useUnsafeMarkdown:!1})};const d=u;function m(e){let{useUnsafeMarkdown:t=!1}=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};const r=t,n=t?[]:["style","class"];return t&&!m.hasWarnedAboutDeprecation&&(console.warn("useUnsafeMarkdown display configuration parameter is deprecated since >3.26.0 and will be removed in v4.0.0."),m.hasWarnedAboutDeprecation=!0),i().sanitize(e,{ADD_ATTR:["target"],FORBID_TAGS:["style","form"],ALLOW_DATA_ATTR:r,FORBID_ATTR:n})}m.hasWarnedAboutDeprecation=!1},5308:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>d});var n,s=r(4235),a=r.n(s),o=r(874),l=r.n(o),i=r(1890),c=r(9595);const p=r(5102),u={},d=u;a()(n=l()(p).call(p)).call(n,(function(e){if("./index.js"===e)return;let t=p(e);u[(0,i.Zl)(e)]=t.default?t.default:t})),u.SafeRender=c.default},5812:(e,t,r)=>{"use strict";r.r(t),r.d(t,{SHOW_AUTH_POPUP:()=>u,AUTHORIZE:()=>d,LOGOUT:()=>m,PRE_AUTHORIZE_OAUTH2:()=>h,AUTHORIZE_OAUTH2:()=>g,VALIDATE:()=>f,CONFIGURE_AUTH:()=>y,RESTORE_AUTHORIZATION:()=>v,showDefinitions:()=>E,authorize:()=>S,authorizeWithPersistOption:()=>C,logout:()=>b,logoutWithPersistOption:()=>x,preAuthorizeImplicit:()=>w,authorizeOauth2:()=>_,authorizeOauth2WithPersistOption:()=>A,authorizePassword:()=>I,authorizeApplication:()=>N,authorizeAccessCodeWithFormParams:()=>q,authorizeAccessCodeWithBasicAuthentication:()=>R,authorizeRequest:()=>T,configureAuth:()=>P,restoreAuthorization:()=>k,persistAuthorizationIfNeeded:()=>O,authPopup:()=>M});var n=r(8344),s=r.n(n),a=r(4994),o=r.n(a),l=r(3883),i=r.n(l),c=r(7504),p=r(1890);const u="show_popup",d="authorize",m="logout",h="pre_authorize_oauth2",g="authorize_oauth2",f="validate",y="configure_auth",v="restore_authorization";function E(e){return{type:u,payload:e}}function S(e){return{type:d,payload:e}}const C=e=>t=>{let{authActions:r}=t;r.authorize(e),r.persistAuthorizationIfNeeded()};function b(e){return{type:m,payload:e}}const x=e=>t=>{let{authActions:r}=t;r.logout(e),r.persistAuthorizationIfNeeded()},w=e=>t=>{let{authActions:r,errActions:n}=t,{auth:a,token:o,isValid:l}=e,{schema:i,name:p}=a,u=i.get("flow");delete c.Z.swaggerUIRedirectOauth2,"accessCode"===u||l||n.newAuthErr({authId:p,source:"auth",level:"warning",message:"Authorization may be unsafe, passed state was changed in server Passed state wasn't returned from auth server"}),o.error?n.newAuthErr({authId:p,source:"auth",level:"error",message:s()(o)}):r.authorizeOauth2WithPersistOption({auth:a,token:o})};function _(e){return{type:g,payload:e}}const 
A=e=>t=>{let{authActions:r}=t;r.authorizeOauth2(e),r.persistAuthorizationIfNeeded()},I=e=>t=>{let{authActions:r}=t,{schema:n,name:s,username:a,password:l,passwordType:i,clientId:c,clientSecret:u}=e,d={grant_type:"password",scope:e.scopes.join(" "),username:a,password:l},m={};switch(i){case"request-body":!function(e,t,r){t&&o()(e,{client_id:t});r&&o()(e,{client_secret:r})}(d,c,u);break;case"basic":m.Authorization="Basic "+(0,p.r3)(c+":"+u);break;default:console.warn(`Warning: invalid passwordType ${i} was passed, not including client id and secret`)}return r.authorizeRequest({body:(0,p.GZ)(d),url:n.get("tokenUrl"),name:s,headers:m,query:{},auth:e})};const N=e=>t=>{let{authActions:r}=t,{schema:n,scopes:s,name:a,clientId:o,clientSecret:l}=e,i={Authorization:"Basic "+(0,p.r3)(o+":"+l)},c={grant_type:"client_credentials",scope:s.join(" ")};return r.authorizeRequest({body:(0,p.GZ)(c),name:a,url:n.get("tokenUrl"),auth:e,headers:i})},q=e=>{let{auth:t,redirectUrl:r}=e;return e=>{let{authActions:n}=e,{schema:s,name:a,clientId:o,clientSecret:l,codeVerifier:i}=t,c={grant_type:"authorization_code",code:t.code,client_id:o,client_secret:l,redirect_uri:r,code_verifier:i};return n.authorizeRequest({body:(0,p.GZ)(c),name:a,url:s.get("tokenUrl"),auth:t})}},R=e=>{let{auth:t,redirectUrl:r}=e;return e=>{let{authActions:n}=e,{schema:s,name:a,clientId:o,clientSecret:l,codeVerifier:i}=t,c={Authorization:"Basic "+(0,p.r3)(o+":"+l)},u={grant_type:"authorization_code",code:t.code,client_id:o,redirect_uri:r,code_verifier:i};return n.authorizeRequest({body:(0,p.GZ)(u),name:a,url:s.get("tokenUrl"),auth:t,headers:c})}},T=e=>t=>{let r,{fn:n,getConfigs:a,authActions:l,errActions:c,oas3Selectors:p,specSelectors:u,authSelectors:d}=t,{body:m,query:h={},headers:g={},name:f,url:y,auth:v}=e,{additionalQueryStringParams:E}=d.getConfigs()||{};if(u.isOAS3()){let e=p.serverEffectiveValue(p.selectedServer());r=i()(y,e,!0)}else r=i()(y,u.url(),!0);"object"==typeof E&&(r.query=o()({},r.query,E));const S=r.toString();let C=o()({Accept:"application/json, text/plain, */*","Content-Type":"application/x-www-form-urlencoded","X-Requested-With":"XMLHttpRequest"},g);n.fetch({url:S,method:"post",headers:C,query:h,body:m,requestInterceptor:a().requestInterceptor,responseInterceptor:a().responseInterceptor}).then((function(e){let t=JSON.parse(e.data),r=t&&(t.error||""),n=t&&(t.parseError||"");e.ok?r||n?c.newAuthErr({authId:f,level:"error",source:"auth",message:s()(t)}):l.authorizeOauth2WithPersistOption({auth:v,token:t}):c.newAuthErr({authId:f,level:"error",source:"auth",message:e.statusText})})).catch((e=>{let t=new Error(e).message;if(e.response&&e.response.data){const r=e.response.data;try{const e="string"==typeof r?JSON.parse(r):r;e.error&&(t+=`, error: ${e.error}`),e.error_description&&(t+=`, description: ${e.error_description}`)}catch(e){}}c.newAuthErr({authId:f,level:"error",source:"auth",message:t})}))};function P(e){return{type:y,payload:e}}function k(e){return{type:v,payload:e}}const O=()=>e=>{let{authSelectors:t,getConfigs:r}=e;if(r().persistAuthorization){const e=t.authorized();localStorage.setItem("authorized",s()(e.toJS()))}},M=(e,t)=>()=>{c.Z.swaggerUIRedirectOauth2=t,c.Z.open(e)}},3705:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>c,preauthorizeBasic:()=>p,preauthorizeApiKey:()=>u});var n=r(593),s=r.n(n),a=r(3962),o=r(5812),l=r(35),i=r(8302);function 
c(){return{afterLoad(e){this.rootInjects=this.rootInjects||{},this.rootInjects.initOAuth=e.authActions.configureAuth,this.rootInjects.preauthorizeApiKey=s()(u).call(u,null,e),this.rootInjects.preauthorizeBasic=s()(p).call(p,null,e)},statePlugins:{auth:{reducers:a.default,actions:o,selectors:l},spec:{wrapActions:i}}}}function p(e,t,r,n){const{authActions:{authorize:s},specSelectors:{specJson:a,isOAS3:o}}=e,l=o()?["components","securitySchemes"]:["securityDefinitions"],i=a().getIn([...l,t]);return i?s({[t]:{value:{username:r,password:n},schema:i.toJS()}}):null}function u(e,t,r){const{authActions:{authorize:n},specSelectors:{specJson:s,isOAS3:a}}=e,o=a()?["components","securitySchemes"]:["securityDefinitions"],l=s().getIn([...o,t]);return l?n({[t]:{value:r,schema:l.toJS()}}):null}},3962:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>p});var n=r(4235),s=r.n(n),a=r(4994),o=r.n(a),l=r(5572),i=r(1890),c=r(5812);const p={[c.SHOW_AUTH_POPUP]:(e,t)=>{let{payload:r}=t;return e.set("showDefinitions",r)},[c.AUTHORIZE]:(e,t)=>{var r;let{payload:n}=t,a=(0,l.fromJS)(n),o=e.get("authorized")||(0,l.Map)();return s()(r=a.entrySeq()).call(r,(t=>{let[r,n]=t;if(!(0,i.Wl)(n.getIn))return e.set("authorized",o);let s=n.getIn(["schema","type"]);if("apiKey"===s||"http"===s)o=o.set(r,n);else if("basic"===s){let e=n.getIn(["value","username"]),t=n.getIn(["value","password"]);o=o.setIn([r,"value"],{username:e,header:"Basic "+(0,i.r3)(e+":"+t)}),o=o.setIn([r,"schema"],n.get("schema"))}})),e.set("authorized",o)},[c.AUTHORIZE_OAUTH2]:(e,t)=>{let r,{payload:n}=t,{auth:s,token:a}=n;s.token=o()({},a),r=(0,l.fromJS)(s);let i=e.get("authorized")||(0,l.Map)();return i=i.set(r.get("name"),r),e.set("authorized",i)},[c.LOGOUT]:(e,t)=>{let{payload:r}=t,n=e.get("authorized").withMutations((e=>{s()(r).call(r,(t=>{e.delete(t)}))}));return e.set("authorized",n)},[c.CONFIGURE_AUTH]:(e,t)=>{let{payload:r}=t;return e.set("configs",r)},[c.RESTORE_AUTHORIZATION]:(e,t)=>{let{payload:r}=t;return e.set("authorized",(0,l.fromJS)(r.authorized))}}},35:(e,t,r)=>{"use strict";r.r(t),r.d(t,{shownDefinitions:()=>v,definitionsToAuthorize:()=>E,getDefinitionsByNames:()=>S,definitionsForRequirements:()=>C,authorized:()=>b,isAuthorized:()=>x,getConfigs:()=>w});var n=r(4235),s=r.n(n),a=r(3580),o=r.n(a),l=r(9998),i=r.n(l),c=r(8493),p=r.n(c),u=r(3942),d=r.n(u),m=r(7252),h=r.n(m),g=r(6814),f=r(5572);const y=e=>e,v=(0,g.createSelector)(y,(e=>e.get("showDefinitions"))),E=(0,g.createSelector)(y,(()=>e=>{var t;let{specSelectors:r}=e,n=r.securityDefinitions()||(0,f.Map)({}),a=(0,f.List)();return s()(t=n.entrySeq()).call(t,(e=>{let[t,r]=e,n=(0,f.Map)();n=n.set(t,r),a=a.push(n)})),a})),S=(e,t)=>e=>{var r;let{specSelectors:n}=e;console.warn("WARNING: getDefinitionsByNames is deprecated and will be removed in the next major version.");let a=n.securityDefinitions(),o=(0,f.List)();return s()(r=t.valueSeq()).call(r,(e=>{var t;let r=(0,f.Map)();s()(t=e.entrySeq()).call(t,(e=>{let t,[n,o]=e,l=a.get(n);var i;"oauth2"===l.get("type")&&o.size&&(t=l.get("scopes"),s()(i=t.keySeq()).call(i,(e=>{o.contains(e)||(t=t.delete(e))})),l=l.set("allowedScopes",t));r=r.set(n,l)})),o=o.push(r)})),o},C=function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:(0,f.List)();return e=>{let{authSelectors:r}=e;const n=r.definitionsToAuthorize()||(0,f.List)();let a=(0,f.List)();return s()(n).call(n,(e=>{let r=o()(t).call(t,(t=>t.get(e.keySeq().first())));r&&(s()(e).call(e,((t,n)=>{if("oauth2"===t.get("type")){const o=r.get(n);let l=t.get("scopes");var 
a;if(f.List.isList(o)&&f.Map.isMap(l))s()(a=l.keySeq()).call(a,(e=>{o.contains(e)||(l=l.delete(e))})),e=e.set(n,t.set("scopes",l))}})),a=a.push(e))})),a}},b=(0,g.createSelector)(y,(e=>e.get("authorized")||(0,f.Map)())),x=(e,t)=>e=>{var r;let{authSelectors:n}=e,s=n.authorized();return f.List.isList(t)?!!i()(r=t.toJS()).call(r,(e=>{var t,r;return-1===p()(t=d()(r=h()(e)).call(r,(e=>!!s.get(e)))).call(t,!1)})).length:null},w=(0,g.createSelector)(y,(e=>e.get("configs")))},8302:(e,t,r)=>{"use strict";r.r(t),r.d(t,{execute:()=>n});const n=(e,t)=>{let{authSelectors:r,specSelectors:n}=t;return t=>{let{path:s,method:a,operation:o,extras:l}=t,i={authorized:r.authorized()&&r.authorized().toJS(),definitions:n.securityDefinitions()&&n.securityDefinitions().toJS(),specSecurity:n.security()&&n.security().toJS()};return e({path:s,method:a,operation:o,securities:i,...l})}}},714:(e,t,r)=>{"use strict";r.r(t),r.d(t,{UPDATE_CONFIGS:()=>n,TOGGLE_CONFIGS:()=>s,update:()=>a,toggle:()=>o,loaded:()=>l});const n="configs_update",s="configs_toggle";function a(e,t){return{type:n,payload:{[e]:t}}}function o(e){return{type:s,payload:e}}const l=()=>e=>{let{getConfigs:t,authActions:r}=e;if(t().persistAuthorization){const e=localStorage.getItem("authorized");e&&r.restoreAuthorization({authorized:JSON.parse(e)})}}},2256:(e,t,r)=>{"use strict";r.r(t),r.d(t,{parseYamlConfig:()=>a});var n=r(9793),s=r.n(n);const a=(e,t)=>{try{return s().load(e)}catch(e){return t&&t.errActions.newThrownErr(new Error(e)),{}}}},1661:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>p});var n=r(5163),s=r(2256),a=r(714),o=r(2698),l=r(9018),i=r(7743);const c={getLocalConfig:()=>(0,s.parseYamlConfig)(n)};function p(){return{statePlugins:{spec:{actions:o,selectors:c},configs:{reducers:i.default,actions:a,selectors:l}}}}},7743:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>a});var n=r(5572),s=r(714);const a={[s.UPDATE_CONFIGS]:(e,t)=>e.merge((0,n.fromJS)(t.payload)),[s.TOGGLE_CONFIGS]:(e,t)=>{const r=t.payload,n=e.get(r);return e.set(r,!n)}}},9018:(e,t,r)=>{"use strict";r.r(t),r.d(t,{get:()=>a});var n=r(7104),s=r.n(n);const a=(e,t)=>e.getIn(s()(t)?t:[t])},2698:(e,t,r)=>{"use strict";r.r(t),r.d(t,{downloadConfig:()=>s,getConfigByUrl:()=>a});var n=r(2256);const s=e=>t=>{const{fn:{fetch:r}}=t;return r(e)},a=(e,t)=>r=>{let{specActions:s}=r;if(e)return s.downloadConfig(e).then(a,a);function a(r){r instanceof Error||r.status>=400?(s.updateLoadingStatus("failedConfig"),s.updateLoadingStatus("failedConfig"),s.updateUrl(""),console.error(r.statusText+" "+e.url),t(null)):t((0,n.parseYamlConfig)(r.text))}}},1970:(e,t,r)=>{"use strict";r.r(t),r.d(t,{setHash:()=>n});const n=e=>e?history.pushState(null,null,`#${e}`):window.location.hash=""},4980:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>o});var n=r(2179),s=r(877),a=r(4584);function o(){return[n.default,{statePlugins:{configs:{wrapActions:{loaded:(e,t)=>function(){e(...arguments);const r=decodeURIComponent(window.location.hash);t.layoutActions.parseDeepLinkHash(r)}}}},wrapComponents:{operation:s.default,OperationTag:a.default}}]}},2179:(e,t,r)=>{"use strict";r.r(t),r.d(t,{clearScrollTo:()=>w,default:()=>_,parseDeepLinkHash:()=>C,readyToScroll:()=>b,scrollTo:()=>S,scrollToElement:()=>x,show:()=>E});var n=r(7104),s=r.n(n),a=r(600),o=r.n(a),l=r(3942),i=r.n(l),c=r(8493),p=r.n(c),u=r(1970);const d=require("zenscroll");var m=r.n(d),h=r(1890),g=r(5572),f=r.n(g);const y="layout_scroll_to",v="layout_clear_scroll",E=(e,t)=>{let{getConfigs:r,layoutSelectors:n}=t;return function(){for(var t=arguments.length,a=new 
Array(t),o=0;o({type:y,payload:s()(e)?e:[e]}),C=e=>t=>{let{layoutActions:r,layoutSelectors:n,getConfigs:s}=t;if(s().deepLinking&&e){var a;let t=o()(e).call(e,1);"!"===t[0]&&(t=o()(t).call(t,1)),"/"===t[0]&&(t=o()(t).call(t,1));const s=i()(a=t.split("/")).call(a,(e=>e||"")),l=n.isShownKeyFromUrlHashArray(s),[c,u="",d=""]=l;if("operations"===c){const e=n.isShownKeyFromUrlHashArray([u]);p()(u).call(u,"_")>-1&&(console.warn("Warning: escaping deep link whitespace with `_` will be unsupported in v4.0, use `%20` instead."),r.show(i()(e).call(e,(e=>e.replace(/_/g," "))),!0)),r.show(e,!0)}(p()(u).call(u,"_")>-1||p()(d).call(d,"_")>-1)&&(console.warn("Warning: escaping deep link whitespace with `_` will be unsupported in v4.0, use `%20` instead."),r.show(i()(l).call(l,(e=>e.replace(/_/g," "))),!0)),r.show(l,!0),r.scrollTo(l)}},b=(e,t)=>r=>{const n=r.layoutSelectors.getScrollToKey();f().is(n,(0,g.fromJS)(e))&&(r.layoutActions.scrollToElement(t),r.layoutActions.clearScrollTo())},x=(e,t)=>r=>{try{t=t||r.fn.getScrollParent(e),m().createScroller(t).to(e)}catch(e){console.error(e)}},w=()=>({type:v});const _={fn:{getScrollParent:function(e,t){const r=document.documentElement;let n=getComputedStyle(e);const s="absolute"===n.position,a=t?/(auto|scroll|hidden)/:/(auto|scroll)/;if("fixed"===n.position)return r;for(let t=e;t=t.parentElement;)if(n=getComputedStyle(t),(!s||"static"!==n.position)&&a.test(n.overflow+n.overflowY+n.overflowX))return t;return r}},statePlugins:{layout:{actions:{scrollToElement:x,scrollTo:S,clearScrollTo:w,readyToScroll:b,parseDeepLinkHash:C},selectors:{getScrollToKey:e=>e.get("scrollToKey"),isShownKeyFromUrlHashArray(e,t){const[r,n]=t;return n?["operations",r,n]:r?["operations-tag",r]:[]},urlHashArrayFromIsShownKey(e,t){let[r,n,s]=t;return"operations"==r?[n,s]:"operations-tag"==r?[n]:[]}},reducers:{[y]:(e,t)=>e.set("scrollToKey",f().fromJS(t.payload)),[v]:e=>e.delete("scrollToKey")},wrapActions:{show:E}}}}},4584:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>l});var n=r(1093),s=r.n(n),a=r(6689),o=r.n(a);r(580);const l=(e,t)=>class extends o().Component{constructor(){super(...arguments),s()(this,"onLoad",(e=>{const{tag:r}=this.props,n=["operations-tag",r];t.layoutActions.readyToScroll(n,e)}))}render(){return o().createElement("span",{ref:this.onLoad},o().createElement(e,this.props))}}},877:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>l});var n=r(1093),s=r.n(n),a=r(6689),o=r.n(a);r(8082);const l=(e,t)=>class extends o().Component{constructor(){super(...arguments),s()(this,"onLoad",(e=>{const{operation:r}=this.props,{tag:n,operationId:s}=r.toObject();let{isShownKey:a}=r.toObject();a=a||["operations",n,s],t.layoutActions.readyToScroll(a,e)}))}render(){return o().createElement("span",{ref:this.onLoad},o().createElement(e,this.props))}}},8011:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>h});var n=r(4994),s=r.n(n),a=r(9478),o=r.n(a),l=r(8493),i=r.n(l),c=r(8344),p=r.n(c),u=r(6814),d=r(5572),m=r(7504);function h(e){let{fn:t}=e;return{statePlugins:{spec:{actions:{download:e=>r=>{let{errActions:n,specSelectors:a,specActions:l,getConfigs:i}=r,{fetch:c}=t;const p=i();function u(t){if(t instanceof Error||t.status>=400)return l.updateLoadingStatus("failed"),n.newThrownErr(s()(new Error((t.message||t.statusText)+" "+e),{source:"fetch"})),void(!t.status&&t instanceof Error&&function(){try{let t;if("URL"in m.Z?t=new(o())(e):(t=document.createElement("a"),t.href=e),"https:"!==t.protocol&&"https:"===m.Z.location.protocol){const e=s()(new Error(`Possible mixed-content issue? 
The page was loaded over https:// but a ${t.protocol}// URL was specified. Check that you are not attempting to load mixed content.`),{source:"fetch"});return void n.newThrownErr(e)}if(t.origin!==m.Z.location.origin){const e=s()(new Error(`Possible cross-origin (CORS) issue? The URL origin (${t.origin}) does not match the page (${m.Z.location.origin}). Check the server returns the correct 'Access-Control-Allow-*' headers.`),{source:"fetch"});n.newThrownErr(e)}}catch(e){return}}());l.updateLoadingStatus("success"),l.updateSpec(t.text),a.url()!==e&&l.updateUrl(e)}e=e||a.url(),l.updateLoadingStatus("loading"),n.clear({source:"fetch"}),c({url:e,loadSpec:!0,requestInterceptor:p.requestInterceptor||(e=>e),responseInterceptor:p.responseInterceptor||(e=>e),credentials:"same-origin",headers:{Accept:"application/json,*/*"}}).then(u,u)},updateLoadingStatus:e=>{let t=[null,"loading","failed","success","failedConfig"];return-1===i()(t).call(t,e)&&console.error(`Error: ${e} is not one of ${p()(t)}`),{type:"spec_update_loading_status",payload:e}}},reducers:{spec_update_loading_status:(e,t)=>"string"==typeof t.payload?e.set("loadingStatus",t.payload):e},selectors:{loadingStatus:(0,u.createSelector)((e=>e||(0,d.Map)()),(e=>e.get("loadingStatus")||null))}}}}}},4966:(e,t,r)=>{"use strict";r.r(t),r.d(t,{NEW_THROWN_ERR:()=>s,NEW_THROWN_ERR_BATCH:()=>a,NEW_SPEC_ERR:()=>o,NEW_SPEC_ERR_BATCH:()=>l,NEW_AUTH_ERR:()=>i,CLEAR:()=>c,CLEAR_BY:()=>p,newThrownErr:()=>u,newThrownErrBatch:()=>d,newSpecErr:()=>m,newSpecErrBatch:()=>h,newAuthErr:()=>g,clear:()=>f,clearBy:()=>y});var n=r(41);const s="err_new_thrown_err",a="err_new_thrown_err_batch",o="err_new_spec_err",l="err_new_spec_err_batch",i="err_new_auth_err",c="err_clear",p="err_clear_by";function u(e){return{type:s,payload:(0,n.serializeError)(e)}}function d(e){return{type:a,payload:e}}function m(e){return{type:o,payload:e}}function h(e){return{type:l,payload:e}}function g(e){return{type:i,payload:e}}function f(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};return{type:c,payload:e}}function y(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:()=>!0;return{type:p,payload:e}}},2860:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>p});var n=r(9998),s=r.n(n),a=r(3942),o=r.n(a);const l=require("lodash/reduce");var i=r.n(l);const c=[r(2392),r(1835)];function p(e){var t;let r={jsSpec:{}},n=i()(c,((e,t)=>{try{let n=t.transform(e,r);return s()(n).call(n,(e=>!!e))}catch(t){return console.error("Transformer error:",t),e}}),e);return o()(t=s()(n).call(n,(e=>!!e))).call(t,(e=>(!e.get("line")&&e.get("path"),e)))}},2392:(e,t,r)=>{"use strict";r.r(t),r.d(t,{transform:()=>u});var n=r(3942),s=r.n(n),a=r(8493),o=r.n(a),l=r(600),i=r.n(l),c=r(66),p=r.n(c);function u(e){return s()(e).call(e,(e=>{var t;let r="is not of a type(s)",n=o()(t=e.get("message")).call(t,r);if(n>-1){var s,a;let t=i()(s=e.get("message")).call(s,n+r.length).split(",");return e.set("message",i()(a=e.get("message")).call(a,0,n)+function(e){return p()(e).call(e,((e,t,r,n)=>r===n.length-1&&n.length>1?e+"or "+t:n[r+1]&&n.length>2?e+t+", ":n[r+1]?e+t+" ":e+t),"should be a")}(t))}return e}))}},1835:(e,t,r)=>{"use strict";r.r(t),r.d(t,{transform:()=>n});r(3942),r(8493),r(1712),r(5572);function n(e,t){let{jsSpec:r}=t;return e}},7793:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>o});var n=r(3527),s=r(4966),a=r(7667);function o(e){return{statePlugins:{err:{reducers:(0,n.default)(e),actions:s,selectors:a}}}}},3527:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>y});var 
n=r(4994),s=r.n(n),a=r(3942),o=r.n(a),l=r(4883),i=r.n(l),c=r(9998),p=r.n(c),u=r(7834),d=r.n(u),m=r(4966),h=r(5572),g=r(2860);let f={line:0,level:"error",message:"Unknown error"};function y(){return{[m.NEW_THROWN_ERR]:(e,t)=>{let{payload:r}=t,n=s()(f,r,{type:"thrown"});return e.update("errors",(e=>(e||(0,h.List)()).push((0,h.fromJS)(n)))).update("errors",(e=>(0,g.default)(e)))},[m.NEW_THROWN_ERR_BATCH]:(e,t)=>{let{payload:r}=t;return r=o()(r).call(r,(e=>(0,h.fromJS)(s()(f,e,{type:"thrown"})))),e.update("errors",(e=>{var t;return i()(t=e||(0,h.List)()).call(t,(0,h.fromJS)(r))})).update("errors",(e=>(0,g.default)(e)))},[m.NEW_SPEC_ERR]:(e,t)=>{let{payload:r}=t,n=(0,h.fromJS)(r);return n=n.set("type","spec"),e.update("errors",(e=>(e||(0,h.List)()).push((0,h.fromJS)(n)).sortBy((e=>e.get("line"))))).update("errors",(e=>(0,g.default)(e)))},[m.NEW_SPEC_ERR_BATCH]:(e,t)=>{let{payload:r}=t;return r=o()(r).call(r,(e=>(0,h.fromJS)(s()(f,e,{type:"spec"})))),e.update("errors",(e=>{var t;return i()(t=e||(0,h.List)()).call(t,(0,h.fromJS)(r))})).update("errors",(e=>(0,g.default)(e)))},[m.NEW_AUTH_ERR]:(e,t)=>{let{payload:r}=t,n=(0,h.fromJS)(s()({},r));return n=n.set("type","auth"),e.update("errors",(e=>(e||(0,h.List)()).push((0,h.fromJS)(n)))).update("errors",(e=>(0,g.default)(e)))},[m.CLEAR]:(e,t)=>{var r;let{payload:n}=t;if(!n||!e.get("errors"))return e;let s=p()(r=e.get("errors")).call(r,(e=>{var t;return d()(t=e.keySeq()).call(t,(t=>{const r=e.get(t),s=n[t];return!s||r!==s}))}));return e.merge({errors:s})},[m.CLEAR_BY]:(e,t)=>{var r;let{payload:n}=t;if(!n||"function"!=typeof n)return e;let s=p()(r=e.get("errors")).call(r,(e=>n(e)));return e.merge({errors:s})}}}},7667:(e,t,r)=>{"use strict";r.r(t),r.d(t,{allErrors:()=>a,lastError:()=>o});var n=r(5572),s=r(6814);const a=(0,s.createSelector)((e=>e),(e=>e.get("errors",(0,n.List)()))),o=(0,s.createSelector)(a,(e=>e.last()))},9978:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>s});var n=r(4309);function s(){return{fn:{opsFilter:n.default}}}},4309:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>l});var n=r(9998),s=r.n(n),a=r(8493),o=r.n(a);function l(e,t){return s()(e).call(e,((e,r)=>-1!==o()(r).call(r,t)))}},5474:(e,t,r)=>{"use strict";r.r(t),r.d(t,{UPDATE_LAYOUT:()=>s,UPDATE_FILTER:()=>a,UPDATE_MODE:()=>o,SHOW:()=>l,updateLayout:()=>i,updateFilter:()=>c,show:()=>p,changeMode:()=>u});var n=r(1890);const s="layout_update_layout",a="layout_update_filter",o="layout_update_mode",l="layout_show";function i(e){return{type:s,payload:e}}function c(e){return{type:a,payload:e}}function p(e){let t=!(arguments.length>1&&void 0!==arguments[1])||arguments[1];return e=(0,n.AF)(e),{type:l,payload:{thing:e,shown:t}}}function u(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"";return e=(0,n.AF)(e),{type:o,payload:{thing:e,mode:t}}}},6821:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>l});var n=r(5672),s=r(5474),a=r(4400),o=r(8989);function l(){return{statePlugins:{layout:{reducers:n.default,actions:s,selectors:a},spec:{wrapSelectors:o}}}}},5672:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>l});var n=r(4883),s=r.n(n),a=r(5572),o=r(5474);const l={[o.UPDATE_LAYOUT]:(e,t)=>e.set("layout",t.payload),[o.UPDATE_FILTER]:(e,t)=>e.set("filter",t.payload),[o.SHOW]:(e,t)=>{const r=t.payload.shown,n=(0,a.fromJS)(t.payload.thing);return e.update("shown",(0,a.fromJS)({}),(e=>e.set(n,r)))},[o.UPDATE_MODE]:(e,t)=>{var r;let n=t.payload.thing,a=t.payload.mode;return e.setIn(s()(r=["modes"]).call(r,n),(a||"")+"")}}},4400:(e,t,r)=>{"use 
strict";r.r(t),r.d(t,{current:()=>o,currentFilter:()=>l,isShown:()=>i,whatMode:()=>c,showSummary:()=>p});var n=r(6814),s=r(1890),a=r(5572);const o=e=>e.get("layout"),l=e=>e.get("filter"),i=(e,t,r)=>(t=(0,s.AF)(t),e.get("shown",(0,a.fromJS)({})).get((0,a.fromJS)(t),r)),c=function(e,t){let r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:"";return t=(0,s.AF)(t),e.getIn(["modes",...t],r)},p=(0,n.createSelector)((e=>e),(e=>!i(e,"editor")))},8989:(e,t,r)=>{"use strict";r.r(t),r.d(t,{taggedOperations:()=>a});var n=r(600),s=r.n(n);const a=(e,t)=>function(r){for(var n=arguments.length,a=new Array(n>1?n-1:0),o=1;o=0&&(l=s()(l).call(l,0,d)),l}},9150:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>a});var n=r(593),s=r.n(n);function a(e){let{configs:t}=e;const r={debug:0,info:1,log:2,warn:3,error:4},n=e=>r[e]||-1;let{logLevel:a}=t,o=n(a);function l(e){for(var t=arguments.length,r=new Array(t>1?t-1:0),s=1;s=o&&console[e](...r)}return l.warn=s()(l).call(l,null,"warn"),l.error=s()(l).call(l,null,"error"),l.info=s()(l).call(l,null,"info"),l.debug=s()(l).call(l,null,"debug"),{rootInjects:{log:l}}}},7002:(e,t,r)=>{"use strict";r.r(t),r.d(t,{UPDATE_SELECTED_SERVER:()=>n,UPDATE_REQUEST_BODY_VALUE:()=>s,UPDATE_REQUEST_BODY_VALUE_RETAIN_FLAG:()=>a,UPDATE_REQUEST_BODY_INCLUSION:()=>o,UPDATE_ACTIVE_EXAMPLES_MEMBER:()=>l,UPDATE_REQUEST_CONTENT_TYPE:()=>i,UPDATE_RESPONSE_CONTENT_TYPE:()=>c,UPDATE_SERVER_VARIABLE_VALUE:()=>p,SET_REQUEST_BODY_VALIDATE_ERROR:()=>u,CLEAR_REQUEST_BODY_VALIDATE_ERROR:()=>d,CLEAR_REQUEST_BODY_VALUE:()=>m,setSelectedServer:()=>h,setRequestBodyValue:()=>g,setRetainRequestBodyValueFlag:()=>f,setRequestBodyInclusion:()=>y,setActiveExamplesMember:()=>v,setRequestContentType:()=>E,setResponseContentType:()=>S,setServerVariableValue:()=>C,setRequestBodyValidateError:()=>b,clearRequestBodyValidateError:()=>x,initRequestBodyValidateError:()=>w,clearRequestBodyValue:()=>_});const n="oas3_set_servers",s="oas3_set_request_body_value",a="oas3_set_request_body_retain_flag",o="oas3_set_request_body_inclusion",l="oas3_set_active_examples_member",i="oas3_set_request_content_type",c="oas3_set_response_content_type",p="oas3_set_server_variable_value",u="oas3_set_request_body_validate_error",d="oas3_clear_request_body_validate_error",m="oas3_clear_request_body_value";function h(e,t){return{type:n,payload:{selectedServerUrl:e,namespace:t}}}function g(e){let{value:t,pathMethod:r}=e;return{type:s,payload:{value:t,pathMethod:r}}}const f=e=>{let{value:t,pathMethod:r}=e;return{type:a,payload:{value:t,pathMethod:r}}};function y(e){let{value:t,pathMethod:r,name:n}=e;return{type:o,payload:{value:t,pathMethod:r,name:n}}}function v(e){let{name:t,pathMethod:r,contextType:n,contextName:s}=e;return{type:l,payload:{name:t,pathMethod:r,contextType:n,contextName:s}}}function E(e){let{value:t,pathMethod:r}=e;return{type:i,payload:{value:t,pathMethod:r}}}function S(e){let{value:t,path:r,method:n}=e;return{type:c,payload:{value:t,path:r,method:n}}}function C(e){let{server:t,namespace:r,key:n,val:s}=e;return{type:p,payload:{server:t,namespace:r,key:n,val:s}}}const b=e=>{let{path:t,method:r,validationErrors:n}=e;return{type:u,payload:{path:t,method:r,validationErrors:n}}},x=e=>{let{path:t,method:r}=e;return{type:d,payload:{path:t,method:r}}},w=e=>{let{pathMethod:t}=e;return{type:d,payload:{path:t[0],method:t[1]}}},_=e=>{let{pathMethod:t}=e;return{type:m,payload:{pathMethod:t}}}},3723:(e,t,r)=>{"use strict";r.r(t),r.d(t,{definitionsToAuthorize:()=>d});var 
n=r(4235),s=r.n(n),a=r(9998),o=r.n(a),l=r(66),i=r.n(l),c=r(6814),p=r(5572),u=r(7779);const d=(m=(0,c.createSelector)((e=>e),(e=>{let{specSelectors:t}=e;return t.securityDefinitions()}),((e,t)=>{var r;let n=(0,p.List)();return t?(s()(r=t.entrySeq()).call(r,(e=>{let[t,r]=e;const a=r.get("type");var l;if("oauth2"===a&&s()(l=r.get("flows").entrySeq()).call(l,(e=>{let[s,a]=e,l=(0,p.fromJS)({flow:s,authorizationUrl:a.get("authorizationUrl"),tokenUrl:a.get("tokenUrl"),scopes:a.get("scopes"),type:r.get("type"),description:r.get("description")});n=n.push(new p.Map({[t]:o()(l).call(l,(e=>void 0!==e))}))})),"http"!==a&&"apiKey"!==a||(n=n.push(new p.Map({[t]:r}))),"openIdConnect"===a&&r.get("openIdConnectData")){let e=r.get("openIdConnectData"),a=e.get("grant_types_supported")||["authorization_code","implicit"];s()(a).call(a,(s=>{var a;let l=e.get("scopes_supported")&&i()(a=e.get("scopes_supported")).call(a,((e,t)=>e.set(t,"")),new p.Map),c=(0,p.fromJS)({flow:s,authorizationUrl:e.get("authorization_endpoint"),tokenUrl:e.get("token_endpoint"),scopes:l,type:"oauth2",openIdConnectUrl:r.get("openIdConnectUrl")});n=n.push(new p.Map({[t]:o()(c).call(c,(e=>void 0!==e))}))}))}})),n):n})),(e,t)=>function(){const r=t.getSystem().specSelectors.specJson();for(var n=arguments.length,s=new Array(n),a=0;a{"use strict";r.r(t),r.d(t,{default:()=>p});var n=r(4250),s=r.n(n),a=r(3942),o=r.n(a),l=r(6689),i=r.n(l),c=(r(580),r(8082),r(5572));const p=e=>{var t;let{callbacks:r,getComponent:n,specPath:a}=e;const l=n("OperationContainer",!0);if(!r)return i().createElement("span",null,"No callbacks");let p=o()(t=r.entrySeq()).call(t,(t=>{var r;let[n,p]=t;return i().createElement("div",{key:n},i().createElement("h2",null,n),o()(r=p.entrySeq()).call(r,(t=>{var r;let[p,u]=t;return"$$ref"===p?null:i().createElement("div",{key:p},o()(r=u.entrySeq()).call(r,(t=>{let[r,o]=t;if("$$ref"===r)return null;let u=(0,c.fromJS)({operation:o});return i().createElement(l,s()({},e,{op:u,key:r,tag:"",method:r,path:p,specPath:a.push(n,p,r),allowTryItOut:!1}))})))})))}));return i().createElement("div",null,p)}},6775:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>m});var n=r(1093),s=r.n(n),a=r(4994),o=r.n(a),l=r(9998),i=r.n(l),c=r(3942),p=r.n(c),u=r(6689),d=r.n(u);r(580);class m extends d().Component{constructor(e,t){super(e,t),s()(this,"onChange",(e=>{let{onChange:t}=this.props,{value:r,name:n}=e.target,s=o()({},this.state.value);n?s[n]=r:s=r,this.setState({value:s},(()=>t(this.state)))}));let{name:r,schema:n}=this.props,a=this.getValue();this.state={name:r,schema:n,value:a}}getValue(){let{name:e,authorized:t}=this.props;return t&&t.getIn([e,"value"])}render(){var e;let{schema:t,getComponent:r,errSelectors:n,name:s}=this.props;const a=r("Input"),o=r("Row"),l=r("Col"),c=r("authError"),u=r("Markdown",!0),m=r("JumpToPath",!0),h=(t.get("scheme")||"").toLowerCase();let g=this.getValue(),f=i()(e=n.allErrors()).call(e,(e=>e.get("authId")===s));if("basic"===h){var y;let e=g?g.get("username"):null;return d().createElement("div",null,d().createElement("h4",null,d().createElement("code",null,s||t.get("name")),"  (http, Basic)",d().createElement(m,{path:["securityDefinitions",s]})),e&&d().createElement("h6",null,"Authorized"),d().createElement(o,null,d().createElement(u,{source:t.get("description")})),d().createElement(o,null,d().createElement("label",null,"Username:"),e?d().createElement("code",null," ",e," 
"):d().createElement(l,null,d().createElement(a,{type:"text",required:"required",name:"username","aria-label":"auth-basic-username",onChange:this.onChange,autoFocus:!0}))),d().createElement(o,null,d().createElement("label",null,"Password:"),e?d().createElement("code",null," ****** "):d().createElement(l,null,d().createElement(a,{autoComplete:"new-password",name:"password",type:"password","aria-label":"auth-basic-password",onChange:this.onChange}))),p()(y=f.valueSeq()).call(y,((e,t)=>d().createElement(c,{error:e,key:t}))))}var v;return"bearer"===h?d().createElement("div",null,d().createElement("h4",null,d().createElement("code",null,s||t.get("name")),"  (http, Bearer)",d().createElement(m,{path:["securityDefinitions",s]})),g&&d().createElement("h6",null,"Authorized"),d().createElement(o,null,d().createElement(u,{source:t.get("description")})),d().createElement(o,null,d().createElement("label",null,"Value:"),g?d().createElement("code",null," ****** "):d().createElement(l,null,d().createElement(a,{type:"text","aria-label":"auth-bearer-value",onChange:this.onChange,autoFocus:!0}))),p()(v=f.valueSeq()).call(v,((e,t)=>d().createElement(c,{error:e,key:t})))):d().createElement("div",null,d().createElement("em",null,d().createElement("b",null,s)," HTTP authentication: unsupported scheme ",`'${h}'`))}}},6467:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>u});var n=r(3427),s=r(2458),a=r(5757),o=r(6617),l=r(9928),i=r(5327),c=r(6775),p=r(6796);const u={Callbacks:n.default,HttpAuth:c.default,RequestBody:s.default,Servers:o.default,ServersContainer:l.default,RequestBodyEditor:i.default,OperationServers:p.default,operationLink:a.default}},5757:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>p});var n=r(8344),s=r.n(n),a=r(3942),o=r.n(a),l=r(6689),i=r.n(l);r(580),r(8082);class c extends l.Component{render(){const{link:e,name:t,getComponent:r}=this.props,n=r("Markdown",!0);let a=e.get("operationId")||e.get("operationRef"),l=e.get("parameters")&&e.get("parameters").toJS(),c=e.get("description");return i().createElement("div",{className:"operation-link"},i().createElement("div",{className:"description"},i().createElement("b",null,i().createElement("code",null,t)),c?i().createElement(n,{source:c}):null),i().createElement("pre",null,"Operation `",a,"`",i().createElement("br",null),i().createElement("br",null),"Parameters ",function(e,t){var r;if("string"!=typeof t)return"";return o()(r=t.split("\n")).call(r,((t,r)=>r>0?Array(e+1).join(" ")+t:t)).join("\n")}(0,s()(l,null,2))||"{}",i().createElement("br",null)))}}const p=c},6796:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>l});var n=r(1093),s=r.n(n),a=r(6689),o=r.n(a);r(580),r(8082);class l extends o().Component{constructor(){super(...arguments),s()(this,"setSelectedServer",(e=>{const{path:t,method:r}=this.props;return this.forceUpdate(),this.props.setSelectedServer(e,`${t}:${r}`)})),s()(this,"setServerVariableValue",(e=>{const{path:t,method:r}=this.props;return this.forceUpdate(),this.props.setServerVariableValue({...e,namespace:`${t}:${r}`})})),s()(this,"getSelectedServer",(()=>{const{path:e,method:t}=this.props;return this.props.getSelectedServer(`${e}:${t}`)})),s()(this,"getServerVariable",((e,t)=>{const{path:r,method:n}=this.props;return this.props.getServerVariable({namespace:`${r}:${n}`,server:e},t)})),s()(this,"getEffectiveServerValue",(e=>{const{path:t,method:r}=this.props;return this.props.getEffectiveServerValue({server:e,namespace:`${t}:${r}`})}))}render(){const{operationServers:e,pathServers:t,getComponent:r}=this.props;if(!e&&!t)return 
null;const n=r("Servers"),s=e||t,a=e?"operation":"path";return o().createElement("div",{className:"opblock-section operation-servers"},o().createElement("div",{className:"opblock-section-header"},o().createElement("div",{className:"tab-header"},o().createElement("h4",{className:"opblock-title"},"Servers"))),o().createElement("div",{className:"opblock-description-wrapper"},o().createElement("h4",{className:"message"},"These ",a,"-level options override the global server options."),o().createElement(n,{servers:s,currentServer:this.getSelectedServer(),setSelectedServer:this.setSelectedServer,setServerVariableValue:this.setServerVariableValue,getServerVariable:this.getServerVariable,getEffectiveServerValue:this.getEffectiveServerValue})))}}},5327:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>u});var n=r(1093),s=r.n(n),a=r(6689),o=r.n(a),l=(r(580),r(9003)),i=r.n(l),c=r(1890);const p=Function.prototype;class u extends a.PureComponent{constructor(e,t){super(e,t),s()(this,"applyDefaultValue",(e=>{const{onChange:t,defaultValue:r}=e||this.props;return this.setState({value:r}),t(r)})),s()(this,"onChange",(e=>{this.props.onChange((0,c.Pz)(e))})),s()(this,"onDomChange",(e=>{const t=e.target.value;this.setState({value:t},(()=>this.onChange(t)))})),this.state={value:(0,c.Pz)(e.value)||e.defaultValue},e.onChange(e.value)}UNSAFE_componentWillReceiveProps(e){this.props.value!==e.value&&e.value!==this.state.value&&this.setState({value:(0,c.Pz)(e.value)}),!e.value&&e.defaultValue&&this.state.value&&this.applyDefaultValue(e)}render(){let{getComponent:e,errors:t}=this.props,{value:r}=this.state,n=t.size>0;const s=e("TextArea");return o().createElement("div",{className:"body-param"},o().createElement(s,{className:i()("body-param__text",{invalid:n}),title:t.size?t.join(", "):"",value:r,onChange:this.onDomChange}))}}s()(u,"defaultProps",{onChange:p,userHasEditedBody:!1})},2458:(e,t,r)=>{"use strict";r.r(t),r.d(t,{getDefaultRequestBodyValue:()=>f,default:()=>y});var n=r(3942),s=r.n(n),a=r(8493),o=r.n(a),l=r(2605),i=r.n(l),c=r(7104),p=r.n(c),u=r(6689),d=r.n(u),m=(r(580),r(8082),r(5572)),h=r(1890),g=r(2518);const f=(e,t,r)=>{const n=e.getIn(["content",t]),s=n.get("schema").toJS(),a=void 0!==n.get("examples"),o=n.get("example"),l=a?n.getIn(["examples",r,"value"]):o,i=(0,h.xi)(s,t,{includeWriteOnly:!0},l);return(0,h.Pz)(i)},y=e=>{let{userHasEditedBody:t,requestBody:r,requestBodyValue:n,requestBodyInclusionSetting:a,requestBodyErrors:l,getComponent:c,getConfigs:u,specSelectors:y,fn:v,contentType:E,isExecute:S,specPath:C,onChange:b,onChangeIncludeEmpty:x,activeExamplesKey:w,updateActiveExamplesKey:_,setRetainRequestBodyValueFlag:A}=e;const I=e=>{b(e.target.files[0])},N=e=>{let t={key:e,shouldDispatchInit:!1,defaultValue:!0};return"no value"===a.get(e,"no value")&&(t.shouldDispatchInit=!0),t},q=c("Markdown",!0),R=c("modelExample"),T=c("RequestBodyEditor"),P=c("highlightCode"),k=c("ExamplesSelectValueRetainer"),O=c("Example"),M=c("ParameterIncludeEmpty"),{showCommonExtensions:j}=u(),V=r&&r.get("description")||null,D=r&&r.get("content")||new m.OrderedMap;E=E||D.keySeq().first()||"";const L=D.get(E,(0,m.OrderedMap)()),U=L.get("schema",(0,m.OrderedMap)()),z=L.get("examples",null),B=null==z?void 0:s()(z).call(z,((e,t)=>{var n;const s=null===(n=e)||void 0===n?void 0:n.get("value",null);return s&&(e=e.set("value",f(r,E,t),s)),e}));if(l=m.List.isList(l)?l:(0,m.List)(),!L.size)return null;const 
$="object"===L.getIn(["schema","type"]),J="binary"===L.getIn(["schema","format"]),F="base64"===L.getIn(["schema","format"]);if("application/octet-stream"===E||0===o()(E).call(E,"image/")||0===o()(E).call(E,"audio/")||0===o()(E).call(E,"video/")||J||F){const e=c("Input");return S?d().createElement(e,{type:"file",onChange:I}):d().createElement("i",null,"Example values are not available for ",d().createElement("code",null,E)," media types.")}if($&&("application/x-www-form-urlencoded"===E||0===o()(E).call(E,"multipart/"))&&U.get("properties",(0,m.OrderedMap)()).size>0){var W;const e=c("JsonSchemaForm"),t=c("ParameterExt"),r=U.get("properties",(0,m.OrderedMap)());return n=m.Map.isMap(n)?n:(0,m.OrderedMap)(),d().createElement("div",{className:"table-container"},V&&d().createElement(q,{source:V}),d().createElement("table",null,d().createElement("tbody",null,m.Map.isMap(r)&&s()(W=r.entrySeq()).call(W,(r=>{var o,u;let[g,f]=r;if(f.get("readOnly"))return;let y=j?(0,h.po)(f):null;const E=i()(o=U.get("required",(0,m.List)())).call(o,g),C=f.get("type"),w=f.get("format"),_=f.get("description"),A=n.getIn([g,"value"]),I=n.getIn([g,"errors"])||l,R=a.get(g)||!1,T=f.has("default")||f.has("example")||f.hasIn(["items","example"])||f.hasIn(["items","default"]),P=f.has("enum")&&(1===f.get("enum").size||E),k=T||P;let O="";"array"!==C||k||(O=[]),("object"===C||k)&&(O=(0,h.xi)(f,!1,{includeWriteOnly:!0})),"string"!=typeof O&&"object"===C&&(O=(0,h.Pz)(O)),"string"==typeof O&&"array"===C&&(O=JSON.parse(O));const V="string"===C&&("binary"===w||"base64"===w);return d().createElement("tr",{key:g,className:"parameters","data-property-name":g},d().createElement("td",{className:"parameters-col_name"},d().createElement("div",{className:E?"parameter__name required":"parameter__name"},g,E?d().createElement("span",null," *"):null),d().createElement("div",{className:"parameter__type"},C,w&&d().createElement("span",{className:"prop-format"},"($",w,")"),j&&y.size?s()(u=y.entrySeq()).call(u,(e=>{let[r,n]=e;return d().createElement(t,{key:`${r}-${n}`,xKey:r,xVal:n})})):null),d().createElement("div",{className:"parameter__deprecated"},f.get("deprecated")?"deprecated":null)),d().createElement("td",{className:"parameters-col_description"},d().createElement(q,{source:_}),S?d().createElement("div",null,d().createElement(e,{fn:v,dispatchInitialValue:!V,schema:f,description:g,getComponent:c,value:void 0===A?O:A,required:E,errors:I,onChange:e=>{b(e,[g])}}),E?null:d().createElement(M,{onChange:e=>x(g,e),isIncluded:R,isIncludedOptions:N(g),isDisabled:p()(A)?0!==A.length:!(0,h.O2)(A)})):null))})))))}const H=f(r,E,w);let K=null;return(0,g.O)(H)&&(K="json"),d().createElement("div",null,V&&d().createElement(q,{source:V}),B?d().createElement(k,{userHasEditedBody:t,examples:B,currentKey:w,currentUserInputValue:n,onSelect:e=>{_(e)},updateValue:b,defaultToFirstExample:!0,getComponent:c,setRetainRequestBodyValueFlag:A}):null,S?d().createElement("div",null,d().createElement(T,{value:n,errors:l,defaultValue:H,onChange:b,getComponent:c})):d().createElement(R,{getComponent:c,getConfigs:u,specSelectors:y,expandDepth:1,isExecute:S,schema:L.get("schema"),specPath:C.push("content",E),example:d().createElement(P,{className:"body-param__example",getConfigs:u,language:K,value:(0,h.Pz)(n)||H}),includeWriteOnly:!0}),B?d().createElement(O,{example:B.get(w),getComponent:c,getConfigs:u}):null)}},9928:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>a});var n=r(6689),s=r.n(n);r(580);class a extends 
s().Component{render(){const{specSelectors:e,oas3Selectors:t,oas3Actions:r,getComponent:n}=this.props,a=e.servers(),o=n("Servers");return a&&a.size?s().createElement("div",null,s().createElement("span",{className:"servers-title"},"Servers"),s().createElement(o,{servers:a,currentServer:t.selectedServer(),setSelectedServer:r.setSelectedServer,setServerVariableValue:r.setServerVariableValue,getServerVariable:t.serverVariableValue,getEffectiveServerValue:t.serverEffectiveValue})):null}}},6617:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>d});var n=r(1093),s=r.n(n),a=r(3580),o=r.n(a),l=r(3942),i=r.n(l),c=r(6689),p=r.n(c),u=r(5572);r(580),r(8082);class d extends p().Component{constructor(){super(...arguments),s()(this,"onServerChange",(e=>{this.setServer(e.target.value)})),s()(this,"onServerVariableValueChange",(e=>{let{setServerVariableValue:t,currentServer:r}=this.props,n=e.target.getAttribute("data-variable"),s=e.target.value;"function"==typeof t&&t({server:r,key:n,val:s})})),s()(this,"setServer",(e=>{let{setSelectedServer:t}=this.props;t(e)}))}componentDidMount(){var e;let{servers:t,currentServer:r}=this.props;r||this.setServer(null===(e=t.first())||void 0===e?void 0:e.get("url"))}UNSAFE_componentWillReceiveProps(e){let{servers:t,setServerVariableValue:r,getServerVariable:n}=e;if(this.props.currentServer!==e.currentServer||this.props.servers!==e.servers){var s;let a=o()(t).call(t,(t=>t.get("url")===e.currentServer)),l=o()(s=this.props.servers).call(s,(e=>e.get("url")===this.props.currentServer))||(0,u.OrderedMap)();if(!a)return this.setServer(t.first().get("url"));let c=l.get("variables")||(0,u.OrderedMap)(),p=(o()(c).call(c,(e=>e.get("default")))||(0,u.OrderedMap)()).get("default"),d=a.get("variables")||(0,u.OrderedMap)(),m=(o()(d).call(d,(e=>e.get("default")))||(0,u.OrderedMap)()).get("default");i()(d).call(d,((t,s)=>{n(e.currentServer,s)&&p===m||r({server:e.currentServer,key:s,val:t.get("default")||""})}))}}render(){var e,t;let{servers:r,currentServer:n,getServerVariable:s,getEffectiveServerValue:a}=this.props,l=(o()(r).call(r,(e=>e.get("url")===n))||(0,u.OrderedMap)()).get("variables")||(0,u.OrderedMap)(),c=0!==l.size;return p().createElement("div",{className:"servers"},p().createElement("label",{htmlFor:"servers"},p().createElement("select",{onChange:this.onServerChange,value:n},i()(e=r.valueSeq()).call(e,(e=>p().createElement("option",{value:e.get("url"),key:e.get("url")},e.get("url"),e.get("description")&&` - ${e.get("description")}`))).toArray())),c?p().createElement("div",null,p().createElement("div",{className:"computed-url"},"Computed URL:",p().createElement("code",null,a(n))),p().createElement("h4",null,"Server variables"),p().createElement("table",null,p().createElement("tbody",null,i()(t=l.entrySeq()).call(t,(e=>{var t;let[r,a]=e;return p().createElement("tr",{key:r},p().createElement("td",null,r),p().createElement("td",null,a.get("enum")?p().createElement("select",{"data-variable":r,onChange:this.onServerVariableValueChange},i()(t=a.get("enum")).call(t,(e=>p().createElement("option",{selected:e===s(n,r),key:e,value:e},e)))):p().createElement("input",{type:"text",value:s(n,r)||"",onChange:this.onServerVariableValueChange,"data-variable":r})))}))))):null)}}},7779:(e,t,r)=>{"use strict";r.r(t),r.d(t,{isOAS3:()=>c,isSwagger2:()=>p,OAS3ComponentWrapFactory:()=>u});var n=r(4250),s=r.n(n),a=r(3262),o=r.n(a),l=r(6689),i=r.n(l);function c(e){const t=e.get("openapi");return"string"==typeof t&&(o()(t).call(t,"3.0.")&&t.length>4)}function p(e){const 
t=e.get("swagger");return"string"==typeof t&&o()(t).call(t,"2.0")}function u(e){return(t,r)=>n=>{if(r&&r.specSelectors&&r.specSelectors.specJson){return c(r.specSelectors.specJson())?i().createElement(e,s()({},n,r,{Ori:t})):i().createElement(t,n)}return console.warn("OAS3 wrapper: couldn't get spec"),null}}},7451:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>u});var n=r(2044),s=r(3723),a=r(1741),o=r(6467),l=r(7761),i=r(7002),c=r(5065),p=r(2109);function u(){return{components:o.default,wrapComponents:l.default,statePlugins:{spec:{wrapSelectors:n,selectors:a},auth:{wrapSelectors:s},oas3:{actions:i,reducers:p.default,selectors:c}}}}},2109:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>u});var n=r(874),s=r.n(n),a=r(4235),o=r.n(a),l=r(66),i=r.n(l),c=r(5572),p=r(7002);const u={[p.UPDATE_SELECTED_SERVER]:(e,t)=>{let{payload:{selectedServerUrl:r,namespace:n}}=t;const s=n?[n,"selectedServer"]:["selectedServer"];return e.setIn(s,r)},[p.UPDATE_REQUEST_BODY_VALUE]:(e,t)=>{let{payload:{value:r,pathMethod:n}}=t,[a,l]=n;if(!c.Map.isMap(r))return e.setIn(["requestData",a,l,"bodyValue"],r);let i,p=e.getIn(["requestData",a,l,"bodyValue"])||(0,c.Map)();c.Map.isMap(p)||(p=(0,c.Map)());const[...u]=s()(r).call(r);return o()(u).call(u,(e=>{let t=r.getIn([e]);p.has(e)&&c.Map.isMap(t)||(i=p.setIn([e,"value"],t))})),e.setIn(["requestData",a,l,"bodyValue"],i)},[p.UPDATE_REQUEST_BODY_VALUE_RETAIN_FLAG]:(e,t)=>{let{payload:{value:r,pathMethod:n}}=t,[s,a]=n;return e.setIn(["requestData",s,a,"retainBodyValue"],r)},[p.UPDATE_REQUEST_BODY_INCLUSION]:(e,t)=>{let{payload:{value:r,pathMethod:n,name:s}}=t,[a,o]=n;return e.setIn(["requestData",a,o,"bodyInclusion",s],r)},[p.UPDATE_ACTIVE_EXAMPLES_MEMBER]:(e,t)=>{let{payload:{name:r,pathMethod:n,contextType:s,contextName:a}}=t,[o,l]=n;return e.setIn(["examples",o,l,s,a,"activeExample"],r)},[p.UPDATE_REQUEST_CONTENT_TYPE]:(e,t)=>{let{payload:{value:r,pathMethod:n}}=t,[s,a]=n;return e.setIn(["requestData",s,a,"requestContentType"],r)},[p.UPDATE_RESPONSE_CONTENT_TYPE]:(e,t)=>{let{payload:{value:r,path:n,method:s}}=t;return e.setIn(["requestData",n,s,"responseContentType"],r)},[p.UPDATE_SERVER_VARIABLE_VALUE]:(e,t)=>{let{payload:{server:r,namespace:n,key:s,val:a}}=t;const o=n?[n,"serverVariableValues",r,s]:["serverVariableValues",r,s];return e.setIn(o,a)},[p.SET_REQUEST_BODY_VALIDATE_ERROR]:(e,t)=>{let{payload:{path:r,method:n,validationErrors:s}}=t,a=[];if(a.push("Required field is not provided"),s.missingBodyValue)return e.setIn(["requestData",r,n,"errors"],(0,c.fromJS)(a));if(s.missingRequiredKeys&&s.missingRequiredKeys.length>0){const{missingRequiredKeys:t}=s;return e.updateIn(["requestData",r,n,"bodyValue"],(0,c.fromJS)({}),(e=>i()(t).call(t,((e,t)=>e.setIn([t,"errors"],(0,c.fromJS)(a))),e)))}return console.warn("unexpected result: SET_REQUEST_BODY_VALIDATE_ERROR"),e},[p.CLEAR_REQUEST_BODY_VALIDATE_ERROR]:(e,t)=>{let{payload:{path:r,method:n}}=t;const a=e.getIn(["requestData",r,n,"bodyValue"]);if(!c.Map.isMap(a))return e.setIn(["requestData",r,n,"errors"],(0,c.fromJS)([]));const[...o]=s()(a).call(a);return o?e.updateIn(["requestData",r,n,"bodyValue"],(0,c.fromJS)({}),(e=>i()(o).call(o,((e,t)=>e.setIn([t,"errors"],(0,c.fromJS)([]))),e))):e},[p.CLEAR_REQUEST_BODY_VALUE]:(e,t)=>{let{payload:{pathMethod:r}}=t,[n,s]=r;const a=e.getIn(["requestData",n,s,"bodyValue"]);return a?c.Map.isMap(a)?e.setIn(["requestData",n,s,"bodyValue"],(0,c.Map)()):e.setIn(["requestData",n,s,"bodyValue"],""):e}}},5065:(e,t,r)=>{"use 
strict";r.r(t),r.d(t,{selectedServer:()=>f,requestBodyValue:()=>y,shouldRetainRequestBodyValue:()=>v,selectDefaultRequestBodyValue:()=>E,hasUserEditedBody:()=>S,requestBodyInclusionSetting:()=>C,requestBodyErrors:()=>b,activeExamplesMember:()=>x,requestContentType:()=>w,responseContentType:()=>_,serverVariableValue:()=>A,serverVariables:()=>I,serverEffectiveValue:()=>N,validateBeforeExecute:()=>q,validateShallowRequired:()=>T});var n=r(3942),s=r.n(n),a=r(4235),o=r.n(a),l=r(7252),i=r.n(l),c=r(8493),p=r.n(c),u=r(5572),d=r(7779),m=r(2458),h=r(1890);function g(e){return function(){for(var t=arguments.length,r=new Array(t),n=0;n{const n=t.getSystem().specSelectors.specJson();return(0,d.isOAS3)(n)?e(...r):null}}}const f=g(((e,t)=>{const r=t?[t,"selectedServer"]:["selectedServer"];return e.getIn(r)||""})),y=g(((e,t,r)=>e.getIn(["requestData",t,r,"bodyValue"])||null)),v=g(((e,t,r)=>e.getIn(["requestData",t,r,"retainBodyValue"])||!1)),E=(e,t,r)=>e=>{const{oas3Selectors:n,specSelectors:s}=e.getSystem(),a=s.specJson();if((0,d.isOAS3)(a)){const e=n.requestContentType(t,r);if(e)return(0,m.getDefaultRequestBodyValue)(s.specResolvedSubtree(["paths",t,r,"requestBody"]),e,n.activeExamplesMember(t,r,"requestBody","requestBody"))}return null},S=(e,t,r)=>e=>{const{oas3Selectors:n,specSelectors:s}=e.getSystem(),a=s.specJson();if((0,d.isOAS3)(a)){let e=!1;const a=n.requestContentType(t,r);let o=n.requestBodyValue(t,r);if(u.Map.isMap(o)&&(o=(0,h.Pz)(o.mapEntries((e=>u.Map.isMap(e[1])?[e[0],e[1].get("value")]:e)).toJS())),u.List.isList(o)&&(o=(0,h.Pz)(o)),a){const l=(0,m.getDefaultRequestBodyValue)(s.specResolvedSubtree(["paths",t,r,"requestBody"]),a,n.activeExamplesMember(t,r,"requestBody","requestBody"));e=!!o&&o!==l}return e}return null},C=g(((e,t,r)=>e.getIn(["requestData",t,r,"bodyInclusion"])||(0,u.Map)())),b=g(((e,t,r)=>e.getIn(["requestData",t,r,"errors"])||null)),x=g(((e,t,r,n,s)=>e.getIn(["examples",t,r,n,s,"activeExample"])||null)),w=g(((e,t,r)=>e.getIn(["requestData",t,r,"requestContentType"])||null)),_=g(((e,t,r)=>e.getIn(["requestData",t,r,"responseContentType"])||null)),A=g(((e,t,r)=>{let n;if("string"!=typeof t){const{server:e,namespace:s}=t;n=s?[s,"serverVariableValues",e,r]:["serverVariableValues",e,r]}else{n=["serverVariableValues",t,r]}return e.getIn(n)||null})),I=g(((e,t)=>{let r;if("string"!=typeof t){const{server:e,namespace:n}=t;r=n?[n,"serverVariableValues",e]:["serverVariableValues",e]}else{r=["serverVariableValues",t]}return e.getIn(r)||(0,u.OrderedMap)()})),N=g(((e,t)=>{var r,n;if("string"!=typeof t){const{server:s,namespace:a}=t;n=s,r=a?e.getIn([a,"serverVariableValues",n]):e.getIn(["serverVariableValues",n])}else n=t,r=e.getIn(["serverVariableValues",n]);r=r||(0,u.OrderedMap)();let a=n;return s()(r).call(r,((e,t)=>{a=a.replace(new RegExp(`{${t}}`,"g"),e)})),a})),q=(R=(e,t)=>((e,t)=>(t=t||[],!!e.getIn(["requestData",...t,"bodyValue"])))(e,t),function(){for(var e=arguments.length,t=new Array(e),r=0;r{const r=e.getSystem().specSelectors.specJson();let n=[...t][1]||[];return!r.getIn(["paths",...n,"requestBody","required"])||R(...t)}});var R;const T=(e,t)=>{var r;let{oas3RequiredRequestBodyContentType:n,oas3RequestContentType:s,oas3RequestBodyValue:a}=t,l=[];if(!u.Map.isMap(a))return l;let c=[];return o()(r=i()(n.requestContentType)).call(r,(e=>{if(e===s){let t=n.requestContentType[e];o()(t).call(t,(e=>{p()(c).call(c,e)<0&&c.push(e)}))}})),o()(c).call(c,(e=>{a.getIn([e,"value"])||l.push(e)})),l}},1741:(e,t,r)=>{"use strict";r.r(t),r.d(t,{servers:()=>c,isSwagger2:()=>u});var 
n=r(6814),s=r(5572),a=r(7779);const o=e=>e||(0,s.Map)(),l=(0,n.createSelector)(o,(e=>e.get("json",(0,s.Map)()))),i=(0,n.createSelector)(o,(e=>e.get("resolved",(0,s.Map)()))),c=(p=(0,n.createSelector)((e=>{let t=i(e);return t.count()<1&&(t=l(e)),t}),(e=>e.getIn(["servers"])||(0,s.Map)())),()=>function(e){const t=e.getSystem().specSelectors.specJson();if((0,a.isOAS3)(t)){for(var r=arguments.length,n=new Array(r>1?r-1:0),s=1;s()=>{const e=t.getSystem().specSelectors.specJson();return(0,a.isSwagger2)(e)}},2044:(e,t,r)=>{"use strict";r.r(t),r.d(t,{definitions:()=>m,hasHost:()=>h,securityDefinitions:()=>g,host:()=>f,basePath:()=>y,consumes:()=>v,produces:()=>E,schemes:()=>S,servers:()=>C,isOAS3:()=>b,isSwagger2:()=>x});var n=r(6814),s=r(3881),a=r(5572),o=r(7779);function l(e){return(t,r)=>function(){const n=r.getSystem().specSelectors.specJson();return(0,o.isOAS3)(n)?e(...arguments):t(...arguments)}}const i=e=>e||(0,a.Map)(),c=l((0,n.createSelector)((()=>null))),p=(0,n.createSelector)(i,(e=>e.get("json",(0,a.Map)()))),u=(0,n.createSelector)(i,(e=>e.get("resolved",(0,a.Map)()))),d=e=>{let t=u(e);return t.count()<1&&(t=p(e)),t},m=l((0,n.createSelector)(d,(e=>{const t=e.getIn(["components","schemas"]);return a.Map.isMap(t)?t:(0,a.Map)()}))),h=l((e=>d(e).hasIn(["servers",0]))),g=l((0,n.createSelector)(s.specJsonWithResolvedSubtrees,(e=>e.getIn(["components","securitySchemes"])||null))),f=c,y=c,v=c,E=c,S=c,C=l((0,n.createSelector)(d,(e=>e.getIn(["servers"])||(0,a.Map)()))),b=(e,t)=>()=>{const e=t.getSystem().specSelectors.specJson();return(0,o.isOAS3)(a.Map.isMap(e)?e:(0,a.Map)())},x=(e,t)=>()=>{const e=t.getSystem().specSelectors.specJson();return(0,o.isSwagger2)(a.Map.isMap(e)?e:(0,a.Map)())}},356:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>a});var n=r(6689),s=r.n(n);const a=(0,r(7779).OAS3ComponentWrapFactory)((e=>{let{Ori:t,...r}=e;const{schema:n,getComponent:a,errSelectors:o,authorized:l,onAuthChange:i,name:c}=r,p=a("HttpAuth");return"http"===n.get("type")?s().createElement(p,{key:c,schema:n,name:c,errSelectors:o,authorized:l,getComponent:a,onChange:i}):s().createElement(t,r)}))},7761:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>c});var n=r(2460),s=r(356),a=r(9487),o=r(58),l=r(3499),i=r(287);const c={Markdown:n.default,AuthItem:s.default,JsonSchema_string:i.default,VersionStamp:a.default,model:l.default,onlineValidatorBadge:o.default}},287:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>a});var n=r(6689),s=r.n(n);const a=(0,r(7779).OAS3ComponentWrapFactory)((e=>{let{Ori:t,...r}=e;const{schema:n,getComponent:a,errors:o,onChange:l}=r,i=n&&n.get?n.get("format"):null,c=n&&n.get?n.get("type"):null,p=a("Input");return c&&"string"===c&&i&&("binary"===i||"base64"===i)?s().createElement(p,{type:"file",className:o.length?"invalid":"",title:o.length?o:"",onChange:e=>{l(e.target.files[0])},disabled:t.isDisabled}):s().createElement(t,r)}))},2460:(e,t,r)=>{"use strict";r.r(t),r.d(t,{Markdown:()=>m,default:()=>h});var n=r(7390),s=r.n(n),a=r(6689),o=r.n(a),l=(r(580),r(9003)),i=r.n(l),c=r(963),p=r(7779),u=r(2552);const d=new c.Remarkable("commonmark");d.block.ruler.enable(["table"]),d.set({linkTarget:"_blank"});const m=e=>{let{source:t,className:r="",getConfigs:n}=e;if("string"!=typeof t)return null;if(t){const{useUnsafeMarkdown:e}=n(),a=d.render(t),l=(0,u.s)(a,{useUnsafeMarkdown:e});let c;return"string"==typeof l&&(c=s()(l).call(l)),o().createElement("div",{dangerouslySetInnerHTML:{__html:c},className:i()(r,"renderedMarkdown")})}return 
null};m.defaultProps={getConfigs:()=>({useUnsafeMarkdown:!1})};const h=(0,p.OAS3ComponentWrapFactory)(m)},3499:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>p});var n=r(4250),s=r.n(n),a=r(6689),o=r.n(a),l=(r(580),r(7779)),i=r(6024);class c extends a.Component{render(){let{getConfigs:e,schema:t}=this.props,r=["model-box"],n=null;return!0===t.get("deprecated")&&(r.push("deprecated"),n=o().createElement("span",{className:"model-deprecated-warning"},"Deprecated:")),o().createElement("div",{className:r.join(" ")},n,o().createElement(i.Z,s()({},this.props,{getConfigs:e,depth:1,expandDepth:this.props.expandDepth||0})))}}const p=(0,l.OAS3ComponentWrapFactory)(c)},58:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>a});var n=r(7779),s=r(5623);const a=(0,n.OAS3ComponentWrapFactory)(s.Z)},9487:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>a});var n=r(6689),s=r.n(n);const a=(0,r(7779).OAS3ComponentWrapFactory)((e=>{const{Ori:t}=e;return s().createElement("span",null,s().createElement(t,e),s().createElement("small",{className:"version-stamp"},s().createElement("pre",{className:"version"},"OAS3")))}))},8560:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>o});var n=r(9300),s=r.n(n);let a=!1;function o(){return{statePlugins:{spec:{wrapActions:{updateSpec:e=>function(){return a=!0,e(...arguments)},updateJsonSpec:(e,t)=>function(){const r=t.getConfigs().onComplete;return a&&"function"==typeof r&&(s()(r,0),a=!1),e(...arguments)}}}}}}},8223:(e,t,r)=>{"use strict";r.r(t),r.d(t,{requestSnippetGenerator_curl_bash:()=>A,requestSnippetGenerator_curl_cmd:()=>I,requestSnippetGenerator_curl_powershell:()=>_});var n=r(8493),s=r.n(n),a=r(7390),o=r.n(a),l=r(8344),i=r.n(l),c=r(3942),p=r.n(c);const u=require("@babel/runtime-corejs3/core-js-stable/instance/repeat");var d=r.n(u),m=r(7862),h=r.n(m),g=r(2605),f=r.n(g),y=r(7504),v=r(5572);const E=e=>{var t;const r="_**[]";return s()(e).call(e,r)<0?e:o()(t=e.split(r)[0]).call(t)},S=e=>"-d "===e||/^[_\/-]/g.test(e)?e:"'"+e.replace(/'/g,"'\\''")+"'",C=e=>"-d "===(e=e.replace(/\^/g,"^^").replace(/\\"/g,'\\\\"').replace(/"/g,'""').replace(/\n/g,"^\n"))?e.replace(/-d /g,"-d ^\n"):/^[_\/-]/g.test(e)?e:'"'+e+'"',b=e=>"-d "===e?e:/\n/.test(e)?'@"\n'+e.replace(/"/g,'\\"').replace(/`/g,"``").replace(/\$/,"`$")+'\n"@':/^[_\/-]/g.test(e)?e:"'"+e.replace(/"/g,'""').replace(/'/g,"''")+"'";function x(e){let t=[];for(let[r,n]of e.get("body").entrySeq()){let e=E(r);n instanceof y.Z.File?t.push(` "${e}": {\n "name": "${n.name}"${n.type?`,\n "type": "${n.type}"`:""}\n }`):t.push(` "${e}": ${i()(n,null,2).replace(/(\r\n|\r|\n)/g,"\n ")}`)}return`{\n${t.join(",\n")}\n}`}const w=function(e,t,r){let n=arguments.length>3&&void 0!==arguments[3]?arguments[3]:"",s=!1,a="";const o=function(){for(var e=arguments.length,r=new Array(e),n=0;na+=` ${r}`,u=function(){var e;let t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:1;return a+=d()(e=" ").call(e,t)};let m=e.get("headers");if(a+="curl"+n,e.has("curlOptions")&&o(...e.get("curlOptions")),o("-X",e.get("method")),c(),u(),l(`${e.get("url")}`),m&&m.size)for(let t of h()(g=e.get("headers")).call(g)){var g;c(),u();let[e,r]=t;l("-H",`${e}: ${r}`),s=s||/^content-type$/i.test(e)&&/^multipart\/form-data$/i.test(r)}const S=e.get("body");var C;if(S)if(s&&f()(C=["POST","PUT","PATCH"]).call(C,e.get("method")))for(let[e,t]of S.entrySeq()){let r=E(e);c(),u(),l("-F"),t instanceof y.Z.File?o(`${r}=@${t.name}${t.type?`;type=${t.type}`:""}`):o(`${r}=${t}`)}else if(S instanceof y.Z.File)c(),u(),l(`--data-binary '@${S.name}'`);else{c(),u(),l("-d ");let 
t=S;v.Map.isMap(t)?l(x(e)):("string"!=typeof t&&(t=i()(t)),l(t))}else S||"POST"!==e.get("method")||(c(),u(),l("-d ''"));return a},_=e=>w(e,b,"`\n",".exe"),A=e=>w(e,S,"\\\n"),I=e=>w(e,C,"^\n")},6575:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>o});var n=r(8223),s=r(4669),a=r(4206);const o=()=>({components:{RequestSnippets:a.default},fn:n,statePlugins:{requestSnippets:{selectors:s}}})},4206:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>C});var n=r(9998),s=r.n(n),a=r(1733),o=r.n(a),l=r(4235),i=r.n(l),c=r(3942),p=r.n(c),u=r(6689),d=r.n(u),m=(r(580),r(1712)),h=r.n(m),g=r(5716),f=r.n(g),y=r(2807),v=r(6068);const E={cursor:"pointer",lineHeight:1,display:"inline-flex",backgroundColor:"rgb(250, 250, 250)",paddingBottom:"0",paddingTop:"0",border:"1px solid rgb(51, 51, 51)",borderRadius:"4px 4px 0 0",boxShadow:"none",borderBottom:"none"},S={cursor:"pointer",lineHeight:1,display:"inline-flex",backgroundColor:"rgb(51, 51, 51)",boxShadow:"none",border:"1px solid rgb(51, 51, 51)",paddingBottom:"0",paddingTop:"0",borderRadius:"4px 4px 0 0",marginTop:"-5px",marginRight:"-5px",marginLeft:"-5px",zIndex:"9999",borderBottom:"none"},C=e=>{var t,r;let{request:n,requestSnippetsSelectors:a,getConfigs:l}=e;const c=f()(l)?l():null,m=!1!==h()(c,"syntaxHighlight")&&h()(c,"syntaxHighlight.activated",!0),g=(0,u.useRef)(null),[C,b]=(0,u.useState)(null===(t=a.getSnippetGenerators())||void 0===t?void 0:t.keySeq().first()),[x,w]=(0,u.useState)(null==a?void 0:a.getDefaultExpanded());(0,u.useEffect)((()=>{}),[]),(0,u.useEffect)((()=>{var e;const t=s()(e=o()(g.current.childNodes)).call(e,(e=>{var t;return!!e.nodeType&&(null===(t=e.classList)||void 0===t?void 0:t.contains("curl-command"))}));return i()(t).call(t,(e=>e.addEventListener("mousewheel",R,{passive:!1}))),()=>{i()(t).call(t,(e=>e.removeEventListener("mousewheel",R)))}}),[n]);const _=a.getSnippetGenerators(),A=_.get(C),I=A.get("fn")(n),N=()=>{w(!x)},q=e=>e===C?S:E,R=e=>{const{target:t,deltaY:r}=e,{scrollHeight:n,offsetHeight:s,scrollTop:a}=t;n>s&&(0===a&&r<0||s+a>=n&&r>0)&&e.preventDefault()},T=m?d().createElement(v.d3,{language:A.get("syntax"),className:"curl microlight",style:(0,v.C2)(h()(c,"syntaxHighlight.theme"))},I):d().createElement("textarea",{readOnly:!0,className:"curl",value:I});return d().createElement("div",{className:"request-snippets",ref:g},d().createElement("div",{style:{width:"100%",display:"flex",justifyContent:"flex-start",alignItems:"center",marginBottom:"15px"}},d().createElement("h4",{onClick:()=>N(),style:{cursor:"pointer"}},"Snippets"),d().createElement("button",{onClick:()=>N(),style:{border:"none",background:"none"},title:x?"Collapse operation":"Expand operation"},d().createElement("svg",{className:"arrow",width:"10",height:"10"},d().createElement("use",{href:x?"#large-arrow-down":"#large-arrow",xlinkHref:x?"#large-arrow-down":"#large-arrow"})))),x&&d().createElement("div",{className:"curl-command"},d().createElement("div",{style:{paddingLeft:"15px",paddingRight:"10px",width:"100%",display:"flex"}},p()(r=_.entrySeq()).call(r,(e=>{let[t,r]=e;return d().createElement("div",{style:q(t),className:"btn",key:t,onClick:()=>(e=>{C!==e&&b(e)})(t)},d().createElement("h4",{style:t===C?{color:"white"}:{}},r.get("title")))}))),d().createElement("div",{className:"copy-to-clipboard"},d().createElement(y.CopyToClipboard,{text:I},d().createElement("button",null))),d().createElement("div",null,T)))}},4669:(e,t,r)=>{"use strict";r.r(t),r.d(t,{getGenerators:()=>d,getSnippetGenerators:()=>m,getActiveLanguage:()=>h,getDefaultExpanded:()=>g});var 
n=r(9998),s=r.n(n),a=r(2605),o=r.n(a),l=r(3942),i=r.n(l),c=r(6814),p=r(5572);const u=e=>e||(0,p.Map)(),d=(0,c.createSelector)(u,(e=>{const t=e.get("languages"),r=e.get("generators",(0,p.Map)());return!t||t.isEmpty()?r:s()(r).call(r,((e,r)=>o()(t).call(t,r)))})),m=e=>t=>{var r,n;let{fn:a}=t;return s()(r=i()(n=d(e)).call(n,((e,t)=>{const r=(e=>a[`requestSnippetGenerator_${e}`])(t);return"function"!=typeof r?null:e.set("fn",r)}))).call(r,(e=>e))},h=(0,c.createSelector)(u,(e=>e.get("activeLanguage"))),g=(0,c.createSelector)(u,(e=>e.get("defaultExpanded")))},6195:(e,t,r)=>{"use strict";r.r(t),r.d(t,{ErrorBoundary:()=>l,default:()=>i});r(580);var n=r(6689),s=r.n(n),a=r(6189),o=r(9403);class l extends n.Component{static getDerivedStateFromError(e){return{hasError:!0,error:e}}constructor(){super(...arguments),this.state={hasError:!1,error:null}}componentDidCatch(e,t){this.props.fn.componentDidCatch(e,t)}render(){const{getComponent:e,targetName:t,children:r}=this.props;if(this.state.hasError){const r=e("Fallback");return s().createElement(r,{name:t})}return r}}l.defaultProps={targetName:"this component",getComponent:()=>o.default,fn:{componentDidCatch:a.componentDidCatch},children:null};const i=l},9403:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>a});var n=r(6689),s=r.n(n);r(580);const a=e=>{let{name:t}=e;return s().createElement("div",{className:"fallback"},"😱 ",s().createElement("i",null,"Could not render ","t"===t?"this component":t,", see the console."))}},6189:(e,t,r)=>{"use strict";r.r(t),r.d(t,{componentDidCatch:()=>l,withErrorBoundary:()=>i});var n=r(4250),s=r.n(n),a=r(6689),o=r.n(a);const l=console.error,i=e=>t=>{const{getComponent:r,fn:n}=e(),l=r("ErrorBoundary"),i=n.getDisplayName(t);class c extends a.Component{render(){return o().createElement(l,{targetName:i,getComponent:r,fn:n},o().createElement(t,s()({},this.props,this.context)))}}var p;return c.displayName=`WithErrorBoundary(${i})`,(p=t).prototype&&p.prototype.isReactComponent&&(c.prototype.mapStateToProps=t.prototype.mapStateToProps),c}},9595:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>p});const n=require("@babel/runtime-corejs3/core-js-stable/instance/fill");var s=r.n(n);const a=require("lodash/zipObject");var o=r.n(a),l=r(6195),i=r(9403),c=r(6189);const p=function(){let{componentList:e=[],fullOverride:t=!1}=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};return r=>{var n;let{getSystem:a}=r;const p=t?e:["App","BaseLayout","VersionPragmaFilter","InfoContainer","ServersContainer","SchemesContainer","AuthorizeBtnContainer","FilterContainer","Operations","OperationContainer","parameters","responses","OperationServers","Models","ModelWrapper",...e],u=o()(p,s()(n=Array(p.length)).call(n,((e,t)=>{let{fn:r}=t;return r.withErrorBoundary(e)})));return{fn:{componentDidCatch:c.componentDidCatch,withErrorBoundary:(0,c.withErrorBoundary)(a)},components:{ErrorBoundary:l.default,Fallback:i.default},wrapComponents:u}}}},4128:(e,t,r)=>{"use strict";r.r(t),r.d(t,{createXMLExample:()=>z,inferSchema:()=>U,memoizedCreateXMLExample:()=>J,memoizedSampleFromSchema:()=>F,sampleFromSchema:()=>B,sampleFromSchemaGeneric:()=>L});var n=r(8493),s=r.n(n),a=r(4235),o=r.n(a),l=r(7104),i=r.n(l),c=r(2605),p=r.n(c),u=r(5626),d=r.n(u),m=r(600),h=r.n(m),g=r(3580),f=r.n(g),y=r(4883),v=r.n(y),E=r(3942),S=r.n(E),C=r(8344),b=r.n(C);const x=require("xml");var w=r.n(x);const _=require("randexp");var A=r.n(_);const I=require("lodash/isEmpty");var N=r.n(I),q=r(1890),R=r(7481);const T={string:e=>e.pattern?(e=>{try{return 
new(A())(e).gen()}catch(e){return"string"}})(e.pattern):"string",string_email:()=>"user@example.com","string_date-time":()=>(new Date).toISOString(),string_date:()=>(new Date).toISOString().substring(0,10),string_uuid:()=>"3fa85f64-5717-4562-b3fc-2c963f66afa6",string_hostname:()=>"example.com",string_ipv4:()=>"198.51.100.42",string_ipv6:()=>"2001:0db8:5b96:0000:0000:426f:8e17:642a",number:()=>0,number_float:()=>0,integer:()=>0,boolean:e=>"boolean"!=typeof e.default||e.default},P=e=>{e=(0,q.mz)(e);let{type:t,format:r}=e,n=T[`${t}_${r}`]||T[t];return(0,q.Wl)(n)?n(e):"Unknown Type: "+e.type},k=e=>(0,q.XV)(e,"$$ref",(e=>"string"==typeof e&&s()(e).call(e,"#")>-1)),O=["maxProperties","minProperties"],M=["minItems","maxItems"],j=["minimum","maximum","exclusiveMinimum","exclusiveMaximum"],V=["minLength","maxLength"],D=function(e,t){var r;let n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};const a=r=>{void 0===t[r]&&void 0!==e[r]&&(t[r]=e[r])};var l;(o()(r=["example","default","enum","xml","type",...O,...M,...j,...V]).call(r,(e=>a(e))),void 0!==e.required&&i()(e.required))&&(void 0!==t.required&&t.required.length||(t.required=[]),o()(l=e.required).call(l,(e=>{var r;p()(r=t.required).call(r,e)||t.required.push(e)})));if(e.properties){t.properties||(t.properties={});let r=(0,q.mz)(e.properties);for(let a in r){var c;if(Object.prototype.hasOwnProperty.call(r,a))if(!r[a]||!r[a].deprecated)if(!r[a]||!r[a].readOnly||n.includeReadOnly)if(!r[a]||!r[a].writeOnly||n.includeWriteOnly)if(!t.properties[a])t.properties[a]=r[a],!e.required&&i()(e.required)&&-1!==s()(c=e.required).call(c,a)&&(t.required?t.required.push(a):t.required=[a])}}return e.items&&(t.items||(t.items={}),t.items=D(e.items,t.items,n)),t},L=function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:void 0,n=arguments.length>3&&void 0!==arguments[3]&&arguments[3];e&&(0,q.Wl)(e.toJS)&&(e=e.toJS());let a=void 0!==r||e&&void 0!==e.example||e&&void 0!==e.default;const l=!a&&e&&e.oneOf&&e.oneOf.length>0,c=!a&&e&&e.anyOf&&e.anyOf.length>0;if(!a&&(l||c)){const r=(0,q.mz)(l?e.oneOf[0]:e.anyOf[0]);if(D(r,e,t),!e.xml&&r.xml&&(e.xml=r.xml),void 0!==e.example&&void 0!==r.example)a=!0;else if(r.properties){e.properties||(e.properties={});let n=(0,q.mz)(r.properties);for(let a in n){var u;if(Object.prototype.hasOwnProperty.call(n,a))if(!n[a]||!n[a].deprecated)if(!n[a]||!n[a].readOnly||t.includeReadOnly)if(!n[a]||!n[a].writeOnly||t.includeWriteOnly)if(!e.properties[a])e.properties[a]=n[a],!r.required&&i()(r.required)&&-1!==s()(u=r.required).call(u,a)&&(e.required?e.required.push(a):e.required=[a])}}}const m={};let{xml:g,type:y,example:E,properties:C,additionalProperties:b,items:x}=e||{},{includeReadOnly:w,includeWriteOnly:_}=t;g=g||{};let A,{name:I,prefix:R,namespace:T}=g,V={};if(n&&(I=I||"notagname",A=(R?R+":":"")+I,T)){m[R?"xmlns:"+R:"xmlns"]=T}n&&(V[A]=[]);const U=t=>d()(t).call(t,(t=>Object.prototype.hasOwnProperty.call(e,t)));e&&!y&&(C||b||U(O)?y="object":x||U(M)?y="array":U(j)?(y="number",e.type="number"):a||e.enum||(y="string",e.type="string"));const z=t=>{var r,n,s,a,o;null!==(null===(r=e)||void 0===r?void 0:r.maxItems)&&void 0!==(null===(n=e)||void 0===n?void 0:n.maxItems)&&(t=h()(t).call(t,0,null===(o=e)||void 0===o?void 0:o.maxItems));if(null!==(null===(s=e)||void 0===s?void 0:s.minItems)&&void 0!==(null===(a=e)||void 0===a?void 0:a.minItems)){let r=0;for(;t.length<(null===(l=e)||void 0===l?void 0:l.minItems);){var l;t.push(t[r++%t.length])}}return 
t},B=(0,q.mz)(C);let $,J=0;const F=()=>e&&null!==e.maxProperties&&void 0!==e.maxProperties&&J>=e.maxProperties,W=()=>{if(!e||!e.required)return 0;let t=0;var r,s;n?o()(r=e.required).call(r,(e=>t+=void 0===V[e]?0:1)):o()(s=e.required).call(s,(e=>{var r;return t+=void 0===(null===(r=V[A])||void 0===r?void 0:f()(r).call(r,(t=>void 0!==t[e])))?0:1}));return e.required.length-t},H=t=>{var r;return!(e&&e.required&&e.required.length)||!p()(r=e.required).call(r,t)},K=t=>!e||null===e.maxProperties||void 0===e.maxProperties||!F()&&(!H(t)||e.maxProperties-J-W()>0);if($=n?function(r){let s=arguments.length>1&&void 0!==arguments[1]?arguments[1]:void 0;if(e&&B[r]){if(B[r].xml=B[r].xml||{},B[r].xml.attribute){const e=i()(B[r].enum)?B[r].enum[0]:void 0,t=B[r].example,n=B[r].default;return void(m[B[r].xml.name||r]=void 0!==t?t:void 0!==n?n:void 0!==e?e:P(B[r]))}B[r].xml.name=B[r].xml.name||r}else B[r]||!1===b||(B[r]={xml:{name:r}});let a=L(e&&B[r]||void 0,t,s,n);var o;K(r)&&(J++,i()(a)?V[A]=v()(o=V[A]).call(o,a):V[A].push(a))}:(r,s)=>{if(K(r)){if(Object.prototype.hasOwnProperty.call(e,"discriminator")&&e.discriminator&&Object.prototype.hasOwnProperty.call(e.discriminator,"mapping")&&e.discriminator.mapping&&Object.prototype.hasOwnProperty.call(e,"$$ref")&&e.$$ref&&e.discriminator.propertyName===r){for(let t in e.discriminator.mapping)if(-1!==e.$$ref.search(e.discriminator.mapping[t])){V[r]=t;break}}else V[r]=L(B[r],t,s,n);J++}},a){let s;if(s=k(void 0!==r?r:void 0!==E?E:e.default),!n){if("number"==typeof s&&"string"===y)return`${s}`;if("string"!=typeof s||"string"===y)return s;try{return JSON.parse(s)}catch(e){return s}}if(e||(y=i()(s)?"array":typeof s),"array"===y){if(!i()(s)){if("string"==typeof s)return s;s=[s]}const r=e?e.items:void 0;r&&(r.xml=r.xml||g||{},r.xml.name=r.xml.name||g.name);let a=S()(s).call(s,(e=>L(r,t,e,n)));return a=z(a),g.wrapped?(V[A]=a,N()(m)||V[A].push({_attr:m})):V=a,V}if("object"===y){if("string"==typeof s)return s;for(let t in s)Object.prototype.hasOwnProperty.call(s,t)&&(e&&B[t]&&B[t].readOnly&&!w||e&&B[t]&&B[t].writeOnly&&!_||(e&&B[t]&&B[t].xml&&B[t].xml.attribute?m[B[t].xml.name||t]=s[t]:$(t,s[t])));return N()(m)||V[A].push({_attr:m}),V}return V[A]=N()(m)?s:[{_attr:m},s],V}if("object"===y){for(let e in B)Object.prototype.hasOwnProperty.call(B,e)&&(B[e]&&B[e].deprecated||B[e]&&B[e].readOnly&&!w||B[e]&&B[e].writeOnly&&!_||$(e));if(n&&m&&V[A].push({_attr:m}),F())return V;if(!0===b)n?V[A].push({additionalProp:"Anything can be here"}):V.additionalProp1={},J++;else if(b){const r=(0,q.mz)(b),s=L(r,t,void 0,n);if(n&&r.xml&&r.xml.name&&"notagname"!==r.xml.name)V[A].push(s);else{const t=null!==e.minProperties&&void 0!==e.minProperties&&JL(D(x,e,t),t,void 0,n)));else if(i()(x.oneOf)){var Y;r=S()(Y=x.oneOf).call(Y,(e=>L(D(x,e,t),t,void 0,n)))}else{if(!(!n||n&&g.wrapped))return L(x,t,void 0,n);r=[L(x,t,void 0,n)]}return r=z(r),n&&g.wrapped?(V[A]=r,N()(m)||V[A].push({_attr:m}),V):r}let X;if(e&&i()(e.enum))X=(0,q.AF)(e.enum)[0];else{if(!e)return;if(X=P(e),"number"==typeof X){let t=e.minimum;null!=t&&(e.exclusiveMinimum&&t++,X=t);let r=e.maximum;null!=r&&(e.exclusiveMaximum&&r--,X=r)}if("string"==typeof X&&(null!==e.maxLength&&void 0!==e.maxLength&&(X=h()(X).call(X,0,e.maxLength)),null!==e.minLength&&void 0!==e.minLength)){let t=0;for(;X.length(e.schema&&(e=e.schema),e.properties&&(e.type="object"),e),z=(e,t,r)=>{const n=L(e,t,r,!0);if(n)return"string"==typeof 
n?n:w()(n,{declaration:!0,indent:"\t"})},B=(e,t,r)=>L(e,t,r,!1),$=(e,t,r)=>[e,b()(t),b()(r)],J=(0,R.Z)(z,$),F=(0,R.Z)(B,$)},8883:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>s});var n=r(4128);function s(){return{fn:n}}},9381:(e,t,r)=>{"use strict";r.r(t),r.d(t,{CLEAR_REQUEST:()=>Q,CLEAR_RESPONSE:()=>X,CLEAR_VALIDATE_PARAMS:()=>ee,LOG_REQUEST:()=>Y,SET_MUTATED_REQUEST:()=>G,SET_REQUEST:()=>Z,SET_RESPONSE:()=>K,SET_SCHEME:()=>se,UPDATE_EMPTY_PARAM_INCLUSION:()=>W,UPDATE_JSON:()=>J,UPDATE_OPERATION_META_VALUE:()=>te,UPDATE_PARAM:()=>F,UPDATE_RESOLVED:()=>re,UPDATE_RESOLVED_SUBTREE:()=>ne,UPDATE_SPEC:()=>B,UPDATE_URL:()=>$,VALIDATE_PARAMS:()=>H,changeConsumesValue:()=>be,changeParam:()=>ge,changeParamByIdentity:()=>fe,changeProducesValue:()=>xe,clearRequest:()=>Te,clearResponse:()=>Re,clearValidateParams:()=>Ce,execute:()=>qe,executeRequest:()=>Ne,invalidateResolvedSubtreeCache:()=>ve,logRequest:()=>Ie,parseToJson:()=>ce,requestResolvedSubtree:()=>he,resolveSpec:()=>ue,setMutatedRequest:()=>Ae,setRequest:()=>_e,setResponse:()=>we,setScheme:()=>Pe,updateEmptyParamInclusion:()=>Se,updateJsonSpec:()=>ie,updateResolved:()=>oe,updateResolvedSubtree:()=>ye,updateSpec:()=>ae,updateUrl:()=>le,validateParams:()=>Ee});var n=r(7104),s=r.n(n),a=r(3942),o=r.n(a);const l=require("@babel/runtime-corejs3/core-js-stable/object/define-property");var i=r.n(l),c=r(66),p=r.n(c),u=r(7834),d=r.n(u);const m=require("@babel/runtime-corejs3/core-js-stable/promise");var h=r.n(m),g=r(9998),f=r.n(g),y=r(9968),v=r.n(y),E=r(8493),S=r.n(E),C=r(4235),b=r.n(C),x=r(7252),w=r.n(x),_=r(4994),A=r.n(_);const I=require("@babel/runtime-corejs3/core-js-stable/date/now");var N=r.n(I),q=r(9793),R=r.n(q),T=r(5572),P=r(3883),k=r.n(P),O=r(41);const M=require("lodash/isString");var j=r.n(M);const V=require("lodash/debounce");var D=r.n(V);const L=require("lodash/set");var U=r.n(L),z=r(1890);const B="spec_update_spec",$="spec_update_url",J="spec_update_json",F="spec_update_param",W="spec_update_empty_param_inclusion",H="spec_validate_param",K="spec_set_response",Z="spec_set_request",G="spec_set_mutated_request",Y="spec_log_request",X="spec_clear_response",Q="spec_clear_request",ee="spec_clear_validate_param",te="spec_update_operation_meta_value",re="spec_update_resolved",ne="spec_update_resolved_subtree",se="set_scheme";function ae(e){const t=(r=e,j()(r)?r:"").replace(/\t/g," ");var r;if("string"==typeof e)return{type:B,payload:t}}function oe(e){return{type:re,payload:e}}function le(e){return{type:$,payload:e}}function ie(e){return{type:J,payload:e}}const ce=e=>t=>{let{specActions:r,specSelectors:n,errActions:s}=t,{specStr:a}=n,o=null;try{e=e||a(),s.clear({source:"parser"}),o=R().load(e,{schema:q.JSON_SCHEMA})}catch(e){return console.error(e),s.newSpecErr({source:"parser",level:"error",message:e.reason,line:e.mark&&e.mark.line?e.mark.line+1:void 0})}return o&&"object"==typeof o?r.updateJsonSpec(o):{}};let pe=!1;const ue=(e,t)=>r=>{let{specActions:n,specSelectors:a,errActions:l,fn:{fetch:c,resolve:p,AST:u={}},getConfigs:d}=r;pe||(console.warn("specActions.resolveSpec is deprecated since v3.10.0 and will be removed in v4.0.0; use requestResolvedSubtree instead!"),pe=!0);const{modelPropertyMacro:m,parameterMacro:h,requestInterceptor:g,responseInterceptor:f}=d();void 0===e&&(e=a.specJson()),void 0===t&&(t=a.url());let y=u.getLineNumberForPath?u.getLineNumberForPath:()=>{},v=a.specStr();return 
p({fetch:c,spec:e,baseDoc:t,modelPropertyMacro:m,parameterMacro:h,requestInterceptor:g,responseInterceptor:f}).then((e=>{let{spec:t,errors:r}=e;if(l.clear({type:"thrown"}),s()(r)&&r.length>0){let e=o()(r).call(r,(e=>(console.error(e),e.line=e.fullPath?y(v,e.fullPath):null,e.path=e.fullPath?e.fullPath.join("."):null,e.level="error",e.type="thrown",e.source="resolver",i()(e,"message",{enumerable:!0,value:e.message}),e)));l.newThrownErrBatch(e)}return n.updateResolved(t)}))};let de=[];const me=D()((async()=>{const e=de.system;if(!e)return void console.error("debResolveSubtrees: don't have a system to operate on, aborting.");const{errActions:t,errSelectors:r,fn:{resolveSubtree:n,fetch:a,AST:l={}},specSelectors:c,specActions:u}=e;if(!n)return void console.error("Error: Swagger-Client did not provide a `resolveSubtree` method, doing nothing.");let m=l.getLineNumberForPath?l.getLineNumberForPath:()=>{};const g=c.specStr(),{modelPropertyMacro:y,parameterMacro:E,requestInterceptor:S,responseInterceptor:C}=e.getConfigs();try{var b=await p()(de).call(de,(async(e,l)=>{const{resultMap:p,specWithCurrentSubtrees:u}=await e,{errors:b,spec:x}=await n(u,l,{baseDoc:c.url(),modelPropertyMacro:y,parameterMacro:E,requestInterceptor:S,responseInterceptor:C});if(r.allErrors().size&&t.clearBy((e=>{var t;return"thrown"!==e.get("type")||"resolver"!==e.get("source")||!d()(t=e.get("fullPath")).call(t,((e,t)=>e===l[t]||void 0===l[t]))})),s()(b)&&b.length>0){let e=o()(b).call(b,(e=>(e.line=e.fullPath?m(g,e.fullPath):null,e.path=e.fullPath?e.fullPath.join("."):null,e.level="error",e.type="thrown",e.source="resolver",i()(e,"message",{enumerable:!0,value:e.message}),e)));t.newThrownErrBatch(e)}var w,_;x&&c.isOAS3()&&"components"===l[0]&&"securitySchemes"===l[1]&&await h().all(o()(w=f()(_=v()(x)).call(_,(e=>"openIdConnect"===e.type))).call(w,(async e=>{const t={url:e.openIdConnectUrl,requestInterceptor:S,responseInterceptor:C};try{const r=await a(t);r instanceof Error||r.status>=400?console.error(r.statusText+" "+t.url):e.openIdConnectData=JSON.parse(r.text)}catch(e){console.error(e)}})));return U()(p,l,x),U()(u,l,x),{resultMap:p,specWithCurrentSubtrees:u}}),h().resolve({resultMap:(c.specResolvedSubtree([])||(0,T.Map)()).toJS(),specWithCurrentSubtrees:c.specJson().toJS()}));delete de.system,de=[]}catch(e){console.error(e)}u.updateResolvedSubtree([],b.resultMap)}),35),he=e=>t=>{var r;S()(r=o()(de).call(de,(e=>e.join("@@")))).call(r,e.join("@@"))>-1||(de.push(e),de.system=t,me())};function ge(e,t,r,n,s){return{type:F,payload:{path:e,value:n,paramName:t,paramIn:r,isXml:s}}}function fe(e,t,r,n){return{type:F,payload:{path:e,param:t,value:r,isXml:n}}}const ye=(e,t)=>({type:ne,payload:{path:e,value:t}}),ve=()=>({type:ne,payload:{path:[],value:(0,T.Map)()}}),Ee=(e,t)=>({type:H,payload:{pathMethod:e,isOAS3:t}}),Se=(e,t,r,n)=>({type:W,payload:{pathMethod:e,paramName:t,paramIn:r,includeEmptyValue:n}});function Ce(e){return{type:ee,payload:{pathMethod:e}}}function be(e,t){return{type:te,payload:{path:e,value:t,key:"consumes_value"}}}function xe(e,t){return{type:te,payload:{path:e,value:t,key:"produces_value"}}}const we=(e,t,r)=>({payload:{path:e,method:t,res:r},type:K}),_e=(e,t,r)=>({payload:{path:e,method:t,req:r},type:Z}),Ae=(e,t,r)=>({payload:{path:e,method:t,req:r},type:G}),Ie=e=>({payload:e,type:Y}),Ne=e=>t=>{let{fn:r,specActions:n,specSelectors:a,getConfigs:l,oas3Selectors:i}=t,{pathName:c,method:p,operation:u}=e,{requestInterceptor:d,responseInterceptor:m}=l(),h=u.toJS();var 
g,y;u&&u.get("parameters")&&b()(g=f()(y=u.get("parameters")).call(y,(e=>e&&!0===e.get("allowEmptyValue")))).call(g,(t=>{if(a.parameterInclusionSettingFor([c,p],t.get("name"),t.get("in"))){e.parameters=e.parameters||{};const r=(0,z.cz)(t,e.parameters);(!r||r&&0===r.size)&&(e.parameters[t.get("name")]="")}}));if(e.contextUrl=k()(a.url()).toString(),h&&h.operationId?e.operationId=h.operationId:h&&c&&p&&(e.operationId=r.opId(h,c,p)),a.isOAS3()){const t=`${c}:${p}`;e.server=i.selectedServer(t)||i.selectedServer();const r=i.serverVariables({server:e.server,namespace:t}).toJS(),n=i.serverVariables({server:e.server}).toJS();e.serverVariables=w()(r).length?r:n,e.requestContentType=i.requestContentType(c,p),e.responseContentType=i.responseContentType(c,p)||"*/*";const a=i.requestBodyValue(c,p),l=i.requestBodyInclusionSetting(c,p);var v;if(a&&a.toJS)e.requestBody=f()(v=o()(a).call(a,(e=>T.Map.isMap(e)?e.get("value"):e))).call(v,((e,t)=>(s()(e)?0!==e.length:!(0,z.O2)(e))||l.get(t))).toJS();else e.requestBody=a}let E=A()({},e);E=r.buildRequest(E),n.setRequest(e.pathName,e.method,E);e.requestInterceptor=async t=>{let r=await d.apply(void 0,[t]),s=A()({},r);return n.setMutatedRequest(e.pathName,e.method,s),r},e.responseInterceptor=m;const S=N()();return r.execute(e).then((t=>{t.duration=N()()-S,n.setResponse(e.pathName,e.method,t)})).catch((t=>{"Failed to fetch"===t.message&&(t.name="",t.message='**Failed to fetch.** \n**Possible Reasons:** \n - CORS \n - Network Failure \n - URL scheme must be "http" or "https" for CORS request.'),n.setResponse(e.pathName,e.method,{error:!0,err:(0,O.serializeError)(t)})}))},qe=function(){let{path:e,method:t,...r}=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};return n=>{let{fn:{fetch:s},specSelectors:a,specActions:o}=n,l=a.specJsonWithResolvedSubtrees().toJS(),i=a.operationScheme(e,t),{requestContentType:c,responseContentType:p}=a.contentTypeValues([e,t]).toJS(),u=/xml/i.test(c),d=a.parameterValues([e,t],u).toJS();return o.executeRequest({...r,fetch:s,spec:l,pathName:e,method:t,parameters:d,requestContentType:c,scheme:i,responseContentType:p})}};function Re(e,t){return{type:X,payload:{path:e,method:t}}}function Te(e,t){return{type:Q,payload:{path:e,method:t}}}function Pe(e,t,r){return{type:se,payload:{scheme:e,path:t,method:r}}}},7038:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>l});var n=r(32),s=r(9381),a=r(3881),o=r(7508);function l(){return{statePlugins:{spec:{wrapActions:o,reducers:n.default,actions:s,selectors:a}}}}},32:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>h});var n=r(66),s=r.n(n),a=r(3942),o=r.n(a),l=r(4994),i=r.n(l),c=r(5572),p=r(1890),u=r(7504),d=r(3881),m=r(9381);const h={[m.UPDATE_SPEC]:(e,t)=>"string"==typeof t.payload?e.set("spec",t.payload):e,[m.UPDATE_URL]:(e,t)=>e.set("url",t.payload+""),[m.UPDATE_JSON]:(e,t)=>e.set("json",(0,p.oG)(t.payload)),[m.UPDATE_RESOLVED]:(e,t)=>e.setIn(["resolved"],(0,p.oG)(t.payload)),[m.UPDATE_RESOLVED_SUBTREE]:(e,t)=>{const{value:r,path:n}=t.payload;return e.setIn(["resolvedSubtrees",...n],(0,p.oG)(r))},[m.UPDATE_PARAM]:(e,t)=>{let{payload:r}=t,{path:n,paramName:s,paramIn:a,param:o,value:l,isXml:i}=r,c=o?(0,p.V9)(o):`${a}.${s}`;const u=i?"value_xml":"value";return e.setIn(["meta","paths",...n,"parameters",c,u],l)},[m.UPDATE_EMPTY_PARAM_INCLUSION]:(e,t)=>{let{payload:r}=t,{pathMethod:n,paramName:s,paramIn:a,includeEmptyValue:o}=r;if(!s||!a)return console.warn("Warning: UPDATE_EMPTY_PARAM_INCLUSION could not generate a paramKey."),e;const l=`${a}.${s}`;return 
e.setIn(["meta","paths",...n,"parameter_inclusions",l],o)},[m.VALIDATE_PARAMS]:(e,t)=>{let{payload:{pathMethod:r,isOAS3:n}}=t;const a=(0,d.specJsonWithResolvedSubtrees)(e).getIn(["paths",...r]),o=(0,d.parameterValues)(e,r).toJS();return e.updateIn(["meta","paths",...r,"parameters"],(0,c.fromJS)({}),(t=>{var l;return s()(l=a.get("parameters",(0,c.List)())).call(l,((t,s)=>{const a=(0,p.cz)(s,o),l=(0,d.parameterInclusionSettingFor)(e,r,s.get("name"),s.get("in")),i=(0,p.Ik)(s,a,{bypassRequiredCheck:l,isOAS3:n});return t.setIn([(0,p.V9)(s),"errors"],(0,c.fromJS)(i))}),t)}))},[m.CLEAR_VALIDATE_PARAMS]:(e,t)=>{let{payload:{pathMethod:r}}=t;return e.updateIn(["meta","paths",...r,"parameters"],(0,c.fromJS)([]),(e=>o()(e).call(e,(e=>e.set("errors",(0,c.fromJS)([]))))))},[m.SET_RESPONSE]:(e,t)=>{let r,{payload:{res:n,path:s,method:a}}=t;r=n.error?i()({error:!0,name:n.err.name,message:n.err.message,statusCode:n.err.statusCode},n.err.response):n,r.headers=r.headers||{};let o=e.setIn(["responses",s,a],(0,p.oG)(r));return u.Z.Blob&&n.data instanceof u.Z.Blob&&(o=o.setIn(["responses",s,a,"text"],n.data)),o},[m.SET_REQUEST]:(e,t)=>{let{payload:{req:r,path:n,method:s}}=t;return e.setIn(["requests",n,s],(0,p.oG)(r))},[m.SET_MUTATED_REQUEST]:(e,t)=>{let{payload:{req:r,path:n,method:s}}=t;return e.setIn(["mutatedRequests",n,s],(0,p.oG)(r))},[m.UPDATE_OPERATION_META_VALUE]:(e,t)=>{let{payload:{path:r,value:n,key:s}}=t,a=["paths",...r],o=["meta","paths",...r];return e.getIn(["json",...a])||e.getIn(["resolved",...a])||e.getIn(["resolvedSubtrees",...a])?e.setIn([...o,s],(0,c.fromJS)(n)):e},[m.CLEAR_RESPONSE]:(e,t)=>{let{payload:{path:r,method:n}}=t;return e.deleteIn(["responses",r,n])},[m.CLEAR_REQUEST]:(e,t)=>{let{payload:{path:r,method:n}}=t;return e.deleteIn(["requests",r,n])},[m.SET_SCHEME]:(e,t)=>{let{payload:{scheme:r,path:n,method:s}}=t;return n&&s?e.setIn(["scheme",n,s],r):n||s?void 0:e.setIn(["scheme","_defaultScheme"],r)}}},3881:(e,t,r)=>{"use strict";r.r(t),r.d(t,{lastError:()=>N,url:()=>q,specStr:()=>R,specSource:()=>T,specJson:()=>P,specResolved:()=>k,specResolvedSubtree:()=>O,specJsonWithResolvedSubtrees:()=>j,spec:()=>V,isOAS3:()=>D,info:()=>L,externalDocs:()=>U,version:()=>z,semver:()=>B,paths:()=>$,operations:()=>J,consumes:()=>F,produces:()=>W,security:()=>H,securityDefinitions:()=>K,findDefinition:()=>Z,definitions:()=>G,basePath:()=>Y,host:()=>X,schemes:()=>Q,operationsWithRootInherited:()=>ee,tags:()=>te,tagDetails:()=>re,operationsWithTags:()=>ne,taggedOperations:()=>se,responses:()=>ae,requests:()=>oe,mutatedRequests:()=>le,responseFor:()=>ie,requestFor:()=>ce,mutatedRequestFor:()=>pe,allowTryItOutFor:()=>ue,parameterWithMetaByIdentity:()=>de,parameterInclusionSettingFor:()=>me,parameterWithMeta:()=>he,operationWithMeta:()=>ge,getParameter:()=>fe,hasHost:()=>ye,parameterValues:()=>ve,parametersIncludeIn:()=>Ee,parametersIncludeType:()=>Se,contentTypeValues:()=>Ce,currentProducesFor:()=>be,producesOptionsFor:()=>xe,consumesOptionsFor:()=>we,operationScheme:()=>_e,canExecuteScheme:()=>Ae,validationErrors:()=>Ie,validateBeforeExecute:()=>Ne,getOAS3RequiredRequestBodyContentType:()=>qe,isMediaTypeSchemaPropertiesEqual:()=>Re});var n=r(600),s=r.n(n),a=r(4235),o=r.n(a),l=r(8493),i=r.n(l),c=r(3942),p=r.n(c),u=r(9998),d=r.n(u),m=r(3580),h=r.n(m),g=r(66),f=r.n(g),y=r(9247),v=r.n(y),E=r(5626),S=r.n(E),C=r(7104),b=r.n(C),x=r(6814),w=r(1890),_=r(5572);const 
A=["get","put","post","delete","options","head","patch","trace"],I=e=>e||(0,_.Map)(),N=(0,x.createSelector)(I,(e=>e.get("lastError"))),q=(0,x.createSelector)(I,(e=>e.get("url"))),R=(0,x.createSelector)(I,(e=>e.get("spec")||"")),T=(0,x.createSelector)(I,(e=>e.get("specSource")||"not-editor")),P=(0,x.createSelector)(I,(e=>e.get("json",(0,_.Map)()))),k=(0,x.createSelector)(I,(e=>e.get("resolved",(0,_.Map)()))),O=(e,t)=>e.getIn(["resolvedSubtrees",...t],void 0),M=(e,t)=>_.Map.isMap(e)&&_.Map.isMap(t)?t.get("$$ref")?t:(0,_.OrderedMap)().mergeWith(M,e,t):t,j=(0,x.createSelector)(I,(e=>(0,_.OrderedMap)().mergeWith(M,e.get("json"),e.get("resolvedSubtrees")))),V=e=>P(e),D=(0,x.createSelector)(V,(()=>!1)),L=(0,x.createSelector)(V,(e=>Te(e&&e.get("info")))),U=(0,x.createSelector)(V,(e=>Te(e&&e.get("externalDocs")))),z=(0,x.createSelector)(L,(e=>e&&e.get("version"))),B=(0,x.createSelector)(z,(e=>{var t;return s()(t=/v?([0-9]*)\.([0-9]*)\.([0-9]*)/i.exec(e)).call(t,1)})),$=(0,x.createSelector)(j,(e=>e.get("paths"))),J=(0,x.createSelector)($,(e=>{if(!e||e.size<1)return(0,_.List)();let t=(0,_.List)();return e&&o()(e)?(o()(e).call(e,((e,r)=>{if(!e||!o()(e))return{};o()(e).call(e,((e,n)=>{i()(A).call(A,n)<0||(t=t.push((0,_.fromJS)({path:r,method:n,operation:e,id:`${n}-${r}`})))}))})),t):(0,_.List)()})),F=(0,x.createSelector)(V,(e=>(0,_.Set)(e.get("consumes")))),W=(0,x.createSelector)(V,(e=>(0,_.Set)(e.get("produces")))),H=(0,x.createSelector)(V,(e=>e.get("security",(0,_.List)()))),K=(0,x.createSelector)(V,(e=>e.get("securityDefinitions"))),Z=(e,t)=>{const r=e.getIn(["resolvedSubtrees","definitions",t],null),n=e.getIn(["json","definitions",t],null);return r||n||null},G=(0,x.createSelector)(V,(e=>{const t=e.get("definitions");return _.Map.isMap(t)?t:(0,_.Map)()})),Y=(0,x.createSelector)(V,(e=>e.get("basePath"))),X=(0,x.createSelector)(V,(e=>e.get("host"))),Q=(0,x.createSelector)(V,(e=>e.get("schemes",(0,_.Map)()))),ee=(0,x.createSelector)(J,F,W,((e,t,r)=>p()(e).call(e,(e=>e.update("operation",(e=>{if(e){if(!_.Map.isMap(e))return;return e.withMutations((e=>(e.get("consumes")||e.update("consumes",(e=>(0,_.Set)(e).merge(t))),e.get("produces")||e.update("produces",(e=>(0,_.Set)(e).merge(r))),e)))}return(0,_.Map)()})))))),te=(0,x.createSelector)(V,(e=>{const t=e.get("tags",(0,_.List)());return _.List.isList(t)?d()(t).call(t,(e=>_.Map.isMap(e))):(0,_.List)()})),re=(e,t)=>{var r;let n=te(e)||(0,_.List)();return h()(r=d()(n).call(n,_.Map.isMap)).call(r,(e=>e.get("name")===t),(0,_.Map)())},ne=(0,x.createSelector)(ee,te,((e,t)=>f()(e).call(e,((e,t)=>{let r=(0,_.Set)(t.getIn(["operation","tags"]));return r.count()<1?e.update("default",(0,_.List)(),(e=>e.push(t))):f()(r).call(r,((e,r)=>e.update(r,(0,_.List)(),(e=>e.push(t)))),e)}),f()(t).call(t,((e,t)=>e.set(t.get("name"),(0,_.List)())),(0,_.OrderedMap)())))),se=e=>t=>{var r;let{getConfigs:n}=t,{tagsSorter:s,operationsSorter:a}=n();return p()(r=ne(e).sortBy(((e,t)=>t),((e,t)=>{let r="function"==typeof s?s:w.wh.tagsSorter[s];return r?r(e,t):null}))).call(r,((t,r)=>{let n="function"==typeof a?a:w.wh.operationsSorter[a],s=n?v()(t).call(t,n):t;return(0,_.Map)({tagDetails:re(e,r),operations:s})}))},ae=(0,x.createSelector)(I,(e=>e.get("responses",(0,_.Map)()))),oe=(0,x.createSelector)(I,(e=>e.get("requests",(0,_.Map)()))),le=(0,x.createSelector)(I,(e=>e.get("mutatedRequests",(0,_.Map)()))),ie=(e,t,r)=>ae(e).getIn([t,r],null),ce=(e,t,r)=>oe(e).getIn([t,r],null),pe=(e,t,r)=>le(e).getIn([t,r],null),ue=()=>!0,de=(e,t,r)=>{const 
n=j(e).getIn(["paths",...t,"parameters"],(0,_.OrderedMap)()),s=e.getIn(["meta","paths",...t,"parameters"],(0,_.OrderedMap)()),a=p()(n).call(n,(e=>{const t=s.get(`${r.get("in")}.${r.get("name")}`),n=s.get(`${r.get("in")}.${r.get("name")}.hash-${r.hashCode()}`);return(0,_.OrderedMap)().merge(e,t,n)}));return h()(a).call(a,(e=>e.get("in")===r.get("in")&&e.get("name")===r.get("name")),(0,_.OrderedMap)())},me=(e,t,r,n)=>{const s=`${n}.${r}`;return e.getIn(["meta","paths",...t,"parameter_inclusions",s],!1)},he=(e,t,r,n)=>{const s=j(e).getIn(["paths",...t,"parameters"],(0,_.OrderedMap)()),a=h()(s).call(s,(e=>e.get("in")===n&&e.get("name")===r),(0,_.OrderedMap)());return de(e,t,a)},ge=(e,t,r)=>{var n;const s=j(e).getIn(["paths",t,r],(0,_.OrderedMap)()),a=e.getIn(["meta","paths",t,r],(0,_.OrderedMap)()),o=p()(n=s.get("parameters",(0,_.List)())).call(n,(n=>de(e,[t,r],n)));return(0,_.OrderedMap)().merge(s,a).set("parameters",o)};function fe(e,t,r,n){t=t||[];let s=e.getIn(["meta","paths",...t,"parameters"],(0,_.fromJS)([]));return h()(s).call(s,(e=>_.Map.isMap(e)&&e.get("name")===r&&e.get("in")===n))||(0,_.Map)()}const ye=(0,x.createSelector)(V,(e=>{const t=e.get("host");return"string"==typeof t&&t.length>0&&"/"!==t[0]}));function ve(e,t,r){t=t||[];let n=ge(e,...t).get("parameters",(0,_.List)());return f()(n).call(n,((e,t)=>{let n=r&&"body"===t.get("in")?t.get("value_xml"):t.get("value");return e.set((0,w.V9)(t,{allowHashes:!1}),n)}),(0,_.fromJS)({}))}function Ee(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"";if(_.List.isList(e))return S()(e).call(e,(e=>_.Map.isMap(e)&&e.get("in")===t))}function Se(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"";if(_.List.isList(e))return S()(e).call(e,(e=>_.Map.isMap(e)&&e.get("type")===t))}function Ce(e,t){t=t||[];let r=j(e).getIn(["paths",...t],(0,_.fromJS)({})),n=e.getIn(["meta","paths",...t],(0,_.fromJS)({})),s=be(e,t);const a=r.get("parameters")||new _.List,o=n.get("consumes_value")?n.get("consumes_value"):Se(a,"file")?"multipart/form-data":Se(a,"formData")?"application/x-www-form-urlencoded":void 0;return(0,_.fromJS)({requestContentType:o,responseContentType:s})}function be(e,t){t=t||[];const r=j(e).getIn(["paths",...t],null);if(null===r)return;const n=e.getIn(["meta","paths",...t,"produces_value"],null),s=r.getIn(["produces",0],null);return n||s||"application/json"}function xe(e,t){t=t||[];const r=j(e),n=r.getIn(["paths",...t],null);if(null===n)return;const[s]=t,a=n.get("produces",null),o=r.getIn(["paths",s,"produces"],null),l=r.getIn(["produces"],null);return a||o||l}function we(e,t){t=t||[];const r=j(e),n=r.getIn(["paths",...t],null);if(null===n)return;const[s]=t,a=n.get("consumes",null),o=r.getIn(["paths",s,"consumes"],null),l=r.getIn(["consumes"],null);return a||o||l}const _e=(e,t,r)=>{let n=e.get("url").match(/^([a-z][a-z0-9+\-.]*):/),s=b()(n)?n[1]:null;return e.getIn(["scheme",t,r])||e.getIn(["scheme","_defaultScheme"])||s||""},Ae=(e,t,r)=>{var n;return i()(n=["http","https"]).call(n,_e(e,t,r))>-1},Ie=(e,t)=>{t=t||[];let r=e.getIn(["meta","paths",...t,"parameters"],(0,_.fromJS)([]));const n=[];return o()(r).call(r,(e=>{let t=e.get("errors");t&&t.count()&&o()(t).call(t,(e=>n.push(e)))})),n},Ne=(e,t)=>0===Ie(e,t).length,qe=(e,t)=>{var r;let n={requestBody:!1,requestContentType:{}},s=e.getIn(["resolvedSubtrees","paths",...t,"requestBody"],(0,_.fromJS)([]));return s.size<1||(s.getIn(["required"])&&(n.requestBody=s.getIn(["required"])),o()(r=s.getIn(["content"]).entrySeq()).call(r,(e=>{const 
t=e[0];if(e[1].getIn(["schema","required"])){const r=e[1].getIn(["schema","required"]).toJS();n.requestContentType[t]=r}}))),n},Re=(e,t,r,n)=>{if((r||n)&&r===n)return!0;let s=e.getIn(["resolvedSubtrees","paths",...t,"requestBody","content"],(0,_.fromJS)([]));if(s.size<2||!r||!n)return!1;let a=s.getIn([r,"schema","properties"],(0,_.fromJS)([])),o=s.getIn([n,"schema","properties"],(0,_.fromJS)([]));return!!a.equals(o)};function Te(e){return _.Map.isMap(e)?e:new _.Map}},7508:(e,t,r)=>{"use strict";r.r(t),r.d(t,{updateSpec:()=>c,updateJsonSpec:()=>p,executeRequest:()=>u,validateParams:()=>d});var n=r(7252),s=r.n(n),a=r(4235),o=r.n(a),l=r(1712),i=r.n(l);const c=(e,t)=>{let{specActions:r}=t;return function(){e(...arguments),r.parseToJson(...arguments)}},p=(e,t)=>{let{specActions:r}=t;return function(){for(var t=arguments.length,n=new Array(t),a=0;a{i()(c,[e]).$ref&&r.requestResolvedSubtree(["paths",e])})),r.requestResolvedSubtree(["components","securitySchemes"])}},u=(e,t)=>{let{specActions:r}=t;return t=>(r.logRequest(t),e(t))},d=(e,t)=>{let{specSelectors:r}=t;return t=>e(t,r.isOAS3())}},4852:(e,t,r)=>{"use strict";r.r(t),r.d(t,{loaded:()=>n});const n=(e,t)=>function(){e(...arguments);const r=t.getConfigs().withCredentials;void 0!==r&&(t.fn.fetch.withCredentials="string"==typeof r?"true"===r:!!r)}},8901:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>d});const n=require("swagger-client/es/resolver");var s=r.n(n);const a=require("swagger-client/es/execute"),o=require("swagger-client/es/http");var l=r.n(o);const i=require("swagger-client/es/subtree-resolver");var c=r.n(i),p=r(6765),u=r(4852);function d(e){let{configs:t,getConfigs:r}=e;return{fn:{fetch:(0,o.makeHttp)(l(),t.preFetch,t.postFetch),buildRequest:a.buildRequest,execute:a.execute,resolve:s(),resolveSubtree:function(e,t,n){if(void 0===n){const e=r();n={modelPropertyMacro:e.modelPropertyMacro,parameterMacro:e.parameterMacro,requestInterceptor:e.requestInterceptor,responseInterceptor:e.responseInterceptor}}for(var s=arguments.length,a=new Array(s>3?s-3:0),o=3;o{"use strict";r.r(t),r.d(t,{default:()=>s});var n=r(1890);function s(){return{fn:{shallowEqualKeys:n.be}}}},8347:(e,t,r)=>{"use strict";r.r(t),r.d(t,{getDisplayName:()=>n});const n=e=>e.displayName||e.name||"Component"},3420:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>c});var n=r(8344),s=r.n(n),a=r(1890),o=r(290),l=r(8347),i=r(7481);const c=e=>{let{getComponents:t,getStore:r,getSystem:n}=e;const c=(p=(0,o.getComponent)(n,r,t),(0,a.HP)(p,(function(){for(var e=arguments.length,t=new Array(e),r=0;r(0,i.Z)(e,(function(){for(var e=arguments.length,t=new Array(e),r=0;r{"use strict";r.r(t),r.d(t,{getComponent:()=>x,render:()=>b,withMappedContainer:()=>C});var n=r(4250),s=r.n(n),a=r(7252),o=r.n(a),l=r(6689),i=r.n(l);const c=require("react-dom");var p=r.n(c),u=r(6695);const d=require("react-redux"),m=require("lodash/omit");var h=r.n(m);const g=require("lodash/identity");var f=r.n(g);const y=e=>t=>{const{fn:r}=e();class n extends l.Component{render(){return i().createElement(t,s()({},e(),this.props,this.context))}}return n.displayName=`WithSystem(${r.getDisplayName(t)})`,n},v=(e,t)=>r=>{const{fn:n}=e();class a extends l.Component{render(){return i().createElement(d.Provider,{store:t},i().createElement(r,s()({},this.props,this.context)))}}return a.displayName=`WithRoot(${n.getDisplayName(r)})`,a},E=(e,t,r)=>(0,u.compose)(r?v(e,r):f(),(0,d.connect)(((r,n)=>{var s;const a={...n,...e()},o=(null===(s=t.prototype)||void 0===s?void 0:s.mapStateToProps)||(e=>({state:e}));return 
o(r,a)})),y(e))(t),S=(e,t,r,n)=>{for(const s in t){const a=t[s];"function"==typeof a&&a(r[s],n[s],e())}},C=(e,t,r)=>(t,n)=>{const{fn:s}=e(),a=r(t,"root");class c extends l.Component{constructor(t,r){super(t,r),S(e,n,t,{})}UNSAFE_componentWillReceiveProps(t){S(e,n,t,this.props)}render(){const e=h()(this.props,n?o()(n):[]);return i().createElement(a,e)}}return c.displayName=`WithMappedContainer(${s.getDisplayName(a)})`,c},b=(e,t,r,n)=>s=>{const a=r(e,t,n)("App","root");p().render(i().createElement(a,null),s)},x=(e,t,r)=>function(n,s){let a=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};if("string"!=typeof n)throw new TypeError("Need a string, to fetch a component. Was given a "+typeof n);const o=r(n);return o?s?"root"===s?E(e,o,t()):E(e,o):o:(a.failSilently||e().log.warn("Could not find component:",n),null)}},6068:(e,t,r)=>{"use strict";r.d(t,{d3:()=>i(),C2:()=>V});var n=r(7252),s=r.n(n),a=r(2605),o=r.n(a);const l=require("react-syntax-highlighter/dist/esm/light");var i=r.n(l);const c=require("react-syntax-highlighter/dist/esm/languages/hljs/javascript");var p=r.n(c);const u=require("react-syntax-highlighter/dist/esm/languages/hljs/json");var d=r.n(u);const m=require("react-syntax-highlighter/dist/esm/languages/hljs/xml");var h=r.n(m);const g=require("react-syntax-highlighter/dist/esm/languages/hljs/bash");var f=r.n(g);const y=require("react-syntax-highlighter/dist/esm/languages/hljs/yaml");var v=r.n(y);const E=require("react-syntax-highlighter/dist/esm/languages/hljs/http");var S=r.n(E);const C=require("react-syntax-highlighter/dist/esm/languages/hljs/powershell");var b=r.n(C);const x=require("react-syntax-highlighter/dist/esm/styles/hljs/agate");var w=r.n(x);const _=require("react-syntax-highlighter/dist/esm/styles/hljs/arta");var A=r.n(_);const I=require("react-syntax-highlighter/dist/esm/styles/hljs/monokai");var N=r.n(I);const q=require("react-syntax-highlighter/dist/esm/styles/hljs/nord");var R=r.n(q);const T=require("react-syntax-highlighter/dist/esm/styles/hljs/obsidian");var P=r.n(T);const k=require("react-syntax-highlighter/dist/esm/styles/hljs/tomorrow-night");var O=r.n(k);i().registerLanguage("json",d()),i().registerLanguage("js",p()),i().registerLanguage("xml",h()),i().registerLanguage("yaml",v()),i().registerLanguage("http",S()),i().registerLanguage("bash",f()),i().registerLanguage("powershell",b()),i().registerLanguage("javascript",p());const M={agate:w(),arta:A(),monokai:N(),nord:R(),obsidian:P(),"tomorrow-night":O()},j=s()(M),V=e=>o()(j).call(j,e)?M[e]:(console.warn(`Request style '${e}' is not available, returning default instead`),w())},1890:(e,t,r)=>{"use strict";r.d(t,{r3:()=>je,GZ:()=>De,Xb:()=>Qe,oJ:()=>$e,XV:()=>He,iQ:()=>xe,J6:()=>Je,DR:()=>_e,oG:()=>me,Uj:()=>Xe,QG:()=>Be,po:()=>We,nX:()=>Fe,gp:()=>we,xi:()=>Oe,kJ:()=>ve,O2:()=>tt,LQ:()=>ge,Wl:()=>ye,Kn:()=>fe,HP:()=>Ee,AF:()=>he,D$:()=>Ze,Ay:()=>Se,Q2:()=>Ce,mz:()=>de,V9:()=>Ge,cz:()=>Ye,UG:()=>Me,Zl:()=>Ae,hW:()=>ze,Nm:()=>Ue,be:()=>Le,wh:()=>Ve,Pz:()=>Ke,_5:()=>be,Ik:()=>Ne});var n=r(7104),s=r.n(n),a=r(3942),o=r.n(a),l=r(7862),i=r.n(l),c=r(4235),p=r.n(c),u=r(9998),d=r.n(u),m=r(7252),h=r.n(m),g=(r(593),r(66)),f=r.n(g),y=r(4994),v=r.n(y),E=r(9247),S=r.n(E),C=r(600),b=r.n(C),x=(r(4883),r(5626)),w=r.n(x),_=(r(2605),r(8344)),A=r.n(_),I=r(8493),N=r.n(I),q=r(3580),R=r.n(q),T=r(3262),P=r.n(T),k=r(7390),O=r.n(k),M=r(5572),j=r.n(M);const V=require("@braintree/sanitize-url"),D=require("lodash/camelCase");var L=r.n(D);const U=require("lodash/upperFirst");var z=r.n(U),B=r(541),$=r.n(B);const 
J=require("lodash/find");var F=r.n(J);const W=require("lodash/some");var H=r.n(W);const K=require("lodash/eq");var Z=r.n(K),G=r(5716),Y=r.n(G),X=r(4128),Q=r(7504);const ee=require("css.escape");var te=r.n(ee),re=r(9069),ne=r(185),se=r.n(ne);const ae=require("sha.js");var oe=r.n(ae),le=r(9793),ie=r.n(le),ce=r(871).Buffer;const pe="default",ue=e=>j().Iterable.isIterable(e);function de(e){return fe(e)?ue(e)?e.toJS():e:{}}function me(e){var t,r;if(ue(e))return e;if(e instanceof Q.Z.File)return e;if(!fe(e))return e;if(s()(e))return o()(r=j().Seq(e)).call(r,me).toList();if(Y()(i()(e))){var n;const t=function(e){if(!Y()(i()(e)))return e;const t={},r="_**[]",n={};for(let s of i()(e).call(e))if(t[s[0]]||n[s[0]]&&n[s[0]].containsMultiple){if(!n[s[0]]){n[s[0]]={containsMultiple:!0,length:1},t[`${s[0]}${r}${n[s[0]].length}`]=t[s[0]],delete t[s[0]]}n[s[0]].length+=1,t[`${s[0]}${r}${n[s[0]].length}`]=s[1]}else t[s[0]]=s[1];return t}(e);return o()(n=j().OrderedMap(t)).call(n,me)}return o()(t=j().OrderedMap(e)).call(t,me)}function he(e){return s()(e)?e:[e]}function ge(e){return"function"==typeof e}function fe(e){return!!e&&"object"==typeof e}function ye(e){return"function"==typeof e}function ve(e){return s()(e)}const Ee=$();function Se(e,t){var r;return f()(r=h()(e)).call(r,((r,n)=>(r[n]=t(e[n],n),r)),{})}function Ce(e,t){var r;return f()(r=h()(e)).call(r,((r,n)=>{let s=t(e[n],n);return s&&"object"==typeof s&&v()(r,s),r}),{})}function be(e){return t=>{let{dispatch:r,getState:n}=t;return t=>r=>"function"==typeof r?r(e()):t(r)}}function xe(e){var t;let r=e.keySeq();return r.contains(pe)?pe:S()(t=d()(r).call(r,(e=>"2"===(e+"")[0]))).call(t).first()}function we(e,t){if(!j().Iterable.isIterable(e))return j().List();let r=e.getIn(s()(t)?t:[t]);return j().List.isList(r)?r:j().List()}function _e(e){let t,r=[/filename\*=[^']+'\w*'"([^"]+)";?/i,/filename\*=[^']+'\w*'([^;]+);?/i,/filename="([^;]*);?"/i,/filename=([^;]*);?/i];if(w()(r).call(r,(r=>(t=r.exec(e),null!==t))),null!==t&&t.length>1)try{return decodeURIComponent(t[1])}catch(e){console.error(e)}return null}function Ae(e){return t=e.replace(/\.[^./]*$/,""),z()(L()(t));var t}function Ie(e,t,r,n,a){if(!t)return[];let l=[],i=t.get("nullable"),c=t.get("required"),u=t.get("maximum"),m=t.get("minimum"),h=t.get("type"),g=t.get("format"),f=t.get("maxLength"),y=t.get("minLength"),v=t.get("uniqueItems"),E=t.get("maxItems"),S=t.get("minItems"),C=t.get("pattern");const b=r||!0===c,x=null!=e;if(i&&null===e||!h||!(b||x&&"array"===h||!(!b&&!x)))return[];let _="string"===h&&e,A="array"===h&&s()(e)&&e.length,I="array"===h&&j().List.isList(e)&&e.count();const N=[_,A,I,"array"===h&&"string"==typeof e&&e,"file"===h&&e instanceof Q.Z.File,"boolean"===h&&(e||!1===e),"number"===h&&(e||0===e),"integer"===h&&(e||0===e),"object"===h&&"object"==typeof e&&null!==e,"object"===h&&"string"==typeof e&&e],q=w()(N).call(N,(e=>!!e));if(b&&!q&&!n)return l.push("Required field is not provided"),l;if("object"===h&&(null===a||"application/json"===a)){let r=e;if("string"==typeof e)try{r=JSON.parse(e)}catch(e){return l.push("Parameter string value must be valid JSON"),l}var R;if(t&&t.has("required")&&ye(c.isList)&&c.isList()&&p()(c).call(c,(e=>{void 0===r[e]&&l.push({propKey:e,error:"Required property not found"})})),t&&t.has("properties"))p()(R=t.get("properties")).call(R,((e,t)=>{const s=Ie(r[t],e,!1,n,a);l.push(...o()(s).call(s,(e=>({propKey:t,error:e}))))}))}if(C){let t=((e,t)=>{if(!new RegExp(t).test(e))return"Value must follow pattern "+t})(e,C);t&&l.push(t)}if(S&&"array"===h){let 
t=((e,t)=>{if(!e&&t>=1||e&&e.length{if(e&&e.length>t)return`Array must not contain more then ${t} item${1===t?"":"s"}`})(e,E);t&&l.push({needRemove:!0,error:t})}if(v&&"array"===h){let t=((e,t)=>{if(e&&("true"===t||!0===t)){const t=(0,M.fromJS)(e),r=t.toSet();if(e.length>r.size){let e=(0,M.Set)();if(p()(t).call(t,((r,n)=>{d()(t).call(t,(e=>ye(e.equals)?e.equals(r):e===r)).size>1&&(e=e.add(n))})),0!==e.size)return o()(e).call(e,(e=>({index:e,error:"No duplicates allowed."}))).toArray()}}})(e,v);t&&l.push(...t)}if(f||0===f){let t=((e,t)=>{if(e.length>t)return`Value must be no longer than ${t} character${1!==t?"s":""}`})(e,f);t&&l.push(t)}if(y){let t=((e,t)=>{if(e.length{if(e>t)return`Value must be less than ${t}`})(e,u);t&&l.push(t)}if(m||0===m){let t=((e,t)=>{if(e{if(isNaN(Date.parse(e)))return"Value must be a DateTime"})(e):"uuid"===g?(e=>{if(e=e.toString().toLowerCase(),!/^[{(]?[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}[)}]?$/.test(e))return"Value must be a Guid"})(e):(e=>{if(e&&"string"!=typeof e)return"Value must be a string"})(e),!t)return l;l.push(t)}else if("boolean"===h){let t=(e=>{if("true"!==e&&"false"!==e&&!0!==e&&!1!==e)return"Value must be a boolean"})(e);if(!t)return l;l.push(t)}else if("number"===h){let t=(e=>{if(!/^-?\d+(\.?\d+)?$/.test(e))return"Value must be a number"})(e);if(!t)return l;l.push(t)}else if("integer"===h){let t=(e=>{if(!/^-?\d+$/.test(e))return"Value must be an integer"})(e);if(!t)return l;l.push(t)}else if("array"===h){if(!A&&!I)return l;e&&p()(e).call(e,((e,r)=>{const s=Ie(e,t.get("items"),!1,n,a);l.push(...o()(s).call(s,(e=>({index:r,error:e}))))}))}else if("file"===h){let t=(e=>{if(e&&!(e instanceof Q.Z.File))return"Value must be a file"})(e);if(!t)return l;l.push(t)}return l}const Ne=function(e,t){let{isOAS3:r=!1,bypassRequiredCheck:n=!1}=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},s=e.get("required"),{schema:a,parameterContentMediaType:o}=(0,re.Z)(e,{isOAS3:r});return Ie(t,a,s,n,o)},qe=(e,t,r)=>{if(e&&!e.xml&&(e.xml={}),e&&!e.xml.name){if(!e.$$ref&&(e.type||e.items||e.properties||e.additionalProperties))return'\n\x3c!-- XML example cannot be generated; root element name is undefined --\x3e';if(e.$$ref){let t=e.$$ref.match(/\S*\/(\S+)$/);e.xml.name=t[1]}}return(0,X.memoizedCreateXMLExample)(e,t,r)},Re=[{when:/json/,shouldStringifyTypes:["string"]}],Te=["object"],Pe=(e,t,r,n)=>{const s=(0,X.memoizedSampleFromSchema)(e,t,n),a=typeof s,o=f()(Re).call(Re,((e,t)=>t.when.test(r)?[...e,...t.shouldStringifyTypes]:e),Te);return H()(o,(e=>e===a))?A()(s,null,2):s},ke=(e,t,r,n)=>{const s=Pe(e,t,r,n);let a;try{a=ie().dump(ie().load(s),{lineWidth:-1},{schema:le.JSON_SCHEMA}),"\n"===a[a.length-1]&&(a=b()(a).call(a,0,a.length-1))}catch(e){return console.error(e),"error: could not generate yaml example"}return a.replace(/\t/g," ")},Oe=function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"",r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},n=arguments.length>3&&void 0!==arguments[3]?arguments[3]:void 0;return e&&ye(e.toJS)&&(e=e.toJS()),n&&ye(n.toJS)&&(n=n.toJS()),/xml/.test(t)?qe(e,r,n):/(yaml|yml)/.test(t)?ke(e,r,t,n):Pe(e,r,t,n)},Me=()=>{let e={},t=Q.Z.location.search;if(!t)return{};if(""!=t){let r=t.substr(1).split("&");for(let t in r)Object.prototype.hasOwnProperty.call(r,t)&&(t=r[t].split("="),e[decodeURIComponent(t[0])]=t[1]&&decodeURIComponent(t[1])||"")}return e},je=e=>{let t;return t=e instanceof 
ce?e:ce.from(e.toString(),"utf-8"),t.toString("base64")},Ve={operationsSorter:{alpha:(e,t)=>e.get("path").localeCompare(t.get("path")),method:(e,t)=>e.get("method").localeCompare(t.get("method"))},tagsSorter:{alpha:(e,t)=>e.localeCompare(t)}},De=e=>{let t=[];for(let r in e){let n=e[r];void 0!==n&&""!==n&&t.push([r,"=",encodeURIComponent(n).replace(/%20/g,"+")].join(""))}return t.join("&")},Le=(e,t,r)=>!!F()(r,(r=>Z()(e[r],t[r])));function Ue(e){return"string"!=typeof e||""===e?"":(0,V.sanitizeUrl)(e)}function ze(e){return!(!e||N()(e).call(e,"localhost")>=0||N()(e).call(e,"127.0.0.1")>=0||"none"===e)}function Be(e){if(!j().OrderedMap.isOrderedMap(e))return null;if(!e.size)return null;const t=R()(e).call(e,((e,t)=>P()(t).call(t,"2")&&h()(e.get("content")||{}).length>0)),r=e.get("default")||j().OrderedMap(),n=(r.get("content")||j().OrderedMap()).keySeq().toJS().length?r:null;return t||n}const $e=e=>"string"==typeof e||e instanceof String?O()(e).call(e).replace(/\s/g,"%20"):"",Je=e=>te()($e(e).replace(/%20/g,"_")),Fe=e=>d()(e).call(e,((e,t)=>/^x-/.test(t))),We=e=>d()(e).call(e,((e,t)=>/^pattern|maxLength|minLength|maximum|minimum/.test(t)));function He(e,t){var r;let n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:()=>!0;if("object"!=typeof e||s()(e)||null===e||!t)return e;const a=v()({},e);return p()(r=h()(a)).call(r,(e=>{e===t&&n(a[e],e)?delete a[e]:a[e]=He(a[e],t,n)})),a}function Ke(e){if("string"==typeof e)return e;if(e&&e.toJS&&(e=e.toJS()),"object"==typeof e&&null!==e)try{return A()(e,null,2)}catch(t){return String(e)}return null==e?"":e.toString()}function Ze(e){return"number"==typeof e?e.toString():e}function Ge(e){let{returnAll:t=!1,allowHashes:r=!0}=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};if(!j().Map.isMap(e))throw new Error("paramToIdentifier: received a non-Im.Map parameter as input");const n=e.get("name"),s=e.get("in");let a=[];return e&&e.hashCode&&s&&n&&r&&a.push(`${s}.${n}.hash-${e.hashCode()}`),s&&n&&a.push(`${s}.${n}`),a.push(n),t?a:a[0]||""}function Ye(e,t){var r;const n=Ge(e,{returnAll:!0});return d()(r=o()(n).call(n,(e=>t[e]))).call(r,(e=>void 0!==e))[0]}function Xe(){return et(se()(32).toString("base64"))}function Qe(e){return et(oe()("sha256").update(e).digest("base64"))}function et(e){return e.replace(/\+/g,"-").replace(/\//g,"_").replace(/=/g,"")}const tt=e=>!e||!(!ue(e)||!e.isEmpty())},2518:(e,t,r)=>{"use strict";function n(e){return function(e){try{return!!JSON.parse(e)}catch(e){return null}}(e)?"json":null}r.d(t,{O:()=>n})},7504:(e,t,r)=>{"use strict";r.d(t,{Z:()=>n});const n=function(){var e={location:{},history:{},open:()=>{},close:()=>{},File:function(){}};if("undefined"==typeof window)return e;try{e=window;for(var t of["File","Blob","FormData"])t in window&&(e[t]=window[t])}catch(e){console.error(e)}return e}()},9069:(e,t,r)=>{"use strict";r.d(t,{Z:()=>p});var n=r(9998),s=r.n(n),a=r(2605),o=r.n(a),l=r(5572),i=r.n(l);const c=i().Set.of("type","format","items","default","maximum","exclusiveMaximum","minimum","exclusiveMinimum","maxLength","minLength","pattern","maxItems","minItems","uniqueItems","enum","multipleOf");function p(e){let{isOAS3:t}=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};if(!i().Map.isMap(e))return{schema:i().Map(),parameterContentMediaType:null};if(!t)return"body"===e.get("in")?{schema:e.get("schema",i().Map()),parameterContentMediaType:null}:{schema:s()(e).call(e,((e,t)=>o()(c).call(c,t))),parameterContentMediaType:null};if(e.get("content")){const 
t=e.get("content",i().Map({})).keySeq().first();return{schema:e.getIn(["content",t,"schema"],i().Map()),parameterContentMediaType:t}}return{schema:e.get("schema")?e.get("schema",i().Map()):i().Map(),parameterContentMediaType:null}}},7481:(e,t,r)=>{"use strict";r.d(t,{Z:()=>b});var n=r(7104),s=r.n(n),a=r(7834),o=r.n(a),l=r(1733),i=r.n(l),c=r(874),p=r.n(c),u=r(3580),d=r.n(u);const m=require("@babel/runtime-corejs3/core-js-stable/instance/find-index");var h=r.n(m),g=r(2611),f=r.n(g),y=r(541),v=r.n(y);const E=e=>t=>s()(e)&&s()(t)&&e.length===t.length&&o()(e).call(e,((e,r)=>e===t[r])),S=function(){for(var e=arguments.length,t=new Array(e),r=0;r1&&void 0!==arguments[1]?arguments[1]:S;const{Cache:r}=v();v().Cache=C;const n=v()(e,t);return v().Cache=r,n}},5102:(e,t,r)=>{var n={"./all.js":5308,"./auth/actions.js":5812,"./auth/index.js":3705,"./auth/reducers.js":3962,"./auth/selectors.js":35,"./auth/spec-wrap-actions.js":8302,"./configs/actions.js":714,"./configs/helpers.js":2256,"./configs/index.js":1661,"./configs/reducers.js":7743,"./configs/selectors.js":9018,"./configs/spec-actions.js":2698,"./deep-linking/helpers.js":1970,"./deep-linking/index.js":4980,"./deep-linking/layout.js":2179,"./deep-linking/operation-tag-wrapper.jsx":4584,"./deep-linking/operation-wrapper.jsx":877,"./download-url.js":8011,"./err/actions.js":4966,"./err/error-transformers/hook.js":2860,"./err/error-transformers/transformers/not-of-type.js":2392,"./err/error-transformers/transformers/parameter-oneof.js":1835,"./err/index.js":7793,"./err/reducers.js":3527,"./err/selectors.js":7667,"./filter/index.js":9978,"./filter/opsFilter.js":4309,"./layout/actions.js":5474,"./layout/index.js":6821,"./layout/reducers.js":5672,"./layout/selectors.js":4400,"./layout/spec-extensions/wrap-selector.js":8989,"./logs/index.js":9150,"./oas3/actions.js":7002,"./oas3/auth-extensions/wrap-selectors.js":3723,"./oas3/components/callbacks.jsx":3427,"./oas3/components/http-auth.jsx":6775,"./oas3/components/index.js":6467,"./oas3/components/operation-link.jsx":5757,"./oas3/components/operation-servers.jsx":6796,"./oas3/components/request-body-editor.jsx":5327,"./oas3/components/request-body.jsx":2458,"./oas3/components/servers-container.jsx":9928,"./oas3/components/servers.jsx":6617,"./oas3/helpers.jsx":7779,"./oas3/index.js":7451,"./oas3/reducers.js":2109,"./oas3/selectors.js":5065,"./oas3/spec-extensions/selectors.js":1741,"./oas3/spec-extensions/wrap-selectors.js":2044,"./oas3/wrap-components/auth-item.jsx":356,"./oas3/wrap-components/index.js":7761,"./oas3/wrap-components/json-schema-string.jsx":287,"./oas3/wrap-components/markdown.jsx":2460,"./oas3/wrap-components/model.jsx":3499,"./oas3/wrap-components/online-validator-badge.js":58,"./oas3/wrap-components/version-stamp.jsx":9487,"./on-complete/index.js":8560,"./request-snippets/fn.js":8223,"./request-snippets/index.js":6575,"./request-snippets/request-snippets.jsx":4206,"./request-snippets/selectors.js":4669,"./safe-render/components/error-boundary.jsx":6195,"./safe-render/components/fallback.jsx":9403,"./safe-render/fn.jsx":6189,"./safe-render/index.js":9595,"./samples/fn.js":4128,"./samples/index.js":8883,"./spec/actions.js":9381,"./spec/index.js":7038,"./spec/reducers.js":32,"./spec/selectors.js":3881,"./spec/wrap-actions.js":7508,"./swagger-js/configs-wrap-actions.js":4852,"./swagger-js/index.js":8901,"./util/index.js":8525,"./view/fn.js":8347,"./view/index.js":3420,"./view/root-injects.jsx":290,"core/plugins/all.js":5308,"core/plugins/auth/actions.js":5812,"core/plugins/auth/index.js":3705,
"core/plugins/auth/reducers.js":3962,"core/plugins/auth/selectors.js":35,"core/plugins/auth/spec-wrap-actions.js":8302,"core/plugins/configs/actions.js":714,"core/plugins/configs/helpers.js":2256,"core/plugins/configs/index.js":1661,"core/plugins/configs/reducers.js":7743,"core/plugins/configs/selectors.js":9018,"core/plugins/configs/spec-actions.js":2698,"core/plugins/deep-linking/helpers.js":1970,"core/plugins/deep-linking/index.js":4980,"core/plugins/deep-linking/layout.js":2179,"core/plugins/deep-linking/operation-tag-wrapper.jsx":4584,"core/plugins/deep-linking/operation-wrapper.jsx":877,"core/plugins/download-url.js":8011,"core/plugins/err/actions.js":4966,"core/plugins/err/error-transformers/hook.js":2860,"core/plugins/err/error-transformers/transformers/not-of-type.js":2392,"core/plugins/err/error-transformers/transformers/parameter-oneof.js":1835,"core/plugins/err/index.js":7793,"core/plugins/err/reducers.js":3527,"core/plugins/err/selectors.js":7667,"core/plugins/filter/index.js":9978,"core/plugins/filter/opsFilter.js":4309,"core/plugins/layout/actions.js":5474,"core/plugins/layout/index.js":6821,"core/plugins/layout/reducers.js":5672,"core/plugins/layout/selectors.js":4400,"core/plugins/layout/spec-extensions/wrap-selector.js":8989,"core/plugins/logs/index.js":9150,"core/plugins/oas3/actions.js":7002,"core/plugins/oas3/auth-extensions/wrap-selectors.js":3723,"core/plugins/oas3/components/callbacks.jsx":3427,"core/plugins/oas3/components/http-auth.jsx":6775,"core/plugins/oas3/components/index.js":6467,"core/plugins/oas3/components/operation-link.jsx":5757,"core/plugins/oas3/components/operation-servers.jsx":6796,"core/plugins/oas3/components/request-body-editor.jsx":5327,"core/plugins/oas3/components/request-body.jsx":2458,"core/plugins/oas3/components/servers-container.jsx":9928,"core/plugins/oas3/components/servers.jsx":6617,"core/plugins/oas3/helpers.jsx":7779,"core/plugins/oas3/index.js":7451,"core/plugins/oas3/reducers.js":2109,"core/plugins/oas3/selectors.js":5065,"core/plugins/oas3/spec-extensions/selectors.js":1741,"core/plugins/oas3/spec-extensions/wrap-selectors.js":2044,"core/plugins/oas3/wrap-components/auth-item.jsx":356,"core/plugins/oas3/wrap-components/index.js":7761,"core/plugins/oas3/wrap-components/json-schema-string.jsx":287,"core/plugins/oas3/wrap-components/markdown.jsx":2460,"core/plugins/oas3/wrap-components/model.jsx":3499,"core/plugins/oas3/wrap-components/online-validator-badge.js":58,"core/plugins/oas3/wrap-components/version-stamp.jsx":9487,"core/plugins/on-complete/index.js":8560,"core/plugins/request-snippets/fn.js":8223,"core/plugins/request-snippets/index.js":6575,"core/plugins/request-snippets/request-snippets.jsx":4206,"core/plugins/request-snippets/selectors.js":4669,"core/plugins/safe-render/components/error-boundary.jsx":6195,"core/plugins/safe-render/components/fallback.jsx":9403,"core/plugins/safe-render/fn.jsx":6189,"core/plugins/safe-render/index.js":9595,"core/plugins/samples/fn.js":4128,"core/plugins/samples/index.js":8883,"core/plugins/spec/actions.js":9381,"core/plugins/spec/index.js":7038,"core/plugins/spec/reducers.js":32,"core/plugins/spec/selectors.js":3881,"core/plugins/spec/wrap-actions.js":7508,"core/plugins/swagger-js/configs-wrap-actions.js":4852,"core/plugins/swagger-js/index.js":8901,"core/plugins/util/index.js":8525,"core/plugins/view/fn.js":8347,"core/plugins/view/index.js":3420,"core/plugins/view/root-injects.jsx":290};function s(e){var t=a(e);return r(t)}function a(e){if(!r.o(n,e)){var t=new Error("Cannot find module 
'"+e+"'");throw t.code="MODULE_NOT_FOUND",t}return n[e]}s.keys=function(){return Object.keys(n)},s.resolve=a,e.exports=s,s.id=5102},2517:e=>{"use strict";e.exports="data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjAwcHgiICBoZWlnaHQ9IjIwMHB4IiAgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB2aWV3Qm94PSIwIDAgMTAwIDEwMCIgcHJlc2VydmVBc3BlY3RSYXRpbz0ieE1pZFlNaWQiIGNsYXNzPSJsZHMtcm9sbGluZyIgc3R5bGU9ImJhY2tncm91bmQtaW1hZ2U6IG5vbmU7IGJhY2tncm91bmQtcG9zaXRpb246IGluaXRpYWwgaW5pdGlhbDsgYmFja2dyb3VuZC1yZXBlYXQ6IGluaXRpYWwgaW5pdGlhbDsiPjxjaXJjbGUgY3g9IjUwIiBjeT0iNTAiIGZpbGw9Im5vbmUiIG5nLWF0dHItc3Ryb2tlPSJ7e2NvbmZpZy5jb2xvcn19IiBuZy1hdHRyLXN0cm9rZS13aWR0aD0ie3tjb25maWcud2lkdGh9fSIgbmctYXR0ci1yPSJ7e2NvbmZpZy5yYWRpdXN9fSIgbmctYXR0ci1zdHJva2UtZGFzaGFycmF5PSJ7e2NvbmZpZy5kYXNoYXJyYXl9fSIgc3Ryb2tlPSIjNTU1NTU1IiBzdHJva2Utd2lkdGg9IjEwIiByPSIzNSIgc3Ryb2tlLWRhc2hhcnJheT0iMTY0LjkzMzYxNDMxMzQ2NDE1IDU2Ljk3Nzg3MTQzNzgyMTM4Ij48YW5pbWF0ZVRyYW5zZm9ybSBhdHRyaWJ1dGVOYW1lPSJ0cmFuc2Zvcm0iIHR5cGU9InJvdGF0ZSIgY2FsY01vZGU9ImxpbmVhciIgdmFsdWVzPSIwIDUwIDUwOzM2MCA1MCA1MCIga2V5VGltZXM9IjA7MSIgZHVyPSIxcyIgYmVnaW49IjBzIiByZXBlYXRDb3VudD0iaW5kZWZpbml0ZSI+PC9hbmltYXRlVHJhbnNmb3JtPjwvY2lyY2xlPjwvc3ZnPgo="},5163:e=>{"use strict";e.exports='---\nurl: "https://petstore.swagger.io/v2/swagger.json"\ndom_id: "#swagger-ui"\nvalidatorUrl: "https://validator.swagger.io/validator"\n'},1733:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/array/from")},7104:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/array/is-array")},593:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/instance/bind")},4883:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/instance/concat")},7862:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/instance/entries")},7834:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/instance/every")},9998:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/instance/filter")},3580:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/instance/find")},4235:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/instance/for-each")},2605:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/instance/includes")},8493:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/instance/index-of")},874:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/instance/keys")},3942:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/instance/map")},66:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/instance/reduce")},600:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/instance/slice")},5626:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/instance/some")},9247:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/instance/sort")},3262:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/instance/starts-with")},7390:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/instance/trim")},8344:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/json/stringify")},2611:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/map")},4994:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/object/assign")},7252:e=>{"use 
strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/object/keys")},9968:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/object/values")},9300:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/set-timeout")},9478:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/core-js-stable/url")},1093:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/helpers/defineProperty")},4250:e=>{"use strict";e.exports=require("@babel/runtime-corejs3/helpers/extends")},871:e=>{"use strict";e.exports=require("buffer")},9003:e=>{"use strict";e.exports=require("classnames")},5572:e=>{"use strict";e.exports=require("immutable")},9793:e=>{"use strict";e.exports=require("js-yaml")},1712:e=>{"use strict";e.exports=require("lodash/get")},5716:e=>{"use strict";e.exports=require("lodash/isFunction")},541:e=>{"use strict";e.exports=require("lodash/memoize")},580:e=>{"use strict";e.exports=require("prop-types")},185:e=>{"use strict";e.exports=require("randombytes")},6689:e=>{"use strict";e.exports=require("react")},2807:e=>{"use strict";e.exports=require("react-copy-to-clipboard")},8082:e=>{"use strict";e.exports=require("react-immutable-proptypes")},6695:e=>{"use strict";e.exports=require("redux")},963:e=>{"use strict";e.exports=require("remarkable")},6814:e=>{"use strict";e.exports=require("reselect")},41:e=>{"use strict";e.exports=require("serialize-error")},6765:e=>{"use strict";e.exports=require("swagger-client/es/helpers")},3883:e=>{"use strict";e.exports=require("url-parse")}},t={};function r(n){var s=t[n];if(void 0!==s)return s.exports;var a=t[n]={exports:{}};return e[n](a,a.exports,r),a.exports}r.n=e=>{var t=e&&e.__esModule?()=>e.default:()=>e;return r.d(t,{a:t}),t},r.d=(e,t)=>{for(var n in t)r.o(t,n)&&!r.o(e,n)&&Object.defineProperty(e,n,{enumerable:!0,get:t[n]})},r.o=(e,t)=>Object.prototype.hasOwnProperty.call(e,t),r.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})};var n={};return(()=>{"use strict";r.d(n,{default:()=>Cn});var e={};r.r(e),r.d(e,{Button:()=>lr,Col:()=>ar,Collapse:()=>mr,Container:()=>nr,Input:()=>cr,Link:()=>ur,Row:()=>or,Select:()=>pr,TextArea:()=>ir});var t={};r.r(t),r.d(t,{JsonSchemaArrayItemFile:()=>on,JsonSchemaArrayItemText:()=>an,JsonSchemaForm:()=>rn,JsonSchema_array:()=>sn,JsonSchema_boolean:()=>ln,JsonSchema_object:()=>pn,JsonSchema_string:()=>nn});const s=require("@babel/runtime-corejs3/core-js-stable/instance/last-index-of");var a=r.n(s),o=r(9998),l=r.n(o),i=r(7252),c=r.n(i),p=r(8344),u=r.n(p);const d=require("deep-extend");var m=r.n(d),h=r(593),g=r.n(h),f=r(4994),y=r.n(f),v=r(600),E=r.n(v),S=r(7104),C=r.n(S),b=r(66),x=r.n(b),w=r(3942),_=r.n(w),A=r(4883),I=r.n(A),N=r(6689),q=r.n(N),R=r(6695),T=r(5572),P=r.n(T);const k=require("redux-immutable");var O=r(41);const M=require("lodash/merge");var j=r.n(M),V=r(4966),D=r(7504),L=r(1890);const U=e=>e;class z{constructor(){var e;let t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};var r,n,s;m()(this,{state:{},plugins:[],pluginsOptions:{},system:{configs:{},fn:{},components:{},rootInjects:{},statePlugins:{}},boundSystem:{},toolbox:{}},t),this.getSystem=g()(e=this._getSystem).call(e,this),this.store=(r=U,n=(0,T.fromJS)(this.state),s=this.getSystem,function(e,t,r){let n=[(0,L._5)(r)];const 
s=D.Z.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__||R.compose;return(0,R.createStore)(e,t,s((0,R.applyMiddleware)(...n)))}(r,n,s)),this.buildSystem(!1),this.register(this.plugins)}getStore(){return this.store}register(e){let t=!(arguments.length>1&&void 0!==arguments[1])||arguments[1];var r=B(e,this.getSystem(),this.pluginsOptions);J(this.system,r),t&&this.buildSystem();$.call(this.system,e,this.getSystem())&&this.buildSystem()}buildSystem(){let e=!(arguments.length>0&&void 0!==arguments[0])||arguments[0],t=this.getStore().dispatch,r=this.getStore().getState;this.boundSystem=y()({},this.getRootInjects(),this.getWrappedAndBoundActions(t),this.getWrappedAndBoundSelectors(r,this.getSystem),this.getStateThunks(r),this.getFn(),this.getConfigs()),e&&this.rebuildReducer()}_getSystem(){return this.boundSystem}getRootInjects(){var e,t,r;return y()({getSystem:this.getSystem,getStore:g()(e=this.getStore).call(e,this),getComponents:g()(t=this.getComponents).call(t,this),getState:this.getStore().getState,getConfigs:g()(r=this._getConfigs).call(r,this),Im:P(),React:q()},this.system.rootInjects||{})}_getConfigs(){return this.system.configs}getConfigs(){return{configs:this.system.configs}}setConfigs(e){this.system.configs=e}rebuildReducer(){var e;this.store.replaceReducer((e=this.system.statePlugins,function(e){var t;let r=x()(t=c()(e)).call(t,((t,r)=>(t[r]=function(e){return function(){let t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:new T.Map,r=arguments.length>1?arguments[1]:void 0;if(!e)return t;let n=e[r.type];if(n){const e=F(n)(t,r);return null===e?t:e}return t}}(e[r]),t)),{});return c()(r).length?(0,k.combineReducers)(r):U}((0,L.Ay)(e,(e=>e.reducers)))))}getType(e){let t=e[0].toUpperCase()+E()(e).call(e,1);return(0,L.Q2)(this.system.statePlugins,((r,n)=>{let s=r[e];if(s)return{[n+t]:s}}))}getSelectors(){return this.getType("selectors")}getActions(){let e=this.getType("actions");return(0,L.Ay)(e,(e=>(0,L.Q2)(e,((e,t)=>{if((0,L.LQ)(e))return{[t]:e}}))))}getWrappedAndBoundActions(e){var t=this;let r=this.getBoundActions(e);return(0,L.Ay)(r,((e,r)=>{let n=this.system.statePlugins[E()(r).call(r,0,-7)].wrapActions;return n?(0,L.Ay)(e,((e,r)=>{let s=n[r];return s?(C()(s)||(s=[s]),x()(s).call(s,((e,r)=>{let n=function(){return r(e,t.getSystem())(...arguments)};if(!(0,L.LQ)(n))throw new TypeError("wrapActions needs to return a function that returns a new function (ie the wrapped action)");return F(n)}),e||Function.prototype)):e})):e}))}getWrappedAndBoundSelectors(e,t){var r=this;let n=this.getBoundSelectors(e,t);return(0,L.Ay)(n,((t,n)=>{let s=[E()(n).call(n,0,-9)],a=this.system.statePlugins[s].wrapSelectors;return a?(0,L.Ay)(t,((t,n)=>{let o=a[n];return o?(C()(o)||(o=[o]),x()(o).call(o,((t,n)=>{let a=function(){for(var a=arguments.length,o=new Array(a),l=0;l(t[r]=e.get(r),t)),{})}getStateThunks(e){var t;return x()(t=c()(this.system.statePlugins)).call(t,((t,r)=>(t[r]=()=>e().get(r),t)),{})}getFn(){return{fn:this.system.fn}}getComponents(e){const t=this.system.components[e];return C()(t)?x()(t).call(t,((e,t)=>t(e,this.getSystem()))):void 0!==e?this.system.components[e]:this.system.components}getBoundSelectors(e,t){return(0,L.Ay)(this.getSelectors(),((r,n)=>{let s=[E()(n).call(n,0,-9)];const a=()=>e().getIn(s);return(0,L.Ay)(r,(e=>function(){for(var r=arguments.length,n=new Array(r),s=0;s"function"!=typeof e?(0,L.Ay)(e,(e=>r(e))):function(){var t=null;try{t=e(...arguments)}catch(e){t={type:V.NEW_THROWN_ERR,error:!0,payload:(0,O.serializeError)(e)}}finally{return 
t}};return(0,L.Ay)(t,(t=>(0,R.bindActionCreators)(r(t),e)))}getMapStateToProps(){return()=>y()({},this.getSystem())}getMapDispatchToProps(e){return t=>m()({},this.getWrappedAndBoundActions(t),this.getFn(),e)}}function B(e,t,r){if((0,L.Kn)(e)&&!(0,L.kJ)(e))return j()({},e);if((0,L.Wl)(e))return B(e(t),t,r);if((0,L.kJ)(e)){var n;const s="chain"===r.pluginLoadType?t.getComponents():{};return x()(n=_()(e).call(e,(e=>B(e,t,r)))).call(n,J,s)}return{}}function $(e,t){let{hasLoaded:r}=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},n=r;return(0,L.Kn)(e)&&!(0,L.kJ)(e)&&"function"==typeof e.afterLoad&&(n=!0,F(e.afterLoad).call(this,t)),(0,L.Wl)(e)?$.call(this,e(t),t,{hasLoaded:n}):(0,L.kJ)(e)?_()(e).call(e,(e=>$.call(this,e,t,{hasLoaded:n}))):n}function J(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};if(!(0,L.Kn)(e))return{};if(!(0,L.Kn)(t))return e;t.wrapComponents&&((0,L.Ay)(t.wrapComponents,((r,n)=>{const s=e.components&&e.components[n];s&&C()(s)?(e.components[n]=I()(s).call(s,[r]),delete t.wrapComponents[n]):s&&(e.components[n]=[s,r],delete t.wrapComponents[n])})),c()(t.wrapComponents).length||delete t.wrapComponents);const{statePlugins:r}=e;if((0,L.Kn)(r))for(let e in r){const a=r[e];if(!(0,L.Kn)(a))continue;const{wrapActions:o,wrapSelectors:l}=a;if((0,L.Kn)(o))for(let r in o){let s=o[r];var n;if(C()(s)||(s=[s],o[r]=s),t&&t.statePlugins&&t.statePlugins[e]&&t.statePlugins[e].wrapActions&&t.statePlugins[e].wrapActions[r])t.statePlugins[e].wrapActions[r]=I()(n=o[r]).call(n,t.statePlugins[e].wrapActions[r])}if((0,L.Kn)(l))for(let r in l){let n=l[r];var s;if(C()(n)||(n=[n],l[r]=n),t&&t.statePlugins&&t.statePlugins[e]&&t.statePlugins[e].wrapSelectors&&t.statePlugins[e].wrapSelectors[r])t.statePlugins[e].wrapSelectors[r]=I()(s=l[r]).call(s,t.statePlugins[e].wrapSelectors[r])}}return m()(e,t)}function F(e){let{logErrors:t=!0}=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return"function"!=typeof e?e:function(){try{for(var r=arguments.length,n=new Array(r),s=0;s{let{layoutActions:e,tag:t,operationId:r,isShown:n}=this.props;const s=this.getResolvedSubtree();n||void 0!==s||this.requestResolvedSubtree(),e.show(["operations",t,r],!n)})),ce()(this,"onCancelClick",(()=>{this.setState({tryItOutEnabled:!this.state.tryItOutEnabled})})),ce()(this,"onTryoutClick",(()=>{this.setState({tryItOutEnabled:!this.state.tryItOutEnabled})})),ce()(this,"onResetClick",(e=>{const t=this.props.oas3Selectors.selectDefaultRequestBodyValue(...e);this.props.oas3Actions.setRequestBodyValue({value:t,pathMethod:e})})),ce()(this,"onExecute",(()=>{this.setState({executeInProgress:!0})})),ce()(this,"getResolvedSubtree",(()=>{const{specSelectors:e,path:t,method:r,specPath:n}=this.props;return n?e.specResolvedSubtree(n.toJS()):e.specResolvedSubtree(["paths",t,r])})),ce()(this,"requestResolvedSubtree",(()=>{const{specActions:e,path:t,method:r,specPath:n}=this.props;return 
n?e.requestResolvedSubtree(n.toJS()):e.requestResolvedSubtree(["paths",t,r])}));const{tryItOutEnabled:r}=e.getConfigs();this.state={tryItOutEnabled:!0===r||"true"===r,executeInProgress:!1}}mapStateToProps(e,t){const{op:r,layoutSelectors:n,getConfigs:s}=t,{docExpansion:a,deepLinking:o,displayOperationId:l,displayRequestDuration:i,supportedSubmitMethods:c}=s(),p=n.showSummary(),u=r.getIn(["operation","__originalOperationId"])||r.getIn(["operation","operationId"])||(0,de.opId)(r.get("operation"),t.path,t.method)||r.get("id"),d=["operations",t.tag,u],m=o&&"false"!==o,h=ue()(c).call(c,t.method)>=0&&(void 0===t.allowTryItOut?t.specSelectors.allowTryItOutFor(t.path,t.method):t.allowTryItOut),g=r.getIn(["operation","security"])||t.specSelectors.security();return{operationId:u,isDeepLinkingEnabled:m,showSummary:p,displayOperationId:l,displayRequestDuration:i,allowTryItOut:h,security:g,isAuthorized:t.authSelectors.isAuthorized(g),isShown:n.isShown(d,"full"===a),jumpToKey:`paths.${t.path}.${t.method}`,response:t.specSelectors.responseFor(t.path,t.method),request:t.specSelectors.requestFor(t.path,t.method)}}componentDidMount(){const{isShown:e}=this.props,t=this.getResolvedSubtree();e&&void 0===t&&this.requestResolvedSubtree()}UNSAFE_componentWillReceiveProps(e){const{response:t,isShown:r}=e,n=this.getResolvedSubtree();t!==this.props.response&&this.setState({executeInProgress:!1}),r&&void 0===n&&this.requestResolvedSubtree()}render(){let{op:e,tag:t,path:r,method:n,security:s,isAuthorized:a,operationId:o,showSummary:l,isShown:i,jumpToKey:c,allowTryItOut:p,response:u,request:d,displayOperationId:m,displayRequestDuration:h,isDeepLinkingEnabled:g,specPath:f,specSelectors:y,specActions:v,getComponent:E,getConfigs:S,layoutSelectors:C,layoutActions:b,authActions:x,authSelectors:w,oas3Actions:_,oas3Selectors:A,fn:I}=this.props;const N=E("operation"),R=this.getResolvedSubtree()||(0,T.Map)(),P=(0,T.fromJS)({op:R,tag:t,path:r,summary:e.getIn(["operation","summary"])||"",deprecated:R.get("deprecated")||e.getIn(["operation","deprecated"])||!1,method:n,security:s,isAuthorized:a,operationId:o,originalOperationId:R.getIn(["operation","__originalOperationId"]),showSummary:l,isShown:i,jumpToKey:c,allowTryItOut:p,request:d,displayOperationId:m,displayRequestDuration:h,isDeepLinkingEnabled:g,executeInProgress:this.state.executeInProgress,tryItOutEnabled:this.state.tryItOutEnabled});return q().createElement(N,{operation:P,response:u,request:d,isShown:i,toggleShown:this.toggleShown,onTryoutClick:this.onTryoutClick,onResetClick:this.onResetClick,onCancelClick:this.onCancelClick,onExecute:this.onExecute,specPath:f,specActions:v,specSelectors:y,oas3Actions:_,oas3Selectors:A,layoutActions:b,layoutSelectors:C,authActions:x,authSelectors:w,getComponent:E,getConfigs:S,fn:I})}}ce()(me,"defaultProps",{showSummary:!0,response:null,allowTryItOut:!0,displayOperationId:!1,displayRequestDuration:!1});class he extends q().Component{getLayout(){let{getComponent:e,layoutSelectors:t}=this.props;const r=t.current(),n=e(r,!0);return n||(()=>q().createElement("h1",null,' No layout defined for "',r,'" '))}render(){const e=this.getLayout();return q().createElement(e,null)}}he.defaultProps={};class ge extends q().Component{constructor(){super(...arguments),ce()(this,"close",(()=>{let{authActions:e}=this.props;e.showDefinitions(!1)}))}render(){var e;let{authSelectors:t,authActions:r,getComponent:n,errSelectors:s,specSelectors:a,fn:{AST:o={}}}=this.props,l=t.shownDefinitions();const i=n("auths");return 
q().createElement("div",{className:"dialog-ux"},q().createElement("div",{className:"backdrop-ux"}),q().createElement("div",{className:"modal-ux"},q().createElement("div",{className:"modal-dialog-ux"},q().createElement("div",{className:"modal-ux-inner"},q().createElement("div",{className:"modal-ux-header"},q().createElement("h3",null,"Available authorizations"),q().createElement("button",{type:"button",className:"close-modal",onClick:this.close},q().createElement("svg",{width:"20",height:"20"},q().createElement("use",{href:"#close",xlinkHref:"#close"})))),q().createElement("div",{className:"modal-ux-content"},_()(e=l.valueSeq()).call(e,((e,l)=>q().createElement(i,{key:l,AST:o,definitions:e,getComponent:n,errSelectors:s,authSelectors:t,authActions:r,specSelectors:a}))))))))}}class fe extends q().Component{render(){let{isAuthorized:e,showPopup:t,onClick:r,getComponent:n}=this.props;const s=n("authorizationPopup",!0);return q().createElement("div",{className:"auth-wrapper"},q().createElement("button",{className:e?"btn authorize locked":"btn authorize unlocked",onClick:r},q().createElement("span",null,"Authorize"),q().createElement("svg",{width:"20",height:"20"},q().createElement("use",{href:e?"#locked":"#unlocked",xlinkHref:e?"#locked":"#unlocked"}))),t&&q().createElement(s,null))}}class ye extends q().Component{render(){const{authActions:e,authSelectors:t,specSelectors:r,getComponent:n}=this.props,s=r.securityDefinitions(),a=t.definitionsToAuthorize(),o=n("authorizeBtn");return s?q().createElement(o,{onClick:()=>e.showDefinitions(a),isAuthorized:!!t.authorized().size,showPopup:!!t.shownDefinitions(),getComponent:n}):null}}class ve extends q().Component{constructor(){super(...arguments),ce()(this,"onClick",(e=>{e.stopPropagation();let{onClick:t}=this.props;t&&t()}))}render(){let{isAuthorized:e}=this.props;return q().createElement("button",{className:e?"authorization__btn locked":"authorization__btn unlocked","aria-label":e?"authorization button locked":"authorization button unlocked",onClick:this.onClick},q().createElement("svg",{width:"20",height:"20"},q().createElement("use",{href:e?"#locked":"#unlocked",xlinkHref:e?"#locked":"#unlocked"})))}}class Ee extends q().Component{constructor(e,t){super(e,t),ce()(this,"onAuthChange",(e=>{let{name:t}=e;this.setState({[t]:e})})),ce()(this,"submitAuth",(e=>{e.preventDefault();let{authActions:t}=this.props;t.authorizeWithPersistOption(this.state)})),ce()(this,"logoutClick",(e=>{e.preventDefault();let{authActions:t,definitions:r}=this.props,n=_()(r).call(r,((e,t)=>t)).toArray();this.setState(x()(n).call(n,((e,t)=>(e[t]="",e)),{})),t.logoutWithPersistOption(n)})),ce()(this,"close",(e=>{e.preventDefault();let{authActions:t}=this.props;t.showDefinitions(!1)})),this.state={}}render(){var e;let{definitions:t,getComponent:r,authSelectors:n,errSelectors:s}=this.props;const a=r("AuthItem"),o=r("oauth2",!0),i=r("Button");let c=n.authorized(),p=l()(t).call(t,((e,t)=>!!c.get(t))),u=l()(t).call(t,(e=>"oauth2"!==e.get("type"))),d=l()(t).call(t,(e=>"oauth2"===e.get("type")));return q().createElement("div",{className:"auth-container"},!!u.size&&q().createElement("form",{onSubmit:this.submitAuth},_()(u).call(u,((e,t)=>q().createElement(a,{key:t,schema:e,name:t,getComponent:r,onAuthChange:this.onAuthChange,authorized:c,errSelectors:s}))).toArray(),q().createElement("div",{className:"auth-btn-wrapper"},u.size===p.size?q().createElement(i,{className:"btn modal-btn auth",onClick:this.logoutClick},"Logout"):q().createElement(i,{type:"submit",className:"btn modal-btn auth 
authorize"},"Authorize"),q().createElement(i,{className:"btn modal-btn auth btn-done",onClick:this.close},"Close"))),d&&d.size?q().createElement("div",null,q().createElement("div",{className:"scope-def"},q().createElement("p",null,"Scopes are used to grant an application different levels of access to data on behalf of the end user. Each API may declare one or more scopes."),q().createElement("p",null,"API requires the following scopes. Select which ones you want to grant to Swagger UI.")),_()(e=l()(t).call(t,(e=>"oauth2"===e.get("type")))).call(e,((e,t)=>q().createElement("div",{key:t},q().createElement(o,{authorized:c,schema:e,name:t})))).toArray()):null)}}class Se extends q().Component{render(){let{schema:e,name:t,getComponent:r,onAuthChange:n,authorized:s,errSelectors:a}=this.props;const o=r("apiKeyAuth"),l=r("basicAuth");let i;const c=e.get("type");switch(c){case"apiKey":i=q().createElement(o,{key:t,schema:e,name:t,errSelectors:a,authorized:s,getComponent:r,onChange:n});break;case"basic":i=q().createElement(l,{key:t,schema:e,name:t,errSelectors:a,authorized:s,getComponent:r,onChange:n});break;default:i=q().createElement("div",{key:t},"Unknown security definition type ",c)}return q().createElement("div",{key:`${t}-jump`},i)}}class Ce extends q().Component{render(){let{error:e}=this.props,t=e.get("level"),r=e.get("message"),n=e.get("source");return q().createElement("div",{className:"errors"},q().createElement("b",null,n," ",t),q().createElement("span",null,r))}}class be extends q().Component{constructor(e,t){super(e,t),ce()(this,"onChange",(e=>{let{onChange:t}=this.props,r=e.target.value,n=y()({},this.state,{value:r});this.setState(n),t(n)}));let{name:r,schema:n}=this.props,s=this.getValue();this.state={name:r,schema:n,value:s}}getValue(){let{name:e,authorized:t}=this.props;return t&&t.getIn([e,"value"])}render(){var e,t;let{schema:r,getComponent:n,errSelectors:s,name:a}=this.props;const o=n("Input"),i=n("Row"),c=n("Col"),p=n("authError"),u=n("Markdown",!0),d=n("JumpToPath",!0);let m=this.getValue(),h=l()(e=s.allErrors()).call(e,(e=>e.get("authId")===a));return q().createElement("div",null,q().createElement("h4",null,q().createElement("code",null,a||r.get("name"))," (apiKey)",q().createElement(d,{path:["securityDefinitions",a]})),m&&q().createElement("h6",null,"Authorized"),q().createElement(i,null,q().createElement(u,{source:r.get("description")})),q().createElement(i,null,q().createElement("p",null,"Name: ",q().createElement("code",null,r.get("name")))),q().createElement(i,null,q().createElement("p",null,"In: ",q().createElement("code",null,r.get("in")))),q().createElement(i,null,q().createElement("label",null,"Value:"),m?q().createElement("code",null," ****** "):q().createElement(c,null,q().createElement(o,{type:"text",onChange:this.onChange,autoFocus:!0}))),_()(t=h.valueSeq()).call(t,((e,t)=>q().createElement(p,{error:e,key:t}))))}}class xe extends q().Component{constructor(e,t){super(e,t),ce()(this,"onChange",(e=>{let{onChange:t}=this.props,{value:r,name:n}=e.target,s=this.state.value;s[n]=r,this.setState({value:s}),t(this.state)}));let{schema:r,name:n}=this.props,s=this.getValue().username;this.state={name:n,schema:r,value:s?{username:s}:{}}}getValue(){let{authorized:e,name:t}=this.props;return e&&e.getIn([t,"value"])||{}}render(){var e,t;let{schema:r,getComponent:n,name:s,errSelectors:a}=this.props;const o=n("Input"),i=n("Row"),c=n("Col"),p=n("authError"),u=n("JumpToPath",!0),d=n("Markdown",!0);let 
m=this.getValue().username,h=l()(e=a.allErrors()).call(e,(e=>e.get("authId")===s));return q().createElement("div",null,q().createElement("h4",null,"Basic authorization",q().createElement(u,{path:["securityDefinitions",s]})),m&&q().createElement("h6",null,"Authorized"),q().createElement(i,null,q().createElement(d,{source:r.get("description")})),q().createElement(i,null,q().createElement("label",null,"Username:"),m?q().createElement("code",null," ",m," "):q().createElement(c,null,q().createElement(o,{type:"text",required:"required",name:"username",onChange:this.onChange,autoFocus:!0}))),q().createElement(i,null,q().createElement("label",null,"Password:"),m?q().createElement("code",null," ****** "):q().createElement(c,null,q().createElement(o,{autoComplete:"new-password",name:"password",type:"password",onChange:this.onChange}))),_()(t=h.valueSeq()).call(t,((e,t)=>q().createElement(p,{error:e,key:t}))))}}function we(e){const{example:t,showValue:r,getComponent:n,getConfigs:s}=e,a=n("Markdown",!0),o=n("highlightCode");return t?q().createElement("div",{className:"example"},t.get("description")?q().createElement("section",{className:"example__section"},q().createElement("div",{className:"example__section-header"},"Example Description"),q().createElement("p",null,q().createElement(a,{source:t.get("description")}))):null,r&&t.has("value")?q().createElement("section",{className:"example__section"},q().createElement("div",{className:"example__section-header"},"Example Value"),q().createElement(o,{getConfigs:s,value:(0,L.Pz)(t.get("value"))})):null):null}var _e=r(2611),Ae=r.n(_e);class Ie extends q().PureComponent{constructor(){var e;super(...arguments),e=this,ce()(this,"_onSelect",(function(t){let{isSyntheticChange:r=!1}=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};"function"==typeof e.props.onSelect&&e.props.onSelect(t,{isSyntheticChange:r})})),ce()(this,"_onDomSelect",(e=>{if("function"==typeof this.props.onSelect){const t=e.target.selectedOptions[0].getAttribute("value");this._onSelect(t,{isSyntheticChange:!1})}})),ce()(this,"getCurrentExample",(()=>{const{examples:e,currentExampleKey:t}=this.props,r=e.get(t),n=e.keySeq().first(),s=e.get(n);return r||s||Ae()({})}))}componentDidMount(){const{onSelect:e,examples:t}=this.props;if("function"==typeof e){const e=t.first(),r=t.keyOf(e);this._onSelect(r,{isSyntheticChange:!0})}}UNSAFE_componentWillReceiveProps(e){const{currentExampleKey:t,examples:r}=e;if(r!==this.props.examples&&!r.has(t)){const e=r.first(),t=r.keyOf(e);this._onSelect(t,{isSyntheticChange:!0})}}render(){const{examples:e,currentExampleKey:t,isValueModified:r,isModifiedValueAvailable:n,showLabels:s}=this.props;return q().createElement("div",{className:"examples-select"},s?q().createElement("span",{className:"examples-select__section-label"},"Examples: "):null,q().createElement("select",{className:"examples-select-element",onChange:this._onDomSelect,value:n&&r?"__MODIFIED__VALUE__":t||""},n?q().createElement("option",{value:"__MODIFIED__VALUE__"},"[Modified value]"):null,_()(e).call(e,((e,t)=>q().createElement("option",{key:t,value:t},e.get("summary")||t))).valueSeq()))}}ce()(Ie,"defaultProps",{examples:P().Map({}),onSelect:function(){for(var e=arguments.length,t=new Array(e),r=0;rT.List.isList(e)?e:(0,L.Pz)(e);class qe extends q().PureComponent{constructor(e){var 
t;super(e),t=this,ce()(this,"_getStateForCurrentNamespace",(()=>{const{currentNamespace:e}=this.props;return(this.state[e]||(0,T.Map)()).toObject()})),ce()(this,"_setStateForCurrentNamespace",(e=>{const{currentNamespace:t}=this.props;return this._setStateForNamespace(t,e)})),ce()(this,"_setStateForNamespace",((e,t)=>{const r=(this.state[e]||(0,T.Map)()).mergeDeep(t);return this.setState({[e]:r})})),ce()(this,"_isCurrentUserInputSameAsExampleValue",(()=>{const{currentUserInputValue:e}=this.props;return this._getCurrentExampleValue()===e})),ce()(this,"_getValueForExample",((e,t)=>{const{examples:r}=t||this.props;return Ne((r||(0,T.Map)({})).getIn([e,"value"]))})),ce()(this,"_getCurrentExampleValue",(e=>{const{currentKey:t}=e||this.props;return this._getValueForExample(t,e||this.props)})),ce()(this,"_onExamplesSelect",(function(e){let{isSyntheticChange:r}=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};const{onSelect:n,updateValue:s,currentUserInputValue:a,userHasEditedBody:o}=t.props,{lastUserEditedValue:l}=t._getStateForCurrentNamespace(),i=t._getValueForExample(e);if("__MODIFIED__VALUE__"===e)return s(Ne(l)),t._setStateForCurrentNamespace({isModifiedValueSelected:!0});if("function"==typeof n){for(var c=arguments.length,p=new Array(c>2?c-2:0),u=2;ue.get("value")===t||(0,L.Pz)(e.get("value"))===t));if(c.size){let t;t=c.has(e.currentKey)?e.currentKey:c.keySeq().first(),n(t,{isSyntheticChange:!0})}else t!==this.props.currentUserInputValue&&t!==a&&t!==o&&(this.props.setRetainRequestBodyValueFlag(!0),this._setStateForNamespace(e.currentNamespace,{lastUserEditedValue:e.currentUserInputValue,isModifiedValueSelected:s||t!==i}))}render(){const{currentUserInputValue:e,examples:t,currentKey:r,getComponent:n,userHasEditedBody:s}=this.props,{lastDownstreamValue:a,lastUserEditedValue:o,isModifiedValueSelected:l}=this._getStateForCurrentNamespace(),i=n("ExamplesSelect");return q().createElement(i,{examples:t,currentExampleKey:r,onSelect:this._onExamplesSelect,isModifiedValueAvailable:!!o&&o!==a,isValueModified:void 0!==e&&l&&e!==this._getCurrentExampleValue()||s})}}ce()(qe,"defaultProps",{userHasEditedBody:!1,examples:(0,T.Map)({}),currentNamespace:"__DEFAULT__NAMESPACE__",setRetainRequestBodyValueFlag:()=>{},onSelect:function(){for(var e=arguments.length,t=new Array(e),r=0;r{e.preventDefault();let{authActions:t}=this.props;t.showDefinitions(!1)})),ce()(this,"authorize",(()=>{let{authActions:e,errActions:t,getConfigs:r,authSelectors:n,oas3Selectors:s}=this.props,a=r(),o=n.getConfigs();t.clear({authId:name,type:"auth",source:"auth"}),function(e){let{auth:t,authActions:r,errActions:n,configs:s,authConfigs:a={},currentServer:o}=e,{schema:l,scopes:i,name:c,clientId:p}=t,u=l.get("flow"),d=[];switch(u){case"password":return void r.authorizePassword(t);case"application":case"clientCredentials":case"client_credentials":return void r.authorizeApplication(t);case"accessCode":case"authorizationCode":case"authorization_code":d.push("response_type=code");break;case"implicit":d.push("response_type=token")}"string"==typeof p&&d.push("client_id="+encodeURIComponent(p));let m=s.oauth2RedirectUrl;if(void 0===m)return void n.newAuthErr({authId:c,source:"validation",level:"error",message:"oauth2RedirectUrl configuration is not passed. 
Oauth2 authorization cannot be performed."});d.push("redirect_uri="+encodeURIComponent(m));let h=[];if(C()(i)?h=i:P().List.isList(i)&&(h=i.toArray()),h.length>0){let e=a.scopeSeparator||" ";d.push("scope="+encodeURIComponent(h.join(e)))}let g=(0,L.r3)(new Date);if(d.push("state="+encodeURIComponent(g)),void 0!==a.realm&&d.push("realm="+encodeURIComponent(a.realm)),("authorizationCode"===u||"authorization_code"===u||"accessCode"===u)&&a.usePkceWithAuthorizationCodeGrant){const e=(0,L.Uj)(),r=(0,L.Xb)(e);d.push("code_challenge="+r),d.push("code_challenge_method=S256"),t.codeVerifier=e}let{additionalQueryStringParams:f}=a;for(let e in f){var y;void 0!==f[e]&&d.push(_()(y=[e,f[e]]).call(y,encodeURIComponent).join("="))}const v=l.get("authorizationUrl");let E;E=o?Ve()((0,L.Nm)(v),o,!0).toString():(0,L.Nm)(v);let S,b=[E,d.join("&")].join(-1===ue()(v).call(v,"?")?"?":"&");S="implicit"===u?r.preAuthorizeImplicit:a.useBasicAuthenticationWithAccessCodeGrant?r.authorizeAccessCodeWithBasicAuthentication:r.authorizeAccessCodeWithFormParams,r.authPopup(b,{auth:t,state:g,redirectUrl:m,callback:S,errCb:n.newAuthErr})}({auth:this.state,currentServer:s.serverEffectiveValue(s.selectedServer()),authActions:e,errActions:t,configs:a,authConfigs:o})})),ce()(this,"onScopeChange",(e=>{var t,r;let{target:n}=e,{checked:s}=n,a=n.dataset.value;if(s&&-1===ue()(t=this.state.scopes).call(t,a)){var o;let e=I()(o=this.state.scopes).call(o,[a]);this.setState({scopes:e})}else if(!s&&ue()(r=this.state.scopes).call(r,a)>-1){var i;this.setState({scopes:l()(i=this.state.scopes).call(i,(e=>e!==a))})}})),ce()(this,"onInputChange",(e=>{let{target:{dataset:{name:t},value:r}}=e,n={[t]:r};this.setState(n)})),ce()(this,"selectScopes",(e=>{var t;e.target.dataset.all?this.setState({scopes:Te()(ke()(t=this.props.schema.get("allowedScopes")||this.props.schema.get("scopes")).call(t))}):this.setState({scopes:[]})})),ce()(this,"logout",(e=>{e.preventDefault();let{authActions:t,errActions:r,name:n}=this.props;r.clear({authId:n,type:"auth",source:"auth"}),t.logoutWithPersistOption([n])}));let{name:r,schema:n,authorized:s,authSelectors:a}=this.props,o=s&&s.get(r),i=a.getConfigs()||{},c=o&&o.get("username")||"",p=o&&o.get("clientId")||i.clientId||"",u=o&&o.get("clientSecret")||i.clientSecret||"",d=o&&o.get("passwordType")||"basic",m=o&&o.get("scopes")||i.scopes||[];"string"==typeof m&&(m=m.split(i.scopeSeparator||" ")),this.state={appName:i.appName,name:r,schema:n,scopes:m,clientId:p,clientSecret:u,username:c,password:"",passwordType:d}}render(){var e,t;let{schema:r,getComponent:n,authSelectors:s,errSelectors:a,name:o,specSelectors:i}=this.props;const c=n("Input"),p=n("Row"),u=n("Col"),d=n("Button"),m=n("authError"),h=n("JumpToPath",!0),g=n("Markdown",!0),f=n("InitializedInput"),{isOAS3:y}=i;let v=y()?r.get("openIdConnectUrl"):null;const E="implicit",S="password",C=y()?v?"authorization_code":"authorizationCode":"accessCode",b=y()?v?"client_credentials":"clientCredentials":"application";let x=!!(s.getConfigs()||{}).usePkceWithAuthorizationCodeGrant,w=r.get("flow"),A=w===C&&x?w+" with PKCE":w,I=r.get("allowedScopes")||r.get("scopes"),N=!!s.authorized().get(o),R=l()(e=a.allErrors()).call(e,(e=>e.get("authId")===o)),T=!l()(R).call(R,(e=>"validation"===e.get("source"))).size,P=r.get("description");return q().createElement("div",null,q().createElement("h4",null,o," (OAuth2, ",A,") ",q().createElement(h,{path:["securityDefinitions",o]})),this.state.appName?q().createElement("h5",null,"Application: ",this.state.appName," 
"):null,P&&q().createElement(g,{source:r.get("description")}),N&&q().createElement("h6",null,"Authorized"),v&&q().createElement("p",null,"OpenID Connect URL: ",q().createElement("code",null,v)),(w===E||w===C)&&q().createElement("p",null,"Authorization URL: ",q().createElement("code",null,r.get("authorizationUrl"))),(w===S||w===C||w===b)&&q().createElement("p",null,"Token URL:",q().createElement("code",null," ",r.get("tokenUrl"))),q().createElement("p",{className:"flow"},"Flow: ",q().createElement("code",null,A)),w!==S?null:q().createElement(p,null,q().createElement(p,null,q().createElement("label",{htmlFor:"oauth_username"},"username:"),N?q().createElement("code",null," ",this.state.username," "):q().createElement(u,{tablet:10,desktop:10},q().createElement("input",{id:"oauth_username",type:"text","data-name":"username",onChange:this.onInputChange,autoFocus:!0}))),q().createElement(p,null,q().createElement("label",{htmlFor:"oauth_password"},"password:"),N?q().createElement("code",null," ****** "):q().createElement(u,{tablet:10,desktop:10},q().createElement("input",{id:"oauth_password",type:"password","data-name":"password",onChange:this.onInputChange}))),q().createElement(p,null,q().createElement("label",{htmlFor:"password_type"},"Client credentials location:"),N?q().createElement("code",null," ",this.state.passwordType," "):q().createElement(u,{tablet:10,desktop:10},q().createElement("select",{id:"password_type","data-name":"passwordType",onChange:this.onInputChange},q().createElement("option",{value:"basic"},"Authorization header"),q().createElement("option",{value:"request-body"},"Request body"))))),(w===b||w===E||w===C||w===S)&&(!N||N&&this.state.clientId)&&q().createElement(p,null,q().createElement("label",{htmlFor:"client_id"},"client_id:"),N?q().createElement("code",null," ****** "):q().createElement(u,{tablet:10,desktop:10},q().createElement(f,{id:"client_id",type:"text",required:w===S,initialValue:this.state.clientId,"data-name":"clientId",onChange:this.onInputChange}))),(w===b||w===C||w===S)&&q().createElement(p,null,q().createElement("label",{htmlFor:"client_secret"},"client_secret:"),N?q().createElement("code",null," ****** "):q().createElement(u,{tablet:10,desktop:10},q().createElement(f,{id:"client_secret",initialValue:this.state.clientSecret,type:"password","data-name":"clientSecret",onChange:this.onInputChange}))),!N&&I&&I.size?q().createElement("div",{className:"scopes"},q().createElement("h2",null,"Scopes:",q().createElement("a",{onClick:this.selectScopes,"data-all":!0},"select all"),q().createElement("a",{onClick:this.selectScopes},"select none")),_()(I).call(I,((e,t)=>{var r;return q().createElement(p,{key:t},q().createElement("div",{className:"checkbox"},q().createElement(c,{"data-value":t,id:`${t}-${w}-checkbox-${this.state.name}`,disabled:N,checked:Me()(r=this.state.scopes).call(r,t),type:"checkbox",onChange:this.onScopeChange}),q().createElement("label",{htmlFor:`${t}-${w}-checkbox-${this.state.name}`},q().createElement("span",{className:"item"}),q().createElement("div",{className:"text"},q().createElement("p",{className:"name"},t),q().createElement("p",{className:"description"},e)))))})).toArray()):null,_()(t=R.valueSeq()).call(t,((e,t)=>q().createElement(m,{error:e,key:t}))),q().createElement("div",{className:"auth-btn-wrapper"},T&&(N?q().createElement(d,{className:"btn modal-btn auth authorize",onClick:this.logout},"Logout"):q().createElement(d,{className:"btn modal-btn auth authorize",onClick:this.authorize},"Authorize")),q().createElement(d,{className:"btn 
modal-btn auth btn-done",onClick:this.close},"Close")))}}class Le extends N.Component{constructor(){super(...arguments),ce()(this,"onClick",(()=>{let{specActions:e,path:t,method:r}=this.props;e.clearResponse(t,r),e.clearRequest(t,r)}))}render(){return q().createElement("button",{className:"btn btn-clear opblock-control__btn",onClick:this.onClick},"Clear")}}const Ue=e=>{let{headers:t}=e;return q().createElement("div",null,q().createElement("h5",null,"Response headers"),q().createElement("pre",{className:"microlight"},t))},ze=e=>{let{duration:t}=e;return q().createElement("div",null,q().createElement("h5",null,"Request duration"),q().createElement("pre",{className:"microlight"},t," ms"))};class Be extends q().Component{shouldComponentUpdate(e){return this.props.response!==e.response||this.props.path!==e.path||this.props.method!==e.method||this.props.displayRequestDuration!==e.displayRequestDuration}render(){const{response:e,getComponent:t,getConfigs:r,displayRequestDuration:n,specSelectors:s,path:a,method:o}=this.props,{showMutatedRequest:l,requestSnippetsEnabled:i}=r(),p=l?s.mutatedRequestFor(a,o):s.requestFor(a,o),u=e.get("status"),d=p.get("url"),m=e.get("headers").toJS(),h=e.get("notDocumented"),g=e.get("error"),f=e.get("text"),y=e.get("duration"),v=c()(m),E=m["content-type"]||m["Content-Type"],S=t("responseBody"),b=_()(v).call(v,(e=>{var t=C()(m[e])?m[e].join():m[e];return q().createElement("span",{className:"headerline",key:e}," ",e,": ",t," ")})),x=0!==b.length,w=t("Markdown",!0),A=t("RequestSnippets",!0),I=t("curl");return q().createElement("div",null,p&&(!0===i||"true"===i?q().createElement(A,{request:p}):q().createElement(I,{request:p,getConfigs:r})),d&&q().createElement("div",null,q().createElement("div",{className:"request-url"},q().createElement("h4",null,"Request URL"),q().createElement("pre",{className:"microlight"},d))),q().createElement("h4",null,"Server response"),q().createElement("table",{className:"responses-table live-responses-table"},q().createElement("thead",null,q().createElement("tr",{className:"responses-header"},q().createElement("td",{className:"col_header response-col_status"},"Code"),q().createElement("td",{className:"col_header response-col_description"},"Details"))),q().createElement("tbody",null,q().createElement("tr",{className:"response"},q().createElement("td",{className:"response-col_status"},u,h?q().createElement("div",{className:"response-undocumented"},q().createElement("i",null," Undocumented ")):null),q().createElement("td",{className:"response-col_description"},g?q().createElement(w,{source:`${""!==e.get("name")?`${e.get("name")}: `:""}${e.get("message")}`}):null,f?q().createElement(S,{content:f,contentType:E,url:d,headers:m,getConfigs:r,getComponent:t}):null,x?q().createElement(Ue,{headers:b}):null,n&&y?q().createElement(ze,{duration:y}):null)))))}}var $e=r(5623);const Je=["get","put","post","delete","options","head","patch"],Fe=I()(Je).call(Je,["trace"]);class We extends q().Component{constructor(){super(...arguments),ce()(this,"renderOperationTag",((e,t)=>{const{specSelectors:r,getComponent:n,oas3Selectors:s,layoutSelectors:a,layoutActions:o,getConfigs:l}=this.props,i=n("OperationContainer",!0),c=n("OperationTag"),p=e.get("operations");return q().createElement(c,{key:"operation-"+t,tagObj:e,tag:t,oas3Selectors:s,layoutSelectors:a,layoutActions:o,getConfigs:l,getComponent:n,specUrl:r.url()},q().createElement("div",{className:"operation-tag-content"},_()(p).call(p,(e=>{const 
n=e.get("path"),s=e.get("method"),a=P().List(["paths",n,s]),o=r.isOAS3()?Fe:Je;return-1===ue()(o).call(o,s)?null:q().createElement(i,{key:`${n}-${s}`,specPath:a,op:e,path:n,method:s,tag:t})})).toArray()))}))}render(){let{specSelectors:e}=this.props;const t=e.taggedOperations();return 0===t.size?q().createElement("h3",null," No operations defined in spec!"):q().createElement("div",null,_()(t).call(t,this.renderOperationTag).toArray(),t.size<1?q().createElement("h3",null," No operations defined in spec! "):null)}}var He=r(9478),Ke=r.n(He);function Ze(e){return e.match(/^(?:[a-z]+:)?\/\//i)}function Ge(e,t){return e?Ze(e)?(r=e).match(/^\/\//i)?`${window.location.protocol}${r}`:r:new(Ke())(e,t).href:t;var r}function Ye(e,t){let{selectedServer:r=""}=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};if(!e)return;if(Ze(e))return e;const n=Ge(r,t);return Ze(n)?new(Ke())(e,n).href:new(Ke())(e,window.location.href).href}function Xe(e,t){let{selectedServer:r=""}=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};try{return Ye(e,t,{selectedServer:r})}catch{return}}class Qe extends q().Component{render(){const{tagObj:e,tag:t,children:r,oas3Selectors:n,layoutSelectors:s,layoutActions:a,getConfigs:o,getComponent:l,specUrl:i}=this.props;let{docExpansion:c,deepLinking:p}=o();const u=p&&"false"!==p,d=l("Collapse"),m=l("Markdown",!0),h=l("DeepLink"),g=l("Link");let f,y=e.getIn(["tagDetails","description"],null),v=e.getIn(["tagDetails","externalDocs","description"]),E=e.getIn(["tagDetails","externalDocs","url"]);f=(0,L.Wl)(n)&&(0,L.Wl)(n.selectedServer)?Xe(E,i,{selectedServer:n.selectedServer()}):E;let S=["operations-tag",t],C=s.isShown(S,"full"===c||"list"===c);return q().createElement("div",{className:C?"opblock-tag-section is-open":"opblock-tag-section"},q().createElement("h3",{onClick:()=>a.show(S,!C),className:y?"opblock-tag":"opblock-tag no-desc",id:_()(S).call(S,(e=>(0,L.J6)(e))).join("-"),"data-tag":t,"data-is-open":C},q().createElement(h,{enabled:u,isShown:C,path:(0,L.oJ)(t),text:t}),y?q().createElement("small",null,q().createElement(m,{source:y})):q().createElement("small",null),f?q().createElement("div",{className:"info__externaldocs"},q().createElement("small",null,q().createElement(g,{href:(0,L.Nm)(f),onClick:e=>e.stopPropagation(),target:"_blank"},v||f))):null,q().createElement("button",{"aria-expanded":C,className:"expand-operation",title:C?"Collapse operation":"Expand operation",onClick:()=>a.show(S,!C)},q().createElement("svg",{className:"arrow",width:"20",height:"20","aria-hidden":"true",focusable:"false"},q().createElement("use",{href:C?"#large-arrow-up":"#large-arrow-down",xlinkHref:C?"#large-arrow-up":"#large-arrow-down"})))),q().createElement(d,{isOpened:C},r))}}ce()(Qe,"defaultProps",{tagObj:P().fromJS({}),tag:""});class et extends N.PureComponent{render(){let{specPath:e,response:t,request:n,toggleShown:s,onTryoutClick:a,onResetClick:o,onCancelClick:l,onExecute:i,fn:c,getComponent:p,getConfigs:u,specActions:d,specSelectors:m,authActions:h,authSelectors:g,oas3Actions:f,oas3Selectors:y}=this.props,v=this.props.operation,{deprecated:E,isShown:S,path:C,method:b,op:x,tag:w,operationId:A,allowTryItOut:I,displayRequestDuration:N,tryItOutEnabled:R,executeInProgress:T}=v.toJS(),{description:P,externalDocs:k,schemes:O}=x;const M=k?Xe(k.url,m.url(),{selectedServer:y.selectedServer()}):"";let j=v.getIn(["op"]),V=j.get("responses"),D=(0,L.gp)(j,["parameters"]),U=m.operationScheme(C,b),z=["operations",w,A],B=(0,L.nX)(j);const 
$=p("responses"),J=p("parameters"),F=p("execute"),W=p("clear"),H=p("Collapse"),K=p("Markdown",!0),Z=p("schemes"),G=p("OperationServers"),Y=p("OperationExt"),X=p("OperationSummary"),Q=p("Link"),{showExtensions:ee}=u();if(V&&t&&t.size>0){let e=!V.get(String(t.get("status")))&&!V.get("default");t=t.set("notDocumented",e)}let te=[C,b];const re=m.validationErrors([C,b]);return q().createElement("div",{className:E?"opblock opblock-deprecated":S?`opblock opblock-${b} is-open`:`opblock opblock-${b}`,id:(0,L.J6)(z.join("-"))},q().createElement(X,{operationProps:v,isShown:S,toggleShown:s,getComponent:p,authActions:h,authSelectors:g,specPath:e}),q().createElement(H,{isOpened:S},q().createElement("div",{className:"opblock-body"},j&&j.size||null===j?null:q().createElement("img",{height:"32px",width:"32px",src:r(2517),className:"opblock-loading-animation"}),E&&q().createElement("h4",{className:"opblock-title_normal"}," Warning: Deprecated"),P&&q().createElement("div",{className:"opblock-description-wrapper"},q().createElement("div",{className:"opblock-description"},q().createElement(K,{source:P}))),M?q().createElement("div",{className:"opblock-external-docs-wrapper"},q().createElement("h4",{className:"opblock-title_normal"},"Find more details"),q().createElement("div",{className:"opblock-external-docs"},k.description&&q().createElement("span",{className:"opblock-external-docs__description"},q().createElement(K,{source:k.description})),q().createElement(Q,{target:"_blank",className:"opblock-external-docs__link",href:(0,L.Nm)(M)},M))):null,j&&j.size?q().createElement(J,{parameters:D,specPath:e.push("parameters"),operation:j,onChangeKey:te,onTryoutClick:a,onResetClick:o,onCancelClick:l,tryItOutEnabled:R,allowTryItOut:I,fn:c,getComponent:p,specActions:d,specSelectors:m,pathMethod:[C,b],getConfigs:u,oas3Actions:f,oas3Selectors:y}):null,R?q().createElement(G,{getComponent:p,path:C,method:b,operationServers:j.get("servers"),pathServers:m.paths().getIn([C,"servers"]),getSelectedServer:y.selectedServer,setSelectedServer:f.setSelectedServer,setServerVariableValue:f.setServerVariableValue,getServerVariable:y.serverVariableValue,getEffectiveServerValue:y.serverEffectiveValue}):null,R&&I&&O&&O.size?q().createElement("div",{className:"opblock-schemes"},q().createElement(Z,{schemes:O,path:C,method:b,specActions:d,currentScheme:U})):null,!R||!I||re.length<=0?null:q().createElement("div",{className:"validation-errors errors-wrapper"},"Please correct the following validation errors and try again.",q().createElement("ul",null,_()(re).call(re,((e,t)=>q().createElement("li",{key:t}," ",e," "))))),q().createElement("div",{className:R&&t&&I?"btn-group":"execute-wrapper"},R&&I?q().createElement(F,{operation:j,specActions:d,specSelectors:m,oas3Selectors:y,oas3Actions:f,path:C,method:b,onExecute:i,disabled:T}):null,R&&t&&I?q().createElement(W,{specActions:d,path:C,method:b}):null),T?q().createElement("div",{className:"loading-container"},q().createElement("div",{className:"loading"})):null,V?q().createElement($,{responses:V,request:n,tryItOutResponse:t,getComponent:p,getConfigs:u,specSelectors:m,oas3Actions:f,oas3Selectors:y,specActions:d,produces:m.producesOptionsFor([C,b]),producesValue:m.currentProducesFor([C,b]),specPath:e.push("responses"),path:C,method:b,displayRequestDuration:N,fn:c}):null,ee&&B.size?q().createElement(Y,{extensions:B,getComponent:p}):null)))}}ce()(et,"defaultProps",{operation:null,response:null,request:null,specPath:(0,T.List)(),summary:""});const tt=require("lodash/toString");var rt=r.n(tt);class nt 
extends N.PureComponent{render(){let{isShown:e,toggleShown:t,getComponent:r,authActions:n,authSelectors:s,operationProps:a,specPath:o}=this.props,{summary:l,isAuthorized:i,method:c,op:p,showSummary:u,path:d,operationId:m,originalOperationId:h,displayOperationId:g}=a.toJS(),{summary:f}=p,y=a.get("security");const v=r("authorizeOperationBtn"),E=r("OperationSummaryMethod"),S=r("OperationSummaryPath"),C=r("JumpToPath",!0),b=r("CopyToClipboardBtn",!0),x=y&&!!y.count(),w=x&&1===y.size&&y.first().isEmpty(),_=!x||w;return q().createElement("div",{className:`opblock-summary opblock-summary-${c}`},q().createElement("button",{"aria-label":`${c} ${d.replace(/\//g,"​/")}`,"aria-expanded":e,className:"opblock-summary-control",onClick:t},q().createElement(E,{method:c}),q().createElement(S,{getComponent:r,operationProps:a,specPath:o}),u?q().createElement("div",{className:"opblock-summary-description"},rt()(f||l)):null,g&&(h||m)?q().createElement("span",{className:"opblock-summary-operation-id"},h||m):null,q().createElement("svg",{className:"arrow",width:"20",height:"20","aria-hidden":"true",focusable:"false"},q().createElement("use",{href:e?"#large-arrow-up":"#large-arrow-down",xlinkHref:e?"#large-arrow-up":"#large-arrow-down"}))),_?null:q().createElement(v,{isAuthorized:i,onClick:()=>{const e=s.definitionsForRequirements(y);n.showDefinitions(e)}}),q().createElement(b,{textToCopy:`${o.get(1)}`}),q().createElement(C,{path:o}))}}ce()(nt,"defaultProps",{operationProps:null,specPath:(0,T.List)(),summary:""});class st extends N.PureComponent{render(){let{method:e}=this.props;return q().createElement("span",{className:"opblock-summary-method"},e.toUpperCase())}}ce()(st,"defaultProps",{operationProps:null});const at=require("@babel/runtime-corejs3/core-js-stable/instance/splice");var ot=r.n(at);class lt extends N.PureComponent{render(){let{getComponent:e,operationProps:t}=this.props,{deprecated:r,isShown:n,path:s,tag:a,operationId:o,isDeepLinkingEnabled:l}=t.toJS();const i=s.split(/(?=\/)/g);for(let e=1;e{var t;let{extensions:r,getComponent:n}=e,s=n("OperationExtRow");return q().createElement("div",{className:"opblock-section"},q().createElement("div",{className:"opblock-section-header"},q().createElement("h4",null,"Extensions")),q().createElement("div",{className:"table-container"},q().createElement("table",null,q().createElement("thead",null,q().createElement("tr",null,q().createElement("td",{className:"col_header"},"Field"),q().createElement("td",{className:"col_header"},"Value"))),q().createElement("tbody",null,_()(t=r.entrySeq()).call(t,(e=>{let[t,r]=e;return q().createElement(s,{key:`${t}-${r}`,xKey:t,xVal:r})}))))))},ct=e=>{let{xKey:t,xVal:r}=e;const n=r?r.toJS?r.toJS():r:null;return q().createElement("tr",null,q().createElement("td",null,t),q().createElement("td",null,u()(n)))};var pt=r(4235),ut=r.n(pt),dt=r(9003),mt=r.n(dt),ht=r(6068),gt=r(1712),ft=r.n(gt),yt=r(5716),vt=r.n(yt);const Et=require("js-file-download");var St=r.n(Et),Ct=r(2807);const bt=e=>{let{value:t,fileName:r,className:n,downloadable:s,getConfigs:a,canCopy:o,language:i}=e;const c=vt()(a)?a():null,p=!1!==ft()(c,"syntaxHighlight")&&ft()(c,"syntaxHighlight.activated",!0),u=(0,N.useRef)(null);(0,N.useEffect)((()=>{var e;const t=l()(e=Te()(u.current.childNodes)).call(e,(e=>!!e.nodeType&&e.classList.contains("microlight")));return ut()(t).call(t,(e=>e.addEventListener("mousewheel",d,{passive:!1}))),()=>{ut()(t).call(t,(e=>e.removeEventListener("mousewheel",d)))}}),[t,n,i]);const 
d=e=>{const{target:t,deltaY:r}=e,{scrollHeight:n,offsetHeight:s,scrollTop:a}=t;n>s&&(0===a&&r<0||s+a>=n&&r>0)&&e.preventDefault()};return q().createElement("div",{className:"highlight-code",ref:u},s?q().createElement("div",{className:"download-contents",onClick:()=>{St()(t,r)}},"Download"):null,o&&q().createElement("div",{className:"copy-to-clipboard"},q().createElement(Ct.CopyToClipboard,{text:t},q().createElement("button",null))),p?q().createElement(ht.d3,{language:i,className:mt()(n,"microlight"),style:(0,ht.C2)(ft()(c,"syntaxHighlight.theme","agate"))},t):q().createElement("pre",{className:mt()(n,"microlight")},t))};bt.defaultProps={fileName:"response.txt"};const xt=bt;class wt extends q().Component{constructor(){super(...arguments),ce()(this,"onChangeProducesWrapper",(e=>this.props.specActions.changeProducesValue([this.props.path,this.props.method],e))),ce()(this,"onResponseContentTypeChange",(e=>{let{controlsAcceptHeader:t,value:r}=e;const{oas3Actions:n,path:s,method:a}=this.props;t&&n.setResponseContentType({value:r,path:s,method:a})}))}render(){var e;let{responses:t,tryItOutResponse:r,getComponent:n,getConfigs:s,specSelectors:a,fn:o,producesValue:l,displayRequestDuration:i,specPath:c,path:p,method:u,oas3Selectors:d,oas3Actions:m}=this.props,h=(0,L.iQ)(t);const g=n("contentType"),f=n("liveResponse"),y=n("response");let v=this.props.produces&&this.props.produces.size?this.props.produces:wt.defaultProps.produces;const E=a.isOAS3()?(0,L.QG)(t):null,S=function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"_";return e.replace(/[^\w-]/g,t)}(`${u}${p}_responses`),C=`${S}_select`;return q().createElement("div",{className:"responses-wrapper"},q().createElement("div",{className:"opblock-section-header"},q().createElement("h4",null,"Responses"),a.isOAS3()?null:q().createElement("label",{htmlFor:C},q().createElement("span",null,"Response content type"),q().createElement(g,{value:l,ariaControls:S,ariaLabel:"Response content type",className:"execute-content-type",contentTypes:v,controlId:C,onChange:this.onChangeProducesWrapper}))),q().createElement("div",{className:"responses-inner"},r?q().createElement("div",null,q().createElement(f,{response:r,getComponent:n,getConfigs:s,specSelectors:a,path:this.props.path,method:this.props.method,displayRequestDuration:i}),q().createElement("h4",null,"Responses")):null,q().createElement("table",{"aria-live":"polite",className:"responses-table",id:S,role:"region"},q().createElement("thead",null,q().createElement("tr",{className:"responses-header"},q().createElement("td",{className:"col_header response-col_status"},"Code"),q().createElement("td",{className:"col_header response-col_description"},"Description"),a.isOAS3()?q().createElement("td",{className:"col col_header response-col_links"},"Links"):null)),q().createElement("tbody",null,_()(e=t.entrySeq()).call(e,(e=>{let[t,i]=e,g=r&&r.get("status")==t?"response_current":"";return q().createElement(y,{key:t,path:p,method:u,specPath:c.push(t),isDefault:h===t,fn:o,className:g,code:t,response:i,specSelectors:a,controlsAcceptHeader:i===E,onContentTypeChange:this.onResponseContentTypeChange,contentType:l,getConfigs:s,activeExamplesKey:d.activeExamplesMember(p,u,"responses",t),oas3Actions:m,getComponent:n})})).toArray()))))}}ce()(wt,"defaultProps",{tryItOutResponse:null,produces:(0,T.fromJS)(["application/json"]),displayRequestDuration:!1});const _t=require("@babel/runtime-corejs3/core-js-stable/instance/values");var At=r.n(_t),It=r(2518);class Nt extends 
q().Component{constructor(e,t){super(e,t),ce()(this,"_onContentTypeChange",(e=>{const{onContentTypeChange:t,controlsAcceptHeader:r}=this.props;this.setState({responseContentType:e}),t({value:e,controlsAcceptHeader:r})})),ce()(this,"getTargetExamplesKey",(()=>{const{response:e,contentType:t,activeExamplesKey:r}=this.props,n=this.state.responseContentType||t,s=e.getIn(["content",n],(0,T.Map)({})).get("examples",null).keySeq().first();return r||s})),this.state={responseContentType:""}}render(){var e,t;let{path:r,method:n,code:s,response:a,className:o,specPath:l,fn:i,getComponent:c,getConfigs:p,specSelectors:u,contentType:d,controlsAcceptHeader:m,oas3Actions:h}=this.props,{inferSchema:g}=i,f=u.isOAS3();const{showExtensions:y}=p();let v=y?(0,L.nX)(a):null,E=a.get("headers"),S=a.get("links");const C=c("ResponseExtension"),b=c("headers"),x=c("highlightCode"),w=c("modelExample"),A=c("Markdown",!0),I=c("operationLink"),N=c("contentType"),R=c("ExamplesSelect"),P=c("Example");var k,O;const M=this.state.responseContentType||d,j=a.getIn(["content",M],(0,T.Map)({})),V=j.get("examples",null);if(f){const e=j.get("schema");k=e?g(e.toJS()):null,O=e?(0,T.List)(["content",this.state.responseContentType,"schema"]):l}else k=a.get("schema"),O=a.has("schema")?l.push("schema"):l;let D,U,z=!1,B={includeReadOnly:!0};if(f){var $;if(U=null===($=j.get("schema"))||void 0===$?void 0:$.toJS(),V){const e=this.getTargetExamplesKey(),t=e=>e.get("value");D=t(V.get(e,(0,T.Map)({}))),void 0===D&&(D=t(At()(V).call(V).next().value)),z=!0}else void 0!==j.get("example")&&(D=j.get("example"),z=!0)}else{U=k,B={...B,includeWriteOnly:!0};const e=a.getIn(["examples",M]);e&&(D=e,z=!0)}let J=((e,t,r)=>{if(null!=e){let n=null;return(0,It.O)(e)&&(n="json"),q().createElement("div",null,q().createElement(t,{className:"example",getConfigs:r,language:n,value:(0,L.Pz)(e)}))}return null})((0,L.xi)(U,M,B,z?D:void 0),x,p);return q().createElement("tr",{className:"response "+(o||""),"data-code":s},q().createElement("td",{className:"response-col_status"},s),q().createElement("td",{className:"response-col_description"},q().createElement("div",{className:"response-col_description__inner"},q().createElement(A,{source:a.get("description")})),y&&v.size?_()(e=v.entrySeq()).call(e,(e=>{let[t,r]=e;return q().createElement(C,{key:`${t}-${r}`,xKey:t,xVal:r})})):null,f&&a.get("content")?q().createElement("section",{className:"response-controls"},q().createElement("div",{className:mt()("response-control-media-type",{"response-control-media-type--accept-controller":m})},q().createElement("small",{className:"response-control-media-type__title"},"Media type"),q().createElement(N,{value:this.state.responseContentType,contentTypes:a.get("content")?a.get("content").keySeq():(0,T.Seq)(),onChange:this._onContentTypeChange,ariaLabel:"Media Type"}),m?q().createElement("small",{className:"response-control-media-type__accept-message"},"Controls ",q().createElement("code",null,"Accept")," 
header."):null),V?q().createElement("div",{className:"response-control-examples"},q().createElement("small",{className:"response-control-examples__title"},"Examples"),q().createElement(R,{examples:V,currentExampleKey:this.getTargetExamplesKey(),onSelect:e=>h.setActiveExamplesMember({name:e,pathMethod:[r,n],contextType:"responses",contextName:s}),showLabels:!1})):null):null,J||k?q().createElement(w,{specPath:O,getComponent:c,getConfigs:p,specSelectors:u,schema:(0,L.oG)(k),example:J,includeReadOnly:!0}):null,f&&V?q().createElement(P,{example:V.get(this.getTargetExamplesKey(),(0,T.Map)({})),getComponent:c,getConfigs:p,omitValue:!0}):null,E?q().createElement(b,{headers:E,getComponent:c}):null),f?q().createElement("td",{className:"response-col_links"},S?_()(t=S.toSeq().entrySeq()).call(t,(e=>{let[t,r]=e;return q().createElement(I,{key:t,name:t,link:r,getComponent:c})})):q().createElement("i",null,"No links")):null)}}ce()(Nt,"defaultProps",{response:(0,T.fromJS)({}),onContentTypeChange:()=>{}});const qt=e=>{let{xKey:t,xVal:r}=e;return q().createElement("div",{className:"response__extension"},t,": ",String(r))},Rt=require("xml-but-prettier");var Tt=r.n(Rt);const Pt=require("lodash/toLower");var kt=r.n(Pt);class Ot extends q().PureComponent{constructor(){super(...arguments),ce()(this,"state",{parsedContent:null}),ce()(this,"updateParsedContent",(e=>{const{content:t}=this.props;if(e!==t)if(t&&t instanceof Blob){var r=new FileReader;r.onload=()=>{this.setState({parsedContent:r.result})},r.readAsText(t)}else this.setState({parsedContent:t.toString()})}))}componentDidMount(){this.updateParsedContent(null)}componentDidUpdate(e){this.updateParsedContent(e.content)}render(){let{content:e,contentType:t,url:r,headers:n={},getConfigs:s,getComponent:o}=this.props;const{parsedContent:l}=this.state,i=o("highlightCode"),c="response_"+(new Date).getTime();let p,d;if(r=r||"",/^application\/octet-stream/i.test(t)||n["Content-Disposition"]&&/attachment/i.test(n["Content-Disposition"])||n["content-disposition"]&&/attachment/i.test(n["content-disposition"])||n["Content-Description"]&&/File Transfer/i.test(n["Content-Description"])||n["content-description"]&&/File Transfer/i.test(n["content-description"]))if("Blob"in window){let s=t||"text/html",o=e instanceof Blob?e:new Blob([e],{type:s}),l=Ke().createObjectURL(o),i=[s,r.substr(a()(r).call(r,"/")+1),l].join(":"),c=n["content-disposition"]||n["Content-Disposition"];if(void 0!==c){let e=(0,L.DR)(c);null!==e&&(i=e)}d=D.Z.navigator&&D.Z.navigator.msSaveOrOpenBlob?q().createElement("div",null,q().createElement("a",{href:l,onClick:()=>D.Z.navigator.msSaveOrOpenBlob(o,i)},"Download file")):q().createElement("div",null,q().createElement("a",{href:l,download:i},"Download file"))}else d=q().createElement("pre",{className:"microlight"},"Download headers detected but your browser does not support downloading binary via XHR (Blob).");else if(/json/i.test(t)){let t=null;(0,It.O)(e)&&(t="json");try{p=u()(JSON.parse(e),null," ")}catch(t){p="can't parse JSON. 
Raw result:\n\n"+e}d=q().createElement(i,{language:t,downloadable:!0,fileName:`${c}.json`,value:p,getConfigs:s,canCopy:!0})}else/xml/i.test(t)?(p=Tt()(e,{textNodesOnSameLine:!0,indentor:" "}),d=q().createElement(i,{downloadable:!0,fileName:`${c}.xml`,value:p,getConfigs:s,canCopy:!0})):d="text/html"===kt()(t)||/text\/plain/.test(t)?q().createElement(i,{downloadable:!0,fileName:`${c}.html`,value:e,getConfigs:s,canCopy:!0}):"text/csv"===kt()(t)||/text\/csv/.test(t)?q().createElement(i,{downloadable:!0,fileName:`${c}.csv`,value:e,getConfigs:s,canCopy:!0}):/^image\//i.test(t)?Me()(t).call(t,"svg")?q().createElement("div",null," ",e," "):q().createElement("img",{src:Ke().createObjectURL(e)}):/^audio\//i.test(t)?q().createElement("pre",{className:"microlight"},q().createElement("audio",{controls:!0,key:r},q().createElement("source",{src:r,type:t}))):"string"==typeof e?q().createElement(i,{downloadable:!0,fileName:`${c}.txt`,value:e,getConfigs:s,canCopy:!0}):e.size>0?l?q().createElement("div",null,q().createElement("p",{className:"i"},"Unrecognized response type; displaying content as text."),q().createElement(i,{downloadable:!0,fileName:`${c}.txt`,value:l,getConfigs:s,canCopy:!0})):q().createElement("p",{className:"i"},"Unrecognized response type; unable to display."):null;return d?q().createElement("div",null,q().createElement("h5",null,"Response body"),d):null}}var Mt=r(9968),jt=r.n(Mt);class Vt extends N.Component{constructor(e){super(e),ce()(this,"onChange",((e,t,r)=>{let{specActions:{changeParamByIdentity:n},onChangeKey:s}=this.props;n(s,e,t,r)})),ce()(this,"onChangeConsumesWrapper",(e=>{let{specActions:{changeConsumesValue:t},onChangeKey:r}=this.props;t(r,e)})),ce()(this,"toggleTab",(e=>"parameters"===e?this.setState({parametersVisible:!0,callbackVisible:!1}):"callbacks"===e?this.setState({callbackVisible:!0,parametersVisible:!1}):void 0)),ce()(this,"onChangeMediaType",(e=>{let{value:t,pathMethod:r}=e,{specActions:n,oas3Selectors:s,oas3Actions:a}=this.props;const o=s.hasUserEditedBody(...r),l=s.shouldRetainRequestBodyValue(...r);a.setRequestContentType({value:t,pathMethod:r}),a.initRequestBodyValidateError({pathMethod:r}),o||(l||a.setRequestBodyValue({value:void 0,pathMethod:r}),n.clearResponse(...r),n.clearRequest(...r),n.clearValidateParams(r))})),this.state={callbackVisible:!1,parametersVisible:!0}}render(){var e;let{onTryoutClick:t,onResetClick:r,parameters:n,allowTryItOut:s,tryItOutEnabled:a,specPath:o,fn:l,getComponent:i,getConfigs:c,specSelectors:p,specActions:u,pathMethod:d,oas3Actions:m,oas3Selectors:h,operation:g}=this.props;const f=i("parameterRow"),y=i("TryItOutButton"),v=i("contentType"),S=i("Callbacks",!0),C=i("RequestBody",!0),b=a&&s,w=p.isOAS3(),A=g.get("requestBody"),N=x()(e=jt()(x()(n).call(n,((e,t)=>{const r=t.get("in");return e[r]??(e[r]=[]),e[r].push(t),e}),{}))).call(e,((e,t)=>I()(e).call(e,t)),[]);return q().createElement("div",{className:"opblock-section"},q().createElement("div",{className:"opblock-section-header"},w?q().createElement("div",{className:"tab-header"},q().createElement("div",{onClick:()=>this.toggleTab("parameters"),className:`tab-item ${this.state.parametersVisible&&"active"}`},q().createElement("h4",{className:"opblock-title"},q().createElement("span",null,"Parameters"))),g.get("callbacks")?q().createElement("div",{onClick:()=>this.toggleTab("callbacks"),className:`tab-item 
${this.state.callbackVisible&&"active"}`},q().createElement("h4",{className:"opblock-title"},q().createElement("span",null,"Callbacks"))):null):q().createElement("div",{className:"tab-header"},q().createElement("h4",{className:"opblock-title"},"Parameters")),s?q().createElement(y,{isOAS3:p.isOAS3(),hasUserEditedBody:h.hasUserEditedBody(...d),enabled:a,onCancelClick:this.props.onCancelClick,onTryoutClick:t,onResetClick:()=>r(d)}):null),this.state.parametersVisible?q().createElement("div",{className:"parameters-container"},N.length?q().createElement("div",{className:"table-container"},q().createElement("table",{className:"parameters"},q().createElement("thead",null,q().createElement("tr",null,q().createElement("th",{className:"col_header parameters-col_name"},"Name"),q().createElement("th",{className:"col_header parameters-col_description"},"Description"))),q().createElement("tbody",null,_()(N).call(N,((e,t)=>q().createElement(f,{fn:l,specPath:o.push(t.toString()),getComponent:i,getConfigs:c,rawParam:e,param:p.parameterWithMetaByIdentity(d,e),key:`${e.get("in")}.${e.get("name")}`,onChange:this.onChange,onChangeConsumes:this.onChangeConsumesWrapper,specSelectors:p,specActions:u,oas3Actions:m,oas3Selectors:h,pathMethod:d,isExecute:b})))))):q().createElement("div",{className:"opblock-description-wrapper"},q().createElement("p",null,"No parameters"))):null,this.state.callbackVisible?q().createElement("div",{className:"callbacks-container opblock-description-wrapper"},q().createElement(S,{callbacks:(0,T.Map)(g.get("callbacks")),specPath:E()(o).call(o,0,-1).push("callbacks")})):null,w&&A&&this.state.parametersVisible&&q().createElement("div",{className:"opblock-section opblock-section-request-body"},q().createElement("div",{className:"opblock-section-header"},q().createElement("h4",{className:`opblock-title parameter__name ${A.get("required")&&"required"}`},"Request body"),q().createElement("label",null,q().createElement(v,{value:h.requestContentType(...d),contentTypes:A.get("content",(0,T.List)()).keySeq(),onChange:e=>{this.onChangeMediaType({value:e,pathMethod:d})},className:"body-param-content-type",ariaLabel:"Request content type"}))),q().createElement("div",{className:"opblock-description-wrapper"},q().createElement(C,{setRetainRequestBodyValueFlag:e=>m.setRetainRequestBodyValueFlag({value:e,pathMethod:d}),userHasEditedBody:h.hasUserEditedBody(...d),specPath:E()(o).call(o,0,-1).push("requestBody"),requestBody:A,requestBodyValue:h.requestBodyValue(...d),requestBodyInclusionSetting:h.requestBodyInclusionSetting(...d),requestBodyErrors:h.requestBodyErrors(...d),isExecute:b,getConfigs:c,activeExamplesKey:h.activeExamplesMember(...d,"requestBody","requestBody"),updateActiveExamplesKey:e=>{this.props.oas3Actions.setActiveExamplesMember({name:e,pathMethod:this.props.pathMethod,contextType:"requestBody",contextName:"requestBody"})},onChange:(e,t)=>{if(t){const r=h.requestBodyValue(...d),n=T.Map.isMap(r)?r:(0,T.Map)();return m.setRequestBodyValue({pathMethod:d,value:n.setIn(t,e)})}m.setRequestBodyValue({value:e,pathMethod:d})},onChangeIncludeEmpty:(e,t)=>{m.setRequestBodyInclusion({pathMethod:d,value:t,name:e})},contentType:h.requestContentType(...d)}))))}}ce()(Vt,"defaultProps",{onTryoutClick:Function.prototype,onCancelClick:Function.prototype,tryItOutEnabled:!1,allowTryItOut:!0,onChangeKey:[],specPath:[]});const Dt=e=>{let{xKey:t,xVal:r}=e;return q().createElement("div",{className:"parameter__extension"},t,": ",String(r))},Lt={onChange:()=>{},isIncludedOptions:{}};class Ut extends 
N.Component{constructor(){super(...arguments),ce()(this,"onCheckboxChange",(e=>{const{onChange:t}=this.props;t(e.target.checked)}))}componentDidMount(){const{isIncludedOptions:e,onChange:t}=this.props,{shouldDispatchInit:r,defaultValue:n}=e;r&&t(n)}render(){let{isIncluded:e,isDisabled:t}=this.props;return q().createElement("div",null,q().createElement("label",{className:mt()("parameter__empty_value_toggle",{disabled:t})},q().createElement("input",{type:"checkbox",disabled:t,checked:!t&&e,onChange:this.onCheckboxChange}),"Send empty value"))}}ce()(Ut,"defaultProps",Lt);var zt=r(9069);class Bt extends N.Component{constructor(e,t){var r;super(e,t),r=this,ce()(this,"onChangeWrapper",(function(e){let t,n=arguments.length>1&&void 0!==arguments[1]&&arguments[1],{onChange:s,rawParam:a}=r.props;return t=""===e||e&&0===e.size?null:e,s(a,t,n)})),ce()(this,"_onExampleSelect",(e=>{this.props.oas3Actions.setActiveExamplesMember({name:e,pathMethod:this.props.pathMethod,contextType:"parameters",contextName:this.getParamKey()})})),ce()(this,"onChangeIncludeEmpty",(e=>{let{specActions:t,param:r,pathMethod:n}=this.props;const s=r.get("name"),a=r.get("in");return t.updateEmptyParamInclusion(n,s,a,e)})),ce()(this,"setDefaultValue",(()=>{let{specSelectors:e,pathMethod:t,rawParam:r,oas3Selectors:n}=this.props;const s=e.parameterWithMetaByIdentity(t,r)||(0,T.Map)(),{schema:a}=(0,zt.Z)(s,{isOAS3:e.isOAS3()}),o=s.get("content",(0,T.Map)()).keySeq().first(),l=a?(0,L.xi)(a.toJS(),o,{includeWriteOnly:!0}):null;if(s&&void 0===s.get("value")&&"body"!==s.get("in")){let r;if(e.isSwagger2())r=void 0!==s.get("x-example")?s.get("x-example"):void 0!==s.getIn(["schema","example"])?s.getIn(["schema","example"]):a&&a.getIn(["default"]);else if(e.isOAS3()){const e=n.activeExamplesMember(...t,"parameters",this.getParamKey());r=void 0!==s.getIn(["examples",e,"value"])?s.getIn(["examples",e,"value"]):void 0!==s.getIn(["content",o,"example"])?s.getIn(["content",o,"example"]):void 0!==s.get("example")?s.get("example"):void 0!==(a&&a.get("example"))?a&&a.get("example"):void 0!==(a&&a.get("default"))?a&&a.get("default"):s.get("default")}void 0===r||T.List.isList(r)||(r=(0,L.Pz)(r)),void 0!==r?this.onChangeWrapper(r):a&&"object"===a.get("type")&&l&&!s.get("examples")&&this.onChangeWrapper(T.List.isList(l)?l:(0,L.Pz)(l))}})),this.setDefaultValue()}UNSAFE_componentWillReceiveProps(e){let t,{specSelectors:r,pathMethod:n,rawParam:s}=e,a=r.isOAS3(),o=r.parameterWithMetaByIdentity(n,s)||new T.Map;if(o=o.isEmpty()?s:o,a){let{schema:e}=(0,zt.Z)(o,{isOAS3:a});t=e?e.get("enum"):void 0}else t=o?o.get("enum"):void 0;let l,i=o?o.get("value"):void 0;void 0!==i?l=i:s.get("required")&&t&&t.size&&(l=t.first()),void 0!==l&&l!==i&&this.onChangeWrapper((0,L.D$)(l)),this.setDefaultValue()}getParamKey(){const{param:e}=this.props;return e?`${e.get("name")}-${e.get("in")}`:null}render(){var e,t;let{param:r,rawParam:n,getComponent:s,getConfigs:a,isExecute:o,fn:l,onChangeConsumes:i,specSelectors:c,pathMethod:p,specPath:u,oas3Selectors:d}=this.props,m=c.isOAS3();const{showExtensions:h,showCommonExtensions:g}=a();if(r||(r=n),!n)return null;const f=s("JsonSchemaForm"),y=s("ParamBody");let v=r.get("in"),E="body"!==v?null:q().createElement(y,{getComponent:s,getConfigs:a,fn:l,param:r,consumes:c.consumesOptionsFor(p),consumesValue:c.contentTypeValues(p).get("requestContentType"),onChange:this.onChangeWrapper,onChangeConsumes:i,isExecute:o,specSelectors:c,pathMethod:p});const 
S=s("modelExample"),C=s("Markdown",!0),b=s("ParameterExt"),x=s("ParameterIncludeEmpty"),w=s("ExamplesSelectValueRetainer"),A=s("Example");let I,N,R,P,{schema:k}=(0,zt.Z)(r,{isOAS3:m}),O=c.parameterWithMetaByIdentity(p,n)||(0,T.Map)(),M=k?k.get("format"):null,j=k?k.get("type"):null,V=k?k.getIn(["items","type"]):null,U="formData"===v,z="FormData"in D.Z,B=r.get("required"),$=O?O.get("value"):"",J=g?(0,L.po)(k):null,F=h?(0,L.nX)(r):null,W=!1;return void 0!==r&&k&&(I=k.get("items")),void 0!==I?(N=I.get("enum"),R=I.get("default")):k&&(N=k.get("enum")),N&&N.size&&N.size>0&&(W=!0),void 0!==r&&(k&&(R=k.get("default")),void 0===R&&(R=r.get("default")),P=r.get("example"),void 0===P&&(P=r.get("x-example"))),q().createElement("tr",{"data-param-name":r.get("name"),"data-param-in":r.get("in")},q().createElement("td",{className:"parameters-col_name"},q().createElement("div",{className:B?"parameter__name required":"parameter__name"},r.get("name"),B?q().createElement("span",null," *"):null),q().createElement("div",{className:"parameter__type"},j,V&&`[${V}]`,M&&q().createElement("span",{className:"prop-format"},"($",M,")")),q().createElement("div",{className:"parameter__deprecated"},m&&r.get("deprecated")?"deprecated":null),q().createElement("div",{className:"parameter__in"},"(",r.get("in"),")"),g&&J.size?_()(e=J.entrySeq()).call(e,(e=>{let[t,r]=e;return q().createElement(b,{key:`${t}-${r}`,xKey:t,xVal:r})})):null,h&&F.size?_()(t=F.entrySeq()).call(t,(e=>{let[t,r]=e;return q().createElement(b,{key:`${t}-${r}`,xKey:t,xVal:r})})):null),q().createElement("td",{className:"parameters-col_description"},r.get("description")?q().createElement(C,{source:r.get("description")}):null,!E&&o||!W?null:q().createElement(C,{className:"parameter__enum",source:"Available values : "+_()(N).call(N,(function(e){return e})).toArray().join(", ")}),!E&&o||void 0===R?null:q().createElement(C,{className:"parameter__default",source:"Default value : "+R}),!E&&o||void 0===P?null:q().createElement(C,{source:"Example : "+P}),U&&!z&&q().createElement("div",null,"Error: your browser does not support FormData"),m&&r.get("examples")?q().createElement("section",{className:"parameter-controls"},q().createElement(w,{examples:r.get("examples"),onSelect:this._onExampleSelect,updateValue:this.onChangeWrapper,getComponent:s,defaultToFirstExample:!0,currentKey:d.activeExamplesMember(...p,"parameters",this.getParamKey()),currentUserInputValue:$})):null,E?null:q().createElement(f,{fn:l,getComponent:s,value:$,required:B,disabled:!o,description:r.get("name"),onChange:this.onChangeWrapper,errors:O.get("errors"),schema:k}),E&&k?q().createElement(S,{getComponent:s,specPath:u.push("schema"),getConfigs:a,isExecute:o,specSelectors:c,schema:k,example:E,includeWriteOnly:!0}):null,!E&&o&&r.get("allowEmptyValue")?q().createElement(x,{onChange:this.onChangeIncludeEmpty,isIncluded:c.parameterInclusionSettingFor(p,r.get("name"),r.get("in")),isDisabled:!(0,L.O2)($)}):null,m&&r.get("examples")?q().createElement(A,{example:r.getIn(["examples",d.activeExamplesMember(...p,"parameters",this.getParamKey())]),getComponent:s,getConfigs:a}):null))}}var $t=r(9300),Jt=r.n($t);class Ft extends N.Component{constructor(){super(...arguments),ce()(this,"handleValidateParameters",(()=>{let{specSelectors:e,specActions:t,path:r,method:n}=this.props;return 
t.validateParams([r,n]),e.validateBeforeExecute([r,n])})),ce()(this,"handleValidateRequestBody",(()=>{let{path:e,method:t,specSelectors:r,oas3Selectors:n,oas3Actions:s}=this.props,a={missingBodyValue:!1,missingRequiredKeys:[]};s.clearRequestBodyValidateError({path:e,method:t});let o=r.getOAS3RequiredRequestBodyContentType([e,t]),l=n.requestBodyValue(e,t),i=n.validateBeforeExecute([e,t]),c=n.requestContentType(e,t);if(!i)return a.missingBodyValue=!0,s.setRequestBodyValidateError({path:e,method:t,validationErrors:a}),!1;if(!o)return!0;let p=n.validateShallowRequired({oas3RequiredRequestBodyContentType:o,oas3RequestContentType:c,oas3RequestBodyValue:l});return!p||p.length<1||(ut()(p).call(p,(e=>{a.missingRequiredKeys.push(e)})),s.setRequestBodyValidateError({path:e,method:t,validationErrors:a}),!1)})),ce()(this,"handleValidationResultPass",(()=>{let{specActions:e,operation:t,path:r,method:n}=this.props;this.props.onExecute&&this.props.onExecute(),e.execute({operation:t,path:r,method:n})})),ce()(this,"handleValidationResultFail",(()=>{let{specActions:e,path:t,method:r}=this.props;e.clearValidateParams([t,r]),Jt()((()=>{e.validateParams([t,r])}),40)})),ce()(this,"handleValidationResult",(e=>{e?this.handleValidationResultPass():this.handleValidationResultFail()})),ce()(this,"onClick",(()=>{let e=this.handleValidateParameters(),t=this.handleValidateRequestBody(),r=e&&t;this.handleValidationResult(r)})),ce()(this,"onChangeProducesWrapper",(e=>this.props.specActions.changeProducesValue([this.props.path,this.props.method],e)))}render(){const{disabled:e}=this.props;return q().createElement("button",{className:"btn execute opblock-control__btn",onClick:this.onClick,disabled:e},"Execute")}}class Wt extends q().Component{render(){var e;let{headers:t,getComponent:r}=this.props;const n=r("Property"),s=r("Markdown",!0);return t&&t.size?q().createElement("div",{className:"headers-wrapper"},q().createElement("h4",{className:"headers__title"},"Headers:"),q().createElement("table",{className:"headers"},q().createElement("thead",null,q().createElement("tr",{className:"header-row"},q().createElement("th",{className:"header-col"},"Name"),q().createElement("th",{className:"header-col"},"Description"),q().createElement("th",{className:"header-col"},"Type"))),q().createElement("tbody",null,_()(e=t.entrySeq()).call(e,(e=>{let[t,r]=e;if(!P().Map.isMap(r))return null;const a=r.get("description"),o=r.getIn(["schema"])?r.getIn(["schema","type"]):r.getIn(["type"]),l=r.getIn(["schema","example"]);return q().createElement("tr",{key:t},q().createElement("td",{className:"header-col"},t),q().createElement("td",{className:"header-col"},a?q().createElement(s,{source:a}):null),q().createElement("td",{className:"header-col"},o," ",l?q().createElement(n,{propKey:"Example",propVal:l,propClass:"header-example"}):null))})).toArray()))):null}}class Ht extends q().Component{render(){let{editorActions:e,errSelectors:t,layoutSelectors:r,layoutActions:n,getComponent:s}=this.props;const a=s("Collapse");if(e&&e.jumpToLine)var o=e.jumpToLine;let i=t.allErrors(),c=l()(i).call(i,(e=>"thrown"===e.get("type")||"error"===e.get("level")));if(!c||c.count()<1)return null;let p=r.isShown(["errorPane"],!0),u=c.sortBy((e=>e.get("line")));return q().createElement("pre",{className:"errors-wrapper"},q().createElement("hgroup",{className:"error"},q().createElement("h4",{className:"errors__title"},"Errors"),q().createElement("button",{className:"btn 
errors__clear-btn",onClick:()=>n.show(["errorPane"],!p)},p?"Hide":"Show")),q().createElement(a,{isOpened:p,animated:!0},q().createElement("div",{className:"errors"},_()(u).call(u,((e,t)=>{let r=e.get("type");return"thrown"===r||"auth"===r?q().createElement(Kt,{key:t,error:e.get("error")||e,jumpToLine:o}):"spec"===r?q().createElement(Zt,{key:t,error:e,jumpToLine:o}):void 0})))))}}const Kt=e=>{let{error:t,jumpToLine:r}=e;if(!t)return null;let n=t.get("line");return q().createElement("div",{className:"error-wrapper"},t?q().createElement("div",null,q().createElement("h4",null,t.get("source")&&t.get("level")?Gt(t.get("source"))+" "+t.get("level"):"",t.get("path")?q().createElement("small",null," at ",t.get("path")):null),q().createElement("span",{className:"message thrown"},t.get("message")),q().createElement("div",{className:"error-line"},n&&r?q().createElement("a",{onClick:g()(r).call(r,null,n)},"Jump to line ",n):null)):null)},Zt=e=>{let{error:t,jumpToLine:r}=e,n=null;return t.get("path")?n=T.List.isList(t.get("path"))?q().createElement("small",null,"at ",t.get("path").join(".")):q().createElement("small",null,"at ",t.get("path")):t.get("line")&&!r&&(n=q().createElement("small",null,"on line ",t.get("line"))),q().createElement("div",{className:"error-wrapper"},t?q().createElement("div",null,q().createElement("h4",null,Gt(t.get("source"))+" "+t.get("level")," ",n),q().createElement("span",{className:"message"},t.get("message")),q().createElement("div",{className:"error-line"},r?q().createElement("a",{onClick:g()(r).call(r,null,t.get("line"))},"Jump to line ",t.get("line")):null)):null)};function Gt(e){var t;return _()(t=(e||"").split(" ")).call(t,(e=>e[0].toUpperCase()+E()(e).call(e,1))).join(" ")}Kt.defaultProps={jumpToLine:null};class Yt extends q().Component{constructor(){super(...arguments),ce()(this,"onChangeWrapper",(e=>this.props.onChange(e.target.value)))}componentDidMount(){this.props.contentTypes&&this.props.onChange(this.props.contentTypes.first())}UNSAFE_componentWillReceiveProps(e){var t;e.contentTypes&&e.contentTypes.size&&(Me()(t=e.contentTypes).call(t,e.value)||e.onChange(e.contentTypes.first()))}render(){let{ariaControls:e,ariaLabel:t,className:r,contentTypes:n,controlId:s,value:a}=this.props;return n&&n.size?q().createElement("div",{className:"content-type-wrapper "+(r||"")},q().createElement("select",{"aria-controls":e,"aria-label":t,className:"content-type",id:s,onChange:this.onChangeWrapper,value:a||""},_()(n).call(n,(e=>q().createElement("option",{key:e,value:e},e))).toArray())):null}}ce()(Yt,"defaultProps",{onChange:()=>{},value:null,contentTypes:(0,T.fromJS)(["application/json"])});var Xt=r(4250),Qt=r.n(Xt),er=r(7390),tr=r.n(er);function rr(){for(var e,t=arguments.length,r=new Array(t),n=0;n!!e)).join(" ")).call(e)}class nr extends q().Component{render(){let{fullscreen:e,full:t,...r}=this.props;if(e)return q().createElement("section",r);let n="swagger-container"+(t?"-full":"");return q().createElement("section",Qt()({},r,{className:rr(r.className,n)}))}}const sr={mobile:"",tablet:"-tablet",desktop:"-desktop",large:"-hd"};class ar extends q().Component{render(){const{hide:e,keepContents:t,mobile:r,tablet:n,desktop:s,large:a,...o}=this.props;if(e&&!t)return q().createElement("span",null);let l=[];for(let e in sr){if(!Object.prototype.hasOwnProperty.call(sr,e))continue;let t=sr[e];if(e in this.props){let r=this.props[e];if(r<1){l.push("none"+t);continue}l.push("block"+t),l.push("col-"+r+t)}}e&&l.push("hidden");let i=rr(o.className,...l);return 
q().createElement("section",Qt()({},o,{className:i}))}}class or extends q().Component{render(){return q().createElement("div",Qt()({},this.props,{className:rr(this.props.className,"wrapper")}))}}class lr extends q().Component{render(){return q().createElement("button",Qt()({},this.props,{className:rr(this.props.className,"button")}))}}ce()(lr,"defaultProps",{className:""});const ir=e=>q().createElement("textarea",e),cr=e=>q().createElement("input",e);class pr extends q().Component{constructor(e,t){let r;super(e,t),ce()(this,"onChange",(e=>{let t,{onChange:r,multiple:n}=this.props,s=E()([]).call(e.target.options);var a;n?t=_()(a=l()(s).call(s,(function(e){return e.selected}))).call(a,(function(e){return e.value})):t=e.target.value;this.setState({value:t}),r&&r(t)})),r=e.value?e.value:e.multiple?[""]:"",this.state={value:r}}UNSAFE_componentWillReceiveProps(e){e.value!==this.props.value&&this.setState({value:e.value})}render(){var e,t;let{allowedValues:r,multiple:n,allowEmptyValue:s,disabled:a}=this.props,o=(null===(e=this.state.value)||void 0===e||null===(t=e.toJS)||void 0===t?void 0:t.call(e))||this.state.value;return q().createElement("select",{className:this.props.className,multiple:n,value:o,onChange:this.onChange,disabled:a},s?q().createElement("option",{value:""},"--"):null,_()(r).call(r,(function(e,t){return q().createElement("option",{key:t,value:String(e)},String(e))})))}}ce()(pr,"defaultProps",{multiple:!1,allowEmptyValue:!0});class ur extends q().Component{render(){return q().createElement("a",Qt()({},this.props,{rel:"noopener noreferrer",className:rr(this.props.className,"link")}))}}const dr=e=>{let{children:t}=e;return q().createElement("div",{className:"no-margin"}," ",t," ")};class mr extends q().Component{renderNotAnimated(){return this.props.isOpened?q().createElement(dr,null,this.props.children):q().createElement("noscript",null)}render(){let{animated:e,isOpened:t,children:r}=this.props;return e?(r=t?r:null,q().createElement(dr,null,r)):this.renderNotAnimated()}}ce()(mr,"defaultProps",{isOpened:!1,animated:!1});class hr extends q().Component{constructor(){var e;super(...arguments),this.setTagShown=g()(e=this._setTagShown).call(e,this)}_setTagShown(e,t){this.props.layoutActions.show(e,t)}showOp(e,t){let{layoutActions:r}=this.props;r.show(e,t)}render(){let{specSelectors:e,layoutSelectors:t,layoutActions:r,getComponent:n}=this.props,s=e.taggedOperations();const a=n("Collapse");return q().createElement("div",null,q().createElement("h4",{className:"overview-title"},"Overview"),_()(s).call(s,((e,n)=>{let s=e.get("operations"),o=["overview-tags",n],l=t.isShown(o,!0);return q().createElement("div",{key:"overview-"+n},q().createElement("h4",{onClick:()=>r.show(o,!l),className:"link overview-tag"}," ",l?"-":"+",n),q().createElement(a,{isOpened:l,animated:!0},_()(s).call(s,(e=>{let{path:n,method:s,id:a}=e.toObject(),o="operations",l=a,i=t.isShown([o,l]);return q().createElement(gr,{key:a,path:n,method:s,id:n+"-"+s,shown:i,showOpId:l,showOpIdPrefix:o,href:`#operation-${l}`,onClick:r.show})})).toArray()))})).toArray(),s.size<1&&q().createElement("h3",null," No operations defined in spec! 
"))}}class gr extends q().Component{constructor(e){var t;super(e),this.onClick=g()(t=this._onClick).call(t,this)}_onClick(){let{showOpId:e,showOpIdPrefix:t,onClick:r,shown:n}=this.props;r([t,e],!n)}render(){let{id:e,method:t,shown:r,href:n}=this.props;return q().createElement(ur,{href:n,onClick:this.onClick,className:"block opblock-link "+(r?"shown":"")},q().createElement("div",null,q().createElement("small",{className:`bold-label-${t}`},t.toUpperCase()),q().createElement("span",{className:"bold-label"},e)))}}class fr extends q().Component{componentDidMount(){this.props.initialValue&&(this.inputRef.value=this.props.initialValue)}render(){const{value:e,defaultValue:t,initialValue:r,...n}=this.props;return q().createElement("input",Qt()({},n,{ref:e=>this.inputRef=e}))}}class yr extends q().Component{render(){let{host:e,basePath:t}=this.props;return q().createElement("pre",{className:"base-url"},"[ Base URL: ",e,t," ]")}}class vr extends q().Component{render(){let{data:e,getComponent:t,selectedServer:r,url:n}=this.props,s=e.get("name")||"the developer",a=Xe(e.get("url"),n,{selectedServer:r}),o=e.get("email");const l=t("Link");return q().createElement("div",{className:"info__contact"},a&&q().createElement("div",null,q().createElement(l,{href:(0,L.Nm)(a),target:"_blank"},s," - Website")),o&&q().createElement(l,{href:(0,L.Nm)(`mailto:${o}`)},a?`Send email to ${s}`:`Contact ${s}`))}}class Er extends q().Component{render(){let{license:e,getComponent:t,selectedServer:r,url:n}=this.props;const s=t("Link");let a=e.get("name")||"License",o=Xe(e.get("url"),n,{selectedServer:r});return q().createElement("div",{className:"info__license"},o?q().createElement(s,{target:"_blank",href:(0,L.Nm)(o)},a):q().createElement("span",null,a))}}class Sr extends q().PureComponent{render(){const{url:e,getComponent:t}=this.props,r=t("Link");return q().createElement(r,{target:"_blank",href:(0,L.Nm)(e)},q().createElement("span",{className:"url"}," ",e))}}class Cr extends q().Component{render(){let{info:e,url:t,host:r,basePath:n,getComponent:s,externalDocs:a,selectedServer:o,url:l}=this.props,i=e.get("version"),c=e.get("description"),p=e.get("title"),u=Xe(e.get("termsOfService"),l,{selectedServer:o}),d=e.get("contact"),m=e.get("license"),h=Xe(a&&a.get("url"),l,{selectedServer:o}),g=a&&a.get("description");const f=s("Markdown",!0),y=s("Link"),v=s("VersionStamp"),E=s("InfoUrl"),S=s("InfoBasePath");return q().createElement("div",{className:"info"},q().createElement("hgroup",{className:"main"},q().createElement("h2",{className:"title"},p,i&&q().createElement(v,{version:i})),r||n?q().createElement(S,{host:r,basePath:n}):null,t&&q().createElement(E,{getComponent:s,url:t})),q().createElement("div",{className:"description"},q().createElement(f,{source:c})),u&&q().createElement("div",{className:"info__tos"},q().createElement(y,{target:"_blank",href:(0,L.Nm)(u)},"Terms of service")),d&&d.size?q().createElement(vr,{getComponent:s,data:d,selectedServer:o,url:t}):null,m&&m.size?q().createElement(Er,{getComponent:s,license:m,selectedServer:o,url:t}):null,h?q().createElement(y,{className:"info__extdocs",target:"_blank",href:(0,L.Nm)(h)},g||h):null)}}class br extends q().Component{render(){const{specSelectors:e,getComponent:t,oas3Selectors:r}=this.props,n=e.info(),s=e.url(),a=e.basePath(),o=e.host(),l=e.externalDocs(),i=r.selectedServer(),c=t("info");return q().createElement("div",null,n&&n.count()?q().createElement(c,{info:n,url:s,host:o,basePath:a,externalDocs:l,getComponent:t,selectedServer:i}):null)}}class xr extends 
q().Component{render(){return null}}class wr extends q().Component{render(){return q().createElement("div",{className:"view-line-link copy-to-clipboard",title:"Copy to clipboard"},q().createElement(Ct.CopyToClipboard,{text:this.props.textToCopy},q().createElement("svg",{width:"15",height:"16"},q().createElement("use",{href:"#copy",xlinkHref:"#copy"}))))}}class _r extends q().Component{render(){return q().createElement("div",{className:"footer"})}}class Ar extends q().Component{constructor(){super(...arguments),ce()(this,"onFilterChange",(e=>{const{target:{value:t}}=e;this.props.layoutActions.updateFilter(t)}))}render(){const{specSelectors:e,layoutSelectors:t,getComponent:r}=this.props,n=r("Col"),s="loading"===e.loadingStatus(),a="failed"===e.loadingStatus(),o=t.currentFilter(),l=["operation-filter-input"];return a&&l.push("failed"),s&&l.push("loading"),q().createElement("div",null,null===o||!1===o||"false"===o?null:q().createElement("div",{className:"filter-container"},q().createElement(n,{className:"filter wrapper",mobile:12},q().createElement("input",{className:l.join(" "),placeholder:"Filter by tag",type:"text",onChange:this.onFilterChange,value:!0===o||"true"===o?"":o,disabled:s}))))}}const Ir=Function.prototype;class Nr extends N.PureComponent{constructor(e,t){super(e,t),ce()(this,"updateValues",(e=>{let{param:t,isExecute:r,consumesValue:n=""}=e,s=/xml/i.test(n),a=/json/i.test(n),o=s?t.get("value_xml"):t.get("value");if(void 0!==o){let e=!o&&a?"{}":o;this.setState({value:e}),this.onChange(e,{isXml:s,isEditBox:r})}else s?this.onChange(this.sample("xml"),{isXml:s,isEditBox:r}):this.onChange(this.sample(),{isEditBox:r})})),ce()(this,"sample",(e=>{let{param:t,fn:{inferSchema:r}}=this.props,n=r(t.toJS());return(0,L.xi)(n,e,{includeWriteOnly:!0})})),ce()(this,"onChange",((e,t)=>{let{isEditBox:r,isXml:n}=t;this.setState({value:e,isEditBox:r}),this._onChange(e,n)})),ce()(this,"_onChange",((e,t)=>{(this.props.onChange||Ir)(e,t)})),ce()(this,"handleOnChange",(e=>{const{consumesValue:t}=this.props,r=/xml/i.test(t),n=e.target.value;this.onChange(n,{isXml:r,isEditBox:this.state.isEditBox})})),ce()(this,"toggleIsEditBox",(()=>this.setState((e=>({isEditBox:!e.isEditBox}))))),this.state={isEditBox:!1,value:""}}componentDidMount(){this.updateValues.call(this,this.props)}UNSAFE_componentWillReceiveProps(e){this.updateValues.call(this,e)}render(){let{onChangeConsumes:e,param:t,isExecute:r,specSelectors:n,pathMethod:s,getConfigs:a,getComponent:o}=this.props;const l=o("Button"),i=o("TextArea"),c=o("highlightCode"),p=o("contentType");let u=(n?n.parameterWithMetaByIdentity(s,t):t).get("errors",(0,T.List)()),d=n.contentTypeValues(s).get("requestContentType"),m=this.props.consumes&&this.props.consumes.size?this.props.consumes:Nr.defaultProp.consumes,{value:h,isEditBox:g}=this.state,f=null;return(0,It.O)(h)&&(f="json"),q().createElement("div",{className:"body-param","data-param-name":t.get("name"),"data-param-in":t.get("in")},g&&r?q().createElement(i,{className:"body-param__text"+(u.count()?" 
invalid":""),value:h,onChange:this.handleOnChange}):h&&q().createElement(c,{className:"body-param__example",language:f,getConfigs:a,value:h}),q().createElement("div",{className:"body-param-options"},r?q().createElement("div",{className:"body-param-edit"},q().createElement(l,{className:g?"btn cancel body-param__example-edit":"btn edit body-param__example-edit",onClick:this.toggleIsEditBox},g?"Cancel":"Edit")):null,q().createElement("label",{htmlFor:""},q().createElement("span",null,"Parameter content type"),q().createElement(p,{value:d,contentTypes:m,onChange:e,className:"body-param-content-type",ariaLabel:"Parameter content type"}))))}}ce()(Nr,"defaultProp",{consumes:(0,T.fromJS)(["application/json"]),param:(0,T.fromJS)({}),onChange:Ir,onChangeConsumes:Ir});var qr=r(8223);class Rr extends q().Component{render(){let{request:e,getConfigs:t}=this.props,r=(0,qr.requestSnippetGenerator_curl_bash)(e);const n=t(),s=ft()(n,"syntaxHighlight.activated")?q().createElement(ht.d3,{language:"bash",className:"curl microlight",style:(0,ht.C2)(ft()(n,"syntaxHighlight.theme"))},r):q().createElement("textarea",{readOnly:!0,className:"curl",value:r});return q().createElement("div",{className:"curl-command"},q().createElement("h4",null,"Curl"),q().createElement("div",{className:"copy-to-clipboard"},q().createElement(Ct.CopyToClipboard,{text:r},q().createElement("button",null))),q().createElement("div",null,s))}}class Tr extends q().Component{constructor(){super(...arguments),ce()(this,"onChange",(e=>{this.setScheme(e.target.value)})),ce()(this,"setScheme",(e=>{let{path:t,method:r,specActions:n}=this.props;n.setScheme(e,t,r)}))}UNSAFE_componentWillMount(){let{schemes:e}=this.props;this.setScheme(e.first())}UNSAFE_componentWillReceiveProps(e){var t;this.props.currentScheme&&Me()(t=e.schemes).call(t,this.props.currentScheme)||this.setScheme(e.schemes.first())}render(){var e;let{schemes:t,currentScheme:r}=this.props;return q().createElement("label",{htmlFor:"schemes"},q().createElement("span",{className:"schemes-title"},"Schemes"),q().createElement("select",{onChange:this.onChange,value:r},_()(e=t.valueSeq()).call(e,(e=>q().createElement("option",{value:e,key:e},e))).toArray()))}}class Pr extends q().Component{render(){const{specActions:e,specSelectors:t,getComponent:r}=this.props,n=t.operationScheme(),s=t.schemes(),a=r("schemes");return s&&s.size?q().createElement(a,{currentScheme:n,schemes:s,specActions:e}):null}}class kr extends N.Component{constructor(e,t){super(e,t),ce()(this,"toggleCollapsed",(()=>{this.props.onToggle&&this.props.onToggle(this.props.modelName,!this.state.expanded),this.setState({expanded:!this.state.expanded})})),ce()(this,"onLoad",(e=>{if(e&&this.props.layoutSelectors){const t=this.props.layoutSelectors.getScrollToKey();P().is(t,this.props.specPath)&&this.toggleCollapsed(),this.props.layoutActions.readyToScroll(this.props.specPath,e.parentElement)}}));let{expanded:r,collapsedContent:n}=this.props;this.state={expanded:r,collapsedContent:n||kr.defaultProps.collapsedContent}}componentDidMount(){const{hideSelfOnExpand:e,expanded:t,modelName:r}=this.props;e&&t&&this.props.onToggle(r,t)}UNSAFE_componentWillReceiveProps(e){this.props.expanded!==e.expanded&&this.setState({expanded:e.expanded})}render(){const{title:e,classes:t}=this.props;return 
this.state.expanded&&this.props.hideSelfOnExpand?q().createElement("span",{className:t||""},this.props.children):q().createElement("span",{className:t||"",ref:this.onLoad},q().createElement("button",{"aria-expanded":this.state.expanded,className:"model-box-control",onClick:this.toggleCollapsed},e&&q().createElement("span",{className:"pointer"},e),q().createElement("span",{className:"model-toggle"+(this.state.expanded?"":" collapsed")}),!this.state.expanded&&q().createElement("span",null,this.state.collapsedContent)),this.state.expanded&&this.props.children)}}ce()(kr,"defaultProps",{collapsedContent:"{...}",expanded:!1,title:null,onToggle:()=>{},hideSelfOnExpand:!1,specPath:P().List([])});var Or=r(185),Mr=r.n(Or);class jr extends q().Component{constructor(e,t){super(e,t),ce()(this,"activeTab",(e=>{let{target:{dataset:{name:t}}}=e;this.setState({activeTab:t})}));let{getConfigs:r,isExecute:n}=this.props,{defaultModelRendering:s}=r(),a=s;"example"!==s&&"model"!==s&&(a="example"),n&&(a="example"),this.state={activeTab:a}}UNSAFE_componentWillReceiveProps(e){e.isExecute&&!this.props.isExecute&&this.props.example&&this.setState({activeTab:"example"})}render(){let{getComponent:e,specSelectors:t,schema:r,example:n,isExecute:s,getConfigs:a,specPath:o,includeReadOnly:l,includeWriteOnly:i}=this.props,{defaultModelExpandDepth:c}=a();const p=e("ModelWrapper"),u=e("highlightCode"),d=Mr()(5).toString("base64"),m=Mr()(5).toString("base64"),h=Mr()(5).toString("base64"),g=Mr()(5).toString("base64");let f=t.isOAS3();return q().createElement("div",{className:"model-example"},q().createElement("ul",{className:"tab",role:"tablist"},q().createElement("li",{className:mt()("tabitem",{active:"example"===this.state.activeTab}),role:"presentation"},q().createElement("button",{"aria-controls":m,"aria-selected":"example"===this.state.activeTab,className:"tablinks","data-name":"example",id:d,onClick:this.activeTab,role:"tab"},s?"Edit Value":"Example Value")),r&&q().createElement("li",{className:mt()("tabitem",{active:"model"===this.state.activeTab}),role:"presentation"},q().createElement("button",{"aria-controls":g,"aria-selected":"model"===this.state.activeTab,className:mt()("tablinks",{inactive:s}),"data-name":"model",id:h,onClick:this.activeTab,role:"tab"},f?"Schema":"Model"))),"example"===this.state.activeTab&&q().createElement("div",{"aria-hidden":"example"!==this.state.activeTab,"aria-labelledby":d,"data-name":"examplePanel",id:m,role:"tabpanel",tabIndex:"0"},n||q().createElement(u,{value:"(no example available)",getConfigs:a})),"model"===this.state.activeTab&&q().createElement("div",{"aria-hidden":"example"===this.state.activeTab,"aria-labelledby":h,"data-name":"modelPanel",id:g,role:"tabpanel",tabIndex:"0"},q().createElement(p,{schema:r,getComponent:e,getConfigs:a,specSelectors:t,expandDepth:c,specPath:o,includeReadOnly:l,includeWriteOnly:i})))}}class Vr extends N.Component{constructor(){super(...arguments),ce()(this,"onToggle",((e,t)=>{this.props.layoutActions&&this.props.layoutActions.show(this.props.fullPath,t)}))}render(){let{getComponent:e,getConfigs:t}=this.props;const r=e("Model");let n;return this.props.layoutSelectors&&(n=this.props.layoutSelectors.isShown(this.props.fullPath)),q().createElement("div",{className:"model-box"},q().createElement(r,Qt()({},this.props,{getConfigs:t,expanded:n,depth:1,onToggle:this.onToggle,expandDepth:this.props.expandDepth||0})))}}var Dr=r(6024);class Lr extends 
N.Component{constructor(){super(...arguments),ce()(this,"getSchemaBasePath",(()=>this.props.specSelectors.isOAS3()?["components","schemas"]:["definitions"])),ce()(this,"getCollapsedContent",(()=>" ")),ce()(this,"handleToggle",((e,t)=>{const{layoutActions:r}=this.props;r.show([...this.getSchemaBasePath(),e],t),t&&this.props.specActions.requestResolvedSubtree([...this.getSchemaBasePath(),e])})),ce()(this,"onLoadModels",(e=>{e&&this.props.layoutActions.readyToScroll(this.getSchemaBasePath(),e)})),ce()(this,"onLoadModel",(e=>{if(e){const t=e.getAttribute("data-name");this.props.layoutActions.readyToScroll([...this.getSchemaBasePath(),t],e)}}))}render(){var e;let{specSelectors:t,getComponent:r,layoutSelectors:n,layoutActions:s,getConfigs:a}=this.props,o=t.definitions(),{docExpansion:l,defaultModelsExpandDepth:i}=a();if(!o.size||i<0)return null;const c=this.getSchemaBasePath();let p=n.isShown(c,i>0&&"none"!==l);const u=t.isOAS3(),d=r("ModelWrapper"),m=r("Collapse"),h=r("ModelCollapse"),g=r("JumpToPath",!0);return q().createElement("section",{className:p?"models is-open":"models",ref:this.onLoadModels},q().createElement("h4",null,q().createElement("button",{"aria-expanded":p,className:"models-control",onClick:()=>s.show(c,!p)},q().createElement("span",null,u?"Schemas":"Models"),q().createElement("svg",{width:"20",height:"20","aria-hidden":"true",focusable:"false"},q().createElement("use",{xlinkHref:p?"#large-arrow-up":"#large-arrow-down"})))),q().createElement(m,{isOpened:p},_()(e=o.entrySeq()).call(e,(e=>{let[o]=e;const l=[...c,o],p=P().List(l),u=t.specResolvedSubtree(l),m=t.specJson().getIn(l),f=T.Map.isMap(u)?u:P().Map(),y=T.Map.isMap(m)?m:P().Map(),v=f.get("title")||y.get("title")||o,E=n.isShown(l,!1);E&&0===f.size&&y.size>0&&this.props.specActions.requestResolvedSubtree(l);const S=q().createElement(d,{name:o,expandDepth:i,schema:f||P().Map(),displayName:v,fullPath:l,specPath:p,getComponent:r,specSelectors:t,getConfigs:a,layoutSelectors:n,layoutActions:s,includeReadOnly:!0,includeWriteOnly:!0}),C=q().createElement("span",{className:"model-box"},q().createElement("span",{className:"model model-title"},v));return q().createElement("div",{id:`model-${o}`,className:"model-container",key:`models-section-${o}`,"data-name":o,ref:this.onLoadModel},q().createElement("span",{className:"models-jump-to-path"},q().createElement(g,{specPath:p})),q().createElement(h,{classes:"model-box",collapsedContent:this.getCollapsedContent(o),onToggle:this.handleToggle,title:C,displayName:v,modelName:o,specPath:p,layoutSelectors:n,layoutActions:s,hideSelfOnExpand:!0,expanded:i>0&&E},S))})).toArray()))}}const Ur=e=>{let{value:t,getComponent:r}=e,n=r("ModelCollapse"),s=q().createElement("span",null,"Array [ ",t.count()," ]");return q().createElement("span",{className:"prop-enum"},"Enum:",q().createElement("br",null),q().createElement(n,{collapsedContent:s},"[ ",t.join(", ")," ]"))};class zr extends N.Component{render(){var e,t,r,n;let{schema:s,name:a,displayName:o,isRef:i,getComponent:c,getConfigs:p,depth:d,onToggle:m,expanded:h,specPath:g,...f}=this.props,{specSelectors:y,expandDepth:v,includeReadOnly:S,includeWriteOnly:C}=f;const{isOAS3:b}=y;if(!s)return null;const{showExtensions:x}=p();let w=s.get("description"),A=s.get("properties"),I=s.get("additionalProperties"),N=s.get("title")||o||a,R=s.get("required"),P=l()(s).call(s,((e,t)=>{var 
r;return-1!==ue()(r=["maxProperties","minProperties","nullable","example"]).call(r,t)})),k=s.get("deprecated"),O=s.getIn(["externalDocs","url"]),M=s.getIn(["externalDocs","description"]);const j=c("JumpToPath",!0),V=c("Markdown",!0),D=c("Model"),U=c("ModelCollapse"),z=c("Property"),B=c("Link"),$=()=>q().createElement("span",{className:"model-jump-to-path"},q().createElement(j,{specPath:g})),J=q().createElement("span",null,q().createElement("span",null,"{"),"...",q().createElement("span",null,"}"),i?q().createElement($,null):""),F=y.isOAS3()?s.get("anyOf"):null,W=y.isOAS3()?s.get("oneOf"):null,H=y.isOAS3()?s.get("not"):null,K=N&&q().createElement("span",{className:"model-title"},i&&s.get("$$ref")&&q().createElement("span",{className:"model-hint"},s.get("$$ref")),q().createElement("span",{className:"model-title__text"},N));return q().createElement("span",{className:"model"},q().createElement(U,{modelName:a,title:K,onToggle:m,expanded:!!h||d<=v,collapsedContent:J},q().createElement("span",{className:"brace-open object"},"{"),i?q().createElement($,null):null,q().createElement("span",{className:"inner-object"},q().createElement("table",{className:"model"},q().createElement("tbody",null,w?q().createElement("tr",{className:"description"},q().createElement("td",null,"description:"),q().createElement("td",null,q().createElement(V,{source:w}))):null,O&&q().createElement("tr",{className:"external-docs"},q().createElement("td",null,"externalDocs:"),q().createElement("td",null,q().createElement(B,{target:"_blank",href:(0,L.Nm)(O)},M||O))),k?q().createElement("tr",{className:"property"},q().createElement("td",null,"deprecated:"),q().createElement("td",null,"true")):null,A&&A.size?_()(e=l()(t=A.entrySeq()).call(t,(e=>{let[,t]=e;return(!t.get("readOnly")||S)&&(!t.get("writeOnly")||C)}))).call(e,(e=>{let[t,r]=e,n=b()&&r.get("deprecated"),s=T.List.isList(R)&&R.contains(t),o=["property-row"];return n&&o.push("deprecated"),s&&o.push("required"),q().createElement("tr",{key:t,className:o.join(" ")},q().createElement("td",null,t,s&&q().createElement("span",{className:"star"},"*")),q().createElement("td",null,q().createElement(D,Qt()({key:`object-${a}-${t}_${r}`},f,{required:s,getComponent:c,specPath:g.push("properties",t),getConfigs:p,schema:r,depth:d+1}))))})).toArray():null,x?q().createElement("tr",null,q().createElement("td",null," ")):null,x?_()(r=s.entrySeq()).call(r,(e=>{let[t,r]=e;if("x-"!==E()(t).call(t,0,2))return;const n=r?r.toJS?r.toJS():r:null;return q().createElement("tr",{key:t,className:"extension"},q().createElement("td",null,t),q().createElement("td",null,u()(n)))})).toArray():null,I&&I.size?q().createElement("tr",null,q().createElement("td",null,"< * >:"),q().createElement("td",null,q().createElement(D,Qt()({},f,{required:!1,getComponent:c,specPath:g.push("additionalProperties"),getConfigs:p,schema:I,depth:d+1})))):null,F?q().createElement("tr",null,q().createElement("td",null,"anyOf ->"),q().createElement("td",null,_()(F).call(F,((e,t)=>q().createElement("div",{key:t},q().createElement(D,Qt()({},f,{required:!1,getComponent:c,specPath:g.push("anyOf",t),getConfigs:p,schema:e,depth:d+1}))))))):null,W?q().createElement("tr",null,q().createElement("td",null,"oneOf ->"),q().createElement("td",null,_()(W).call(W,((e,t)=>q().createElement("div",{key:t},q().createElement(D,Qt()({},f,{required:!1,getComponent:c,specPath:g.push("oneOf",t),getConfigs:p,schema:e,depth:d+1}))))))):null,H?q().createElement("tr",null,q().createElement("td",null,"not 
->"),q().createElement("td",null,q().createElement("div",null,q().createElement(D,Qt()({},f,{required:!1,getComponent:c,specPath:g.push("not"),getConfigs:p,schema:H,depth:d+1}))))):null))),q().createElement("span",{className:"brace-close"},"}")),P.size?_()(n=P.entrySeq()).call(n,(e=>{let[t,r]=e;return q().createElement(z,{key:`${t}-${r}`,propKey:t,propVal:r,propClass:"property"})})):null)}}class Br extends N.Component{render(){var e;let{getComponent:t,getConfigs:r,schema:n,depth:s,expandDepth:a,name:o,displayName:i,specPath:c}=this.props,p=n.get("description"),u=n.get("items"),d=n.get("title")||i||o,m=l()(n).call(n,((e,t)=>{var r;return-1===ue()(r=["type","items","description","$$ref","externalDocs"]).call(r,t)})),h=n.getIn(["externalDocs","url"]),g=n.getIn(["externalDocs","description"]);const f=t("Markdown",!0),y=t("ModelCollapse"),v=t("Model"),E=t("Property"),S=t("Link"),C=d&&q().createElement("span",{className:"model-title"},q().createElement("span",{className:"model-title__text"},d));return q().createElement("span",{className:"model"},q().createElement(y,{title:C,expanded:s<=a,collapsedContent:"[...]"},"[",m.size?_()(e=m.entrySeq()).call(e,(e=>{let[t,r]=e;return q().createElement(E,{key:`${t}-${r}`,propKey:t,propVal:r,propClass:"property"})})):null,p?q().createElement(f,{source:p}):m.size?q().createElement("div",{className:"markdown"}):null,h&&q().createElement("div",{className:"external-docs"},q().createElement(S,{target:"_blank",href:(0,L.Nm)(h)},g||h)),q().createElement("span",null,q().createElement(v,Qt()({},this.props,{getConfigs:r,specPath:c.push("items"),name:null,schema:u,required:!1,depth:s+1}))),"]"))}}const $r="property primitive";class Jr extends N.Component{render(){var e,t,r;let{schema:n,getComponent:s,getConfigs:a,name:o,displayName:i,depth:c,expandDepth:p}=this.props;const{showExtensions:u}=a();if(!n||!n.get)return q().createElement("div",null);let d=n.get("type"),m=n.get("format"),h=n.get("xml"),g=n.get("enum"),f=n.get("title")||i||o,y=n.get("description"),v=(0,L.nX)(n),E=l()(n).call(n,((e,t)=>{var r;return-1===ue()(r=["enum","type","format","description","$$ref","externalDocs"]).call(r,t)})).filterNot(((e,t)=>v.has(t))),S=n.getIn(["externalDocs","url"]),C=n.getIn(["externalDocs","description"]);const b=s("Markdown",!0),x=s("EnumModel"),w=s("Property"),A=s("ModelCollapse"),I=s("Link"),N=f&&q().createElement("span",{className:"model-title"},q().createElement("span",{className:"model-title__text"},f));return q().createElement("span",{className:"model"},q().createElement(A,{title:N,expanded:c<=p,collapsedContent:"[...]",hideSelfOnExpand:p!==c},q().createElement("span",{className:"prop"},o&&c>1&&q().createElement("span",{className:"prop-name"},f),q().createElement("span",{className:"prop-type"},d),m&&q().createElement("span",{className:"prop-format"},"($",m,")"),E.size?_()(e=E.entrySeq()).call(e,(e=>{let[t,r]=e;return q().createElement(w,{key:`${t}-${r}`,propKey:t,propVal:r,propClass:$r})})):null,u&&v.size?_()(t=v.entrySeq()).call(t,(e=>{let[t,r]=e;return q().createElement(w,{key:`${t}-${r}`,propKey:t,propVal:r,propClass:$r})})):null,y?q().createElement(b,{source:y}):null,S&&q().createElement("div",{className:"external-docs"},q().createElement(I,{target:"_blank",href:(0,L.Nm)(S)},C||S)),h&&h.size?q().createElement("span",null,q().createElement("br",null),q().createElement("span",{className:$r},"xml:"),_()(r=h.entrySeq()).call(r,(e=>{let[t,r]=e;return q().createElement("span",{key:`${t}-${r}`,className:$r},q().createElement("br",null),"   ",t,": 
",String(r))})).toArray()):null,g&&q().createElement(x,{value:g,getComponent:s}))))}}const Fr=e=>{let{propKey:t,propVal:r,propClass:n}=e;return q().createElement("span",{className:n},q().createElement("br",null),t,": ",String(r))};class Wr extends q().Component{render(){const{onTryoutClick:e,onCancelClick:t,onResetClick:r,enabled:n,hasUserEditedBody:s,isOAS3:a}=this.props,o=a&&s;return q().createElement("div",{className:o?"try-out btn-group":"try-out"},n?q().createElement("button",{className:"btn try-out__btn cancel",onClick:t},"Cancel"):q().createElement("button",{className:"btn try-out__btn",onClick:e},"Try it out "),o&&q().createElement("button",{className:"btn try-out__btn reset",onClick:r},"Reset"))}}ce()(Wr,"defaultProps",{onTryoutClick:Function.prototype,onCancelClick:Function.prototype,onResetClick:Function.prototype,enabled:!1,hasUserEditedBody:!1,isOAS3:!1});class Hr extends q().PureComponent{render(){const{bypass:e,isSwagger2:t,isOAS3:r,alsoShow:n}=this.props;return e?q().createElement("div",null,this.props.children):t&&r?q().createElement("div",{className:"version-pragma"},n,q().createElement("div",{className:"version-pragma__message version-pragma__message--ambiguous"},q().createElement("div",null,q().createElement("h3",null,"Unable to render this definition"),q().createElement("p",null,q().createElement("code",null,"swagger")," and ",q().createElement("code",null,"openapi")," fields cannot be present in the same Swagger or OpenAPI definition. Please remove one of the fields."),q().createElement("p",null,"Supported version fields are ",q().createElement("code",null,"swagger: ",'"2.0"')," and those that match ",q().createElement("code",null,"openapi: 3.0.n")," (for example, ",q().createElement("code",null,"openapi: 3.0.0"),").")))):t||r?q().createElement("div",null,this.props.children):q().createElement("div",{className:"version-pragma"},n,q().createElement("div",{className:"version-pragma__message version-pragma__message--missing"},q().createElement("div",null,q().createElement("h3",null,"Unable to render this definition"),q().createElement("p",null,"The provided definition does not specify a valid version field."),q().createElement("p",null,"Please indicate a valid Swagger or OpenAPI version field. 
Supported version fields are ",q().createElement("code",null,"swagger: ",'"2.0"')," and those that match ",q().createElement("code",null,"openapi: 3.0.n")," (for example, ",q().createElement("code",null,"openapi: 3.0.0"),")."))))}}ce()(Hr,"defaultProps",{alsoShow:null,children:null,bypass:!1});const Kr=e=>{let{version:t}=e;return q().createElement("small",null,q().createElement("pre",{className:"version"}," ",t," "))},Zr=e=>{let{enabled:t,path:r,text:n}=e;return q().createElement("a",{className:"nostyle",onClick:t?e=>e.preventDefault():null,href:t?`#/${r}`:null},q().createElement("span",null,n))},Gr=()=>q().createElement("div",null,q().createElement("svg",{xmlns:"http://www.w3.org/2000/svg",xmlnsXlink:"http://www.w3.org/1999/xlink",className:"svg-assets"},q().createElement("defs",null,q().createElement("symbol",{viewBox:"0 0 20 20",id:"unlocked"},q().createElement("path",{d:"M15.8 8H14V5.6C14 2.703 12.665 1 10 1 7.334 1 6 2.703 6 5.6V6h2v-.801C8 3.754 8.797 3 10 3c1.203 0 2 .754 2 2.199V8H4c-.553 0-1 .646-1 1.199V17c0 .549.428 1.139.951 1.307l1.197.387C5.672 18.861 6.55 19 7.1 19h5.8c.549 0 1.428-.139 1.951-.307l1.196-.387c.524-.167.953-.757.953-1.306V9.199C17 8.646 16.352 8 15.8 8z"})),q().createElement("symbol",{viewBox:"0 0 20 20",id:"locked"},q().createElement("path",{d:"M15.8 8H14V5.6C14 2.703 12.665 1 10 1 7.334 1 6 2.703 6 5.6V8H4c-.553 0-1 .646-1 1.199V17c0 .549.428 1.139.951 1.307l1.197.387C5.672 18.861 6.55 19 7.1 19h5.8c.549 0 1.428-.139 1.951-.307l1.196-.387c.524-.167.953-.757.953-1.306V9.199C17 8.646 16.352 8 15.8 8zM12 8H8V5.199C8 3.754 8.797 3 10 3c1.203 0 2 .754 2 2.199V8z"})),q().createElement("symbol",{viewBox:"0 0 20 20",id:"close"},q().createElement("path",{d:"M14.348 14.849c-.469.469-1.229.469-1.697 0L10 11.819l-2.651 3.029c-.469.469-1.229.469-1.697 0-.469-.469-.469-1.229 0-1.697l2.758-3.15-2.759-3.152c-.469-.469-.469-1.228 0-1.697.469-.469 1.228-.469 1.697 0L10 8.183l2.651-3.031c.469-.469 1.228-.469 1.697 0 .469.469.469 1.229 0 1.697l-2.758 3.152 2.758 3.15c.469.469.469 1.229 0 1.698z"})),q().createElement("symbol",{viewBox:"0 0 20 20",id:"large-arrow"},q().createElement("path",{d:"M13.25 10L6.109 2.58c-.268-.27-.268-.707 0-.979.268-.27.701-.27.969 0l7.83 7.908c.268.271.268.709 0 .979l-7.83 7.908c-.268.271-.701.27-.969 0-.268-.269-.268-.707 0-.979L13.25 10z"})),q().createElement("symbol",{viewBox:"0 0 20 20",id:"large-arrow-down"},q().createElement("path",{d:"M17.418 6.109c.272-.268.709-.268.979 0s.271.701 0 .969l-7.908 7.83c-.27.268-.707.268-.979 0l-7.908-7.83c-.27-.268-.27-.701 0-.969.271-.268.709-.268.979 0L10 13.25l7.418-7.141z"})),q().createElement("symbol",{viewBox:"0 0 20 20",id:"large-arrow-up"},q().createElement("path",{d:"M 17.418 14.908 C 17.69 15.176 18.127 15.176 18.397 14.908 C 18.667 14.64 18.668 14.207 18.397 13.939 L 10.489 6.109 C 10.219 5.841 9.782 5.841 9.51 6.109 L 1.602 13.939 C 1.332 14.207 1.332 14.64 1.602 14.908 C 1.873 15.176 2.311 15.176 2.581 14.908 L 10 7.767 L 17.418 14.908 Z"})),q().createElement("symbol",{viewBox:"0 0 24 24",id:"jump-to"},q().createElement("path",{d:"M19 7v4H5.83l3.58-3.59L8 6l-6 6 6 6 1.41-1.41L5.83 13H21V7z"})),q().createElement("symbol",{viewBox:"0 0 24 24",id:"expand"},q().createElement("path",{d:"M10 18h4v-2h-4v2zM3 6v2h18V6H3zm3 7h12v-2H6v2z"})),q().createElement("symbol",{viewBox:"0 0 15 16",id:"copy"},q().createElement("g",{transform:"translate(2, -1)"},q().createElement("path",{fill:"#ffffff",fillRule:"evenodd",d:"M2 13h4v1H2v-1zm5-6H2v1h5V7zm2 3V8l-3 3 3 3v-2h5v-2H9zM4.5 9H2v1h2.5V9zM2 12h2.5v-1H2v1zm9 
1h1v2c-.02.28-.11.52-.3.7-.19.18-.42.28-.7.3H1c-.55 0-1-.45-1-1V4c0-.55.45-1 1-1h3c0-1.11.89-2 2-2 1.11 0 2 .89 2 2h3c.55 0 1 .45 1 1v5h-1V6H1v9h10v-2zM2 5h8c0-.55-.45-1-1-1H8c-.55 0-1-.45-1-1s-.45-1-1-1-1 .45-1 1-.45 1-1 1H3c-.55 0-1 .45-1 1z"}))))));var Yr=r(2552);class Xr extends q().Component{render(){let{errSelectors:e,specSelectors:t,getComponent:r}=this.props,n=r("SvgAssets"),s=r("InfoContainer",!0),a=r("VersionPragmaFilter"),o=r("operations",!0),l=r("Models",!0),i=r("Row"),c=r("Col"),p=r("errors",!0);const u=r("ServersContainer",!0),d=r("SchemesContainer",!0),m=r("AuthorizeBtnContainer",!0),h=r("FilterContainer",!0);let g=t.isSwagger2(),f=t.isOAS3();const y=!t.specStr(),v=t.loadingStatus();let E=null;if("loading"===v&&(E=q().createElement("div",{className:"info"},q().createElement("div",{className:"loading-container"},q().createElement("div",{className:"loading"})))),"failed"===v&&(E=q().createElement("div",{className:"info"},q().createElement("div",{className:"loading-container"},q().createElement("h4",{className:"title"},"Failed to load API definition."),q().createElement(p,null)))),"failedConfig"===v){const t=e.lastError(),r=t?t.get("message"):"";E=q().createElement("div",{className:"info failed-config"},q().createElement("div",{className:"loading-container"},q().createElement("h4",{className:"title"},"Failed to load remote configuration."),q().createElement("p",null,r)))}if(!E&&y&&(E=q().createElement("h4",null,"No API definition provided.")),E)return q().createElement("div",{className:"swagger-ui"},q().createElement("div",{className:"loading-container"},E));const S=t.servers(),C=t.schemes(),b=S&&S.size,x=C&&C.size,w=!!t.securityDefinitions();return q().createElement("div",{className:"swagger-ui"},q().createElement(n,null),q().createElement(a,{isSwagger2:g,isOAS3:f,alsoShow:q().createElement(p,null)},q().createElement(p,null),q().createElement(i,{className:"information-container"},q().createElement(c,{mobile:12},q().createElement(s,null))),b||x||w?q().createElement("div",{className:"scheme-container"},q().createElement(c,{className:"schemes wrapper",mobile:12},b?q().createElement(u,null):null,x?q().createElement(d,null):null,w?q().createElement(m,null):null)):null,q().createElement(h,null),q().createElement(i,null,q().createElement(c,{mobile:12,desktop:12},q().createElement(o,null))),q().createElement(i,null,q().createElement(c,{mobile:12,desktop:12},q().createElement(l,null)))))}}const Qr=require("react-debounce-input");var en=r.n(Qr);const tn={value:"",onChange:()=>{},schema:{},keyName:"",required:!1,errors:(0,T.List)()};class rn extends N.Component{componentDidMount(){const{dispatchInitialValue:e,value:t,onChange:r}=this.props;e?r(t):!1===e&&r("")}render(){let{schema:e,errors:t,value:r,onChange:n,getComponent:s,fn:a,disabled:o}=this.props;const l=e&&e.get?e.get("format"):null,i=e&&e.get?e.get("type"):null;let c=e=>s(e,!1,{failSilently:!0}),p=i?c(l?`JsonSchema_${i}_${l}`:`JsonSchema_${i}`):s("JsonSchema_string");return p||(p=s("JsonSchema_string")),q().createElement(p,Qt()({},this.props,{errors:t,fn:a,getComponent:s,value:r,onChange:n,schema:e,disabled:o}))}}ce()(rn,"defaultProps",tn);class nn extends N.Component{constructor(){super(...arguments),ce()(this,"onChange",(e=>{const t=this.props.schema&&"file"===this.props.schema.get("type")?e.target.files[0]:e.target.value;this.props.onChange(t,this.props.keyName)})),ce()(this,"onEnumChange",(e=>this.props.onChange(e)))}render(){let{getComponent:e,value:t,schema:r,errors:n,required:s,description:a,disabled:o}=this.props;const 
l=r&&r.get?r.get("enum"):null,i=r&&r.get?r.get("format"):null,c=r&&r.get?r.get("type"):null,p=r&&r.get?r.get("in"):null;if(t||(t=""),n=n.toJS?n.toJS():[],l){const r=e("Select");return q().createElement(r,{className:n.length?"invalid":"",title:n.length?n:"",allowedValues:[...l],value:t,allowEmptyValue:!s,disabled:o,onChange:this.onEnumChange})}const u=o||p&&"formData"===p&&!("FormData"in window),d=e("Input");return c&&"file"===c?q().createElement(d,{type:"file",className:n.length?"invalid":"",title:n.length?n:"",onChange:this.onChange,disabled:u}):q().createElement(en(),{type:i&&"password"===i?"password":"text",className:n.length?"invalid":"",title:n.length?n:"",value:t,minLength:0,debounceTimeout:350,placeholder:a,onChange:this.onChange,disabled:u})}}ce()(nn,"defaultProps",tn);class sn extends N.PureComponent{constructor(e,t){super(e,t),ce()(this,"onChange",(()=>{this.props.onChange(this.state.value)})),ce()(this,"onItemChange",((e,t)=>{this.setState((r=>{let{value:n}=r;return{value:n.set(t,e)}}),this.onChange)})),ce()(this,"removeItem",(e=>{this.setState((t=>{let{value:r}=t;return{value:r.delete(e)}}),this.onChange)})),ce()(this,"addItem",(()=>{let e=un(this.state.value);this.setState((()=>({value:e.push((0,L.xi)(this.state.schema.get("items"),!1,{includeWriteOnly:!0}))})),this.onChange)})),ce()(this,"onEnumChange",(e=>{this.setState((()=>({value:e})),this.onChange)})),this.state={value:un(e.value),schema:e.schema}}UNSAFE_componentWillReceiveProps(e){const t=un(e.value);t!==this.state.value&&this.setState({value:t}),e.schema!==this.state.schema&&this.setState({schema:e.schema})}render(){var e;let{getComponent:t,required:r,schema:n,errors:s,fn:a,disabled:o}=this.props;s=s.toJS?s.toJS():C()(s)?s:[];const i=l()(s).call(s,(e=>"string"==typeof e)),c=_()(e=l()(s).call(s,(e=>void 0!==e.needRemove))).call(e,(e=>e.error)),p=this.state.value,u=!!(p&&p.count&&p.count()>0),d=n.getIn(["items","enum"]),m=n.getIn(["items","type"]),h=n.getIn(["items","format"]),g=n.get("items");let f,y=!1,v="file"===m||"string"===m&&"binary"===h;if(m&&h?f=t(`JsonSchema_${m}_${h}`):"boolean"!==m&&"array"!==m&&"object"!==m||(f=t(`JsonSchema_${m}`)),f||v||(y=!0),d){const e=t("Select");return q().createElement(e,{className:s.length?"invalid":"",title:s.length?s:"",multiple:!0,value:p,disabled:o,allowedValues:d,allowEmptyValue:!r,onChange:this.onEnumChange})}const E=t("Button");return q().createElement("div",{className:"json-schema-array"},u?_()(p).call(p,((e,r)=>{var n;const i=(0,T.fromJS)([..._()(n=l()(s).call(s,(e=>e.index===r))).call(n,(e=>e.error))]);return q().createElement("div",{key:r,className:"json-schema-form-item"},v?q().createElement(on,{value:e,onChange:e=>this.onItemChange(e,r),disabled:o,errors:i,getComponent:t}):y?q().createElement(an,{value:e,onChange:e=>this.onItemChange(e,r),disabled:o,errors:i}):q().createElement(f,Qt()({},this.props,{value:e,onChange:e=>this.onItemChange(e,r),disabled:o,errors:i,schema:g,getComponent:t,fn:a})),o?null:q().createElement(E,{className:`btn btn-sm json-schema-form-item-remove ${c.length?"invalid":null}`,title:c.length?c:"",onClick:()=>this.removeItem(r)}," - "))})):null,o?null:q().createElement(E,{className:`btn btn-sm json-schema-form-item-add ${i.length?"invalid":null}`,title:i.length?i:"",onClick:this.addItem},"Add ",m?`${m} `:"","item"))}}ce()(sn,"defaultProps",tn);class an extends N.Component{constructor(){super(...arguments),ce()(this,"onChange",(e=>{const 
t=e.target.value;this.props.onChange(t,this.props.keyName)}))}render(){let{value:e,errors:t,description:r,disabled:n}=this.props;return e||(e=""),t=t.toJS?t.toJS():[],q().createElement(en(),{type:"text",className:t.length?"invalid":"",title:t.length?t:"",value:e,minLength:0,debounceTimeout:350,placeholder:r,onChange:this.onChange,disabled:n})}}ce()(an,"defaultProps",tn);class on extends N.Component{constructor(){super(...arguments),ce()(this,"onFileChange",(e=>{const t=e.target.files[0];this.props.onChange(t,this.props.keyName)}))}render(){let{getComponent:e,errors:t,disabled:r}=this.props;const n=e("Input"),s=r||!("FormData"in window);return q().createElement(n,{type:"file",className:t.length?"invalid":"",title:t.length?t:"",onChange:this.onFileChange,disabled:s})}}ce()(on,"defaultProps",tn);class ln extends N.Component{constructor(){super(...arguments),ce()(this,"onEnumChange",(e=>this.props.onChange(e)))}render(){let{getComponent:e,value:t,errors:r,schema:n,required:s,disabled:a}=this.props;r=r.toJS?r.toJS():[];let o=n&&n.get?n.get("enum"):null,l=!o||!s,i=!o&&["true","false"];const c=e("Select");return q().createElement(c,{className:r.length?"invalid":"",title:r.length?r:"",value:String(t),disabled:a,allowedValues:o?[...o]:i,allowEmptyValue:l,onChange:this.onEnumChange})}}ce()(ln,"defaultProps",tn);const cn=e=>_()(e).call(e,(e=>{const t=void 0!==e.propKey?e.propKey:e.index;let r="string"==typeof e?e:"string"==typeof e.error?e.error:null;if(!t&&r)return r;let n=e.error,s=`/${e.propKey}`;for(;"object"==typeof n;){const e=void 0!==n.propKey?n.propKey:n.index;if(void 0===e)break;if(s+=`/${e}`,!n.error)break;n=n.error}return`${s}: ${n}`}));class pn extends N.PureComponent{constructor(){super(),ce()(this,"onChange",(e=>{this.props.onChange(e)})),ce()(this,"handleOnChange",(e=>{const t=e.target.value;this.onChange(t)}))}render(){let{getComponent:e,value:t,errors:r,disabled:n}=this.props;const s=e("TextArea");return r=r.toJS?r.toJS():C()(r)?r:[],q().createElement("div",null,q().createElement(s,{className:mt()({invalid:r.length}),title:r.length?cn(r).join(", "):"",value:(0,L.Pz)(t),disabled:n,onChange:this.handleOnChange}))}}function un(e){return T.List.isList(e)?e:C()(e)?(0,T.fromJS)(e):(0,T.List)()}function dn(){let 
r={components:{App:he,authorizationPopup:ge,authorizeBtn:fe,AuthorizeBtnContainer:ye,authorizeOperationBtn:ve,auths:Ee,AuthItem:Se,authError:Ce,oauth2:De,apiKeyAuth:be,basicAuth:xe,clear:Le,liveResponse:Be,InitializedInput:fr,info:Cr,InfoContainer:br,JumpToPath:xr,CopyToClipboardBtn:wr,onlineValidatorBadge:$e.Z,operations:We,operation:et,OperationSummary:nt,OperationSummaryMethod:st,OperationSummaryPath:lt,highlightCode:xt,responses:wt,response:Nt,ResponseExtension:qt,responseBody:Ot,parameters:Vt,parameterRow:Bt,execute:Ft,headers:Wt,errors:Ht,contentType:Yt,overview:hr,footer:_r,FilterContainer:Ar,ParamBody:Nr,curl:Rr,schemes:Tr,SchemesContainer:Pr,modelExample:jr,ModelWrapper:Vr,ModelCollapse:kr,Model:Dr.Z,Models:Lr,EnumModel:Ur,ObjectModel:zr,ArrayModel:Br,PrimitiveModel:Jr,Property:Fr,TryItOutButton:Wr,Markdown:Yr.Z,BaseLayout:Xr,VersionPragmaFilter:Hr,VersionStamp:Kr,OperationExt:it,OperationExtRow:ct,ParameterExt:Dt,ParameterIncludeEmpty:Ut,OperationTag:Qe,OperationContainer:me,DeepLink:Zr,InfoUrl:Sr,InfoBasePath:yr,SvgAssets:Gr,Example:we,ExamplesSelect:Ie,ExamplesSelectValueRetainer:qe}},n={components:e},s={components:t};return[ne.default,te.default,X.default,Z.default,K.default,W.default,H.default,G.default,r,n,Q.default,s,ee.default,re.default,se.default,ae.default,oe.default,Y.default,(0,le.default)()]}ce()(pn,"defaultProps",tn);var mn=r(7451);function hn(){return[dn,mn.default]}var gn=r(5308);const{GIT_DIRTY:fn,GIT_COMMIT:yn,PACKAGE_VERSION:vn,BUILD_TIME:En}={PACKAGE_VERSION:"4.15.5",GIT_COMMIT:"gc858a26",GIT_DIRTY:!0,BUILD_TIME:"Wed, 09 Nov 2022 06:53:00 GMT"};function Sn(e){var t;D.Z.versions=D.Z.versions||{},D.Z.versions.swaggerUi={version:vn,gitRevision:yn,gitDirty:fn,buildTimestamp:En};const r={dom_id:null,domNode:null,spec:{},url:"",urls:null,layout:"BaseLayout",docExpansion:"list",maxDisplayedTags:null,filter:null,validatorUrl:"https://validator.swagger.io/validator",oauth2RedirectUrl:`${window.location.protocol}//${window.location.host}${window.location.pathname.substring(0,a()(t=window.location.pathname).call(t,"/"))}/oauth2-redirect.html`,persistAuthorization:!1,configs:{},custom:{},displayOperationId:!1,displayRequestDuration:!1,deepLinking:!1,tryItOutEnabled:!1,requestInterceptor:e=>e,responseInterceptor:e=>e,showMutatedRequest:!0,defaultModelRendering:"example",defaultModelExpandDepth:1,defaultModelsExpandDepth:1,showExtensions:!1,showCommonExtensions:!1,withCredentials:void 0,requestSnippetsEnabled:!1,requestSnippets:{generators:{curl_bash:{title:"cURL (bash)",syntax:"bash"},curl_powershell:{title:"cURL (PowerShell)",syntax:"powershell"},curl_cmd:{title:"cURL (CMD)",syntax:"bash"}},defaultExpanded:!0,languages:null},supportedSubmitMethods:["get","put","post","delete","options","head","patch","trace"],queryConfigEnabled:!1,presets:[hn],plugins:[],pluginsOptions:{pluginLoadType:"legacy"},initialState:{},fn:{},components:{},syntaxHighlight:{activated:!0,theme:"agate"}};let n=e.queryConfigEnabled?(0,L.UG)():{};const s=e.domNode;delete e.domNode;const o=m()({},r,e,n),i={system:{configs:o.configs},plugins:o.presets,pluginsOptions:o.pluginsOptions,state:m()({layout:{layout:o.layout,filter:l()(o)},spec:{spec:"",url:o.url},requestSnippets:o.requestSnippets},o.initialState)};if(o.initialState)for(var p in o.initialState)Object.prototype.hasOwnProperty.call(o.initialState,p)&&void 0===o.initialState[p]&&delete i.state[p];var d=new z(i);d.register([o.plugins,()=>({fn:o.fn,components:o.components,state:o.state})]);var h=d.getSystem();const g=e=>{let 
t=h.specSelectors.getLocalConfig?h.specSelectors.getLocalConfig():{},r=m()({},t,o,e||{},n);if(s&&(r.domNode=s),d.setConfigs(r),h.configsActions.loaded(),null!==e&&(!n.url&&"object"==typeof r.spec&&c()(r.spec).length?(h.specActions.updateUrl(""),h.specActions.updateLoadingStatus("success"),h.specActions.updateSpec(u()(r.spec))):h.specActions.download&&r.url&&!r.urls&&(h.specActions.updateUrl(r.url),h.specActions.download(r.url))),r.domNode)h.render(r.domNode,"App");else if(r.dom_id){let e=document.querySelector(r.dom_id);h.render(e,"App")}else null===r.dom_id||null===r.domNode||console.error("Skipped rendering: no `dom_id` or `domNode` was specified");return h},f=n.config||o.configUrl;return f&&h.specActions&&h.specActions.getConfigByUrl?(h.specActions.getConfigByUrl({url:f,loadRemoteConfig:!0,requestInterceptor:o.requestInterceptor,responseInterceptor:o.responseInterceptor},g),h):g()}Sn.presets={apis:hn},Sn.plugins=gn.default;const Cn=Sn})(),n=n.default})()})); +//# sourceMappingURL=swagger-ui.js.map \ No newline at end of file diff --git a/vendor/github.com/swaggo/files/v2/dist/swagger-ui.js.map b/vendor/github.com/swaggo/files/v2/dist/swagger-ui.js.map new file mode 100644 index 0000000..703d37a --- /dev/null +++ b/vendor/github.com/swaggo/files/v2/dist/swagger-ui.js.map @@ -0,0 +1 @@ +{"version":3,"file":"swagger-ui.js","mappings":"CAAA,SAA2CA,EAAMC,GAC1B,iBAAZC,SAA0C,iBAAXC,OACxCA,OAAOD,QAAUD,IACQ,mBAAXG,QAAyBA,OAAOC,IAC9CD,OAAO,GAAIH,GACe,iBAAZC,QACdA,QAAuB,cAAID,IAE3BD,EAAoB,cAAIC,GACzB,CATD,CASGK,MAAM,WACT,M,6JCVA,MAAM,EAA+BC,QAAQ,kC,kDCK9B,MAAMC,UAAcC,KAAwB,cAAD,6CAiBxCC,IAC0B,IAAnC,IAAAA,GAAG,KAAHA,EAAY,kBACRA,EAAIC,QAAQ,sBAAuB,KAEG,IAA1C,IAAAD,GAAG,KAAHA,EAAY,yBACRA,EAAIC,QAAQ,8BAA+B,SADpD,IAGD,yBAEeC,IACd,IAAI,cAAEC,GAAkBP,KAAKQ,MAE7B,OAAOD,EAAcE,eAAeH,EAAM,GAC3C,CAEDI,SACE,IAAI,aAAEC,EAAY,WAAEC,EAAU,cAAEL,EAAa,OAAEM,EAAM,SAAEC,EAAQ,KAAEC,EAAI,MAAEC,EAAK,SAAEC,EAAQ,YAAEC,EAAW,gBACjGC,EAAe,iBAAEC,GAAoBpB,KAAKQ,MAC5C,MAAMa,EAAcV,EAAa,eAC3BW,EAAaX,EAAa,cAC1BY,EAAiBZ,EAAa,kBACpC,IAAIa,EAAO,SACPC,EAAQZ,GAAUA,EAAOa,IAAI,SAWjC,IARMX,GAAQU,IACZV,EAAOf,KAAK2B,aAAcF,KAGtBZ,GAAUY,IACdZ,EAASb,KAAK4B,aAAcb,KAG1BF,EACF,OAAO,0BAAMgB,UAAU,qBACf,0BAAMA,UAAU,qBAAsBX,GAAeH,GACrD,yBAAKe,IAAK7B,EAAQ,MAAiC8B,OAAQ,OAAQC,MAAO,UAIpF,MAAMC,EAAa1B,EAAc2B,UAAYrB,EAAOa,IAAI,cAIxD,OAHAV,OAAkBmB,IAAVnB,EAAsBA,IAAUS,EACxCD,EAAOX,GAAUA,EAAOa,IAAI,SAAWF,EAEhCA,GACL,IAAK,SACH,OAAO,kBAACH,EAAW,KACjBQ,UAAU,UAAc7B,KAAKQ,MAAK,CAClCS,SAAUA,EACVL,WAAaA,EACbC,OAASA,EACTE,KAAOA,EACPkB,WAAYA,EACZjB,MAAQA,EACRG,gBAAmBA,EACnBC,iBAAoBA,KACxB,IAAK,QACH,OAAO,kBAACE,EAAU,KAChBO,UAAU,SAAa7B,KAAKQ,MAAK,CACjCI,WAAaA,EACbC,OAASA,EACTE,KAAOA,EACPkB,WAAYA,EACZnB,SAAWA,EACXK,gBAAmBA,EACnBC,iBAAoBA,KAKxB,QACE,OAAO,kBAACG,EAAc,OACfvB,KAAKQ,MAAK,CACfG,aAAeA,EACfC,WAAaA,EACbC,OAASA,EACTE,KAAOA,EACPkB,WAAYA,EACZnB,SAAWA,KAEnB,EACD,IAlGoBZ,EAAK,YACL,CACjBW,OAAQ,IAAAuB,KAAgBC,WACxB1B,aAAc2B,IAAAA,KAAAA,WACd1B,WAAY0B,IAAAA,KAAAA,WACZ/B,cAAe+B,IAAAA,OAAAA,WACfvB,KAAMuB,IAAAA,OACNpB,YAAaoB,IAAAA,OACbtB,MAAOsB,IAAAA,KACPxB,SAAUwB,IAAAA,KACVC,YAAaD,IAAAA,OACbE,MAAOF,IAAAA,OACPrB,SAAUmB,IAAAA,KAAAA,WACVjB,gBAAiBmB,IAAAA,KACjBlB,iBAAkBkB,IAAAA,M,4JCZP,MAAMG,UAA6BC,IAAAA,UAO9CC,YAAYnC,EAAOoC,GACfC,MAAMrC,EAAOoC,GAAQ,6BASN,KAEjB,IAAI,cAAErC,GAAkBP,KAAKQ,MAG7B,OADkB,IAAIsC,IAAJ,CAAQvC,EAAcwC,MAAOC,EAAAA,EAAAA,UAC9BC,UAAU,IAbzB,IAAI,WAAErC,GAAeJ,GACjB,aAAE0C,GAAiBtC,IACvBZ,KAAKmD,MAAQ,CACTJ,IAAK/C,KAAKoD,mBACVF,kBAA+Bf,IAAjBe,EAA6B,yCAA2CA,EAE9F,CAUFG,iCAAiCC,GAC3B,IAAI,WAAE1C,GAAe0C,GACjB,aAAEJ,GAAiBtC,IAEvBZ,KAAKuD,SAAS,CACVR,IAAK/C,KAAKoD,mBACVF,kBAA
+Bf,IAAjBe,EAA6B,yCAA2CA,GAE9F,CAEAxC,SACI,IAAI,WAAEE,GAAeZ,KAAKQ,OACtB,KAAEgD,GAAS5C,IAEX6C,GAAwBC,EAAAA,EAAAA,IAAY1D,KAAKmD,MAAMD,cAEnD,MAAqB,iBAATM,GAAqB,IAAYA,GAAMG,OAAe,KAE7D3D,KAAKmD,MAAMJ,MAAQa,EAAAA,EAAAA,IAAsB5D,KAAKmD,MAAMD,gBACjCU,EAAAA,EAAAA,IAAsB5D,KAAKmD,MAAMJ,KAIjD,0BAAMlB,UAAU,eAChB,uBAAGgC,OAAO,SAASC,IAAI,sBAAsBC,KAAO,GAAGN,eAAqCO,mBAAmBhE,KAAKmD,MAAMJ,QACtH,kBAACkB,EAAc,CAACnC,IAAM,GAAG2B,SAA+BO,mBAAmBhE,KAAKmD,MAAMJ,OAASmB,IAAI,6BALtG,IAQb,EAIJ,MAAMD,UAAuBvB,IAAAA,UAM3BC,YAAYnC,GACVqC,MAAMrC,GACNR,KAAKmD,MAAQ,CACXgB,QAAQ,EACRC,OAAO,EAEX,CAEAC,oBACE,MAAMC,EAAM,IAAIC,MAChBD,EAAIE,OAAS,KACXxE,KAAKuD,SAAS,CACZY,QAAQ,GACR,EAEJG,EAAIG,QAAU,KACZzE,KAAKuD,SAAS,CACZa,OAAO,GACP,EAEJE,EAAIxC,IAAM9B,KAAKQ,MAAMsB,GACvB,CAEAuB,iCAAiCC,GAC/B,GAAIA,EAAUxB,MAAQ9B,KAAKQ,MAAMsB,IAAK,CACpC,MAAMwC,EAAM,IAAIC,MAChBD,EAAIE,OAAS,KACXxE,KAAKuD,SAAS,CACZY,QAAQ,GACR,EAEJG,EAAIG,QAAU,KACZzE,KAAKuD,SAAS,CACZa,OAAO,GACP,EAEJE,EAAIxC,IAAMwB,EAAUxB,GACtB,CACF,CAEApB,SACE,OAAIV,KAAKmD,MAAMiB,MACN,yBAAKF,IAAK,UACPlE,KAAKmD,MAAMgB,OAGhB,yBAAKrC,IAAK9B,KAAKQ,MAAMsB,IAAKoC,IAAKlE,KAAKQ,MAAM0D,MAFxC,IAGX,E,gGCrHF,MAAM,EAA+BjE,QAAQ,sBCAvC,EAA+BA,QAAQ,a,gCCoB7C,SAASyE,EAAS,GAAyC,IAAzC,OAAEC,EAAM,UAAE9C,EAAY,GAAE,WAAEjB,GAAY,EACtD,GAAsB,iBAAX+D,EACT,OAAO,KAGT,MAAMC,EAAK,IAAIC,EAAAA,WAAW,CACxBC,MAAM,EACNC,aAAa,EACbC,QAAQ,EACRC,WAAY,WACXC,IAAIC,EAAAA,SAEPP,EAAGQ,KAAKC,MAAMC,QAAQ,CAAC,eAAgB,gBAEvC,MAAM,kBAAEC,GAAsB3E,IACxBkE,EAAOF,EAAGlE,OAAOiE,GACjBa,EAAYC,EAAUX,EAAM,CAAES,sBAEpC,OAAKZ,GAAWG,GAASU,EAKvB,yBAAK3D,UAAW6D,IAAG7D,EAAW,YAAa8D,wBAAyB,CAAEC,OAAQJ,KAJvE,IAMX,CAtCIK,IAAAA,SACFA,IAAAA,QAAkB,0BAA0B,SAAUC,GAQpD,OAHIA,EAAQ/B,MACV+B,EAAQC,aAAa,MAAO,uBAEvBD,CACT,IAoCFpB,EAASsB,aAAe,CACtBpF,WAAY,KAAM,CAAG2E,mBAAmB,KAG1C,UAEO,SAASE,EAAUQ,GAA0C,IAArC,kBAAEV,GAAoB,GAAU,UAAH,6CAAG,CAAC,EAC9D,MAAMW,EAAkBX,EAClBY,EAAcZ,EAAoB,GAAK,CAAC,QAAS,SAOvD,OALIA,IAAsBE,EAAUW,4BAClCC,QAAQC,KAAM,gHACdb,EAAUW,2BAA4B,GAGjCP,IAAAA,SAAmBI,EAAK,CAC7BM,SAAU,CAAC,UACXC,YAAa,CAAC,QAAS,QACvBN,kBACAC,eAEJ,CACAV,EAAUW,2BAA4B,C,2HCxEtC,MAAMK,EAAUxG,EAAAA,MAEVyG,EAAa,CAAC,EAEpB,IAEA,UAAAD,GAAO,KAAPA,IAAc,QAAU,SAAUE,GAChC,GAAY,eAARA,EACF,OAQF,IAAIC,EAAMH,EAAQE,GAClBD,GAAWG,EAAAA,EAAAA,IAAmBF,IAAQC,EAAIE,QAAUF,EAAIE,QAAUF,CACpE,IAEAF,EAAWK,WAAaA,EAAAA,O,mvBCnBjB,MAAMC,EAAkB,aAClBC,EAAY,YACZC,EAAS,SACTC,EAAuB,uBACvBC,EAAmB,mBACnBC,EAAW,WACXC,EAAiB,iBACjBC,EAAwB,wBAI9B,SAASC,EAAgBC,GAC9B,MAAO,CACLjG,KAAMwF,EACNS,QAASA,EAEb,CAEO,SAASC,EAAUD,GACxB,MAAO,CACLjG,KAAMyF,EACNQ,QAASA,EAEb,CAEO,MAAME,EAA8BF,GAAa,IAAuB,IAAtB,YAAEG,GAAa,EACtEA,EAAYF,UAAUD,GACtBG,EAAYC,8BAA8B,EAGrC,SAASC,EAAOL,GACrB,MAAO,CACLjG,KAAM0F,EACNO,QAASA,EAEb,CAEO,MAAMM,EAA2BN,GAAa,IAAuB,IAAtB,YAAEG,GAAa,EACnEA,EAAYE,OAAOL,GACnBG,EAAYC,8BAA8B,EAG/BG,EAAwBP,GAAa,IAAmC,IAAlC,YAAEG,EAAW,WAAEK,GAAY,GACxE,KAAEC,EAAI,MAAGC,EAAK,QAAEC,GAAYX,GAC5B,OAAE5G,EAAM,KAAEE,GAASmH,EACnBG,EAAOxH,EAAOa,IAAI,eAGfsB,EAAAA,EAAAA,wBAEO,eAATqF,GAA0BD,GAC7BH,EAAWK,WAAY,CACrBC,OAAQxH,EACR4D,OAAQ,OACR6D,MAAO,UACPC,QAAS,kHAIRN,EAAM/D,MACT6D,EAAWK,WAAW,CACpBC,OAAQxH,EACR4D,OAAQ,OACR6D,MAAO,QACPC,QAAS,IAAeN,KAK5BP,EAAYc,iCAAiC,CAAER,OAAMC,SAAQ,EAIxD,SAASQ,EAAgBlB,GAC9B,MAAO,CACLjG,KAAM4F,EACNK,QAASA,EAEb,CAGO,MAAMiB,EAAoCjB,GAAa,IAAuB,IAAtB,YAAEG,GAAa,EAC5EA,EAAYe,gBAAgBlB,GAC5BG,EAAYC,8BAA8B,EAG/Be,EAAsBV,GAAW,IAAuB,IAAtB,YAAEN,GAAa,GACxD,OAAE/G,EAAM,KAAEE,EAAI,SAAE8H,EAAQ,SAAEC,EAAQ,aAAEC,EAAY,SAAEC,EAAQ,aAAEC,GAAiBf,EAC7EgB,EAAO,CACTC,WAAY,WACZC,MAAOlB,EAAKmB,OAAOC,KAjFA,KAkFnBT,WACAC,YAGES,EAAU,CAAC,EAEf,OAAQR,GACN,IAAK,gBAcT,SAA8BlF,EAAQmF,EAAUC,GACzCD,GACH,IAAcnF,EAAQ,CAAC2F,UAAWR,IAG/BC,GACH,IAAcpF,EAAQ,CAAC
4F,cAAeR,GAE1C,CArBMS,CAAqBR,EAAMF,EAAUC,GACrC,MAEF,IAAK,QACHM,EAAQI,cAAgB,UAAWC,EAAAA,EAAAA,IAAKZ,EAAW,IAAMC,GACzD,MACF,QACE5C,QAAQC,KAAM,iCAAgCyC,oDAGlD,OAAOnB,EAAYiC,iBAAiB,CAAEC,MAAMC,EAAAA,EAAAA,IAAcb,GAAOnG,IAAKlC,EAAOa,IAAI,YAAaX,OAAMwI,UAASS,MAfjG,CAAC,EAeuG9B,QAAM,EAarH,MAAM+B,EAAyB/B,GAAW,IAAuB,IAAtB,YAAEN,GAAa,GAC3D,OAAE/G,EAAM,OAAEwI,EAAM,KAAEtI,EAAI,SAAEiI,EAAQ,aAAEC,GAAiBf,EACnDqB,EAAU,CACZI,cAAe,UAAWC,EAAAA,EAAAA,IAAKZ,EAAW,IAAMC,IAE9CC,EAAO,CACTC,WAAY,qBACZC,MAAOC,EAAOC,KAxHK,MA2HrB,OAAO1B,EAAYiC,iBAAiB,CAACC,MAAMC,EAAAA,EAAAA,IAAcb,GAAOnI,OAAMgC,IAAKlC,EAAOa,IAAI,YAAawG,OAAMqB,WAAU,EAGxGW,EAAqC,IAAD,IAAE,KAAEhC,EAAI,YAAEiC,GAAa,SAAO,IAAuB,IAAtB,YAAEvC,GAAa,GACzF,OAAE/G,EAAM,KAAEE,EAAI,SAAEiI,EAAQ,aAAEC,EAAY,aAAEmB,GAAiBlC,EACzDgB,EAAO,CACTC,WAAY,qBACZkB,KAAMnC,EAAKmC,KACXb,UAAWR,EACXS,cAAeR,EACfqB,aAAcH,EACdI,cAAeH,GAGjB,OAAOxC,EAAYiC,iBAAiB,CAACC,MAAMC,EAAAA,EAAAA,IAAcb,GAAOnI,OAAMgC,IAAKlC,EAAOa,IAAI,YAAawG,QAAM,CAC1G,EAEYsC,EAA8C,IAAD,IAAE,KAAEtC,EAAI,YAAEiC,GAAa,SAAO,IAAuB,IAAtB,YAAEvC,GAAa,GAClG,OAAE/G,EAAM,KAAEE,EAAI,SAAEiI,EAAQ,aAAEC,EAAY,aAAEmB,GAAiBlC,EACzDqB,EAAU,CACZI,cAAe,UAAWC,EAAAA,EAAAA,IAAKZ,EAAW,IAAMC,IAE9CC,EAAO,CACTC,WAAY,qBACZkB,KAAMnC,EAAKmC,KACXb,UAAWR,EACXsB,aAAcH,EACdI,cAAeH,GAGjB,OAAOxC,EAAYiC,iBAAiB,CAACC,MAAMC,EAAAA,EAAAA,IAAcb,GAAOnI,OAAMgC,IAAKlC,EAAOa,IAAI,YAAawG,OAAMqB,WAAS,CACnH,EAEYM,EAAqBY,GAAW,IAAgG,IAKvIC,GALwC,GAAEC,EAAE,WAAE/J,EAAU,YAAEgH,EAAW,WAAEK,EAAU,cAAE2C,EAAa,cAAErK,EAAa,cAAEsK,GAAe,GAChI,KAAEf,EAAI,MAAEE,EAAM,CAAC,EAAC,QAAET,EAAQ,CAAC,EAAC,KAAExI,EAAI,IAAEgC,EAAG,KAAEmF,GAASuC,GAElD,4BAAEK,GAAgCD,EAAcjK,cAAgB,CAAC,EAIrE,GAAIL,EAAc2B,SAAU,CAC1B,IAAI6I,EAAiBH,EAAcI,qBAAqBJ,EAAcK,kBACtEP,EAAYQ,IAASnI,EAAKgI,GAAgB,EAC5C,MACEL,EAAYQ,IAASnI,EAAKxC,EAAcwC,OAAO,GAGP,iBAAhC+H,IACRJ,EAAUV,MAAQ,IAAc,CAAC,EAAGU,EAAUV,MAAOc,IAGvD,MAAMK,EAAWT,EAAUzH,WAE3B,IAAImI,EAAW,IAAc,CAC3B,OAAS,oCACT,eAAgB,oCAChB,mBAAoB,kBACnB7B,GAEHoB,EAAGU,MAAM,CACPtI,IAAKoI,EACLG,OAAQ,OACR/B,QAAS6B,EACTpB,MAAOA,EACPF,KAAMA,EACNyB,mBAAoB3K,IAAa2K,mBACjCC,oBAAqB5K,IAAa4K,sBAEnCC,MAAK,SAAUC,GACd,IAAIvD,EAAQwD,KAAKC,MAAMF,EAASjB,MAC5BrG,EAAQ+D,IAAWA,EAAM/D,OAAS,IAClCyH,EAAa1D,IAAWA,EAAM0D,YAAc,IAE1CH,EAASI,GAUV1H,GAASyH,EACZ5D,EAAWK,WAAW,CACpBC,OAAQxH,EACRyH,MAAO,QACP7D,OAAQ,OACR8D,QAAS,IAAeN,KAK5BP,EAAYc,iCAAiC,CAAER,OAAMC,UAnBnDF,EAAWK,WAAY,CACrBC,OAAQxH,EACRyH,MAAO,QACP7D,OAAQ,OACR8D,QAASiD,EAASK,YAgBxB,IACCC,OAAMC,IACL,IACIxD,EADM,IAAIyD,MAAMD,GACFxD,QAKlB,GAAIwD,EAAEP,UAAYO,EAAEP,SAASjB,KAAM,CACjC,MAAM0B,EAAUF,EAAEP,SAASjB,KAC3B,IACE,MAAM2B,EAAkC,iBAAZD,EAAuBR,KAAKC,MAAMO,GAAWA,EACrEC,EAAahI,QACfqE,GAAY,YAAW2D,EAAahI,SAClCgI,EAAaC,oBACf5D,GAAY,kBAAiB2D,EAAaC,oBAE5C,CADA,MAAOC,GACP,CAEJ,CACArE,EAAWK,WAAY,CACrBC,OAAQxH,EACRyH,MAAO,QACP7D,OAAQ,OACR8D,QAASA,GACR,GACH,EAGG,SAAS8D,EAAc9E,GAC5B,MAAO,CACLjG,KAAM8F,EACNG,QAASA,EAEb,CAEO,SAAS+E,EAAqB/E,GACnC,MAAO,CACLjG,KAAM+F,EACNE,QAASA,EAEb,CAEO,MAAMI,EAA+B,IAAO,IAAqC,IAApC,cAAEgD,EAAa,WAAEjK,GAAY,EAE/E,GADgBA,IACJ6L,qBACZ,CACE,MAAMC,EAAa7B,EAAc6B,aACjCC,aAAaC,QAAQ,aAAc,IAAeF,EAAWG,QAC/D,GAGWC,EAAY,CAAC/J,EAAKgK,IAA4B,KACzD/J,EAAAA,EAAAA,wBAA8B+J,EAE9B/J,EAAAA,EAAAA,KAASD,EAAI,C,yKCxRA,aACb,MAAO,CACLiK,UAAUC,GACRjN,KAAKkN,YAAclN,KAAKkN,aAAe,CAAC,EACxClN,KAAKkN,YAAYC,UAAYF,EAAOrF,YAAY2E,cAChDvM,KAAKkN,YAAYE,mBAAqB,IAAAA,GAAkB,KAAlBA,EAAwB,KAAMH,GACpEjN,KAAKkN,YAAYG,kBAAoB,IAAAA,GAAiB,KAAjBA,EAAuB,KAAMJ,EACpE,EACAK,aAAc,CACZpF,KAAM,CACJqF,SAAQ,UACRC,QAAO,EACPC,UAASA,GAEXjK,KAAM,CACJkK,YAAaC,IAIrB,CAEO,SAASN,EAAkBJ,EAAQtG,EAAKkC,EAAUC,GACvD,MACElB,aAAa,UAAEF,GACfnH,eAAe,SAAEqN,EAAQ,OAAE1L,IACzB+K,EAEEY,EAAiB3L,IAAW,CAAC,aAAc,m
BAAqB,CAAC,uBAEjErB,EAAS+M,IAAWE,MAAM,IAAID,EAAgBlH,IAEpD,OAAI9F,EAIG6G,EAAU,CACf,CAACf,GAAM,CACLoH,MAAO,CACLlF,WACAC,YAEFjI,OAAQA,EAAOgM,UATV,IAYX,CAEO,SAASO,EAAmBH,EAAQtG,EAAKoH,GAC9C,MACEnG,aAAa,UAAEF,GACfnH,eAAe,SAAEqN,EAAQ,OAAE1L,IACzB+K,EAEEY,EAAiB3L,IAAW,CAAC,aAAc,mBAAqB,CAAC,uBAEjErB,EAAS+M,IAAWE,MAAM,IAAID,EAAgBlH,IAEpD,OAAI9F,EAIG6G,EAAU,CACf,CAACf,GAAM,CACLoH,QACAlN,OAAQA,EAAOgM,UANV,IASX,C,oIC3DA,SACE,CAAC7F,EAAAA,iBAAkB,CAAC7D,EAAO,KAAiB,IAAjB,QAAEsE,GAAS,EACpC,OAAOtE,EAAM6K,IAAK,kBAAmBvG,EAAS,EAGhD,CAACR,EAAAA,WAAY,CAAC9D,EAAO,KAAiB,IAAD,MAAhB,QAAEsE,GAAS,EAC1BwG,GAAaC,EAAAA,EAAAA,QAAOzG,GACpB0G,EAAMhL,EAAMzB,IAAI,gBAAiB0M,EAAAA,EAAAA,OAwBrC,OArBA,MAAAH,EAAWI,YAAU,QAAW,IAAuB,IAArB1H,EAAK2H,GAAU,EAC/C,KAAKC,EAAAA,EAAAA,IAAOD,EAASR,OACnB,OAAO3K,EAAM6K,IAAI,aAAcG,GAEjC,IAAI3M,EAAO8M,EAASR,MAAM,CAAC,SAAU,SAErC,GAAc,WAATtM,GAA8B,SAATA,EACxB2M,EAAMA,EAAIH,IAAIrH,EAAK2H,QACd,GAAc,UAAT9M,EAAmB,CAC7B,IAAIqH,EAAWyF,EAASR,MAAM,CAAC,QAAS,aACpChF,EAAWwF,EAASR,MAAM,CAAC,QAAS,aAExCK,EAAMA,EAAIK,MAAM,CAAC7H,EAAK,SAAU,CAC9BkC,SAAUA,EACV4F,OAAQ,UAAW7E,EAAAA,EAAAA,IAAKf,EAAW,IAAMC,KAG3CqF,EAAMA,EAAIK,MAAM,CAAC7H,EAAK,UAAW2H,EAAS5M,IAAI,UAChD,KAGKyB,EAAM6K,IAAK,aAAcG,EAAK,EAGvC,CAAC/G,EAAAA,kBAAmB,CAACjE,EAAO,KAAiB,IAEvCuL,GAFsB,QAAEjH,GAAS,GACjC,KAAES,EAAI,MAAEC,GAAUV,EAGtBS,EAAKC,MAAQ,IAAc,CAAC,EAAGA,GAC/BuG,GAAaR,EAAAA,EAAAA,QAAOhG,GAEpB,IAAIiG,EAAMhL,EAAMzB,IAAI,gBAAiB0M,EAAAA,EAAAA,OAGrC,OAFAD,EAAMA,EAAIH,IAAIU,EAAWhN,IAAI,QAASgN,GAE/BvL,EAAM6K,IAAK,aAAcG,EAAK,EAGvC,CAACjH,EAAAA,QAAS,CAAC/D,EAAO,KAAiB,IAAjB,QAAEsE,GAAS,EACvBkH,EAASxL,EAAMzB,IAAI,cAAckN,eAAelC,IAChD,IAAAjF,GAAO,KAAPA,GAAiBS,IACfwE,EAAWmC,OAAO3G,EAAK,GACvB,IAGN,OAAO/E,EAAM6K,IAAI,aAAcW,EAAO,EAGxC,CAACrH,EAAAA,gBAAiB,CAACnE,EAAO,KAAiB,IAAjB,QAAEsE,GAAS,EACnC,OAAOtE,EAAM6K,IAAI,UAAWvG,EAAQ,EAGtC,CAACF,EAAAA,uBAAwB,CAACpE,EAAO,KAAiB,IAAjB,QAAEsE,GAAS,EAC1C,OAAOtE,EAAM6K,IAAI,cAAcE,EAAAA,EAAAA,QAAOzG,EAAQiF,YAAY,E,4VCvE9D,MAAMvJ,EAAQA,GAASA,EAEV2L,GAAmBC,EAAAA,EAAAA,gBAC5B5L,GACA+E,GAAQA,EAAKxG,IAAK,qBAGTsN,GAAyBD,EAAAA,EAAAA,gBAClC5L,GACA,IAAO,IAAyB,IAAD,MAAvB,cAAE5C,GAAe,EACnB0O,EAAc1O,EAAc2O,wBAAyBd,EAAAA,EAAAA,KAAI,CAAC,GAC1De,GAAOC,EAAAA,EAAAA,QAUX,OAPA,MAAAH,EAAYZ,YAAU,QAAW,IAAkB,IAAhB1H,EAAK0I,GAAK,EACvClB,GAAMC,EAAAA,EAAAA,OAEVD,EAAMA,EAAIH,IAAIrH,EAAK0I,GACnBF,EAAOA,EAAKG,KAAKnB,EAAI,IAGhBgB,CAAI,IAKJI,EAAwB,CAAEpM,EAAO8K,IAAiB,IAAyB,IAAD,MAAvB,cAAE1N,GAAe,EAC/E8F,QAAQC,KAAK,+FACb,IAAI4I,EAAsB3O,EAAc2O,sBACpCP,GAASS,EAAAA,EAAAA,QA0Bb,OAxBA,MAAAnB,EAAWuB,YAAU,QAAWC,IAAW,IAAD,EACxC,IAAItB,GAAMC,EAAAA,EAAAA,OACV,MAAAqB,EAAMpB,YAAU,QAAW,IAAoB,IAEzCqB,GAFsB3O,EAAMsI,GAAO,EACnCsG,EAAaT,EAAoBxN,IAAIX,GAGkB,IAAD,EAA1B,WAA3B4O,EAAWjO,IAAI,SAAwB2H,EAAOuG,OACjDF,EAAgBC,EAAWjO,IAAI,UAE/B,MAAAgO,EAAcG,UAAQ,QAAWlJ,IACzB0C,EAAOyG,SAASnJ,KACpB+I,EAAgBA,EAAcb,OAAOlI,GACvC,IAGFgJ,EAAaA,EAAW3B,IAAI,gBAAiB0B,IAG/CvB,EAAMA,EAAIH,IAAIjN,EAAM4O,EAAW,IAGjChB,EAASA,EAAOW,KAAKnB,EAAI,IAGpBQ,CAAM,EAGFoB,EAA6B,SAAC5M,GAAK,IAAE8K,EAAa,UAAH,8CAAGmB,EAAAA,EAAAA,QAAM,OAAM,IAAuB,IAAvB,cAAEvE,GAAe,EAC1F,MAAMmF,EAAiBnF,EAAcmE,2BAA4BI,EAAAA,EAAAA,QACjE,IAAIT,GAASS,EAAAA,EAAAA,QAqBb,OApBA,IAAAY,GAAc,KAAdA,GAAyBL,IACvB,IAAIrB,EAAW,IAAAL,GAAU,KAAVA,GAAgBgC,GAAOA,EAAIvO,IAAIiO,EAAWE,SAASK,WAC7D5B,IACH,IAAAqB,GAAU,KAAVA,GAAoB,CAACnP,EAAOO,KAC1B,GAA2B,WAAtBP,EAAMkB,IAAI,QAAuB,CACpC,MAAMyO,EAAiB7B,EAAS5M,IAAIX,GACpC,IAAIqP,EAAmB5P,EAAMkB,IAAI,UACiC,IAAD,EAAjE,GAAI0N,EAAAA,KAAAA,OAAYe,IAAmB/B,EAAAA,IAAAA,MAAUgC,GAC3C,MAAAA,EAAiBP,UAAQ,QAAWlJ,IAC5BwJ,EAAeL,SAASnJ,KAC5ByJ,EAAmBA,EAAiBvB,OAAOlI,GAC7C,IAEFgJ,EAAaA,EAAW3B,IAAIjN,EAAMP,EAAMwN,IAAI,SAAUoC,GAE1D,KAEFzB,EAA
SA,EAAOW,KAAKK,GACvB,IAEKhB,CAAM,CACd,EAEYjC,GAAaqC,EAAAA,EAAAA,gBACtB5L,GACA+E,GAAQA,EAAKxG,IAAI,gBAAiB0M,EAAAA,EAAAA,SAIzBiC,EAAe,CAAElN,EAAO8K,IAAiB,IAAyB,IAAD,MAAvB,cAAEpD,GAAe,EAClE6B,EAAa7B,EAAc6B,aAE/B,OAAI0C,EAAAA,KAAAA,OAAYnB,KAIP,MAAAA,EAAWpB,QAAM,QAAWyB,IAAe,IAAD,IAG/C,OAEuB,IAFhB,gBAAYA,IAAS,QAAM3H,KACN+F,EAAWhL,IAAIiF,MACzC,QAAS,EAAa,IACvBhD,OATI,IASE,EAGA/C,GAAamO,EAAAA,EAAAA,gBACtB5L,GACA+E,GAAQA,EAAKxG,IAAK,Y,4DC9Gf,MAAM4O,EAAU,CAAEC,EAAW,KAAF,IAAE,cAAE1F,EAAa,cAAEtK,GAAe,SAAM,IAAyC,IAAzC,KAAEiQ,EAAI,OAAElF,EAAM,UAAEmF,EAAS,OAAEC,GAAQ,EACvGzC,EAAa,CACfvB,WAAY7B,EAAc6B,cAAgB7B,EAAc6B,aAAaG,OACrEoC,YAAa1O,EAAc2O,uBAAyB3O,EAAc2O,sBAAsBrC,OACxF8D,aAAepQ,EAAc+N,YAAc/N,EAAc+N,WAAWzB,QAGtE,OAAO0D,EAAU,CAAEC,OAAMlF,SAAQmF,YAAWxC,gBAAeyC,GAAS,CACrE,C,8HCTM,MAAME,EAAiB,iBACjBC,EAAiB,iBAGvB,SAASC,EAAOC,EAAYC,GACjC,MAAO,CACLxP,KAAMoP,EACNnJ,QAAS,CACP,CAACsJ,GAAaC,GAGpB,CAGO,SAASC,EAAOF,GACrB,MAAO,CACLvP,KAAMqP,EACNpJ,QAASsJ,EAEb,CAIO,MAAM5M,EAAS,IAAO,IAA+B,IAA/B,WAACvD,EAAU,YAAEgH,GAAY,EAGpD,GADgBhH,IACJ6L,qBACZ,CACE,MAAMC,EAAaC,aAAauE,QAAQ,cACrCxE,GAED9E,EAAY4E,qBAAqB,CAC/BE,WAAYf,KAAKC,MAAMc,IAG7B,E,2FCjCK,MAAMyE,EAAkB,CAACC,EAAMnE,KACpC,IACE,OAAOoE,IAAAA,KAAUD,EAMnB,CALE,MAAMnF,GAIN,OAHIgB,GACFA,EAAOhF,WAAWqJ,aAAc,IAAIpF,MAAMD,IAErC,CAAC,CACV,E,2HCHF,MAAM1L,EAAgB,CACpBgR,eAAgB,KACPJ,EAAAA,EAAAA,iBAAgBK,IAKZ,SAASC,IAEtB,MAAO,CACLnE,aAAc,CACZ9J,KAAM,CACJgK,QAASkE,EACTjE,UAAWlN,GAEboR,QAAS,CACPpE,SAAQ,UACRC,QAAO,EACPC,UAASA,IAIjB,C,mFCtBA,SAEE,CAACmD,EAAAA,gBAAiB,CAACzN,EAAOyO,IACjBzO,EAAM0O,OAAM3D,EAAAA,EAAAA,QAAO0D,EAAOnK,UAGnC,CAACoJ,EAAAA,gBAAiB,CAAC1N,EAAOyO,KACxB,MAAMb,EAAaa,EAAOnK,QACpBqK,EAAS3O,EAAMzB,IAAIqP,GACzB,OAAO5N,EAAM6K,IAAI+C,GAAae,EAAO,E,+ECflC,MAAMpQ,EAAM,CAACyB,EAAOqN,IAClBrN,EAAM2K,MAAM,IAAc0C,GAAQA,EAAO,CAACA,G,sGCA5C,MAAMuB,EAAkBC,GAAS/E,IACtC,MAAOtC,IAAI,MAAEU,IAAW4B,EAExB,OAAO5B,EAAM2G,EAAI,EAGNC,EAAiB,CAACD,EAAKE,IAAO,IAAqB,IAArB,YAAER,GAAa,EACxD,GAAIM,EACF,OAAON,EAAYK,eAAeC,GAAKvG,KAAK0G,EAAMA,GAGpD,SAASA,EAAKC,GACRA,aAAelG,OAASkG,EAAIC,QAAU,KACxCX,EAAYY,oBAAoB,gBAChCZ,EAAYY,oBAAoB,gBAChCZ,EAAYa,UAAU,IACtBlM,QAAQjC,MAAMgO,EAAIrG,WAAa,IAAMiG,EAAIjP,KACzCmP,EAAG,OAEHA,GAAGf,EAAAA,EAAAA,iBAAgBiB,EAAII,MAE3B,E,4DCvBK,MAAMC,EAAW1E,GACnBA,EACM2E,QAAQC,UAAU,KAAM,KAAO,IAAG5E,KAElC6E,OAAOC,SAASC,KAAO,E,6FCAnB,aACb,MAAO,CAACC,EAAAA,QAAQ,CACdzF,aAAc,CACZqE,QAAS,CACPjE,YAAa,CACXvJ,OAAQ,CAAC6O,EAAK/F,IAAW,WACvB+F,KAAO,WAEP,MAAMF,EAAOG,mBAAmBL,OAAOC,SAASC,MAChD7F,EAAOiG,cAAcC,kBAAkBL,EACzC,KAINM,eAAgB,CACd3C,UAAW4C,EAAAA,QACXC,aAAcC,EAAAA,UAGpB,C,qQCvBA,MAAM,EAA+BtT,QAAQ,a,0CCK7C,MAAMuT,EAAY,mBACZC,EAAkB,sBAEXC,EAAO,CAACV,EAAK,KAAF,IAAE,WAAEpS,EAAU,gBAAE+S,GAAiB,SAAK,WAAc,IAAD,uBAATC,EAAI,yBAAJA,EAAI,gBAGpE,GAFAZ,KAAOY,GAEHhT,IAAaiT,YAIjB,IACE,IAAKC,EAAYC,GAASH,EAE1BE,EAAa,IAAcA,GAAcA,EAAa,CAACA,GAGvD,MAAME,EAAeL,EAAgBM,2BAA2BH,GAGhE,IAAIE,EAAarQ,OACf,OAEF,MAAOnC,EAAM0S,GAAaF,EAE1B,IAAKD,EACH,OAAOtB,EAAAA,EAAAA,SAAQ,KAGW,IAAxBuB,EAAarQ,QACf8O,EAAAA,EAAAA,UAAQ0B,EAAAA,EAAAA,IAAoB,IAAGnQ,mBAAmBxC,MAASwC,mBAAmBkQ,OAC7C,IAAxBF,EAAarQ,SACtB8O,EAAAA,EAAAA,UAAQ0B,EAAAA,EAAAA,IAAoB,IAAGnQ,mBAAmBxC,MAOtD,CAJE,MAAOyK,GAGP5F,QAAQjC,MAAM6H,EAChB,CACF,CAAC,EAEYmI,EAAY5D,IAChB,CACLhP,KAAMgS,EACN/L,QAAS,IAAc+I,GAAQA,EAAO,CAACA,KAI9B2C,EAAqBkB,GAAa,IAAoD,IAApD,cAAEnB,EAAa,gBAAES,EAAe,WAAE/S,GAAY,EAE3F,GAAIA,IAAaiT,aAIdQ,EAAS,CAAC,IAAD,EACV,IAAIvB,EAAO,IAAAuB,GAAO,KAAPA,EAAc,GAGV,MAAZvB,EAAK,KAENA,EAAO,IAAAA,GAAI,KAAJA,EAAW,IAGL,MAAZA,EAAK,KAINA,EAAO,IAAAA,GAAI,KAAJA,EAAW,IAGpB,MAAMwB,EAAY,MAAAxB,EAAKyB,MAAM,MAAI,QAAKlF,GAAQA,GAAO,KAE/CmF,EAAab,EAAgBc,2BAA2BH,IAEvD9S,EAAMkT,EAAQ,GAAIC,EAAmB
,IAAMH,EAElD,GAAY,eAAThT,EAAuB,CAExB,MAAMoT,EAAgBjB,EAAgBc,2BAA2B,CAACC,IAI/D,IAAAA,GAAK,KAALA,EAAc,MAAQ,IACvBrO,QAAQC,KAAK,mGACb4M,EAAcQ,KAAK,IAAAkB,GAAa,KAAbA,GAAkBvF,GAAOA,EAAIhP,QAAQ,KAAM,QAAO,IAGvE6S,EAAcQ,KAAKkB,GAAe,EACpC,EAII,IAAAF,GAAK,KAALA,EAAc,MAAQ,GAAK,IAAAC,GAAgB,KAAhBA,EAAyB,MAAQ,KAC9DtO,QAAQC,KAAK,mGACb4M,EAAcQ,KAAK,IAAAc,GAAU,KAAVA,GAAenF,GAAOA,EAAIhP,QAAQ,KAAM,QAAO,IAGpE6S,EAAcQ,KAAKc,GAAY,GAG/BtB,EAAckB,SAASI,EACzB,GAGWK,EAAgB,CAACL,EAAYpU,IAAS6M,IACjD,MAAM6H,EAAc7H,EAAO0G,gBAAgBoB,iBAExCC,IAAAA,GAAMF,GAAa5G,EAAAA,EAAAA,QAAOsG,MAC3BvH,EAAOiG,cAAc+B,gBAAgB7U,GACrC6M,EAAOiG,cAAcgC,gBACvB,EAIWD,EAAkB,CAAC7U,EAAK+U,IAAelI,IAClD,IACEkI,EAAYA,GAAalI,EAAOtC,GAAGyK,gBAAgBhV,GAClCiV,IAAAA,eAAyBF,GAC/BG,GAAGlV,EAGhB,CAFE,MAAM6L,GACN5F,QAAQjC,MAAM6H,EAChB,GAGWiJ,EAAgB,KACpB,CACL1T,KAAMiS,IA0BV,SACE9I,GAAI,CACFyK,gBAtBJ,SAAyBG,EAASC,GAChC,MAAMC,EAAcC,SAASC,gBAC7B,IAAIC,EAAQC,iBAAiBN,GAC7B,MAAMO,EAAyC,aAAnBF,EAAMG,SAC5BC,EAAgBR,EAAgB,uBAAyB,gBAE/D,GAAuB,UAAnBI,EAAMG,SACR,OAAON,EACT,IAAK,IAAIQ,EAASV,EAAUU,EAASA,EAAOC,eAE1C,GADAN,EAAQC,iBAAiBI,KACrBH,GAA0C,WAAnBF,EAAMG,WAG7BC,EAAcG,KAAKP,EAAMQ,SAAWR,EAAMS,UAAYT,EAAMU,WAC9D,OAAOL,EAGX,OAAOR,CACT,GAMEnI,aAAc,CACZyF,OAAQ,CACNvF,QAAS,CACPyH,kBACAb,WACAc,gBACAL,gBACA1B,qBAEF1F,UAAW,CACTsH,eAAe5R,GACNA,EAAMzB,IAAI,eAEnB+S,2BAA2BtR,EAAO6Q,GAChC,MAAOuC,EAAKC,GAAexC,EAE3B,OAAGwC,EACM,CAAC,aAAcD,EAAKC,GAClBD,EACF,CAAC,iBAAkBA,GAErB,EACT,EACAtC,2BAA2B9Q,EAAOqR,GAChC,IAAKhT,EAAM+U,EAAKC,GAAehC,EAE/B,MAAW,cAARhT,EACM,CAAC+U,EAAKC,GACI,kBAARhV,EACF,CAAC+U,GAEH,EACT,GAEFhJ,SAAU,CACR,CAACiG,GAAU,CAACrQ,EAAOyO,IACVzO,EAAM6K,IAAI,cAAegH,IAAAA,OAAUpD,EAAOnK,UAEnD,CAACgM,GAAiBtQ,GACTA,EAAM0L,OAAO,gBAGxBnB,YAAa,CACXgG,U,6GCzMR,MAqBA,EArBgB,CAAC+C,EAAKxJ,IAAW,cAAkCvK,IAAAA,UAAiB,cAAD,uCAMvEtC,IACR,MAAM,IAAEmW,GAAQvW,KAAKQ,MACfgU,EAAa,CAAC,iBAAkB+B,GACtCtJ,EAAOiG,cAAc2B,cAAcL,EAAYpU,EAAI,GACpD,CAEDM,SACE,OACE,0BAAMN,IAAKJ,KAAK0W,QACd,kBAACD,EAAQzW,KAAKQ,OAGpB,E,6GClBF,MAuBA,EAvBgB,CAACiW,EAAKxJ,IAAW,cAA+BvK,IAAAA,UAAiB,cAAD,uCAMpEtC,IACR,MAAM,UAAEqQ,GAAczQ,KAAKQ,OACrB,IAAE+V,EAAG,YAAEC,GAAgB/F,EAAUkG,WACvC,IAAI,WAAEnC,GAAe/D,EAAUkG,WAC/BnC,EAAaA,GAAc,CAAC,aAAc+B,EAAKC,GAC/CvJ,EAAOiG,cAAc2B,cAAcL,EAAYpU,EAAI,GACpD,CAEDM,SACE,OACE,0BAAMN,IAAKJ,KAAK0W,QACd,kBAACD,EAAQzW,KAAKQ,OAGpB,E,0KCnBa,SAASoW,EAAmBC,GACzC,IAAI,GAAElM,GAAOkM,EAmGb,MAAO,CACLvJ,aAAc,CACZ9J,KAAM,CAAEgK,QAnGI,CACdsJ,SAAW/T,GAAQ,IAA4D,IAA5D,WAAEkF,EAAU,cAAE1H,EAAa,YAAEmR,EAAW,WAAE9Q,GAAY,GACnE,MAAEyK,GAAUV,EAChB,MAAMoM,EAASnW,IAef,SAASuR,EAAKC,GACZ,GAAGA,aAAelG,OAASkG,EAAIC,QAAU,IAKvC,OAJAX,EAAYY,oBAAoB,UAChCrK,EAAWqJ,aAAa,IAAe,IAAIpF,OAAOkG,EAAI3J,SAAW2J,EAAIrG,YAAc,IAAMhJ,GAAM,CAAC4B,OAAQ,iBAEnGyN,EAAIC,QAAUD,aAAelG,OAUtC,WACE,IACE,IAAI8K,EAUJ,GARG,QAAShU,EAAAA,EACVgU,EAAU,IAAI,IAAJ,CAAQjU,IAGlBiU,EAAUtB,SAASuB,cAAc,KACjCD,EAAQjT,KAAOhB,GAGO,WAArBiU,EAAQE,UAAmD,WAA1BlU,EAAAA,EAAAA,SAAAA,SAAoC,CACtE,MAAMoB,EAAQ,IACZ,IAAI8H,MAAO,yEAAwE8K,EAAQE,0FAC3F,CAACvS,OAAQ,UAGX,YADAsD,EAAWqJ,aAAalN,EAE1B,CACA,GAAG4S,EAAQG,SAAWnU,EAAAA,EAAAA,SAAAA,OAAqB,CACzC,MAAMoB,EAAQ,IACZ,IAAI8H,MAAO,uDAAsD8K,EAAQG,oCAAoCnU,EAAAA,EAAAA,SAAAA,mFAC7G,CAAC2B,OAAQ,UAEXsD,EAAWqJ,aAAalN,EAC1B,CAGF,CAFE,MAAO6H,GACP,MACF,CACF,CAxC6CmL,IAG3C1F,EAAYY,oBAAoB,WAChCZ,EAAY2F,WAAWjF,EAAII,MACxBjS,EAAcwC,QAAUA,GACzB2O,EAAYa,UAAUxP,EAE1B,CA3BAA,EAAMA,GAAOxC,EAAcwC,MAC3B2O,EAAYY,oBAAoB,WAChCrK,EAAWqP,MAAM,CAAC3S,OAAQ,UAC1B0G,EAAM,CACJtI,MACAwU,UAAU,EACVhM,mBAAoBwL,EAAOxL,oBAAsB,CAACiM,GAAKA,GACvDhM,oBAAqBuL,EAAOvL,qBAAuB,CAACgM,GAAKA,GACzDC,YAAa,cACblO,QAAS,CACP,OAAU,0BAEXkC,KAAK0G,EAAKA,EA+Cb,EAIFG,oBAAsBD,IACpB,IAAIqF,EAAQ,CAAC,KAAM,UAA
W,SAAU,UAAW,gBAKnD,OAJ8B,IAA3B,IAAAA,GAAK,KAALA,EAAcrF,IACfhM,QAAQjC,MAAO,UAASiO,mBAAwB,IAAeqF,MAG1D,CACLlW,KAAM,6BACNiG,QAAS4K,EACV,GAuBgB9E,SAnBN,CACb,2BAA8B,CAACpK,EAAOyO,IACF,iBAAnBA,EAAOnK,QAClBtE,EAAM6K,IAAI,gBAAiB4D,EAAOnK,SAClCtE,GAeuBsK,UAXf,CACdkK,eAAe5I,EAAAA,EAAAA,iBACb5L,GACSA,IAASiL,EAAAA,EAAAA,SAElB5K,GAAQA,EAAK9B,IAAI,kBAAoB,UAS3C,C,iUC3GO,MAAMkW,EAAiB,qBACjBC,EAAuB,2BACvBC,EAAe,mBACfC,EAAqB,yBACrBC,EAAe,mBACfC,EAAQ,YACRC,EAAW,eAEjB,SAAS5G,EAAa6G,GAC3B,MAAO,CACH3W,KAAMoW,EACNnQ,SAAS2Q,EAAAA,EAAAA,gBAAeD,GAE9B,CAEO,SAASE,EAAkBC,GAChC,MAAO,CACH9W,KAAMqW,EACNpQ,QAAS6Q,EAEf,CAEO,SAASC,EAAWJ,GACzB,MAAO,CACH3W,KAAMsW,EACNrQ,QAAS0Q,EAEf,CAEO,SAASK,EAAgBC,GAC9B,MAAO,CACHjX,KAAMuW,EACNtQ,QAASgR,EAEf,CAEO,SAASnQ,EAAW6P,GACzB,MAAO,CACL3W,KAAMwW,EACNvQ,QAAS0Q,EAEb,CAEO,SAASb,IAAoB,IAAdoB,EAAS,UAAH,6CAAG,CAAC,EAE9B,MAAO,CACLlX,KAAMyW,EACNxQ,QAASiR,EAEb,CAEO,SAASC,IAA8B,IAAtBD,EAAS,UAAH,6CAAG,KAAM,EAErC,MAAO,CACLlX,KAAM0W,EACNzQ,QAASiR,EAEb,C,sGC3DA,MAAM,EAA+BzY,QAAQ,iB,aCI7C,MAAM2Y,EAAoB,C,iBAKX,SAASC,EAAiBP,GAAS,IAAD,EAK/C,IAAIQ,EAAS,CACXC,OAAQ,CAAC,GAGPC,EAAoBC,IAAOL,GAAmB,CAACjK,EAAQuK,KACzD,IACE,IAAIC,EAAyBD,EAAYE,UAAUzK,EAAQmK,GAC3D,OAAO,IAAAK,GAAsB,KAAtBA,GAA8BhB,KAASA,GAIhD,CAHE,MAAMlM,GAEN,OADA5F,QAAQjC,MAAM,qBAAsB6H,GAC7B0C,CACT,IACC2J,GAEH,OAAO,UAAAU,GAAiB,KAAjBA,GACGb,KAASA,KAAK,QACjBA,KACCA,EAAIzW,IAAI,SAAWyW,EAAIzW,IAAI,QAGxByW,IAGb,C,2ICrCO,SAASiB,EAAUd,GAGxB,OAAO,IAAAA,GAAM,KAANA,GACAH,IAAQ,IAAD,EACV,IAAIkB,EAAU,sBACVC,EAAI,MAAAnB,EAAIzW,IAAI,YAAU,OAAS2X,GACnC,GAAGC,GAAK,EAAG,CAAC,IAAD,IACT,IAAIC,EAAQ,MAAApB,EAAIzW,IAAI,YAAU,OAAO4X,EAAID,EAAQ1V,QAAQ4Q,MAAM,KAC/D,OAAO4D,EAAInK,IAAI,UAAW,MAAAmK,EAAIzW,IAAI,YAAU,OAAO,EAAG4X,GAO9D,SAAwBC,GACtB,OAAO,IAAAA,GAAK,KAALA,GAAa,CAACC,EAAGC,EAAGH,EAAGI,IACzBJ,IAAMI,EAAI/V,OAAS,GAAK+V,EAAI/V,OAAS,EAC/B6V,EAAI,MAAQC,EACXC,EAAIJ,EAAE,IAAMI,EAAI/V,OAAS,EAC1B6V,EAAIC,EAAI,KACPC,EAAIJ,EAAE,GACPE,EAAIC,EAAI,IAERD,EAAIC,GAEZ,cACL,CAnBmEE,CAAeJ,GAC5E,CACE,OAAOpB,CACT,GAEN,C,8FCXO,SAASiB,EAAUd,EAAQ,GAAa,IAAb,OAAES,GAAQ,EAI1C,OAAOT,CAiBT,C,8FCpBe,WAASrL,GACtB,MAAO,CACLK,aAAc,CACZ6K,IAAK,CACH5K,UAAUqM,EAAAA,EAAAA,SAAa3M,GACvBO,QAAO,EACPC,UAASA,IAIjB,C,6LCAA,IAAIoM,EAA0B,CAE5BC,KAAM,EACNtR,MAAO,QACPC,QAAS,iBAGI,aACb,MAAO,CACL,CAACmP,EAAAA,gBAAiB,CAACzU,EAAO,KAAiB,IAAjB,QAAEsE,GAAS,EAC/BrD,EAAQ,IAAcyV,EAAyBpS,EAAS,CAACjG,KAAM,WACnE,OAAO2B,EACJ2N,OAAO,UAAUwH,IAAWA,IAAUlJ,EAAAA,EAAAA,SAAQE,MAAMpB,EAAAA,EAAAA,QAAQ9J,MAC5D0M,OAAO,UAAUwH,IAAUO,EAAAA,EAAAA,SAAgBP,IAAQ,EAGxD,CAACT,EAAAA,sBAAuB,CAAC1U,EAAO,KAAiB,IAAjB,QAAEsE,GAAS,EAIzC,OAHAA,EAAU,IAAAA,GAAO,KAAPA,GAAY0Q,IACbjK,EAAAA,EAAAA,QAAO,IAAc2L,EAAyB1B,EAAK,CAAE3W,KAAM,cAE7D2B,EACJ2N,OAAO,UAAUwH,IAAM,aAAI,MAACA,IAAUlJ,EAAAA,EAAAA,SAAQ,KAAF,GAAUlB,EAAAA,EAAAA,QAAQzG,GAAU,IACxEqJ,OAAO,UAAUwH,IAAUO,EAAAA,EAAAA,SAAgBP,IAAQ,EAGxD,CAACR,EAAAA,cAAe,CAAC3U,EAAO,KAAiB,IAAjB,QAAEsE,GAAS,EAC7BrD,GAAQ8J,EAAAA,EAAAA,QAAOzG,GAEnB,OADArD,EAAQA,EAAM4J,IAAI,OAAQ,QACnB7K,EACJ2N,OAAO,UAAUwH,IAAWA,IAAUlJ,EAAAA,EAAAA,SAAQE,MAAMpB,EAAAA,EAAAA,QAAO9J,IAAQ2V,QAAO5B,GAAOA,EAAIzW,IAAI,YACzFoP,OAAO,UAAUwH,IAAUO,EAAAA,EAAAA,SAAgBP,IAAQ,EAGxD,CAACP,EAAAA,oBAAqB,CAAC5U,EAAO,KAAiB,IAAjB,QAAEsE,GAAS,EAIvC,OAHAA,EAAU,IAAAA,GAAO,KAAPA,GAAY0Q,IACbjK,EAAAA,EAAAA,QAAO,IAAc2L,EAAyB1B,EAAK,CAAE3W,KAAM,YAE7D2B,EACJ2N,OAAO,UAAUwH,IAAM,aAAI,MAACA,IAAUlJ,EAAAA,EAAAA,SAAQ,KAAF,GAASlB,EAAAA,EAAAA,QAAOzG,GAAS,IACrEqJ,OAAO,UAAUwH,IAAUO,EAAAA,EAAAA,SAAgBP,IAAQ,EAGxD,CAACN,EAAAA,cAAe,CAAC7U,EAAO,KAAiB,IAAjB,QAAEsE,GAAS,EAC7BrD,GAAQ8J,EAAAA,EAAAA,QAAO,IAAc,CAAC,EAAGzG,IAGrC,OADArD,EAAQA,EAAM4J,IAAI,OAAQ,QACnB7K,EACJ2N,OAAO,UAAUwH,IAAWA,IAAUlJ,EAAAA,E
AAAA,SAAQE,MAAMpB,EAAAA,EAAAA,QAAO9J,MAC3D0M,OAAO,UAAUwH,IAAUO,EAAAA,EAAAA,SAAgBP,IAAQ,EAGxD,CAACL,EAAAA,OAAQ,CAAC9U,EAAO,KAAiB,IAAD,MAAhB,QAAEsE,GAAS,EAC1B,IAAIA,IAAYtE,EAAMzB,IAAI,UACxB,OAAOyB,EAGT,IAAI6W,EAAY,MAAA7W,EAAMzB,IAAI,WAAS,QACzByW,IAAQ,IAAD,EACb,OAAO,MAAAA,EAAItI,UAAQ,QAAOoK,IACxB,MAAMC,EAAW/B,EAAIzW,IAAIuY,GACnBE,EAAc1S,EAAQwS,GAE5B,OAAIE,GAEGD,IAAaC,CAAW,GAC/B,IAEN,OAAOhX,EAAM0O,MAAM,CACjByG,OAAQ0B,GACR,EAGJ,CAAC9B,EAAAA,UAAW,CAAC/U,EAAO,KAAiB,IAAD,MAAhB,QAAEsE,GAAS,EAC7B,IAAIA,GAA8B,mBAAZA,EACpB,OAAOtE,EAET,IAAI6W,EAAY,MAAA7W,EAAMzB,IAAI,WAAS,QACzByW,GACC1Q,EAAQ0Q,KAEnB,OAAOhV,EAAM0O,MAAM,CACjByG,OAAQ0B,GACR,EAGR,C,sGChGA,MAEaI,GAAYrL,EAAAA,EAAAA,iBAFX5L,GAASA,IAIrBgV,GAAOA,EAAIzW,IAAI,UAAU0N,EAAAA,EAAAA,WAGdiL,GAAYtL,EAAAA,EAAAA,gBACvBqL,GACAE,GAAOA,EAAIC,Q,0ECVE,aACb,MAAO,CACL5P,GAAI,CACF6P,UAASA,EAAAA,SAGf,C,sGCRe,WAASC,EAAWC,GACjC,OAAO,IAAAD,GAAS,KAATA,GAAiB,CAACE,EAAQpE,KAAiC,IAAzB,IAAAA,GAAG,KAAHA,EAAYmE,IACvD,C,mMCAO,MAAME,EAAgB,uBAChBC,EAAgB,uBAChBC,EAAc,qBACdC,EAAO,cAIb,SAASC,EAAajI,GAC3B,MAAO,CACLvR,KAAMoZ,EACNnT,QAASsL,EAEb,CAEO,SAASkI,EAAavC,GAC3B,MAAO,CACLlX,KAAMqZ,EACNpT,QAASiR,EAEb,CAEO,SAAShF,EAAKwH,GAAoB,IAAbnH,IAAK,yDAE/B,OADAmH,GAAQC,EAAAA,EAAAA,IAAeD,GAChB,CACL1Z,KAAMuZ,EACNtT,QAAS,CAACyT,QAAOnH,SAErB,CAGO,SAASqH,EAAWF,GAAiB,IAAVG,EAAI,uDAAC,GAErC,OADAH,GAAQC,EAAAA,EAAAA,IAAeD,GAChB,CACL1Z,KAAMsZ,EACNrT,QAAS,CAACyT,QAAOG,QAErB,C,wGCjCe,aACb,MAAO,CACL/N,aAAc,CACZyF,OAAQ,CACNxF,SAAQ,UACRC,QAAO,EACPC,UAASA,GAEXjK,KAAM,CACJ8X,cAAaA,IAIrB,C,uGCVA,SAEE,CAACV,EAAAA,eAAgB,CAACzX,EAAOyO,IAAWzO,EAAM6K,IAAI,SAAU4D,EAAOnK,SAE/D,CAACoT,EAAAA,eAAgB,CAAC1X,EAAOyO,IAAWzO,EAAM6K,IAAI,SAAU4D,EAAOnK,SAE/D,CAACsT,EAAAA,MAAO,CAAC5X,EAAOyO,KACd,MAAM2J,EAAU3J,EAAOnK,QAAQsM,MAGzByH,GAActN,EAAAA,EAAAA,QAAO0D,EAAOnK,QAAQyT,OAI1C,OAAO/X,EAAM2N,OAAO,SAAS5C,EAAAA,EAAAA,QAAO,CAAC,IAAIsJ,GAAKA,EAAExJ,IAAIwN,EAAaD,IAAS,EAG5E,CAACT,EAAAA,aAAc,CAAC3X,EAAOyO,KAAY,IAAD,EAChC,IAAIsJ,EAAQtJ,EAAOnK,QAAQyT,MACvBG,EAAOzJ,EAAOnK,QAAQ4T,KAC1B,OAAOlY,EAAMqL,MAAM,OAAC,UAAQ,OAAQ0M,IAASG,GAAQ,IAAM,GAAG,E,iKCxBlE,MAEavV,EAAU3C,GAASA,EAAMzB,IAAI,UAE7B+Z,EAAgBtY,GAASA,EAAMzB,IAAI,UAEnC6Z,EAAU,CAACpY,EAAO+X,EAAOQ,KACpCR,GAAQC,EAAAA,EAAAA,IAAeD,GAChB/X,EAAMzB,IAAI,SAASwM,EAAAA,EAAAA,QAAO,CAAC,IAAIxM,KAAIwM,EAAAA,EAAAA,QAAOgN,GAAQQ,IAG9CC,EAAW,SAACxY,EAAO+X,GAAmB,IAAZQ,EAAG,uDAAC,GAEzC,OADAR,GAAQC,EAAAA,EAAAA,IAAeD,GAChB/X,EAAM2K,MAAM,CAAC,WAAYoN,GAAQQ,EAC1C,EAEaE,GAAc7M,EAAAA,EAAAA,iBAhBb5L,GAASA,IAkBrBA,IAAUoY,EAAQpY,EAAO,W,2FCrBpB,MAAM0Y,EAAmB,CAACC,EAAa7O,IAAW,SAAC9J,GAAoB,IAAD,uBAATyQ,EAAI,iCAAJA,EAAI,kBACtE,IAAI6G,EAAYqB,EAAY3Y,KAAUyQ,GAEtC,MAAM,GAAEjJ,EAAE,gBAAEgJ,EAAe,WAAE/S,GAAeqM,EAAO8O,YAC7CpK,EAAU/Q,KACV,iBAAEob,GAAqBrK,EAG7B,IAAI+G,EAAS/E,EAAgB8H,gBAW7B,OAVI/C,IACa,IAAXA,GAA8B,SAAXA,GAAgC,UAAXA,IAC1C+B,EAAY9P,EAAG6P,UAAUC,EAAW/B,IAIpCsD,IAAqBC,MAAMD,IAAqBA,GAAoB,IACtEvB,EAAY,IAAAA,GAAS,KAATA,EAAgB,EAAGuB,IAG1BvB,CACT,C,kFCrBe,SAAS,EAAC,GAAY,IAAZ,QAAC9I,GAAQ,EAEhC,MAAMuK,EAAS,CACb,MAAS,EACT,KAAQ,EACR,IAAO,EACP,KAAQ,EACR,MAAS,GAGLC,EAAY3T,GAAU0T,EAAO1T,KAAW,EAE9C,IAAI,SAAE4T,GAAazK,EACf0K,EAAcF,EAASC,GAE3B,SAASE,EAAI9T,GAAiB,IAAD,uBAANoL,EAAI,iCAAJA,EAAI,kBACtBuI,EAAS3T,IAAU6T,GAEpBhW,QAAQmC,MAAUoL,EACtB,CAOA,OALA0I,EAAIhW,KAAO,IAAAgW,GAAG,KAAHA,EAAS,KAAM,QAC1BA,EAAIlY,MAAQ,IAAAkY,GAAG,KAAHA,EAAS,KAAM,SAC3BA,EAAIC,KAAO,IAAAD,GAAG,KAAHA,EAAS,KAAM,QAC1BA,EAAIE,MAAQ,IAAAF,GAAG,KAAHA,EAAS,KAAM,SAEpB,CAAEpP,YAAa,CAAEoP,OAC1B,C,iyBCxBO,MAAMG,EAAyB,mBACzBC,EAA4B,8BAC5BC,EAAwC,oCACxCC,EAAgC,kCAChCC,EAAgC,kCAChCC,EAA8B,gCAC9BC,EAA+B,iCAC/BC,EAA+B,iCAC/BC,EAAkC,uCAClCC,EAAoC,yCACpCC,EAA2B,gCAEjC,SAASC,EAAm
BC,EAAmBC,GACpD,MAAO,CACL9b,KAAMib,EACNhV,QAAS,CAAC4V,oBAAmBC,aAEjC,CAEO,SAASC,EAAmB,GAA0B,IAAxB,MAAExP,EAAK,WAAEyP,GAAY,EACxD,MAAO,CACLhc,KAAMkb,EACNjV,QAAS,CAAEsG,QAAOyP,cAEtB,CAEO,MAAMC,EAAiC,IAA2B,IAA3B,MAAE1P,EAAK,WAAEyP,GAAY,EACjE,MAAO,CACLhc,KAAMmb,EACNlV,QAAS,CAAEsG,QAAOyP,cACnB,EAII,SAASE,EAAuB,GAAgC,IAA9B,MAAE3P,EAAK,WAAEyP,EAAU,KAAEzc,GAAM,EAClE,MAAO,CACLS,KAAMob,EACNnV,QAAS,CAAEsG,QAAOyP,aAAYzc,QAElC,CAEO,SAAS4c,EAAuB,GAAmD,IAAjD,KAAE5c,EAAI,WAAEyc,EAAU,YAAEI,EAAW,YAAEC,GAAa,EACrF,MAAO,CACLrc,KAAMqb,EACNpV,QAAS,CAAE1G,OAAMyc,aAAYI,cAAaC,eAE9C,CAEO,SAASC,EAAqB,GAA0B,IAAxB,MAAE/P,EAAK,WAAEyP,GAAY,EAC1D,MAAO,CACLhc,KAAMsb,EACNrV,QAAS,CAAEsG,QAAOyP,cAEtB,CAEO,SAASO,EAAsB,GAA4B,IAA1B,MAAEhQ,EAAK,KAAEyC,EAAI,OAAElF,GAAQ,EAC7D,MAAO,CACL9J,KAAMub,EACNtV,QAAS,CAAEsG,QAAOyC,OAAMlF,UAE5B,CAEO,SAAS0S,EAAsB,GAAoC,IAAlC,OAAEC,EAAM,UAAEX,EAAS,IAAE3W,EAAG,IAAE0I,GAAK,EACrE,MAAO,CACL7N,KAAMwb,EACNvV,QAAS,CAAEwW,SAAQX,YAAW3W,MAAK0I,OAEvC,CAEO,MAAM6O,EAA+B,IAAwC,IAAxC,KAAE1N,EAAI,OAAElF,EAAM,iBAAE6S,GAAkB,EAC5E,MAAO,CACL3c,KAAMyb,EACNxV,QAAS,CAAE+I,OAAMlF,SAAQ6S,oBAC1B,EAGUC,EAAiC,IAAsB,IAAtB,KAAE5N,EAAI,OAAElF,GAAQ,EAC5D,MAAO,CACL9J,KAAM0b,EACNzV,QAAS,CAAE+I,OAAMlF,UAClB,EAGU+S,EAAgC,IAAqB,IAArB,WAAEb,GAAY,EACzD,MAAO,CACLhc,KAAM0b,EACNzV,QAAS,CAAE+I,KAAMgN,EAAW,GAAIlS,OAAQkS,EAAW,IACpD,EAGUc,EAAyB,IAAoB,IAApB,WAAEd,GAAY,EAClD,MAAO,CACLhc,KAAO2b,EACP1V,QAAS,CAAE+V,cACZ,C,oKC1EI,MAAMxO,GAdKuP,GAc6BxP,EAAAA,EAAAA,iBAhBjC5L,GAASA,IAkBlB,IAAD,IAAC,cAAC5C,GAAc,SAAKA,EAAc2O,qBAAqB,IACxD,CAACjC,EAAQgC,KAAiB,IAAD,EAGvB,IAAIE,GAAOC,EAAAA,EAAAA,QAEX,OAAIH,GAIJ,MAAAA,EAAYZ,YAAU,QAAW,IAA6B,IAA3BmQ,EAAS7O,GAAY,EACtD,MAAMnO,EAAOmO,EAAWjO,IAAI,QAEL,IAAD,EAyBtB,GAzBY,WAATF,GACD,MAAAmO,EAAWjO,IAAI,SAAS2M,YAAU,QAAU,IAAwB,IAAvBoQ,EAASC,GAAQ,EACxDC,GAAgBzQ,EAAAA,EAAAA,QAAO,CACzB7F,KAAMoW,EACNG,iBAAkBF,EAAQhd,IAAI,oBAC9Bmd,SAAUH,EAAQhd,IAAI,YACtB2H,OAAQqV,EAAQhd,IAAI,UACpBF,KAAMmO,EAAWjO,IAAI,QACrBod,YAAanP,EAAWjO,IAAI,iBAG9ByN,EAAOA,EAAKG,KAAK,IAAIlB,EAAAA,IAAI,CACvB,CAACoQ,GAAU,IAAAG,GAAa,KAAbA,GAAsBI,QAGlB5c,IAAN4c,MAER,IAGK,SAATvd,GAA4B,WAATA,IACpB2N,EAAOA,EAAKG,KAAK,IAAIlB,EAAAA,IAAI,CACvB,CAACoQ,GAAU7O,MAGH,kBAATnO,GAA4BmO,EAAWjO,IAAI,qBAAsB,CAClE,IAAIsd,EAAWrP,EAAWjO,IAAI,qBAC1Bud,EAASD,EAAStd,IAAI,0BAA4B,CAAC,qBAAsB,YAC7E,IAAAud,GAAM,KAANA,GAAgBC,IAAW,IAAD,EAExB,IAAIC,EAAmBH,EAAStd,IAAI,qBAClC,MAAAsd,EAAStd,IAAI,qBAAmB,QAAQ,CAAC0d,EAAKC,IAAQD,EAAIpR,IAAIqR,EAAK,KAAK,IAAIjR,EAAAA,KAE1EuQ,GAAgBzQ,EAAAA,EAAAA,QAAO,CACzB7F,KAAM6W,EACNN,iBAAkBI,EAAStd,IAAI,0BAC/Bmd,SAAUG,EAAStd,IAAI,kBACvB2H,OAAQ8V,EACR3d,KAAM,SACN8d,iBAAkB3P,EAAWjO,IAAI,sBAGnCyN,EAAOA,EAAKG,KAAK,IAAIlB,EAAAA,IAAI,CACvB,CAACoQ,GAAU,IAAAG,GAAa,KAAbA,GAAsBI,QAGlB5c,IAAN4c,MAER,GAEP,KAGK5P,GA3DEA,CA2DE,IAjFR,CAAC6D,EAAK/F,IAAW,WACtB,MAAMzJ,EAAOyJ,EAAO8O,YAAYxb,cAAcqN,WAAU,2BAD9BgG,EAAI,yBAAJA,EAAI,gBAE9B,IAAG2L,EAAAA,EAAAA,QAAa/b,GAAO,CAErB,IAAIgc,EAAkBvS,EAAOwS,WAAW3R,MAAM,CAAC,OAAQ,mBACrD,aAAc,oBAChB,OAAOyQ,EAAStR,EAAQuS,KAAoB5L,EAC9C,CACE,OAAOZ,KAAOY,EAElB,GAXF,IAAkB2K,C,oJCJlB,MAkDA,EAlDmB/d,IAAW,IAAD,EAC3B,IAAI,UAAEkf,EAAS,aAAE/e,EAAY,SAAEM,GAAaT,EAE5C,MAAMmf,EAAqBhf,EAAa,sBAAsB,GAE9D,IAAI+e,EACF,OAAO,8CAGT,IAAIE,EAAmB,MAAAF,EAAUrR,YAAU,QAAM,IAA8B,IAAD,MAA5BwR,EAAcC,GAAS,EACvE,OAAO,yBAAKnZ,IAAKkZ,GACf,4BAAKA,GACH,MAAAC,EAASzR,YAAU,QAAM,IAA8B,IAAD,MAA5B0R,EAAcC,GAAS,EACjD,MAAoB,UAAjBD,EACM,KAEF,yBAAKpZ,IAAKoZ,GACb,MAAAC,EAAS3R,YAAU,QAAM,IAAyB,IAAxB/C,EAAQmF,GAAU,EAC5C,GAAc,UAAXnF,EACD,OAAO,KAET,IAAI2U,GAAK/R,EAAAA,EAAAA,QAAO,CACduC,cAEF,OAAO,kBAACkP,EAAkB,OACpBnf,EAAK,CACTyf,GAAIA,EACJtZ,IAAK2E,EACLiL,IAAK,GACLjL,OAAQA,EACRkF,KAAMuP,EACN9e,SAAUA,E
AASqO,KAAKuQ,EAAcE,EAAczU,GACpD4U,eAAe,IACb,IAEF,IAEJ,IAER,OAAO,6BACJN,EACG,C,sKC3CO,MAAMO,UAAiBzd,IAAAA,UAUpCC,YAAYnC,EAAOoC,GACjBC,MAAMrC,EAAOoC,GAAQ,qBAiBZqJ,IACT,IAAI,SAAEmU,GAAapgB,KAAKQ,OACpB,MAAEuN,EAAK,KAAEhN,GAASkL,EAAEpI,OAEpBwc,EAAW,IAAc,CAAC,EAAGrgB,KAAKmD,MAAM4K,OAEzChN,EACDsf,EAAStf,GAAQgN,EAEjBsS,EAAWtS,EAGb/N,KAAKuD,SAAS,CAAEwK,MAAOsS,IAAY,IAAMD,EAASpgB,KAAKmD,QAAO,IA5B9D,IAAMpC,KAAAA,EAAI,OAAEF,GAAWb,KAAKQ,MACxBuN,EAAQ/N,KAAKsgB,WAEjBtgB,KAAKmD,MAAQ,CACXpC,KAAMA,EACNF,OAAQA,EACRkN,MAAOA,EAEX,CAEAuS,WACE,IAAI,KAAEvf,EAAI,WAAE2L,GAAe1M,KAAKQ,MAEhC,OAAOkM,GAAcA,EAAWoB,MAAM,CAAC/M,EAAM,SAC/C,CAkBAL,SAAU,IAAD,EACP,IAAI,OAAEG,EAAM,aAAEF,EAAY,aAAE4f,EAAY,KAAExf,GAASf,KAAKQ,MACxD,MAAMggB,EAAQ7f,EAAa,SACrB8f,EAAM9f,EAAa,OACnB+f,EAAM/f,EAAa,OACnBggB,EAAYhgB,EAAa,aACzB+D,EAAW/D,EAAa,YAAY,GACpCigB,EAAajgB,EAAa,cAAc,GAExCkgB,GAAUhgB,EAAOa,IAAI,WAAa,IAAIof,cAC5C,IAAI/S,EAAQ/N,KAAKsgB,WACbhI,EAAS,MAAAiI,EAAanG,aAAW,QAASjC,GAAOA,EAAIzW,IAAI,YAAcX,IAE3E,GAAc,UAAX8f,EAAoB,CAAC,IAAD,EACrB,IAAIhY,EAAWkF,EAAQA,EAAMrM,IAAI,YAAc,KAC/C,OAAO,6BACL,4BACE,8BAAQX,GAAQF,EAAOa,IAAI,SAAgB,kBAEzC,kBAACkf,EAAU,CAACpQ,KAAM,CAAE,sBAAuBzP,MAE7C8H,GAAY,0CACd,kBAAC4X,EAAG,KACF,kBAAC/b,EAAQ,CAACC,OAAS9D,EAAOa,IAAI,kBAEhC,kBAAC+e,EAAG,KACF,4CAEE5X,EAAW,kCAASA,EAAQ,KACxB,kBAAC6X,EAAG,KAAC,kBAACF,EAAK,CAAChf,KAAK,OAAOV,SAAS,WAAWC,KAAK,WAAW,aAAW,sBAAsBqf,SAAWpgB,KAAKogB,SAAWW,WAAS,MAGzI,kBAACN,EAAG,KACF,4CAEI5X,EAAW,0CACA,kBAAC6X,EAAG,KAAC,kBAACF,EAAK,CAACQ,aAAa,eACbjgB,KAAK,WACLS,KAAK,WACL,aAAW,sBACX4e,SAAWpgB,KAAKogB,aAI3C,MAAA9H,EAAO9I,YAAU,QAAM,CAACpL,EAAOuC,IACtB,kBAACga,EAAS,CAACvc,MAAQA,EACRuC,IAAMA,MAIhC,CAEyB,IAAD,EAAxB,MAAc,WAAXka,EAEC,6BACE,4BACE,8BAAQ9f,GAAQF,EAAOa,IAAI,SAAgB,mBAEzC,kBAACkf,EAAU,CAACpQ,KAAM,CAAE,sBAAuBzP,MAE3CgN,GAAS,0CACX,kBAAC0S,EAAG,KACF,kBAAC/b,EAAQ,CAACC,OAAS9D,EAAOa,IAAI,kBAEhC,kBAAC+e,EAAG,KACF,yCAEE1S,EAAQ,0CACR,kBAAC2S,EAAG,KAAC,kBAACF,EAAK,CAAChf,KAAK,OAAO,aAAW,oBAAoB4e,SAAWpgB,KAAKogB,SAAWW,WAAS,MAIjG,MAAAzI,EAAO9I,YAAU,QAAM,CAACpL,EAAOuC,IACtB,kBAACga,EAAS,CAACvc,MAAQA,EACxBuC,IAAMA,OAMX,6BACL,4BAAI,2BAAI5F,GAAS,4CAA2C,IAAG8f,MAEjE,E,gJCzHF,SACEI,UAAS,UACTd,SAAQ,UACRe,YAAW,UACXC,QAAO,UACPC,iBAAgB,UAChBC,kBAAiB,UACjBC,iBAAgB,UAChBC,cAAeC,EAAAA,Q,wICbjB,MAAMA,UAAsBC,EAAAA,UAC1B/gB,SACE,MAAM,KAAEghB,EAAI,KAAE3gB,EAAI,aAAEJ,GAAiBX,KAAKQ,MAEpCkE,EAAW/D,EAAa,YAAY,GAE1C,IAAIghB,EAAWD,EAAKhgB,IAAI,gBAAkBggB,EAAKhgB,IAAI,gBAC/CkgB,EAAaF,EAAKhgB,IAAI,eAAiBggB,EAAKhgB,IAAI,cAAcmL,OAC9DiS,EAAc4C,EAAKhgB,IAAI,eAE3B,OAAO,yBAAKG,UAAU,kBACpB,yBAAKA,UAAU,eACb,2BAAG,8BAAOd,IACR+d,EAAc,kBAACpa,EAAQ,CAACC,OAAQma,IAA2B,MAE/D,2CACc6C,EAAQ,IAAE,6BAAM,6BAAM,cAQ1C,SAAmBE,EAAGC,GAAS,IAAD,EAC5B,GAAqB,iBAAXA,EAAuB,MAAO,GACxC,OAAO,MAAAA,EACJvN,MAAM,OAAK,QACP,CAACuF,EAAMR,IAAMA,EAAI,EAAIyI,MAAMF,EAAI,GAAGvY,KAAK,KAAOwQ,EAAOA,IACzDxQ,KAAK,KACV,CAboB0Y,CAAU,EAAG,IAAeJ,EAAY,KAAM,KAAO,KAAK,8BAG5E,EAkBF,S,qHCtCe,MAAMN,UAAyB5e,IAAAA,UAAiB,cAAD,kDAiBvCub,IACnB,MAAM,KAAEzN,EAAI,OAAElF,GAAWtL,KAAKQ,MAI9B,OADAR,KAAKiiB,cACEjiB,KAAKQ,MAAM4c,kBAAkBa,EAAS,GAAEzN,KAAQlF,IAAS,IACjE,mCAEyB4W,IACxB,MAAM,KAAE1R,EAAI,OAAElF,GAAWtL,KAAKQ,MAI9B,OADAR,KAAKiiB,cACEjiB,KAAKQ,MAAMwd,uBAAuB,IACpCkE,EACH5E,UAAY,GAAE9M,KAAQlF,KACtB,IACH,8BAEmB,KAClB,MAAM,KAAEkF,EAAI,OAAElF,GAAWtL,KAAKQ,MAC9B,OAAOR,KAAKQ,MAAM2hB,kBAAmB,GAAE3R,KAAQlF,IAAS,IACzD,8BAEmB,CAAC2S,EAAQtX,KAC3B,MAAM,KAAE6J,EAAI,OAAElF,GAAWtL,KAAKQ,MAC9B,OAAOR,KAAKQ,MAAM4hB,kBAAkB,CAClC9E,UAAY,GAAE9M,KAAQlF,IACtB2S,UACCtX,EAAI,IACR,oCAE0BsX,IACzB,MAAM,KAAEzN,EAAI,OAAElF,GAAWtL,KAAKQ,MAC9B,OAAOR,KAAKQ,MAAM6hB,wBAAwB,CACxCpE,SACAX,UAAY,GAAE9M,KAAQlF,KACtB,GACH,CAED5K,SACE,MAAM,
iBAEJ4hB,EAAgB,YAChBC,EAAW,aAGX5hB,GACEX,KAAKQ,MAET,IAAI8hB,IAAqBC,EACvB,OAAO,KAGT,MAAMpB,EAAUxgB,EAAa,WAEvB6hB,EAAmBF,GAAoBC,EACvCE,EAAaH,EAAmB,YAAc,OAEpD,OAAO,yBAAKzgB,UAAU,qCACpB,yBAAKA,UAAU,0BACb,yBAAKA,UAAU,cACb,wBAAIA,UAAU,iBAAe,aAGjC,yBAAKA,UAAU,+BACb,wBAAIA,UAAU,WAAS,SACd4gB,EAAU,sDAEnB,kBAACtB,EAAO,CACNuB,QAASF,EACTG,cAAe3iB,KAAKmiB,oBACpB/E,kBAAmBpd,KAAKod,kBACxBY,uBAAwBhe,KAAKge,uBAC7BoE,kBAAmBpiB,KAAKoiB,kBACxBC,wBAAyBriB,KAAKqiB,2BAItC,E,4IC/FF,MAAMO,EAAOC,SAASC,UAEP,MAAMzB,UAA0B0B,EAAAA,cAe7CpgB,YAAYnC,EAAOoC,GACjBC,MAAMrC,EAAOoC,GAAQ,8BAYFU,IACnB,MAAM,SAAE8c,EAAQ,aAAE4C,GAAkB1f,GAAwBtD,KAAKQ,MAMjE,OAJAR,KAAKuD,SAAS,CACZwK,MAAOiV,IAGF5C,EAAS4C,EAAa,IAC9B,qBAEWjV,IACV/N,KAAKQ,MAAM4f,UAAS6C,EAAAA,EAAAA,IAAUlV,GAAO,IACtC,wBAEa9B,IACZ,MAAMiX,EAAajX,EAAEpI,OAAOkK,MAE5B/N,KAAKuD,SAAS,CACZwK,MAAOmV,IACN,IAAMljB,KAAKogB,SAAS8C,IAAY,IA7BnCljB,KAAKmD,MAAQ,CACX4K,OAAOkV,EAAAA,EAAAA,IAAUziB,EAAMuN,QAAUvN,EAAMwiB,cAMzCxiB,EAAM4f,SAAS5f,EAAMuN,MACvB,CAwBA1K,iCAAiCC,GAE7BtD,KAAKQ,MAAMuN,QAAUzK,EAAUyK,OAC/BzK,EAAUyK,QAAU/N,KAAKmD,MAAM4K,OAG/B/N,KAAKuD,SAAS,CACZwK,OAAOkV,EAAAA,EAAAA,IAAU3f,EAAUyK,UAM3BzK,EAAUyK,OAASzK,EAAU0f,cAAkBhjB,KAAKmD,MAAM4K,OAG5D/N,KAAKmjB,kBAAkB7f,EAE3B,CAEA5C,SACE,IAAI,aACFC,EAAY,OACZ2X,GACEtY,KAAKQ,OAEL,MACFuN,GACE/N,KAAKmD,MAELigB,EAAY9K,EAAO1I,KAAO,EAC9B,MAAMyT,EAAW1iB,EAAa,YAE9B,OACE,yBAAKkB,UAAU,cACb,kBAACwhB,EAAQ,CACPxhB,UAAW6D,IAAG,mBAAoB,CAAE4d,QAASF,IAC7CG,MAAOjL,EAAO1I,KAAO0I,EAAOhP,KAAK,MAAQ,GACzCyE,MAAOA,EACPqS,SAAWpgB,KAAKwjB,cAKxB,EACD,IA/FoBnC,EAAiB,eAUd,CACpBjB,SAAUwC,EACVa,mBAAmB,G,+OCZhB,MAAMC,EAA6B,CAACC,EAAaC,EAAWC,KACjE,MAAMC,EAAiBH,EAAY7V,MAAM,CAAC,UAAW8V,IAC/C/iB,EAASijB,EAAepiB,IAAI,UAAUmL,OAEtCkX,OAAoD5hB,IAAnC2hB,EAAepiB,IAAI,YACpCsiB,EAAgBF,EAAepiB,IAAI,WACnCuiB,EAAmBF,EACrBD,EAAehW,MAAM,CACrB,WACA+V,EACA,UAEAG,EAEEE,GAAeC,EAAAA,EAAAA,IACnBtjB,EACA+iB,EACA,CACExiB,kBAAkB,GAEpB6iB,GAEF,OAAOhB,EAAAA,EAAAA,IAAUiB,EAAa,EAiThC,EA5SqB,IAkBd,IAlBc,kBACnBT,EAAiB,YACjBE,EAAW,iBACXS,EAAgB,4BAChBC,EAA2B,kBAC3BC,EAAiB,aACjB3jB,EAAY,WACZC,EAAU,cACVL,EAAa,GACboK,EAAE,YACF4Z,EAAW,UACXC,EAAS,SACTvjB,EAAQ,SACRmf,EAAQ,qBACRqE,EAAoB,kBACpBZ,EAAiB,wBACjBa,EAAuB,8BACvBjH,GACD,EACC,MAAMkH,EAAc1Y,IAClBmU,EAASnU,EAAEpI,OAAO+gB,MAAM,GAAG,EAEvBC,EAAwBle,IAC5B,IAAIme,EAAU,CACZne,MACAoe,oBAAoB,EACpB/B,cAAc,GAOhB,MAJyB,aADFqB,EAA4B3iB,IAAIiF,EAAK,cAE1Dme,EAAQC,oBAAqB,GAGxBD,CAAO,EAGVpgB,EAAW/D,EAAa,YAAY,GACpCqkB,EAAerkB,EAAa,gBAC5B0gB,EAAoB1gB,EAAa,qBACjCskB,EAAgBtkB,EAAa,iBAC7BukB,EAA8BvkB,EAAa,+BAC3CwkB,EAAUxkB,EAAa,WACvBykB,EAAwBzkB,EAAa,0BAErC,qBAAE0kB,GAAyBzkB,IAE3B0kB,EAA0B3B,GAAeA,EAAYjiB,IAAI,gBAAmB,KAC5E6jB,EAAsB5B,GAAeA,EAAYjiB,IAAI,YAAe,IAAI8jB,EAAAA,WAC9EjB,EAAcA,GAAegB,EAAmB1V,SAASK,SAAW,GAEpE,MAAM4T,EAAiByB,EAAmB7jB,IAAI6iB,GAAaiB,EAAAA,EAAAA,eACrDC,EAAqB3B,EAAepiB,IAAI,UAAU8jB,EAAAA,EAAAA,eAClDE,EAAyB5B,EAAepiB,IAAI,WAAY,MACxDikB,EAAqBD,aAAsB,EAAtB,IAAAA,GAAsB,KAAtBA,GAA4B,CAACvQ,EAAWxO,KAAS,IAAD,EACzE,MAAM0I,EAAe,QAAZ,EAAG8F,SAAS,aAAT,EAAWzT,IAAI,QAAS,MAQpC,OAPG2N,IACD8F,EAAYA,EAAUnH,IAAI,QAAS0V,EACjCC,EACAY,EACA5d,GACC0I,IAEE8F,CAAS,IAQlB,GAFAmP,EAAoBlV,EAAAA,KAAAA,OAAYkV,GAAqBA,GAAoBlV,EAAAA,EAAAA,SAErE0U,EAAelU,KACjB,OAAO,KAGT,MAAMgW,EAA+D,WAA7C9B,EAAehW,MAAM,CAAC,SAAU,SAClD+X,EAAgE,WAA/C/B,EAAehW,MAAM,CAAC,SAAU,WACjDgY,EAAgE,WAA/ChC,EAAehW,MAAM,CAAC,SAAU,WAEvD,GACkB,6BAAhByW,GACqC,IAAlC,IAAAA,GAAW,KAAXA,EAAoB,WACc,IAAlC,IAAAA,GAAW,KAAXA,EAAoB,WACc,IAAlC,IAAAA,GAAW,KAAXA,EAAoB,WACpBsB,GACAC,EACH,CACA,MAAMtF,EAAQ7f,EAAa,SAE3B,OAAI6jB,EAMG,kBAAChE,EAAK,CAAChf,KAAM,OAAQ4e,SAAUuE,IAL7B,mEACgC,8BAAOJ,GAAmB,gBAKrE,CAEA,GACEqB,IAEkB,sCAAhBrB,GACsC,IAAtC,IAAA
A,GAAW,KAAXA,EAAoB,gBAEtBkB,EAAmB/jB,IAAI,cAAc8jB,EAAAA,EAAAA,eAAc5V,KAAO,EAC1D,CAAC,IAAD,EACA,MAAMmW,EAAiBplB,EAAa,kBAC9BqlB,EAAerlB,EAAa,gBAC5BslB,EAAiBR,EAAmB/jB,IAAI,cAAc8jB,EAAAA,EAAAA,eAG5D,OAFApB,EAAmBhW,EAAAA,IAAAA,MAAUgW,GAAoBA,GAAmBoB,EAAAA,EAAAA,cAE7D,yBAAK3jB,UAAU,mBAClByjB,GACA,kBAAC5gB,EAAQ,CAACC,OAAQ2gB,IAEpB,+BACE,+BAEIlX,EAAAA,IAAAA,MAAU6X,IAAmB,MAAAA,EAAe5X,YAAU,QAAM,IAAiB,IAAD,QAAf1H,EAAKuf,GAAK,EACrE,GAAIA,EAAKxkB,IAAI,YAAa,OAE1B,IAAIykB,EAAYd,GAAuBe,EAAAA,EAAAA,IAAoBF,GAAQ,KACnE,MAAMplB,EAAW,MAAA2kB,EAAmB/jB,IAAI,YAAY0N,EAAAA,EAAAA,UAAO,OAAUzI,GAC/DnF,EAAO0kB,EAAKxkB,IAAI,QAChB2kB,EAASH,EAAKxkB,IAAI,UAClBod,EAAcoH,EAAKxkB,IAAI,eACvB4kB,EAAelC,EAAiBtW,MAAM,CAACnH,EAAK,UAC5C4f,EAAgBnC,EAAiBtW,MAAM,CAACnH,EAAK,YAAc2d,EAC3DkC,EAAWnC,EAA4B3iB,IAAIiF,KAAQ,EAEnD8f,EAAiCP,EAAKQ,IAAI,YAC3CR,EAAKQ,IAAI,YACTR,EAAKS,MAAM,CAAC,QAAS,aACrBT,EAAKS,MAAM,CAAC,QAAS,YACpBC,EAAwBV,EAAKQ,IAAI,UAAsC,IAA1BR,EAAKxkB,IAAI,QAAQkO,MAAc9O,GAC5E+lB,EAAkBJ,GAAkCG,EAE1D,IAAIE,EAAe,GACN,UAATtlB,GAAqBqlB,IACvBC,EAAe,KAEJ,WAATtlB,GAAqBqlB,KAEvBC,GAAe3C,EAAAA,EAAAA,IAAgB+B,GAAM,EAAO,CAC1C9kB,kBAAkB,KAIM,iBAAjB0lB,GAAsC,WAATtlB,IACvCslB,GAAe7D,EAAAA,EAAAA,IAAU6D,IAEE,iBAAjBA,GAAsC,UAATtlB,IACtCslB,EAAenb,KAAKC,MAAMkb,IAG5B,MAAMC,EAAkB,WAATvlB,IAAiC,WAAX6kB,GAAkC,WAAXA,GAE5D,OAAO,wBAAI1f,IAAKA,EAAK9E,UAAU,aAAa,qBAAoB8E,GAChE,wBAAI9E,UAAU,uBACZ,yBAAKA,UAAWf,EAAW,2BAA6B,mBACpD6F,EACC7F,EAAkB,oCAAP,MAEhB,yBAAKe,UAAU,mBACXL,EACA6kB,GAAU,0BAAMxkB,UAAU,eAAa,KAAIwkB,EAAM,KACjDhB,GAAyBc,EAAUvW,KAAc,MAAAuW,EAAU9X,YAAU,QAAM,IAAD,IAAE1H,EAAKoY,GAAE,SAAK,kBAACiH,EAAY,CAACrf,IAAM,GAAEA,KAAOoY,IAAKiI,KAAMrgB,EAAKsgB,KAAMlI,GAAK,IAAtG,MAE9C,yBAAKld,UAAU,yBACXqkB,EAAKxkB,IAAI,cAAgB,aAAc,OAG7C,wBAAIG,UAAU,8BACZ,kBAAC6C,EAAQ,CAACC,OAASma,IAClB0F,EAAY,6BACX,kBAACuB,EAAc,CACbpb,GAAIA,EACJuc,sBAAuBH,EACvBlmB,OAAQqlB,EACRpH,YAAanY,EACbhG,aAAcA,EACdoN,WAAwB5L,IAAjBmkB,EAA6BQ,EAAeR,EACnDxlB,SAAaA,EACbwX,OAAWiO,EACXnG,SAAWrS,IACTqS,EAASrS,EAAO,CAACpH,GAAK,IAGzB7F,EAAW,KACV,kBAACskB,EAAqB,CACpBhF,SAAWrS,GAAU0W,EAAqB9d,EAAKoH,GAC/CoZ,WAAYX,EACZY,kBAAmBvC,EAAqBle,GACxC0gB,WAAY,IAAcf,GAAwC,IAAxBA,EAAa3iB,SAAgB2jB,EAAAA,EAAAA,IAAahB,MAGjF,MAEN,MAMjB,CAEA,MAAMiB,EAAoB7D,EACxBC,EACAY,EACAV,GAEF,IAAI2D,EAAW,KAMf,OALuBC,EAAAA,EAAAA,GAAkCF,KAEvDC,EAAW,QAGN,6BACHlC,GACA,kBAAC5gB,EAAQ,CAACC,OAAQ2gB,IAGlBK,EACE,kBAACT,EAA2B,CACxBzB,kBAAmBA,EACnBiE,SAAU/B,EACVgC,WAAY9D,EACZ+D,sBAAuBxD,EACvByD,SAlKoBlhB,IAC5B+d,EAAwB/d,EAAI,EAkKpBmhB,YAAa1H,EACb2H,uBAAuB,EACvBpnB,aAAcA,EACd8c,8BAA+BA,IAEjC,KAGJ+G,EACE,6BACE,kBAACnD,EAAiB,CAChBtT,MAAOqW,EACP9L,OAAQgM,EACRtB,aAAcuE,EACdnH,SAAUA,EACVzf,aAAcA,KAIlB,kBAACqkB,EAAY,CACXrkB,aAAeA,EACfC,WAAaA,EACbL,cAAgBA,EAChBgC,YAAa,EACbiiB,UAAWA,EACX3jB,OAAQijB,EAAepiB,IAAI,UAC3BT,SAAUA,EAASqO,KAAK,UAAWiV,GACnCyD,QACE,kBAAC/C,EAAa,CACZpjB,UAAU,sBACVjB,WAAYA,EACZ4mB,SAAUA,EACVzZ,OAAOkV,EAAAA,EAAAA,IAAUmB,IAAqBmD,IAG1CnmB,kBAAkB,IAKtBukB,EACE,kBAACR,EAAO,CACN6C,QAASrC,EAAmBjkB,IAAImiB,GAChCljB,aAAcA,EACdC,WAAYA,IAEZ,KAEF,C,0FCnTO,MAAMwgB,UAAyB1e,IAAAA,UAS5ChC,SACE,MAAM,cAACH,EAAa,cAAEqK,EAAa,YAAEqd,EAAW,aAAEtnB,GAAgBX,KAAKQ,MAEjEkiB,EAAUniB,EAAcmiB,UAExBvB,EAAUxgB,EAAa,WAE7B,OAAO+hB,GAAWA,EAAQ9S,KACxB,6BACE,0BAAM/N,UAAU,iBAAe,WAC/B,kBAACsf,EAAO,CACNuB,QAASA,EACTC,cAAe/X,EAAcK,iBAC7BmS,kBAAmB6K,EAAY7K,kBAC/BY,uBAAwBiK,EAAYjK,uBACpCoE,kBAAmBxX,EAAcsd,oBACjC7F,wBAAyBzX,EAAcI,wBAEhC,IACf,E,qKC1Ba,MAAMmW,UAAgBze,IAAAA,UAAiB,cAAD,+CAiEjCuJ,IAChBjM,KAAKmoB,UAAWlc,EAAEpI,OAAOkK,MAAO,IAGjC,wCAE+B9B,IAC9B,IAAI,uBACF+R,EAAsB,cACtB2E,GACE3iB,KAAKQ,MAEL4nB,EAAenc,EAAEpI,OAAOwkB,aAAa,iBACrCC,EAAmBrc,EAAEpI,OAAOkK,MAEK,mBAA3BiQ,GACRA,E
AAuB,CACrBC,OAAQ0E,EACRhc,IAAKyhB,EACL/Y,IAAKiZ,GAET,IACD,sBAEava,IACZ,IAAI,kBAAEqP,GAAsBpd,KAAKQ,MAEjC4c,EAAkBrP,EAAM,GACzB,CAlFD1J,oBAAqB,IAAD,EAClB,IAAI,QAAEqe,EAAO,cAAEC,GAAkB3iB,KAAKQ,MAEnCmiB,GAKH3iB,KAAKmoB,UAAyB,QAAhB,EAACzF,EAAQxS,eAAO,aAAf,EAAiBxO,IAAI,OACtC,CAEA2B,iCAAiCC,GAC/B,IAAI,QACFof,EAAO,uBACP1E,EAAsB,kBACtBoE,GACE9e,EACJ,GAAItD,KAAKQ,MAAMmiB,gBAAkBrf,EAAUqf,eAAiB3iB,KAAKQ,MAAMkiB,UAAYpf,EAAUof,QAAS,CAAC,IAAD,EAEpG,IAAI6F,EAA0B,IAAA7F,GAAO,KAAPA,GACtB3D,GAAKA,EAAErd,IAAI,SAAW4B,EAAUqf,gBACpC6F,EAAuB,MAAAxoB,KAAKQ,MAAMkiB,SAAO,QACrC3D,GAAKA,EAAErd,IAAI,SAAW1B,KAAKQ,MAAMmiB,kBAAkB6C,EAAAA,EAAAA,cAE3D,IAAI+C,EACF,OAAOvoB,KAAKmoB,UAAUzF,EAAQxS,QAAQxO,IAAI,QAG5C,IAAI+mB,EAAyBD,EAAqB9mB,IAAI,eAAgB8jB,EAAAA,EAAAA,cAElEkD,GAD+B,IAAAD,GAAsB,KAAtBA,GAA4B1J,GAAKA,EAAErd,IAAI,eAAe8jB,EAAAA,EAAAA,eACvB9jB,IAAI,WAElEinB,EAA4BJ,EAAwB7mB,IAAI,eAAgB8jB,EAAAA,EAAAA,cAExEoD,GADkC,IAAAD,GAAyB,KAAzBA,GAA+B5J,GAAKA,EAAErd,IAAI,eAAe8jB,EAAAA,EAAAA,eACvB9jB,IAAI,WAE5E,IAAAinB,GAAyB,KAAzBA,GAA8B,CAACtZ,EAAK1I,KACfyb,EAAkB9e,EAAUqf,cAAehc,IAMzC+hB,IAAmCE,GACtD5K,EAAuB,CACrBC,OAAQ3a,EAAUqf,cAClBhc,MACA0I,IAAKA,EAAI3N,IAAI,YAAc,IAE/B,GAEJ,CACF,CAgCAhB,SAAU,IAAD,IACP,IAAI,QAAEgiB,EAAO,cACXC,EAAa,kBACbP,EAAiB,wBACjBC,GACEriB,KAAKQ,MAKLmoB,GAF0B,IAAAjG,GAAO,KAAPA,GAAamG,GAAKA,EAAEnnB,IAAI,SAAWihB,MAAkB6C,EAAAA,EAAAA,eAE3B9jB,IAAI,eAAgB8jB,EAAAA,EAAAA,cAExEsD,EAA0D,IAAnCH,EAA0B/Y,KAErD,OACE,yBAAK/N,UAAU,WACb,2BAAOknB,QAAQ,WACb,4BAAQ3I,SAAWpgB,KAAKgpB,eAAiBjb,MAAO4U,GAC5C,MAAAD,EAAQlT,YAAU,QAChByO,GACF,4BACElQ,MAAQkQ,EAAOvc,IAAI,OACnBiF,IAAMsX,EAAOvc,IAAI,QACfuc,EAAOvc,IAAI,OACXuc,EAAOvc,IAAI,gBAAmB,MAAKuc,EAAOvc,IAAI,oBAElDunB,YAGJH,EACA,6BAEE,yBAAKjnB,UAAW,gBAAe,gBAE7B,8BACGwgB,EAAwBM,KAG7B,gDACA,+BACE,+BAEI,MAAAgG,EAA0Bta,YAAU,QAAM,IAAiB,IAAD,MAAftN,EAAMsO,GAAI,EACnD,OAAO,wBAAI1I,IAAK5F,GACd,4BAAKA,GACL,4BACIsO,EAAI3N,IAAI,QACR,4BAAQ,gBAAeX,EAAMqf,SAAUpgB,KAAKkpB,6BACzC,MAAA7Z,EAAI3N,IAAI,SAAO,QAAKynB,GACZ,4BACLC,SAAUD,IAAc/G,EAAkBO,EAAe5hB,GACzD4F,IAAKwiB,EACLpb,MAAOob,GACNA,MAIP,2BACE3nB,KAAM,OACNuM,MAAOqU,EAAkBO,EAAe5hB,IAAS,GACjDqf,SAAUpgB,KAAKkpB,4BACf,gBAAenoB,KAIlB,OAKP,KAIhB,E,wKC5KK,SAASmB,EAAO6W,GACrB,MAAMsQ,EAAatQ,EAAOrX,IAAI,WAC9B,MAAyB,iBAAf2nB,IAQH,IAAAA,GAAU,KAAVA,EAAsB,SAAWA,EAAW1lB,OAAS,EAC9D,CAEO,SAAS2lB,EAAWvQ,GACzB,MAAMwQ,EAAiBxQ,EAAOrX,IAAI,WAClC,MAA6B,iBAAnB6nB,GAIH,IAAAA,GAAc,KAAdA,EAA0B,MACnC,CAEO,SAASC,EAAyB/H,GACvC,MAAO,CAAChL,EAAKxJ,IAAYzM,IACvB,GAAGyM,GAAUA,EAAO1M,eAAiB0M,EAAO1M,cAAcqN,SAAU,CAGlE,OAAG1L,EAFU+K,EAAO1M,cAAcqN,YAGzB,kBAAC6T,EAAS,OAAKjhB,EAAWyM,EAAM,CAAEwJ,IAAKA,KAEvC,kBAACA,EAAQjW,EAEpB,CAEE,OADA6F,QAAQC,KAAK,mCACN,IACT,CAEJ,C,gJC5Be,aACb,MAAO,CACLmjB,WAAU,UACVrW,eAAc,UACd9F,aAAc,CACZ9J,KAAM,CACJ8X,cAAeoO,EACfjc,UAAWlN,GAEb2H,KAAM,CACJoT,cAAeqO,GAEjBC,KAAM,CACJpc,QAASya,EACT1a,SAAUsc,EAAAA,QACVpc,UAAW7C,IAInB,C,0ICfA,SACE,CAAC6R,EAAAA,wBAAyB,CAACtZ,EAAO,KAAmD,IAAjDsE,SAAS,kBAAE4V,EAAiB,UAAEC,IAAa,EAC7E,MAAM9M,EAAO8M,EAAY,CAAEA,EAAW,kBAAoB,CAAE,kBAC5D,OAAOna,EAAMqL,MAAOgC,EAAM6M,EAAkB,EAE9C,CAACX,EAAAA,2BAA4B,CAACvZ,EAAO,KAAwC,IAAtCsE,SAAS,MAAEsG,EAAK,WAAEyP,IAAc,GAChEhN,EAAMlF,GAAUkS,EACrB,IAAKpP,EAAAA,IAAAA,MAAUL,GAEb,OAAO5K,EAAMqL,MAAO,CAAE,cAAegC,EAAMlF,EAAQ,aAAeyC,GAEpE,IAKI+b,EALAC,EAAa5mB,EAAM2K,MAAM,CAAC,cAAe0C,EAAMlF,EAAQ,gBAAiB8C,EAAAA,EAAAA,OACvEA,EAAAA,IAAAA,MAAU2b,KAEbA,GAAa3b,EAAAA,EAAAA,QAGf,SAAU4b,GAAa,IAAAjc,GAAK,KAALA,GAUvB,OATA,IAAAic,GAAS,KAATA,GAAmBC,IACjB,IAAIC,EAAcnc,EAAMD,MAAM,CAACmc,IAC1BF,EAAWrD,IAAIuD,IAER7b,EAAAA,IAAAA,MAAU8b,KADpBJ,EAASC,EAAWvb,MAAM,CAACyb,EAAU,SAAUC,GAIjD,IAEK/mB,EAAMqL,MAAM,CAAC,cAAegC,EAAMlF,EAAQ,aAAcwe,EAAO,EAExE
,CAACnN,EAAAA,uCAAwC,CAACxZ,EAAO,KAAwC,IAAtCsE,SAAS,MAAEsG,EAAK,WAAEyP,IAAc,GAC5EhN,EAAMlF,GAAUkS,EACrB,OAAOra,EAAMqL,MAAM,CAAC,cAAegC,EAAMlF,EAAQ,mBAAoByC,EAAM,EAE7E,CAAC6O,EAAAA,+BAAgC,CAACzZ,EAAO,KAA8C,IAA5CsE,SAAS,MAAEsG,EAAK,WAAEyP,EAAU,KAAEzc,IAAQ,GAC1EyP,EAAMlF,GAAUkS,EACrB,OAAOra,EAAMqL,MAAO,CAAE,cAAegC,EAAMlF,EAAQ,gBAAiBvK,GAAQgN,EAAM,EAEpF,CAAC8O,EAAAA,+BAAgC,CAAC1Z,EAAO,KAAiE,IAA/DsE,SAAS,KAAE1G,EAAI,WAAEyc,EAAU,YAAEI,EAAW,YAAEC,IAAe,GAC7FrN,EAAMlF,GAAUkS,EACrB,OAAOra,EAAMqL,MAAO,CAAE,WAAYgC,EAAMlF,EAAQsS,EAAaC,EAAa,iBAAmB9c,EAAK,EAEpG,CAAC+b,EAAAA,6BAA8B,CAAC3Z,EAAO,KAAwC,IAAtCsE,SAAS,MAAEsG,EAAK,WAAEyP,IAAc,GAClEhN,EAAMlF,GAAUkS,EACrB,OAAOra,EAAMqL,MAAO,CAAE,cAAegC,EAAMlF,EAAQ,sBAAwByC,EAAM,EAEnF,CAACgP,EAAAA,8BAA+B,CAAC5Z,EAAO,KAA0C,IAAxCsE,SAAS,MAAEsG,EAAK,KAAEyC,EAAI,OAAElF,IAAU,EAC1E,OAAOnI,EAAMqL,MAAO,CAAE,cAAegC,EAAMlF,EAAQ,uBAAyByC,EAAM,EAEpF,CAACiP,EAAAA,8BAA+B,CAAC7Z,EAAO,KAAkD,IAAhDsE,SAAS,OAAEwW,EAAM,UAAEX,EAAS,IAAE3W,EAAG,IAAE0I,IAAO,EAClF,MAAMmB,EAAO8M,EAAY,CAAEA,EAAW,uBAAwBW,EAAQtX,GAAQ,CAAE,uBAAwBsX,EAAQtX,GAChH,OAAOxD,EAAMqL,MAAMgC,EAAMnB,EAAI,EAE/B,CAAC4N,EAAAA,iCAAkC,CAAC9Z,EAAO,KAAsD,IAApDsE,SAAS,KAAE+I,EAAI,OAAElF,EAAM,iBAAE6S,IAAoB,EACpF7F,EAAS,GAEb,GADAA,EAAOhJ,KAAK,kCACR6O,EAAiBgM,iBAEnB,OAAOhnB,EAAMqL,MAAM,CAAC,cAAegC,EAAMlF,EAAQ,WAAW4C,EAAAA,EAAAA,QAAOoK,IAErE,GAAI6F,EAAiBiM,qBAAuBjM,EAAiBiM,oBAAoBzmB,OAAS,EAAG,CAE3F,MAAM,oBAAEymB,GAAwBjM,EAChC,OAAOhb,EAAMknB,SAAS,CAAC,cAAe7Z,EAAMlF,EAAQ,cAAc4C,EAAAA,EAAAA,QAAO,CAAC,IAAIoc,GACrE,IAAAF,GAAmB,KAAnBA,GAA2B,CAACG,EAAWC,IACrCD,EAAU/b,MAAM,CAACgc,EAAmB,WAAWtc,EAAAA,EAAAA,QAAOoK,KAC5DgS,IAEP,CAEA,OADAjkB,QAAQC,KAAK,sDACNnD,CAAK,EAEd,CAAC+Z,EAAAA,mCAAoC,CAAC/Z,EAAO,KAAmC,IAAjCsE,SAAS,KAAE+I,EAAI,OAAElF,IAAU,EACxE,MAAM8Y,EAAmBjhB,EAAM2K,MAAM,CAAC,cAAe0C,EAAMlF,EAAQ,cACnE,IAAK8C,EAAAA,IAAAA,MAAUgW,GACb,OAAOjhB,EAAMqL,MAAM,CAAC,cAAegC,EAAMlF,EAAQ,WAAW4C,EAAAA,EAAAA,QAAO,KAErE,SAAU8b,GAAa,IAAA5F,GAAgB,KAAhBA,GACvB,OAAK4F,EAGE7mB,EAAMknB,SAAS,CAAC,cAAe7Z,EAAMlF,EAAQ,cAAc4C,EAAAA,EAAAA,QAAO,CAAC,IAAIuc,GACrE,IAAAT,GAAS,KAATA,GAAiB,CAACO,EAAWG,IAC3BH,EAAU/b,MAAM,CAACkc,EAAM,WAAWxc,EAAAA,EAAAA,QAAO,MAC/Cuc,KALItnB,CAMP,EAEJ,CAACga,EAAAA,0BAA2B,CAACha,EAAO,KAAgC,IAA9BsE,SAAS,WAAE+V,IAAa,GACvDhN,EAAMlF,GAAUkS,EACrB,MAAM4G,EAAmBjhB,EAAM2K,MAAM,CAAC,cAAe0C,EAAMlF,EAAQ,cACnE,OAAK8Y,EAGAhW,EAAAA,IAAAA,MAAUgW,GAGRjhB,EAAMqL,MAAM,CAAC,cAAegC,EAAMlF,EAAQ,cAAc8C,EAAAA,EAAAA,QAFtDjL,EAAMqL,MAAM,CAAC,cAAegC,EAAMlF,EAAQ,aAAc,IAHxDnI,CAK4D,E,8jBCvGzE,SAASwnB,EAASpM,GAChB,OAAO,sCAAI3K,EAAI,yBAAJA,EAAI,uBAAM3G,IACnB,MAAMzJ,EAAOyJ,EAAO8O,YAAYxb,cAAcqN,WAC9C,OAAG2R,EAAAA,EAAAA,QAAa/b,GACP+a,KAAY3K,GAEZ,IACT,CACD,CACH,CAmBA,MAYa3I,EAAiB0f,GAAS,CAACxnB,EAAOma,KAC3C,MAAM9M,EAAO8M,EAAY,CAACA,EAAW,kBAAoB,CAAC,kBAC1D,OAAOna,EAAM2K,MAAM0C,IAAS,EAAE,IAIrB4T,EAAmBuG,GAAS,CAACxnB,EAAOqN,EAAMlF,IAC5CnI,EAAM2K,MAAM,CAAC,cAAe0C,EAAMlF,EAAQ,eAAiB,OAIzDsf,EAA+BD,GAAS,CAACxnB,EAAOqN,EAAMlF,IACxDnI,EAAM2K,MAAM,CAAC,cAAe0C,EAAMlF,EAAQ,sBAAuB,IAI/Duf,EAAgC,CAAC1nB,EAAOqN,EAAMlF,IAAY2B,IACrE,MAAM,cAACrC,EAAa,cAAErK,GAAiB0M,EAAO8O,YACxCvY,EAAOjD,EAAcqN,WAC3B,IAAG2R,EAAAA,EAAAA,QAAa/b,GAAO,CACrB,MAAMsnB,EAAmBlgB,EAAcmgB,mBAAmBva,EAAMlF,GAChE,GAAIwf,EACF,OAAOpH,EAAAA,EAAAA,4BACLnjB,EAAcyqB,oBAAoB,CAAC,QAASxa,EAAMlF,EAAQ,gBAC1Dwf,EACAlgB,EAAcqgB,qBACZza,EAAMlF,EACN,cACA,eAIR,CACA,OAAO,IAAI,EAGA4f,EAAoB,CAAC/nB,EAAOqN,EAAMlF,IAAY2B,IACzD,MAAM,cAACrC,EAAa,cAAErK,GAAiB0M,EAAO8O,YACxCvY,EAAOjD,EAAcqN,WAC3B,IAAG2R,EAAAA,EAAAA,QAAa/b,GAAO,CACrB,IAAIigB,GAAoB,EACxB,MAAMqH,EAAmBlgB,EAAcmgB,mBAAmBva,EAAMlF,GAChE,IAAI6f,EAAwBvgB,EAAcwZ,iBAAiB5T,EAAMlF,GAQjE,GAPI8C,EAAAA
,IAAAA,MAAU+c,KAEZA,GAAwBlI,EAAAA,EAAAA,IAAUkI,EAAsBC,YAAYC,GAAOjd,EAAAA,IAAAA,MAAUid,EAAG,IAAM,CAACA,EAAG,GAAIA,EAAG,GAAG3pB,IAAI,UAAY2pB,IAAIxe,SAE/HuC,EAAAA,KAAAA,OAAY+b,KACbA,GAAwBlI,EAAAA,EAAAA,IAAUkI,IAEhCL,EAAkB,CACpB,MAAMQ,GAAmC5H,EAAAA,EAAAA,4BACvCnjB,EAAcyqB,oBAAoB,CAAC,QAASxa,EAAMlF,EAAQ,gBAC1Dwf,EACAlgB,EAAcqgB,qBACZza,EAAMlF,EACN,cACA,gBAGJmY,IAAsB0H,GAAyBA,IAA0BG,CAC3E,CACA,OAAO7H,CACT,CACE,OAAO,IACT,EAGWY,EAA8BsG,GAAS,CAACxnB,EAAOqN,EAAMlF,IACvDnI,EAAM2K,MAAM,CAAC,cAAe0C,EAAMlF,EAAQ,oBAAqB8C,EAAAA,EAAAA,SAI7DkW,EAAoBqG,GAAS,CAACxnB,EAAOqN,EAAMlF,IAC7CnI,EAAM2K,MAAM,CAAC,cAAe0C,EAAMlF,EAAQ,YAAc,OAItD2f,EAAuBN,GAAS,CAACxnB,EAAOqN,EAAMlF,EAAQ9J,EAAMT,IAC9DoC,EAAM2K,MAAM,CAAC,WAAY0C,EAAMlF,EAAQ9J,EAAMT,EAAM,mBAAqB,OAItEgqB,EAAqBJ,GAAS,CAACxnB,EAAOqN,EAAMlF,IAC9CnI,EAAM2K,MAAM,CAAC,cAAe0C,EAAMlF,EAAQ,wBAA0B,OAIlEigB,EAAsBZ,GAAS,CAACxnB,EAAOqN,EAAMlF,IAC/CnI,EAAM2K,MAAM,CAAC,cAAe0C,EAAMlF,EAAQ,yBAA2B,OAInE4c,EAAsByC,GAAS,CAACxnB,EAAOqoB,EAAc7kB,KAC9D,IAAI6J,EAIJ,GAA2B,iBAAjBgb,EAA2B,CACnC,MAAM,OAAEvN,EAAM,UAAEX,GAAckO,EAE5Bhb,EADC8M,EACM,CAACA,EAAW,uBAAwBW,EAAQtX,GAE5C,CAAC,uBAAwBsX,EAAQtX,EAE5C,KAAO,CAEL6J,EAAO,CAAC,uBADOgb,EACyB7kB,EAC1C,CAEA,OAAOxD,EAAM2K,MAAM0C,IAAS,IAAI,IAIvBib,EAAkBd,GAAS,CAACxnB,EAAOqoB,KAC5C,IAAIhb,EAIJ,GAA2B,iBAAjBgb,EAA2B,CACnC,MAAM,OAAEvN,EAAM,UAAEX,GAAckO,EAE5Bhb,EADC8M,EACM,CAACA,EAAW,uBAAwBW,GAEpC,CAAC,uBAAwBA,EAEpC,KAAO,CAELzN,EAAO,CAAC,uBADOgb,EAEjB,CAEA,OAAOroB,EAAM2K,MAAM0C,KAASgV,EAAAA,EAAAA,aAAY,IAI/Bxa,EAAuB2f,GAAS,CAACxnB,EAAOqoB,KACjD,IAAIE,EAAWC,EAIf,GAA2B,iBAAjBH,EAA2B,CACnC,MAAM,OAAEvN,EAAM,UAAEX,GAAckO,EAC9BG,EAAc1N,EAEZyN,EADCpO,EACWna,EAAM2K,MAAM,CAACwP,EAAW,uBAAwBqO,IAEhDxoB,EAAM2K,MAAM,CAAC,uBAAwB6d,GAErD,MACEA,EAAcH,EACdE,EAAYvoB,EAAM2K,MAAM,CAAC,uBAAwB6d,IAGnDD,EAAYA,IAAalG,EAAAA,EAAAA,cACzB,IAAIvf,EAAM0lB,EAMV,OAJA,IAAAD,GAAS,KAATA,GAAc,CAACrc,EAAK1I,KAClBV,EAAMA,EAAI5F,QAAQ,IAAIurB,OAAQ,IAAGjlB,KAAQ,KAAM0I,EAAI,IAG9CpJ,CAAG,IAID4lB,GAjM0BtN,EAkMrC,CAACpb,EAAOqa,IAjL6B,EAACra,EAAOqa,KAC7CA,EAAaA,GAAc,KACAra,EAAM2K,MAAM,CAAC,iBAAkB0P,EAAY,eA+K/CsO,CAA+B3oB,EAAOqa,GAjMtD,sCAAI5J,EAAI,yBAAJA,EAAI,uBAAM3G,IACnB,MAAMW,EAAWX,EAAO8O,YAAYxb,cAAcqN,WAGlD,IAAI4P,EAFa,IAAI5J,GAEK,IAAM,GAGhC,OAFgChG,EAASE,MAAM,CAAC,WAAY0P,EAAY,cAAe,cAG9Ee,KAAY3K,EAIrB,CACD,GAdH,IAAuC2K,EAqMhC,MAAMwN,EAA0B,CAAC5oB,EAAO,KAA0F,IAAD,MAAzF,mCAAE6oB,EAAkC,uBAAEC,EAAsB,qBAAEC,GAAqB,EAC5H9B,EAAsB,GAE1B,IAAKhc,EAAAA,IAAAA,MAAU8d,GACb,OAAO9B,EAET,IAAI+B,EAAe,GAkBnB,OAhBA,UAAYH,EAAmCjB,qBAAmB,QAAUxG,IAC1E,GAAIA,IAAgB0H,EAAwB,CAC1C,IAAIG,EAAiBJ,EAAmCjB,mBAAmBxG,GAC3E,IAAA6H,GAAc,KAAdA,GAAwBC,IAClB,IAAAF,GAAY,KAAZA,EAAqBE,GAAe,GACtCF,EAAa7c,KAAK+c,EACpB,GAEJ,KAEF,IAAAF,GAAY,KAAZA,GAAsBxlB,IACGulB,EAAqBpe,MAAM,CAACnH,EAAK,WAEtDyjB,EAAoB9a,KAAK3I,EAC3B,IAEKyjB,CAAmB,C,+GC7N5B,MAAMjnB,EAAQA,GACLA,IAASiL,EAAAA,EAAAA,OAGZR,GAAWmB,EAAAA,EAAAA,gBACf5L,GACAK,GAAQA,EAAK9B,IAAI,QAAQ0M,EAAAA,EAAAA,UAGrBke,GAAevd,EAAAA,EAAAA,gBACnB5L,GACAK,GAAQA,EAAK9B,IAAI,YAAY0M,EAAAA,EAAAA,UAYlBsU,GAlCKnE,GAkCcxP,EAAAA,EAAAA,iBATnB5L,IACX,IAAIiP,EAAMka,EAAanpB,GAGvB,OAFGiP,EAAIma,QAAU,IACfna,EAAMxE,EAASzK,IACViP,CAAG,IAOV5O,GAAQA,EAAKsK,MAAM,CAAC,cAAeM,EAAAA,EAAAA,SAnC5B,IAAM,SAACnB,GACZ,MAAMzJ,EAAOyJ,EAAO8O,YAAYxb,cAAcqN,WAC9C,IAAG2R,EAAAA,EAAAA,QAAa/b,GAAO,CAAC,IAAD,uBAFAoQ,EAAI,iCAAJA,EAAI,kBAGzB,OAAO2K,KAAY3K,EACrB,CACE,OAAO,IAEX,GARF,IAAkB2K,EAuCX,MAAM+K,EAAa,CAACtW,EAAK/F,IAAW,KACzC,MAAMzJ,EAAOyJ,EAAO8O,YAAYxb,cAAcqN,WAC9C,OAAO4e,EAAAA,EAAAA,YAAiBhpB,EAAK,C,sQCxC/B,SAASmnB,EAASpM,GAChB,MAAO,CAACvL,EAAK/F,IAAW,WACtB,MAAMzJ,EAAOyJ,EAAO8O,YAAYxb,cAAcqN,WAC9C,OAAG2R,EAAAA,EAAAA,QAAa/b,GACP+a,KAAY
,WAEZvL,KAAO,UAElB,CACF,CAEA,MAAM7P,EAAQA,GACLA,IAASiL,EAAAA,EAAAA,OAKZqe,EAAmB9B,GAFJ5b,EAAAA,EAAAA,iBAAe,IAAM,QAIpCnB,GAAWmB,EAAAA,EAAAA,gBACf5L,GACAK,GAAQA,EAAK9B,IAAI,QAAQ0M,EAAAA,EAAAA,UAGrBke,GAAevd,EAAAA,EAAAA,gBACnB5L,GACAK,GAAQA,EAAK9B,IAAI,YAAY0M,EAAAA,EAAAA,UAGzB5K,EAAOL,IACX,IAAIiP,EAAMka,EAAanpB,GAGvB,OAFGiP,EAAIma,QAAU,IACfna,EAAMxE,EAASzK,IACViP,CAAG,EAKCnD,EAAc0b,GAAS5b,EAAAA,EAAAA,gBAClCvL,GACAA,IACE,MAAM4O,EAAM5O,EAAKsK,MAAM,CAAC,aAAc,YACtC,OAAOM,EAAAA,IAAAA,MAAUgE,GAAOA,GAAMhE,EAAAA,EAAAA,MAAK,KAI1Bse,EAAU/B,GAAUxnB,GACxBK,EAAKL,GAAOwjB,MAAM,CAAC,UAAW,MAG1BzX,EAAsByb,GAAS5b,EAAAA,EAAAA,gBAC1C4d,EAAAA,8BACAnpB,GAAQA,EAAKsK,MAAM,CAAC,aAAc,qBAAuB,QAG9C8e,EAAOH,EACPI,EAAWJ,EACXK,EAAWL,EACXM,EAAWN,EACXO,EAAUP,EAIV/J,EAAUiI,GAAS5b,EAAAA,EAAAA,gBAC9BvL,GACAA,GAAQA,EAAKsK,MAAM,CAAC,cAAeM,EAAAA,EAAAA,UAGxBlM,EAAS,CAAC8Q,EAAK/F,IAAW,KACrC,MAAMzJ,EAAOyJ,EAAO8O,YAAYxb,cAAcqN,WAC9C,OAAO2R,EAAAA,EAAAA,QAAanR,EAAAA,IAAAA,MAAU5K,GAAQA,GAAO4K,EAAAA,EAAAA,OAAM,EAGxCkb,EAAa,CAACtW,EAAK/F,IAAW,KACzC,MAAMzJ,EAAOyJ,EAAO8O,YAAYxb,cAAcqN,WAC9C,OAAO4e,EAAAA,EAAAA,YAAiBpe,EAAAA,IAAAA,MAAU5K,GAAQA,GAAO4K,EAAAA,EAAAA,OAAM,C,kFChFzD,SAAeob,E,QAAAA,2BAA0B,IAAuB,IAAvB,IAAE/S,KAAQjW,GAAO,EACxD,MAAM,OACJK,EAAM,aAAEF,EAAY,aAAE4f,EAAY,WAAE7T,EAAU,aAAEugB,EAAY,KAAElsB,GAC5DP,EAEE2f,EAAWxf,EAAa,YAG9B,MAAY,SAFCE,EAAOa,IAAI,QAGf,kBAACye,EAAQ,CAACxZ,IAAM5F,EACbF,OAASA,EACTE,KAAOA,EACPwf,aAAeA,EACf7T,WAAaA,EACb/L,aAAeA,EACfyf,SAAW6M,IAEd,kBAACxW,EAAQjW,EAClB,G,wHCdF,SACEkE,SAAQ,UACRwoB,SAAQ,UACRC,kBAAiB,UACjBC,aAAY,UACZ9sB,MAAOJ,EAAAA,QACPmtB,qBAAsB5qB,EAAAA,Q,kFCVxB,SAAe+mB,E,QAAAA,2BAA0B,IAAuB,IAAvB,IAAE/S,KAAQjW,GAAO,EACxD,MAAM,OACJK,EAAM,aACNF,EAAY,OACZ2X,EAAM,SACN8H,GACE5f,EAEE6lB,EAASxlB,GAAUA,EAAOa,IAAMb,EAAOa,IAAI,UAAY,KACvDF,EAAOX,GAAUA,EAAOa,IAAMb,EAAOa,IAAI,QAAU,KACnD8e,EAAQ7f,EAAa,SAE3B,OAAGa,GAAiB,WAATA,GAAsB6kB,IAAsB,WAAXA,GAAkC,WAAXA,GAC1D,kBAAC7F,EAAK,CAAChf,KAAK,OACJK,UAAYyW,EAAO3U,OAAS,UAAY,GACxC4f,MAAQjL,EAAO3U,OAAS2U,EAAS,GACjC8H,SAAWnU,IACTmU,EAASnU,EAAEpI,OAAO+gB,MAAM,GAAG,EAE7B0I,SAAU7W,EAAI4Q,aAEtB,kBAAC5Q,EAAQjW,EAClB,G,8KClBF,MAAM+sB,EAAS,IAAI1oB,EAAAA,WAAW,cAC9B0oB,EAAOC,MAAMnoB,MAAMooB,OAAO,CAAC,UAC3BF,EAAOvf,IAAI,CAAE/I,WAAY,WAElB,MAAMP,EAAY,IAA4C,IAA5C,OAAEC,EAAM,UAAE9C,EAAY,GAAE,WAAEjB,GAAY,EAC7D,GAAqB,iBAAX+D,EACR,OAAO,KAGT,GAAKA,EAAS,CACZ,MAAM,kBAAEY,GAAsB3E,IACxBkE,EAAOyoB,EAAO7sB,OAAOiE,GACrBa,GAAYC,EAAAA,EAAAA,GAAUX,EAAM,CAAES,sBAEpC,IAAImoB,EAMJ,MAJwB,iBAAdloB,IACRkoB,EAAU,IAAAloB,GAAS,KAATA,IAIV,yBACEG,wBAAyB,CACvBC,OAAQ8nB,GAEV7rB,UAAW6D,IAAG7D,EAAW,qBAG/B,CACA,OAAO,IAAI,EAQb6C,EAASsB,aAAe,CACtBpF,WAAY,KAAM,CAAG2E,mBAAmB,KAG1C,SAAeikB,EAAAA,EAAAA,0BAAyB9kB,E,mIC3CxC,MAAMipB,UAAuBlM,EAAAA,UAY3B/gB,SACE,IAAI,WAAEE,EAAU,OAAEC,GAAWb,KAAKQ,MAC9BotB,EAAU,CAAC,aAEXnlB,EAAU,KAOd,OARgD,IAA7B5H,EAAOa,IAAI,gBAI5BksB,EAAQte,KAAK,cACb7G,EAAU,0BAAM5G,UAAU,4BAA0B,gBAG/C,yBAAKA,UAAW+rB,EAAQtkB,KAAK,MACjCb,EACD,kBAAC,IAAK,OAAMzI,KAAKQ,MAAK,CACpBI,WAAaA,EACb4B,MAAQ,EACRD,YAAcvC,KAAKQ,MAAM+B,aAAe,KAG9C,EAGF,SAAeinB,EAAAA,EAAAA,0BAAyBmE,E,kFCnCxC,SAAenE,EAAAA,EAAAA,0BAAyB/mB,EAAAA,E,mFCDxC,SAAe+mB,E,QAAAA,2BAA0BhpB,IACvC,MAAM,IAAEiW,GAAQjW,EAEhB,OAAO,8BACL,kBAACiW,EAAQjW,GACT,2BAAOqB,UAAU,iBACf,yBAAKA,UAAU,WAAS,SAErB,G,mFCXT,IAAIgsB,GAAU,EAEC,aAEb,MAAO,CACLvgB,aAAc,CACZ9J,KAAM,CACJkK,YAAa,CACX2J,WAAarE,GAAQ,WAEnB,OADA6a,GAAU,EACH7a,KAAO,UAChB,EACA8a,eAAgB,CAAC9a,EAAK/F,IAAW,WAC/B,MAAMiF,EAAKjF,EAAOrM,aAAamtB,WAQ/B,OAPGF,GAAyB,mBAAP3b,IAGnB,IAAWA,EAAI,GACf2b,GAAU,GAGL7a,KAAO,UAChB,KAKV,C,2PC3BA,MAAM,EAA+B/S,QAAQ,yD,uECS7C,MAAM+tB,EAAc/T,IAAO,IAAD,EACxB,MAAMgU,EAAU,QAChB,OAAI,IAAAhU,GAAC,KAADA,E
AAUgU,GAAW,EAChBhU,EAEF,MAAAA,EAAE1F,MAAM0Z,GAAS,IAAE,OAAO,EAG7BC,EAAejoB,GACP,QAARA,GAIC,WAAWkQ,KAAKlQ,GAHZA,EAIC,IAAMA,EACX5F,QAAQ,KAAM,SAAW,IAK1B8tB,EAAaloB,GAML,SALZA,EAAMA,EACH5F,QAAQ,MAAO,MACfA,QAAQ,OAAQ,SAChBA,QAAQ,KAAM,MACdA,QAAQ,MAAO,QAET4F,EACJ5F,QAAQ,OAAQ,UAGhB,WAAW8V,KAAKlQ,GAGZA,EAFA,IAAOA,EAAM,IAKlBmoB,EAAoBnoB,GACZ,QAARA,EACKA,EAEL,KAAKkQ,KAAKlQ,GACL,OAAUA,EAAI5F,QAAQ,KAAM,OAAQA,QAAQ,KAAM,MAAMA,QAAQ,KAAM,MAAQ,OAGlF,WAAW8V,KAAKlQ,GAKZA,EAJA,IAAMA,EACV5F,QAAQ,KAAM,MACdA,QAAQ,KAAM,MAAQ,IAK7B,SAASguB,EAAmB5nB,GAC1B,IAAI6nB,EAAgB,GACpB,IAAK,IAAKrU,EAAG8E,KAAMtY,EAAQ/E,IAAI,QAAQ2M,WAAY,CACjD,IAAIkgB,EAAeP,EAAW/T,GAC1B8E,aAAa/b,EAAAA,EAAAA,KACfsrB,EAAchf,KAAM,MAAKif,uBAAkCxP,EAAEhe,QAAQge,EAAEvd,KAAQ,mBAAkBud,EAAEvd,QAAU,WAE7G8sB,EAAchf,KAAM,MAAKif,OAAkB,IAAexP,EAAG,KAAM,GAAG1e,QAAQ,gBAAiB,UAEnG,CACA,MAAQ,MAAKiuB,EAAchlB,KAAK,WAClC,CAEA,MAAMklB,EAAU,SAAC/nB,EAASgoB,EAAQC,GAAuB,IAAdC,EAAM,UAAH,6CAAG,GAC3CC,GAA6B,EAC7BC,EAAY,GAChB,MAAMC,EAAW,sCAAIlb,EAAI,yBAAJA,EAAI,uBAAKib,GAAa,IAAM,IAAAjb,GAAI,KAAJA,EAAS6a,GAAQnlB,KAAK,IAAI,EACrEylB,EAA8B,sCAAInb,EAAI,yBAAJA,EAAI,uBAAKib,GAAa,IAAAjb,GAAI,KAAJA,EAAS6a,GAAQnlB,KAAK,IAAI,EAClF0lB,EAAa,IAAMH,GAAc,IAAGH,IACpCO,EAAY,qBAACzmB,EAAQ,UAAH,6CAAG,EAAC,OAAKqmB,GAAa,YAAI,OAAQrmB,EAAM,EAChE,IAAIe,EAAU9C,EAAQ/E,IAAI,WAa1B,GAZAmtB,GAAa,OAASF,EAElBloB,EAAQigB,IAAI,gBACdoI,KAAYroB,EAAQ/E,IAAI,gBAG1BotB,EAAS,KAAMroB,EAAQ/E,IAAI,WAE3BstB,IACAC,IACAF,EAA6B,GAAEtoB,EAAQ/E,IAAI,UAEvC6H,GAAWA,EAAQqG,KACrB,IAAK,IAAI4J,KAAK,MAAA/S,EAAQ/E,IAAI,YAAU,QAAY,CAAC,IAAD,EAC9CstB,IACAC,IACA,IAAKC,EAAGnQ,GAAKvF,EACbuV,EAA4B,KAAO,GAAEG,MAAMnQ,KAC3C6P,EAA6BA,GAA8B,kBAAkBzY,KAAK+Y,IAAM,0BAA0B/Y,KAAK4I,EACzH,CAGF,MAAMjV,EAAOrD,EAAQ/E,IAAI,QACd,IAAD,EAAV,GAAIoI,EACF,GAAI8kB,GAA8B,OAAC,OAAQ,MAAO,UAAQ,OAAUnoB,EAAQ/E,IAAI,WAC9E,IAAK,IAAKuY,EAAG8E,KAAMjV,EAAKuE,WAAY,CAClC,IAAIkgB,EAAeP,EAAW/T,GAC9B+U,IACAC,IACAF,EAA4B,MACxBhQ,aAAa/b,EAAAA,EAAAA,KACf8rB,EAAU,GAAEP,MAAiBxP,EAAEhe,OAAOge,EAAEvd,KAAQ,SAAQud,EAAEvd,OAAS,MAEnEstB,EAAU,GAAEP,KAAgBxP,IAEhC,MACK,GAAGjV,aAAgB9G,EAAAA,EAAAA,KACxBgsB,IACAC,IACAF,EAA6B,mBAAkBjlB,EAAK/I,aAC/C,CACLiuB,IACAC,IACAF,EAA4B,OAC5B,IAAII,EAAUrlB,EACTsE,EAAAA,IAAAA,MAAU+gB,GAMbJ,EAA4BV,EAAmB5nB,KALxB,iBAAZ0oB,IACTA,EAAU,IAAeA,IAE3BJ,EAA4BI,GAIhC,MACUrlB,GAAkC,SAA1BrD,EAAQ/E,IAAI,YAC9BstB,IACAC,IACAF,EAA4B,UAG9B,OAAOF,CACT,EAGaO,EAA2C3oB,GAC/C+nB,EAAQ/nB,EAAS2nB,EAAkB,MAAO,QAItCiB,EAAqC5oB,GACzC+nB,EAAQ/nB,EAASynB,EAAa,QAI1BoB,EAAoC7oB,GACxC+nB,EAAQ/nB,EAAS0nB,EAAW,M,8FC3JrC,aACS,CACL1E,WAAY,CACV8F,gBAAeA,EAAAA,SAEjB5kB,GAAE,EACF2C,aAAc,CACZkiB,gBAAiB,CACf/hB,UAASA,K,kOCJjB,MAAMmI,EAAQ,CACZ6Z,OAAQ,UACRC,WAAY,EACZC,QAAS,cACTC,gBAAiB,qBACjBC,cAAe,IACfC,WAAY,IACZC,OAAQ,4BACRC,aAAc,cACdC,UAAW,OACXC,aAAc,QAGVC,EAAc,CAClBV,OAAQ,UACRC,WAAY,EACZC,QAAS,cACTC,gBAAiB,kBACjBK,UAAW,OACXF,OAAQ,4BACRF,cAAe,IACfC,WAAY,IACZE,aAAc,cACdI,UAAW,OACXC,YAAa,OACbC,WAAY,OACZC,OAAQ,OACRL,aAAc,QA4HhB,EAzHyB,IAAuD,IAAD,QAAtD,QAAEzpB,EAAO,yBAAE+pB,EAAwB,WAAE5vB,GAAY,EACxE,MAAMmW,EAAS0Z,IAAW7vB,GAAcA,IAAe,KACjD8vB,GAAwD,IAAnChvB,IAAIqV,EAAQ,oBAAgCrV,IAAIqV,EAAQ,6BAA6B,GAC1G4Z,GAAUC,EAAAA,EAAAA,QAAO,OAEhBC,EAAgBC,IAAqBC,EAAAA,EAAAA,UAAwD,QAAhD,EAACP,EAAyBQ,8BAAsB,aAA/C,EAAiDnhB,SAASK,UACxG+gB,EAAYC,IAAiBH,EAAAA,EAAAA,UAASP,aAAwB,EAAxBA,EAA0BW,uBACvEC,EAAAA,EAAAA,YAAU,KAIF,GACL,KACHA,EAAAA,EAAAA,YAAU,KAAO,IAAD,EACd,MAAMC,EAAa,UACXV,EAAQ7qB,QAAQurB,aAAW,QACzBC,IAAI,cAAMA,EAAKC,WAA0B,QAAlB,EAAID,EAAKE,iBAAS,aAAd,EAAgB1hB,SAAS,gBAAgB,IAI9E,OAFA,IAAAuhB,GAAU,KAAVA,GAAmBC,GAAQA,EAAKG,iBAAiB,aAAcC,EAAsC,CAAEC,SAAS,MAEzG,KAEL,IAAAN,GAAU,KAAVA,GAAmBC,GAAQA,EAAKM,oBAAoB,aAAcF,IAAsC,CACzG,GAC
A,CAACjrB,IAEJ,MAAMorB,EAAoBrB,EAAyBQ,uBAC7Cc,EAAkBD,EAAkBnwB,IAAImvB,GACxCkB,EAAUD,EAAgBpwB,IAAI,KAApBowB,CAA0BrrB,GASpCurB,EAAsB,KAC1Bd,GAAeD,EAAW,EAGtBgB,EAAqBtrB,GACrBA,IAAQkqB,EACHV,EAEFva,EAGH8b,EAAwCzlB,IAC5C,MAAM,OAAEpI,EAAM,OAAEquB,GAAWjmB,GACnBkmB,aAAcC,EAAeC,aAAcC,EAAa,UAAEC,GAAc1uB,EAEpDuuB,EAAgBE,IACH,IAAdC,GAAmBL,EAAS,GAFlCI,EAAgBC,GAGSH,GAAiBF,EAAS,IAGtEjmB,EAAEumB,gBACJ,EAGIC,EAAmB/B,EACrB,kBAAC,KAAiB,CAClBlJ,SAAUsK,EAAgBpwB,IAAI,UAC9BG,UAAU,kBACV+T,OAAO8c,EAAAA,EAAAA,IAAShxB,IAAIqV,EAAQ,2BAE3Bgb,GAGH,8BAAUY,UAAU,EAAM9wB,UAAU,OAAOkM,MAAOgkB,IAEpD,OACE,yBAAKlwB,UAAU,mBAAmBzB,IAAKuwB,GACrC,yBAAK/a,MAAO,CAAE5T,MAAO,OAAQ2tB,QAAS,OAAQiD,eAAgB,aAAcC,WAAY,SAAUC,aAAc,SAC9G,wBACEC,QAAS,IAAMf,IACfpc,MAAO,CAAE6Z,OAAQ,YAAY,YAE/B,4BACEsD,QAAS,IAAMf,IACfpc,MAAO,CAAEma,OAAQ,OAAQiD,WAAY,QACrCzP,MAAO0N,EAAa,qBAAuB,oBAE3C,yBAAKpvB,UAAU,QAAQG,MAAM,KAAKD,OAAO,MACvC,yBAAKgC,KAAMktB,EAAa,oBAAsB,eAAgBgC,UAAWhC,EAAa,oBAAsB,oBAKhHA,GAAc,yBAAKpvB,UAAU,gBAC3B,yBAAK+T,MAAO,CAAEsd,YAAa,OAAQC,aAAc,OAAQnxB,MAAO,OAAQ2tB,QAAS,SAE7E,MAAAkC,EAAkBxjB,YAAU,QAAM,IAAgB,IAAf1H,EAAKysB,GAAI,EAC1C,OAAQ,yBAAKxd,MAAOqc,EAAkBtrB,GAAM9E,UAAU,MAAM8E,IAAKA,EAAKosB,QAAS,IAhErE,CAACpsB,IACHkqB,IAAmBlqB,GAErCmqB,EAAkBnqB,EACpB,EA4DiG0sB,CAAgB1sB,IACnG,wBAAIiP,MAAOjP,IAAQkqB,EAAiB,CAAEyC,MAAO,SAAa,CAAC,GAAIF,EAAI1xB,IAAI,UACnE,KAIZ,yBAAKG,UAAU,qBACb,kBAAC,EAAA0xB,gBAAe,CAAC/gB,KAAMuf,GACrB,mCAGJ,6BACGU,IAIH,C,+NChJV,MAAMtvB,EAAQA,GAASA,IAASiL,EAAAA,EAAAA,OAEnBolB,GAAgBzkB,EAAAA,EAAAA,gBAC3B5L,GACAA,IACE,MAAMswB,EAAetwB,EAClBzB,IAAI,aACDgyB,EAAavwB,EAChBzB,IAAI,cAAc0M,EAAAA,EAAAA,QACrB,OAAIqlB,GAAgBA,EAAaE,UACxBD,EAEF,IAAAA,GAAU,KAAVA,GACG,CAAC3U,EAAGpY,IAAQ,IAAA8sB,GAAY,KAAZA,EAAsB9sB,IAAK,IAIxCqqB,EAAwB7tB,GAAW,IAAY,IAAD,QAAX,GAAEwH,GAAI,EAEpD,OAAO,YAAA6oB,EAAcrwB,IAAM,QACpB,CAACiwB,EAAKzsB,KACT,MAAMitB,EAHO,CAACjtB,GAAQgE,EAAI,2BAA0BhE,KAGtCktB,CAASltB,GACvB,MAAoB,mBAAVitB,EACD,KAGFR,EAAIplB,IAAI,KAAM4lB,EAAM,KAC3B,QACM7U,GAAKA,GAAE,EAGN+U,GAAoB/kB,EAAAA,EAAAA,gBAC/B5L,GACAA,GAASA,EACNzB,IAAI,oBAGIyvB,GAAqBpiB,EAAAA,EAAAA,gBAChC5L,GACAA,GAASA,EACNzB,IAAI,oB,kICrCF,MAAMqyB,UAAsBtS,EAAAA,UACjCuS,gCAAgC5vB,GAC9B,MAAO,CAAE6vB,UAAU,EAAM7vB,QAC3B,CAEAzB,cACEE,SAAS,WACT7C,KAAKmD,MAAQ,CAAE8wB,UAAU,EAAO7vB,MAAO,KACzC,CAEA8vB,kBAAkB9vB,EAAO+vB,GACvBn0B,KAAKQ,MAAMmK,GAAGupB,kBAAkB9vB,EAAO+vB,EACzC,CAEAzzB,SACE,MAAM,aAAEC,EAAY,WAAEyzB,EAAU,SAAEC,GAAar0B,KAAKQ,MAEpD,GAAIR,KAAKmD,MAAM8wB,SAAU,CACvB,MAAMK,EAAoB3zB,EAAa,YACvC,OAAO,kBAAC2zB,EAAiB,CAACvzB,KAAMqzB,GAClC,CAEA,OAAOC,CACT,EAWFN,EAAc/tB,aAAe,CAC3BouB,WAAY,iBACZzzB,aAAc,IAAM4zB,EAAAA,QACpB5pB,GAAI,CACFupB,kBAAiBA,EAAAA,mBAEnBG,SAAU,MAGZ,S,0FC9CA,MASA,EATkB,IAAD,IAAC,KAAEtzB,GAAM,SACxB,yBAAKc,UAAU,YAAU,MACpB,+CAA+B,MAATd,EAAe,iBAAmBA,EAAI,sBAC3D,C,wICJD,MAAMmzB,EAAoB7tB,QAAQjC,MAI5BowB,EAAqBzY,GAAe0Y,IAC/C,MAAM,aAAE9zB,EAAY,GAAEgK,GAAOoR,IACvBgY,EAAgBpzB,EAAa,iBAC7ByzB,EAAazpB,EAAG+pB,eAAeD,GAErC,MAAME,UAA0BlT,EAAAA,UAC9B/gB,SACE,OACE,kBAACqzB,EAAa,CAACK,WAAYA,EAAYzzB,aAAcA,EAAcgK,GAAIA,GACrE,kBAAC8pB,EAAgB,OAAKz0B,KAAKQ,MAAWR,KAAK4C,UAGjD,EAdqB,IAAAgyB,EAyBvB,OATAD,EAAkBzzB,YAAe,qBAAoBkzB,MAhB9BQ,EAiBFH,GAjByB3R,WAAa8R,EAAU9R,UAAU+R,mBAsB7EF,EAAkB7R,UAAUgS,gBAAkBL,EAAiB3R,UAAUgS,iBAGpEH,CAAiB,C,4DC7B1B,MAAM,EAA+B10B,QAAQ,uD,aCA7C,MAAM,EAA+BA,QAAQ,oB,2CCM7C,MAmCA,EAnCyB,eAAC,cAAC80B,EAAgB,GAAE,aAAEC,GAAe,GAAS,UAAH,6CAAG,CAAC,EAAC,OAAM,IAAmB,IAAD,MAAlB,UAAEjZ,GAAW,EAC1F,MAiBMkZ,EAAsBD,EAAeD,EAAgB,CAhBzD,MACA,aACA,sBACA,gBACA,mBACA,mBACA,wBACA,kBACA,aACA,qBACA,aACA,YACA,mBACA,SACA,kBAEsFA,GAElF3hB,EAAiB8hB,IAAUD,EAAqB,MAAAlT,MAAMkT,EAAoBtxB,SAAO,QADnE,CAACwxB,EAAU,KAAF,IAAE,GAAExqB,G
AAI,SAAKA,EAAG6pB,kBAAkBW,EAAS,KAGxE,MAAO,CACLxqB,GAAI,CACFupB,kBAAiB,oBACjBM,mBAAmBA,EAAAA,EAAAA,mBAAkBzY,IAEvC0N,WAAY,CACVsK,cAAa,UACbQ,SAAQA,EAAAA,SAEVnhB,iBACD,CACF,C,2YCvCD,MAAM,EAA+BnT,QAAQ,O,aCA7C,MAAM,EAA+BA,QAAQ,W,aCA7C,MAAM,EAA+BA,QAAQ,kB,iCCO7C,MAUMm1B,EAAa,CACjB,OAAWv0B,GAAWA,EAAOw0B,QAXC,CAACA,IAC/B,IAEE,OADgB,IAAIC,IAAJ,CAAYD,GACbjC,KAIjB,CAHE,MAAOnnB,GAEP,MAAO,QACT,GAIuCspB,CAAwB10B,EAAOw0B,SAAW,SACjF,aAAgB,IAAM,mBACtB,mBAAoB,KAAM,IAAIG,MAAOC,cACrC,YAAe,KAAM,IAAID,MAAOC,cAAcC,UAAU,EAAG,IAC3D,YAAe,IAAM,uCACrB,gBAAmB,IAAM,cACzB,YAAe,IAAM,gBACrB,YAAe,IAAM,0CACrB,OAAU,IAAM,EAChB,aAAgB,IAAM,EACtB,QAAW,IAAM,EACjB,QAAY70B,GAAqC,kBAAnBA,EAAOiG,SAAwBjG,EAAOiG,SAGhE6uB,EAAa90B,IACjBA,GAAS+0B,EAAAA,EAAAA,IAAU/0B,GACnB,IAAI,KAAEW,EAAI,OAAE6kB,GAAWxlB,EAEnB8J,EAAKyqB,EAAY,GAAE5zB,KAAQ6kB,MAAa+O,EAAW5zB,GAEvD,OAAG+M,EAAAA,EAAAA,IAAO5D,GACDA,EAAG9J,GAEL,iBAAmBA,EAAOW,IAAI,EAKjCq0B,EAAe9nB,IAAU+nB,EAAAA,EAAAA,IAAe/nB,EAAO,SAAUsB,GAC9C,iBAARA,GAAoB,IAAAA,GAAG,KAAHA,EAAY,MAAQ,IAE3C0mB,EAAkB,CAAC,gBAAiB,iBACpCC,EAAiB,CAAC,WAAY,YAC9BC,EAAkB,CACtB,UACA,UACA,mBACA,oBAEIC,EAAkB,CAAC,YAAa,aAEhCC,EAAmB,SAACC,EAAWvyB,GAAyB,IAAD,MAAhBkT,EAAS,UAAH,6CAAG,CAAC,EACrD,MAAMsf,EAA2B1vB,SACZxE,IAAhB0B,EAAO8C,SAAyCxE,IAAnBi0B,EAAUzvB,KACxC9C,EAAO8C,GAAOyvB,EAAUzvB,GAC1B,EAeyE,IAAD,GAZ1E,OACE,UACA,UACA,OACA,MACA,UACGovB,KACAC,KACAC,KACAC,IACJ,QAASvvB,GAAO0vB,EAAwB1vB,UAEfxE,IAAvBi0B,EAAUt1B,UAA0B,IAAcs1B,EAAUt1B,kBACtCqB,IAApB0B,EAAO/C,UAA2B+C,EAAO/C,SAAS6C,SACnDE,EAAO/C,SAAW,IAEpB,MAAAs1B,EAAUt1B,UAAQ,QAAS6F,IAAQ,IAAD,EAC7B,MAAA9C,EAAO/C,UAAQ,OAAU6F,IAG5B9C,EAAO/C,SAASwO,KAAK3I,EAAI,KAG7B,GAAGyvB,EAAUE,WAAY,CACnBzyB,EAAOyyB,aACTzyB,EAAOyyB,WAAa,CAAC,GAEvB,IAAI91B,GAAQo1B,EAAAA,EAAAA,IAAUQ,EAAUE,YAChC,IAAK,IAAIC,KAAY/1B,EAAO,CAaQ,IAAD,EAZjC,GAAKg2B,OAAO1T,UAAU2T,eAAeC,KAAKl2B,EAAO+1B,GAGjD,IAAK/1B,EAAM+1B,KAAa/1B,EAAM+1B,GAAUt0B,WAGxC,IAAKzB,EAAM+1B,KAAa/1B,EAAM+1B,GAAU5D,UAAa5b,EAAO5V,gBAG5D,IAAKX,EAAM+1B,KAAa/1B,EAAM+1B,GAAUI,WAAc5f,EAAO3V,iBAG7D,IAAIyC,EAAOyyB,WAAWC,GACpB1yB,EAAOyyB,WAAWC,GAAY/1B,EAAM+1B,IAChCH,EAAUt1B,UAAY,IAAcs1B,EAAUt1B,YAAuD,IAA1C,MAAAs1B,EAAUt1B,UAAQ,OAASy1B,KACpF1yB,EAAO/C,SAGT+C,EAAO/C,SAASwO,KAAKinB,GAFrB1yB,EAAO/C,SAAW,CAACy1B,GAM3B,CACF,CAQA,OAPGH,EAAUQ,QACP/yB,EAAO+yB,QACT/yB,EAAO+yB,MAAQ,CAAC,GAElB/yB,EAAO+yB,MAAQT,EAAiBC,EAAUQ,MAAO/yB,EAAO+yB,MAAO7f,IAG1DlT,CACT,EAEagzB,EAA0B,SAACh2B,GAAwE,IAAhEkW,EAAM,uDAAC,CAAC,EAAG+f,EAAkB,UAAH,kDAAG30B,EAAW40B,EAAa,UAAH,8CAC7Fl2B,IAAU0N,EAAAA,EAAAA,IAAO1N,EAAOgM,QACzBhM,EAASA,EAAOgM,QAClB,IAAImqB,OAAoC70B,IAApB20B,GAAiCj2B,QAA6BsB,IAAnBtB,EAAOmnB,SAAyBnnB,QAA6BsB,IAAnBtB,EAAOiG,QAEhH,MAAMmwB,GAAYD,GAAiBn2B,GAAUA,EAAOq2B,OAASr2B,EAAOq2B,MAAMvzB,OAAS,EAC7EwzB,GAAYH,GAAiBn2B,GAAUA,EAAOu2B,OAASv2B,EAAOu2B,MAAMzzB,OAAS,EACnF,IAAIqzB,IAAkBC,GAAYE,GAAW,CAC3C,MAAME,GAAczB,EAAAA,EAAAA,IAAUqB,EAC1Bp2B,EAAOq2B,MAAM,GACbr2B,EAAOu2B,MAAM,IAMjB,GAJAjB,EAAiBkB,EAAax2B,EAAQkW,IAClClW,EAAOy2B,KAAOD,EAAYC,MAC5Bz2B,EAAOy2B,IAAMD,EAAYC,UAELn1B,IAAnBtB,EAAOmnB,cAAiD7lB,IAAxBk1B,EAAYrP,QAC7CgP,GAAgB,OACX,GAAGK,EAAYf,WAAY,CAC5Bz1B,EAAOy1B,aACTz1B,EAAOy1B,WAAa,CAAC,GAEvB,IAAI91B,GAAQo1B,EAAAA,EAAAA,IAAUyB,EAAYf,YAClC,IAAK,IAAIC,KAAY/1B,EAAO,CAaQ,IAAD,EAZjC,GAAKg2B,OAAO1T,UAAU2T,eAAeC,KAAKl2B,EAAO+1B,GAGjD,IAAK/1B,EAAM+1B,KAAa/1B,EAAM+1B,GAAUt0B,WAGxC,IAAKzB,EAAM+1B,KAAa/1B,EAAM+1B,GAAU5D,UAAa5b,EAAO5V,gBAG5D,IAAKX,EAAM+1B,KAAa/1B,EAAM+1B,GAAUI,WAAc5f,EAAO3V,iBAG7D,IAAIP,EAAOy1B,WAAWC,GACpB11B,EAAOy1B,WAAWC,GAAY/1B,EAAM+1B,IAChCc,EAAYv2B,UAAY,IAAcu2B,EAAYv2B,YAAyD,IAA5C,MAAAu2B,EAAYv2B,UAAQ,OAASy1B,KAC1F11B,EAAOC,SAGTD,EAAOC,SAASwO,KAAKinB,GAFrB11B,EAAOC,SAAW,CAACy1B
,GAM3B,CACF,CACF,CACA,MAAMgB,EAAQ,CAAC,EACf,IAAI,IAAED,EAAG,KAAE91B,EAAI,QAAEwmB,EAAO,WAAEsO,EAAU,qBAAEkB,EAAoB,MAAEZ,GAAU/1B,GAAU,CAAC,GAC7E,gBAAEM,EAAe,iBAAEC,GAAqB2V,EAC5CugB,EAAMA,GAAO,CAAC,EACd,IACIp2B,GADA,KAAEH,EAAI,OAAE02B,EAAM,UAAEna,GAAcga,EAE9BllB,EAAM,CAAC,EAGX,GAAG2kB,IACDh2B,EAAOA,GAAQ,YAEfG,GAAeu2B,EAASA,EAAS,IAAM,IAAM12B,EACxCuc,GAAY,CAGfia,EADsBE,EAAW,SAAWA,EAAW,SAC9Bna,CAC3B,CAICyZ,IACD3kB,EAAIlR,GAAe,IAGrB,MAAMw2B,EAAgBC,GAAS,IAAAA,GAAI,KAAJA,GAAUhxB,GAAO6vB,OAAO1T,UAAU2T,eAAeC,KAAK71B,EAAQ8F,KAE1F9F,IAAWW,IACT80B,GAAckB,GAAwBE,EAAa3B,GACpDv0B,EAAO,SACCo1B,GAASc,EAAa1B,GAC9Bx0B,EAAO,QACCk2B,EAAazB,IACrBz0B,EAAO,SACPX,EAAOW,KAAO,UACLw1B,GAAkBn2B,EAAO+2B,OAelCp2B,EAAO,SACPX,EAAOW,KAAO,WAIlB,MAAMq2B,EAAqBC,IAAiB,IAAD,QACwB,EAAxC,QAAf,QAAN,EAAAj3B,SAAM,aAAN,EAAQk3B,gBAA0C51B,KAAf,QAAN,EAAAtB,SAAM,aAAN,EAAQk3B,YACvCD,EAAc,IAAAA,GAAW,KAAXA,EAAkB,EAAS,QAAR,EAAEj3B,SAAM,aAAN,EAAQk3B,WAE7C,GAAyB,QAAf,QAAN,EAAAl3B,SAAM,aAAN,EAAQm3B,gBAA0C71B,KAAf,QAAN,EAAAtB,SAAM,aAAN,EAAQm3B,UAAwB,CAC/D,IAAI1e,EAAI,EACR,KAAOwe,EAAYn0B,QAAe,QAAT,EAAG9C,SAAM,aAAN,EAAQm3B,WAAU,CAAC,IAAD,EAC5CF,EAAYxoB,KAAKwoB,EAAYxe,IAAMwe,EAAYn0B,QACjD,CACF,CACA,OAAOm0B,CAAW,EAIdt3B,GAAQo1B,EAAAA,EAAAA,IAAUU,GACxB,IAAI2B,EACAC,EAAuB,EAE3B,MAAMC,EAA2B,IAAMt3B,GACT,OAAzBA,EAAOu3B,oBAAmDj2B,IAAzBtB,EAAOu3B,eACxCF,GAAwBr3B,EAAOu3B,cAE9BC,EAA0B,KAC9B,IAAIx3B,IAAWA,EAAOC,SACpB,OAAO,EAET,IAAIw3B,EAAa,EACD,IAAD,EAMR,EANJvB,EACD,MAAAl2B,EAAOC,UAAQ,QAAS6F,GAAO2xB,QAChBn2B,IAAbiQ,EAAIzL,GACA,EACA,IAGN,MAAA9F,EAAOC,UAAQ,QAAS6F,IAAG,aAAI2xB,QACyBn2B,KAAtC,QAAhB,EAAAiQ,EAAIlR,UAAY,aAAhB,eAAuBq3B,QAAgBp2B,IAAXo2B,EAAE5xB,MAC1B,EACA,CAAC,IAGT,OAAO9F,EAAOC,SAAS6C,OAAS20B,CAAU,EAGtCE,EAAsBjC,IAAc,IAAD,EACvC,QAAI11B,GAAWA,EAAOC,UAAaD,EAAOC,SAAS6C,UAG3C,MAAA9C,EAAOC,UAAQ,OAAUy1B,EAAS,EAGtCkC,EAAkBlC,IAClB11B,GAAmC,OAAzBA,EAAOu3B,oBAAmDj2B,IAAzBtB,EAAOu3B,gBAGnDD,OAGCK,EAAmBjC,IAGf11B,EAAOu3B,cAAgBF,EAAuBG,IAA6B,GA4ErF,GAxEEJ,EADClB,EACqB,SAACR,GAAqC,IAA3BmC,EAAY,UAAH,kDAAGv2B,EAC3C,GAAGtB,GAAUL,EAAM+1B,GAAW,CAI5B,GAFA/1B,EAAM+1B,GAAUe,IAAM92B,EAAM+1B,GAAUe,KAAO,CAAC,EAE1C92B,EAAM+1B,GAAUe,IAAIqB,UAAW,CACjC,MAAMC,EAAc,IAAcp4B,EAAM+1B,GAAUqB,MAC9Cp3B,EAAM+1B,GAAUqB,KAAK,QACrBz1B,EACE02B,EAAcr4B,EAAM+1B,GAAUvO,QAC9B8Q,EAAct4B,EAAM+1B,GAAUzvB,QAYpC,YATEywB,EAAM/2B,EAAM+1B,GAAUe,IAAIv2B,MAAQw1B,QADjBp0B,IAAhB02B,EAC6CA,OACtB12B,IAAhB22B,EACsCA,OACtB32B,IAAhBy2B,EACsCA,EAEAjD,EAAUn1B,EAAM+1B,IAIlE,CACA/1B,EAAM+1B,GAAUe,IAAIv2B,KAAOP,EAAM+1B,GAAUe,IAAIv2B,MAAQw1B,CACzD,MAAW/1B,EAAM+1B,KAAsC,IAAzBiB,IAE5Bh3B,EAAM+1B,GAAY,CAChBe,IAAK,CACHv2B,KAAMw1B,KAKZ,IAAIwC,EAAIlC,EAAwBh2B,GAAUL,EAAM+1B,SAAap0B,EAAW4U,EAAQ2hB,EAAW3B,GAMpE,IAAD,EALlB0B,EAAelC,KAInB2B,IACI,IAAca,GAChB3mB,EAAIlR,GAAe,MAAAkR,EAAIlR,IAAY,OAAQ63B,GAE3C3mB,EAAIlR,GAAaoO,KAAKypB,GAE1B,EAEsB,CAACxC,EAAUmC,KAC/B,GAAID,EAAelC,GAAnB,CAGA,GAAGC,OAAO1T,UAAU2T,eAAeC,KAAK71B,EAAQ,kBAC9CA,EAAOm4B,eACPxC,OAAO1T,UAAU2T,eAAeC,KAAK71B,EAAOm4B,cAAe,YAC3Dn4B,EAAOm4B,cAAcC,SACrBzC,OAAO1T,UAAU2T,eAAeC,KAAK71B,EAAQ,UAC7CA,EAAOY,OACPZ,EAAOm4B,cAAcE,eAAiB3C,GACtC,IAAK,IAAI4C,KAAQt4B,EAAOm4B,cAAcC,QACpC,IAAiE,IAA7Dp4B,EAAOY,MAAM23B,OAAOv4B,EAAOm4B,cAAcC,QAAQE,IAAe,CAClE/mB,EAAImkB,GAAY4C,EAChB,KACF,OAGF/mB,EAAImkB,GAAYM,EAAwBr2B,EAAM+1B,GAAWxf,EAAQ2hB,EAAW3B,GAE9EmB,GAjBA,CAiBsB,EAKvBlB,EAAe,CAChB,IAAIqC,EAUJ,GAREA,EAASxD,OADY1zB,IAApB20B,EACoBA,OACD30B,IAAZ6lB,EACaA,EAEAnnB,EAAOiG,UAI1BiwB,EAAY,CAEd,GAAqB,iBAAXsC,GAAgC,WAAT73B,EAC/B,MAAQ,GAAE63B,IAGZ,GAAqB,iBAAXA,GAAgC,WAAT73B,EAC/B,OAAO63B,EAGT,IACE,OAAO1tB,KAAKC,MAAMytB,EAIpB,CAHE,MAAMptB,GAEN,OAAOotB,CACT,CACF,CAQA,GALIx4B,IACFW,EAAO,IAAc63B,GAAU,e
AAiBA,GAItC,UAAT73B,EAAkB,CACnB,IAAK,IAAc63B,GAAS,CAC1B,GAAqB,iBAAXA,EACR,OAAOA,EAETA,EAAS,CAACA,EACZ,CACA,MAAMC,EAAaz4B,EACfA,EAAO+1B,WACPz0B,EACDm3B,IACDA,EAAWhC,IAAMgC,EAAWhC,KAAOA,GAAO,CAAC,EAC3CgC,EAAWhC,IAAIv2B,KAAOu4B,EAAWhC,IAAIv2B,MAAQu2B,EAAIv2B,MAEnD,IAAIw4B,EAAc,IAAAF,GAAM,KAANA,GACXxQ,GAAKgO,EAAwByC,EAAYviB,EAAQ8R,EAAGkO,KAW3D,OAVAwC,EAAc1B,EAAkB0B,GAC7BjC,EAAIkC,SACLpnB,EAAIlR,GAAeq4B,EACd5F,IAAQ4D,IACXnlB,EAAIlR,GAAaoO,KAAK,CAACioB,MAAOA,KAIhCnlB,EAAMmnB,EAEDnnB,CACT,CAGA,GAAY,WAAT5Q,EAAmB,CAEpB,GAAqB,iBAAX63B,EACR,OAAOA,EAET,IAAK,IAAI9C,KAAY8C,EACd7C,OAAO1T,UAAU2T,eAAeC,KAAK2C,EAAQ9C,KAG9C11B,GAAUL,EAAM+1B,IAAa/1B,EAAM+1B,GAAU5D,WAAaxxB,GAG1DN,GAAUL,EAAM+1B,IAAa/1B,EAAM+1B,GAAUI,YAAcv1B,IAG3DP,GAAUL,EAAM+1B,IAAa/1B,EAAM+1B,GAAUe,KAAO92B,EAAM+1B,GAAUe,IAAIqB,UAC1EpB,EAAM/2B,EAAM+1B,GAAUe,IAAIv2B,MAAQw1B,GAAY8C,EAAO9C,GAGvD0B,EAAoB1B,EAAU8C,EAAO9C,MAMvC,OAJK5C,IAAQ4D,IACXnlB,EAAIlR,GAAaoO,KAAK,CAACioB,MAAOA,IAGzBnlB,CACT,CAGA,OADAA,EAAIlR,GAAgByyB,IAAQ4D,GAAoC8B,EAA3B,CAAC,CAAC9B,MAAOA,GAAQ8B,GAC/CjnB,CACT,CAIA,GAAY,WAAT5Q,EAAmB,CACpB,IAAK,IAAI+0B,KAAY/1B,EACdg2B,OAAO1T,UAAU2T,eAAeC,KAAKl2B,EAAO+1B,KAG5C/1B,EAAM+1B,IAAa/1B,EAAM+1B,GAAUt0B,YAGnCzB,EAAM+1B,IAAa/1B,EAAM+1B,GAAU5D,WAAaxxB,GAGhDX,EAAM+1B,IAAa/1B,EAAM+1B,GAAUI,YAAcv1B,GAGtD62B,EAAoB1B,IAMtB,GAJIQ,GAAcQ,GAChBnlB,EAAIlR,GAAaoO,KAAK,CAACioB,MAAOA,IAG7BY,IACD,OAAO/lB,EAGT,IAA8B,IAAzBolB,EACAT,EACD3kB,EAAIlR,GAAaoO,KAAK,CAACmqB,eAAgB,yBAEvCrnB,EAAIsnB,gBAAkB,CAAC,EAEzBxB,SACK,GAAKV,EAAuB,CACjC,MAAMmC,GAAkB/D,EAAAA,EAAAA,IAAU4B,GAC5BoC,EAAuB/C,EAAwB8C,EAAiB5iB,OAAQ5U,EAAW40B,GAEzF,GAAGA,GAAc4C,EAAgBrC,KAAOqC,EAAgBrC,IAAIv2B,MAAqC,cAA7B44B,EAAgBrC,IAAIv2B,KAEtFqR,EAAIlR,GAAaoO,KAAKsqB,OACjB,CACL,MAAMC,EAA2C,OAAzBh5B,EAAOi5B,oBAAmD33B,IAAzBtB,EAAOi5B,eAA+B5B,EAAuBr3B,EAAOi5B,cACzHj5B,EAAOi5B,cAAgB5B,EACvB,EACJ,IAAK,IAAI5e,EAAI,EAAGA,GAAKugB,EAAiBvgB,IAAK,CACzC,GAAG6e,IACD,OAAO/lB,EAET,GAAG2kB,EAAY,CACb,MAAMgD,EAAO,CAAC,EACdA,EAAK,iBAAmBzgB,GAAKsgB,EAAgC,UAC7DxnB,EAAIlR,GAAaoO,KAAKyqB,EACxB,MACE3nB,EAAI,iBAAmBkH,GAAKsgB,EAE9B1B,GACF,CACF,CACF,CACA,OAAO9lB,CACT,CAEA,GAAY,UAAT5Q,EAAkB,CACnB,IAAKo1B,EACH,OAGF,IAAIkB,EACY,IAAD,EAKgB,EAL/B,GAAGf,EACDH,EAAMU,IAAMV,EAAMU,MAAa,QAAV,EAAIz2B,SAAM,aAAN,EAAQy2B,MAAO,CAAC,EACzCV,EAAMU,IAAIv2B,KAAO61B,EAAMU,IAAIv2B,MAAQu2B,EAAIv2B,KAGzC,GAAG,IAAc61B,EAAMQ,OACrBU,EAAc,MAAAlB,EAAMQ,OAAK,QAAK9d,GAAKud,EAAwBV,EAAiBS,EAAOtd,EAAGvC,GAASA,OAAQ5U,EAAW40B,UAC7G,GAAG,IAAcH,EAAMM,OAAQ,CAAC,IAAD,EACpCY,EAAc,MAAAlB,EAAMM,OAAK,QAAK5d,GAAKud,EAAwBV,EAAiBS,EAAOtd,EAAGvC,GAASA,OAAQ5U,EAAW40B,IACpH,KAAO,OAAIA,GAAcA,GAAcO,EAAIkC,SAGzC,OAAO3C,EAAwBD,EAAO7f,OAAQ5U,EAAW40B,GAFzDe,EAAc,CAACjB,EAAwBD,EAAO7f,OAAQ5U,EAAW40B,GAGnE,CAEA,OADAe,EAAcD,EAAkBC,GAC7Bf,GAAcO,EAAIkC,SACnBpnB,EAAIlR,GAAe42B,EACdnE,IAAQ4D,IACXnlB,EAAIlR,GAAaoO,KAAK,CAACioB,MAAOA,IAEzBnlB,GAEF0lB,CACT,CAEA,IAAI/pB,EACJ,GAAIlN,GAAU,IAAcA,EAAO+2B,MAEjC7pB,GAAQoN,EAAAA,EAAAA,IAAeta,EAAO+2B,MAAM,OAC/B,KAAG/2B,EA+BR,OA5BA,GADAkN,EAAQ4nB,EAAU90B,GACE,iBAAVkN,EAAoB,CAC5B,IAAIisB,EAAMn5B,EAAOo5B,QACdD,UACEn5B,EAAOq5B,kBACRF,IAEFjsB,EAAQisB,GAEV,IAAIG,EAAMt5B,EAAOu5B,QACdD,UACEt5B,EAAOw5B,kBACRF,IAEFpsB,EAAQosB,EAEZ,CACA,GAAoB,iBAAVpsB,IACiB,OAArBlN,EAAOy5B,gBAA2Cn4B,IAArBtB,EAAOy5B,YACtCvsB,EAAQ,IAAAA,GAAK,KAALA,EAAY,EAAGlN,EAAOy5B,YAEP,OAArBz5B,EAAO05B,gBAA2Cp4B,IAArBtB,EAAO05B,WAAyB,CAC/D,IAAIjhB,EAAI,EACR,KAAOvL,EAAMpK,OAAS9C,EAAO05B,WAC3BxsB,GAASA,EAAMuL,IAAMvL,EAAMpK,OAE/B,CAIJ,CACA,GAAa,SAATnC,EAIJ,OAAGu1B,GACD3kB,EAAIlR,GAAgByyB,IAAQ4D,GAAmCxpB,EAA1B,CAAC,CAACwpB,MAAOA,GAAQxpB,GAC/CqE,GAGFrE,CACT,EAEaysB,EAAetf,IACvBA,EAAMra,SACPqa,EAAQA,EAAMra
,QAEbqa,EAAMob,aACPpb,EAAM1Z,KAAO,UAGR0Z,GAGIuf,EAAmB,CAAC55B,EAAQkW,EAAQ2jB,KAC/C,MAAMC,EAAO9D,EAAwBh2B,EAAQkW,EAAQ2jB,GAAG,GACxD,GAAKC,EACL,MAAmB,iBAATA,EACDA,EAEFC,IAAID,EAAM,CAAEE,aAAa,EAAMC,OAAQ,MAAO,EAG1CC,EAAmB,CAACl6B,EAAQkW,EAAQ2jB,IAC/C7D,EAAwBh2B,EAAQkW,EAAQ2jB,GAAG,GAEvCM,EAAW,CAACC,EAAMC,EAAMC,IAAS,CAACF,EAAM,IAAeC,GAAO,IAAeC,IAEtEC,GAA2BC,EAAAA,EAAAA,GAASZ,EAAkBO,GAEtDM,GAA2BD,EAAAA,EAAAA,GAASN,EAAkBC,E,0ECznBpD,SAAS,IACtB,MAAO,CAAErwB,GAAE,EACb,C,whCCJA,MAAM,EAA+B1K,QAAQ,gE,iDCA7C,MAAM,EAA+BA,QAAQ,iD,+HCA7C,MAAM,EAA+BA,QAAQ,kD,qECA7C,MAAM,EAA+BA,QAAQ,mB,aCA7C,MAAM,EAA+BA,QAAQ,mB,aCA7C,MAAM,EAA+BA,QAAQ,c,uBCYtC,MAAMs7B,EAAc,mBACdC,EAAa,kBACbC,EAAc,mBACdC,EAAe,oBACfC,EAA+B,oCAC/BC,EAAkB,sBAClBC,EAAe,oBACfC,EAAc,mBACdC,EAAsB,2BACtBC,EAAc,mBACdC,EAAiB,sBACjBC,EAAgB,qBAChBC,GAAwB,4BACxBC,GAA8B,mCAC9BC,GAAkB,uBAClBC,GAA0B,+BAC1BC,GAAa,aAInB,SAASllB,GAAW7T,GACzB,MAAMg5B,GAHOv2B,EAGYzC,EAHJi5B,IAASx2B,GAAOA,EAAM,IAGX5F,QAAQ,MAAO,MAHnC,IAAC4F,EAIb,GAAmB,iBAATzC,EACR,MAAO,CACLhC,KAAM+5B,EACN9zB,QAAS+0B,EAGf,CAEO,SAASE,GAAel5B,GAC7B,MAAO,CACLhC,KAAM66B,GACN50B,QAASjE,EAEb,CAEO,SAAS+O,GAAUxP,GACxB,MAAO,CAACvB,KAAMg6B,EAAY/zB,QAAS1E,EACrC,CAEO,SAAS+qB,GAAe6M,GAC7B,MAAO,CAACn5B,KAAMi6B,EAAah0B,QAASkzB,EACtC,CAEO,MAAMgC,GAAe12B,GAAS,IAA8C,IAA9C,YAACyL,EAAW,cAAEnR,EAAa,WAAE0H,GAAW,GACvE,QAAE20B,GAAYr8B,EAEdo6B,EAAO,KACX,IACE10B,EAAMA,GAAO22B,IACb30B,EAAWqP,MAAM,CAAE3S,OAAQ,WAC3Bg2B,EAAOtpB,IAAAA,KAAUpL,EAAK,CAAEpF,OAAQg8B,EAAAA,aAUlC,CATE,MAAM5wB,GAGN,OADA5F,QAAQjC,MAAM6H,GACPhE,EAAWsQ,WAAW,CAC3B5T,OAAQ,SACR6D,MAAO,QACPC,QAASwD,EAAE6wB,OACXhjB,KAAM7N,EAAE8wB,MAAQ9wB,EAAE8wB,KAAKjjB,KAAO7N,EAAE8wB,KAAKjjB,KAAO,OAAI3X,GAEpD,CACA,OAAGw4B,GAAwB,iBAATA,EACTjpB,EAAYoc,eAAe6M,GAE7B,CAAC,CAAC,EAGX,IAAIqC,IAAuC,EAEpC,MAAMC,GAAc,CAACtC,EAAM53B,IAAS,IAA4F,IAA5F,YAAC2O,EAAW,cAAEnR,EAAa,WAAE0H,EAAY0C,IAAI,MAAEU,EAAK,QAAE6xB,EAAO,IAAEC,EAAM,CAAC,GAAG,WAAEv8B,GAAW,EAC3Ho8B,KACF32B,QAAQC,KAAM,0HACd02B,IAAuC,GAGzC,MAAM,mBACJI,EAAkB,eAClBC,EAAc,mBACd9xB,EAAkB,oBAClBC,GACE5K,SAEgB,IAAV+5B,IACRA,EAAOp6B,EAAcqN,iBAEJ,IAAT7K,IACRA,EAAMxC,EAAcwC,OAGtB,IAAIu6B,EAAuBH,EAAIG,qBAAuBH,EAAIG,qBAAuB,KAAe,EAE5FV,EAAUr8B,EAAcq8B,UAE5B,OAAOM,EAAQ,CACb7xB,QACA7H,KAAMm3B,EACN4C,QAASx6B,EACTq6B,qBACAC,iBACA9xB,qBACAC,wBACCC,MAAO,IAAoB,IAApB,KAACjI,EAAI,OAAE8U,GAAO,EAIpB,GAHArQ,EAAWqP,MAAM,CACf9V,KAAM,WAEL,IAAc8W,IAAWA,EAAO3U,OAAS,EAAG,CAC7C,IAAI65B,EAAiB,IAAAllB,GAAM,KAANA,GACdH,IACH9R,QAAQjC,MAAM+T,GACdA,EAAI2B,KAAO3B,EAAIslB,SAAWH,EAAqBV,EAASzkB,EAAIslB,UAAY,KACxEtlB,EAAI3H,KAAO2H,EAAIslB,SAAWtlB,EAAIslB,SAASn0B,KAAK,KAAO,KACnD6O,EAAI3P,MAAQ,QACZ2P,EAAI3W,KAAO,SACX2W,EAAIxT,OAAS,WACb,IAAsBwT,EAAK,UAAW,CAAEulB,YAAY,EAAM3vB,MAAOoK,EAAI1P,UAC9D0P,KAEXlQ,EAAWoQ,kBAAkBmlB,EAC/B,CAEA,OAAO9rB,EAAYgrB,eAAel5B,EAAK,GACvC,EAGN,IAAIm6B,GAAe,GAEnB,MAAMC,GAAqBC,KAASC,UAClC,MAAM7wB,EAAS0wB,GAAa1wB,OAE5B,IAAIA,EAEF,YADA5G,QAAQjC,MAAM,oEAGd,MAAM,WACJ6D,EAAU,aACVsY,EACA5V,IAAI,eACFozB,EAAc,MACd1yB,EAAK,IACL8xB,EAAM,CAAC,GACR,cACD58B,EAAa,YACbmR,GACEzE,EAEN,IAAI8wB,EAEF,YADA13B,QAAQjC,MAAM,mFAIhB,IAAIk5B,EAAuBH,EAAIG,qBAAuBH,EAAIG,qBAAuB,KAAe,EAEhG,MAAMV,EAAUr8B,EAAcq8B,WAExB,mBACJQ,EAAkB,eAClBC,EAAc,mBACd9xB,EAAkB,oBAClBC,GACEyB,EAAOrM,aAEX,IACE,IAAIo9B,QAAoB,IAAAL,IAAY,KAAZA,IAAoBG,MAAOG,EAAMztB,KACvD,MAAM,UAAE0tB,EAAS,wBAAEC,SAAkCF,GAC/C,OAAE3lB,EAAM,KAAE9U,SAAeu6B,EAAeI,EAAyB3tB,EAAM,CAC3E+sB,QAASh9B,EAAcwC,MACvBq6B,qBACAC,iBACA9xB,qBACAC,wBAYF,GATG+U,EAAanG,YAAYxK,MAC1B3H,EAAW0Q,SAAQR,IAAQ,IAAD,EAExB,MAA2B,WAApBA,EAAIzW,IAAI,SACY,aAAtByW,EAAIzW,IAAI,YACP,MAAAyW,EAAIzW,IAAI,aAAW,QAAO,CAACiF,EAAK2S,IAAM3S,IAAQ6J,EAAK8I,SAAkBnX,IAAZqO,EAAK8
I,IAAiB,IAItF,IAAchB,IAAWA,EAAO3U,OAAS,EAAG,CAC7C,IAAI65B,EAAiB,IAAAllB,GAAM,KAANA,GACdH,IACHA,EAAI2B,KAAO3B,EAAIslB,SAAWH,EAAqBV,EAASzkB,EAAIslB,UAAY,KACxEtlB,EAAI3H,KAAO2H,EAAIslB,SAAWtlB,EAAIslB,SAASn0B,KAAK,KAAO,KACnD6O,EAAI3P,MAAQ,QACZ2P,EAAI3W,KAAO,SACX2W,EAAIxT,OAAS,WACb,IAAsBwT,EAAK,UAAW,CAAEulB,YAAY,EAAM3vB,MAAOoK,EAAI1P,UAC9D0P,KAEXlQ,EAAWoQ,kBAAkBmlB,EAC/B,CAEkG,IAAD,IAA7Fh6B,GAAQjD,EAAc2B,UAAwB,eAAZsO,EAAK,IAAmC,oBAAZA,EAAK,UAE/D,QAAY,gBAAchN,IAAK,QAC1Bqd,GAA2B,kBAAhBA,EAAOrf,QAAyB,QAC/Cs8B,MAAOM,IACV,MAAMpsB,EAAM,CACVjP,IAAKq7B,EAAW9e,iBAChB/T,mBAAoBA,EACpBC,oBAAqBA,GAEvB,IACE,MAAM4G,QAAY/G,EAAM2G,GACpBI,aAAelG,OAASkG,EAAIC,QAAU,IACxChM,QAAQjC,MAAMgO,EAAIrG,WAAa,IAAMiG,EAAIjP,KAEzCq7B,EAAWC,kBAAoB1yB,KAAKC,MAAMwG,EAAII,KAIlD,CAFE,MAAOvG,GACP5F,QAAQjC,MAAM6H,EAChB,MAMN,OAHA+B,IAAIkwB,EAAW1tB,EAAMhN,GACrBwK,IAAImwB,EAAyB3tB,EAAMhN,GAE5B,CACL06B,YACAC,0BACD,GACA,YAAgB,CACjBD,WAAY39B,EAAcyqB,oBAAoB,MAAO5c,EAAAA,EAAAA,QAAOvB,OAC5DsxB,wBAAyB59B,EAAcqN,WAAWf,iBAG7C8wB,GAAa1wB,OACpB0wB,GAAe,EAGjB,CAFE,MAAM1xB,GACN5F,QAAQjC,MAAM6H,EAChB,CAEAyF,EAAY4sB,sBAAsB,GAAIN,EAAYE,UAAU,GAC3D,IAEUK,GAAyB/tB,GAAQvD,IAAW,IAAD,EAGzB,UAAA0wB,IAAY,KAAZA,IACtBjkB,GAAOA,EAAIpQ,KAAK,SAAM,OAClBkH,EAAKlH,KAAK,QAAU,IAM/Bq0B,GAAaruB,KAAKkB,GAClBmtB,GAAa1wB,OAASA,EACtB2wB,KAAoB,EAGf,SAASY,GAAahuB,EAAMiuB,EAAWC,EAAS3wB,EAAO4wB,GAC5D,MAAO,CACLn9B,KAAMk6B,EACNj0B,QAAQ,CAAE+I,OAAMzC,QAAO0wB,YAAWC,UAASC,SAE/C,CAEO,SAASC,GAAuBphB,EAAYqhB,EAAO9wB,EAAO4wB,GAC/D,MAAO,CACLn9B,KAAMk6B,EACNj0B,QAAQ,CAAE+I,KAAMgN,EAAYqhB,QAAO9wB,QAAO4wB,SAE9C,CAEO,MAAML,GAAwB,CAAC9tB,EAAMzC,KACnC,CACLvM,KAAM86B,GACN70B,QAAS,CAAE+I,OAAMzC,WAIR+wB,GAAiC,KACrC,CACLt9B,KAAM86B,GACN70B,QAAS,CACP+I,KAAM,GACNzC,OAAOK,EAAAA,EAAAA,UAKA2wB,GAAiB,CAAEt3B,EAASvF,KAChC,CACLV,KAAMo6B,EACNn0B,QAAQ,CACN+V,WAAY/V,EACZvF,YAKO88B,GAA4B,CAAExhB,EAAYihB,EAAWC,EAASO,KAClE,CACLz9B,KAAMm6B,EACNl0B,QAAQ,CACN+V,aACAihB,YACAC,UACAO,uBAKC,SAASC,GAAqBz3B,GACnC,MAAO,CACLjG,KAAM26B,GACN10B,QAAQ,CAAE+V,WAAY/V,GAE1B,CAEO,SAAS03B,GAAoB3uB,EAAMzC,GACxC,MAAO,CACLvM,KAAM46B,GACN30B,QAAQ,CAAE+I,OAAMzC,QAAOpH,IAAK,kBAEhC,CAEO,SAASy4B,GAAoB5uB,EAAMzC,GACxC,MAAO,CACLvM,KAAM46B,GACN30B,QAAQ,CAAE+I,OAAMzC,QAAOpH,IAAK,kBAEhC,CAEO,MAAM04B,GAAc,CAAE7uB,EAAMlF,EAAQ8G,KAClC,CACL3K,QAAS,CAAE+I,OAAMlF,SAAQ8G,OACzB5Q,KAAMq6B,IAIGyD,GAAa,CAAE9uB,EAAMlF,EAAQ0G,KACjC,CACLvK,QAAS,CAAE+I,OAAMlF,SAAQ0G,OACzBxQ,KAAMs6B,IAIGyD,GAAoB,CAAE/uB,EAAMlF,EAAQ0G,KACxC,CACLvK,QAAS,CAAE+I,OAAMlF,SAAQ0G,OACzBxQ,KAAMu6B,IAKGyD,GAAcxtB,IAClB,CACLvK,QAASuK,EACTxQ,KAAMw6B,IAMGyD,GAAkBztB,GAC5B,IAAiE,IAAjE,GAACrH,EAAE,YAAE+G,EAAW,cAAEnR,EAAa,WAAEK,EAAU,cAAEgK,GAAc,GACtD,SAAE80B,EAAQ,OAAEp0B,EAAM,UAAEmF,GAAcuB,GAClC,mBAAEzG,EAAkB,oBAAEC,GAAwB5K,IAG9Cqf,EAAKxP,EAAU5D,OAI4B,IAAD,IAA1C4D,GAAaA,EAAU/O,IAAI,eAC7B,YAAA+O,EAAU/O,IAAI,eAAa,QACjBm9B,GAASA,IAA0C,IAAjCA,EAAMn9B,IAAI,sBAA4B,QACvDm9B,IACP,GAAIt+B,EAAco/B,6BAA6B,CAACD,EAAUp0B,GAASuzB,EAAMn9B,IAAI,QAASm9B,EAAMn9B,IAAI,OAAQ,CACtGsQ,EAAI4P,WAAa5P,EAAI4P,YAAc,CAAC,EACpC,MAAMge,GAAaC,EAAAA,EAAAA,IAAahB,EAAO7sB,EAAI4P,cAGvCge,GAAeA,GAAkC,IAApBA,EAAWhwB,QAG1CoC,EAAI4P,WAAWid,EAAMn9B,IAAI,SAAW,GAExC,KAaN,GARAsQ,EAAI8tB,WAAa50B,IAAS3K,EAAcwC,OAAOE,WAE5Cgd,GAAMA,EAAGzJ,YACVxE,EAAIwE,YAAcyJ,EAAGzJ,YACbyJ,GAAMyf,GAAYp0B,IAC1B0G,EAAIwE,YAAc7L,EAAGo1B,KAAK9f,EAAIyf,EAAUp0B,IAGvC/K,EAAc2B,SAAU,CACzB,MAAMob,EAAa,GAAEoiB,KAAYp0B,IAEjC0G,EAAIiM,OAASrT,EAAcK,eAAeqS,IAAc1S,EAAcK,iBAEtE,MAAM+0B,EAAqBp1B,EAAc6gB,gBAAgB,CACvDxN,OAAQjM,EAAIiM,OACZX,cACCzQ,OACGozB,EAAkBr1B,EAAc6gB,gBAAgB,CAAExN,OAAQjM,EAAIiM,SAAUpR,OAE9EmF,EAAIyZ,gBAAkB,IAAYuU,GAAoBr8B,OAASq8B,EAAqBC,EAEpFjuB,EAAI+Y,mBA
AqBngB,EAAcmgB,mBAAmB2U,EAAUp0B,GACpE0G,EAAIuZ,oBAAsB3gB,EAAc2gB,oBAAoBmU,EAAUp0B,IAAW,MACjF,MAAMqY,EAAc/Y,EAAcwZ,iBAAiBsb,EAAUp0B,GACvD+Y,EAA8BzZ,EAAcyZ,4BAA4Bqb,EAAUp0B,GAEnD,IAAD,EAApC,GAAGqY,GAAeA,EAAY9W,KAC5BmF,EAAI2R,YAAc,UAAAA,GAAW,KAAXA,GAEbtU,GACKjB,EAAAA,IAAAA,MAAUiB,GACLA,EAAI3N,IAAI,SAEV2N,KAEV,QAEC,CAACtB,EAAOpH,KAAS,IAAcoH,GACV,IAAjBA,EAAMpK,SACL2jB,EAAAA,EAAAA,IAAavZ,KACbsW,EAA4B3iB,IAAIiF,KAEtCkG,YAEHmF,EAAI2R,YAAcA,CAEtB,CAEA,IAAIuc,EAAgB,IAAc,CAAC,EAAGluB,GACtCkuB,EAAgBv1B,EAAGw1B,aAAaD,GAEhCxuB,EAAY4tB,WAAWttB,EAAI0tB,SAAU1tB,EAAI1G,OAAQ40B,GASjDluB,EAAIzG,mBAP4BuyB,MAAOsC,IACrC,IAAIC,QAAuB90B,EAAmB+0B,WAAM,EAAM,CAACF,IACvDG,EAAuB,IAAc,CAAC,EAAGF,GAE7C,OADA3uB,EAAY6tB,kBAAkBvtB,EAAI0tB,SAAU1tB,EAAI1G,OAAQi1B,GACjDF,CAAc,EAIvBruB,EAAIxG,oBAAsBA,EAG1B,MAAMg1B,EAAY,MAGlB,OAAO71B,EAAG2F,QAAQ0B,GACjBvG,MAAM2G,IACLA,EAAIquB,SAAW,MAAaD,EAC5B9uB,EAAY2tB,YAAYrtB,EAAI0tB,SAAU1tB,EAAI1G,OAAQ8G,EAAI,IAEvDpG,OACCmM,IAEqB,oBAAhBA,EAAI1P,UACL0P,EAAIpX,KAAO,GACXoX,EAAI1P,QAAU,+IAEhBiJ,EAAY2tB,YAAYrtB,EAAI0tB,SAAU1tB,EAAI1G,OAAQ,CAChDlH,OAAO,EAAM+T,KAAKC,EAAAA,EAAAA,gBAAeD,IACjC,GAEL,EAKQ7H,GAAU,eAAE,KAAEE,EAAI,OAAElF,KAAWoF,GAAQ,uDAAC,CAAC,EAAC,OAAOzD,IAC5D,IAAMtC,IAAG,MAACU,GAAM,cAAE9K,EAAa,YAAEmR,GAAgBzE,EAC7CzJ,EAAOjD,EAAcosB,+BAA+B9f,OACpDgU,EAAStgB,EAAcmgC,gBAAgBlwB,EAAMlF,IAC7C,mBAAEyf,EAAkB,oBAAEQ,GAAwBhrB,EAAcogC,kBAAkB,CAACnwB,EAAMlF,IAASuB,OAC9F8xB,EAAQ,OAAOxoB,KAAK4U,GACpBnJ,EAAarhB,EAAcqgC,gBAAgB,CAACpwB,EAAMlF,GAASqzB,GAAO9xB,OAEtE,OAAO6E,EAAY+tB,eAAe,IAC7B/uB,EACHrF,QACA7H,OACAk8B,SAAUlvB,EACVlF,SAAQsW,aACRmJ,qBACAlK,SACA0K,uBACA,CACH,EAEM,SAASsV,GAAerwB,EAAMlF,GACnC,MAAO,CACL9J,KAAMy6B,EACNx0B,QAAQ,CAAE+I,OAAMlF,UAEpB,CAEO,SAASw1B,GAActwB,EAAMlF,GAClC,MAAO,CACL9J,KAAM06B,EACNz0B,QAAQ,CAAE+I,OAAMlF,UAEpB,CAEO,SAASy1B,GAAWlgB,EAAQrQ,EAAMlF,GACvC,MAAO,CACL9J,KAAM+6B,GACN90B,QAAS,CAAEoZ,SAAQrQ,OAAMlF,UAE7B,C,sGC5gBe,aACb,MAAO,CACLgC,aAAc,CACZ9J,KAAM,CACJkK,YAAW,EACXH,SAAQ,UACRC,QAAO,EACPC,UAASA,IAIjB,C,uKCeA,SAEE,CAAC8tB,EAAAA,aAAc,CAACp4B,EAAOyO,IACa,iBAAnBA,EAAOnK,QAClBtE,EAAM6K,IAAI,OAAQ4D,EAAOnK,SACzBtE,EAGN,CAACq4B,EAAAA,YAAa,CAACr4B,EAAOyO,IACbzO,EAAM6K,IAAI,MAAO4D,EAAOnK,QAAQ,IAGzC,CAACg0B,EAAAA,aAAc,CAACt4B,EAAOyO,IACdzO,EAAM6K,IAAI,QAAQgzB,EAAAA,EAAAA,IAAcpvB,EAAOnK,UAGhD,CAAC40B,EAAAA,iBAAkB,CAACl5B,EAAOyO,IAClBzO,EAAMqL,MAAM,CAAC,aAAawyB,EAAAA,EAAAA,IAAcpvB,EAAOnK,UAGxD,CAAC60B,EAAAA,yBAA0B,CAACn5B,EAAOyO,KACjC,MAAM,MAAE7D,EAAK,KAAEyC,GAASoB,EAAOnK,QAC/B,OAAOtE,EAAMqL,MAAM,CAAC,sBAAuBgC,IAAOwwB,EAAAA,EAAAA,IAAcjzB,GAAO,EAGzE,CAAC2tB,EAAAA,cAAe,CAAEv4B,EAAO,KAAgB,IAAhB,QAACsE,GAAQ,GAC1B+I,KAAMgN,EAAU,UAAEihB,EAAS,QAAEC,EAAO,MAAEG,EAAK,MAAE9wB,EAAK,MAAE4wB,GAAUl3B,EAEhEw5B,EAAWpC,GAAQqC,EAAAA,EAAAA,IAAkBrC,GAAU,GAAEH,KAAWD,IAEhE,MAAMxU,EAAW0U,EAAQ,YAAc,QAEvC,OAAOx7B,EAAMqL,MACX,CAAC,OAAQ,WAAYgP,EAAY,aAAcyjB,EAAUhX,GACzDlc,EACD,EAGH,CAAC4tB,EAAAA,8BAA+B,CAAEx4B,EAAO,KAAgB,IAAhB,QAACsE,GAAQ,GAC5C,WAAE+V,EAAU,UAAEihB,EAAS,QAAEC,EAAO,kBAAEO,GAAsBx3B,EAE5D,IAAIg3B,IAAcC,EAEhB,OADAr4B,QAAQC,KAAK,wEACNnD,EAGT,MAAM89B,EAAY,GAAEvC,KAAWD,IAE/B,OAAOt7B,EAAMqL,MACX,CAAC,OAAQ,WAAYgP,EAAY,uBAAwByjB,GACzDhC,EACD,EAGH,CAACrD,EAAAA,iBAAkB,CAAEz4B,EAAO,KAA0C,IAAxCsE,SAAS,WAAE+V,EAAU,OAAEtb,IAAU,EAC7D,MAAM+d,GAAK0M,EAAAA,EAAAA,8BAA6BxpB,GAAO2K,MAAM,CAAC,WAAY0P,IAC5D2jB,GAAcP,EAAAA,EAAAA,iBAAgBz9B,EAAOqa,GAAY3Q,OAEvD,OAAO1J,EAAMknB,SAAS,CAAC,OAAQ,WAAY7M,EAAY,eAAetP,EAAAA,EAAAA,QAAO,CAAC,IAAIkzB,IAAc,IAAD,EAC7F,OAAO,MAAAnhB,EAAGve,IAAI,cAAc0N,EAAAA,EAAAA,UAAO,QAAQ,CAACgD,EAAKysB,KAC/C,MAAM9wB,GAAQ8xB,EAAAA,EAAAA,IAAahB,EAAOsC,GAC5BE,GAAuB1B,EAAAA,EAAAA,8BAA6Bx8B,EAAOqa,EAAYqhB,EAAMn9B,IAA
I,QAASm9B,EAAMn9B,IAAI,OACpG4W,GAASgpB,EAAAA,EAAAA,IAAczC,EAAO9wB,EAAO,CACzCwzB,oBAAqBF,EACrBn/B,WAEF,OAAOkQ,EAAI5D,MAAM,EAAC0yB,EAAAA,EAAAA,IAAkBrC,GAAQ,WAAW3wB,EAAAA,EAAAA,QAAOoK,GAAQ,GACrE8oB,EAAU,GACb,EAEJ,CAACjF,EAAAA,uBAAwB,CAAEh5B,EAAO,KAAmC,IAAjCsE,SAAU,WAAE+V,IAAc,EAC5D,OAAOra,EAAMknB,SAAU,CAAE,OAAQ,WAAY7M,EAAY,eAAgBtP,EAAAA,EAAAA,QAAO,KAAK0T,GAC5E,IAAAA,GAAU,KAAVA,GAAeid,GAASA,EAAM7wB,IAAI,UAAUE,EAAAA,EAAAA,QAAO,QAC1D,EAGJ,CAAC2tB,EAAAA,cAAe,CAAC14B,EAAO,KAAwC,IAC1DwL,GADoBlH,SAAS,IAAE2K,EAAG,KAAE5B,EAAI,OAAElF,IAAU,EAGtDqD,EADGyD,EAAIhO,MACE,IAAc,CACrBA,OAAO,EACPrD,KAAMqR,EAAI+F,IAAIpX,KACd0H,QAAS2J,EAAI+F,IAAI1P,QACjB+4B,WAAYpvB,EAAI+F,IAAIqpB,YACnBpvB,EAAI+F,IAAIzM,UAEF0G,EAIXzD,EAAOpF,QAAUoF,EAAOpF,SAAW,CAAC,EAEpC,IAAIk4B,EAAWt+B,EAAMqL,MAAO,CAAE,YAAagC,EAAMlF,IAAU01B,EAAAA,EAAAA,IAAcryB,IAMzE,OAHI3L,EAAAA,EAAAA,MAAYoP,EAAI3H,gBAAgBzH,EAAAA,EAAAA,OAClCy+B,EAAWA,EAASjzB,MAAO,CAAE,YAAagC,EAAMlF,EAAQ,QAAU8G,EAAI3H,OAEjEg3B,CAAQ,EAGjB,CAAC3F,EAAAA,aAAc,CAAC34B,EAAO,KAAwC,IAAtCsE,SAAS,IAAEuK,EAAG,KAAExB,EAAI,OAAElF,IAAU,EACvD,OAAOnI,EAAMqL,MAAO,CAAE,WAAYgC,EAAMlF,IAAU01B,EAAAA,EAAAA,IAAchvB,GAAK,EAGvE,CAAC+pB,EAAAA,qBAAsB,CAAC54B,EAAO,KAAwC,IAAtCsE,SAAS,IAAEuK,EAAG,KAAExB,EAAI,OAAElF,IAAU,EAC/D,OAAOnI,EAAMqL,MAAO,CAAE,kBAAmBgC,EAAMlF,IAAU01B,EAAAA,EAAAA,IAAchvB,GAAK,EAG9E,CAACoqB,EAAAA,6BAA8B,CAACj5B,EAAO,KAAuC,IAArCsE,SAAS,KAAE+I,EAAI,MAAEzC,EAAK,IAAEpH,IAAO,EAElE+6B,EAAgB,CAAC,WAAYlxB,GAC7BmxB,EAAW,CAAC,OAAQ,WAAYnxB,GAEpC,OACGrN,EAAM2K,MAAM,CAAC,UAAW4zB,KACrBv+B,EAAM2K,MAAM,CAAC,cAAe4zB,KAC5Bv+B,EAAM2K,MAAM,CAAC,sBAAuB4zB,IAMnCv+B,EAAMqL,MAAM,IAAImzB,EAAUh7B,IAAMuH,EAAAA,EAAAA,QAAOH,IAHrC5K,CAG4C,EAGvD,CAAC84B,EAAAA,gBAAiB,CAAC94B,EAAO,KAAmC,IAAjCsE,SAAS,KAAE+I,EAAI,OAAElF,IAAU,EACrD,OAAOnI,EAAMy+B,SAAU,CAAE,YAAapxB,EAAMlF,GAAS,EAGvD,CAAC4wB,EAAAA,eAAgB,CAAC/4B,EAAO,KAAmC,IAAjCsE,SAAS,KAAE+I,EAAI,OAAElF,IAAU,EACpD,OAAOnI,EAAMy+B,SAAU,CAAE,WAAYpxB,EAAMlF,GAAS,EAGtD,CAACixB,EAAAA,YAAa,CAACp5B,EAAO,KAA2C,IAAzCsE,SAAS,OAAEoZ,EAAM,KAAErQ,EAAI,OAAElF,IAAU,EACzD,OAAKkF,GAAQlF,EACJnI,EAAMqL,MAAO,CAAE,SAAUgC,EAAMlF,GAAUuV,GAG7CrQ,GAASlF,OAAd,EACSnI,EAAMqL,MAAO,CAAE,SAAU,kBAAoBqS,EACtD,E,m7CCvKJ,MAEMghB,EAAoB,CACxB,MAAO,MAAO,OAAQ,SAAU,UAAW,OAAQ,QAAS,SAGxD1+B,EAAQA,GACLA,IAASiL,EAAAA,EAAAA,OAGLiM,GAAYtL,EAAAA,EAAAA,gBACvB5L,GACAK,GAAQA,EAAK9B,IAAI,eAGNqB,GAAMgM,EAAAA,EAAAA,gBACjB5L,GACAK,GAAQA,EAAK9B,IAAI,SAGNk7B,GAAU7tB,EAAAA,EAAAA,gBACrB5L,GACAK,GAAQA,EAAK9B,IAAI,SAAW,KAGjBogC,GAAa/yB,EAAAA,EAAAA,gBACxB5L,GACAK,GAAQA,EAAK9B,IAAI,eAAiB,eAGvBkM,GAAWmB,EAAAA,EAAAA,gBACtB5L,GACAK,GAAQA,EAAK9B,IAAI,QAAQ0M,EAAAA,EAAAA,UAGdke,GAAevd,EAAAA,EAAAA,gBAC1B5L,GACAK,GAAQA,EAAK9B,IAAI,YAAY0M,EAAAA,EAAAA,UAGlB4c,EAAsB,CAAC7nB,EAAOqN,IAClCrN,EAAM2K,MAAM,CAAC,sBAAuB0C,QAAOrO,GAG9C4/B,EAAW,CAACC,EAAQlY,IACrB1b,EAAAA,IAAAA,MAAU4zB,IAAW5zB,EAAAA,IAAAA,MAAU0b,GAC7BA,EAAOpoB,IAAI,SAGLooB,GAGFtE,EAAAA,EAAAA,cAAayc,UAClBF,EACAC,EACAlY,GAIGA,EAGI6C,GAA+B5d,EAAAA,EAAAA,gBAC1C5L,GACAK,IAAQgiB,EAAAA,EAAAA,cAAayc,UACnBF,EACAv+B,EAAK9B,IAAI,QACT8B,EAAK9B,IAAI,uBAKA8B,EAAOL,GACRyK,EAASzK,GAIRjB,GAAS6M,EAAAA,EAAAA,gBAKpBvL,GACD,KAAM,IAGM+Y,GAAOxN,EAAAA,EAAAA,gBAClBvL,GACDA,GAAQ0+B,GAAmB1+B,GAAQA,EAAK9B,IAAI,WAGhCygC,GAAepzB,EAAAA,EAAAA,gBAC1BvL,GACDA,GAAQ0+B,GAAmB1+B,GAAQA,EAAK9B,IAAI,mBAGhC0gC,GAAUrzB,EAAAA,EAAAA,gBACtBwN,GACAA,GAAQA,GAAQA,EAAK7a,IAAI,aAGb2gC,GAAStzB,EAAAA,EAAAA,gBACrBqzB,GACAA,IAAO,aAAI,wCAAkCE,KAAKF,IAAQ,OAAO,EAAE,IAGvDG,GAAQxzB,EAAAA,EAAAA,gBACpB4d,GACAnpB,GAAQA,EAAK9B,IAAI,WAGL8gC,GAAazzB,EAAAA,EAAAA,gBACxBwzB,GACAA,IACE,IAAIA,GAASA,EAAM3yB,KAAO,EACxB,OAAOR,EAAAA,EAAAA,QAET,IAAID,GAAOC,EAAAA,EAAAA,Q
AEX,OAAImzB,GAAU,IAAAA,IAId,IAAAA,GAAK,KAALA,GAAc,CAAC/xB,EAAMkvB,KACnB,IAAIlvB,IAAS,IAAAA,GACX,MAAO,CAAC,EAEV,IAAAA,GAAI,KAAJA,GAAa,CAACC,EAAWnF,KACpB,IAAAu2B,GAAiB,KAAjBA,EAA0Bv2B,GAAU,IAGvC6D,EAAOA,EAAKG,MAAKpB,EAAAA,EAAAA,QAAO,CACtBsC,KAAMkvB,EACNp0B,SACAmF,YACAgyB,GAAK,GAAEn3B,KAAUo0B,OAChB,GACH,IAGGvwB,IApBEC,EAAAA,EAAAA,OAoBE,IAIF0d,GAAW/d,EAAAA,EAAAA,gBACtBvL,GACAA,IAAQk/B,EAAAA,EAAAA,KAAIl/B,EAAK9B,IAAI,eAGVqrB,GAAWhe,EAAAA,EAAAA,gBACtBvL,GACAA,IAAQk/B,EAAAA,EAAAA,KAAIl/B,EAAK9B,IAAI,eAGV4M,GAAWS,EAAAA,EAAAA,gBACpBvL,GACAA,GAAQA,EAAK9B,IAAI,YAAY0N,EAAAA,EAAAA,WAGpBF,GAAsBH,EAAAA,EAAAA,gBAC/BvL,GACAA,GAAQA,EAAK9B,IAAI,yBAIRjB,EAAiB,CAAE0C,EAAOpC,KACrC,MAAM4hC,EAAcx/B,EAAM2K,MAAM,CAAC,mBAAoB,cAAe/M,GAAO,MACrE6hC,EAAgBz/B,EAAM2K,MAAM,CAAC,OAAQ,cAAe/M,GAAO,MACjE,OAAO4hC,GAAeC,GAAiB,IAAI,EAGhC3zB,GAAcF,EAAAA,EAAAA,gBACzBvL,GACAA,IACE,MAAM4O,EAAM5O,EAAK9B,IAAI,eACrB,OAAO0M,EAAAA,IAAAA,MAAUgE,GAAOA,GAAMhE,EAAAA,EAAAA,MAAK,IAI1Bye,GAAW9d,EAAAA,EAAAA,gBACpBvL,GACAA,GAAQA,EAAK9B,IAAI,cAGRkrB,GAAO7d,EAAAA,EAAAA,gBAChBvL,GACAA,GAAQA,EAAK9B,IAAI,UAGRsrB,GAAUje,EAAAA,EAAAA,gBACnBvL,GACAA,GAAQA,EAAK9B,IAAI,WAAW0M,EAAAA,EAAAA,UAGnBy0B,IAA8B9zB,EAAAA,EAAAA,gBACzCyzB,EACA1V,EACAC,GACA,CAACyV,EAAY1V,EAAUC,IACd,IAAAyV,GAAU,KAAVA,GAAgBM,GAAOA,EAAIhyB,OAAO,aAAamP,IACpD,GAAGA,EAAI,CACL,IAAI7R,EAAAA,IAAAA,MAAU6R,GAAO,OACrB,OAAOA,EAAGrR,eAAeqR,IACjBA,EAAGve,IAAI,aACXue,EAAGnP,OAAO,YAAY0G,IAAKkrB,EAAAA,EAAAA,KAAIlrB,GAAG3F,MAAMib,KAEpC7M,EAAGve,IAAI,aACXue,EAAGnP,OAAO,YAAY0G,IAAKkrB,EAAAA,EAAAA,KAAIlrB,GAAG3F,MAAMkb,KAEnC9M,IAEX,CAEE,OAAO7R,EAAAA,EAAAA,MACT,QAMO20B,IAAOh0B,EAAAA,EAAAA,gBAClBvL,GACAm3B,IACE,MAAMoI,EAAOpI,EAAKj5B,IAAI,QAAQ0N,EAAAA,EAAAA,SAC9B,OAAOA,EAAAA,KAAAA,OAAY2zB,GAAQ,IAAAA,GAAI,KAAJA,GAAYxsB,GAAOnI,EAAAA,IAAAA,MAAUmI,MAAQnH,EAAAA,EAAAA,OAAM,IAI7D4zB,GAAa,CAAC7/B,EAAOoT,KAAS,IAAD,EACxC,IAAI0sB,EAAcF,GAAK5/B,KAAUiM,EAAAA,EAAAA,QACjC,OAAO,UAAA6zB,GAAW,KAAXA,EAAmB70B,EAAAA,IAAAA,QAAU,QAAM2qB,GAAKA,EAAEr3B,IAAI,UAAY6U,IAAKnI,EAAAA,EAAAA,OAAM,EAGjE80B,IAAqBn0B,EAAAA,EAAAA,gBAChC8zB,GACAE,IACA,CAACP,EAAYO,IACJ,IAAAP,GAAU,KAAVA,GAAmB,CAACW,EAAWljB,KACpC,IAAI8iB,GAAOL,EAAAA,EAAAA,KAAIziB,EAAGnS,MAAM,CAAC,YAAY,UACrC,OAAGi1B,EAAKxW,QAAU,EACT4W,EAAUryB,OAhPL,WAgPyB1B,EAAAA,EAAAA,SAAQg0B,GAAMA,EAAG9zB,KAAK2Q,KACtD,IAAA8iB,GAAI,KAAJA,GAAa,CAAC3wB,EAAKmE,IAAQnE,EAAItB,OAAOyF,GAAKnH,EAAAA,EAAAA,SAASg0B,GAAOA,EAAG9zB,KAAK2Q,MAAMkjB,EAAW,GAC1F,IAAAJ,GAAI,KAAJA,GAAa,CAACI,EAAW5sB,IACnB4sB,EAAUn1B,IAAIuI,EAAI7U,IAAI,SAAS0N,EAAAA,EAAAA,WACpCoW,EAAAA,EAAAA,kBAIK3J,GAAoB1Y,GAAW,IAAoB,IAAD,MAAnB,WAAEvC,GAAY,GACpD,WAAEyiC,EAAU,iBAAEC,GAAqB1iC,IACvC,OAAO,MAAAsiC,GAAmB//B,GACvB4W,QACC,CAAC1K,EAAK1I,IAAQA,IACd,CAAC48B,EAAMC,KACL,IAAIC,EAAgC,mBAAfJ,EAA4BA,EAAaK,EAAAA,GAAAA,WAAoBL,GAClF,OAASI,EAAgBA,EAAOF,EAAMC,GAApB,IAAyB,KAE9C,QACI,CAACV,EAAKvsB,KACT,IAAIktB,EAAsC,mBAArBH,EAAkCA,EAAmBI,EAAAA,GAAAA,iBAA0BJ,GAChGd,EAAeiB,EAAe,IAAAX,GAAG,KAAHA,EAASW,GAAfX,EAE5B,OAAO10B,EAAAA,EAAAA,KAAI,CAAE40B,WAAYA,GAAW7/B,EAAOoT,GAAMisB,WAAYA,GAAa,GAC1E,EAGOmB,IAAY50B,EAAAA,EAAAA,gBACvB5L,GACAA,GAASA,EAAMzB,IAAK,aAAa0M,EAAAA,EAAAA,UAGtBw1B,IAAW70B,EAAAA,EAAAA,gBACpB5L,GACAA,GAASA,EAAMzB,IAAK,YAAY0M,EAAAA,EAAAA,UAGvBy1B,IAAkB90B,EAAAA,EAAAA,gBAC3B5L,GACAA,GAASA,EAAMzB,IAAK,mBAAmB0M,EAAAA,EAAAA,UAG9B01B,GAAc,CAAC3gC,EAAOqN,EAAMlF,IAChCq4B,GAAUxgC,GAAO2K,MAAM,CAAC0C,EAAMlF,GAAS,MAGnCy4B,GAAa,CAAC5gC,EAAOqN,EAAMlF,IAC/Bs4B,GAASzgC,GAAO2K,MAAM,CAAC0C,EAAMlF,GAAS,MAGlC04B,GAAoB,CAAC7gC,EAAOqN,EAAMlF,IACtCu4B,GAAgB1gC,GAAO2K,MAAM,CAAC0C,EAAMlF,GAAS,MAGzC24B,GAAmB,KAEvB,EAGIC,GAA8B,CAAC/gC,EAAOqa,EAAYqhB,KAC7D,MAAMsF,EAAWxX,EAA6BxpB,GAAO2K
,MAAM,CAAC,WAAY0P,EAAY,eAAegI,EAAAA,EAAAA,eAC7F4e,EAAajhC,EAAM2K,MAAM,CAAC,OAAQ,WAAY0P,EAAY,eAAegI,EAAAA,EAAAA,eAEzE6e,EAAe,IAAAF,GAAQ,KAARA,GAAcG,IACjC,MAAMC,EAAkBH,EAAW1iC,IAAK,GAAEm9B,EAAMn9B,IAAI,SAASm9B,EAAMn9B,IAAI,WACjE8iC,EAAgBJ,EAAW1iC,IAAK,GAAEm9B,EAAMn9B,IAAI,SAASm9B,EAAMn9B,IAAI,gBAAgBm9B,EAAM4F,cAC3F,OAAOjf,EAAAA,EAAAA,cAAa3T,MAClByyB,EACAC,EACAC,EACD,IAEH,OAAO,IAAAH,GAAY,KAAZA,GAAkB3Z,GAAQA,EAAKhpB,IAAI,QAAUm9B,EAAMn9B,IAAI,OAASgpB,EAAKhpB,IAAI,UAAYm9B,EAAMn9B,IAAI,UAAS8jB,EAAAA,EAAAA,cAAa,EAGjHma,GAA+B,CAACx8B,EAAOqa,EAAYihB,EAAWC,KACzE,MAAMuC,EAAY,GAAEvC,KAAWD,IAC/B,OAAOt7B,EAAM2K,MAAM,CAAC,OAAQ,WAAY0P,EAAY,uBAAwByjB,IAAW,EAAM,EAIlFyD,GAAoB,CAACvhC,EAAOqa,EAAYihB,EAAWC,KAC9D,MAAMyF,EAAWxX,EAA6BxpB,GAAO2K,MAAM,CAAC,WAAY0P,EAAY,eAAegI,EAAAA,EAAAA,eAC7F8e,EAAe,IAAAH,GAAQ,KAARA,GAActF,GAASA,EAAMn9B,IAAI,QAAUg9B,GAAWG,EAAMn9B,IAAI,UAAY+8B,IAAWjZ,EAAAA,EAAAA,eAC5G,OAAO0e,GAA4B/gC,EAAOqa,EAAY8mB,EAAa,EAGxDK,GAAoB,CAACxhC,EAAOqN,EAAMlF,KAAY,IAAD,EACxD,MAAM2U,EAAK0M,EAA6BxpB,GAAO2K,MAAM,CAAC,QAAS0C,EAAMlF,IAASka,EAAAA,EAAAA,eACxEof,EAAOzhC,EAAM2K,MAAM,CAAC,OAAQ,QAAS0C,EAAMlF,IAASka,EAAAA,EAAAA,eAEpD6e,EAAe,MAAApkB,EAAGve,IAAI,cAAc0N,EAAAA,EAAAA,UAAO,QAAMyvB,GAC9CqF,GAA4B/gC,EAAO,CAACqN,EAAMlF,GAASuzB,KAG5D,OAAOrZ,EAAAA,EAAAA,cACJ3T,MAAMoO,EAAI2kB,GACV52B,IAAI,aAAcq2B,EAAa,EAI7B,SAASQ,GAAa1hC,EAAOqa,EAAYzc,EAAM+jC,GACpDtnB,EAAaA,GAAc,GAC3B,IAAIunB,EAAS5hC,EAAM2K,MAAM,CAAC,OAAQ,WAAY0P,EAAY,eAAetP,EAAAA,EAAAA,QAAO,KAChF,OAAO,IAAA62B,GAAM,KAANA,GAAcvrB,GACZpL,EAAAA,IAAAA,MAAUoL,IAAMA,EAAE9X,IAAI,UAAYX,GAAQyY,EAAE9X,IAAI,QAAUojC,MAC7D12B,EAAAA,EAAAA,MACR,CAEO,MAAMse,IAAU3d,EAAAA,EAAAA,gBACrBvL,GACAA,IACE,MAAMopB,EAAOppB,EAAK9B,IAAI,QACtB,MAAuB,iBAATkrB,GAAqBA,EAAKjpB,OAAS,GAAiB,MAAZipB,EAAK,EAAU,IAKlE,SAASgU,GAAgBz9B,EAAOqa,EAAYmhB,GACjDnhB,EAAaA,GAAc,GAC3B,IAAI2jB,EAAcwD,GAAkBxhC,KAAUqa,GAAY9b,IAAI,cAAc0N,EAAAA,EAAAA,SAC5E,OAAO,IAAA+xB,GAAW,KAAXA,GAAoB,CAACruB,EAAM0G,KAChC,IAAIzL,EAAQ4wB,GAAyB,SAAhBnlB,EAAE9X,IAAI,MAAmB8X,EAAE9X,IAAI,aAAe8X,EAAE9X,IAAI,SACzE,OAAOoR,EAAK9E,KAAIkzB,EAAAA,EAAAA,IAAkB1nB,EAAG,CAAEwrB,aAAa,IAAUj3B,EAAM,IACnEG,EAAAA,EAAAA,QAAO,CAAC,GACb,CAGO,SAAS+2B,GAAoBrjB,GAAyB,IAAbsjB,EAAO,uDAAC,GACtD,GAAG91B,EAAAA,KAAAA,OAAYwS,GACb,OAAO,IAAAA,GAAU,KAAVA,GAAiBpI,GAAKpL,EAAAA,IAAAA,MAAUoL,IAAMA,EAAE9X,IAAI,QAAUwjC,GAEjE,CAGO,SAASC,GAAsBvjB,GAA2B,IAAfwjB,EAAS,uDAAC,GAC1D,GAAGh2B,EAAAA,KAAAA,OAAYwS,GACb,OAAO,IAAAA,GAAU,KAAVA,GAAiBpI,GAAKpL,EAAAA,IAAAA,MAAUoL,IAAMA,EAAE9X,IAAI,UAAY0jC,GAEnE,CAGO,SAASzE,GAAkBx9B,EAAOqa,GACvCA,EAAaA,GAAc,GAC3B,IAAIyC,EAAK0M,EAA6BxpB,GAAO2K,MAAM,CAAC,WAAY0P,IAAatP,EAAAA,EAAAA,QAAO,CAAC,IACjF02B,EAAOzhC,EAAM2K,MAAM,CAAC,OAAQ,WAAY0P,IAAatP,EAAAA,EAAAA,QAAO,CAAC,IAC7Dm3B,EAAgBC,GAAmBniC,EAAOqa,GAE9C,MAAMoE,EAAa3B,EAAGve,IAAI,eAAiB,IAAI0N,EAAAA,KAEzC2b,EACJ6Z,EAAKljC,IAAI,kBAAoBkjC,EAAKljC,IAAI,kBAClCyjC,GAAsBvjB,EAAY,QAAU,sBAC5CujB,GAAsBvjB,EAAY,YAAc,yCAChDzf,EAGN,OAAO+L,EAAAA,EAAAA,QAAO,CACZ6c,qBACAQ,oBAAqB8Z,GAEzB,CAGO,SAASC,GAAmBniC,EAAOqa,GACxCA,EAAaA,GAAc,GAE3B,MAAM/M,EAAYkc,EAA6BxpB,GAAO2K,MAAM,CAAE,WAAY0P,GAAa,MAEvF,GAAiB,OAAd/M,EAED,OAGF,MAAM80B,EAAuBpiC,EAAM2K,MAAM,CAAC,OAAQ,WAAY0P,EAAY,kBAAmB,MACvFgoB,EAAyB/0B,EAAU3C,MAAM,CAAC,WAAY,GAAI,MAEhE,OAAOy3B,GAAwBC,GAA0B,kBAE3D,CAGO,SAASC,GAAmBtiC,EAAOqa,GACxCA,EAAaA,GAAc,GAE3B,MAAMha,EAAOmpB,EAA6BxpB,GACpCsN,EAAYjN,EAAKsK,MAAM,CAAE,WAAY0P,GAAa,MAExD,GAAiB,OAAd/M,EAED,OAGF,MAAOD,GAAQgN,EAETkoB,EAAoBj1B,EAAU/O,IAAI,WAAY,MAC9CikC,EAAmBniC,EAAKsK,MAAM,CAAC,QAAS0C,EAAM,YAAa,MAC3Do1B,EAAiBpiC,EAAKsK,MAAM,CAAC,YAAa,MAEhD,OAAO43B,GAAqBC,GAAoBC,CAClD,CAGO,SAASC,GAAmB1iC,EAAOqa,GACxCA,EAAaA,GAAc,GAE3B,MAAM
ha,EAAOmpB,EAA6BxpB,GACpCsN,EAAYjN,EAAKsK,MAAM,CAAC,WAAY0P,GAAa,MAEvD,GAAkB,OAAd/M,EAEF,OAGF,MAAOD,GAAQgN,EAETsoB,EAAoBr1B,EAAU/O,IAAI,WAAY,MAC9CqkC,EAAmBviC,EAAKsK,MAAM,CAAC,QAAS0C,EAAM,YAAa,MAC3Dw1B,EAAiBxiC,EAAKsK,MAAM,CAAC,YAAa,MAEhD,OAAOg4B,GAAqBC,GAAoBC,CAClD,CAEO,MAAMtF,GAAkB,CAAEv9B,EAAOqN,EAAMlF,KAC5C,IACI26B,EADM9iC,EAAMzB,IAAI,OACEwkC,MAAM,0BACxBC,EAAY,IAAcF,GAAeA,EAAY,GAAK,KAE9D,OAAO9iC,EAAM2K,MAAM,CAAC,SAAU0C,EAAMlF,KAAYnI,EAAM2K,MAAM,CAAC,SAAU,oBAAsBq4B,GAAa,EAAE,EAGjGC,GAAmB,CAAEjjC,EAAOqN,EAAMlF,KAAa,IAAD,EACzD,OAAO,OAAC,OAAQ,UAAQ,OAASo1B,GAAgBv9B,EAAOqN,EAAMlF,KAAY,CAAC,EAGhE6S,GAAmB,CAAChb,EAAOqa,KACtCA,EAAaA,GAAc,GAC3B,IAAI2jB,EAAch+B,EAAM2K,MAAM,CAAC,OAAQ,WAAY0P,EAAY,eAAetP,EAAAA,EAAAA,QAAO,KACrF,MAAMS,EAAS,GASf,OAPA,IAAAwyB,GAAW,KAAXA,GAAsB3nB,IACpB,IAAIlB,EAASkB,EAAE9X,IAAI,UACd4W,GAAUA,EAAOiU,SACpB,IAAAjU,GAAM,KAANA,GAAgBrM,GAAK0C,EAAOW,KAAKrD,IACnC,IAGK0C,CAAM,EAGFkd,GAAwB,CAAC1oB,EAAOqa,IACW,IAA/CW,GAAiBhb,EAAOqa,GAAY7Z,OAGhC0iC,GAAwC,CAACljC,EAAOqa,KAAgB,IAAD,EAC1E,IAAI8oB,EAAc,CAChB3iB,aAAa,EACboH,mBAAoB,CAAC,GAEnBpH,EAAcxgB,EAAM2K,MAAM,CAAC,mBAAoB,WAAY0P,EAAY,gBAAgBtP,EAAAA,EAAAA,QAAO,KAClG,OAAIyV,EAAY/T,KAAO,IAGnB+T,EAAY7V,MAAM,CAAC,eACrBw4B,EAAY3iB,YAAcA,EAAY7V,MAAM,CAAC,cAE/C,MAAA6V,EAAY7V,MAAM,CAAC,YAAYO,YAAU,QAAUkW,IACjD,MAAM5d,EAAM4d,EAAY,GACxB,GAAIA,EAAY,GAAGzW,MAAM,CAAC,SAAU,aAAc,CAChD,MAAMuB,EAAMkV,EAAY,GAAGzW,MAAM,CAAC,SAAU,aAAajB,OACzDy5B,EAAYvb,mBAAmBpkB,GAAO0I,CACxC,MAVOi3B,CAYS,EAGPC,GAAmC,CAAEpjC,EAAOqa,EAAYsN,EAAkB0b,KACrF,IAAI1b,GAAoB0b,IAAoB1b,IAAqB0b,EAC/D,OAAO,EAET,IAAIjhB,EAAqBpiB,EAAM2K,MAAM,CAAC,mBAAoB,WAAY0P,EAAY,cAAe,YAAYtP,EAAAA,EAAAA,QAAO,KACpH,GAAIqX,EAAmB3V,KAAO,IAAMkb,IAAqB0b,EAEvD,OAAO,EAET,IAAIC,EAAmClhB,EAAmBzX,MAAM,CAACgd,EAAkB,SAAU,eAAe5c,EAAAA,EAAAA,QAAO,KAC/Gw4B,EAAkCnhB,EAAmBzX,MAAM,CAAC04B,EAAiB,SAAU,eAAet4B,EAAAA,EAAAA,QAAO,KACjH,QAASu4B,EAAiCE,OAAOD,EAAgC,EAGnF,SAASxE,GAAmBhgB,GAE1B,OAAO9T,EAAAA,IAAAA,MAAU8T,GAAOA,EAAM,IAAI9T,EAAAA,GACpC,C,2LCvhBO,MAAMiJ,EAAa,CAACrE,EAAK,KAAF,IAAE,YAACtB,GAAY,SAAK,WAChDsB,KAAO,WACPtB,EAAYirB,eAAe,UAC7B,CAAC,EAEY7O,EAAiB,CAAC9a,EAAK,KAAF,IAAE,YAACtB,GAAY,SAAK,WAAc,IAAD,uBAATkC,EAAI,yBAAJA,EAAI,gBAC5DZ,KAAOY,GAEPlC,EAAYotB,iCAGZ,MAAOnE,GAAQ/mB,EACTgzB,EAAYllC,IAAIi5B,EAAM,CAAC,WAAa,CAAC,EACrCkM,EAAe,IAAYD,GAEjC,IAAAC,GAAY,KAAZA,GAAqB5sB,IACPvY,IAAIklC,EAAW,CAAC3sB,IAErB6sB,MACLp1B,EAAY6sB,uBAAuB,CAAC,QAAStkB,GAC/C,IAIFvI,EAAY6sB,uBAAuB,CAAC,aAAc,mBACpD,CAAC,EAGYkB,EAAiB,CAACzsB,EAAK,KAAF,IAAE,YAAEtB,GAAa,SAAMM,IACvDN,EAAY8tB,WAAWxtB,GAChBgB,EAAIhB,GACZ,EAEY+sB,EAAiB,CAAC/rB,EAAK,KAAF,IAAE,cAAEzS,GAAe,SAAMyR,GAClDgB,EAAIhB,EAAKzR,EAAc2B,SAC/B,C,2DCrCM,MAAMiC,EAAS,CAAC6O,EAAK/F,IAAW,WACrC+F,KAAO,WACP,MAAMjF,EAAQd,EAAOrM,aAAammC,qBAErB5kC,IAAV4L,IACDd,EAAOtC,GAAGU,MAAM07B,gBAAmC,iBAAVh5B,EAAgC,SAAVA,IAAsBA,EAEzF,C,4DCPA,MAAM,EAA+B9N,QAAQ,8B,aCA7C,MAAM,EAA+BA,QAAQ,6BCAvC,EAA+BA,QAAQ,0B,aCA7C,MAAM,EAA+BA,QAAQ,sC,iCCO9B,cAAmC,IAA1B,QAAE0R,EAAO,WAAE/Q,GAAY,EAC7C,MAAO,CACL+J,GAAI,CACFU,OAAO27B,EAAAA,EAAAA,UAASC,IAAMt1B,EAAQu1B,SAAUv1B,EAAQw1B,WAChDhH,aAAY,eACZ7vB,QAAO,UACP4sB,QAAO,IACPa,eAAgB,SAAC7b,EAAK1R,EAAM42B,GAC1B,QAAYjlC,IAATilC,EAAoB,CACrB,MAAMC,EAAezmC,IACrBwmC,EAAO,CACLhK,mBAAoBiK,EAAajK,mBACjCC,eAAgBgK,EAAahK,eAC7B9xB,mBAAoB87B,EAAa97B,mBACjCC,oBAAqB67B,EAAa77B,oBAEtC,CAAC,2BATkC87B,EAAI,iCAAJA,EAAI,kBAWvC,OAAOvJ,IAAe7b,EAAK1R,EAAM42B,KAASE,EAC5C,EACAC,aAAY,eACZxH,KAAIA,EAAAA,MAENzyB,aAAc,CACZqE,QAAS,CACPjE,YAAa,CACXvJ,OAAMA,EAAAA,UAKhB,C,0ECpCe,aACb,MAAO,CACLwG,GAAI,CAAE68B,iBAAgB,MAE1B,C,mECNO,MAAM9S,EAAkBD,GAAqBA,EAAiBvzB,aAAeuzB,EAAiB1zB,MAAQ,W,0HCM7G,MA2BA,EAjBoB,IAA0C,IAA1C,cAAC0mC,
EAAa,SAAEC,EAAQ,UAAE3rB,GAAU,EAEtD,MAAM4rB,GAZwBh9B,GAYiBhK,EAAAA,EAAAA,cAAaob,EAAW2rB,EAAUD,IAV1EG,EAAAA,EAAAA,IAAQj9B,GADE,sCAAIiJ,EAAI,yBAAJA,EAAI,uBAAK,IAAeA,EAAK,KADrB,IAACjJ,EAa9B,MAAMk9B,EAR8B,CAACl9B,IAE9B0wB,EAAAA,EAAAA,GAAS1wB,GADC,sCAAIiJ,EAAI,yBAAJA,EAAI,uBAAKA,CAAI,IAOHk0B,EAA8BC,EAAAA,EAAAA,qBAAoBhsB,EAAW2rB,EAAUC,IAEtG,MAAO,CACLz6B,YAAa,CACXvM,aAAcgnC,EACdK,oBAAqBH,EACrBnnC,QAAQA,EAAAA,EAAAA,QAAOqb,EAAW2rB,EAAU/mC,EAAAA,aAAc8mC,IAEpD98B,GAAI,CACF+pB,eAAcA,EAAAA,gBAEjB,C,oKC9BH,MAAM,EAA+Bz0B,QAAQ,a,uBCA7C,MAAM,EAA+BA,QAAQ,eCAvC,EAA+BA,QAAQ,e,aCA7C,MAAM,EAA+BA,QAAQ,mB,aCO7C,MAAMgoC,EAAclsB,GAAe0Y,IACjC,MAAM,GAAE9pB,GAAOoR,IAEf,MAAMmsB,UAAmBzmB,EAAAA,UACvB/gB,SACE,OAAO,kBAAC+zB,EAAgB,OAAK1Y,IAAiB/b,KAAKQ,MAAWR,KAAK4C,SACrE,EAGF,OADAslC,EAAWhnC,YAAe,cAAayJ,EAAG+pB,eAAeD,MAClDyT,CAAU,EAGbC,EAAW,CAACpsB,EAAWqsB,IAAgB3T,IAC3C,MAAM,GAAE9pB,GAAOoR,IAEf,MAAMssB,UAAiB5mB,EAAAA,UACrB/gB,SACE,OACE,kBAAC,EAAA4nC,SAAQ,CAACC,MAAOH,GACf,kBAAC3T,EAAgB,OAAKz0B,KAAKQ,MAAWR,KAAK4C,UAGjD,EAGF,OADAylC,EAASnnC,YAAe,YAAWyJ,EAAG+pB,eAAeD,MAC9C4T,CAAQ,EAGXG,EAAc,CAACzsB,EAAW0Y,EAAkB2T,KAOzCK,EAAAA,EAAAA,SACLL,EAAaD,EAASpsB,EAAWqsB,GAAcM,KAC/CC,EAAAA,EAAAA,UARsB,CAACxlC,EAAOylC,KAAc,IAAD,EAC3C,MAAMpoC,EAAQ,IAAIooC,KAAa7sB,KACzB8sB,GAAkD,QAA1B,EAAApU,EAAiB3R,iBAAS,aAA1B,EAA4BgS,kBAAe,CAAK3xB,IAAK,CAAMA,WACzF,OAAO0lC,EAAsB1lC,EAAO3C,EAAM,IAM1CynC,EAAWlsB,GAHN0sB,CAILhU,GAGEqU,EAAc,CAAC/sB,EAAWkd,EAASz4B,EAAOuoC,KAC9C,IAAK,MAAM7iB,KAAQ+S,EAAS,CAC1B,MAAMtuB,EAAKsuB,EAAQ/S,GAED,mBAAPvb,GACTA,EAAGnK,EAAM0lB,GAAO6iB,EAAS7iB,GAAOnK,IAEpC,GAGWgsB,EAAsB,CAAChsB,EAAW2rB,EAAUC,IAAoB,CAACqB,EAAe/P,KAC3F,MAAM,GAAEtuB,GAAOoR,IACT0Y,EAAmBkT,EAAgBqB,EAAe,QAExD,MAAMC,UAA4BxnB,EAAAA,UAChC9e,YAAYnC,EAAOoC,GACjBC,MAAMrC,EAAOoC,GACbkmC,EAAY/sB,EAAWkd,EAASz4B,EAAO,CAAC,EAC1C,CAEA6C,iCAAiCC,GAC/BwlC,EAAY/sB,EAAWkd,EAAS31B,EAAWtD,KAAKQ,MAClD,CAEAE,SACE,MAAMwoC,EAAaC,IAAKnpC,KAAKQ,MAAOy4B,EAAU,IAAYA,GAAW,IACrE,OAAO,kBAACxE,EAAqByU,EAC/B,EAGF,OADAD,EAAoB/nC,YAAe,uBAAsByJ,EAAG+pB,eAAeD,MACpEwU,CAAmB,EAGfvoC,EAAS,CAACqb,EAAW2rB,EAAU/mC,EAAc8mC,IAAmB2B,IAC3E,MAAMC,EAAM1oC,EAAaob,EAAW2rB,EAAUD,EAAlC9mC,CAAiD,MAAO,QACpE2oC,IAAAA,OAAgB,kBAACD,EAAG,MAAID,EAAQ,EAGrBzoC,EAAe,CAACob,EAAW2rB,EAAUD,IAAkB,SAACuB,EAAe7zB,GAA4B,IAAjB4B,EAAS,UAAH,6CAAG,CAAC,EAEvG,GAA6B,iBAAlBiyB,EACT,MAAM,IAAIO,UAAU,2DAA6DP,GAKnF,MAAMpU,EAAY6S,EAAcuB,GAEhC,OAAKpU,EAODzf,EAIa,SAAdA,EACMqzB,EAAYzsB,EAAW6Y,EAAW8S,KAIpCc,EAAYzsB,EAAW6Y,GARrBA,GAPF7d,EAAOyyB,cACVztB,IAAYO,IAAIhW,KAAK,4BAA6B0iC,GAE7C,KAaX,C,qGClHA,MAAM,EAA+B/oC,QAAQ,2C,aCA7C,MAAM,EAA+BA,QAAQ,+D,aCA7C,MAAM,EAA+BA,QAAQ,yD,aCA7C,MAAM,EAA+BA,QAAQ,wD,aCA7C,MAAM,EAA+BA,QAAQ,yD,aCA7C,MAAM,EAA+BA,QAAQ,yD,aCA7C,MAAM,EAA+BA,QAAQ,yD,aCA7C,MAAM,EAA+BA,QAAQ,+D,aCA7C,MAAM,EAA+BA,QAAQ,uD,aCA7C,MAAM,EAA+BA,QAAQ,sD,aCA7C,MAAM,EAA+BA,QAAQ,yD,aCA7C,MAAM,EAA+BA,QAAQ,sD,aCA7C,MAAM,EAA+BA,QAAQ,0D,aCA7C,MAAM,EAA+BA,QAAQ,gE,aCiB7CwpC,IAAAA,iBAAmC,OAAQ9O,KAC3C8O,IAAAA,iBAAmC,KAAMC,KACzCD,IAAAA,iBAAmC,MAAOnS,KAC1CmS,IAAAA,iBAAmC,OAAQr4B,KAC3Cq4B,IAAAA,iBAAmC,OAAQE,KAC3CF,IAAAA,iBAAmC,OAAQG,KAC3CH,IAAAA,iBAAmC,aAAcI,KACjDJ,IAAAA,iBAAmC,aAAcK,KAEjD,MAAMC,EAAS,CAACC,MAAK,IAAEC,KAAI,IAAEC,QAAO,IAAEC,KAAI,IAAEC,SAAQ,IAAE,iBAAkBC,KAC3DC,EAAkB,IAAYP,GAE9BrX,EAAW3xB,GACf,IAAAupC,GAAe,KAAfA,EAAyBvpC,GAIvBgpC,EAAOhpC,IAHVsF,QAAQC,KAAM,kBAAiBvF,kDACxBipC,I,0vBChCf,MAAM,EAA+B/pC,QAAQ,2BCAvC,EAA+BA,QAAQ,oB,aCA7C,MAAM,EAA+BA,QAAQ,qB,+BCA7C,MAAM,EAA+BA,QAAQ,e,aCA7C,MAAM,EAA+BA,QAAQ,e,aCA7C,MAAM,EAA+BA,QAAQ,a,oDCA7C,MAAM,GAA+BA,QAAQ,c,+CCA7C,MAAM,GAA+BA,QAAQ,U,sDC8B7C,MAAMsqC,GAAuB,UAEhBC,GAAeC,GAAUz1B,IAAAA,SAAAA,WAAuBy1B,GAEtD,SAAS7U,GAAW1a
,GACzB,OAAIwvB,GAASxvB,GAEVsvB,GAAYtvB,GACNA,EAAMrO,OACRqO,EAHE,CAAC,CAIZ,CAYO,SAAS8lB,GAAc0I,GAAK,IAAD,EAUT,EATvB,GAAIc,GAAYd,GACd,OAAOA,EAET,GAAIA,aAAc1mC,EAAAA,EAAAA,KAChB,OAAO0mC,EAET,IAAKgB,GAAShB,GACZ,OAAOA,EAET,GAAI,IAAcA,GAChB,OAAO,MAAA10B,IAAAA,IAAO00B,IAAG,OAAK1I,IAAe2J,SAEvC,GAAIla,IAAW,IAAAiZ,IAAa,CAAC,IAAD,EAE1B,MAAMkB,EAwBH,SAAkCC,GACvC,IAAKpa,IAAW,IAAAoa,IACd,OAAOA,EAET,MAAMC,EAAS,CAAC,EACV7c,EAAU,QACV8c,EAAY,CAAC,EACnB,IAAK,IAAI5R,KAAQ,IAAA0R,GAAK,KAALA,GACf,GAAKC,EAAO3R,EAAK,KAAS4R,EAAU5R,EAAK,KAAO4R,EAAU5R,EAAK,IAAI6R,iBAE5D,CACL,IAAKD,EAAU5R,EAAK,IAAK,CAEvB4R,EAAU5R,EAAK,IAAM,CACnB6R,kBAAkB,EAClBrnC,OAAQ,GAIVmnC,EADsB,GAAE3R,EAAK,KAAKlL,IAAU8c,EAAU5R,EAAK,IAAIx1B,UACtCmnC,EAAO3R,EAAK,WAE9B2R,EAAO3R,EAAK,GACrB,CACA4R,EAAU5R,EAAK,IAAIx1B,QAAU,EAE7BmnC,EADwB,GAAE3R,EAAK,KAAKlL,IAAU8c,EAAU5R,EAAK,IAAIx1B,UACtCw1B,EAAK,EAClC,MAjBE2R,EAAO3R,EAAK,IAAMA,EAAK,GAmB3B,OAAO2R,CACT,CArD8BG,CAAwBvB,GAClD,OAAO,MAAA10B,IAAAA,WAAc41B,IAAkB,OAAK5J,GAC9C,CACA,OAAO,MAAAhsB,IAAAA,WAAc00B,IAAG,OAAK1I,GAC/B,CA2DO,SAAS7lB,GAAezB,GAC7B,OAAG,IAAcA,GACRA,EACF,CAACA,EACV,CAEO,SAASwxB,GAAKvgC,GACnB,MAAqB,mBAAPA,CAChB,CAEO,SAAS+/B,GAASxoB,GACvB,QAASA,GAAsB,iBAARA,CACzB,CAEO,SAAS3T,GAAO2M,GACrB,MAAyB,mBAAXA,CAChB,CAEO,SAASiwB,GAAQjwB,GACtB,OAAO,IAAcA,EACvB,CAGO,MAAM0sB,GAAUwD,IAEhB,SAASC,GAAOnpB,EAAKvX,GAAK,IAAD,EAC9B,OAAO,UAAYuX,IAAI,QAAQ,CAAC4oB,EAAQnkC,KACtCmkC,EAAOnkC,GAAOgE,EAAGuX,EAAIvb,GAAMA,GACpBmkC,IACN,CAAC,EACN,CAEO,SAASQ,GAAUppB,EAAKvX,GAAK,IAAD,EACjC,OAAO,UAAYuX,IAAI,QAAQ,CAAC4oB,EAAQnkC,KACtC,IAAIyL,EAAMzH,EAAGuX,EAAIvb,GAAMA,GAGvB,OAFGyL,GAAsB,iBAARA,GACf,IAAc04B,EAAQ14B,GACjB04B,CAAM,GACZ,CAAC,EACN,CAGO,SAASS,GAAsBxvB,GACpC,OAAQ,IAA4B,IAA5B,SAAEyvB,EAAQ,SAAE/rB,GAAU,EAC5B,OAAOtN,GAAQP,GACS,mBAAXA,EACFA,EAAOmK,KAGT5J,EAAKP,EACb,CAEL,CAEO,SAAS65B,GAAoB9H,GAAa,IAAD,EAC9C,IAAI+H,EAAQ/H,EAAU9zB,SACtB,OAAO67B,EAAM57B,SAASy6B,IAAwBA,GAAuB,UAAAmB,GAAK,KAALA,GAAc/kC,GAAuB,OAAfA,EAAI,IAAI,MAAW,QAAQuJ,OACxH,CASO,SAASy7B,GAAQC,EAAUjU,GAChC,IAAI3iB,IAAAA,SAAAA,WAAuB42B,GACzB,OAAO52B,IAAAA,OAET,IAAI3F,EAAMu8B,EAAS99B,MAAM,IAAc6pB,GAAQA,EAAO,CAACA,IACvD,OAAO3iB,IAAAA,KAAAA,OAAe3F,GAAOA,EAAM2F,IAAAA,MACrC,CAsCO,SAAS62B,GAA4C99B,GAC1D,IAOI+9B,EAPAC,EAAW,CACb,oCACA,kCACA,wBACA,uBASF,GALA,IAAAA,GAAQ,KAARA,GAAcC,IACZF,EAAmBE,EAAM1J,KAAKv0B,GACF,OAArB+9B,KAGgB,OAArBA,GAA6BA,EAAiBnoC,OAAS,EACzD,IACE,OAAOsP,mBAAmB64B,EAAiB,GAG7C,CAFE,MAAM7/B,GACN5F,QAAQjC,MAAM6H,EAChB,CAGF,OAAO,IACT,CAQO,SAASpF,GAAmBolC,GACjC,OANyBhmC,EAMPgmC,EAAS5rC,QAAQ,YAAa,IALzC6rC,IAAWC,IAAUlmC,IADvB,IAAoBA,CAO3B,CA8IA,SAASmmC,GAAsBr+B,EAAOlN,EAAQwrC,EAAiB9K,EAAqB+K,GAClF,IAAIzrC,EAAQ,MAAO,GACnB,IAAIyX,EAAS,GACTi0B,EAAW1rC,EAAOa,IAAI,YACtB8qC,EAAmB3rC,EAAOa,IAAI,YAC9B04B,EAAUv5B,EAAOa,IAAI,WACrBu4B,EAAUp5B,EAAOa,IAAI,WACrBF,EAAOX,EAAOa,IAAI,QAClB2kB,EAASxlB,EAAOa,IAAI,UACpB44B,EAAYz5B,EAAOa,IAAI,aACvB64B,EAAY15B,EAAOa,IAAI,aACvB+qC,EAAc5rC,EAAOa,IAAI,eACzBq2B,EAAWl3B,EAAOa,IAAI,YACtBs2B,EAAWn3B,EAAOa,IAAI,YACtB2zB,EAAUx0B,EAAOa,IAAI,WAEzB,MAAMgrC,EAAsBL,IAAwC,IAArBG,EACzCG,EAAW5+B,QAkBjB,GARwBw+B,GAAsB,OAAVx+B,IAK9BvM,KATJkrC,GAHwCC,GAAqB,UAATnrC,MAFhCkrC,IAAwBC,IAkB5C,MAAO,GAIT,IAAIC,EAAuB,WAATprC,GAAqBuM,EACnC8+B,EAAsB,UAATrrC,GAAoB,IAAcuM,IAAUA,EAAMpK,OAC/DmpC,EAA0B,UAATtrC,GAAoBwT,IAAAA,KAAAA,OAAejH,IAAUA,EAAMwe,QASxE,MAAMwgB,EAAY,CAChBH,EAAaC,EAAYC,EATK,UAATtrC,GAAqC,iBAAVuM,GAAsBA,EAC/C,SAATvM,GAAmBuM,aAAiB/K,EAAAA,EAAAA,KACxB,YAATxB,IAAuBuM,IAAmB,IAAVA,GACxB,WAATvM,IAAsBuM,GAAmB,IAAVA,GACrB,YAATvM,IAAuBuM,GAAmB,IAAVA,GACxB,WAATvM,GAAsC,iBAAVuM,GAAgC,OAAVA,EACnC,WAATvM,GAAsC,iBAAVuM,GAAsBA,GAOpEi/B,EAAiB,IAAAD,GAAS,KAATA,GAAehuB,KA
AOA,IAE7C,GAAI2tB,IAAwBM,IAAmBzL,EAE7C,OADAjpB,EAAOhJ,KAAK,kCACLgJ,EAET,GACW,WAAT9W,IAC+B,OAA9B8qC,GAC+B,qBAA9BA,GACF,CACA,IAAIW,EAAYl/B,EAChB,GAAoB,iBAAVA,EACR,IACEk/B,EAAYthC,KAAKC,MAAMmC,EAIzB,CAHE,MAAO9B,GAEP,OADAqM,EAAOhJ,KAAK,6CACLgJ,CACT,CASsC,IAAD,EAAvC,GAPGzX,GAAUA,EAAO6lB,IAAI,aAAenY,GAAOi+B,EAAiBU,SAAWV,EAAiBU,UACzF,IAAAV,GAAgB,KAAhBA,GAAyB7lC,SACDxE,IAAnB8qC,EAAUtmC,IACX2R,EAAOhJ,KAAK,CAAE69B,QAASxmC,EAAKvC,MAAO,+BACrC,IAGDvD,GAAUA,EAAO6lB,IAAI,cACtB,MAAA7lB,EAAOa,IAAI,eAAa,QAAS,CAAC2N,EAAK1I,KACrC,MAAMymC,EAAOhB,GAAsBa,EAAUtmC,GAAM0I,GAAK,EAAOkyB,EAAqB+K,GACpFh0B,EAAOhJ,QAAQ,IAAA89B,GAAI,KAAJA,GACPhpC,IAAU,CAAG+oC,QAASxmC,EAAKvC,YAAU,GAGnD,CAEA,GAAIixB,EAAS,CACX,IAAIld,EApGuB,EAAC9I,EAAKg+B,KAEnC,IADW,IAAIzhB,OAAOyhB,GACZl3B,KAAK9G,GACX,MAAO,6BAA+Bg+B,CAC1C,EAgGYC,CAAgBv/B,EAAOsnB,GAC7Bld,GAAKG,EAAOhJ,KAAK6I,EACvB,CAEA,GAAI6f,GACW,UAATx2B,EAAkB,CACpB,IAAI2W,EA5HsB,EAAC9I,EAAK2qB,KACpC,IAAK3qB,GAAO2qB,GAAO,GAAK3qB,GAAOA,EAAI1L,OAASq2B,EACxC,MAAQ,+BAA8BA,SAAmB,IAARA,EAAY,GAAK,KACtE,EAyHcuT,CAAiBx/B,EAAOiqB,GAC9B7f,GAAKG,EAAOhJ,KAAK6I,EACvB,CAGF,GAAI4f,GACW,UAATv2B,EAAkB,CACpB,IAAI2W,EA7HsB,EAAC9I,EAAK8qB,KACpC,GAAI9qB,GAAOA,EAAI1L,OAASw2B,EACtB,MAAQ,oCAAmCA,SAAmB,IAARA,EAAY,GAAK,KACzE,EA0HcqT,CAAiBz/B,EAAOgqB,GAC9B5f,GAAKG,EAAOhJ,KAAK,CAAEm+B,YAAY,EAAMrpC,MAAO+T,GAClD,CAGF,GAAIs0B,GACW,UAATjrC,EAAkB,CACpB,IAAIksC,EAhKyB,EAACr+B,EAAKo9B,KACvC,GAAKp9B,IAGe,SAAhBo9B,IAA0C,IAAhBA,GAAsB,CAClD,MAAMt9B,GAAOjB,EAAAA,EAAAA,QAAOmB,GACdrB,EAAMmB,EAAKw+B,QAEjB,GADsBt+B,EAAI1L,OAASqK,EAAI4B,KACrB,CAChB,IAAIg+B,GAAiBlL,EAAAA,EAAAA,OAMrB,GALA,IAAAvzB,GAAI,KAAJA,GAAa,CAAC0+B,EAAMv0B,KACf,IAAAnK,GAAI,KAAJA,GAAY4P,GAAKxQ,GAAOwQ,EAAE4nB,QAAU5nB,EAAE4nB,OAAOkH,GAAQ9uB,IAAM8uB,IAAMj+B,KAAO,IACzEg+B,EAAiBA,EAAeE,IAAIx0B,GACtC,IAEyB,IAAxBs0B,EAAeh+B,KAChB,OAAO,IAAAg+B,GAAc,KAAdA,GAAmBt0B,IAAC,CAAMy0B,MAAOz0B,EAAGlV,MAAO,6BAA4B6kB,SAElF,CACF,GA6IuB+kB,CAAoBjgC,EAAO0+B,GAC1CiB,GAAcp1B,EAAOhJ,QAAQo+B,EACnC,CAGF,GAAIpT,GAA2B,IAAdA,EAAiB,CAChC,IAAIniB,EA5KyB,EAAC9I,EAAK8qB,KACrC,GAAI9qB,EAAI1L,OAASw2B,EACb,MAAQ,gCAA+BA,cAAwB,IAARA,EAAY,IAAM,IAC7E,EAyKY8T,CAAkBlgC,EAAOusB,GAC/BniB,GAAKG,EAAOhJ,KAAK6I,EACvB,CAEA,GAAIoiB,EAAW,CACb,IAAIpiB,EAzIyB,EAAC9I,EAAK2qB,KACrC,GAAI3qB,EAAI1L,OAASq2B,EACb,MAAQ,0BAAyBA,cAAwB,IAARA,EAAY,IAAM,IACvE,EAsIYkU,CAAkBngC,EAAOwsB,GAC/BpiB,GAAKG,EAAOhJ,KAAK6I,EACvB,CAEA,GAAIiiB,GAAuB,IAAZA,EAAe,CAC5B,IAAIjiB,EA7OuB,EAAE9I,EAAK8qB,KACpC,GAAI9qB,EAAM8qB,EACR,MAAQ,2BAA0BA,GACpC,EA0OYgU,CAAgBpgC,EAAOqsB,GAC7BjiB,GAAKG,EAAOhJ,KAAK6I,EACvB,CAEA,GAAI8hB,GAAuB,IAAZA,EAAe,CAC5B,IAAI9hB,EA5OuB,EAAE9I,EAAK2qB,KACpC,GAAI3qB,EAAM2qB,EACR,MAAQ,8BAA6BA,GACvC,EAyOYoU,CAAgBrgC,EAAOksB,GAC7B9hB,GAAKG,EAAOhJ,KAAK6I,EACvB,CAEA,GAAa,WAAT3W,EAAmB,CACrB,IAAI2W,EAQJ,GANEA,EADa,cAAXkO,EA9MwB,CAAChX,IAC7B,GAAI4M,MAAMuZ,KAAK5pB,MAAMyD,IACjB,MAAO,0BACX,EA4MQg/B,CAAiBtgC,GACH,SAAXsY,EA1Ma,CAAChX,IAEzB,GADAA,EAAMA,EAAIpM,WAAW6d,eAChB,2EAA2E3K,KAAK9G,GACjF,MAAO,sBACX,EAuMQi/B,CAAavgC,GAvNK,CAAEsB,IAC9B,GAAKA,GAAsB,iBAARA,EACjB,MAAO,wBACT,EAsNUk/B,CAAexgC,IAElBoK,EAAK,OAAOG,EACjBA,EAAOhJ,KAAK6I,EACd,MAAO,GAAa,YAAT3W,EAAoB,CAC7B,IAAI2W,EApOuB,CAAE9I,IAC/B,GAAe,SAARA,GAA0B,UAARA,IAA2B,IAARA,IAAwB,IAARA,EAC1D,MAAO,yBACT,EAiOYm/B,CAAgBzgC,GAC1B,IAAKoK,EAAK,OAAOG,EACjBA,EAAOhJ,KAAK6I,EACd,MAAO,GAAa,WAAT3W,EAAmB,CAC5B,IAAI2W,EA1PsB,CAAE9I,IAC9B,IAAK,mBAAmB8G,KAAK9G,GAC3B,MAAO,wBACT,EAuPYo/B,CAAe1gC,GACzB,IAAKoK,EAAK,OAAOG,EACjBA,EAAOhJ,KAAK6I,EACd,MAAO,GAAa,YAAT3W,EAAoB,CAC7B,IAAI2W,EAxPuB,CAAE9I,IAC/B,IAAK,UAAU8G,KAAK9G,GAClB,MAAO,0BACT,EAqPYq/B,CAAgB3gC,GAC1B,IAAKoK,EAAK,OAAOG,EACjBA,EAAOhJ,KAAK6I,EACd,MAAO,GAAa,UAAT3
W,EAAkB,CAC3B,IAAMqrC,IAAcC,EAClB,OAAOx0B,EAENvK,GACD,IAAAA,GAAK,KAALA,GAAc,CAAC8/B,EAAMv0B,KACnB,MAAM8zB,EAAOhB,GAAsByB,EAAMhtC,EAAOa,IAAI,UAAU,EAAO6/B,EAAqB+K,GAC1Fh0B,EAAOhJ,QAAQ,IAAA89B,GAAI,KAAJA,GACPj1B,IAAQ,CAAG41B,MAAOz0B,EAAGlV,MAAO+T,MAAQ,GAGlD,MAAO,GAAa,SAAT3W,EAAiB,CAC1B,IAAI2W,EAjQoB,CAAE9I,IAC5B,GAAKA,KAASA,aAAerM,EAAAA,EAAAA,MAC3B,MAAO,sBACT,EA8PY2rC,CAAa5gC,GACvB,IAAKoK,EAAK,OAAOG,EACjBA,EAAOhJ,KAAK6I,EACd,CAEA,OAAOG,CACT,CAGO,MAAMgpB,GAAgB,SAACzC,EAAO9wB,GAAiE,IAA1D,OAAE7L,GAAS,EAAK,oBAAEq/B,GAAsB,GAAU,UAAH,6CAAG,CAAC,EAEzFqN,EAAgB/P,EAAMn9B,IAAI,aAExBb,OAAQguC,EAAY,0BAAEvC,IAA8BwC,EAAAA,GAAAA,GAAmBjQ,EAAO,CAAE38B,WAEtF,OAAOkqC,GAAsBr+B,EAAO8gC,EAAcD,EAAerN,EAAqB+K,EACxF,EAEMyC,GAAqB,CAACluC,EAAQkW,EAAQ+f,KAI1C,GAHIj2B,IAAWA,EAAOy2B,MACpBz2B,EAAOy2B,IAAM,CAAC,GAEZz2B,IAAWA,EAAOy2B,IAAIv2B,KAAM,CAC9B,IAAKF,EAAOY,QAAUZ,EAAOW,MAAQX,EAAO+1B,OAAS/1B,EAAOy1B,YAAcz1B,EAAO22B,sBAC/E,MAAO,yHAET,GAAI32B,EAAOY,MAAO,CAChB,IAAIykC,EAAQrlC,EAAOY,MAAMykC,MAAM,eAC/BrlC,EAAOy2B,IAAIv2B,KAAOmlC,EAAM,EAC1B,CACF,CAEA,OAAO9K,EAAAA,EAAAA,0BAAyBv6B,EAAQkW,EAAQ+f,EAAgB,EAG5DkY,GAA6B,CACjC,CACEC,KAAM,OACNC,qBAAsB,CAAC,YAIrBC,GAAwB,CAAC,UAEzBC,GAAgC,CAACvuC,EAAQkW,EAAQwN,EAAauS,KAClE,MAAM1kB,GAAMkpB,EAAAA,EAAAA,0BAAyBz6B,EAAQkW,EAAQ+f,GAC/CuY,SAAiBj9B,EAEjBk9B,EAAmB,IAAAN,IAA0B,KAA1BA,IACvB,CAACz1B,EAAOg2B,IAAeA,EAAWN,KAAK94B,KAAKoO,GACxC,IAAIhL,KAAUg2B,EAAWL,sBACzB31B,GACJ41B,IAEF,OAAOK,IAAKF,GAAkB/W,GAAKA,IAAM8W,IACrC,IAAej9B,EAAK,KAAM,GAC1BA,CAAG,EAGHq9B,GAAsB,CAAC5uC,EAAQkW,EAAQwN,EAAauS,KACxD,MAAM4Y,EAAcN,GAA8BvuC,EAAQkW,EAAQwN,EAAauS,GAC/E,IAAI6Y,EACJ,IACEA,EAAat+B,KAAAA,KAAUA,KAAAA,KAAUq+B,GAAc,CAE7CE,WAAY,GACX,CAAE/uC,OAAQg8B,GAAAA,cAC4B,OAAtC8S,EAAWA,EAAWhsC,OAAS,KAChCgsC,EAAa,IAAAA,GAAU,KAAVA,EAAiB,EAAGA,EAAWhsC,OAAS,GAKzD,CAHE,MAAOsI,GAEP,OADA5F,QAAQjC,MAAM6H,GACP,wCACT,CACA,OAAO0jC,EACJtvC,QAAQ,MAAO,KAAK,EAGZ8jB,GAAkB,SAACtjB,GAAoE,IAA5D0jB,EAAW,uDAAC,GAAIxN,EAAM,uDAAC,CAAC,EAAG+f,EAAkB,UAAH,kDAAG30B,EAMnF,OALGtB,GAAU0N,GAAO1N,EAAOgM,QACzBhM,EAASA,EAAOgM,QACfiqB,GAAmBvoB,GAAOuoB,EAAgBjqB,QAC3CiqB,EAAkBA,EAAgBjqB,QAEhC,MAAMsJ,KAAKoO,GACNwqB,GAAmBluC,EAAQkW,EAAQ+f,GAExC,aAAa3gB,KAAKoO,GACbkrB,GAAoB5uC,EAAQkW,EAAQwN,EAAauS,GAEnDsY,GAA8BvuC,EAAQkW,EAAQwN,EAAauS,EACpE,EAEa+Y,GAAc,KACzB,IAAI1hC,EAAM,CAAC,EACPirB,EAASp2B,EAAAA,EAAAA,SAAAA,OAEb,IAAIo2B,EACF,MAAO,CAAC,EAEV,GAAe,IAAVA,EAAe,CAClB,IAAI2L,EAAS3L,EAAO0W,OAAO,GAAGv7B,MAAM,KAEpC,IAAK,IAAI+E,KAAKyrB,EACPvO,OAAO1T,UAAU2T,eAAeC,KAAKqO,EAAQzrB,KAGlDA,EAAIyrB,EAAOzrB,GAAG/E,MAAM,KACpBpG,EAAI8E,mBAAmBqG,EAAE,KAAQA,EAAE,IAAMrG,mBAAmBqG,EAAE,KAAQ,GAE1E,CAEA,OAAOnL,CAAG,EASCvE,GAAQ3D,IACnB,IAAI8pC,EAQJ,OALEA,EADE9pC,aAAe+pC,GACR/pC,EAEA+pC,GAAOC,KAAKhqC,EAAIhD,WAAY,SAGhC8sC,EAAO9sC,SAAS,SAAS,EAGrBygC,GAAU,CACrBJ,iBAAkB,CAChB4M,MAAO,CAAC14B,EAAG24B,IAAM34B,EAAE9V,IAAI,QAAQ0uC,cAAcD,EAAEzuC,IAAI,SACnD4J,OAAQ,CAACkM,EAAG24B,IAAM34B,EAAE9V,IAAI,UAAU0uC,cAAcD,EAAEzuC,IAAI,YAExD2hC,WAAY,CACV6M,MAAO,CAAC14B,EAAG24B,IAAM34B,EAAE44B,cAAcD,KAIxBpmC,GAAiBU,IAC5B,IAAI4lC,EAAU,GAEd,IAAK,IAAItvC,KAAQ0J,EAAM,CACrB,IAAI4E,EAAM5E,EAAK1J,QACHoB,IAARkN,GAA6B,KAARA,GACvBghC,EAAQ/gC,KAAK,CAACvO,EAAM,IAAKiD,mBAAmBqL,GAAKhP,QAAQ,OAAO,MAAMiJ,KAAK,IAE/E,CACA,OAAO+mC,EAAQ/mC,KAAK,IAAI,EAIbk+B,GAAmB,CAAChwB,EAAE24B,EAAGxY,MAC3B2Y,IAAK3Y,GAAOhxB,GACZ4pC,IAAG/4B,EAAE7Q,GAAMwpC,EAAExpC,MAIjB,SAASjD,GAAYX,GAC1B,MAAkB,iBAARA,GAA4B,KAARA,EACrB,IAGFytC,EAAAA,EAAAA,aAAqBztC,EAC9B,CAEO,SAASa,GAAsB6sC,GACpC,SAAKA,GAAO,IAAAA,GAAG,KAAHA,EAAY,cAAgB,GAAK,IAAAA,GAAG,KAAHA,EAAY,cAAgB,GAAa,SAARA,EAIhF,CAGO,SAASC,GAA6B/M,GAC3C,IAAI3uB,IAAAA,WAAAA,aAA2B2uB,GAE7B,OAAO,KAGT,IAAIA,EAAU/zB,KAEZ
,OAAO,KAGT,MAAM+gC,EAAsB,IAAAhN,GAAS,KAATA,GAAe,CAACvxB,EAAK6H,IACxC,IAAAA,GAAC,KAADA,EAAa,MAAQ,IAAY7H,EAAI1Q,IAAI,YAAc,CAAC,GAAGiC,OAAS,IAIvEitC,EAAkBjN,EAAUjiC,IAAI,YAAcsT,IAAAA,aAE9C67B,GAD6BD,EAAgBlvC,IAAI,YAAcsT,IAAAA,cAAiBnF,SAAShD,OACrClJ,OAASitC,EAAkB,KAErF,OAAOD,GAAuBE,CAChC,CAGO,MAAM18B,GAAsBlO,GAAsB,iBAAPA,GAAmBA,aAAe6qC,OAAS,IAAA7qC,GAAG,KAAHA,GAAW5F,QAAQ,MAAO,OAAS,GAEnH0wC,GAAsB9qC,GAAQ+qC,KAAW78B,GAAmBlO,GAAK5F,QAAQ,OAAQ,MAEjF4wC,GAAiBC,GAAW,IAAAA,GAAM,KAANA,GAAc,CAACnyB,EAAG9E,IAAM,MAAM9D,KAAK8D,KAC/DmM,GAAuB8qB,GAAW,IAAAA,GAAM,KAANA,GAAc,CAACnyB,EAAG9E,IAAM,+CAA+C9D,KAAK8D,KAMpH,SAAS6b,GAAeqb,EAAOC,GAAqC,IAAD,MAAxBC,EAAY,UAAH,6CAAG,KAAM,EAClE,GAAoB,iBAAVF,GAAsB,IAAcA,IAAoB,OAAVA,IAAmBC,EACzE,OAAOD,EAGT,MAAMjvB,EAAM,IAAc,CAAC,EAAGivB,GAU9B,OARA,UAAYjvB,IAAI,QAASjI,IACpBA,IAAMm3B,GAAcC,EAAUnvB,EAAIjI,GAAIA,UAChCiI,EAAIjI,GAGbiI,EAAIjI,GAAK6b,GAAe5T,EAAIjI,GAAIm3B,EAAYC,EAAU,IAGjDnvB,CACT,CAEO,SAASe,GAAU/H,GACxB,GAAqB,iBAAVA,EACT,OAAOA,EAOT,GAJIA,GAASA,EAAMrO,OACjBqO,EAAQA,EAAMrO,QAGK,iBAAVqO,GAAgC,OAAVA,EAC/B,IACE,OAAO,IAAeA,EAAO,KAAM,EAIrC,CAFA,MAAOjP,GACL,OAAO6kC,OAAO51B,EAChB,CAGF,OAAGA,QACM,GAGFA,EAAMjY,UACf,CAEO,SAASquC,GAAep2B,GAC7B,MAAoB,iBAAVA,EACDA,EAAMjY,WAGRiY,CACT,CAEO,SAASgmB,GAAkBrC,GAAwD,IAAjD,UAAE0S,GAAY,EAAK,YAAEvM,GAAc,GAAS,UAAH,6CAAG,CAAC,EACpF,IAAIhwB,IAAAA,IAAAA,MAAa6pB,GACf,MAAM,IAAI3yB,MAAM,+DAElB,MAAMuyB,EAAYI,EAAMn9B,IAAI,QACtBg9B,EAAUG,EAAMn9B,IAAI,MAE1B,IAAI8vC,EAAuB,GAgB3B,OAZI3S,GAASA,EAAM4F,UAAY/F,GAAWD,GAAauG,GACrDwM,EAAqBliC,KAAM,GAAEovB,KAAWD,UAAkBI,EAAM4F,cAG/D/F,GAAWD,GACZ+S,EAAqBliC,KAAM,GAAEovB,KAAWD,KAG1C+S,EAAqBliC,KAAKmvB,GAInB8S,EAAYC,EAAwBA,EAAqB,IAAM,EACxE,CAEO,SAAS3R,GAAahB,EAAOsC,GAAc,IAAD,EAC/C,MAAMsQ,EAAiBvQ,GAAkBrC,EAAO,CAAE0S,WAAW,IAU7D,OANe,UAAAE,GAAc,KAAdA,GACRhP,GACItB,EAAYsB,MACnB,QACM10B,QAAmB5L,IAAV4L,IAEL,EAChB,CAGO,SAAS2jC,KACd,OAAOC,GACLC,KAAY,IAAI3uC,SAAS,UAE7B,CAEO,SAAS4uC,GAAoBznC,GAClC,OAAOunC,GACHG,KAAM,UACLhhC,OAAO1G,GACP2nC,OAAO,UAEd,CAEA,SAASJ,GAAmB1rC,GAC1B,OAAOA,EACJ5F,QAAQ,MAAO,KACfA,QAAQ,MAAO,KACfA,QAAQ,KAAM,GACnB,CAEO,MAAMinB,GAAgBvZ,IACtBA,MAIDy8B,GAAYz8B,KAAUA,EAAM4lB,U,8BC74B3B,SAASlM,EAAkCpY,GAGhD,OAbK,SAAsBpJ,GAC3B,IAEE,QADuB0F,KAAKC,MAAM3F,EAKpC,CAHE,MAAOgG,GAEP,OAAO,IACT,CACF,CAIsB+lC,CAAa3iC,GACZ,OAAS,IAChC,C,+DCcA,QA5BA,WACE,IAAIrM,EAAM,CACR6P,SAAU,CAAC,EACXH,QAAS,CAAC,EACVu/B,KAAM,OACNC,MAAO,OACPC,KAAM,WAAY,GAGpB,GAAqB,oBAAXv/B,OACR,OAAO5P,EAGT,IACEA,EAAM4P,OAEN,IAAK,IAAIsT,IADG,CAAC,OAAQ,OAAQ,YAEvBA,KAAQtT,SACV5P,EAAIkjB,GAAQtT,OAAOsT,GAKzB,CAFE,MAAOja,GACP5F,QAAQjC,MAAM6H,EAChB,CAEA,OAAOjJ,CACT,CAEA,E,4GCtBA,MAAMovC,EAAqBp9B,IAAAA,IAAAA,GACzB,OACA,SACA,QACA,UACA,UACA,mBACA,UACA,mBACA,YACA,YACA,UACA,WACA,WACA,cACA,OACA,cAuBa,SAAS85B,EAAmBuD,GAA6B,IAAlB,OAAEnwC,GAAW,UAAH,6CAAG,CAAC,EAElE,IAAK8S,IAAAA,IAAAA,MAAaq9B,GAChB,MAAO,CACLxxC,OAAQmU,IAAAA,MACRs3B,0BAA2B,MAI/B,IAAKpqC,EAEH,MAA4B,SAAxBmwC,EAAU3wC,IAAI,MACT,CACLb,OAAQwxC,EAAU3wC,IAAI,SAAUsT,IAAAA,OAChCs3B,0BAA2B,MAGtB,CACLzrC,OAAQ,IAAAwxC,GAAS,KAATA,GAAiB,CAACtzB,EAAG9E,IAAM,IAAAm4B,GAAkB,KAAlBA,EAA4Bn4B,KAC/DqyB,0BAA2B,MAOjC,GAAI+F,EAAU3wC,IAAI,WAAY,CAC5B,MAIM4qC,EAJ6B+F,EAChC3wC,IAAI,UAAWsT,IAAAA,IAAO,CAAC,IACvBnF,SAE0DK,QAE7D,MAAO,CACLrP,OAAQwxC,EAAUvkC,MAChB,CAAC,UAAWw+B,EAA2B,UACvCt3B,IAAAA,OAEFs3B,4BAEJ,CAEA,MAAO,CACLzrC,OAAQwxC,EAAU3wC,IAAI,UAAY2wC,EAAU3wC,IAAI,SAAUsT,IAAAA,OAAWA,IAAAA,MACrEs3B,0BAA2B,KAE/B,C,iJC3FA,MAAM,EAA+BrsC,QAAQ,6D,kDCS7C,MAAMqyC,EAAsB96B,GAAO24B,GAC1B,IAAc34B,IAAM,IAAc24B,IACpC34B,EAAE7T,SAAWwsC,EAAExsC,QACf,IAAA6T,GAAC,KAADA,GAAQ,CAACnI,EAAK0+B,IAAU1+B,IAAQ8gC,EAAEpC,KAGnC5+B,EAAO,sCAAIyE,EAAI,yBAAJA,EAAI,uBAAKA
,CAAI,EAE9B,MAAM2+B,UAAc,KAClB1jC,OAAOlI,GACL,MAAMgxB,EAAO,IAAW,IAAA33B,MAAI,KAAJA,OAClBwyC,EAAW,IAAA7a,GAAI,KAAJA,EAAU2a,EAAmB3rC,IAC9C,OAAO9D,MAAMgM,OAAO2jC,EACtB,CAEA9wC,IAAIiF,GACF,MAAMgxB,EAAO,IAAW,IAAA33B,MAAI,KAAJA,OAClBwyC,EAAW,IAAA7a,GAAI,KAAJA,EAAU2a,EAAmB3rC,IAC9C,OAAO9D,MAAMnB,IAAI8wC,EACnB,CAEA9rB,IAAI/f,GACF,MAAMgxB,EAAO,IAAW,IAAA33B,MAAI,KAAJA,OACxB,OAAoD,IAA7C,IAAA23B,GAAI,KAAJA,EAAe2a,EAAmB3rC,GAC3C,EAGF,MAWA,EAXiB,SAACgE,GAAyB,IAArBqwB,EAAW,UAAH,6CAAG7rB,EAC/B,MAAQojC,MAAOE,GAAkB7K,IACjCA,IAAAA,MAAgB2K,EAEhB,MAAMG,EAAW9K,IAAQj9B,EAAIqwB,GAI7B,OAFA4M,IAAAA,MAAgB6K,EAETC,CACT,C,iBC7CA,IAAIvkC,EAAM,CACT,WAAY,KACZ,oBAAqB,KACrB,kBAAmB,KACnB,qBAAsB,KACtB,sBAAuB,GACvB,8BAA+B,KAC/B,uBAAwB,IACxB,uBAAwB,KACxB,qBAAsB,KACtB,wBAAyB,KACzB,yBAA0B,KAC1B,4BAA6B,KAC7B,4BAA6B,KAC7B,0BAA2B,KAC3B,2BAA4B,KAC5B,2CAA4C,KAC5C,uCAAwC,IACxC,oBAAqB,KACrB,mBAAoB,KACpB,mCAAoC,KACpC,uDAAwD,KACxD,2DAA4D,KAC5D,iBAAkB,KAClB,oBAAqB,KACrB,qBAAsB,KACtB,oBAAqB,KACrB,wBAAyB,KACzB,sBAAuB,KACvB,oBAAqB,KACrB,uBAAwB,KACxB,wBAAyB,KACzB,4CAA6C,KAC7C,kBAAmB,KACnB,oBAAqB,KACrB,2CAA4C,KAC5C,kCAAmC,KACnC,kCAAmC,KACnC,6BAA8B,KAC9B,uCAAwC,KACxC,0CAA2C,KAC3C,4CAA6C,KAC7C,qCAAsC,KACtC,0CAA2C,KAC3C,gCAAiC,KACjC,qBAAsB,KACtB,kBAAmB,KACnB,qBAAsB,KACtB,sBAAuB,KACvB,sCAAuC,KACvC,2CAA4C,KAC5C,uCAAwC,IACxC,kCAAmC,KACnC,gDAAiD,IACjD,sCAAuC,KACvC,mCAAoC,KACpC,mDAAoD,GACpD,2CAA4C,KAC5C,yBAA0B,KAC1B,2BAA4B,KAC5B,8BAA+B,KAC/B,0CAA2C,KAC3C,kCAAmC,KACnC,8CAA+C,KAC/C,wCAAyC,KACzC,uBAAwB,KACxB,yBAA0B,KAC1B,kBAAmB,KACnB,qBAAsB,KACtB,oBAAqB,KACrB,kBAAmB,KACnB,qBAAsB,GACtB,sBAAuB,KACvB,yBAA0B,KAC1B,uCAAwC,KACxC,wBAAyB,KACzB,kBAAmB,KACnB,eAAgB,KAChB,kBAAmB,KACnB,0BAA2B,IAC3B,sBAAuB,KACvB,+BAAgC,KAChC,6BAA8B,KAC9B,gCAAiC,KACjC,iCAAkC,GAClC,yCAA0C,KAC1C,kCAAmC,IACnC,kCAAmC,KACnC,gCAAiC,KACjC,mCAAoC,KACpC,oCAAqC,KACrC,uCAAwC,KACxC,uCAAwC,KACxC,qCAAsC,KACtC,sCAAuC,KACvC,sDAAuD,KACvD,kDAAmD,IACnD,+BAAgC,KAChC,8BAA+B,KAC/B,8CAA+C,KAC/C,kEAAmE,KACnE,sEAAuE,KACvE,4BAA6B,KAC7B,+BAAgC,KAChC,gCAAiC,KACjC,+BAAgC,KAChC,mCAAoC,KACpC,iCAAkC,KAClC,+BAAgC,KAChC,kCAAmC,KACnC,mCAAoC,KACpC,uDAAwD,KACxD,6BAA8B,KAC9B,+BAAgC,KAChC,sDAAuD,KACvD,6CAA8C,KAC9C,6CAA8C,KAC9C,wCAAyC,KACzC,kDAAmD,KACnD,qDAAsD,KACtD,uDAAwD,KACxD,gDAAiD,KACjD,qDAAsD,KACtD,2CAA4C,KAC5C,gCAAiC,KACjC,6BAA8B,KAC9B,gCAAiC,KACjC,iCAAkC,KAClC,iDAAkD,KAClD,sDAAuD,KACvD,kDAAmD,IACnD,6CAA8C,KAC9C,2DAA4D,IAC5D,iDAAkD,KAClD,8CAA+C,KAC/C,8DAA+D,GAC/D,sDAAuD,KACvD,oCAAqC,KACrC,sCAAuC,KACvC,yCAA0C,KAC1C,qDAAsD,KACtD,6CAA8C,KAC9C,yDAA0D,KAC1D,mDAAoD,KACpD,kCAAmC,KACnC,oCAAqC,KACrC,6BAA8B,KAC9B,gCAAiC,KACjC,+BAAgC,KAChC,6BAA8B,KAC9B,gCAAiC,GACjC,iCAAkC,KAClC,oCAAqC,KACrC,kDAAmD,KACnD,mCAAoC,KACpC,6BAA8B,KAC9B,0BAA2B,KAC3B,6BAA8B,KAC9B,qCAAsC,KAIvC,SAASwkC,EAAe3gC,GACvB,IAAIywB,EAAKmQ,EAAsB5gC,GAC/B,OAAO6gC,EAAoBpQ,EAC5B,CACA,SAASmQ,EAAsB5gC,GAC9B,IAAI6gC,EAAoBnY,EAAEvsB,EAAK6D,GAAM,CACpC,IAAI/F,EAAI,IAAIC,MAAM,uBAAyB8F,EAAM,KAEjD,MADA/F,EAAE5B,KAAO,mBACH4B,CACP,CACA,OAAOkC,EAAI6D,EACZ,CACA2gC,EAAehb,KAAO,WACrB,OAAOnB,OAAOmB,KAAKxpB,EACpB,EACAwkC,EAAezV,QAAU0V,EACzB/yC,EAAOD,QAAU+yC,EACjBA,EAAelQ,GAAK,I,stCCnLpB5iC,EAAOD,QAAUK,QAAQ,mD,wBCAzBJ,EAAOD,QAAUK,QAAQ,uD,uBCAzBJ,EAAOD,QAAUK,QAAQ,sD,wBCAzBJ,EAAOD,QAAUK,QAAQ,wD,wBCAzBJ,EAAOD,QAAUK,QAAQ,yD,wBCAzBJ,EAAOD,QAAUK,QAAQ,uD,wBCAzBJ,EAAOD,QAAUK,QAAQ,wD,wBCAzBJ,EAAOD,QAAUK,QAAQ,sD,wBCAzBJ,EAAOD,QAAUK,QAAQ,0D,wBCAzBJ,EAAOD,QAAUK,QAAQ,0D,wBCAzBJ,EAAOD,QAAUK,QAAQ,0D,uBCAzBJ,EAAOD,QAAUK,QAAQ,sD,wBCAzBJ,EAAOD,QAAUK,QAAQ,qD,sBCAzBJ,EAAOD,QAAUK,QAAQ,wD,uBCAzBJ,EAAOD,QAAUK,QAAQ,uD,wBCAzBJ,EAAOD,QAAUK,QAAQ,sD,wBCAzBJ,EAAOD,QAAUK,QAAQ,sD,wBCAzBJ,EAAOD,QAAUK,QAAQ,6D,wBCAzBJ,E
AAOD,QAAUK,QAAQ,sD,wBCAzBJ,EAAOD,QAAUK,QAAQ,uD,wBCAzBJ,EAAOD,QAAUK,QAAQ,4C,wBCAzBJ,EAAOD,QAAUK,QAAQ,sD,wBCAzBJ,EAAOD,QAAUK,QAAQ,oD,wBCAzBJ,EAAOD,QAAUK,QAAQ,sD,wBCAzBJ,EAAOD,QAAUK,QAAQ,oD,wBCAzBJ,EAAOD,QAAUK,QAAQ,4C,wBCAzBJ,EAAOD,QAAUK,QAAQ,gD,wBCAzBJ,EAAOD,QAAUK,QAAQ,yC,uBCAzBJ,EAAOD,QAAUK,QAAQ,S,wBCAzBJ,EAAOD,QAAUK,QAAQ,a,wBCAzBJ,EAAOD,QAAUK,QAAQ,Y,wBCAzBJ,EAAOD,QAAUK,QAAQ,U,wBCAzBJ,EAAOD,QAAUK,QAAQ,a,wBCAzBJ,EAAOD,QAAUK,QAAQ,oB,uBCAzBJ,EAAOD,QAAUK,QAAQ,iB,uBCAzBJ,EAAOD,QAAUK,QAAQ,a,uBCAzBJ,EAAOD,QAAUK,QAAQ,c,wBCAzBJ,EAAOD,QAAUK,QAAQ,Q,wBCAzBJ,EAAOD,QAAUK,QAAQ,0B,wBCAzBJ,EAAOD,QAAUK,QAAQ,4B,wBCAzBJ,EAAOD,QAAUK,QAAQ,Q,uBCAzBJ,EAAOD,QAAUK,QAAQ,a,wBCAzBJ,EAAOD,QAAUK,QAAQ,W,sBCAzBJ,EAAOD,QAAUK,QAAQ,kB,wBCAzBJ,EAAOD,QAAUK,QAAQ,4B,wBCAzBJ,EAAOD,QAAUK,QAAQ,Y,GCCrB6yC,EAA2B,CAAC,EAGhC,SAASD,EAAoBE,GAE5B,IAAIC,EAAeF,EAAyBC,GAC5C,QAAqB5wC,IAAjB6wC,EACH,OAAOA,EAAapzC,QAGrB,IAAIC,EAASizC,EAAyBC,GAAY,CAGjDnzC,QAAS,CAAC,GAOX,OAHAqzC,EAAoBF,GAAUlzC,EAAQA,EAAOD,QAASizC,GAG/ChzC,EAAOD,OACf,CCrBAizC,EAAoBhxB,EAAKhiB,IACxB,IAAIqzC,EAASrzC,GAAUA,EAAOszC,WAC7B,IAAOtzC,EAAiB,QACxB,IAAM,EAEP,OADAgzC,EAAoBO,EAAEF,EAAQ,CAAE17B,EAAG07B,IAC5BA,CAAM,ECLdL,EAAoBO,EAAI,CAACxzC,EAAS+P,KACjC,IAAI,IAAIhJ,KAAOgJ,EACXkjC,EAAoBnY,EAAE/qB,EAAYhJ,KAASksC,EAAoBnY,EAAE96B,EAAS+G,IAC5E6vB,OAAO6c,eAAezzC,EAAS+G,EAAK,CAAE+2B,YAAY,EAAMh8B,IAAKiO,EAAWhJ,IAE1E,ECNDksC,EAAoBnY,EAAI,CAACxY,EAAKgE,IAAUsQ,OAAO1T,UAAU2T,eAAeC,KAAKxU,EAAKgE,GCClF2sB,EAAoBzS,EAAKxgC,IACH,oBAAX0zC,QAA0BA,OAAOC,aAC1C/c,OAAO6c,eAAezzC,EAAS0zC,OAAOC,YAAa,CAAExlC,MAAO,WAE7DyoB,OAAO6c,eAAezzC,EAAS,aAAc,CAAEmO,OAAO,GAAO,E,gaCL9D,MAAM,EAA+B9N,QAAQ,gE,sECA7C,MAAM,EAA+BA,QAAQ,e,8LCA7C,MAAM,EAA+BA,QAAQ,mB,YCA7C,MAAM,EAA+BA,QAAQ,gB,2CCY7C,MAAMuzC,EAAOh8B,GAAKA,EAmBH,MAAMi8B,EAEnB9wC,cAAsB,IAAD,MAATykC,EAAI,uDAAC,CAAC,EA+cpB,IAAwBsM,EAAaC,EAAc53B,EA9c/C63B,IAAW5zC,KAAM,CACfmD,MAAO,CAAC,EACR0wC,QAAS,GACTC,eAAgB,CAAC,EACjB7mC,OAAQ,CACN0E,QAAS,CAAC,EACVhH,GAAI,CAAC,EACL8e,WAAY,CAAC,EACbvc,YAAa,CAAC,EACdI,aAAc,CAAC,GAEjBymC,YAAa,CAAC,EACdl9B,QAAS,CAAC,GACTuwB,GAEHpnC,KAAK+b,UAAY,MAAA/b,KAAKg0C,YAAU,OAAMh0C,MAGtCA,KAAKuoC,OA4bemL,EA5bQF,EA4bKG,GA5bCzlC,EAAAA,EAAAA,QAAOlO,KAAKmD,OA4bC4Y,EA5bO/b,KAAK+b,UArC/D,SAAmC23B,EAAaC,EAAc53B,GAE5D,IAAIk4B,EAAa,EAIf1I,EAAAA,EAAAA,IAAuBxvB,IAGzB,MAAMm4B,EAAmBlxC,EAAAA,EAAAA,sCAA4CylC,EAAAA,QAErE,OAAO0L,EAAAA,EAAAA,aAAYT,EAAaC,EAAcO,GAC5CE,EAAAA,EAAAA,oBAAoBH,IAExB,CAodgBI,CAA0BX,EAAaC,EAAc53B,IA1bjE/b,KAAKs0C,aAAY,GAGjBt0C,KAAKu0C,SAASv0C,KAAK6zC,QACrB,CAEAnM,WACE,OAAO1nC,KAAKuoC,KACd,CAEAgM,SAASV,GAAwB,IAAfW,IAAO,yDACvB,IAAIC,EAAeC,EAAeb,EAAS7zC,KAAK+b,YAAa/b,KAAK8zC,gBAClEa,EAAa30C,KAAKiN,OAAQwnC,GACvBD,GACDx0C,KAAKs0C,cAGoBM,EAAcle,KAAK12B,KAAKiN,OAAQ4mC,EAAS7zC,KAAK+b,cAGvE/b,KAAKs0C,aAET,CAEAA,cAAgC,IAApBO,IAAY,yDAClBrJ,EAAWxrC,KAAK0nC,WAAW8D,SAC3B/rB,EAAWzf,KAAK0nC,WAAWjoB,SAE/Bzf,KAAK+zC,YAAc,IAAc,CAAC,EAC9B/zC,KAAK80C,iBACL90C,KAAK+0C,0BAA0BvJ,GAC/BxrC,KAAKg1C,4BAA4Bv1B,EAAUzf,KAAK+b,WAChD/b,KAAKi1C,eAAex1B,GACpBzf,KAAKk1C,QACLl1C,KAAKY,cAGNi0C,GACD70C,KAAKm1C,gBACT,CAEAnB,aACE,OAAOh0C,KAAK+zC,WACd,CAEAe,iBAAkB,IAAD,MACf,OAAO,IAAc,CACnB/4B,UAAW/b,KAAK+b,UAChB2rB,SAAU,MAAA1nC,KAAK0nC,UAAQ,OAAM1nC,MAC7BynC,cAAe,MAAAznC,KAAKynC,eAAa,OAAMznC,MACvCyf,SAAUzf,KAAK0nC,WAAWjoB,SAC1B7e,WAAY,MAAAZ,KAAKo1C,aAAW,OAAMp1C,MAClCgV,GAAE,IACFtS,MAAKA,KACJ1C,KAAKiN,OAAOC,aAAe,CAAC,EACjC,CAEAkoC,cACE,OAAOp1C,KAAKiN,OAAO0E,OACrB,CAEA/Q,aACE,MAAO,CACL+Q,QAAS3R,KAAKiN,OAAO0E,QAEzB,CAEA0jC,WAAW1jC,GACT3R,KAAKiN,OAAO0E,QAAUA,CACxB,CAEAwjC,iBA2TF,IAAsBG,EA1TlBt1C,KAAKuoC,MAAMgN,gBA0TOD,EA1TqBt1C,KAAKiN,OAAOK,aAiUvD,SAAqBkoC,GAAgB,IAAD,EAClC,IAAIjoC,EAAW,UAA
YioC,IAAc,QAAQ,CAACtzB,EAAKvb,KACrDub,EAAIvb,GAWR,SAAqB8uC,GACnB,OAAO,WAAgC,IAA/BtyC,EAAQ,UAAH,6CAAG,IAAIiL,EAAAA,IAAOwD,EAAM,uCAC/B,IAAI6jC,EACF,OAAOtyC,EAET,IAAIuyC,EAASD,EAAW7jC,EAAOpQ,MAC/B,GAAGk0C,EAAO,CACR,MAAMtjC,EAAMujC,EAAiBD,EAAjBC,CAAwBxyC,EAAOyO,GAG3C,OAAe,OAARQ,EAAejP,EAAQiP,CAChC,CACA,OAAOjP,CACT,CACF,CAzBeyyC,CAAYJ,EAAc7uC,IAC9Bub,IACP,CAAC,GAEH,OAAI,IAAY3U,GAAU5J,QAInBkyC,EAAAA,EAAAA,iBAAgBtoC,GAHdimC,CAIX,CAdSsC,EAHUzK,EAAAA,EAAAA,IAAOiK,GAASjmC,GACxBA,EAAI9B,aA3Tb,CAMAwoC,QAAQh1C,GACN,IAAIi1C,EAASj1C,EAAK,GAAGk1C,cAAgB,IAAAl1C,GAAI,KAAJA,EAAW,GAChD,OAAOuqC,EAAAA,EAAAA,IAAUtrC,KAAKiN,OAAOK,cAAc,CAAC+B,EAAKiO,KAC7C,IAAIpC,EAAQ7L,EAAItO,GAChB,GAAGma,EACH,MAAO,CAAC,CAACoC,EAAU04B,GAAU96B,EAAM,GAEzC,CAEAg7B,eACE,OAAOl2C,KAAK+1C,QAAQ,YACtB,CAEAI,aACE,IAAIC,EAAgBp2C,KAAK+1C,QAAQ,WAEjC,OAAO1K,EAAAA,EAAAA,IAAO+K,GAAgB5oC,IACrB89B,EAAAA,EAAAA,IAAU99B,GAAS,CAACoE,EAAQykC,KACjC,IAAGnL,EAAAA,EAAAA,IAAKt5B,GACN,MAAO,CAAC,CAACykC,GAAazkC,EAAO,KAGrC,CAEAmjC,0BAA0BvJ,GAAW,IAAD,OAClC,IAAI8K,EAAet2C,KAAKu2C,gBAAgB/K,GACtC,OAAOH,EAAAA,EAAAA,IAAOiL,GAAc,CAAC9oC,EAASgpC,KACpC,IAAIC,EAAWz2C,KAAKiN,OAAOK,aAAa,IAAAkpC,GAAe,KAAfA,EAAsB,GAAG,IAAI9oC,YACnE,OAAG+oC,GACMpL,EAAAA,EAAAA,IAAO79B,GAAS,CAACoE,EAAQykC,KAC9B,IAAIK,EAAOD,EAASJ,GACpB,OAAIK,GAIA,IAAcA,KAChBA,EAAO,CAACA,IAEH,IAAAA,GAAI,KAAJA,GAAY,CAACt3B,EAAKzU,KACvB,IAAIgsC,EAAY,WACd,OAAOhsC,EAAGyU,EAAK,EAAKrD,YAAbpR,IAA6B,UACtC,EACA,KAAIugC,EAAAA,EAAAA,IAAKyL,GACP,MAAM,IAAIpN,UAAU,8FAEtB,OAAOoM,EAAiBgB,EAAU,GACjC/kC,GAAUiR,SAASC,YAdblR,CAcuB,IAG/BpE,CAAO,GAEpB,CAEAwnC,4BAA4Bv1B,EAAU1D,GAAY,IAAD,OAC/C,IAAI66B,EAAiB52C,KAAK62C,kBAAkBp3B,EAAU1D,GACpD,OAAOsvB,EAAAA,EAAAA,IAAOuL,GAAgB,CAACnpC,EAAWqpC,KACxC,IAAIC,EAAY,CAAC,IAAAD,GAAiB,KAAjBA,EAAwB,GAAI,IACzCL,EAAWz2C,KAAKiN,OAAOK,aAAaypC,GAAWz7B,cACjD,OAAGm7B,GACMpL,EAAAA,EAAAA,IAAO59B,GAAW,CAAC8Q,EAAUy4B,KAClC,IAAIN,EAAOD,EAASO,GACpB,OAAIN,GAIA,IAAcA,KAChBA,EAAO,CAACA,IAEH,IAAAA,GAAI,KAAJA,GAAY,CAACt3B,EAAKzU,KACvB,IAAIssC,EAAkB,WAAc,IAAD,uBAATrjC,EAAI,yBAAJA,EAAI,gBAC5B,OAAOjJ,EAAGyU,EAAK,EAAKrD,YAAbpR,CAA0B8U,IAAW3R,MAAMipC,MAAenjC,EACnE,EACA,KAAIs3B,EAAAA,EAAAA,IAAK+L,GACP,MAAM,IAAI1N,UAAU,+FAEtB,OAAO0N,CAAe,GACrB14B,GAAYsE,SAASC,YAdfvE,CAcyB,IAGjC9Q,CAAS,GAEtB,CAEAypC,UAAU/zC,GAAQ,IAAD,EACf,OAAO,UAAYnD,KAAKiN,OAAOK,eAAa,QAAQ,CAAC4U,EAAKvb,KACxDub,EAAIvb,GAAOxD,EAAMzB,IAAIiF,GACdub,IACN,CAAC,EACN,CAEA+yB,eAAex1B,GAAW,IAAD,EACvB,OAAO,UAAYzf,KAAKiN,OAAOK,eAAa,QAAQ,CAAC4U,EAAKvb,KACtDub,EAAIvb,GAAO,IAAK8Y,IAAW/d,IAAIiF,GAC5Bub,IACN,CAAC,EACJ,CAEAgzB,QACE,MAAO,CACLvqC,GAAI3K,KAAKiN,OAAOtC,GAEpB,CAEA88B,cAAc7S,GACZ,MAAMxiB,EAAMpS,KAAKiN,OAAOwc,WAAWmL,GAEnC,OAAG,IAAcxiB,GACR,IAAAA,GAAG,KAAHA,GAAW,CAACY,EAAKmkC,IACfA,EAAQnkC,EAAKhT,KAAK+b,oBAGL,IAAd6Y,EACD50B,KAAKiN,OAAOwc,WAAWmL,GAGzB50B,KAAKiN,OAAOwc,UACrB,CAEAotB,kBAAkBp3B,EAAU1D,GAC1B,OAAOsvB,EAAAA,EAAAA,IAAOrrC,KAAKk2C,gBAAgB,CAACh0B,EAAKvb,KACvC,IAAIowC,EAAY,CAAC,IAAApwC,GAAG,KAAHA,EAAU,GAAI,IAC/B,MAAMywC,EAAiB,IAAK33B,IAAW3R,MAAMipC,GAE7C,OAAO1L,EAAAA,EAAAA,IAAOnpB,GAAMvX,GACX,WAAc,IAAD,uBAATiJ,EAAI,yBAAJA,EAAI,gBACb,IAAIxB,EAAMujC,EAAiBhrC,GAAI21B,MAAM,KAAM,CAAC8W,OAAqBxjC,IAMjE,MAHmB,mBAATxB,IACRA,EAAMujC,EAAiBvjC,EAAjBujC,CAAsB55B,MAEvB3J,CACT,GACA,GAEN,CAEAmkC,gBAAgB/K,GAEdA,EAAWA,GAAYxrC,KAAK0nC,WAAW8D,SAEvC,MAAMh+B,EAAUxN,KAAKm2C,aAEfkB,EAAUC,GACY,mBAAdA,GACHjM,EAAAA,EAAAA,IAAOiM,GAASpxB,GAAQmxB,EAAQnxB,KAGlC,WACL,IAAItU,EAAS,KACb,IACEA,EAAS0lC,KAAY,UAOvB,CALA,MAAOrrC,GACL2F,EAAS,CAACpQ,KAAMoW,EAAAA,eAAgBxT,OAAO,EAAMqD,SAAS2Q,EAAAA,EAAAA,gBAAenM,GACvE,CAAC,QAEC,OAAO2F,CACT,CACF,EAGF,OAAOy5B,EAAAA,EAAAA,IAAO79B,GAAS+pC,IAAiBC,EAAAA,EAAAA,oBA
AoBH,EAASE,GAAiB/L,IACxF,CAEAiM,qBACE,MAAO,IACE,IAAc,CAAC,EAAGz3C,KAAK+b,YAElC,CAEA27B,sBAAsBhnC,GACpB,OAAQ86B,GACCoI,IAAW,CAAC,EAAG5zC,KAAK+0C,0BAA0BvJ,GAAWxrC,KAAKk1C,QAASxkC,EAElF,EAIF,SAASgkC,EAAeb,EAASh9B,EAAS8gC,GACxC,IAAGjN,EAAAA,EAAAA,IAASmJ,MAAa1I,EAAAA,EAAAA,IAAQ0I,GAC/B,OAAOhiC,IAAM,CAAC,EAAGgiC,GAGnB,IAAGtlC,EAAAA,EAAAA,IAAOslC,GACR,OAAOa,EAAeb,EAAQh9B,GAAUA,EAAS8gC,GAGnD,IAAGxM,EAAAA,EAAAA,IAAQ0I,GAAU,CAAC,IAAD,EACnB,MAAM+D,EAAwC,UAAjCD,EAAcE,eAA6BhhC,EAAQ4wB,gBAAkB,CAAC,EAEnF,OAAO,UAAAoM,GAAO,KAAPA,GACFiE,GAAUpD,EAAeoD,EAAQjhC,EAAS8gC,MAAe,OACtDhD,EAAciD,EACxB,CAEA,MAAO,CAAC,CACV,CAEA,SAAShD,EAAcf,EAAS5mC,GAA6B,IAArB,UAAE8qC,GAAc,UAAH,6CAAG,CAAC,EACnDC,EAAkBD,EAQtB,OAPGrN,EAAAA,EAAAA,IAASmJ,MAAa1I,EAAAA,EAAAA,IAAQ0I,IACC,mBAAtBA,EAAQ7mC,YAChBgrC,GAAkB,EAClBrC,EAAiB9B,EAAQ7mC,WAAW0pB,KAAK12B,KAAMiN,KAIhDsB,EAAAA,EAAAA,IAAOslC,GACDe,EAAcle,KAAK12B,KAAM6zC,EAAQ5mC,GAASA,EAAQ,CAAE8qC,UAAWC,KAErE7M,EAAAA,EAAAA,IAAQ0I,GACF,IAAAA,GAAO,KAAPA,GAAYiE,GAAUlD,EAAcle,KAAK12B,KAAM83C,EAAQ7qC,EAAQ,CAAE8qC,UAAWC,MAG9EA,CACT,CAKA,SAASrD,IAA+B,IAAlBiD,EAAI,uDAAC,CAAC,EAAG91C,EAAG,uDAAC,CAAC,EAElC,KAAI4oC,EAAAA,EAAAA,IAASkN,GACX,MAAO,CAAC,EAEV,KAAIlN,EAAAA,EAAAA,IAAS5oC,GACX,OAAO81C,EAKN91C,EAAIsR,kBACLi4B,EAAAA,EAAAA,IAAOvpC,EAAIsR,gBAAgB,CAAC6kC,EAAWtxC,KACrC,MAAMqM,EAAM4kC,EAAKnuB,YAAcmuB,EAAKnuB,WAAW9iB,GAC5CqM,GAAO,IAAcA,IACtB4kC,EAAKnuB,WAAW9iB,GAAO,IAAAqM,GAAG,KAAHA,EAAW,CAACilC,WAC5Bn2C,EAAIsR,eAAezM,IAClBqM,IACR4kC,EAAKnuB,WAAW9iB,GAAO,CAACqM,EAAKilC,UACtBn2C,EAAIsR,eAAezM,GAC5B,IAGE,IAAY7E,EAAIsR,gBAAgBzP,eAI3B7B,EAAIsR,gBAQf,MAAM,aAAE9F,GAAiBsqC,EACzB,IAAGlN,EAAAA,EAAAA,IAASp9B,GACV,IAAI,IAAIgQ,KAAahQ,EAAc,CACjC,MAAM4qC,EAAe5qC,EAAagQ,GAClC,KAAIotB,EAAAA,EAAAA,IAASwN,GACX,SAGF,MAAM,YAAExqC,EAAW,cAAE4N,GAAkB48B,EAGvC,IAAIxN,EAAAA,EAAAA,IAASh9B,GACX,IAAI,IAAI2oC,KAAc3oC,EAAa,CACjC,IAAIkE,EAASlE,EAAY2oC,GAQqI,IAAD,EAA7J,GALI,IAAczkC,KAChBA,EAAS,CAACA,GACVlE,EAAY2oC,GAAczkC,GAGzB9P,GAAOA,EAAIwL,cAAgBxL,EAAIwL,aAAagQ,IAAcxb,EAAIwL,aAAagQ,GAAW5P,aAAe5L,EAAIwL,aAAagQ,GAAW5P,YAAY2oC,GAC9Iv0C,EAAIwL,aAAagQ,GAAW5P,YAAY2oC,GAAc,MAAA3oC,EAAY2oC,IAAW,OAAQv0C,EAAIwL,aAAagQ,GAAW5P,YAAY2oC,GAGjI,CAIF,IAAI3L,EAAAA,EAAAA,IAASpvB,GACX,IAAI,IAAI07B,KAAgB17B,EAAe,CACrC,IAAIiD,EAAWjD,EAAc07B,GAQuI,IAAD,EAAnK,GALI,IAAcz4B,KAChBA,EAAW,CAACA,GACZjD,EAAc07B,GAAgBz4B,GAG7Bzc,GAAOA,EAAIwL,cAAgBxL,EAAIwL,aAAagQ,IAAcxb,EAAIwL,aAAagQ,GAAWhC,eAAiBxZ,EAAIwL,aAAagQ,GAAWhC,cAAc07B,GAClJl1C,EAAIwL,aAAagQ,GAAWhC,cAAc07B,GAAgB,MAAA17B,EAAc07B,IAAa,OAAQl1C,EAAIwL,aAAagQ,GAAWhC,cAAc07B,GAG3I,CAEJ,CAGF,OAAOpD,IAAWgE,EAAM91C,EAC1B,CAsCA,SAAS6zC,EAAiBhrC,GAEjB,IAFqB,UAC5BwtC,GAAY,GACV,UAAH,6CAAG,CAAC,EACH,MAAiB,mBAAPxtC,EACDA,EAGF,WACL,IAAK,IAAD,uBADaiJ,EAAI,yBAAJA,EAAI,gBAEnB,OAAOjJ,EAAG+rB,KAAK12B,QAAS4T,EAM1B,CALE,MAAM3H,GAIN,OAHGksC,GACD9xC,QAAQjC,MAAM6H,GAET,IACT,CACF,CACF,C,oPCxee,MAAM0T,WAA2BoD,EAAAA,cAC9CpgB,YAAYnC,EAAOoC,GACjBC,MAAMrC,EAAOoC,GAAQ,yBAkGV,KACX,IAAI,cAAEsQ,EAAa,IAAEqD,EAAG,YAAEC,EAAW,QAAE+E,GAAYvb,KAAKQ,MACxD,MAAM43C,EAAkBp4C,KAAKq4C,qBACzB98B,QAA+BpZ,IAApBi2C,GAEbp4C,KAAKu+B,yBAEPrrB,EAAcQ,KAAK,CAAC,aAAc6C,EAAKC,IAAe+E,EAAQ,IAC/D,2BAEa,KACZvb,KAAKuD,SAAS,CAAC+0C,iBAAkBt4C,KAAKmD,MAAMm1C,iBAAiB,IAC9D,2BAEc,KACbt4C,KAAKuD,SAAS,CAAC+0C,iBAAkBt4C,KAAKmD,MAAMm1C,iBAAiB,IAC9D,0BAEe96B,IACd,MAAM+6B,EAA0Bv4C,KAAKQ,MAAMoK,cAAcigB,iCAAiCrN,GAC1Fxd,KAAKQ,MAAMynB,YAAY1K,oBAAoB,CAAExP,MAAOwqC,EAAyB/6B,cAAa,IAC3F,uBAEW,KACVxd,KAAKuD,SAAS,CAAEi1C,mBAAmB,GAAO,IAC3C,gCAEoB,KACnB,MAAM,cACJj4C,EAAa,KACbiQ,EAAI,OACJlF,EAAM,SACNrK,GACEjB,KAAKQ,MAET,OAAGS,EACMV,EAAcyqB,oBAAoB/pB,EAAS4L,QAG7CtM,EAAcyqB,oBAAoB,CAAC,QAASxa,EAAM
lF,GAAQ,IAClE,oCAEwB,KACvB,MAAM,YACJoG,EAAW,KACXlB,EAAI,OACJlF,EAAM,SACNrK,GACEjB,KAAKQ,MAGT,OAAGS,EACMyQ,EAAY6sB,uBAAuBt9B,EAAS4L,QAG9C6E,EAAY6sB,uBAAuB,CAAC,QAAS/tB,EAAMlF,GAAQ,IAvJlE,MAAM,gBAAEgtC,GAAoB93C,EAAMI,aAElCZ,KAAKmD,MAAQ,CACXm1C,iBAAqC,IAApBA,GAAgD,SAApBA,EAC7CE,mBAAmB,EAEvB,CAyCA1jB,gBAAgB2jB,EAAWj4C,GACzB,MAAM,GAAEyf,EAAE,gBAAEtM,EAAe,WAAE/S,GAAeJ,GACtC,aAAEk4C,EAAY,YAAE7kC,EAAW,mBAAE8kC,EAAkB,uBAAEC,EAAsB,uBAAEC,GAA2Bj4C,IACpGgb,EAAcjI,EAAgBiI,cAC9BpF,EAAcyJ,EAAGnS,MAAM,CAAC,YAAa,2BAA6BmS,EAAGnS,MAAM,CAAC,YAAa,kBAAmBiyB,EAAAA,GAAAA,MAAK9f,EAAGve,IAAI,aAAclB,EAAMgQ,KAAMhQ,EAAM8K,SAAW2U,EAAGve,IAAI,MAC1K8S,EAAa,CAAC,aAAchU,EAAM+V,IAAKC,GACvCsiC,EAAuBjlC,GAA+B,UAAhBA,EACtCqM,EAAgB,KAAA24B,GAAsB,KAAtBA,EAA+Br4C,EAAM8K,SAAW,SAAqC,IAAxB9K,EAAM0f,cACvF1f,EAAMD,cAAc0jC,iBAAiBzjC,EAAMgQ,KAAMhQ,EAAM8K,QAAU9K,EAAM0f,eACnE5R,EAAW2R,EAAGnS,MAAM,CAAC,YAAa,cAAgBtN,EAAMD,cAAc+N,WAE5E,MAAO,CACLkI,cACAsiC,uBACAl9B,cACA+8B,qBACAC,yBACA14B,gBACA5R,WACA+B,aAAc7P,EAAMqK,cAAcwF,aAAa/B,GAC/CiN,QAAS5H,EAAgB4H,QAAQ/G,EAA6B,SAAjBkkC,GAC7CK,UAAY,SAAQv4C,EAAMgQ,QAAQhQ,EAAM8K,SACxCI,SAAUlL,EAAMD,cAAcujC,YAAYtjC,EAAMgQ,KAAMhQ,EAAM8K,QAC5D7E,QAASjG,EAAMD,cAAcwjC,WAAWvjC,EAAMgQ,KAAMhQ,EAAM8K,QAE9D,CAEAjH,oBACE,MAAM,QAAEkX,GAAYvb,KAAKQ,MACnB43C,EAAkBp4C,KAAKq4C,qBAE1B98B,QAA+BpZ,IAApBi2C,GACZp4C,KAAKu+B,wBAET,CAEAl7B,iCAAiCC,GAC/B,MAAM,SAAEoI,EAAQ,QAAE6P,GAAYjY,EACxB80C,EAAkBp4C,KAAKq4C,qBAE1B3sC,IAAa1L,KAAKQ,MAAMkL,UACzB1L,KAAKuD,SAAS,CAAEi1C,mBAAmB,IAGlCj9B,QAA+BpZ,IAApBi2C,GACZp4C,KAAKu+B,wBAET,CA4DA79B,SACE,IACEuf,GAAI+4B,EAAY,IAChBziC,EAAG,KACH/F,EAAI,OACJlF,EAAM,SACNgD,EAAQ,aACR+B,EAAY,YACZmG,EAAW,YACXoF,EAAW,QACXL,EAAO,UACPw9B,EAAS,cACT74B,EAAa,SACbxU,EAAQ,QACRjF,EAAO,mBACPkyC,EAAkB,uBAClBC,EAAsB,qBACtBE,EAAoB,SACpB73C,EAAQ,cACRV,EAAa,YACbmR,EAAW,aACX/Q,EAAY,WACZC,EAAU,gBACV+S,EAAe,cACfT,EAAa,YACbtL,EAAW,cACXiD,EAAa,YACbod,EAAW,cACXrd,EAAa,GACbD,GACE3K,KAAKQ,MAET,MAAMy4C,EAAYt4C,EAAc,aAE1By3C,EAAkBp4C,KAAKq4C,uBAAwBjqC,EAAAA,EAAAA,OAE/C8qC,GAAiBhrC,EAAAA,EAAAA,QAAO,CAC5B+R,GAAIm4B,EACJ7hC,MACA/F,OACA2oC,QAASH,EAAalrC,MAAM,CAAC,YAAa,aAAe,GACzD7L,WAAYm2C,EAAgB12C,IAAI,eAAiBs3C,EAAalrC,MAAM,CAAC,YAAa,iBAAkB,EACpGxC,SACAgD,WACA+B,eACAmG,cACA4iC,oBAAqBhB,EAAgBtqC,MAAM,CAAC,YAAa,0BACzD8N,cACAL,UACAw9B,YACA74B,gBACAzZ,UACAkyC,qBACAC,yBACAE,uBACAN,kBAAmBx4C,KAAKmD,MAAMq1C,kBAC9BF,gBAAiBt4C,KAAKmD,MAAMm1C,kBAG9B,OACE,kBAACW,EAAS,CACRxoC,UAAWyoC,EACXxtC,SAAUA,EACVjF,QAASA,EACT8U,QAASA,EAET89B,YAAar5C,KAAKq5C,YAClBC,cAAet5C,KAAKs5C,cACpBC,aAAcv5C,KAAKu5C,aACnBC,cAAex5C,KAAKw5C,cACpBC,UAAWz5C,KAAKy5C,UAChBx4C,SAAUA,EAEVyQ,YAAcA,EACdnR,cAAgBA,EAChB0nB,YAAaA,EACbrd,cAAeA,EACfsI,cAAgBA,EAChBS,gBAAkBA,EAClB/L,YAAcA,EACdiD,cAAgBA,EAChBlK,aAAeA,EACfC,WAAaA,EACb+J,GAAIA,GAGV,EAED,KAtPoBgV,GAAkB,eA2Cf,CACpB/D,aAAa,EACblQ,SAAU,KACVwU,eAAe,EACfy4B,oBAAoB,EACpBC,wBAAwB,ICnDb,MAAMvP,WAAY3mC,IAAAA,UAE/Bg3C,YACE,IAAI,aAAE/4C,EAAY,gBAAEgT,GAAoB3T,KAAKQ,MAC7C,MAAMm5C,EAAahmC,EAAgB7N,UAC7B2b,EAAY9gB,EAAag5C,GAAY,GAC3C,OAAOl4B,GAAwB,KAAK,uDAAkCk4B,EAAU,MAClF,CAEAj5C,SACE,MAAMk5C,EAAS55C,KAAK05C,YAEpB,OACE,kBAACE,EAAM,KAEX,EAQFvQ,GAAIrjC,aAAe,CACnB,ECxBe,MAAM6zC,WAA2Bn3C,IAAAA,UAAiB,cAAD,uCACvD,KACL,IAAI,YAAEkF,GAAgB5H,KAAKQ,MAE3BoH,EAAYJ,iBAAgB,EAAM,GACnC,CAED9G,SAAU,IAAD,EACP,IAAI,cAAEmK,EAAa,YAAEjD,EAAW,aAAEjH,EAAY,aAAE4f,EAAY,cAAEhgB,EAAeoK,IAAI,IAAEwyB,EAAM,CAAC,IAAQn9B,KAAKQ,MACnGyO,EAAcpE,EAAciE,mBAChC,MAAMgrC,EAAQn5C,EAAa,SAE3B,OACE,yBAAKkB,UAAU,aACb,yBAAKA,UAAU,gBACf,yBAAKA,UAAU,YACb,yBAAKA,UAAU,mBACb,yBAAKA,UAAU,kBACb,yBAAKA,UAAU,mBACb,wDACA,4BAAQL,KAAK,SAASK,UAAU,cAAckxB,QAAU/yB,KAAKkyC,OAC3D,yBAAKlwC,MAAM,KAAKD,OAAO,MAC
rB,yBAAKgC,KAAK,SAASkvB,UAAU,cAInC,yBAAKpxB,UAAU,oBAGX,MAAAoN,EAAYO,YAAU,QAAK,CAAEG,EAAYhJ,IAChC,kBAACmzC,EAAK,CAACnzC,IAAMA,EACNw2B,IAAKA,EACLluB,YAAcU,EACdhP,aAAeA,EACf4f,aAAeA,EACf1V,cAAgBA,EAChBjD,YAAcA,EACdrH,cAAgBA,UAShD,EC9Ca,MAAMw5C,WAAqBr3C,IAAAA,UAQxChC,SACE,IAAI,aAAE2P,EAAY,UAAE2pC,EAAS,QAAEjnB,EAAO,aAAEpyB,GAAiBX,KAAKQ,MAG9D,MAAMq5C,EAAqBl5C,EAAa,sBAAsB,GAE9D,OACE,yBAAKkB,UAAU,gBACb,4BAAQA,UAAWwO,EAAe,uBAAyB,yBAA0B0iB,QAASA,GAC5F,2CACA,yBAAK/wB,MAAM,KAAKD,OAAO,MACrB,yBAAKgC,KAAOsM,EAAe,UAAY,YAAc4iB,UAAY5iB,EAAe,UAAY,gBAGhG2pC,GAAa,kBAACH,EAAkB,MAGtC,ECzBa,MAAMI,WAA8Bv3C,IAAAA,UAUjDhC,SACE,MAAM,YAAEkH,EAAW,cAAEiD,EAAa,cAAEtK,EAAa,aAAEI,GAAgBX,KAAKQ,MAElE0O,EAAsB3O,EAAc2O,sBACpCgrC,EAA0BrvC,EAAcmE,yBAExC+qC,EAAep5C,EAAa,gBAElC,OAAOuO,EACL,kBAAC6qC,EAAY,CACXhnB,QAAS,IAAMnrB,EAAYJ,gBAAgB0yC,GAC3C7pC,eAAgBxF,EAAc6B,aAAakD,KAC3CoqC,YAAanvC,EAAciE,mBAC3BnO,aAAcA,IAEd,IACN,EC1Ba,MAAMw5C,WAA8Bz3C,IAAAA,UAAiB,cAAD,yCAMvDuJ,IACRA,EAAEmuC,kBACF,IAAI,QAAErnB,GAAY/yB,KAAKQ,MAEpBuyB,GACDA,GACF,GACD,CAEDryB,SACE,IAAI,aAAE2P,GAAiBrQ,KAAKQ,MAE5B,OACE,4BAAQqB,UAAWwO,EAAe,4BAA8B,8BAC9D,aAAYA,EAAe,8BAAgC,gCAC3D0iB,QAAS/yB,KAAK+yB,SACd,yBAAK/wB,MAAM,KAAKD,OAAO,MACrB,yBAAKgC,KAAOsM,EAAe,UAAY,YAAc4iB,UAAY5iB,EAAe,UAAY,eAKpG,EC3Ba,MAAMypC,WAAcp3C,IAAAA,UAUjCC,YAAYnC,EAAOoC,GACjBC,MAAMrC,EAAOoC,GAAQ,0BAKRsF,IACb,IAAI,KAAEnH,GAASmH,EAEflI,KAAKuD,SAAS,CAAE,CAACxC,GAAOmH,GAAO,IAChC,wBAEY+D,IACXA,EAAEumB,iBAEF,IAAI,YAAE5qB,GAAgB5H,KAAKQ,MAC3BoH,EAAYD,2BAA2B3H,KAAKmD,MAAM,IACnD,yBAEa8I,IACZA,EAAEumB,iBAEF,IAAI,YAAE5qB,EAAW,YAAEqH,GAAgBjP,KAAKQ,MACpC65C,EAAQ,IAAAprC,GAAW,KAAXA,GAAiB,CAACI,EAAK1I,IAC1BA,IACNsiB,UAEHjpB,KAAKuD,SAAS,IAAA82C,GAAK,KAALA,GAAa,CAACpc,EAAM/1B,KAChC+1B,EAAK/1B,GAAQ,GACN+1B,IACN,CAAC,IAEJr2B,EAAYG,wBAAwBsyC,EAAM,IAC3C,mBAEOpuC,IACNA,EAAEumB,iBACF,IAAI,YAAE5qB,GAAgB5H,KAAKQ,MAE3BoH,EAAYJ,iBAAgB,EAAM,IApClCxH,KAAKmD,MAAQ,CAAC,CAChB,CAsCAzC,SAAU,IAAD,EACP,IAAI,YAAEuO,EAAW,aAAEtO,EAAY,cAAEkK,EAAa,aAAE0V,GAAiBvgB,KAAKQ,MACtE,MAAM0sB,EAAWvsB,EAAa,YACxB25C,EAAS35C,EAAa,UAAU,GAChC45C,EAAS55C,EAAa,UAE5B,IAAI+L,EAAa7B,EAAc6B,aAE3B8tC,EAAiB,IAAAvrC,GAAW,KAAXA,GAAoB,CAACU,EAAYhJ,MAC3C+F,EAAWhL,IAAIiF,KAGtB8zC,EAAsB,IAAAxrC,GAAW,KAAXA,GAAoBpO,GAAiC,WAAvBA,EAAOa,IAAI,UAC/Dg5C,EAAmB,IAAAzrC,GAAW,KAAXA,GAAoBpO,GAAiC,WAAvBA,EAAOa,IAAI,UAEhE,OACE,yBAAKG,UAAU,oBAET44C,EAAoB7qC,MAAQ,0BAAM+qC,SAAW36C,KAAK46C,YAEhD,IAAAH,GAAmB,KAAnBA,GAAyB,CAAC55C,EAAQE,IACzB,kBAACmsB,EAAQ,CACdvmB,IAAK5F,EACLF,OAAQA,EACRE,KAAMA,EACNJ,aAAcA,EACdssB,aAAcjtB,KAAKitB,aACnBvgB,WAAYA,EACZ6T,aAAcA,MAEf0I,UAEL,yBAAKpnB,UAAU,oBAEX44C,EAAoB7qC,OAAS4qC,EAAe5qC,KAAO,kBAAC2qC,EAAM,CAAC14C,UAAU,qBAAqBkxB,QAAU/yB,KAAK66C,aAAa,UACtH,kBAACN,EAAM,CAAC/4C,KAAK,SAASK,UAAU,gCAA8B,aAEhE,kBAAC04C,EAAM,CAAC14C,UAAU,8BAA8BkxB,QAAU/yB,KAAKkyC,OAAO,WAM1EwI,GAAoBA,EAAiB9qC,KAAO,6BAC5C,yBAAK/N,UAAU,aACb,6KACA,qHAGE,UAAAoN,GAAW,KAAXA,GAAoBpO,GAAiC,WAAvBA,EAAOa,IAAI,WAAqB,QACtD,CAACb,EAAQE,IACL,yBAAK4F,IAAM5F,GACjB,kBAACu5C,EAAM,CAAC5tC,WAAaA,EACb7L,OAASA,EACTE,KAAOA,OAGjBkoB,WAEC,KAKjB,ECpHa,MAAM6wB,WAAcp3C,IAAAA,UAUjChC,SACE,IAAI,OACFG,EAAM,KACNE,EAAI,aACJJ,EAAY,aACZssB,EAAY,WACZvgB,EAAU,aACV6T,GACEvgB,KAAKQ,MACT,MAAMs6C,EAAan6C,EAAa,cAC1Bo6C,EAAYp6C,EAAa,aAE/B,IAAIq6C,EAEJ,MAAMx5C,EAAOX,EAAOa,IAAI,QAExB,OAAOF,GACL,IAAK,SAAUw5C,EAAS,kBAACF,EAAU,CAACn0C,IAAM5F,EACRF,OAASA,EACTE,KAAOA,EACPwf,aAAeA,EACf7T,WAAaA,EACb/L,aAAeA,EACfyf,SAAW6M,IAC3C,MACF,IAAK,QAAS+tB,EAAS,kBAACD,EAAS,CAACp0C,IAAM5F,EACRF,OAASA,EACTE,KAAOA,EACPwf,aAAeA,EACf7T,WAAaA,EACb/L,aAAeA,EACfyf,SAAW6M,IACzC,MACF,QAAS+tB,EAAS,yBAAKr0C,IAAM5F,GAAM,oCAAoCS,GAGzE,OAAQ,yBAAKmF,IAAM,GAAE5F,UACjB
i6C,EAEN,EClDa,MAAMr6B,WAAkBje,IAAAA,UAMrChC,SACE,IAAI,MAAE0D,GAAUpE,KAAKQ,MAEjBgI,EAAQpE,EAAM1C,IAAI,SAClB+G,EAAUrE,EAAM1C,IAAI,WACpBiD,EAASP,EAAM1C,IAAI,UAEvB,OACE,yBAAKG,UAAU,UACb,2BAAK8C,EAAM,IAAK6D,GAChB,8BAAQC,GAGd,ECnBa,MAAMqyC,WAAmBp4C,IAAAA,UAUtCC,YAAYnC,EAAOoC,GACjBC,MAAMrC,EAAOoC,GAAQ,sBAiBZqJ,IACT,IAAI,SAAEmU,GAAapgB,KAAKQ,MACpBuN,EAAQ9B,EAAEpI,OAAOkK,MACjB0zB,EAAW,IAAc,CAAC,EAAGzhC,KAAKmD,MAAO,CAAE4K,MAAOA,IAEtD/N,KAAKuD,SAASk+B,GACdrhB,EAASqhB,EAAS,IAtBlB,IAAI,KAAE1gC,EAAI,OAAEF,GAAWb,KAAKQ,MACxBuN,EAAQ/N,KAAKsgB,WAEjBtgB,KAAKmD,MAAQ,CACXpC,KAAMA,EACNF,OAAQA,EACRkN,MAAOA,EAEX,CAEAuS,WACE,IAAI,KAAEvf,EAAI,WAAE2L,GAAe1M,KAAKQ,MAEhC,OAAOkM,GAAcA,EAAWoB,MAAM,CAAC/M,EAAM,SAC/C,CAWAL,SAAU,IAAD,IACP,IAAI,OAAEG,EAAM,aAAEF,EAAY,aAAE4f,EAAY,KAAExf,GAASf,KAAKQ,MACxD,MAAMggB,EAAQ7f,EAAa,SACrB8f,EAAM9f,EAAa,OACnB+f,EAAM/f,EAAa,OACnBggB,EAAYhgB,EAAa,aACzB+D,EAAW/D,EAAa,YAAY,GACpCigB,EAAajgB,EAAa,cAAc,GAC9C,IAAIoN,EAAQ/N,KAAKsgB,WACbhI,EAAS,MAAAiI,EAAanG,aAAW,QAASjC,GAAOA,EAAIzW,IAAI,YAAcX,IAE3E,OACE,6BACE,4BACE,8BAAQA,GAAQF,EAAOa,IAAI,SAAgB,YAC3C,kBAACkf,EAAU,CAACpQ,KAAM,CAAE,sBAAuBzP,MAE3CgN,GAAS,0CACX,kBAAC0S,EAAG,KACF,kBAAC/b,EAAQ,CAACC,OAAS9D,EAAOa,IAAI,kBAEhC,kBAAC+e,EAAG,KACF,oCAAS,8BAAQ5f,EAAOa,IAAI,WAE9B,kBAAC+e,EAAG,KACF,kCAAO,8BAAQ5f,EAAOa,IAAI,SAE5B,kBAAC+e,EAAG,KACF,yCAEE1S,EAAQ,0CACA,kBAAC2S,EAAG,KAAC,kBAACF,EAAK,CAAChf,KAAK,OAAO4e,SAAWpgB,KAAKogB,SAAWW,WAAS,MAItE,MAAAzI,EAAO9I,YAAU,QAAM,CAACpL,EAAOuC,IACtB,kBAACga,EAAS,CAACvc,MAAQA,EACRuC,IAAMA,MAKlC,EC9Ea,MAAMo0C,WAAkBr4C,IAAAA,UAUrCC,YAAYnC,EAAOoC,GACjBC,MAAMrC,EAAOoC,GAAQ,sBAqBZqJ,IACT,IAAI,SAAEmU,GAAapgB,KAAKQ,OACpB,MAAEuN,EAAK,KAAEhN,GAASkL,EAAEpI,OAEpBwc,EAAWrgB,KAAKmD,MAAM4K,MAC1BsS,EAAStf,GAAQgN,EAEjB/N,KAAKuD,SAAS,CAAEwK,MAAOsS,IAEvBD,EAASpgB,KAAKmD,MAAM,IA7BpB,IAAI,OAAEtC,EAAQE,KAAAA,GAASf,KAAKQ,MAGxBqI,EADQ7I,KAAKsgB,WACIzX,SAErB7I,KAAKmD,MAAQ,CACXpC,KAAMA,EACNF,OAAQA,EACRkN,MAAQlF,EAAgB,CACtBA,SAAUA,GADO,CAAC,EAIxB,CAEAyX,WACE,IAAI,WAAE5T,EAAU,KAAE3L,GAASf,KAAKQ,MAEhC,OAAOkM,GAAcA,EAAWoB,MAAM,CAAC/M,EAAM,WAAa,CAAC,CAC7D,CAcAL,SAAU,IAAD,IACP,IAAI,OAAEG,EAAM,aAAEF,EAAY,KAAEI,EAAI,aAAEwf,GAAiBvgB,KAAKQ,MACxD,MAAMggB,EAAQ7f,EAAa,SACrB8f,EAAM9f,EAAa,OACnB+f,EAAM/f,EAAa,OACnBggB,EAAYhgB,EAAa,aACzBigB,EAAajgB,EAAa,cAAc,GACxC+D,EAAW/D,EAAa,YAAY,GAC1C,IAAIkI,EAAW7I,KAAKsgB,WAAWzX,SAC3ByP,EAAS,MAAAiI,EAAanG,aAAW,QAASjC,GAAOA,EAAIzW,IAAI,YAAcX,IAE3E,OACE,6BACE,kDAAuB,kBAAC6f,EAAU,CAACpQ,KAAM,CAAE,sBAAuBzP,MAChE8H,GAAY,0CACd,kBAAC4X,EAAG,KACF,kBAAC/b,EAAQ,CAACC,OAAS9D,EAAOa,IAAI,kBAEhC,kBAAC+e,EAAG,KACF,4CAEE5X,EAAW,kCAASA,EAAQ,KACjB,kBAAC6X,EAAG,KAAC,kBAACF,EAAK,CAAChf,KAAK,OAAOV,SAAS,WAAWC,KAAK,WAAWqf,SAAWpgB,KAAKogB,SAAWW,WAAS,MAG/G,kBAACN,EAAG,KACF,4CAEI5X,EAAW,0CACA,kBAAC6X,EAAG,KAAC,kBAACF,EAAK,CAACQ,aAAa,eACbjgB,KAAK,WACLS,KAAK,WACL4e,SAAWpgB,KAAKogB,aAI3C,MAAA9H,EAAO9I,YAAU,QAAM,CAACpL,EAAOuC,IACtB,kBAACga,EAAS,CAACvc,MAAQA,EACRuC,IAAMA,MAKlC,EClFa,SAASwe,GAAQ3kB,GAC9B,MAAM,QAAEwnB,EAAO,UAAEizB,EAAS,aAAEt6C,EAAY,WAAEC,GAAeJ,EAEnDkE,EAAW/D,EAAa,YAAY,GACpCskB,EAAgBtkB,EAAa,iBAEnC,OAAIqnB,EAGF,yBAAKnmB,UAAU,WACZmmB,EAAQtmB,IAAI,eACX,6BAASG,UAAU,oBACjB,yBAAKA,UAAU,2BAAyB,uBACxC,2BACE,kBAAC6C,EAAQ,CAACC,OAAQqjB,EAAQtmB,IAAI,mBAGhC,KACHu5C,GAAajzB,EAAQtB,IAAI,SACxB,6BAAS7kB,UAAU,oBACjB,yBAAKA,UAAU,2BAAyB,iBACxC,kBAACojB,EAAa,CAACrkB,WAAaA,EAAamN,OAAOkV,EAAAA,EAAAA,IAAU+E,EAAQtmB,IAAI,aAEtE,MAjBY,IAoBtB,C,0BC1Be,MAAMw5C,WAAuBx4C,IAAAA,cAAqB,cAAD,wDAsBlD,SAACiE,GAA6C,IAAxC,kBAAEw0C,GAAoB,GAAU,UAAH,6CAAG,CAAC,EACd,mBAAxB,EAAK36C,MAAMqnB,UACpB,EAAKrnB,MAAMqnB,SAASlhB,EAAK,CACvBw0C,qBAGN,IAAC,0BAEclvC,IACb,GAAmC,mBAAxBjM,KAAKQ,MA
AMqnB,SAAyB,CAC7C,MACMlhB,EADUsF,EAAEpI,OAAOu3C,gBAAgB,GACrB/yB,aAAa,SAEjCroB,KAAKq7C,UAAU10C,EAAK,CAClBw0C,mBAAmB,GAEvB,KACD,+BAEmB,KAClB,MAAM,SAAEzzB,EAAQ,kBAAE4zB,GAAsBt7C,KAAKQ,MAEvC+6C,EAAyB7zB,EAAShmB,IAAI45C,GAEtCE,EAAmB9zB,EAAS7X,SAASK,QACrCurC,EAAe/zB,EAAShmB,IAAI85C,GAElC,OAAOD,GAA0BE,GAAgB,KAAI,CAAC,EAAE,GACzD,CAEDp3C,oBAOE,MAAM,SAAEwjB,EAAQ,SAAEH,GAAa1nB,KAAKQ,MAEpC,GAAwB,mBAAbqnB,EAAyB,CAClC,MAAM4zB,EAAe/zB,EAASxX,QACxBwrC,EAAkBh0B,EAASi0B,MAAMF,GAEvCz7C,KAAKq7C,UAAUK,EAAiB,CAC9BP,mBAAmB,GAEvB,CACF,CAEA93C,iCAAiCC,GAC/B,MAAM,kBAAEg4C,EAAiB,SAAE5zB,GAAapkB,EACxC,GAAIokB,IAAa1nB,KAAKQ,MAAMknB,WAAaA,EAAShB,IAAI40B,GAAoB,CAGxE,MAAMG,EAAe/zB,EAASxX,QACxBwrC,EAAkBh0B,EAASi0B,MAAMF,GAEvCz7C,KAAKq7C,UAAUK,EAAiB,CAC9BP,mBAAmB,GAEvB,CACF,CAEAz6C,SACE,MAAM,SACJgnB,EAAQ,kBACR4zB,EAAiB,gBACjBM,EAAe,yBACfC,EAAwB,WACxBC,GACE97C,KAAKQ,MAET,OACE,yBAAKqB,UAAU,mBAEXi6C,EACE,0BAAMj6C,UAAU,kCAAgC,cAC9C,KAEN,4BACEA,UAAU,0BACVue,SAAUpgB,KAAK+7C,aACfhuC,MACE8tC,GAA4BD,EACxB,sBACCN,GAAqB,IAG3BO,EACC,4BAAQ9tC,MAAM,uBAAqB,oBACjC,KACH,IAAA2Z,GAAQ,KAARA,GACM,CAACM,EAASg0B,IAEX,4BACEr1C,IAAKq1C,EACLjuC,MAAOiuC,GAENh0B,EAAQtmB,IAAI,YAAcs6C,KAIhCxsC,YAIX,EACD,KAjIoB0rC,GAAc,eAUX,CACpBxzB,SAAU1S,IAAAA,IAAO,CAAC,GAClB6S,SAAU,sCAAIjU,EAAI,yBAAJA,EAAI,uBAChBvN,QAAQiW,IAEL,8DACE1I,EACJ,EACH0nC,kBAAmB,KACnBQ,YAAY,ICEhB,MAAMG,GAAsB9K,GAC1B/hC,EAAAA,KAAAA,OAAY+hC,GAASA,GAAQluB,EAAAA,EAAAA,IAAUkuB,GAE1B,MAAMjsB,WAAoCxiB,IAAAA,cAiCvDC,YAAYnC,GAAQ,IAAD,EACjBqC,MAAMrC,GAAM,iDAuBiB,KAC7B,MAAM,iBAAE07C,GAAqBl8C,KAAKQ,MAElC,OAAQR,KAAKmD,MAAM+4C,KAAqB9tC,EAAAA,EAAAA,QAAOuI,UAAU,IAC1D,0CAE8BuL,IAC7B,MAAM,iBAAEg6B,GAAqBl8C,KAAKQ,MAElC,OAAOR,KAAKm8C,sBAAsBD,EAAkBh6B,EAAI,IACzD,mCAEuB,CAAC5E,EAAW4E,KAClC,MACMk6B,GADuBp8C,KAAKmD,MAAMma,KAAclP,EAAAA,EAAAA,QACJiuC,UAAUn6B,GAC5D,OAAOliB,KAAKuD,SAAS,CACnB,CAAC+Z,GAAY8+B,GACb,IACH,mDAEuC,KACtC,MAAM,sBAAEx0B,GAA0B5nB,KAAKQ,MAIvC,OAFyBR,KAAKs8C,4BAEF10B,CAAqB,IAClD,iCAEqB,CAAC20B,EAAY/7C,KAGjC,MAAM,SAAEknB,GAAalnB,GAASR,KAAKQ,MACnC,OAAOy7C,IACJv0B,IAAYtZ,EAAAA,EAAAA,KAAI,CAAC,IAAIN,MAAM,CAACyuC,EAAY,UAC1C,IACF,qCAEyB/7C,IAGxB,MAAM,WAAEmnB,GAAennB,GAASR,KAAKQ,MACrC,OAAOR,KAAKw8C,oBAAoB70B,EAAYnnB,GAASR,KAAKQ,MAAM,IACjE,+BAEmB,SAACmG,GAAmD,IAA9C,kBAAEw0C,GAAsB,UAAH,6CAAG,CAAC,EACjD,MAAM,SACJtzB,EAAQ,YACRC,EAAW,sBACXF,EAAqB,kBACrBnE,GACE,EAAKjjB,OACH,oBAAEi8C,GAAwB,EAAKC,+BAE/BC,EAAmB,EAAKH,oBAAoB71C,GAElD,GAAY,wBAARA,EAEF,OADAmhB,EAAYm0B,GAAoBQ,IACzB,EAAKG,6BAA6B,CACvCC,yBAAyB,IAI7B,GAAwB,mBAAbh1B,EAAyB,CAAC,IAAD,uBAlBmBi1B,EAAS,iCAATA,EAAS,kBAmB9Dj1B,EAASlhB,EAAK,CAAEw0C,wBAAwB2B,EAC1C,CAEA,EAAKF,6BAA6B,CAChCG,oBAAqBJ,EACrBE,wBACG1B,GAAqB13B,KACnBmE,GAAyBA,IAA0B+0B,IAItDxB,GAEuB,mBAAhBrzB,GACTA,EAAYm0B,GAAoBU,GAEpC,IApGE,MAAMA,EAAmB38C,KAAKs8C,0BAE9Bt8C,KAAKmD,MAAQ,CAIX,CAAC3C,EAAM07C,mBAAmB9tC,EAAAA,EAAAA,KAAI,CAC5BquC,oBAAqBz8C,KAAKQ,MAAMonB,sBAChCm1B,oBAAqBJ,EACrBE,wBAEE78C,KAAKQ,MAAMijB,mBACXzjB,KAAKQ,MAAMonB,wBAA0B+0B,IAG7C,CAEAK,uBACEh9C,KAAKQ,MAAMid,+BAA8B,EAC3C,CAmFApa,iCAAiCC,GAG/B,MACEskB,sBAAuBvH,EAAQ,SAC/BqH,EAAQ,SACRG,EAAQ,kBACRpE,GACEngB,GAEE,oBACJm5C,EAAmB,oBACnBM,GACE/8C,KAAK08C,+BAEHO,EAA0Bj9C,KAAKw8C,oBACnCl5C,EAAUqkB,WACVrkB,GAGI45C,EAA2B,IAAAx1B,GAAQ,KAARA,GAC9BM,GACCA,EAAQtmB,IAAI,WAAa2e,IAGzB4C,EAAAA,EAAAA,IAAU+E,EAAQtmB,IAAI,YAAc2e,IAGxC,GAAI68B,EAAyBttC,KAAM,CACjC,IAAIjJ,EAGFA,EAFCu2C,EAAyBx2B,IAAIpjB,EAAUqkB,YAElCrkB,EAAUqkB,WAEVu1B,EAAyBrtC,SAASK,QAE1C2X,EAASlhB,EAAK,CACZw0C,mBAAmB,GAEvB,MACE96B,IAAargB,KAAKQ,MAAMonB,uBACxBvH,IAAao8B,GACbp8B,IAAa08B,IAEb/8C,KAAKQ,MAAMid,+BAA8B,GACzCzd,KAAKm8C,sBAAsB74C,EAAU44C,iBAAkB,CACrDO,oBAAqBn5C,EAAUskB,sBAC/Bi1B,wBACEp5B,G
AAqBpD,IAAa48B,IAG1C,CAEAv8C,SACE,MAAM,sBACJknB,EAAqB,SACrBF,EAAQ,WACRC,EAAU,aACVhnB,EAAY,kBACZ8iB,GACEzjB,KAAKQ,OACH,oBACJu8C,EAAmB,oBACnBN,EAAmB,wBACnBI,GACE78C,KAAK08C,+BAEHxB,EAAiBv6C,EAAa,kBAEpC,OACE,kBAACu6C,EAAc,CACbxzB,SAAUA,EACV4zB,kBAAmB3zB,EACnBE,SAAU7nB,KAAKm9C,kBACftB,2BACIY,GAAuBA,IAAwBM,EAEnDnB,qBAC6Bz5C,IAA1BylB,GACCi1B,GACAj1B,IAA0B5nB,KAAKs8C,2BACjC74B,GAIR,EACD,KAhOoByB,GAA2B,eAcxB,CACpBzB,mBAAmB,EACnBiE,UAAUtZ,EAAAA,EAAAA,KAAI,CAAC,GACf8tC,iBAAkB,yBAClBz+B,8BAA+B,OAG/BoK,SAAU,sCAAIjU,EAAI,yBAAJA,EAAI,uBAChBvN,QAAQiW,IACN,sEACG1I,EACJ,EACHkU,YAAa,sCAAIlU,EAAI,yBAAJA,EAAI,uBACnBvN,QAAQiW,IACN,yEACG1I,EACJ,I,2FC3DQ,MAAM0mC,WAAe53C,IAAAA,UAelCC,YAAYnC,EAAOoC,GACjBC,MAAMrC,EAAOoC,GAAQ,mBA0BdqJ,IACPA,EAAEumB,iBACF,IAAI,YAAE5qB,GAAgB5H,KAAKQ,MAE3BoH,EAAYJ,iBAAgB,EAAM,IACnC,uBAEU,KACT,IAAI,YAAEI,EAAW,WAAEK,EAAU,WAAErH,EAAU,cAAEiK,EAAa,cAAED,GAAkB5K,KAAKQ,MAC7EmR,EAAU/Q,IACVw8C,EAAcvyC,EAAcjK,aAEhCqH,EAAWqP,MAAM,CAAC/O,OAAQxH,KAAKS,KAAM,OAAQmD,OAAQ,SCtD1C,SAAkB,GAAgF,IAA7E,KAAEuD,EAAI,YAAEN,EAAW,WAAEK,EAAU,QAAE0J,EAAO,YAAEyrC,EAAY,CAAC,EAAC,cAAEz6B,GAAe,GACvG,OAAE9hB,EAAM,OAAEwI,EAAM,KAAEtI,EAAI,SAAEiI,GAAad,EACrCG,EAAOxH,EAAOa,IAAI,QAClBsI,EAAQ,GAEZ,OAAQ3B,GACN,IAAK,WAEH,YADAT,EAAYgB,kBAAkBV,GAGhC,IAAK,cAYL,IAAK,oBACL,IAAK,qBAGH,YADAN,EAAYqC,qBAAqB/B,GAXnC,IAAK,aAcL,IAAK,oBACL,IAAK,qBAEH8B,EAAMsF,KAAK,sBACX,MAdF,IAAK,WACHtF,EAAMsF,KAAK,uBAgBS,iBAAbtG,GACTgB,EAAMsF,KAAK,aAAetL,mBAAmBgF,IAG/C,IAAImB,EAAcwH,EAAQ0rC,kBAG1B,QAA2B,IAAhBlzC,EAOT,YANAlC,EAAWK,WAAY,CACrBC,OAAQxH,EACR4D,OAAQ,aACR6D,MAAO,QACPC,QAAS,6FAIbuB,EAAMsF,KAAK,gBAAkBtL,mBAAmBmG,IAEhD,IAAImzC,EAAc,GAOlB,GANI,IAAcj0C,GAChBi0C,EAAcj0C,EACL2L,IAAAA,KAAAA,OAAe3L,KACxBi0C,EAAcj0C,EAAO4f,WAGnBq0B,EAAY35C,OAAS,EAAG,CAC1B,IAAI45C,EAAiBH,EAAYG,gBAAkB,IAEnDvzC,EAAMsF,KAAK,SAAWtL,mBAAmBs5C,EAAYh0C,KAAKi0C,IAC5D,CAEA,IAAIp6C,GAAQyG,EAAAA,EAAAA,IAAK,IAAI4rB,MAQrB,GANAxrB,EAAMsF,KAAK,SAAWtL,mBAAmBb,SAER,IAAtBi6C,EAAYI,OACrBxzC,EAAMsF,KAAK,SAAWtL,mBAAmBo5C,EAAYI,SAGzC,sBAATn1C,GAAyC,uBAATA,GAA0C,eAATA,IAA0B+0C,EAAYK,kCAAmC,CAC3I,MAAMrzC,GAAesnC,EAAAA,EAAAA,MACfgM,GAAgB7L,EAAAA,EAAAA,IAAoBznC,GAE1CJ,EAAMsF,KAAK,kBAAoBouC,GAC/B1zC,EAAMsF,KAAK,8BAIXpH,EAAKkC,aAAeA,CACxB,CAEA,IAAI,4BAAEU,GAAgCsyC,EAEtC,IAAK,IAAIz2C,KAAOmE,EAA6B,CACmB,IAAD,OAAb,IAArCA,EAA4BnE,IACrCqD,EAAMsF,KAAK,OAAC3I,EAAKmE,EAA4BnE,KAAK,OAAK3C,oBAAoBsF,KAAK,KAEpF,CAEA,MAAMsV,EAAmB/d,EAAOa,IAAI,oBACpC,IAAIi8C,EAGFA,EAFEh7B,EAE0BzX,MAC1BxH,EAAAA,EAAAA,IAAYkb,GACZ+D,GACA,GACA1f,YAE0BS,EAAAA,EAAAA,IAAYkb,GAE1C,IAKIkB,EALA/c,EAAM,CAAC46C,EAA2B3zC,EAAMV,KAAK,MAAMA,MAAwC,IAAnC,KAAAsV,GAAgB,KAAhBA,EAAyB,KAAc,IAAM,KAOvGkB,EADW,aAATzX,EACST,EAAYI,qBACdo1C,EAAYQ,0CACVh2C,EAAY4C,2CAEZ5C,EAAYsC,kCAGzBtC,EAAYkF,UAAU/J,EAAK,CACzBmF,KAAMA,EACN/E,MAAOA,EACPgH,YAAaA,EACb2V,SAAUA,EACV+9B,MAAO51C,EAAWK,YAEtB,CDxEIw1C,CAAgB,CACd51C,KAAMlI,KAAKmD,MACXwf,cAAe/X,EAAcI,qBAAqBJ,EAAcK,kBAChErD,cACAK,aACA0J,UACAyrC,eACA,IACH,2BAEenxC,IAAO,IAAD,IACpB,IAAI,OAAEpI,GAAWoI,GACb,QAAE8xC,GAAYl6C,EACduF,EAAQvF,EAAOm6C,QAAQjwC,MAE3B,GAAKgwC,IAAiD,IAAtC,OAAA/9C,KAAKmD,MAAMkG,QAAM,OAASD,GAAgB,CAAC,IAAD,EACxD,IAAI60C,EAAY,MAAAj+C,KAAKmD,MAAMkG,QAAM,OAAQ,CAACD,IAC1CpJ,KAAKuD,SAAS,CAAE8F,OAAQ40C,GAC1B,MAAO,IAAMF,GAAW,OAAA/9C,KAAKmD,MAAMkG,QAAM,OAASD,IAAU,EAAG,CAAC,IAAD,EAC7DpJ,KAAKuD,SAAS,CAAE8F,OAAQ,MAAArJ,KAAKmD,MAAMkG,QAAM,QAASgG,GAAQA,IAAQjG,KACpE,KACD,2BAEe6C,IACd,IAAMpI,QAAWm6C,SAAU,KAAEj9C,GAAM,MAAEgN,IAAY9B,EAC7C9I,EAAQ,CACV,CAACpC,GAAOgN,GAGV/N,KAAKuD,SAASJ,EAAM,IACrB,0BAEc8I,IACc,IAAD,EAAtBA,EAAEpI,OAAOm6C,QAAQ1jC,IACnBta,KAAKuD,SAAS,CACZ8F,OAAQ,KAAW,OAACrJ,KAAKQ,MAAMK,OAAOa,IAAI,kBAAoB1B,KAAKQ,MAA
MK,OAAOa,IAAI,WAAW,KAAF,MAG/F1B,KAAKuD,SAAS,CAAE8F,OAAQ,IAC1B,IACD,oBAEQ4C,IACPA,EAAEumB,iBACF,IAAI,YAAE5qB,EAAW,WAAEK,EAAU,KAAElH,GAASf,KAAKQ,MAE7CyH,EAAWqP,MAAM,CAAC/O,OAAQxH,EAAMS,KAAM,OAAQmD,OAAQ,SACtDiD,EAAYG,wBAAwB,CAAEhH,GAAO,IArF7C,IAAMA,KAAAA,EAAI,OAAEF,EAAM,WAAE6L,EAAY7B,cAAAA,GAAkB7K,KAAKQ,MACnD0H,EAAOwE,GAAcA,EAAWhL,IAAIX,GACpCq8C,EAAcvyC,EAAcjK,cAAgB,CAAC,EAC7CiI,EAAWX,GAAQA,EAAKxG,IAAI,aAAe,GAC3CsH,EAAWd,GAAQA,EAAKxG,IAAI,aAAe07C,EAAYp0C,UAAY,GACnEC,EAAef,GAAQA,EAAKxG,IAAI,iBAAmB07C,EAAYn0C,cAAgB,GAC/EF,EAAeb,GAAQA,EAAKxG,IAAI,iBAAmB,QACnD2H,EAASnB,GAAQA,EAAKxG,IAAI,WAAa07C,EAAY/zC,QAAU,GAC3C,iBAAXA,IACTA,EAASA,EAAOkL,MAAM6oC,EAAYG,gBAAkB,MAGtDv9C,KAAKmD,MAAQ,CACX+6C,QAASd,EAAYc,QACrBn9C,KAAMA,EACNF,OAAQA,EACRwI,OAAQA,EACRL,SAAUA,EACVC,aAAcA,EACdJ,SAAUA,EACVC,SAAU,GACVC,aAAcA,EAElB,CAiEArI,SAAU,IAAD,IACP,IAAI,OACFG,EAAM,aAAEF,EAAY,cAAEkK,EAAa,aAAE0V,EAAY,KAAExf,EAAI,cAAER,GACvDP,KAAKQ,MACT,MAAMggB,EAAQ7f,EAAa,SACrB8f,EAAM9f,EAAa,OACnB+f,EAAM/f,EAAa,OACnB45C,EAAS55C,EAAa,UACtBggB,EAAYhgB,EAAa,aACzBigB,EAAajgB,EAAa,cAAc,GACxC+D,EAAW/D,EAAa,YAAY,GACpCw9C,EAAmBx9C,EAAa,qBAEhC,OAAEuB,GAAW3B,EAEnB,IAAI69C,EAAUl8C,IAAWrB,EAAOa,IAAI,oBAAsB,KAG1D,MAAM28C,EAAqB,WACrBC,EAAqB,WACrBC,EAAwBr8C,IAAYk8C,EAAU,qBAAuB,oBAAuB,aAC5FI,EAAwBt8C,IAAYk8C,EAAU,qBAAuB,oBAAuB,cAElG,IACIK,KADc5zC,EAAcjK,cAAgB,CAAC,GACb68C,kCAEhCp1C,EAAOxH,EAAOa,IAAI,QAClBg9C,EAAgBr2C,IAASk2C,GAAyBE,EAAkBp2C,EAAO,aAAeA,EAC1FgB,EAASxI,EAAOa,IAAI,kBAAoBb,EAAOa,IAAI,UAEnD2O,IADiBxF,EAAc6B,aAAahL,IAAIX,GAEhDuX,EAAS,MAAAiI,EAAanG,aAAW,QAASjC,GAAOA,EAAIzW,IAAI,YAAcX,IACvEqH,GAAW,IAAAkQ,GAAM,KAANA,GAAeH,GAA6B,eAAtBA,EAAIzW,IAAI,YAA4BkO,KACrEkP,EAAcje,EAAOa,IAAI,eAE7B,OACE,6BACE,4BAAKX,EAAI,aAAa29C,EAAa,KAAI,kBAAC99B,EAAU,CAACpQ,KAAM,CAAE,sBAAuBzP,MAC/Ef,KAAKmD,MAAM+6C,QAAiB,4CAAmBl+C,KAAKmD,MAAM+6C,QAAO,KAA5C,KACtBp/B,GAAe,kBAACpa,EAAQ,CAACC,OAAS9D,EAAOa,IAAI,iBAE7C2O,GAAgB,0CAEhB+tC,GAAW,kDAAuB,8BAAQA,KACxC/1C,IAASg2C,GAAsBh2C,IAASk2C,IAA2B,iDAAsB,8BAAQ19C,EAAOa,IAAI,uBAC5G2G,IAASi2C,GAAsBj2C,IAASk2C,GAAyBl2C,IAASm2C,IAA2B,wCAAa,kCAAS39C,EAAOa,IAAI,cAC1I,uBAAGG,UAAU,QAAM,SAAO,8BAAQ68C,IAGhCr2C,IAASi2C,EAAqB,KAC1B,kBAAC79B,EAAG,KACJ,kBAACA,EAAG,KACF,2BAAOsI,QAAQ,kBAAgB,aAE7B1Y,EAAe,kCAASrQ,KAAKmD,MAAM0F,SAAQ,KACvC,kBAAC6X,EAAG,CAACi+B,OAAQ,GAAIC,QAAS,IAC1B,2BAAOnc,GAAG,iBAAiBjhC,KAAK,OAAO,YAAU,WAAW4e,SAAWpgB,KAAK6+C,cAAgB99B,WAAS,MAO7G,kBAACN,EAAG,KACF,2BAAOsI,QAAQ,kBAAgB,aAE7B1Y,EAAe,0CACX,kBAACqQ,EAAG,CAACi+B,OAAQ,GAAIC,QAAS,IAC1B,2BAAOnc,GAAG,iBAAiBjhC,KAAK,WAAW,YAAU,WAAW4e,SAAWpgB,KAAK6+C,kBAIxF,kBAACp+B,EAAG,KACF,2BAAOsI,QAAQ,iBAAe,gCAE5B1Y,EAAe,kCAASrQ,KAAKmD,MAAM4F,aAAY,KAC3C,kBAAC2X,EAAG,CAACi+B,OAAQ,GAAIC,QAAS,IAC1B,4BAAQnc,GAAG,gBAAgB,YAAU,eAAeriB,SAAWpgB,KAAK6+C,eAClE,4BAAQ9wC,MAAM,SAAO,wBACrB,4BAAQA,MAAM,gBAAc,qBAQxC1F,IAASm2C,GAAyBn2C,IAASg2C,GAAsBh2C,IAASk2C,GAAyBl2C,IAASi2C,MAC3GjuC,GAAgBA,GAAgBrQ,KAAKmD,MAAM6F,WAAa,kBAACyX,EAAG,KAC7D,2BAAOsI,QAAQ,aAAW,cAExB1Y,EAAe,0CACA,kBAACqQ,EAAG,CAACi+B,OAAQ,GAAIC,QAAS,IACxB,kBAACT,EAAgB,CAAC1b,GAAG,YACdjhC,KAAK,OACLV,SAAWuH,IAASi2C,EACpBx3B,aAAe9mB,KAAKmD,MAAM6F,SAC1B,YAAU,WACVoX,SAAWpgB,KAAK6+C,mBAOzCx2C,IAASm2C,GAAyBn2C,IAASk2C,GAAyBl2C,IAASi2C,IAAuB,kBAAC79B,EAAG,KACzG,2BAAOsI,QAAQ,iBAAe,kBAE5B1Y,EAAe,0CACA,kBAACqQ,EAAG,CAACi+B,OAAQ,GAAIC,QAAS,IACxB,kBAACT,EAAgB,CAAC1b,GAAG,gBACd3b,aAAe9mB,KAAKmD,MAAM8F,aAC1BzH,KAAK,WACL,YAAU,eACV4e,SAAWpgB,KAAK6+C,mBAQ3CxuC,GAAgBhH,GAAUA,EAAOuG,KAAO,yBAAK/N,UAAU,UACtD,sCAEE,uBAAGkxB,QAAS/yB,KAAK8+C,aAAc,YAAU,GAAK,cAC9C,uBAAG/rB,QAAS/yB,KAAK8+C,cAAa,gBAE9B,IAAAz1C,GAAM,KAANA,GAAW,CAACyV,EAAa/d,KAAU,IAAD,EAClC,OACE,kBAAC0f,EAAG,CAAC9Z,IAAM5
F,GACT,yBAAKc,UAAU,YACb,kBAAC2e,EAAK,CAAC,aAAazf,EACd0hC,GAAK,GAAE1hC,KAAQsH,cAAiBrI,KAAKmD,MAAMpC,OAC1CusB,SAAWjd,EACX0tC,QAAU,OAAA/9C,KAAKmD,MAAMkG,QAAM,OAAUtI,GACrCS,KAAK,WACL4e,SAAWpgB,KAAK++C,gBAClB,2BAAOh2B,QAAU,GAAEhoB,KAAQsH,cAAiBrI,KAAKmD,MAAMpC,QACrD,0BAAMc,UAAU,SAChB,yBAAKA,UAAU,QACb,uBAAGA,UAAU,QAAQd,GACrB,uBAAGc,UAAU,eAAeid,MAInC,IAELmK,WAEE,KAIT,MAAA3Q,EAAO9I,YAAU,QAAM,CAACpL,EAAOuC,IACtB,kBAACga,EAAS,CAACvc,MAAQA,EACRuC,IAAMA,MAG5B,yBAAK9E,UAAU,oBACbuG,IACEiI,EAAe,kBAACkqC,EAAM,CAAC14C,UAAU,+BAA+BkxB,QAAU/yB,KAAK8H,QAAQ,UACzF,kBAACyyC,EAAM,CAAC14C,UAAU,+BAA+BkxB,QAAU/yB,KAAK0H,WAAW,cAG3E,kBAAC6yC,EAAM,CAAC14C,UAAU,8BAA8BkxB,QAAU/yB,KAAKkyC,OAAO,UAK9E,EEpRa,MAAM8M,WAAcv9B,EAAAA,UAAW,cAAD,yCAElC,KACP,IAAI,YAAE/P,EAAW,KAAElB,EAAI,OAAElF,GAAWtL,KAAKQ,MACzCkR,EAAYmvB,cAAerwB,EAAMlF,GACjCoG,EAAYovB,aAActwB,EAAMlF,EAAQ,GACzC,CAED5K,SACE,OACE,4BAAQmB,UAAU,qCAAqCkxB,QAAU/yB,KAAK+yB,SAAS,QAInF,ECbF,MAAMksB,GAAW,IAAiB,IAAhB,QAAE11C,GAAS,EAC3B,OACE,6BACE,gDACA,yBAAK1H,UAAU,cAAc0H,GACxB,EAML21C,GAAY,IAAoB,IAAnB,SAAEze,GAAU,EAC7B,OACE,6BACE,gDACA,yBAAK5+B,UAAU,cAAc4+B,EAAQ,OACjC,EAQK,MAAM0e,WAAqBz8C,IAAAA,UAWxC08C,sBAAsB97C,GAGpB,OAAOtD,KAAKQ,MAAMkL,WAAapI,EAAUoI,UACpC1L,KAAKQ,MAAMgQ,OAASlN,EAAUkN,MAC9BxQ,KAAKQ,MAAM8K,SAAWhI,EAAUgI,QAChCtL,KAAKQ,MAAMo4C,yBAA2Bt1C,EAAUs1C,sBACvD,CAEAl4C,SACE,MAAM,SAAEgL,EAAQ,aAAE/K,EAAY,WAAEC,EAAU,uBAAEg4C,EAAsB,cAAEr4C,EAAa,KAAEiQ,EAAI,OAAElF,GAAWtL,KAAKQ,OACnG,mBAAE6+C,EAAkB,uBAAEC,GAA2B1+C,IAEjD2+C,EAAcF,EAAqB9+C,EAAcyjC,kBAAkBxzB,EAAMlF,GAAU/K,EAAcwjC,WAAWvzB,EAAMlF,GAClH+G,EAAS3G,EAAShK,IAAI,UACtBqB,EAAMw8C,EAAY79C,IAAI,OACtB6H,EAAUmC,EAAShK,IAAI,WAAWmL,OAClC2yC,EAAgB9zC,EAAShK,IAAI,iBAC7B+9C,EAAU/zC,EAAShK,IAAI,SACvBoI,EAAO4B,EAAShK,IAAI,QACpB++B,EAAW/0B,EAAShK,IAAI,YACxBg+C,EAAc,IAAYn2C,GAC1Bgb,EAAchb,EAAQ,iBAAmBA,EAAQ,gBAEjDo2C,EAAeh/C,EAAa,gBAC5Bi/C,EAAe,IAAAF,GAAW,KAAXA,GAAgB/4C,IACnC,IAAIk5C,EAAgB,IAAct2C,EAAQ5C,IAAQ4C,EAAQ5C,GAAK2C,OAASC,EAAQ5C,GAChF,OAAO,0BAAM9E,UAAU,aAAa8E,IAAKA,GAAI,IAAGA,EAAG,KAAIk5C,EAAa,IAAS,IAEzEC,EAAqC,IAAxBF,EAAaj8C,OAC1Be,EAAW/D,EAAa,YAAY,GACpC4uB,EAAkB5uB,EAAa,mBAAmB,GAClDo/C,EAAOp/C,EAAa,QAE1B,OACE,6BACI4+C,KAA2C,IAA3BD,GAA8D,SAA3BA,EACjD,kBAAC/vB,EAAe,CAAC9oB,QAAU84C,IAC3B,kBAACQ,EAAI,CAACt5C,QAAU84C,EAAc3+C,WAAaA,KAC7CmC,GAAO,6BACL,yBAAKlB,UAAU,eACb,2CACA,yBAAKA,UAAU,cAAckB,KAInC,+CACA,2BAAOlB,UAAU,wCACf,+BACA,wBAAIA,UAAU,oBACZ,wBAAIA,UAAU,kCAAgC,QAC9C,wBAAIA,UAAU,uCAAqC,aAGrD,+BACE,wBAAIA,UAAU,YACZ,wBAAIA,UAAU,uBACVwQ,EAEAmtC,EAAgB,yBAAK39C,UAAU,yBACb,8CAEF,MAGpB,wBAAIA,UAAU,4BAEV49C,EAAU,kBAAC/6C,EAAQ,CAACC,OAAS,GAA2B,KAAzB+G,EAAShK,IAAI,QAAkB,GAAEgK,EAAShK,IAAI,YAAc,KAAKgK,EAAShK,IAAI,eACnG,KAGVoI,EAAO,kBAAC61C,EAAY,CAACK,QAAUl2C,EACVya,YAAcA,EACdxhB,IAAMA,EACNwG,QAAUA,EACV3I,WAAaA,EACbD,aAAeA,IAC7B,KAGPm/C,EAAa,kBAACb,GAAO,CAAC11C,QAAUq2C,IAAmB,KAGnDhH,GAA0BnY,EAAW,kBAACye,GAAQ,CAACze,SAAWA,IAAgB,SAQ1F,E,eC9HF,MAAMwf,GAA6B,CACjC,MAAO,MAAO,OAAQ,SAAU,UAAW,OAAQ,SAG/CC,GAAyB,IAAAD,IAA0B,KAA1BA,GAAkC,CAAC,UAGnD,MAAME,WAAmBz9C,IAAAA,UAAiB,cAAD,oDAmCjC,CAACiY,EAAQpE,KAC5B,MAAM,cACJhW,EAAa,aACbI,EAAY,cACZiK,EAAa,gBACb+I,EAAe,cACfT,EAAa,WACbtS,GACEZ,KAAKQ,MACHmf,EAAqBhf,EAAa,sBAAsB,GACxD2S,EAAe3S,EAAa,gBAC5B6hC,EAAa7nB,EAAOjZ,IAAI,cAC9B,OACE,kBAAC4R,EAAY,CACX3M,IAAK,aAAe4P,EACpBoE,OAAQA,EACRpE,IAAKA,EACL3L,cAAeA,EACf+I,gBAAiBA,EACjBT,cAAeA,EACftS,WAAYA,EACZD,aAAcA,EACdqW,QAASzW,EAAcwC,OACvB,yBAAKlB,UAAU,yBAEX,IAAA2gC,GAAU,KAAVA,GAAeviB,IACb,MAAMzP,EAAOyP,EAAGve,IAAI,QACd4J,EAAS2U,EAAGve,IAAI,UAChBT,EAAW+T,IAAAA,KAAQ,CAAC,QAASxE,EAAMlF,IAQnC80C,EAAe7/C,EAAc2B,SACjCg+C,GAAyBD,GAE3B,OAAsC,IAAlC,KAAAG,GAAY,KAAZA,EAAqB90C,GA
ChB,KAIP,kBAACqU,EAAkB,CACjBhZ,IAAM,GAAE6J,KAAQlF,IAChBrK,SAAUA,EACVgf,GAAIA,EACJzP,KAAMA,EACNlF,OAAQA,EACRiL,IAAKA,GAAO,IAEf0S,WAGM,GAElB,CA5EDvoB,SACE,IAAI,cACFH,GACEP,KAAKQ,MAET,MAAMia,EAAYla,EAAcsb,mBAEhC,OAAsB,IAAnBpB,EAAU7K,KACJ,+DAIP,6BACI,IAAA6K,GAAS,KAATA,EAAcza,KAAKqgD,oBAAoBp3B,UACvCxO,EAAU7K,KAAO,EAAI,gEAA4C,KAGzE,E,0BC5CK,SAAS0wC,GAAcv9C,GAC5B,OAAOA,EAAImjC,MAAM,qBACnB,CAQO,SAASqa,GAAat1C,EAAgB+L,GAC3C,OAAK/L,EACDq1C,GAAcr1C,IARQlI,EAQ4BkI,GAP7Ci7B,MAAM,UAEP,GAAEtzB,OAAOC,SAASqE,WAAWnU,IAFJA,EAS1B,IAAI,KAAJ,CAAQkI,EAAgB+L,GAASjT,KAHZiT,EAPvB,IAAqBjU,CAW5B,CAEO,SAASy9C,GAASz9C,EAAKiU,GAAsC,IAA7B,eAAE/L,EAAe,IAAO,UAAH,6CAAG,CAAC,EAC9D,IAAKlI,EAAK,OACV,GAAIu9C,GAAcv9C,GAAM,OAAOA,EAE/B,MAAM09C,EAAUF,GAAat1C,EAAgB+L,GAC7C,OAAKspC,GAAcG,GAGZ,IAAI,KAAJ,CAAQ19C,EAAK09C,GAAS18C,KAFpB,IAAI,KAAJ,CAAQhB,EAAK6P,OAAOC,SAAS9O,MAAMA,IAG9C,CAMO,SAAS28C,GAAa39C,EAAKiU,GAAsC,IAA7B,eAAE/L,EAAe,IAAO,UAAH,6CAAG,CAAC,EAClE,IACE,OAAOu1C,GAASz9C,EAAKiU,EAAS,CAAE/L,kBAGlC,CAFE,MACA,MACF,CACF,CC9Be,MAAMqI,WAAqB5Q,IAAAA,UAuBxChC,SACE,MAAM,OACJia,EAAM,IACNpE,EAAG,SACH8d,EAAQ,cACRzpB,EAAa,gBACb+I,EAAe,cACfT,EAAa,WACbtS,EAAU,aACVD,EAAY,QACZqW,GACEhX,KAAKQ,MAET,IAAI,aACFk4C,EAAY,YACZ7kC,GACEjT,IAEJ,MAAMk4C,EAAuBjlC,GAA+B,UAAhBA,EAEtC8sC,EAAWhgD,EAAa,YACxB+D,EAAW/D,EAAa,YAAY,GACpCigD,EAAWjgD,EAAa,YACxBkgD,EAAOlgD,EAAa,QAE1B,IAGImgD,EAHAC,EAAiBpmC,EAAO7M,MAAM,CAAC,aAAc,eAAgB,MAC7DkzC,EAA6BrmC,EAAO7M,MAAM,CAAC,aAAc,eAAgB,gBACzEmzC,EAAwBtmC,EAAO7M,MAAM,CAAC,aAAc,eAAgB,QAGtEgzC,GADEvyC,EAAAA,EAAAA,IAAO3D,KAAkB2D,EAAAA,EAAAA,IAAO3D,EAAcK,gBAC3By1C,GAAaO,EAAuBjqC,EAAS,CAAE/L,eAAgBL,EAAcK,mBAE7Eg2C,EAGvB,IAAIzsC,EAAa,CAAC,iBAAkB+B,GAChC2qC,EAAUvtC,EAAgB4H,QAAQ/G,EAA6B,SAAjBkkC,GAA4C,SAAjBA,GAE7E,OACE,yBAAK72C,UAAWq/C,EAAU,8BAAgC,uBAExD,wBACEnuB,QAAS,IAAM7f,EAAcQ,KAAKc,GAAa0sC,GAC/Cr/C,UAAYk/C,EAAyC,cAAxB,sBAC7Bte,GAAI,IAAAjuB,GAAU,KAAVA,GAAeuK,IAAKgyB,EAAAA,EAAAA,IAAmBhyB,KAAIzV,KAAK,KACpD,WAAUiN,EACV,eAAc2qC,GAEd,kBAACN,EAAQ,CACPO,QAASrI,EACTv9B,QAAS2lC,EACT1wC,MAAM2D,EAAAA,EAAAA,IAAmBoC,GACzB/D,KAAM+D,IACNwqC,EACA,+BACE,kBAACr8C,EAAQ,CAACC,OAAQo8C,KAFH,gCAMjBD,EACA,yBAAKj/C,UAAU,sBACb,+BACE,kBAACg/C,EAAI,CACD98C,MAAML,EAAAA,EAAAA,IAAYo9C,GAClB/tB,QAAU9mB,GAAMA,EAAEmuC,kBAClBv2C,OAAO,UACPm9C,GAA8BF,KAPjB,KAavB,4BACE,gBAAeI,EACfr/C,UAAU,mBACV0hB,MAAO29B,EAAU,qBAAuB,mBACxCnuB,QAAS,IAAM7f,EAAcQ,KAAKc,GAAa0sC,IAE/C,yBAAKr/C,UAAU,QAAQG,MAAM,KAAKD,OAAO,KAAK,cAAY,OAAOq/C,UAAU,SACzE,yBAAKr9C,KAAMm9C,EAAU,kBAAoB,oBAAqBjuB,UAAWiuB,EAAU,kBAAoB,yBAK7G,kBAACP,EAAQ,CAACU,SAAUH,GACjB7sB,GAIT,EACD,KAjHoB/gB,GAAY,eAET,CACpBqH,OAAQ3F,IAAAA,OAAU,CAAC,GACnBuB,IAAK,KCHM,MAAM0iC,WAAkBl2B,EAAAA,cAmCrCriB,SACE,IAAI,SACFO,EAAQ,SACRyK,EAAQ,QACRjF,EAAO,YACP4yC,EAAW,cACXC,EAAa,aACbC,EAAY,cACZC,EAAa,UACbC,EAAS,GACT9uC,EAAE,aACFhK,EAAY,WACZC,EAAU,YACV8Q,EAAW,cACXnR,EAAa,YACbqH,EAAW,cACXiD,EAAa,YACbod,EAAW,cACXrd,GACE5K,KAAKQ,MACL04C,EAAiBl5C,KAAKQ,MAAMiQ,WAE5B,WACFxO,EAAU,QACVsZ,EAAO,KACP/K,EAAI,OACJlF,EAAM,GACN2U,EAAE,IACF1J,EAAG,YACHC,EAAW,cACX0J,EAAa,uBACb04B,EAAsB,gBACtBN,EAAe,kBACfE,GACEU,EAAersC,QAEf,YACFiS,EAAW,aACXqjB,EAAY,QACZnV,GACE/M,EAEJ,MAAMqhC,EAAkBnf,EAAeue,GAAave,EAAap/B,IAAKxC,EAAcwC,MAAO,CAAEkI,eAAgBL,EAAcK,mBAAsB,GACjJ,IAAIwF,EAAYyoC,EAAeprC,MAAM,CAAC,OAClC61B,EAAYlzB,EAAU/O,IAAI,aAC1BkgB,GAAa+pB,EAAAA,EAAAA,IAAQl7B,EAAW,CAAC,eACjCiwB,EAAkBngC,EAAcmgC,gBAAgBlwB,EAAMlF,GACtDkJ,EAAa,CAAC,aAAc+B,EAAKC,GACjC+qC,GAAatQ,EAAAA,EAAAA,IAAcxgC,GAE/B,MAAM+wC,EAAY7gD,EAAa,aACzB8gD,EAAa9gD,EAAc,cAC3B+gD,EAAU/gD,EAAc,WACxBq+C,EAAQr+C,EAAc,SACtBggD,EAAWhgD,EAAc,YACzB+D,EAAW/D,EAAa,YAAY,GACpCghD,EAAUhhD,EAAc,WACxB2gB,EAAmB3gB,EAAc,oBACjCihD,E
AAejhD,EAAc,gBAC7BkhD,EAAmBlhD,EAAc,oBACjCkgD,EAAOlgD,EAAc,SAErB,eAAEmhD,IAAmBlhD,IAG3B,GAAG+iC,GAAaj4B,GAAYA,EAASkE,KAAO,EAAG,CAC7C,IAAI4vC,GAAiB7b,EAAUjiC,IAAIovC,OAAOplC,EAAShK,IAAI,cAAgBiiC,EAAUjiC,IAAI,WACrFgK,EAAWA,EAASsC,IAAI,gBAAiBwxC,EAC3C,CAEA,IAAIuC,GAAc,CAAEvxC,EAAMlF,GAE1B,MAAM6S,GAAmB5d,EAAc4d,iBAAiB,CAAC3N,EAAMlF,IAE/D,OACI,yBAAKzJ,UAAWI,EAAa,6BAA+BsZ,EAAW,mBAAkBjQ,YAAoB,mBAAkBA,IAAUm3B,IAAIsO,EAAAA,EAAAA,IAAmBv8B,EAAWlL,KAAK,OAC9K,kBAACu4C,EAAgB,CAAC3I,eAAgBA,EAAgB39B,QAASA,EAAS89B,YAAaA,EAAa14C,aAAcA,EAAciH,YAAaA,EAAaiD,cAAeA,EAAe5J,SAAUA,IAC5L,kBAAC0/C,EAAQ,CAACU,SAAU9lC,GAClB,yBAAK1Z,UAAU,gBACV4O,GAAaA,EAAUb,MAAuB,OAAda,EAAqB,KACtD,yBAAK1O,OAAQ,OAAQC,MAAO,OAAQF,IAAK7B,EAAQ,MAAiC4B,UAAU,8BAE5FI,GAAc,wBAAIJ,UAAU,wBAAsB,wBAClDid,GACA,yBAAKjd,UAAU,+BACb,yBAAKA,UAAU,uBACb,kBAAC6C,EAAQ,CAACC,OAASma,MAKvBwiC,EACA,yBAAKz/C,UAAU,iCACb,wBAAIA,UAAU,wBAAsB,qBACpC,yBAAKA,UAAU,yBACZsgC,EAAarjB,aACZ,0BAAMjd,UAAU,sCACd,kBAAC6C,EAAQ,CAACC,OAASw9B,EAAarjB,eAGpC,kBAAC+hC,EAAI,CAACh9C,OAAO,SAAShC,UAAU,8BAA8BkC,MAAML,EAAAA,EAAAA,IAAY49C,IAAmBA,KAE9F,KAGR7wC,GAAcA,EAAUb,KACzB,kBAAC6xC,EAAU,CACT7/B,WAAYA,EACZ3gB,SAAUA,EAASqO,KAAK,cACxBmB,UAAWA,EACXsxC,YAAaA,GACbzI,cAAkBA,EAClBC,aAAiBA,EACjBC,cAAkBA,EAClBlB,gBAAoBA,EACpBp4B,cAAeA,EAEfvV,GAAIA,EACJhK,aAAeA,EACf+Q,YAAcA,EACdnR,cAAgBA,EAChBid,WAAa,CAAChN,EAAMlF,GACpB1K,WAAaA,EACbqnB,YAAcA,EACdrd,cAAgBA,IAnBc,KAuB/B0tC,EACD,kBAACh3B,EAAgB,CACf3gB,aAAcA,EACd6P,KAAMA,EACNlF,OAAQA,EACRgX,iBAAkB7R,EAAU/O,IAAI,WAChC6gB,YAAahiB,EAAcgiC,QAAQz0B,MAAM,CAAC0C,EAAM,YAChD2R,kBAAmBvX,EAAcK,eACjCmS,kBAAmB6K,EAAY7K,kBAC/BY,uBAAwBiK,EAAYjK,uBACpCoE,kBAAmBxX,EAAcsd,oBACjC7F,wBAAyBzX,EAAcI,uBAXtB,KAenBstC,GAAoBp4B,GAAuB8M,GAAWA,EAAQpd,KAAO,yBAAK/N,UAAU,mBAChF,kBAAC8/C,EAAO,CAAC30B,QAAUA,EACVxc,KAAOA,EACPlF,OAASA,EACToG,YAAcA,EACdswC,cAAgBthB,KALO,MASnC4X,IAAoBp4B,GAAiB/B,GAAiBxa,QAAU,EAAI,KAAO,yBAAK9B,UAAU,oCAAkC,gEAE3H,4BACI,IAAAsc,IAAgB,KAAhBA,IAAqB,CAAC/Z,EAAO2pC,IAAU,wBAAIpnC,IAAKonC,GAAM,IAAI3pC,EAAK,SAKzE,yBAAKvC,UAAay2C,GAAoB5sC,GAAawU,EAAqC,YAApB,mBAC/Do4B,GAAoBp4B,EAEnB,kBAACwhC,EAAO,CACNjxC,UAAYA,EACZiB,YAAcA,EACdnR,cAAgBA,EAChBqK,cAAgBA,EAChBqd,YAAcA,EACdzX,KAAOA,EACPlF,OAASA,EACTmuC,UAAYA,EACZnsB,SAAUkrB,IAXuB,KAcnCF,GAAoB5sC,GAAawU,EACjC,kBAAC8+B,EAAK,CACJttC,YAAcA,EACdlB,KAAOA,EACPlF,OAASA,IAJuC,MAQvDktC,EAAoB,yBAAK32C,UAAU,qBAAoB,yBAAKA,UAAU,aAAyB,KAE3F8hC,EACC,kBAAC6d,EAAS,CACR7d,UAAYA,EACZl9B,QAAUA,EACVw7C,iBAAmBv2C,EACnB/K,aAAeA,EACfC,WAAaA,EACbL,cAAgBA,EAChB0nB,YAAaA,EACbrd,cAAeA,EACf8G,YAAcA,EACdqb,SAAUxsB,EAAcklC,mBAAmB,CAACj1B,EAAMlF,IAClD+5B,cAAgB9kC,EAAc+kC,mBAAmB,CAAC90B,EAAMlF,IACxDrK,SAAUA,EAASqO,KAAK,aACxBkB,KAAOA,EACPlF,OAASA,EACTstC,uBAAyBA,EACzBjuC,GAAIA,IAjBK,KAoBZm3C,IAAmBP,EAAW3xC,KAC/B,kBAACgyC,EAAY,CAACL,WAAaA,EAAa5gD,aAAeA,IADjB,OAOpD,EAED,KAzPoBs4C,GAAS,eA2BN,CACpBxoC,UAAW,KACX/E,SAAU,KACVjF,QAAS,KACTxF,UAAUmO,EAAAA,EAAAA,QACV+pC,QAAS,KCzCb,MAAM,GAA+Bl5C,QAAQ,mB,eCO9B,MAAM4hD,WAAyB9+B,EAAAA,cAmB5CriB,SAEE,IAAI,QACF6a,EAAO,YACP89B,EAAW,aACX14C,EAAY,YACZiH,EAAW,cACXiD,EAAa,eACbquC,EAAc,SACdj4C,GACEjB,KAAKQ,OAEL,QACF24C,EAAO,aACP9oC,EAAY,OACZ/E,EAAM,GACN2U,EAAE,YACFrE,EAAW,KACXpL,EAAI,YACJgG,EAAW,oBACX4iC,EAAmB,mBACnBT,GACEO,EAAersC,QAGjBssC,QAAS+I,GACPjiC,EAEA3R,EAAW4qC,EAAex3C,IAAI,YAElC,MAAMy4C,EAAwBx5C,EAAa,yBACrCwhD,EAAyBxhD,EAAa,0BACtCyhD,EAAuBzhD,EAAa,wBACpCigB,EAAajgB,EAAa,cAAc,GACxC0hD,EAAqB1hD,EAAa,sBAAsB,GAExD2hD,EAAch0C,KAAcA,EAASie,QACrCg2B,EAAqBD,GAAiC,IAAlBh0C,EAASsB,MAActB,EAAS4B,QAAQyjB,UAC5E6uB,GAAkBF,GAAeC,EACvC,OACE,yBAAK1gD,UAAY,mCAAkCyJ,KACjD,4BACE,aAAa,GAAEA,KAAUkF,EAAKnQ,QAAQ,MAAO,QAC7C,gBAAekb,EACf1Z,UAAU,0B
ACVkxB,QAASsmB,GAET,kBAAC8I,EAAsB,CAAC72C,OAAQA,IAChC,kBAAC82C,EAAoB,CAACzhD,aAAcA,EAAcu4C,eAAgBA,EAAgBj4C,SAAUA,IAE1F2a,EACA,yBAAK/Z,UAAU,+BACZoB,KAASi/C,GAAmB/I,IAFjB,KAMfR,IAAuBS,GAAuB5iC,GAAe,0BAAM3U,UAAU,gCAAgCu3C,GAAuB5iC,GAAsB,KAE3J,yBAAK3U,UAAU,QAAQG,MAAM,KAAKD,OAAO,KAAK,cAAY,OAAOq/C,UAAU,SACzE,yBAAKr9C,KAAMwX,EAAU,kBAAoB,oBAAqB0X,UAAW1X,EAAU,kBAAoB,wBAKzGinC,EAAiB,KACf,kBAACrI,EAAqB,CACpB9pC,aAAcA,EACd0iB,QAAS,KACP,MAAM0vB,EAAwB53C,EAAckF,2BAA2BzB,GACvE1G,EAAYJ,gBAAgBi7C,EAAsB,IAI1D,kBAACJ,EAAkB,CAACK,WAAa,GAAEzhD,EAASS,IAAI,OAChD,kBAACkf,EAAU,CAACpQ,KAAMvP,IAIxB,EACD,KAlGoB4gD,GAAgB,eAab,CACpB3I,eAAgB,KAChBj4C,UAAUmO,EAAAA,EAAAA,QACV+pC,QAAS,KCnBE,MAAMgJ,WAA+Bp/B,EAAAA,cAUlDriB,SAEE,IAAI,OACF4K,GACEtL,KAAKQ,MAET,OACE,0BAAMqB,UAAU,0BAA0ByJ,EAAO2qC,cAErD,EACD,KApBoBkM,GAAsB,eAOnB,CACpBjJ,eAAgB,OCZpB,MAAM,GAA+Bj5C,QAAQ,yD,eCM9B,MAAMmiD,WAA6Br/B,EAAAA,cAQhDriB,SACE,IAAI,aACFC,EAAY,eACZu4C,GACEl5C,KAAKQ,OAGL,WACFyB,EAAU,QACVsZ,EAAO,KACP/K,EAAI,IACJ+F,EAAG,YACHC,EAAW,qBACXsiC,GACEI,EAAersC,OAMnB,MAAM81C,EAAYnyC,EAAK+D,MAAM,WAC7B,IAAK,IAAI+E,EAAI,EAAGA,EAAIqpC,EAAUh/C,OAAQ2V,GAAK,EACzC,KAAAqpC,GAAS,KAATA,EAAiBrpC,EAAG,EAAG,yBAAK3S,IAAK2S,KAGnC,MAAMsnC,EAAWjgD,EAAc,YAE/B,OACE,0BAAMkB,UAAYI,EAAa,mCAAqC,uBAClE,YAAWuO,GACX,kBAACowC,EAAQ,CACLO,QAASrI,EACTv9B,QAASA,EACT/K,MAAM2D,EAAAA,EAAAA,IAAoB,GAAEoC,KAAOC,KACnChE,KAAMmwC,IAIhB,ECjDK,MA+BP,GA/B6B,IAAkC,IAAD,MAAjC,WAAEpB,EAAU,aAAE5gD,GAAc,EACjDiiD,EAAkBjiD,EAAa,mBACnC,OACE,yBAAKkB,UAAU,mBACb,yBAAKA,UAAU,0BACb,2CAEF,yBAAKA,UAAU,mBAEb,+BACE,+BACE,4BACE,wBAAIA,UAAU,cAAY,SAC1B,wBAAIA,UAAU,cAAY,WAG9B,+BAEQ,MAAA0/C,EAAWlzC,YAAU,QAAM,IAAD,IAAE4L,EAAG8E,GAAE,SAAK,kBAAC6jC,EAAe,CAACj8C,IAAM,GAAEsT,KAAK8E,IAAKiI,KAAM/M,EAAGgN,KAAMlI,GAAK,OAKrG,ECVZ,GAbgC,IAAoB,IAApB,KAAEiI,EAAI,KAAEC,GAAM,EAC5C,MAAM47B,EAAoB57B,EAAcA,EAAKpa,KAAOoa,EAAKpa,OAASoa,EAAjC,KAE/B,OAAQ,4BACJ,4BAAMD,GACN,4BAAM,IAAe67B,IACpB,E,uGCTT,MAAM,GAA+B5iD,QAAQ,oB,0BCS7C,MAAMglB,GAAiB,IAA+E,IAA/E,MAAClX,EAAK,SAAE+0C,EAAQ,UAAEjhD,EAAS,aAAEkhD,EAAY,WAAEniD,EAAU,QAAEoiD,EAAO,SAAEx7B,GAAS,EAC9F,MAAMzQ,EAAS0Z,KAAW7vB,GAAcA,IAAe,KACjD8vB,GAAwD,IAAnChvB,KAAIqV,EAAQ,oBAAgCrV,KAAIqV,EAAQ,6BAA6B,GAC1G4Z,GAAUC,EAAAA,EAAAA,QAAO,OAEvBQ,EAAAA,EAAAA,YAAU,KAAO,IAAD,EACd,MAAMC,EAAa,WACXV,EAAQ7qB,QAAQurB,aAAW,QACzBC,KAAUA,EAAKC,UAAYD,EAAKE,UAAU1hB,SAAS,gBAK7D,OAFA,KAAAuhB,GAAU,KAAVA,GAAmBC,GAAQA,EAAKG,iBAAiB,aAAcC,EAAsC,CAAEC,SAAS,MAEzG,KAEL,KAAAN,GAAU,KAAVA,GAAmBC,GAAQA,EAAKM,oBAAoB,aAAcF,IAAsC,CACzG,GACA,CAAC3jB,EAAOlM,EAAW2lB,IAEtB,MAIMkK,EAAwCzlB,IAC5C,MAAM,OAAEpI,EAAM,OAAEquB,GAAWjmB,GACnBkmB,aAAcC,EAAeC,aAAcC,EAAa,UAAEC,GAAc1uB,EAEpDuuB,EAAgBE,IACH,IAAdC,GAAmBL,EAAS,GAFlCI,EAAgBC,GAGSH,GAAiBF,EAAS,IAGtEjmB,EAAEumB,gBACJ,EAGF,OACE,yBAAK3wB,UAAU,iBAAiBzB,IAAKuwB,GACjCoyB,EACA,yBAAKlhD,UAAU,oBAAoBkxB,QApBlB,KACrBkwB,KAAOl1C,EAAO+0C,EAAS,GAmBwC,YAD5C,KAMhBE,GACC,yBAAKnhD,UAAU,qBACb,kBAAC,GAAA0xB,gBAAe,CAAC/gB,KAAMzE,GAAO,mCAIjC2iB,EACG,kBAAC,MAAiB,CAClBlJ,SAAUA,EACV3lB,UAAW6D,KAAG7D,EAAW,cACzB+T,OAAO8c,EAAAA,GAAAA,IAAShxB,KAAIqV,EAAQ,wBAAyB,WAEpDhJ,GAED,yBAAKlM,UAAW6D,KAAG7D,EAAW,eAAgBkM,GAG9C,EAcVkX,GAAcjf,aAAe,CAC3B88C,SAAU,gBAGZ,YCjFe,MAAMtB,WAAkB9+C,IAAAA,UAAiB,cAAD,yDAwC1B2M,GAASrP,KAAKQ,MAAMkR,YAAY0tB,oBAAoB,CAACp/B,KAAKQ,MAAMgQ,KAAMxQ,KAAKQ,MAAM8K,QAAS+D,KAAI,yCAE1F,IAAqC,IAArC,qBAAE6zC,EAAoB,MAAEn1C,GAAO,EAC5D,MAAM,YAAEka,EAAW,KAAEzX,EAAI,OAAElF,GAAWtL,KAAKQ,MACxC0iD,GACDj7B,EAAYlK,uBAAuB,CACjChQ,QACAyC,OACAlF,UAEJ,GACD,CAED5K,SAAU,IAAD,EACP,IAAI,UACFijC,EAAS,iBACTse,EAAgB,aAChBthD,EAAY,WACZC,EAAU,cACVL,EAAa,GACboK,EAAE,cACF06B,EAAa,uBACbuT,EAAsB,SACtB33C,EAAQ,KACRuP,EAAI,OACJlF,EAAM,cACNV,EAAa,
YACbqd,GACEjoB,KAAKQ,MACL2iD,GAAc1X,EAAAA,EAAAA,IAAmB9H,GAErC,MAAMyf,EAAcziD,EAAc,eAC5Bw+C,EAAex+C,EAAc,gBAC7B0iD,EAAW1iD,EAAc,YAE/B,IAAIosB,EAAW/sB,KAAKQ,MAAMusB,UAAY/sB,KAAKQ,MAAMusB,SAASnd,KAAO5P,KAAKQ,MAAMusB,SAAWy0B,GAAUx7C,aAAa+mB,SAE9G,MAEMu2B,EAFa/iD,EAAc2B,UAG/BwuC,EAAAA,EAAAA,IAA6B/M,GAAa,KAEtC4f,EClFK,SAA2B9gB,GAAwB,IAApB+gB,EAAc,UAAH,6CAAG,IAC1D,OAAO/gB,EAAGpiC,QAAQ,UAAWmjD,EAC/B,CDgFqBC,CAAmB,GAAEn4C,IAASkF,eACzCkzC,EAAa,GAAEH,WAErB,OACE,yBAAK1hD,UAAU,qBACb,yBAAKA,UAAU,0BACb,yCACItB,EAAc2B,SAAW,KAAO,2BAAO6mB,QAAS26B,GAChD,uDACA,kBAACN,EAAW,CAACr1C,MAAOs3B,EACTse,aAAcJ,EACdK,UAAU,wBACV/hD,UAAU,uBACVgiD,aAAc92B,EACd22B,UAAWA,EACXtjC,SAAUpgB,KAAK8jD,4BAGhC,yBAAKjiD,UAAU,mBAEVogD,EACmB,6BACE,kBAAC9C,EAAY,CAACzzC,SAAWu2C,EACXthD,aAAeA,EACfC,WAAaA,EACbL,cAAgBA,EAChBiQ,KAAOxQ,KAAKQ,MAAMgQ,KAClBlF,OAAStL,KAAKQ,MAAM8K,OACpBstC,uBAAyBA,IACvC,0CATF,KActB,2BAAO,YAAU,SAAS/2C,UAAU,kBAAkB4gC,GAAI8gB,EAAUQ,KAAK,UACvE,+BACE,wBAAIliD,UAAU,oBACZ,wBAAIA,UAAU,kCAAgC,QAC9C,wBAAIA,UAAU,uCAAqC,eACjDtB,EAAc2B,SAAW,wBAAIL,UAAU,qCAAmC,SAAc,OAG9F,+BAEI,MAAA8hC,EAAUt1B,YAAU,QAAO,IAAsB,IAArBhE,EAAMqB,GAAS,EAErC7J,EAAYogD,GAAoBA,EAAiBvgD,IAAI,WAAa2I,EAAO,mBAAqB,GAClG,OACE,kBAACg5C,EAAQ,CAAC18C,IAAM0D,EACNmG,KAAMA,EACNlF,OAAQA,EACRrK,SAAUA,EAASqO,KAAKjF,GACxB25C,UAAWb,IAAgB94C,EAC3BM,GAAIA,EACJ9I,UAAYA,EACZwI,KAAOA,EACPqB,SAAWA,EACXnL,cAAgBA,EAChB2iD,qBAAsBx3C,IAAa43C,EACnCW,oBAAqBjkD,KAAKkkD,4BAC1B3/B,YAAc8gB,EACdzkC,WAAaA,EACbijB,kBAAmBjZ,EAAcqgB,qBAC/Bza,EACAlF,EACA,YACAjB,GAEF4d,YAAaA,EACbtnB,aAAeA,GAAgB,IAE1CsoB,aAOjB,EACD,KAjKoBu4B,GAAS,eAmBN,CACpBS,iBAAkB,KAClBl1B,UAAU7e,EAAAA,EAAAA,QAAO,CAAC,qBAClB0qC,wBAAwB,IE7B5B,MAAM,GAA+B34C,QAAQ,yD,0BC0B9B,MAAMojD,WAAiB3gD,IAAAA,UACpCC,YAAYnC,EAAOoC,GACjBC,MAAMrC,EAAOoC,GAAQ,kCA8BCmL,IACtB,MAAM,oBAAEk2C,EAAmB,qBAAEf,GAAyBljD,KAAKQ,MAC3DR,KAAKuD,SAAS,CAAEgoB,oBAAqBxd,IACrCk2C,EAAoB,CAClBl2C,MAAOA,EACPm1C,wBACA,IACH,kCAEsB,KACrB,MAAM,SAAEx3C,EAAQ,YAAE6Y,EAAW,kBAAEV,GAAsB7jB,KAAKQ,MAEpD2jD,EAAoBnkD,KAAKmD,MAAMooB,qBAAuBhH,EAItDi3B,EAHkB9vC,EAASoC,MAAM,CAAC,UAAWq2C,IAAoB/1C,EAAAA,EAAAA,KAAI,CAAC,IAC/B1M,IAAI,WAAY,MAEfmO,SAASK,QACvD,OAAO2T,GAAqB23B,CAAgB,IA7C5Cx7C,KAAKmD,MAAQ,CACXooB,oBAAqB,GAEzB,CA6CA7qB,SAAU,IAAD,IACP,IAAI,KACF8P,EAAI,OACJlF,EAAM,KACNjB,EAAI,SACJqB,EAAQ,UACR7J,EAAS,SACTZ,EAAQ,GACR0J,EAAE,aACFhK,EAAY,WACZC,EAAU,cACVL,EAAa,YACbgkB,EAAW,qBACX2+B,EAAoB,YACpBj7B,GACEjoB,KAAKQ,OAEL,YAAEg6B,GAAgB7vB,EAClBzI,EAAS3B,EAAc2B,SAC3B,MAAM,eAAE4/C,GAAmBlhD,IAE3B,IAAI2gD,EAAaO,GAAiB7Q,EAAAA,EAAAA,IAAcvlC,GAAY,KACxDnC,EAAUmC,EAAShK,IAAI,WACvB0iD,EAAQ14C,EAAShK,IAAI,SACzB,MAAM2iD,EAAoB1jD,EAAa,qBACjCs+C,EAAUt+C,EAAa,WACvBskB,EAAgBtkB,EAAa,iBAC7BqkB,EAAerkB,EAAa,gBAC5B+D,EAAW/D,EAAa,YAAY,GACpC6gB,EAAgB7gB,EAAa,iBAC7ByiD,EAAcziD,EAAa,eAC3Bu6C,EAAiBv6C,EAAa,kBAC9BwkB,EAAUxkB,EAAa,WAG7B,IAAIE,EAAQyjD,EAEZ,MAAMH,EAAoBnkD,KAAKmD,MAAMooB,qBAAuBhH,EACtDggC,EAAkB74C,EAASoC,MAAM,CAAC,UAAWq2C,IAAoB/1C,EAAAA,EAAAA,KAAI,CAAC,IACtEo2C,EAAuBD,EAAgB7iD,IAAI,WAAY,MAG7D,GAAGQ,EAAQ,CACT,MAAMuiD,EAA2BF,EAAgB7iD,IAAI,UAErDb,EAAS4jD,EAA2BjqB,EAAYiqB,EAAyB53C,QAAU,KACnFy3C,EAA6BG,GAA2Br1C,EAAAA,EAAAA,MAAK,CAAC,UAAWpP,KAAKmD,MAAMooB,oBAAqB,WAAatqB,CACxH,MACEJ,EAAS6K,EAAShK,IAAI,UACtB4iD,EAA6B54C,EAASgb,IAAI,UAAYzlB,EAASqO,KAAK,UAAYrO,EAGlF,IAAIgjB,EAEAygC,EADAC,GAA8B,EAE9BC,EAAkB,CACpBzjD,iBAAiB,GAInB,GAAGe,EAAQ,CAAC,IAAD,EAET,GADAwiD,EAA4C,QAAhC,EAAGH,EAAgB7iD,IAAI,iBAAS,aAA7B,EAA+BmL,OAC3C23C,EAAsB,CACvB,MAAMK,EAAoB7kD,KAAK8kD,uBAGzBC,EAAuBC,GAC3BA,EAActjD,IAAI,SACpBuiB,EAAmB8gC,EAJGP,EACnB9iD,IAAImjD,GAAmBz2C,EAAAA,EAAAA,KAAI,CAAC,UAIPjM,IAArB8hB,IACDA,EAAmB8gC,EAAoB,KAAAP,GAAoB,KAApBA,G
AA8BryC,OAAOpE,QAE9E42C,GAA8B,CAChC,WAA6CxiD,IAAnCoiD,EAAgB7iD,IAAI,aAE5BuiB,EAAmBsgC,EAAgB7iD,IAAI,WACvCijD,GAA8B,EAElC,KAAO,CACLD,EAAe7jD,EACf+jD,EAAkB,IAAIA,EAAiBxjD,kBAAkB,GACzD,MAAM6jD,EAAyBv5C,EAASoC,MAAM,CAAC,WAAYq2C,IACxDc,IACDhhC,EAAmBghC,EACnBN,GAA8B,EAElC,CASA,IAAI38B,EApKoB,EAAEk9B,EAAgBjgC,EAAerkB,KAC3D,GACEskD,QAEA,CACA,IAAI19B,EAAW,KAKf,OAJuBC,EAAAA,GAAAA,GAAkCy9B,KAEvD19B,EAAW,QAEN,6BACL,kBAACvC,EAAa,CAACpjB,UAAU,UAAUjB,WAAaA,EAAa4mB,SAAWA,EAAWzZ,OAAQkV,EAAAA,EAAAA,IAAUiiC,KAEzG,CACA,OAAO,IAAI,EAsJKC,EAPShhC,EAAAA,EAAAA,IACrBugC,EACAP,EACAS,EACAD,EAA8B1gC,OAAmB9hB,GAGA8iB,EAAerkB,GAElE,OACE,wBAAIiB,UAAY,aAAgBA,GAAa,IAAM,YAAWwI,GAC5D,wBAAIxI,UAAU,uBACVwI,GAEJ,wBAAIxI,UAAU,4BAEZ,yBAAKA,UAAU,mCACb,kBAAC6C,EAAQ,CAACC,OAAS+G,EAAShK,IAAK,kBAGhCogD,GAAmBP,EAAW3xC,KAAc,MAAA2xC,EAAWlzC,YAAU,QAAM,IAAD,IAAE1H,EAAKoY,GAAE,SAAK,kBAACslC,EAAiB,CAAC19C,IAAM,GAAEA,KAAOoY,IAAKiI,KAAMrgB,EAAKsgB,KAAMlI,GAAK,IAA5G,KAEvC7c,GAAUwJ,EAAShK,IAAI,WACtB,6BAASG,UAAU,qBACjB,yBACEA,UAAW6D,KAAG,8BAA+B,CAC3C,iDAAkDw9C,KAGpD,2BAAOrhD,UAAU,sCAAoC,cAGrD,kBAACuhD,EAAW,CACVr1C,MAAO/N,KAAKmD,MAAMooB,oBAClBs4B,aACEn4C,EAAShK,IAAI,WACTgK,EAAShK,IAAI,WAAWmO,UACxBu1C,EAAAA,EAAAA,OAENhlC,SAAUpgB,KAAKqlD,qBACfzB,UAAU,eAEXV,EACC,2BAAOrhD,UAAU,+CAA6C,YACnD,wCAAmB,YAE5B,MAEL2iD,EACC,yBAAK3iD,UAAU,6BACb,2BAAOA,UAAU,oCAAkC,YAGnD,kBAACq5C,EAAc,CACbxzB,SAAU88B,EACVlJ,kBAAmBt7C,KAAK8kD,uBACxBj9B,SAAUlhB,GACRshB,EAAYtK,wBAAwB,CAClC5c,KAAM4F,EACN6W,WAAY,CAAChN,EAAMlF,GACnBsS,YAAa,YACbC,YAAaxT,IAGjByxC,YAAY,KAGd,MAEJ,KAEF9zB,GAAWnnB,EACX,kBAACmkB,EAAY,CACX/jB,SAAUqjD,EACV3jD,aAAeA,EACfC,WAAaA,EACbL,cAAgBA,EAChBM,QAASmgC,EAAAA,EAAAA,IAAcngC,GACvBmnB,QAAUA,EACV7mB,iBAAkB,IAClB,KAEFe,GAAUsiD,EACR,kBAACr/B,EAAO,CACN6C,QAASw8B,EAAqB9iD,IAAI1B,KAAK8kD,wBAAwB12C,EAAAA,EAAAA,KAAI,CAAC,IACpEzN,aAAcA,EACdC,WAAYA,EACZ0kD,WAAW,IAEb,KAEF/7C,EACA,kBAAC01C,EAAO,CACN11C,QAAUA,EACV5I,aAAeA,IAEf,MAGLuB,EAAS,wBAAIL,UAAU,sBACpBuiD,EACA,MAAAA,EAAMmB,QAAQl3C,YAAU,QAAM,IAAiB,IAAhB1H,EAAK+a,GAAK,EACvC,OAAO,kBAACF,EAAa,CAAC7a,IAAKA,EAAK5F,KAAM4F,EAAK+a,KAAOA,EAAO/gB,aAAcA,GAAe,IAExF,wCACI,KAGd,EACD,KAzPoB0iD,GAAQ,eA2BL,CACpB33C,UAAUwC,EAAAA,EAAAA,QAAO,CAAC,GAClB+1C,oBAAqB,SCpDlB,MAQP,GARkC,IAAoB,IAApB,KAAEj9B,EAAI,KAAEC,GAAM,EAC5C,OAAO,yBAAKplB,UAAU,uBAAwBmlB,EAAI,KAAM8pB,OAAO7pB,GAAa,ECJ1E,GAA+BhnB,QAAQ,oB,eCA7C,MAAM,GAA+BA,QAAQ,kB,eCQ9B,MAAM0/C,WAAqBj9C,IAAAA,cAAqB,cAAD,sCACpD,CACN8iD,cAAe,OAChB,iCAWsBC,IACrB,MAAM,QAAEzF,GAAYhgD,KAAKQ,MAEzB,GAAGilD,IAAgBzF,EAInB,GAAGA,GAAWA,aAAmB0F,KAAM,CACrC,IAAIC,EAAS,IAAIC,WACjBD,EAAOnhD,OAAS,KACdxE,KAAKuD,SAAS,CACZiiD,cAAeG,EAAOh3C,QACtB,EAEJg3C,EAAOE,WAAW7F,EACpB,MACEhgD,KAAKuD,SAAS,CACZiiD,cAAexF,EAAQ/8C,YAE3B,GACD,CAEDoB,oBACErE,KAAK8lD,oBAAoB,KAC3B,CAEAC,mBAAmBC,GACjBhmD,KAAK8lD,oBAAoBE,EAAUhG,QACrC,CAEAt/C,SACE,IAAI,QAAEs/C,EAAO,YAAEz7B,EAAW,IAAExhB,EAAG,QAAEwG,EAAQ,CAAC,EAAC,WAAE3I,EAAU,aAAED,GAAiBX,KAAKQ,MAC/E,MAAM,cAAEglD,GAAkBxlD,KAAKmD,MACzB8hB,EAAgBtkB,EAAa,iBAC7BslD,EAAe,aAAc,IAAIzwB,MAAO0wB,UAC9C,IAAIp8C,EAAMq8C,EAGV,GAFApjD,EAAMA,GAAO,GAGX,8BAA8BoT,KAAKoO,IAClChb,EAAQ,wBAA2B,cAAe4M,KAAK5M,EAAQ,yBAC/DA,EAAQ,wBAA2B,cAAe4M,KAAK5M,EAAQ,yBAC/DA,EAAQ,wBAA2B,iBAAkB4M,KAAK5M,EAAQ,yBAClEA,EAAQ,wBAA2B,iBAAkB4M,KAAK5M,EAAQ,wBAGnE,GAAI,SAAUqJ,OAAQ,CACpB,IAAIpR,EAAO+iB,GAAe,YACtB6hC,EAAQpG,aAAmB0F,KAAQ1F,EAAU,IAAI0F,KAAK,CAAC1F,GAAU,CAACx+C,KAAMA,IACxEuC,EAAO,qBAA2BqiD,GAElCtvC,EAAW,CAACtV,EADDuB,EAAI+sC,OAAO,IAAA/sC,GAAG,KAAHA,EAAgB,KAAO,GACjBgB,GAAMuF,KAAK,KAIvC+8C,EAAc98C,EAAQ,wBAA0BA,EAAQ,uBAC5D,QAA2B,IAAhB88C,EAA6B,CACtC,IAAIva,GAAmBD,EAAAA,EAAAA,IAA4Cwa,GAC1C,OAArBva,IACFh1B,EAAWg1B,EAEf,CAGIqa,EADDnjD,EAA
AA,EAAAA,WAAiBA,EAAAA,EAAAA,UAAAA,iBACP,6BAAK,uBAAGe,KAAOA,EAAOgvB,QAAS,IAAM/vB,EAAAA,EAAAA,UAAAA,iBAA+BojD,EAAMtvC,IAAa,kBAEvF,6BAAK,uBAAG/S,KAAOA,EAAO+S,SAAWA,GAAa,iBAE7D,MACEqvC,EAAS,yBAAKtkD,UAAU,cAAY,uGAIjC,GAAI,QAAQsU,KAAKoO,GAAc,CAEpC,IAAIiD,EAAW,MACQC,EAAAA,GAAAA,GAAkCu4B,KAEvDx4B,EAAW,QAEb,IACE1d,EAAO,IAAe6B,KAAKC,MAAMo0C,GAAU,KAAM,KAGnD,CAFE,MAAO57C,GACP0F,EAAO,qCAAuCk2C,CAChD,CAEAmG,EAAS,kBAAClhC,EAAa,CAACuC,SAAUA,EAAUu7B,cAAY,EAACD,SAAW,GAAEmD,SAAqBl4C,MAAQjE,EAAOlJ,WAAaA,EAAaoiD,SAAO,GAG7I,KAAW,OAAO7sC,KAAKoO,IACrBza,EAAOw8C,KAAUtG,EAAS,CACxBuG,qBAAqB,EACrBC,SAAU,OAEZL,EAAS,kBAAClhC,EAAa,CAAC89B,cAAY,EAACD,SAAW,GAAEmD,QAAoBl4C,MAAQjE,EAAOlJ,WAAaA,EAAaoiD,SAAO,KAItHmD,EADkC,cAAzBM,KAAQliC,IAAgC,cAAcpO,KAAKoO,GAC3D,kBAACU,EAAa,CAAC89B,cAAY,EAACD,SAAW,GAAEmD,SAAqBl4C,MAAQiyC,EAAUp/C,WAAaA,EAAaoiD,SAAO,IAGxF,aAAzByD,KAAQliC,IAA+B,YAAYpO,KAAKoO,GACxD,kBAACU,EAAa,CAAC89B,cAAY,EAACD,SAAW,GAAEmD,QAAoBl4C,MAAQiyC,EAAUp/C,WAAaA,EAAaoiD,SAAO,IAGhH,YAAY7sC,KAAKoO,GACvB,KAAAA,GAAW,KAAXA,EAAqB,OACb,iCAAQy7B,EAAO,KAEf,yBAAKl+C,IAAM,qBAA2Bk+C,KAIxC,YAAY7pC,KAAKoO,GACjB,yBAAK1iB,UAAU,cAAa,2BAAO6kD,UAAQ,EAAC//C,IAAM5D,GAAM,4BAAQjB,IAAMiB,EAAMvB,KAAO+iB,MAChE,iBAAZy7B,EACP,kBAAC/6B,EAAa,CAAC89B,cAAY,EAACD,SAAW,GAAEmD,QAAoBl4C,MAAQiyC,EAAUp/C,WAAaA,EAAaoiD,SAAO,IAC/GhD,EAAQpwC,KAAO,EAEtB41C,EAGQ,6BACP,uBAAG3jD,UAAU,KAAG,2DAGhB,kBAACojB,EAAa,CAAC89B,cAAY,EAACD,SAAW,GAAEmD,QAAoBl4C,MAAQy3C,EAAgB5kD,WAAaA,EAAaoiD,SAAO,KAK/G,uBAAGnhD,UAAU,KAAG,kDAMlB,KAGX,OAAUskD,EAAgB,6BACtB,6CACEA,GAFa,IAKrB,E,0BClKa,MAAM1E,WAAmBhgC,EAAAA,UAEtC9e,YAAYnC,GACVqC,MAAMrC,GAAM,sBAqCH,CAACq+B,EAAO9wB,EAAO4wB,KACxB,IACEjtB,aAAa,sBAAEktB,GAAuB,YACtCmjB,GACE/hD,KAAKQ,MAETo+B,EAAsBmjB,EAAaljB,EAAO9wB,EAAO4wB,EAAM,IACxD,qCAE0BtvB,IACzB,IACEqC,aAAa,oBAAEytB,GAAqB,YACpC4iB,GACE/hD,KAAKQ,MAET2+B,EAAoB4iB,EAAa1yC,EAAI,IACtC,uBAEYs3C,GACC,eAARA,EACK3mD,KAAKuD,SAAS,CACnBqjD,mBAAmB,EACnBC,iBAAiB,IAEF,cAARF,EACF3mD,KAAKuD,SAAS,CACnBsjD,iBAAiB,EACjBD,mBAAmB,SAHhB,IAMR,+BAEoB,IAA2B,IAA3B,MAAE74C,EAAK,WAAEyP,GAAY,GACpC,YAAE9L,EAAW,cAAE9G,EAAa,YAAEqd,GAAgBjoB,KAAKQ,MACvD,MAAMijB,EAAoB7Y,EAAcsgB,qBAAqB1N,GACvDoN,EAA+BhgB,EAAcggB,gCAAgCpN,GACnFyK,EAAYnK,sBAAsB,CAAE/P,QAAOyP,eAC3CyK,EAAY5J,6BAA6B,CAAEb,eACtCiG,IACCmH,GACF3C,EAAY1K,oBAAoB,CAAExP,WAAO5L,EAAWqb,eAEtD9L,EAAYmvB,iBAAiBrjB,GAC7B9L,EAAYovB,gBAAgBtjB,GAC5B9L,EAAYwtB,oBAAoB1hB,GAClC,IAjFAxd,KAAKmD,MAAQ,CACX0jD,iBAAiB,EACjBD,mBAAmB,EAEvB,CAgFAlmD,SAAU,IAAD,EAEP,IAAI,cACF44C,EAAa,aACbC,EAAY,WACZ33B,EAAU,cACV1B,EAAa,gBACbo4B,EAAe,SACfr3C,EAAQ,GACR0J,EAAE,aACFhK,EAAY,WACZC,EAAU,cACVL,EAAa,YACbmR,EAAW,WACX8L,EAAU,YACVyK,EAAW,cACXrd,EAAa,UACb6F,GACEzQ,KAAKQ,MAET,MAAMsmD,EAAenmD,EAAa,gBAC5BomD,EAAiBpmD,EAAa,kBAC9ByiD,EAAcziD,EAAa,eAC3BsgB,EAAYtgB,EAAa,aAAa,GACtCugB,EAAcvgB,EAAa,eAAe,GAE1C6jB,EAAY8zB,GAAmBp4B,EAC/Bhe,EAAS3B,EAAc2B,SAGvByhB,EAAclT,EAAU/O,IAAI,eAE5BslD,EAAuB,WAAc,IAAAplC,GAAU,KAAVA,GACjC,CAACxC,EAAKmZ,KACZ,MAAM5xB,EAAM4xB,EAAE72B,IAAI,MAGlB,OAFA0d,EAAIzY,KAAJyY,EAAIzY,GAAS,IACbyY,EAAIzY,GAAK2I,KAAKipB,GACPnZ,CAAG,GACT,CAAC,KAAG,QACC,CAACA,EAAKmZ,IAAM,IAAAnZ,GAAG,KAAHA,EAAWmZ,IAAI,IAGrC,OACE,yBAAK12B,UAAU,mBACb,yBAAKA,UAAU,0BACZK,EACC,yBAAKL,UAAU,cACb,yBAAKkxB,QAAS,IAAM/yB,KAAKinD,UAAU,cAC9BplD,UAAY,YAAW7B,KAAKmD,MAAMyjD,mBAAqB,YAC1D,wBAAI/kD,UAAU,iBAAgB,8CAE/B4O,EAAU/O,IAAI,aAEX,yBAAKqxB,QAAS,IAAM/yB,KAAKinD,UAAU,aAC9BplD,UAAY,YAAW7B,KAAKmD,MAAM0jD,iBAAmB,YACxD,wBAAIhlD,UAAU,iBAAgB,6CAE9B,MAIR,yBAAKA,UAAU,cACb,wBAAIA,UAAU,iBAAe,eAGhCqe,EACC,kBAAC6mC,EAAc,CACb7kD,OAAQ3B,EAAc2B,SACtBgpB,kBAAmBtgB,EAAcsgB,qBAAqB1N,GACtD2jC,QAAS7I,EACTkB,cAAex5C,KAAKQ,MAAMg5C,cAC1BF,cAAeA,EACfC,aA
Ac,IAAMA,EAAa/7B,KACjC,MAELxd,KAAKmD,MAAMyjD,kBAAoB,yBAAK/kD,UAAU,wBAC3CmlD,EAAqBrjD,OACrB,yBAAK9B,UAAU,mBACb,2BAAOA,UAAU,cACf,+BACA,4BACE,wBAAIA,UAAU,kCAAgC,QAC9C,wBAAIA,UAAU,yCAAuC,iBAGvD,+BAEE,IAAAmlD,GAAoB,KAApBA,GAAyB,CAAC3U,EAAW/4B,IACnC,kBAACwtC,EAAY,CACXn8C,GAAIA,EACJ1J,SAAUA,EAASqO,KAAKgK,EAAErW,YAC1BtC,aAAcA,EACdC,WAAYA,EACZsmD,SAAU7U,EACVxT,MAAOt+B,EAAc2jC,4BAA4B1mB,EAAY60B,GAC7D1rC,IAAM,GAAE0rC,EAAU3wC,IAAI,SAAS2wC,EAAU3wC,IAAI,UAC7C0e,SAAUpgB,KAAKogB,SACf+mC,iBAAkBnnD,KAAKonD,wBACvB7mD,cAAeA,EACfmR,YAAaA,EACbuW,YAAaA,EACbrd,cAAeA,EACf4S,WAAYA,EACZgH,UAAWA,SA3BS,yBAAK3iB,UAAU,+BAA8B,8CAkCtE,KAER7B,KAAKmD,MAAM0jD,gBAAkB,yBAAKhlD,UAAU,mDAC3C,kBAACof,EAAS,CACRvB,WAAWtR,EAAAA,EAAAA,KAAIqC,EAAU/O,IAAI,cAC7BT,SAAU,IAAAA,GAAQ,KAARA,EAAe,GAAI,GAAGqO,KAAK,gBAEhC,KAEPpN,GAAUyhB,GAAe3jB,KAAKmD,MAAMyjD,mBACpC,yBAAK/kD,UAAU,gDACb,yBAAKA,UAAU,0BACb,wBAAIA,UAAY,iCAAgC8hB,EAAYjiB,IAAI,aAAe,cAAa,gBAE5F,+BACE,kBAAC0hD,EAAW,CACVr1C,MAAOnD,EAAcmgB,sBAAsBvN,GAC3CqmC,aAAclgC,EAAYjiB,IAAI,WAAW0N,EAAAA,EAAAA,SAAQS,SACjDuQ,SAAWrS,IACT/N,KAAKqnD,kBAAkB,CAAEt5C,QAAOyP,cAAa,EAE/C3b,UAAU,0BACV+hD,UAAU,2BAGhB,yBAAK/hD,UAAU,+BACb,kBAACqf,EAAW,CACVzD,8BAhGoC6pC,GAAMr/B,EAAYxK,8BAA8B,CAAE1P,MAAOu5C,EAAG9pC,eAiGhGiG,kBAAmB7Y,EAAcsgB,qBAAqB1N,GACtDvc,SAAU,IAAAA,GAAQ,KAARA,EAAe,GAAI,GAAGqO,KAAK,eACrCqU,YAAaA,EACbS,iBAAkBxZ,EAAcwZ,oBAAoB5G,GACpD6G,4BAA6BzZ,EAAcyZ,+BAA+B7G,GAC1E8G,kBAAmB1Z,EAAc0Z,qBAAqB9G,GACtDgH,UAAWA,EACX5jB,WAAYA,EACZijB,kBAAmBjZ,EAAcqgB,wBAC5BzN,EACH,cACA,eAEFkH,wBAAyB/d,IACvB3G,KAAKQ,MAAMynB,YAAYtK,wBAAwB,CAC7C5c,KAAM4F,EACN6W,WAAYxd,KAAKQ,MAAMgd,WACvBI,YAAa,cACbC,YAAa,eACb,EAGJuC,SAAU,CAACrS,EAAOyC,KAChB,GAAIA,EAAM,CACR,MAAM+2C,EAAY38C,EAAcwZ,oBAAoB5G,GAC9CgqC,EAAcp5C,EAAAA,IAAAA,MAAUm5C,GAAaA,GAAYn5C,EAAAA,EAAAA,OACvD,OAAO6Z,EAAY1K,oBAAoB,CACrCC,aACAzP,MAAOy5C,EAAYh5C,MAAMgC,EAAMzC,IAEnC,CACAka,EAAY1K,oBAAoB,CAAExP,QAAOyP,cAAa,EAExDiH,qBAAsB,CAAC1jB,EAAMgN,KAC3Bka,EAAYvK,wBAAwB,CAClCF,aACAzP,QACAhN,QACA,EAEJwjB,YAAa3Z,EAAcmgB,sBAAsBvN,OAM/D,EACD,KAjRoBikC,GAAU,eA+BP,CACpBnI,cAAez2B,SAASC,UACxB02B,cAAe32B,SAASC,UACxBw1B,iBAAiB,EACjBp4B,eAAe,EACf6hC,YAAa,GACb9gD,SAAU,KCvCP,MAQP,GAR6B,IAAoB,IAApB,KAAE+lB,EAAI,KAAEC,GAAM,EACvC,OAAO,yBAAKplB,UAAU,wBAAyBmlB,EAAI,KAAM8pB,OAAO7pB,GAAa,ECU3EwgC,GAAoC,CACxCrnC,SAVW,OAWXgH,kBAAmB,CAAC,GAEP,MAAMhC,WAA8B3D,EAAAA,UAAW,cAAD,kDAYxCxV,IACjB,MAAM,SAAEmU,GAAapgB,KAAKQ,MAC1B4f,EAASnU,EAAEpI,OAAOk6C,QAAQ,GAC3B,CAXD15C,oBACE,MAAM,kBAAE+iB,EAAiB,SAAEhH,GAAapgB,KAAKQ,OACvC,mBAAEukB,EAAkB,aAAE/B,GAAiBoE,EACzCrC,GACF3E,EAAS4C,EAEb,CAOAtiB,SACE,IAAI,WAAEymB,EAAU,WAAEE,GAAernB,KAAKQ,MAEtC,OACE,6BACE,2BAAOqB,UAAW6D,KAAG,gCAAiC,CACpD,SAAY2hB,KAEZ,2BAAO7lB,KAAK,WACV8rB,SAAUjG,EACV02B,SAAU12B,GAAcF,EACxB/G,SAAUpgB,KAAK0nD,mBAAoB,oBAK7C,EACD,KAlCoBtiC,GAAqB,eAElBqiC,I,eCZT,MAAMX,WAAqBrlC,EAAAA,UAkBxC9e,YAAYnC,EAAOoC,GAAU,IAAD,EAC1BC,MAAMrC,EAAOoC,GAAQ,oCAsCL,SAACmL,GAA0B,IAEvC45C,EAFoBhpB,EAAQ,UAAH,+CACzB,SAAEve,EAAQ,SAAE8mC,GAAa,EAAK1mD,MAUlC,OALEmnD,EADW,KAAV55C,GAAiBA,GAAwB,IAAfA,EAAM6B,KACd,KAEA7B,EAGdqS,EAAS8mC,EAAUS,EAAkBhpB,EAC9C,IAAC,8BAEmBh4B,IAClB3G,KAAKQ,MAAMynB,YAAYtK,wBAAwB,CAC7C5c,KAAM4F,EACN6W,WAAYxd,KAAKQ,MAAMgd,WACvBI,YAAa,aACbC,YAAa7d,KAAK4nD,eAClB,IACH,kCAEuBvnC,IACtB,IAAI,YAAE3O,EAAW,MAAEmtB,EAAK,WAAErhB,GAAexd,KAAKQ,MAC9C,MAAMi+B,EAAYI,EAAMn9B,IAAI,QACtBg9B,EAAUG,EAAMn9B,IAAI,MAC1B,OAAOgQ,EAAYstB,0BAA0BxhB,EAAYihB,EAAWC,EAASre,EAAS,IACvF,6BAEiB,KAChB,IAAI,cAAE9f,EAAa,WAAEid,EAAU,SAAE0pC,EAAQ,cAAEt8C,GAAkB5K,KAAKQ,MAElE,MAAMqnD,EAAgBtnD,EAAc2jC,4BAA4B1mB,EAAY0pC,KAAa94C,EAAAA,EAAAA,QACnF,OAAEvN,IAAWiuC,EAAAA,GAAAA,GAAmB+Y,EAAe,CAAE3lD,OAAQ3B,EAAc2B,WACvE4
lD,EAAqBD,EACxBnmD,IAAI,WAAW0M,EAAAA,EAAAA,QACfyB,SACAK,QAGG63C,EAAuBlnD,GAASsjB,EAAAA,EAAAA,IAAgBtjB,EAAOgM,OAAQi7C,EAAoB,CAEvF1mD,kBAAkB,IACf,KAEL,GAAKymD,QAAgD1lD,IAA/B0lD,EAAcnmD,IAAI,UAIR,SAA5BmmD,EAAcnmD,IAAI,MAAmB,CACvC,IAAIolB,EAIJ,GAAIvmB,EAAc+oB,aAChBxC,OACqC3kB,IAAnC0lD,EAAcnmD,IAAI,aAChBmmD,EAAcnmD,IAAI,kBAC6BS,IAA/C0lD,EAAc/5C,MAAM,CAAC,SAAU,YAC/B+5C,EAAc/5C,MAAM,CAAC,SAAU,YAC9BjN,GAAUA,EAAOiN,MAAM,CAAC,iBACxB,GAAIvN,EAAc2B,SAAU,CACjC,MAAMo5C,EAAoB1wC,EAAcqgB,wBAAwBzN,EAAY,aAAcxd,KAAK4nD,eAC/F9gC,OACoE3kB,IAAlE0lD,EAAc/5C,MAAM,CAAC,WAAYwtC,EAAmB,UAClDuM,EAAc/5C,MAAM,CAAC,WAAYwtC,EAAmB,eACgBn5C,IAApE0lD,EAAc/5C,MAAM,CAAC,UAAWg6C,EAAoB,YACpDD,EAAc/5C,MAAM,CAAC,UAAWg6C,EAAoB,iBACnB3lD,IAAjC0lD,EAAcnmD,IAAI,WAClBmmD,EAAcnmD,IAAI,gBACoBS,KAArCtB,GAAUA,EAAOa,IAAI,YACrBb,GAAUA,EAAOa,IAAI,gBACgBS,KAArCtB,GAAUA,EAAOa,IAAI,YACrBb,GAAUA,EAAOa,IAAI,WACtBmmD,EAAcnmD,IAAI,UACxB,MAIoBS,IAAjB2kB,GAA+B1X,EAAAA,KAAAA,OAAY0X,KAE5CA,GAAe7D,EAAAA,EAAAA,IAAU6D,SAKP3kB,IAAjB2kB,EACD9mB,KAAKgoD,gBAAgBlhC,GAErBjmB,GAAiC,WAAvBA,EAAOa,IAAI,SAClBqmD,IACCF,EAAcnmD,IAAI,aAOtB1B,KAAKgoD,gBACH54C,EAAAA,KAAAA,OAAY24C,GACVA,GAEA9kC,EAAAA,EAAAA,IAAU8kC,GAIlB,KA/IA/nD,KAAKioD,iBACP,CAEA5kD,iCAAiC7C,GAC/B,IAOI2oB,GAPA,cAAE5oB,EAAa,WAAEid,EAAU,SAAE0pC,GAAa1mD,EAC1C0B,EAAS3B,EAAc2B,SAEvBwiC,EAAoBnkC,EAAc2jC,4BAA4B1mB,EAAY0pC,IAAa,IAAI94C,EAAAA,IAM/F,GAJAs2B,EAAoBA,EAAkB/Q,UAAYuzB,EAAWxiB,EAI1DxiC,EAAQ,CACT,IAAI,OAAErB,IAAWiuC,EAAAA,GAAAA,GAAmBpK,EAAmB,CAAExiC,WACzDinB,EAAYtoB,EAASA,EAAOa,IAAI,aAAUS,CAC5C,MACEgnB,EAAYub,EAAoBA,EAAkBhjC,IAAI,aAAUS,EAElE,IAEI4L,EAFA6xB,EAAa8E,EAAoBA,EAAkBhjC,IAAI,cAAWS,OAIlDA,IAAfy9B,EACH7xB,EAAQ6xB,EACEsnB,EAASxlD,IAAI,aAAeynB,GAAaA,EAAUvZ,OAC7D7B,EAAQob,EAAUjZ,cAGL/N,IAAV4L,GAAuBA,IAAU6xB,GACpC5/B,KAAKgoD,iBAAgB1W,EAAAA,EAAAA,IAAevjC,IAGtC/N,KAAKioD,iBACP,CAgHAL,cACE,MAAM,MAAE/oB,GAAU7+B,KAAKQ,MAEvB,OAAIq+B,EAEI,GAAEA,EAAMn9B,IAAI,WAAWm9B,EAAMn9B,IAAI,QAFvB,IAGpB,CAEAhB,SAAU,IAAD,IACP,IAAI,MAACm+B,EAAK,SAAEqoB,EAAQ,aAAEvmD,EAAY,WAAEC,EAAU,UAAE4jB,EAAS,GAAE7Z,EAAE,iBAAEw8C,EAAgB,cAAE5mD,EAAa,WAAEid,EAAU,SAAEvc,EAAQ,cAAE2J,GAAiB5K,KAAKQ,MAExI0B,EAAS3B,EAAc2B,SAE3B,MAAM,eAAE4/C,EAAc,qBAAEz8B,GAAyBzkB,IAMjD,GAJIi+B,IACFA,EAAQqoB,IAGNA,EAAU,OAAO,KAGrB,MAAMnhC,EAAiBplB,EAAa,kBAC9BunD,EAAYvnD,EAAa,aAC/B,IAAImkC,EAASjG,EAAMn9B,IAAI,MACnBymD,EAAuB,SAAXrjB,EAAoB,KAChC,kBAACojB,EAAS,CAACvnD,aAAcA,EACdC,WAAaA,EACb+J,GAAIA,EACJk0B,MAAOA,EACP/R,SAAWvsB,EAAcslC,mBAAmBroB,GAC5C4qC,cAAgB7nD,EAAcogC,kBAAkBnjB,GAAY9b,IAAI,sBAChE0e,SAAUpgB,KAAKgoD,gBACfb,iBAAkBA,EAClB3iC,UAAYA,EACZjkB,cAAgBA,EAChBid,WAAaA,IAG5B,MAAMwH,EAAerkB,EAAa,gBAC5B+D,EAAW/D,EAAa,YAAY,GACpCqlB,EAAerlB,EAAa,gBAC5BykB,EAAwBzkB,EAAa,yBACrCukB,EAA8BvkB,EAAa,+BAC3CwkB,EAAUxkB,EAAa,WAE7B,IAcI0nD,EACAC,EACAC,EACAC,GAjBA,OAAE3nD,IAAWiuC,EAAAA,GAAAA,GAAmBjQ,EAAO,CAAE38B,WACzC2lD,EAAgBtnD,EAAc2jC,4BAA4B1mB,EAAY0pC,KAAa94C,EAAAA,EAAAA,OAEnFiY,EAASxlB,EAASA,EAAOa,IAAI,UAAY,KACzCF,EAAOX,EAASA,EAAOa,IAAI,QAAU,KACrC+mD,EAAW5nD,EAASA,EAAOiN,MAAM,CAAC,QAAS,SAAW,KACtD46C,EAAwB,aAAX5jB,EACb6jB,EAAsB,aAAc3lD,EAAAA,EACpClC,EAAW+9B,EAAMn9B,IAAI,YAErBqM,EAAQ85C,EAAgBA,EAAcnmD,IAAI,SAAW,GACrDykB,EAAYd,GAAuBe,EAAAA,EAAAA,IAAoBvlB,GAAU,KACjE0gD,EAAaO,GAAiB7Q,EAAAA,EAAAA,IAAcpS,GAAS,KAMrD+pB,GAAqB,EA+BzB,YA7BezmD,IAAV08B,GAAuBh+B,IAC1BwnD,EAAaxnD,EAAOa,IAAI,eAGPS,IAAfkmD,GACFC,EAAYD,EAAW3mD,IAAI,QAC3B6mD,EAAoBF,EAAW3mD,IAAI,YAC1Bb,IACTynD,EAAYznD,EAAOa,IAAI,SAGpB4mD,GAAaA,EAAU14C,MAAQ04C,EAAU14C,KAAO,IACnDg5C,GAAqB,QAIRzmD,IAAV08B,IACCh+B,IACF0nD,EAAoB1nD,EAAOa,IAAI,iBAEPS,IAAtBomD,IACFA,EAAoB1pB,EAAMn9B,IAAI,YAEhC8mD,EAAe3pB,EAAMn9B,IAAI,gBACJS,IAAjBqmD,IAC
FA,EAAe3pB,EAAMn9B,IAAI,eAK3B,wBAAI,kBAAiBm9B,EAAMn9B,IAAI,QAAS,gBAAem9B,EAAMn9B,IAAI,OAC/D,wBAAIG,UAAU,uBACZ,yBAAKA,UAAWf,EAAW,2BAA6B,mBACpD+9B,EAAMn9B,IAAI,QACTZ,EAAkB,oCAAP,MAEhB,yBAAKe,UAAU,mBACXL,EACAinD,GAAa,IAAGA,KAChBpiC,GAAU,0BAAMxkB,UAAU,eAAa,KAAIwkB,EAAM,MAErD,yBAAKxkB,UAAU,yBACXK,GAAU28B,EAAMn9B,IAAI,cAAgB,aAAc,MAEtD,yBAAKG,UAAU,iBAAe,IAAIg9B,EAAMn9B,IAAI,MAAK,KAC9C2jB,GAAyBc,EAAUvW,KAAc,MAAAuW,EAAU9X,YAAU,QAAM,IAAD,IAAE1H,EAAKoY,GAAE,SAAK,kBAACiH,EAAY,CAACrf,IAAM,GAAEA,KAAOoY,IAAKiI,KAAMrgB,EAAKsgB,KAAMlI,GAAK,IAAtG,KAC1C+iC,GAAmBP,EAAW3xC,KAAc,MAAA2xC,EAAWlzC,YAAU,QAAM,IAAD,IAAE1H,EAAKoY,GAAE,SAAK,kBAACiH,EAAY,CAACrf,IAAM,GAAEA,KAAOoY,IAAKiI,KAAMrgB,EAAKsgB,KAAMlI,GAAK,IAAvG,MAG1C,wBAAIld,UAAU,8BACVg9B,EAAMn9B,IAAI,eAAiB,kBAACgD,EAAQ,CAACC,OAASk6B,EAAMn9B,IAAI,iBAAqB,MAE5EymD,GAAc3jC,IAAcokC,EAK3B,KAJF,kBAAClkD,EAAQ,CAAC7C,UAAU,kBAAkB8C,OAClC,6BAA+B,IAAA2jD,GAAS,KAATA,GAAc,SAASza,GAClD,OAAOA,CACT,IAAG5kB,UAAU3f,KAAK,SAIvB6+C,GAAc3jC,QAAoCriB,IAAtBomD,EAE3B,KADF,kBAAC7jD,EAAQ,CAAC7C,UAAU,qBAAqB8C,OAAQ,0BAA4B4jD,KAI5EJ,GAAc3jC,QAA+BriB,IAAjBqmD,EAE3B,KADF,kBAAC9jD,EAAQ,CAACC,OAAQ,oBAAsB6jD,IAIxCE,IAAeC,GAAwB,8EAGvCzmD,GAAU28B,EAAMn9B,IAAI,YAClB,6BAASG,UAAU,sBACjB,kBAACqjB,EAA2B,CAC1BwC,SAAUmX,EAAMn9B,IAAI,YACpBmmB,SAAU7nB,KAAK6oD,iBACf/gC,YAAa9nB,KAAKgoD,gBAClBrnD,aAAcA,EACdonB,uBAAuB,EACvBJ,WAAY/c,EAAcqgB,wBAAwBzN,EAAY,aAAcxd,KAAK4nD,eACjFhgC,sBAAuB7Z,KAGzB,KAGJo6C,EAAY,KACV,kBAACpiC,EAAc,CAACpb,GAAIA,EACJhK,aAAcA,EACdoN,MAAQA,EACRjN,SAAWA,EACXwsB,UAAW9I,EACX1F,YAAa+f,EAAMn9B,IAAI,QACvB0e,SAAWpgB,KAAKgoD,gBAChB1vC,OAASuvC,EAAcnmD,IAAI,UAC3Bb,OAASA,IAK3BsnD,GAAatnD,EAAS,kBAACmkB,EAAY,CAACrkB,aAAeA,EACfM,SAAUA,EAASqO,KAAK,UACxB1O,WAAaA,EACb4jB,UAAYA,EACZjkB,cAAgBA,EAChBM,OAASA,EACTmnB,QAAUmgC,EACV/mD,kBAAmB,IACnD,MAIH+mD,GAAa3jC,GAAaqa,EAAMn9B,IAAI,mBACrC,kBAAC0jB,EAAqB,CACpBhF,SAAUpgB,KAAKykB,qBACf0C,WAAY5mB,EAAco/B,6BAA6BniB,EAAYqhB,EAAMn9B,IAAI,QAASm9B,EAAMn9B,IAAI,OAChG2lB,aAAaC,EAAAA,EAAAA,IAAavZ,KAC1B,KAIF7L,GAAU28B,EAAMn9B,IAAI,YAClB,kBAACyjB,EAAO,CACN6C,QAAS6W,EAAM/wB,MAAM,CACnB,WACAlD,EAAcqgB,wBAAwBzN,EAAY,aAAcxd,KAAK4nD,iBAEvEjnD,aAAcA,EACdC,WAAYA,IAEZ,MAQd,E,0BC1Xa,MAAM8gD,WAAgBjgC,EAAAA,UAAW,cAAD,0DAclB,KACzB,IAAI,cAAElhB,EAAa,YAAEmR,EAAW,KAAElB,EAAI,OAAElF,GAAWtL,KAAKQ,MAExD,OADAkR,EAAYqtB,eAAe,CAACvuB,EAAMlF,IAC3B/K,EAAcsrB,sBAAsB,CAACrb,EAAMlF,GAAQ,IAC3D,uCAE2B,KAC1B,IAAI,KAAEkF,EAAI,OAAElF,EAAM,cAAE/K,EAAa,cAAEqK,EAAa,YAAEqd,GAAgBjoB,KAAKQ,MACnE2d,EAAmB,CACrBgM,kBAAkB,EAClBC,oBAAqB,IAGvBnC,EAAY7J,8BAA8B,CAAE5N,OAAMlF,WAClD,IAAI0gB,EAAqCzrB,EAAc8lC,sCAAsC,CAAC71B,EAAMlF,IAChG4gB,EAAuBthB,EAAcwZ,iBAAiB5T,EAAMlF,GAC5Dw9C,EAAmCl+C,EAAcihB,sBAAsB,CAACrb,EAAMlF,IAC9E2gB,EAAyBrhB,EAAcmgB,mBAAmBva,EAAMlF,GAEpE,IAAKw9C,EAGH,OAFA3qC,EAAiBgM,kBAAmB,EACpClC,EAAY/J,4BAA4B,CAAE1N,OAAMlF,SAAQ6S,sBACjD,EAET,IAAK6N,EACH,OAAO,EAET,IAAI5B,EAAsBxf,EAAcmhB,wBAAwB,CAC9DC,qCACAC,yBACAC,yBAEF,OAAK9B,GAAuBA,EAAoBzmB,OAAS,IAGzD,KAAAymB,GAAmB,KAAnBA,GAA6B2+B,IAC3B5qC,EAAiBiM,oBAAoB9a,KAAKy5C,EAAW,IAEvD9gC,EAAY/J,4BAA4B,CAAE1N,OAAMlF,SAAQ6S,sBACjD,EAAK,IACb,wCAE4B,KAC3B,IAAI,YAAEzM,EAAW,UAAEjB,EAAS,KAAED,EAAI,OAAElF,GAAWtL,KAAKQ,MAChDR,KAAKQ,MAAMi5C,WAEbz5C,KAAKQ,MAAMi5C,YAEb/nC,EAAYpB,QAAQ,CAAEG,YAAWD,OAAMlF,UAAS,IACjD,wCAE4B,KAC3B,IAAI,YAAEoG,EAAW,KAAElB,EAAI,OAAElF,GAAWtL,KAAKQ,MAEzCkR,EAAYwtB,oBAAoB,CAAC1uB,EAAMlF,IACvC,MAAW,KACToG,EAAYqtB,eAAe,CAACvuB,EAAMlF,GAAQ,GACzC,GAAG,IACP,oCAEyB09C,IACpBA,EACFhpD,KAAKipD,6BAELjpD,KAAKkpD,4BACP,IACD,qBAES,KACR,IAAIC,EAAenpD,KAAKopD,2BACpBC,EAAoBrpD,KAAKspD,4BACzBN,EAASG,GAAgBE,EAC7BrpD,KAAKupD,uBAAuBP,EAAO,IACpC,qCAE2B35C,GAASrP,KAAKQ,MAAMkR,YAAY0
tB,oBAAoB,CAACp/B,KAAKQ,MAAMgQ,KAAMxQ,KAAKQ,MAAM8K,QAAS+D,IAAI,CAE1H3O,SACE,MAAM,SAAE4sB,GAAattB,KAAKQ,MAC1B,OACI,4BAAQqB,UAAU,mCAAmCkxB,QAAU/yB,KAAK+yB,QAAUzF,SAAUA,GAAS,UAIvG,EC/Fa,MAAM2xB,WAAgBv8C,IAAAA,UAMnChC,SAAU,IAAD,EACP,IAAI,QAAE6I,EAAO,aAAE5I,GAAiBX,KAAKQ,MAErC,MAAMgpD,EAAW7oD,EAAa,YACxB+D,EAAW/D,EAAa,YAAY,GAE1C,OAAM4I,GAAYA,EAAQqG,KAIxB,yBAAK/N,UAAU,mBACb,wBAAIA,UAAU,kBAAgB,YAC9B,2BAAOA,UAAU,WACf,+BACE,wBAAIA,UAAU,cACZ,wBAAIA,UAAU,cAAY,QAC1B,wBAAIA,UAAU,cAAY,eAC1B,wBAAIA,UAAU,cAAY,UAG9B,+BAEE,MAAA0H,EAAQ8E,YAAU,QAAO,IAAqB,IAAnB1H,EAAK8H,GAAQ,EACtC,IAAIuG,IAAAA,IAAAA,MAAavG,GACf,OAAO,KAGT,MAAMqQ,EAAcrQ,EAAO/M,IAAI,eACzBF,EAAOiN,EAAOX,MAAM,CAAC,WAAaW,EAAOX,MAAM,CAAC,SAAU,SAAWW,EAAOX,MAAM,CAAC,SACnF27C,EAAgBh7C,EAAOX,MAAM,CAAC,SAAU,YAE9C,OAAQ,wBAAInH,IAAMA,GAChB,wBAAI9E,UAAU,cAAe8E,GAC7B,wBAAI9E,UAAU,cACXid,EAAqB,kBAACpa,EAAQ,CAACC,OAASma,IAA1B,MAEjB,wBAAIjd,UAAU,cAAeL,EAAI,IAAKioD,EAAgB,kBAACD,EAAQ,CAACrc,QAAU,UAAYuc,QAAUD,EAAgBE,UA5C9G,mBA4C2I,MAC1I,IACJ1gC,aA/BF,IAqCX,ECpDa,MAAM2gC,WAAelnD,IAAAA,UAUlChC,SACE,IAAI,cAAEmpD,EAAa,aAAEtpC,EAAY,gBAAE5M,EAAe,cAAET,EAAa,aAAEvS,GAAiBX,KAAKQ,MAEzF,MAAMmgD,EAAWhgD,EAAa,YAE9B,GAAGkpD,GAAiBA,EAAcC,WAChC,IAAIA,EAAaD,EAAcC,WAGjC,IAAIxxC,EAASiI,EAAanG,YAGtB2vC,EAAqB,IAAAzxC,GAAM,KAANA,GAAcH,GAA2B,WAApBA,EAAIzW,IAAI,SAAkD,UAArByW,EAAIzW,IAAI,WAE3F,IAAIqoD,GAAsBA,EAAmBx9B,QAAU,EACrD,OAAO,KAGT,IAAIy9B,EAAYr2C,EAAgB4H,QAAQ,CAAC,cAAc,GAGnD0uC,EAAiBF,EAAmBhwC,QAAO5B,GAAOA,EAAIzW,IAAI,UAE9D,OACE,yBAAKG,UAAU,kBACb,4BAAQA,UAAU,SAChB,wBAAIA,UAAU,iBAAe,UAC7B,4BAAQA,UAAU,wBAAwBkxB,QARzB,IAAM7f,EAAcQ,KAAK,CAAC,cAAes2C,IAQeA,EAAY,OAAS,SAEhG,kBAACrJ,EAAQ,CAACU,SAAW2I,EAAYE,UAAQ,GACvC,yBAAKroD,UAAU,UACX,IAAAooD,GAAc,KAAdA,GAAmB,CAAC9xC,EAAKmB,KACzB,IAAI9X,EAAO2W,EAAIzW,IAAI,QACnB,MAAY,WAATF,GAA8B,SAATA,EACf,kBAAC2oD,GAAe,CAACxjD,IAAM2S,EAAIlV,MAAQ+T,EAAIzW,IAAI,UAAYyW,EAAM2xC,WAAYA,IAEtE,SAATtoD,EACM,kBAAC4oD,GAAa,CAACzjD,IAAM2S,EAAIlV,MAAQ+T,EAAM2xC,WAAYA,SAD5D,CAEA,MAMV,EAGJ,MAAMK,GAAmB,IAA6B,IAA5B,MAAE/lD,EAAK,WAAE0lD,GAAY,EAC7C,IAAI1lD,EACF,OAAO,KAET,IAAIimD,EAAYjmD,EAAM1C,IAAI,QAE1B,OACE,yBAAKG,UAAU,iBACVuC,EACD,6BACE,4BAAOA,EAAM1C,IAAI,WAAa0C,EAAM1C,IAAI,SACtC4oD,GAAYlmD,EAAM1C,IAAI,WAAa,IAAM0C,EAAM1C,IAAI,SAAW,GAC9D0C,EAAM1C,IAAI,QAAU,sCAAY0C,EAAM1C,IAAI,SAAkB,MAC9D,0BAAMG,UAAU,kBACZuC,EAAM1C,IAAI,YAEd,yBAAKG,UAAU,cACXwoD,GAAaP,EAAa,uBAAG/2B,QAAS,IAAA+2B,GAAU,KAAVA,EAAgB,KAAMO,IAAW,gBAAgBA,GAAkB,OATtG,KAaP,EAIJD,GAAiB,IAA6B,IAA5B,MAAEhmD,EAAK,WAAE0lD,GAAY,EACvCS,EAAkB,KAYtB,OAVGnmD,EAAM1C,IAAI,QAET6oD,EADCn7C,EAAAA,KAAAA,OAAYhL,EAAM1C,IAAI,SACL,qCAAY0C,EAAM1C,IAAI,QAAQ4H,KAAK,MAEnC,qCAAYlF,EAAM1C,IAAI,SAElC0C,EAAM1C,IAAI,UAAYooD,IAC9BS,EAAkB,0CAAiBnmD,EAAM1C,IAAI,UAI7C,yBAAKG,UAAU,iBACVuC,EACD,6BACE,4BAAMkmD,GAAYlmD,EAAM1C,IAAI,WAAa,IAAM0C,EAAM1C,IAAI,SAAQ,IAAU6oD,GAC3E,0BAAM1oD,UAAU,WAAYuC,EAAM1C,IAAI,YACtC,yBAAKG,UAAU,cACXioD,EACA,uBAAG/2B,QAAS,IAAA+2B,GAAU,KAAVA,EAAgB,KAAM1lD,EAAM1C,IAAI,UAAS,gBAAgB0C,EAAM1C,IAAI,SAC7E,OAPC,KAWP,EAIV,SAAS4oD,GAAYrkD,GAAM,IAAD,EACxB,OAAO,OAACA,GAAO,IACZsO,MAAM,MAAI,QACNu7B,GAAUA,EAAO,GAAGmG,cAAgB,IAAAnG,GAAM,KAANA,EAAa,KACrDxmC,KAAK,IACV,CAOA6gD,GAAgBnkD,aAAe,CAC7B8jD,WAAY,MC1HC,MAAM1G,WAAoB1gD,IAAAA,UAAiB,cAAD,iDAmCrCuJ,GAAKjM,KAAKQ,MAAM4f,SAASnU,EAAEpI,OAAOkK,QAAM,CAjB1D1J,oBAEKrE,KAAKQ,MAAMqjD,cACZ7jD,KAAKQ,MAAM4f,SAASpgB,KAAKQ,MAAMqjD,aAAa3zC,QAEhD,CAEA7M,iCAAiCC,GAAY,IAAD,EACtCA,EAAUugD,cAAiBvgD,EAAUugD,aAAaj0C,OAIlD,OAAAtM,EAAUugD,cAAY,OAAUvgD,EAAUyK,QAC5CzK,EAAU8c,SAAS9c,EAAUugD,aAAa3zC,SAE9C,CAIAxP,SACE,IAAI,aAAEijD,EAAY,UAAEC,EAAS,UAAE/hD,EAAS,aAAEgiD,EAAY,UAAEH,EAAS,MAAE31C,GAAU/N,KAAKQ,MAElF,OAAMqjD,GAAiBA,E
AAaj0C,KAIlC,yBAAK/N,UAAY,yBAA4BA,GAAa,KACxD,4BAAQ,gBAAe8hD,EAAc,aAAYC,EAAW/hD,UAAU,eAAe4gC,GAAIihB,EAAWtjC,SAAUpgB,KAAKgoD,gBAAiBj6C,MAAOA,GAAS,IAChJ,IAAA81C,GAAY,KAAZA,GAAmBx0C,GACZ,4BAAQ1I,IAAM0I,EAAMtB,MAAQsB,GAAQA,KAC1C4Z,YAPA,IAWX,EACD,KArDoBm6B,GAAW,eAYR,CACpBhjC,SAfS,OAgBTrS,MAAO,KACP81C,cAAc31C,EAAAA,EAAAA,QAAO,CAAC,uB,gDCnB1B,SAASs8C,KAAgB,IAAC,IAAD,qBAAN52C,EAAI,yBAAJA,EAAI,gBACrB,OAAO,WAAAA,GAAI,KAAJA,GAAY4D,KAAOA,IAAGlO,KAAK,MAAI,OACxC,CAEO,MAAMmhD,WAAkB/nD,IAAAA,UAC7BhC,SACE,IAAI,WAAEgqD,EAAU,KAAEC,KAASrjB,GAAStnC,KAAKQ,MAGzC,GAAGkqD,EACD,OAAO,4BAAapjB,GAEtB,IAAIsjB,EAAiB,qBAAuBD,EAAO,QAAU,IAC7D,OACE,oCAAarjB,EAAI,CAAEzlC,UAAW2oD,GAAOljB,EAAKzlC,UAAW+oD,KAEzD,EASF,MAAMC,GAAU,CACd,OAAU,GACV,OAAU,UACV,QAAW,WACX,MAAS,OAGJ,MAAMnqC,WAAYhe,IAAAA,UAEvBhC,SACE,MAAM,KACJoqD,EAAI,aACJC,EAAY,OAIZC,EAAM,OACNrM,EAAM,QACNC,EAAO,MACPqM,KAEG3jB,GACDtnC,KAAKQ,MAET,GAAGsqD,IAASC,EACV,OAAO,+BAET,IAAIG,EAAY,GAEhB,IAAK,IAAIC,KAAUN,GAAS,CAC1B,IAAKr0B,OAAO1T,UAAU2T,eAAeC,KAAKm0B,GAASM,GACjD,SAEF,IAAIC,EAAcP,GAAQM,GAC1B,GAAGA,KAAUnrD,KAAKQ,MAAO,CACvB,IAAI6O,EAAMrP,KAAKQ,MAAM2qD,GAErB,GAAG97C,EAAM,EAAG,CACV67C,EAAU57C,KAAK,OAAS87C,GACxB,QACF,CAEAF,EAAU57C,KAAK,QAAU87C,GACzBF,EAAU57C,KAAK,OAASD,EAAM+7C,EAChC,CACF,CAEIN,GACFI,EAAU57C,KAAK,UAGjB,IAAIse,EAAU48B,GAAOljB,EAAKzlC,aAAcqpD,GAExC,OACE,oCAAa5jB,EAAI,CAAEzlC,UAAW+rB,IAElC,EAcK,MAAMnN,WAAY/d,IAAAA,UAEvBhC,SACE,OAAO,gCAASV,KAAKQ,MAAK,CAAEqB,UAAW2oD,GAAOxqD,KAAKQ,MAAMqB,UAAW,aACtE,EAQK,MAAM04C,WAAe73C,IAAAA,UAU1BhC,SACE,OAAO,mCAAYV,KAAKQ,MAAK,CAAEqB,UAAW2oD,GAAOxqD,KAAKQ,MAAMqB,UAAW,YACzE,EAED,KAdY04C,GAAM,eAMK,CACpB14C,UAAW,KAUR,MAAMwhB,GAAY7iB,GAAU,6BAAcA,GAEpCggB,GAAShgB,GAAU,0BAAWA,GAEpC,MAAM6qD,WAAe3oD,IAAAA,UAgB1BC,YAAYnC,EAAOoC,GAGjB,IAAImL,EAFJlL,MAAMrC,EAAOoC,GAAQ,sBAaXqJ,IACV,IAEI8B,GAFA,SAAEqS,EAAQ,SAAEkrC,GAAatrD,KAAKQ,MAC9BskB,EAAU,QAAS4R,KAAKzqB,EAAEpI,OAAOihB,SAItB,IAAD,EAAVwmC,EACFv9C,EAAQ,UAAA+W,GAAO,KAAPA,GAAe,SAAUymC,GAC7B,OAAOA,EAAOniC,QAChB,KAAE,QACG,SAAUmiC,GACb,OAAOA,EAAOx9C,KAChB,IAEFA,EAAQ9B,EAAEpI,OAAOkK,MAGnB/N,KAAKuD,SAAS,CAACwK,MAAOA,IAEtBqS,GAAYA,EAASrS,EAAM,IA3BzBA,EADEvN,EAAMuN,MACAvN,EAAMuN,MAENvN,EAAM8qD,SAAW,CAAC,IAAM,GAGlCtrD,KAAKmD,MAAQ,CAAE4K,MAAOA,EACxB,CAwBA1K,iCAAiCC,GAE5BA,EAAUyK,QAAU/N,KAAKQ,MAAMuN,OAChC/N,KAAKuD,SAAS,CAAEwK,MAAOzK,EAAUyK,OAErC,CAEArN,SAAS,IAAD,IACN,IAAI,cAAE8qD,EAAa,SAAEF,EAAQ,gBAAEG,EAAe,SAAEn+B,GAAattB,KAAKQ,MAC9DuN,GAAwB,QAAhB,EAAA/N,KAAKmD,MAAM4K,aAAK,OAAM,QAAN,EAAhB,EAAkBlB,YAAI,WAAN,EAAhB,YAA8B7M,KAAKmD,MAAM4K,MAErD,OACE,4BAAQlM,UAAW7B,KAAKQ,MAAMqB,UAAWypD,SAAWA,EAAWv9C,MAAOA,EAAOqS,SAAWpgB,KAAKogB,SAAWkN,SAAUA,GAC9Gm+B,EAAkB,4BAAQ19C,MAAM,IAAE,MAAe,KAEjD,IAAAy9C,GAAa,KAAbA,GAAkB,SAAU3d,EAAMlnC,GAChC,OAAO,4BAAQA,IAAMA,EAAMoH,MAAQ+iC,OAAOjD,IAAUiD,OAAOjD,GAC7D,IAIR,EACD,KA1EYwd,GAAM,eAWK,CACpBC,UAAU,EACVG,iBAAiB,IA+Dd,MAAM5K,WAAan+C,IAAAA,UAExBhC,SACE,OAAO,8BAAOV,KAAKQ,MAAK,CAAEsD,IAAI,sBAAsBjC,UAAW2oD,GAAOxqD,KAAKQ,MAAMqB,UAAW,UAC9F,EAQF,MAAM6pD,GAAY,IAAD,IAAC,SAACr3B,GAAS,SAAK,yBAAKxyB,UAAU,aAAW,IAAGwyB,EAAQ,IAAQ,EAMvE,MAAMssB,WAAiBj+C,IAAAA,UAa5BipD,oBACE,OAAI3rD,KAAKQ,MAAM6gD,SAGb,kBAACqK,GAAQ,KACN1rD,KAAKQ,MAAM6zB,UAHP,kCAMX,CAEA3zB,SACE,IAAI,SAAEwpD,EAAQ,SAAE7I,EAAQ,SAAEhtB,GAAar0B,KAAKQ,MAE5C,OAAI0pD,GAGJ71B,EAAWgtB,EAAWhtB,EAAW,KAE/B,kBAACq3B,GAAQ,KACNr3B,IALIr0B,KAAK2rD,mBAQhB,EAED,KArCYhL,GAAQ,eAQG,CACpBU,UAAU,EACV6I,UAAU,ICvOC,MAAM0B,WAAiBlpD,IAAAA,UAEpCC,cAAsB,IAAD,EACnBE,SAAS,WACT7C,KAAK6rD,YAAc,MAAA7rD,KAAK8rD,cAAY,OAAM9rD,KAC5C,CAEA8rD,aAAaC,EAAWh4C,GACtB/T,KAAKQ,MAAM0S,cAAcQ,KAAKq4C,EAAWh4C,EAC3C,CAEAi4C,OAAOrlD,EAAKoN,GACV,IAAI,cAAEb,GAAkBlT,KAAKQ,MAC7B0S,EAAcQ,KAAK/M
,EAAKoN,EAC1B,CAEArT,SACE,IAAI,cAAEH,EAAa,gBAAEoT,EAAe,cAAET,EAAa,aAAEvS,GAAiBX,KAAKQ,MACvEia,EAAYla,EAAcsb,mBAE9B,MAAM8kC,EAAWhgD,EAAa,YAE9B,OACI,6BACE,wBAAIkB,UAAU,kBAAgB,YAG5B,IAAA4Y,GAAS,KAATA,GAAe,CAACE,EAAQpE,KACtB,IAAIisB,EAAa7nB,EAAOjZ,IAAI,cAExBqqD,EAAY,CAAC,gBAAiBx1C,GAC9B2qC,EAAUvtC,EAAgB4H,QAAQwwC,GAAW,GAGjD,OACE,yBAAKplD,IAAK,YAAY4P,GAGpB,wBAAIwc,QANS,IAAK7f,EAAcQ,KAAKq4C,GAAY7K,GAMxBr/C,UAAU,qBAAmB,IAAGq/C,EAAU,IAAM,IAAK3qC,GAE9E,kBAACoqC,EAAQ,CAACU,SAAUH,EAASgJ,UAAQ,GAEjC,IAAA1nB,GAAU,KAAVA,GAAgBviB,IACd,IAAI,KAAEzP,EAAI,OAAElF,EAAM,GAAEm3B,GAAOxiB,EAAGtJ,WAC1Bs1C,EAAiB,aACjBC,EAAWzpB,EACX1uB,EAAQJ,EAAgB4H,QAAQ,CAAC0wC,EAAgBC,IACrD,OAAO,kBAAC1qC,GAAa,CAAC7a,IAAK87B,EACLjyB,KAAMA,EACNlF,OAAQA,EACRm3B,GAAIjyB,EAAO,IAAMlF,EACjByI,MAAOA,EACPm4C,SAAUA,EACVD,eAAgBA,EAChBloD,KAAO,cAAamoD,IACpBn5B,QAAS7f,EAAcQ,MAAQ,IACpDuV,WAIH,IAEPA,UAGHxO,EAAU7K,KAAO,GAAK,gEAGhC,EAWK,MAAM4R,WAAsB9e,IAAAA,UAEjCC,YAAYnC,GAAQ,IAAD,EACjBqC,MAAMrC,GACNR,KAAK+yB,QAAU,MAAA/yB,KAAKmsD,UAAQ,OAAMnsD,KACpC,CAEAmsD,WACE,IAAI,SAAED,EAAQ,eAAED,EAAc,QAAEl5B,EAAO,MAAEhf,GAAU/T,KAAKQ,MACxDuyB,EAAQ,CAACk5B,EAAgBC,IAAYn4C,EACvC,CAEArT,SACE,IAAI,GAAE+hC,EAAE,OAAEn3B,EAAM,MAAEyI,EAAK,KAAEhQ,GAAS/D,KAAKQ,MAEvC,OACE,kBAACqgD,GAAI,CAAC98C,KAAOA,EAAOgvB,QAAS/yB,KAAK+yB,QAASlxB,UAAY,uBAAqBkS,EAAQ,QAAU,KAC5F,6BACE,2BAAOlS,UAAY,cAAayJ,KAAWA,EAAO2qC,eAClD,0BAAMp0C,UAAU,cAAe4gC,IAIvC,EC3Fa,MAAM0b,WAAyBz7C,IAAAA,UAC5C2B,oBAGKrE,KAAKQ,MAAMsmB,eACZ9mB,KAAKosD,SAASr+C,MAAQ/N,KAAKQ,MAAMsmB,aAErC,CAEApmB,SAIE,MAAM,MAAEqN,EAAK,aAAEiV,EAAY,aAAE8D,KAAiBulC,GAAersD,KAAKQ,MAClE,OAAO,kCAAW6rD,EAAU,CAAEjsD,IAAKqZ,GAAKzZ,KAAKosD,SAAW3yC,IAC1D,ECvBK,MAAM6yC,WAAqB5pD,IAAAA,UAMhChC,SACE,IAAI,KAAEksB,EAAI,SAAEC,GAAa7sB,KAAKQ,MAE9B,OACE,yBAAKqB,UAAU,YAAU,eACV+qB,EAAMC,EAAQ,KAGjC,EAIF,MAAM0/B,WAAgB7pD,IAAAA,UASpBhC,SACE,IAAI,KAAE+J,EAAI,aAAE9J,EAAY,eAAEsK,EAAgBlI,IAAKiU,GAAWhX,KAAKQ,MAC3DO,EAAO0J,EAAK/I,IAAI,SAAW,gBAC3BqB,EAAM29C,GAAaj2C,EAAK/I,IAAI,OAAQsV,EAAS,CAAC/L,mBAC9CuhD,EAAQ/hD,EAAK/I,IAAI,SAErB,MAAMm/C,EAAOlgD,EAAa,QAE1B,OACE,yBAAKkB,UAAU,iBACXkB,GAAO,6BAAK,kBAAC89C,EAAI,CAAC98C,MAAOL,EAAAA,EAAAA,IAAYX,GAAOc,OAAO,UAAW9C,EAAI,eAClEyrD,GACA,kBAAC3L,EAAI,CAAC98C,MAAML,EAAAA,EAAAA,IAAa,UAAS8oD,MAC9BzpD,EAAO,iBAAgBhC,IAAU,WAAUA,KAKvD,EAGF,MAAM0rD,WAAgB/pD,IAAAA,UASpBhC,SACE,IAAI,QAAEgsD,EAAO,aAAE/rD,EAAY,eAAEsK,EAAgBlI,IAAKiU,GAAYhX,KAAKQ,MAEnE,MAAMqgD,EAAOlgD,EAAa,QAC1B,IAAII,EAAO2rD,EAAQhrD,IAAI,SAAW,UAC9BqB,EAAM29C,GAAagM,EAAQhrD,IAAI,OAAQsV,EAAS,CAAC/L,mBAErD,OACE,yBAAKpJ,UAAU,iBAEXkB,EAAM,kBAAC89C,EAAI,CAACh9C,OAAO,SAASE,MAAOL,EAAAA,EAAAA,IAAYX,IAAShC,GACxD,8BAAQA,GAIhB,EAGK,MAAM4rD,WAAgBjqD,IAAAA,cAO3BhC,SACE,MAAM,IAAEqC,EAAG,aAAEpC,GAAiBX,KAAKQ,MAE7BqgD,EAAOlgD,EAAa,QAE1B,OAAO,kBAACkgD,EAAI,CAACh9C,OAAO,SAASE,MAAOL,EAAAA,EAAAA,IAAYX,IAAO,0BAAMlB,UAAU,OAAK,IAAIkB,GAClF,EAGa,MAAM6pD,WAAalqD,IAAAA,UAYhChC,SACE,IAAI,KAAE6b,EAAI,IAAExZ,EAAG,KAAE6pB,EAAI,SAAEC,EAAQ,aAAElsB,EAAY,aAAEwhC,EAAY,eAAEl3B,EAAgBlI,IAAKiU,GAAYhX,KAAKQ,MAC/F4hC,EAAU7lB,EAAK7a,IAAI,WACnBod,EAAcvC,EAAK7a,IAAI,eACvB6hB,EAAQhH,EAAK7a,IAAI,SACjBmrD,EAAoBnM,GAAankC,EAAK7a,IAAI,kBAAmBsV,EAAS,CAAC/L,mBACvE6hD,EAAUvwC,EAAK7a,IAAI,WACnBgrD,EAAUnwC,EAAK7a,IAAI,WAEnB4/C,EAAkBZ,GADGve,GAAgBA,EAAazgC,IAAI,OACHsV,EAAS,CAAC/L,mBAC7D8hD,EAA0B5qB,GAAgBA,EAAazgC,IAAI,eAE/D,MAAMgD,EAAW/D,EAAa,YAAY,GACpCkgD,EAAOlgD,EAAa,QACpBysB,EAAezsB,EAAa,gBAC5BgsD,EAAUhsD,EAAa,WACvB2rD,EAAe3rD,EAAa,gBAElC,OACE,yBAAKkB,UAAU,QACb,4BAAQA,UAAU,QAChB,wBAAIA,UAAU,SAAW0hB,EACrB6e,GAAW,kBAAChV,EAAY,CAACgV,QAASA,KAEpCxV,GAAQC,EAAW,kBAACy/B,EAAY,CAAC1/B,KAAOA,EAAOC,SAAWA,IAAgB,KAC1E9pB,GAAO,kBAAC4pD,EAAO,CAAChsD
,aAAcA,EAAcoC,IAAKA,KAGrD,yBAAKlB,UAAU,eACb,kBAAC6C,EAAQ,CAACC,OAASma,KAInB+tC,GAAqB,yBAAKhrD,UAAU,aAClC,kBAACg/C,EAAI,CAACh9C,OAAO,SAASE,MAAOL,EAAAA,EAAAA,IAAYmpD,IAAoB,qBAIhEC,GAAWA,EAAQl9C,KAAO,kBAAC28C,GAAO,CAAC5rD,aAAcA,EAAc8J,KAAOqiD,EAAU7hD,eAAgBA,EAAgBlI,IAAKA,IAAU,KAC/H2pD,GAAWA,EAAQ98C,KAAO,kBAAC68C,GAAO,CAAC9rD,aAAcA,EAAc+rD,QAAUA,EAAUzhD,eAAgBA,EAAgBlI,IAAKA,IAAS,KAChIu+C,EACE,kBAACT,EAAI,CAACh/C,UAAU,gBAAgBgC,OAAO,SAASE,MAAML,EAAAA,EAAAA,IAAY49C,IAAmByL,GAA2BzL,GAClH,KAIR,ECzJa,MAAM0L,WAAsBtqD,IAAAA,UASzChC,SACE,MAAM,cAACH,EAAa,aAAEI,EAAY,cAAEiK,GAAiB5K,KAAKQ,MAEpD+b,EAAOhc,EAAcgc,OACrBxZ,EAAMxC,EAAcwC,MACpB8pB,EAAWtsB,EAAcssB,WACzBD,EAAOrsB,EAAcqsB,OACrBuV,EAAe5hC,EAAc4hC,eAC7Bl3B,EAAiBL,EAAcK,iBAE/B2hD,EAAOjsD,EAAa,QAE1B,OACE,6BACG4b,GAAQA,EAAKgQ,QACZ,kBAACqgC,EAAI,CAACrwC,KAAMA,EAAMxZ,IAAKA,EAAK6pB,KAAMA,EAAMC,SAAUA,EAAUsV,aAAcA,EACpExhC,aAAcA,EAAcsK,eAAgBA,IAChD,KAGV,EC5Ba,MAAM2V,WAAmBle,IAAAA,UACtChC,SACE,OAAO,IACT,ECEa,MAAM2hD,WAA2B3/C,IAAAA,UAC9ChC,SACE,OACE,yBAAKmB,UAAU,mCAAmC0hB,MAAM,qBACtD,kBAAC,GAAAgQ,gBAAe,CAAC/gB,KAAMxS,KAAKQ,MAAMkiD,YAChC,yBAAK1gD,MAAM,KAAKD,OAAO,MACrB,yBAAKgC,KAAK,QAAQkvB,UAAU,YAKtC,EClBa,MAAMg6B,WAAevqD,IAAAA,UAClChC,SACE,OACE,yBAAKmB,UAAU,UAEnB,ECJa,MAAMqrD,WAAwBxqD,IAAAA,UAAiB,cAAD,gDASzCuJ,IAChB,MAAOpI,QAAQ,MAACkK,IAAU9B,EAC1BjM,KAAKQ,MAAM0S,cAAc+H,aAAalN,EAAM,GAC7C,CAEDrN,SACE,MAAM,cAACH,EAAa,gBAAEoT,EAAe,aAAEhT,GAAgBX,KAAKQ,MACtDkgB,EAAM/f,EAAa,OAEnBwsD,EAA8C,YAAlC5sD,EAAcoX,gBAC1By1C,EAA6C,WAAlC7sD,EAAcoX,gBACzBe,EAAS/E,EAAgB8H,gBAEzB4xC,EAAa,CAAC,0BAIpB,OAHID,GAAUC,EAAW/9C,KAAK,UAC1B69C,GAAWE,EAAW/9C,KAAK,WAG7B,6BACc,OAAXoJ,IAA8B,IAAXA,GAA+B,UAAXA,EAAqB,KAC3D,yBAAK7W,UAAU,oBACb,kBAAC6e,EAAG,CAAC7e,UAAU,iBAAiBmpD,OAAQ,IACtC,2BAAOnpD,UAAWwrD,EAAW/jD,KAAK,KAAMgkD,YAAY,gBAAgB9rD,KAAK,OAClE4e,SAAUpgB,KAAKutD,eAAgBx/C,OAAkB,IAAX2K,GAA8B,SAAXA,EAAoB,GAAKA,EAClF4U,SAAU6/B,MAM7B,ECpCF,MAAMvqC,GAAOC,SAASC,UAEP,MAAMolC,WAAkBnlC,EAAAA,cAuBrCpgB,YAAYnC,EAAOoC,GACjBC,MAAMrC,EAAOoC,GAAQ,0BAiBPpC,IACd,IAAI,MAAEq+B,EAAK,UAAEra,EAAS,cAAE4jC,EAAc,IAAO5nD,EACzCm+B,EAAQ,OAAOxoB,KAAKiyC,GACpBoF,EAAS,QAAQr3C,KAAKiyC,GACtBxoB,EAAajB,EAAQE,EAAMn9B,IAAI,aAAem9B,EAAMn9B,IAAI,SAE5D,QAAoBS,IAAfy9B,EAA2B,CAC9B,IAAIvwB,GAAOuwB,GAAc4tB,EAAS,KAAO5tB,EACzC5/B,KAAKuD,SAAS,CAAEwK,MAAOsB,IACvBrP,KAAKogB,SAAS/Q,EAAK,CAACsvB,MAAOA,EAAO8uB,UAAWjpC,GAC/C,MACMma,EACF3+B,KAAKogB,SAASpgB,KAAKq5B,OAAO,OAAQ,CAACsF,MAAOA,EAAO8uB,UAAWjpC,IAE5DxkB,KAAKogB,SAASpgB,KAAKq5B,SAAU,CAACo0B,UAAWjpC,GAE7C,IACD,oBAES8S,IACR,IAAI,MAAEuH,EAAOl0B,IAAG,YAAC6vB,IAAiBx6B,KAAKQ,MACnCK,EAAS25B,EAAYqE,EAAMhyB,QAE/B,OAAOsX,EAAAA,EAAAA,IAAgBtjB,EAAQy2B,EAAK,CAClCl2B,kBAAkB,GAClB,IACH,sBAEU,CAAC2M,EAAO,KAA0B,IAA1B,UAAE0/C,EAAS,MAAE9uB,GAAO,EACrC3+B,KAAKuD,SAAS,CAACwK,QAAO0/C,cACtBztD,KAAK0tD,UAAU3/C,EAAO4wB,EAAM,IAC7B,uBAEW,CAACtvB,EAAKsvB,MAAa3+B,KAAKQ,MAAM4f,UAAYwC,IAAMvT,EAAKsvB,EAAM,IAAE,4BAExD1yB,IACf,MAAM,cAACm8C,GAAiBpoD,KAAKQ,MACvBm+B,EAAQ,OAAOxoB,KAAKiyC,GACpBllC,EAAajX,EAAEpI,OAAOkK,MAC5B/N,KAAKogB,SAAS8C,EAAY,CAACyb,QAAO8uB,UAAWztD,KAAKmD,MAAMsqD,WAAW,IACpE,6BAEiB,IAAMztD,KAAKuD,UAAUJ,IAAK,CAAMsqD,WAAYtqD,EAAMsqD,gBAzDlEztD,KAAKmD,MAAQ,CACXsqD,WAAW,EACX1/C,MAAO,GAGX,CAEA1J,oBACErE,KAAK2tD,aAAaj3B,KAAK12B,KAAMA,KAAKQ,MACpC,CAEA6C,iCAAiCC,GAC/BtD,KAAK2tD,aAAaj3B,KAAK12B,KAAMsD,EAC/B,CA8CA5C,SACE,IAAI,iBACFymD,EAAgB,MAChBtoB,EAAK,UACLra,EAAS,cACTjkB,EAAa,WACbid,EAAU,WACV5c,EAAU,aACVD,GACEX,KAAKQ,MAET,MAAM+5C,EAAS55C,EAAa,UACtB0iB,EAAW1iB,EAAa,YACxBskB,EAAgBtkB,EAAa,iBAC7ByiD,EAAcziD,EAAa,eAEjC,IACI2X,GADY/X,EAAgBA,EAAc2jC,4BAA4B1mB,EAAYqhB,GAASA,GACxEn9B,IAAI,UAAU0N,EAAAA,EAAAA,SACjCg5C,EAAgB7nD,EAA
cogC,kBAAkBnjB,GAAY9b,IAAI,sBAChEorB,EAAW9sB,KAAKQ,MAAMssB,UAAY9sB,KAAKQ,MAAMssB,SAASld,KAAO5P,KAAKQ,MAAMssB,SAAWo7B,GAAU0F,YAAY9gC,UAEzG,MAAE/e,EAAK,UAAE0/C,GAAcztD,KAAKmD,MAC5BqkB,EAAW,KAMf,OALuBC,EAAAA,GAAAA,GAAkC1Z,KAEvDyZ,EAAW,QAIX,yBAAK3lB,UAAU,aAAa,kBAAiBg9B,EAAMn9B,IAAI,QAAS,gBAAem9B,EAAMn9B,IAAI,OAErF+rD,GAAajpC,EACT,kBAACnB,EAAQ,CAACxhB,UAAY,oBAAuByW,EAAOiU,QAAU,WAAa,IAAKxe,MAAOA,EAAOqS,SAAWpgB,KAAK6tD,iBAC7G9/C,GAAS,kBAACkX,EAAa,CAACpjB,UAAU,sBACvB2lB,SAAWA,EACX5mB,WAAaA,EACbmN,MAAQA,IAE1B,yBAAKlM,UAAU,sBAEV2iB,EACY,yBAAK3iB,UAAU,mBAChB,kBAAC04C,EAAM,CAAC14C,UAAW4rD,EAAY,sCAAwC,oCAC9D16B,QAAS/yB,KAAK8tD,iBAAmBL,EAAY,SAAW,SAHhE,KAOf,2BAAO1kC,QAAQ,IACb,wDACA,kBAACq6B,EAAW,CACVr1C,MAAQq6C,EACRvE,aAAe/2B,EACf1M,SAAU+mC,EACVtlD,UAAU,0BACV+hD,UAAU,6BAOtB,EACD,KAnJoBsE,GAAS,cAgBP,CACnBp7B,UAAU5e,EAAAA,EAAAA,QAAO,CAAC,qBAClB2wB,OAAO3wB,EAAAA,EAAAA,QAAO,CAAC,GACfkS,SAAUwC,GACVukC,iBAAkBvkC,K,eCrBP,MAAMm9B,WAAar9C,IAAAA,UAMhChC,SACE,IAAI,QAAE+F,EAAO,WAAE7F,GAAeZ,KAAKQ,MAC/ButD,GAAO1+B,EAAAA,GAAAA,mCAAkC5oB,GAE7C,MAAMsQ,EAASnW,IAETotD,EAAYtsD,KAAIqV,EAAQ,6BAC1B,kBAAC,MAAiB,CAChByQ,SAAS,OACT3lB,UAAU,kBACV+T,OAAO8c,EAAAA,GAAAA,IAAShxB,KAAIqV,EAAQ,2BAE3Bg3C,GAGL,8BAAUp7B,UAAU,EAAM9wB,UAAU,OAAOkM,MAAOggD,IAEpD,OACE,yBAAKlsD,UAAU,gBACb,oCACA,yBAAKA,UAAU,qBACX,kBAAC,GAAA0xB,gBAAe,CAAC/gB,KAAMu7C,GAAM,mCAEjC,6BACGC,GAIT,ECtCa,MAAMrM,WAAgBj/C,IAAAA,UAAiB,cAAD,0CAyBvCuJ,IACVjM,KAAK+gC,UAAW90B,EAAEpI,OAAOkK,MAAO,IACjC,uBAEaA,IACZ,IAAI,KAAEyC,EAAI,OAAElF,EAAM,YAAEoG,GAAgB1R,KAAKQ,MAEzCkR,EAAYqvB,UAAWhzB,EAAOyC,EAAMlF,EAAQ,GAC7C,CAvBD2iD,4BACE,IAAI,QAAEjhC,GAAYhtB,KAAKQ,MAGvBR,KAAK+gC,UAAU/T,EAAQ9c,QACzB,CAEA7M,iCAAiCC,GAAY,IAAD,EACpCtD,KAAKQ,MAAMwhD,eAAkB,OAAA1+C,EAAU0pB,SAAO,OAAUhtB,KAAKQ,MAAMwhD,gBAGvEhiD,KAAK+gC,UAAUz9B,EAAU0pB,QAAQ9c,QAErC,CAYAxP,SAAU,IAAD,EACP,IAAI,QAAEssB,EAAO,cAAEg1B,GAAkBhiD,KAAKQ,MAEtC,OACE,2BAAOuoB,QAAQ,WACb,0BAAMlnB,UAAU,iBAAe,WAC/B,4BAAQue,SAAWpgB,KAAKogB,SAAWrS,MAAOi0C,GACtC,MAAAh1B,EAAQxd,YAAU,QAChBqR,GAAY,4BAAQ9S,MAAQ8S,EAASla,IAAMka,GAAWA,KACxDoI,WAIV,EChDa,MAAMilC,WAAyBxrD,IAAAA,UAQ5ChC,SACE,MAAM,YAACgR,EAAW,cAAEnR,EAAa,aAAEI,GAAgBX,KAAKQ,MAElDwhD,EAAgBzhD,EAAcmgC,kBAC9B1T,EAAUzsB,EAAcysB,UAExB20B,EAAUhhD,EAAa,WAI7B,OAF0BqsB,GAAWA,EAAQpd,KAGzC,kBAAC+xC,EAAO,CACNK,cAAeA,EACfh1B,QAASA,EACTtb,YAAaA,IAEb,IACR,ECvBa,MAAMy8C,WAAsB1sC,EAAAA,UAwBzC9e,YAAYnC,EAAOoC,GACjBC,MAAMrC,EAAOoC,GAAQ,6BA0BP,KACX5C,KAAKQ,MAAM4tD,UACZpuD,KAAKQ,MAAM4tD,SAASpuD,KAAKQ,MAAM6tD,WAAWruD,KAAKmD,MAAMmrD,UAGvDtuD,KAAKuD,SAAS,CACZ+qD,UAAWtuD,KAAKmD,MAAMmrD,UACtB,IACH,oBAESluD,IACR,GAAIA,GAAOJ,KAAKQ,MAAMmT,gBAAiB,CACrC,MAAMmB,EAAc9U,KAAKQ,MAAMmT,gBAAgBoB,iBAE3CC,IAAAA,GAAMF,EAAa9U,KAAKQ,MAAMS,WAAYjB,KAAKuuD,kBACnDvuD,KAAKQ,MAAM0S,cAAc2B,cAAc7U,KAAKQ,MAAMS,SAAUb,EAAI8V,cAClE,KAxCA,IAAI,SAAEo4C,EAAQ,iBAAEE,GAAqBxuD,KAAKQ,MAE1CR,KAAKmD,MAAQ,CACXmrD,SAAWA,EACXE,iBAAkBA,GAAoBL,GAAcnoD,aAAawoD,iBAErE,CAEAnqD,oBACE,MAAM,iBAAEoqD,EAAgB,SAAEH,EAAQ,UAAED,GAAcruD,KAAKQ,MACpDiuD,GAAoBH,GAIrBtuD,KAAKQ,MAAM4tD,SAASC,EAAWC,EAEnC,CAEAjrD,iCAAiCC,GAC5BtD,KAAKQ,MAAM8tD,WAAahrD,EAAUgrD,UACjCtuD,KAAKuD,SAAS,CAAC+qD,SAAUhrD,EAAUgrD,UAEzC,CAqBA5tD,SACE,MAAM,MAAE6iB,EAAK,QAAEqK,GAAY5tB,KAAKQ,MAEhC,OAAGR,KAAKmD,MAAMmrD,UACTtuD,KAAKQ,MAAMiuD,iBACL,0BAAM5sD,UAAW+rB,GAAW,IAChC5tB,KAAKQ,MAAM6zB,UAMhB,0BAAMxyB,UAAW+rB,GAAW,GAAIxtB,IAAKJ,KAAK0W,QACxC,4BAAQ,gBAAe1W,KAAKmD,MAAMmrD,SAAUzsD,UAAU,oBAAoBkxB,QAAS/yB,KAAKuuD,iBACpFhrC,GAAS,0BAAM1hB,UAAU,WAAW0hB,GACtC,0BAAM1hB,UAAY,gBAAmB7B,KAAKmD,MAAMmrD,SAAW,GAAK,iBAC7DtuD,KAAKmD,MAAMmrD,UAAY,8BAAOtuD,KAAKmD,MAAMqrD,mBAG5CxuD,KAAKmD,MAAMmrD,UAAYtuD,KAAKQ,MAAM6zB,SAG1C,EACD,KA7FoB85B,GA
Aa,eAeV,CACpBK,iBAAkB,QAClBF,UAAU,EACV/qC,MAAO,KACP6qC,SAAU,OACVK,kBAAkB,EAClBxtD,SAAU+T,IAAAA,KAAQ,M,yBCpBP,MAAMgQ,WAAqBtiB,IAAAA,UAaxCC,YAAYnC,EAAOoC,GACjBC,MAAMrC,EAAOoC,GAAQ,uBAmBTqJ,IACZ,IAAMpI,QAAWm6C,SAAU,KAAEj9C,KAAakL,EAE1CjM,KAAKuD,SAAS,CACZmrD,UAAW3tD,GACX,IAvBF,IAAI,WAAEH,EAAU,UAAE4jB,GAAcxkB,KAAKQ,OACjC,sBAAEmuD,GAA0B/tD,IAE5B8tD,EAAYC,EAEc,YAA1BA,GAAiE,UAA1BA,IACzCD,EAAY,WAGXlqC,IACDkqC,EAAY,WAGd1uD,KAAKmD,MAAQ,CACXurD,YAEJ,CAUArrD,iCAAiCC,GAE7BA,EAAUkhB,YACTxkB,KAAKQ,MAAMgkB,WACZxkB,KAAKQ,MAAMwnB,SAEXhoB,KAAKuD,SAAS,CAAEmrD,UAAW,WAE/B,CAEAhuD,SACE,IAAI,aAAEC,EAAY,cAAEJ,EAAa,OAAEM,EAAM,QAAEmnB,EAAO,UAAExD,EAAS,WAAE5jB,EAAU,SAAEK,EAAQ,gBAAEE,EAAe,iBAAEC,GAAqBpB,KAAKQ,OAC5H,wBAAEouD,GAA4BhuD,IAClC,MAAMiuD,EAAeluD,EAAa,gBAC5BskB,EAAgBtkB,EAAa,iBAC7BmuD,EAAeld,KAAY,GAAG3uC,SAAS,UACvC8rD,EAAiBnd,KAAY,GAAG3uC,SAAS,UACzC+rD,EAAapd,KAAY,GAAG3uC,SAAS,UACrCgsD,EAAerd,KAAY,GAAG3uC,SAAS,UAE7C,IAAIf,EAAS3B,EAAc2B,SAE3B,OACE,yBAAKL,UAAU,iBACb,wBAAIA,UAAU,MAAMkiD,KAAK,WACvB,wBAAIliD,UAAW6D,KAAG,UAAW,CAAEwpD,OAAiC,YAAzBlvD,KAAKmD,MAAMurD,YAA4B3K,KAAK,gBACjF,4BACE,gBAAegL,EACf,gBAAwC,YAAzB/uD,KAAKmD,MAAMurD,UAC1B7sD,UAAU,WACV,YAAU,UACV4gC,GAAIqsB,EACJ/7B,QAAU/yB,KAAK0uD,UACf3K,KAAK,OAEJv/B,EAAY,aAAe,kBAG9B3jB,GACA,wBAAIgB,UAAW6D,KAAG,UAAW,CAAEwpD,OAAiC,UAAzBlvD,KAAKmD,MAAMurD,YAA0B3K,KAAK,gBAC/E,4BACE,gBAAekL,EACf,gBAAwC,UAAzBjvD,KAAKmD,MAAMurD,UAC1B7sD,UAAW6D,KAAG,WAAY,CAAEypD,SAAU3qC,IACtC,YAAU,QACVie,GAAIusB,EACJj8B,QAAU/yB,KAAK0uD,UACf3K,KAAK,OAEJ7hD,EAAS,SAAW,WAKH,YAAzBlC,KAAKmD,MAAMurD,WACV,yBACE,cAAsC,YAAzB1uD,KAAKmD,MAAMurD,UACxB,kBAAiBI,EACjB,YAAU,eACVrsB,GAAIssB,EACJhL,KAAK,WACLqL,SAAS,KAERpnC,GACC,kBAAC/C,EAAa,CAAClX,MAAM,yBAAyBnN,WAAaA,KAKvC,UAAzBZ,KAAKmD,MAAMurD,WACV,yBACE,cAAsC,YAAzB1uD,KAAKmD,MAAMurD,UACxB,kBAAiBM,EACjB,YAAU,aACVvsB,GAAIwsB,EACJlL,KAAK,WACLqL,SAAS,KAET,kBAACP,EAAY,CACXhuD,OAASA,EACTF,aAAeA,EACfC,WAAaA,EACbL,cAAgBA,EAChBgC,YAAcqsD,EACd3tD,SAAUA,EACVE,gBAAmBA,EACnBC,iBAAoBA,KAMhC,ECvIa,MAAMytD,WAAqBptC,EAAAA,UAAW,cAAD,0CAkBvC,CAAC1gB,EAAKwa,KAEZvb,KAAKQ,MAAM0S,eACZlT,KAAKQ,MAAM0S,cAAcQ,KAAK1T,KAAKQ,MAAMi9B,SAAUliB,EACrD,GACD,CAED7a,SACE,IAAI,aAAEC,EAAY,WAAEC,GAAeZ,KAAKQ,MACxC,MAAMN,EAAQS,EAAa,SAE3B,IAAI2tD,EAMJ,OALGtuD,KAAKQ,MAAMmT,kBAEZ26C,EAAWtuD,KAAKQ,MAAMmT,gBAAgB4H,QAAQvb,KAAKQ,MAAMi9B,WAGpD,yBAAK57B,UAAU,aACpB,kBAAC3B,EAAK,QAAMF,KAAKQ,MAAK,CAAGI,WAAaA,EAAa0tD,SAAUA,EAAU9rD,MAAQ,EAAI4rD,SAAWpuD,KAAKouD,SAAW7rD,YAAcvC,KAAKQ,MAAM+B,aAAe,KAE1J,E,eCtCa,MAAM8sD,WAAe5tC,EAAAA,UAAW,cAAD,mDAUxB,IACHzhB,KAAKQ,MAAMD,cAAc2B,SACxB,CAAC,aAAc,WAAa,CAAC,iBAC9C,iCAEqB,IACb,MACR,0BAEc,CAACnB,EAAMkwB,KACpB,MAAM,cAAE/d,GAAkBlT,KAAKQ,MAC/B0S,EAAcQ,KAAK,IAAI1T,KAAKsvD,oBAAqBvuD,GAAOkwB,GACrDA,GACDjxB,KAAKQ,MAAMkR,YAAY6sB,uBAAuB,IAAIv+B,KAAKsvD,oBAAqBvuD,GAC9E,IACD,0BAEeX,IACVA,GACFJ,KAAKQ,MAAM0S,cAAc2B,cAAc7U,KAAKsvD,oBAAqBlvD,EACnE,IACD,yBAEcA,IACb,GAAIA,EAAK,CACP,MAAMW,EAAOX,EAAIioB,aAAa,aAC9BroB,KAAKQ,MAAM0S,cAAc2B,cAAc,IAAI7U,KAAKsvD,oBAAqBvuD,GAAOX,EAC9E,IACD,CAEDM,SAAS,IAAD,EACN,IAAI,cAAEH,EAAa,aAAEI,EAAY,gBAAEgT,EAAe,cAAET,EAAa,WAAEtS,GAAeZ,KAAKQ,MACnFyO,EAAc1O,EAAc0O,eAC5B,aAAEypC,EAAY,yBAAE6W,GAA6B3uD,IACjD,IAAKqO,EAAYW,MAAQ2/C,EAA2B,EAAG,OAAO,KAE9D,MAAMC,EAAexvD,KAAKsvD,oBAC1B,IAAIG,EAAa97C,EAAgB4H,QAAQi0C,EAAcD,EAA2B,GAAsB,SAAjB7W,GACvF,MAAMx2C,EAAS3B,EAAc2B,SAEvB2sD,EAAeluD,EAAa,gBAC5BggD,EAAWhgD,EAAa,YACxBwtD,EAAgBxtD,EAAa,iBAC7BigB,EAAajgB,EAAa,cAAc,GAE9C,OAAO,6BAASkB,UAAY4tD,EAAa,iBAAmB,SAAUrvD,IAAKJ,KAAK0vD,cAC9E,4BACE,4BACE,gBAAeD,EACf5tD,UAAU,iBACVkxB,QAAS,IAAM7f,EAAcQ,KAAK87C,GAAeC,IAEjD,8BAAOvtD,EAAS,UAAY,UAC5B,yBAAKF,MAAM,KAAKD,OAAO,KAAK,cAAY,OAAOq/C,UAAU,SAC
vD,yBAAKnuB,UAAWw8B,EAAa,kBAAoB,yBAIvD,kBAAC9O,EAAQ,CAACU,SAAUoO,GAEhB,MAAAxgD,EAAYZ,YAAU,QAAM,IAAU,IAATtN,GAAK,EAEhC,MAAM08B,EAAW,IAAI+xB,EAAczuD,GAC7BE,EAAW+T,IAAAA,KAAQyoB,GAEnBkyB,EAAcpvD,EAAcyqB,oBAAoByS,GAChDmyB,EAAiBrvD,EAAcqN,WAAWE,MAAM2vB,GAEhD58B,EAASuN,EAAAA,IAAAA,MAAUuhD,GAAeA,EAAc36C,IAAAA,MAChD66C,EAAYzhD,EAAAA,IAAAA,MAAUwhD,GAAkBA,EAAiB56C,IAAAA,MAEzD9T,EAAcL,EAAOa,IAAI,UAAYmuD,EAAUnuD,IAAI,UAAYX,EAC/Dwa,EAAU5H,EAAgB4H,QAAQkiB,GAAU,GAE9CliB,GAA4B,IAAhB1a,EAAO+O,MAAcigD,EAAUjgD,KAAO,GAGpD5P,KAAKQ,MAAMkR,YAAY6sB,uBAAuBd,GAGhD,MAAMuiB,EAAU,kBAAC6O,EAAY,CAAC9tD,KAAOA,EACnCwB,YAAcgtD,EACd1uD,OAASA,GAAUmU,IAAAA,MACnB9T,YAAaA,EACbu8B,SAAUA,EACVx8B,SAAUA,EACVN,aAAeA,EACfJ,cAAgBA,EAChBK,WAAcA,EACd+S,gBAAmBA,EACnBT,cAAiBA,EACjB/R,iBAAmB,EACnBC,kBAAoB,IAEhBmiB,EAAQ,0BAAM1hB,UAAU,aAC5B,0BAAMA,UAAU,qBACbX,IAIL,OAAO,yBAAKuhC,GAAM,SAAQ1hC,IAASc,UAAU,kBAAkB8E,IAAO,kBAAiB5F,IAC/E,YAAWA,EAAMX,IAAKJ,KAAK8vD,aACjC,0BAAMjuD,UAAU,uBAAsB,kBAAC+e,EAAU,CAAC3f,SAAUA,KAC5D,kBAACktD,EAAa,CACZvgC,QAAQ,YACR4gC,iBAAkBxuD,KAAK+vD,oBAAoBhvD,GAC3CqtD,SAAUpuD,KAAKgwD,aACfzsC,MAAOA,EACPriB,YAAaA,EACbmtD,UAAWttD,EACXE,SAAUA,EACV0S,gBAAiBA,EACjBT,cAAeA,EACfu7C,kBAAkB,EAClBH,SAAWiB,EAA2B,GAAKh0C,GACzCykC,GACE,IACP/2B,WAIX,ECpIF,MAeA,GAfmB,IAA6B,IAA7B,MAAElb,EAAK,aAAEpN,GAAc,EACpCwtD,EAAgBxtD,EAAa,iBAC7B6tD,EAAmB,yCAAgBzgD,EAAMwe,QAAO,MACpD,OAAO,0BAAM1qB,UAAU,aAAW,QAC3B,6BACL,kBAACssD,EAAa,CAACK,iBAAmBA,GAAkB,KAC9CzgD,EAAMzE,KAAK,MAAK,MAEjB,ECDM,MAAMjI,WAAoBogB,EAAAA,UAkBvC/gB,SAAS,IAAD,QACN,IAAI,OAAEG,EAAM,KAAEE,EAAI,YAAEG,EAAW,MAAEF,EAAK,aAAEL,EAAY,WAAEC,EAAU,MAAE4B,EAAK,SAAE4rD,EAAQ,SAAEE,EAAQ,SAAErtD,KAAaorD,GAAersD,KAAKQ,OAC1H,cAAED,EAAa,YAACgC,EAAW,gBAAEpB,EAAe,iBAAEC,GAAoBirD,EACtE,MAAM,OAAEnqD,GAAW3B,EAEnB,IAAIM,EACF,OAAO,KAGT,MAAM,eAAEihD,GAAmBlhD,IAE3B,IAAIke,EAAcje,EAAOa,IAAI,eACzB40B,EAAaz1B,EAAOa,IAAI,cACxB81B,EAAuB32B,EAAOa,IAAI,wBAClC6hB,EAAQ1iB,EAAOa,IAAI,UAAYR,GAAeH,EAC9CkvD,EAAqBpvD,EAAOa,IAAI,YAChCwuD,EAAiB,IAAArvD,GAAM,KAANA,GACV,CAAEke,EAAGpY,KAAG,aAAiF,IAA5E,QAAC,gBAAiB,gBAAiB,WAAY,YAAU,OAASA,EAAW,IACjG1E,EAAapB,EAAOa,IAAI,cACxB4/C,EAAkBzgD,EAAOiN,MAAM,CAAC,eAAgB,QAChDi/C,EAA0BlsD,EAAOiN,MAAM,CAAC,eAAgB,gBAE5D,MAAM8S,EAAajgB,EAAa,cAAc,GACxC+D,EAAW/D,EAAa,YAAY,GACpCT,EAAQS,EAAa,SACrBwtD,EAAgBxtD,EAAa,iBAC7B6oD,EAAW7oD,EAAa,YACxBkgD,EAAOlgD,EAAa,QAEpBwvD,EAAoB,IACjB,0BAAMtuD,UAAU,sBAAqB,kBAAC+e,EAAU,CAAC3f,SAAUA,KAE9DutD,EAAoB,8BACtB,8BAvDU,KAuDgB,MAAG,8BAtDlB,KAwDTxtD,EAAQ,kBAACmvD,EAAiB,MAAM,IAIhC/4B,EAAQ72B,EAAc2B,SAAWrB,EAAOa,IAAI,SAAW,KACvDw1B,EAAQ32B,EAAc2B,SAAWrB,EAAOa,IAAI,SAAW,KACvD0uD,EAAM7vD,EAAc2B,SAAWrB,EAAOa,IAAI,OAAS,KAEnD2uD,EAAU9sC,GAAS,0BAAM1hB,UAAU,eACrCb,GAASH,EAAOa,IAAI,UAAY,0BAAMG,UAAU,cAAehB,EAAOa,IAAI,UAC5E,0BAAMG,UAAU,qBAAsB0hB,IAGxC,OAAO,0BAAM1hB,UAAU,SACrB,kBAACssD,EAAa,CACZE,UAAWttD,EACXwiB,MAAO8sC,EACPjC,SAAYA,EACZE,WAAWA,GAAkB9rD,GAASD,EACtCisD,iBAAmBA,GAElB,0BAAM3sD,UAAU,qBA9EP,KAgFLb,EAAe,kBAACmvD,EAAiB,MAAzB,KAEX,0BAAMtuD,UAAU,gBAEZ,2BAAOA,UAAU,SAAQ,+BAEtBid,EAAqB,wBAAIjd,UAAU,eAChC,4CACA,4BACE,kBAAC6C,EAAQ,CAACC,OAASma,MAHV,KAQfwiC,GACA,wBAAIz/C,UAAW,iBACb,6CAGA,4BACE,kBAACg/C,EAAI,CAACh9C,OAAO,SAASE,MAAML,EAAAA,EAAAA,IAAY49C,IAAmByL,GAA2BzL,KAKzFr/C,EACC,wBAAIJ,UAAW,YACb,2CAGA,qCALU,KAWZy0B,GAAcA,EAAW1mB,KAAe,YAAA0mB,EAAWjoB,YAAU,QAC1D,IAAe,IAAd,CAAEN,GAAM,EACR,QAASA,EAAMrM,IAAI,aAAeP,MAC9B4M,EAAMrM,IAAI,cAAgBN,EAAiB,KAEpD,QACI,IAAkB,IAAjBuF,EAAKoH,GAAM,EACPuiD,EAAepuD,KAAY6L,EAAMrM,IAAI,cACrCW,EAAa+M,EAAAA,KAAAA,OAAY6gD,IAAuBA,EAAmBngD,SAASnJ,GAE5E0mD,EAAa,CAAC,gBAUlB,OARIiD,GACFjD,EAAW/9C,KAAK,cAGdjN,GACFgrD,EAAW/9C,KAAK,YAGV,wBAAI3I,IAAKA,EAAK9E,UAAWwrD,EAAW
/jD,KAAK,MAC/C,4BACI3C,EAAOtE,GAAc,0BAAMR,UAAU,QAAM,MAE/C,4BACE,kBAAC3B,EAAK,MAACyG,IAAO,UAAS5F,KAAQ4F,KAAOoH,KAAes+C,EAAU,CACxDvrD,SAAWuB,EACX1B,aAAeA,EACfM,SAAUA,EAASqO,KAAK,aAAc3I,GACtC/F,WAAaA,EACbC,OAASkN,EACTvL,MAAQA,EAAQ,MAEtB,IACJymB,UAlC4B,KAsClC64B,EAAwB,4BAAI,kCAAX,KAGjBA,EACC,MAAAjhD,EAAOwN,YAAU,QACd,IAAkB,IAAjB1H,EAAKoH,GAAM,EACX,GAAsB,OAAnB,IAAApH,GAAG,KAAHA,EAAU,EAAE,GACb,OAGF,MAAM4pD,EAAmBxiD,EAAeA,EAAMlB,KAAOkB,EAAMlB,OAASkB,EAAnC,KAEjC,OAAQ,wBAAIpH,IAAKA,EAAK9E,UAAU,aAC9B,4BACI8E,GAEJ,4BACI,IAAe4pD,IAEhB,IACJtnC,UAjBW,KAoBjBuO,GAAyBA,EAAqB5nB,KAC3C,4BACA,4BAAM,UACN,4BACE,kBAAC1P,EAAK,QAAMmsD,EAAU,CAAGvrD,UAAW,EAC7BH,aAAeA,EACfM,SAAUA,EAASqO,KAAK,wBACxB1O,WAAaA,EACbC,OAAS22B,EACTh1B,MAAQA,EAAQ,OATyB,KAcrD40B,EACG,4BACA,4BAAM,YACN,4BACG,IAAAA,GAAK,KAALA,GAAU,CAACv2B,EAAQoZ,IACX,yBAAKtT,IAAKsT,GAAG,kBAAC/Z,EAAK,QAAMmsD,EAAU,CAAGvrD,UAAW,EAC/CH,aAAeA,EACfM,SAAUA,EAASqO,KAAK,QAAS2K,GACjCrZ,WAAaA,EACbC,OAASA,EACT2B,MAAQA,EAAQ,UAVxB,KAgBR00B,EACG,4BACA,4BAAM,YACN,4BACG,IAAAA,GAAK,KAALA,GAAU,CAACr2B,EAAQoZ,IACX,yBAAKtT,IAAKsT,GAAG,kBAAC/Z,EAAK,QAAMmsD,EAAU,CAAGvrD,UAAW,EAC/CH,aAAeA,EACfM,SAAUA,EAASqO,KAAK,QAAS2K,GACjCrZ,WAAaA,EACbC,OAASA,EACT2B,MAAQA,EAAQ,UAVxB,KAgBR4tD,EACG,4BACA,4BAAM,UACN,4BACE,6BACE,kBAAClwD,EAAK,QAAMmsD,EAAU,CACfvrD,UAAW,EACXH,aAAeA,EACfM,SAAUA,EAASqO,KAAK,OACxB1O,WAAaA,EACbC,OAASuvD,EACT5tD,MAAQA,EAAQ,QAXxB,QAmBf,0BAAMX,UAAU,eAjPL,MAoPXquD,EAAetgD,KAAO,MAAAsgD,EAAe7hD,YAAU,QAAO,IAAD,IAAI1H,EAAKoY,GAAG,SAAM,kBAACyqC,EAAQ,CAAC7iD,IAAM,GAAEA,KAAOoY,IAAKouB,QAAUxmC,EAAM+iD,QAAU3qC,EAAI4qC,UAnPzH,YAmPmJ,IAAI,KAGvK,ECvPa,MAAMroD,WAAmBmgB,EAAAA,UAgBtC/gB,SAAS,IAAD,EACN,IAAI,aAAEC,EAAY,WAAEC,EAAU,OAAEC,EAAM,MAAE2B,EAAK,YAAED,EAAW,KAAExB,EAAI,YAAEG,EAAW,SAAED,GAAajB,KAAKQ,MAC7Fse,EAAcje,EAAOa,IAAI,eACzBk1B,EAAQ/1B,EAAOa,IAAI,SACnB6hB,EAAQ1iB,EAAOa,IAAI,UAAYR,GAAeH,EAC9Cu1B,EAAa,IAAAz1B,GAAM,KAANA,GAAe,CAAEke,EAAGpY,KAAG,aAAiF,IAA5E,QAAC,OAAQ,QAAS,cAAe,QAAS,iBAAe,OAASA,EAAW,IACtH26C,EAAkBzgD,EAAOiN,MAAM,CAAC,eAAgB,QAChDi/C,EAA0BlsD,EAAOiN,MAAM,CAAC,eAAgB,gBAG5D,MAAMpJ,EAAW/D,EAAa,YAAY,GACpCwtD,EAAgBxtD,EAAa,iBAC7BT,EAAQS,EAAa,SACrB6oD,EAAW7oD,EAAa,YACxBkgD,EAAOlgD,EAAa,QAEpB0vD,EAAU9sC,GACd,0BAAM1hB,UAAU,eACd,0BAAMA,UAAU,qBAAsB0hB,IAQ1C,OAAO,0BAAM1hB,UAAU,SACrB,kBAACssD,EAAa,CAAC5qC,MAAO8sC,EAAS/B,SAAW9rD,GAASD,EAAcisD,iBAAiB,SAAO,IAGnFl4B,EAAW1mB,KAAO,MAAA0mB,EAAWjoB,YAAU,QAAO,IAAD,IAAI1H,EAAKoY,GAAG,SAAM,kBAACyqC,EAAQ,CAAC7iD,IAAM,GAAEA,KAAOoY,IAAKouB,QAAUxmC,EAAM+iD,QAAU3qC,EAAI4qC,UAhDrH,YAgD+I,IAAI,KAGxJ7qC,EACC,kBAACpa,EAAQ,CAACC,OAASma,IADLwX,EAAW1mB,KAAO,yBAAK/N,UAAU,aAAoB,KAGrEy/C,GACA,yBAAKz/C,UAAU,iBACZ,kBAACg/C,EAAI,CAACh9C,OAAO,SAASE,MAAML,EAAAA,EAAAA,IAAY49C,IAAmByL,GAA2BzL,IAG3F,8BACE,kBAACphD,EAAK,QACCF,KAAKQ,MAAK,CACfI,WAAaA,EACbK,SAAUA,EAASqO,KAAK,SACxBvO,KAAM,KACNF,OAAS+1B,EACT91B,UAAW,EACX0B,MAAQA,EAAQ,MAEb,KAIf,EC1EF,MAAMmnD,GAAY,qBAEH,MAAM6G,WAAkB/uC,EAAAA,UAWrC/gB,SAAU,IAAD,MACP,IAAI,OAAEG,EAAM,aAAEF,EAAY,WAAEC,EAAU,KAAEG,EAAI,YAAEG,EAAW,MAAEsB,EAAK,YAAED,GAAgBvC,KAAKQ,MAEvF,MAAM,eAAEshD,GAAmBlhD,IAE3B,IAAKC,IAAWA,EAAOa,IAErB,OAAO,8BAGT,IAAIF,EAAOX,EAAOa,IAAI,QAClB2kB,EAASxlB,EAAOa,IAAI,UACpB41B,EAAMz2B,EAAOa,IAAI,OACjB+uD,EAAY5vD,EAAOa,IAAI,QACvB6hB,EAAQ1iB,EAAOa,IAAI,UAAYR,GAAeH,EAC9C+d,EAAcje,EAAOa,IAAI,eACzB6/C,GAAatQ,EAAAA,EAAAA,IAAcpwC,GAC3By1B,EAAa,IAAAz1B,GAAM,KAANA,GACP,CAAC6vD,EAAG/pD,KAAG,aAA0F,IAArF,QAAC,OAAQ,OAAQ,SAAU,cAAe,QAAS,iBAAe,OAASA,EAAW,IACzGgqD,WAAU,CAACD,EAAG/pD,IAAQ46C,EAAW76B,IAAI/f,KACpC26C,EAAkBzgD,EAAOiN,MAAM,CAAC,eAAgB,QAChDi/C,EAA0BlsD,EAAOiN,MAAM,CAAC,eAAgB,gBAE5D,MAAMpJ,EAAW/D,EAAa,YAAY,GACpCiwD,EAAYjwD,EAAa,aACzB
6oD,EAAW7oD,EAAa,YACxBwtD,EAAgBxtD,EAAa,iBAC7BkgD,EAAOlgD,EAAa,QAEpB0vD,EAAU9sC,GACd,0BAAM1hB,UAAU,eACd,0BAAMA,UAAU,qBAAqB0hB,IAGzC,OAAO,0BAAM1hB,UAAU,SACrB,kBAACssD,EAAa,CAAC5qC,MAAO8sC,EAAS/B,SAAU9rD,GAASD,EAAaisD,iBAAiB,QAAQC,iBAAkBlsD,IAAgBC,GACxH,0BAAMX,UAAU,QACbd,GAAQyB,EAAQ,GAAK,0BAAMX,UAAU,aAAa0hB,GACnD,0BAAM1hB,UAAU,aAAaL,GAC5B6kB,GAAU,0BAAMxkB,UAAU,eAAa,KAAIwkB,EAAM,KAEhDiQ,EAAW1mB,KAAO,MAAA0mB,EAAWjoB,YAAU,QAAM,IAAD,IAAE1H,EAAKoY,GAAE,SAAK,kBAACyqC,EAAQ,CAAC7iD,IAAM,GAAEA,KAAOoY,IAAKouB,QAASxmC,EAAK+iD,QAAS3qC,EAAG4qC,UAAWA,IAAa,IAAI,KAG9I7H,GAAkBP,EAAW3xC,KAAO,MAAA2xC,EAAWlzC,YAAU,QAAM,IAAD,IAAE1H,EAAKoY,GAAE,SAAK,kBAACyqC,EAAQ,CAAC7iD,IAAM,GAAEA,KAAOoY,IAAKouB,QAASxmC,EAAK+iD,QAAS3qC,EAAG4qC,UAAWA,IAAa,IAAI,KAG/J7qC,EACC,kBAACpa,EAAQ,CAACC,OAAQma,IADL,KAIfwiC,GACA,yBAAKz/C,UAAU,iBACZ,kBAACg/C,EAAI,CAACh9C,OAAO,SAASE,MAAML,EAAAA,EAAAA,IAAY49C,IAAmByL,GAA2BzL,IAIzFhqB,GAAOA,EAAI1nB,KAAQ,8BAAM,6BAAM,0BAAM/N,UAAW8nD,IAAU,QAEtD,MAAAryB,EAAIjpB,YAAU,QAAM,IAAD,IAAE1H,EAAKoY,GAAE,SAAK,0BAAMpY,IAAM,GAAEA,KAAOoY,IAAKld,UAAW8nD,IAAW,6BAAM,MAAmBhjD,EAAG,KAAImqC,OAAO/xB,GAAU,IAAEkK,WAE7H,KAGXwnC,GAAa,kBAACG,EAAS,CAAC7iD,MAAO0iD,EAAW9vD,aAAcA,MAKlE,ECnFK,MAYP,GAZyB,IAAqC,IAArC,QAAEwsC,EAAO,QAAEuc,EAAO,UAAEC,GAAW,EACpD,OACI,0BAAM9nD,UAAY8nD,GAChB,6BAAQxc,EAAO,KAAM2D,OAAO4Y,GAAiB,ECHxC,MAAM3C,WAAuBrkD,IAAAA,UAoB1ChC,SACE,MAAM,cAAE44C,EAAa,cAAEE,EAAa,aAAED,EAAY,QAAE4H,EAAO,kBAAEj2B,EAAiB,OAAEhpB,GAAWlC,KAAKQ,MAE1FqwD,EAAY3uD,GAAUgpB,EAC5B,OACE,yBAAKrpB,UAAWgvD,EAAY,oBAAsB,WAE9C1P,EAAU,4BAAQt/C,UAAU,0BAA0BkxB,QAAUymB,GAAe,UACrE,4BAAQ33C,UAAU,mBAAmBkxB,QAAUumB,GAAe,eAIxEuX,GAAa,4BAAQhvD,UAAU,yBAAyBkxB,QAAUwmB,GAAc,SAIxF,EACD,KArCoBwN,GAAc,eAWX,CACpBzN,cAAez2B,SAASC,UACxB02B,cAAe32B,SAASC,UACxBy2B,aAAc12B,SAASC,UACvBq+B,SAAS,EACTj2B,mBAAmB,EACnBhpB,QAAQ,ICjBG,MAAM4uD,WAA4BpuD,IAAAA,cAe/ChC,SACE,MAAM,OAAEqwD,EAAM,WAAEznC,EAAU,OAAEpnB,EAAM,SAAE8uD,GAAahxD,KAAKQ,MAEtD,OAAGuwD,EACM,6BAAO/wD,KAAKQ,MAAM6zB,UAGxB/K,GAAcpnB,EACR,yBAAKL,UAAU,kBACnBmvD,EACD,yBAAKnvD,UAAU,8DACb,6BACE,gEACA,2BAAG,yCAAoB,QAAK,yCAAoB,yGAChD,2DAAgC,0CAAgB,SAAiB,yBAAsB,gDAA2B,kBAAe,gDAA2B,SAMhKynB,GAAepnB,EAaZ,6BAAOlC,KAAKQ,MAAM6zB,UAZhB,yBAAKxyB,UAAU,kBACnBmvD,EACD,yBAAKnvD,UAAU,4DACb,6BACE,gEACA,8FACA,qHAA0F,0CAAgB,SAAiB,yBAAsB,gDAA2B,kBAAe,gDAA2B,QAOhO,EACD,KAlDoBivD,GAAmB,eAShB,CACpBE,SAAU,KACV38B,SAAU,KACV08B,QAAQ,ICZZ,MAQA,GARsB,IAAiB,IAAjB,QAAE3uB,GAAS,EAC/B,OAAO,+BAAO,yBAAKvgC,UAAU,WAAS,IAAIugC,EAAO,KAAiB,ECepE,GAhByB,IAA6B,IAA7B,QAAE+e,EAAO,KAAE3wC,EAAI,KAAEgC,GAAM,EAC5C,OACI,uBAAG3Q,UAAU,UACXkxB,QAASouB,EAAWl1C,GAAMA,EAAEumB,iBAAmB,KAC/CzuB,KAAMo9C,EAAW,KAAI3wC,IAAS,MAC9B,8BAAOgC,GACL,ECsCZ,GA9CkB,IAChB,6BACE,yBAAKy+C,MAAM,6BAA6BC,WAAW,+BAA+BrvD,UAAU,cAC1F,8BACE,4BAAQsvD,QAAQ,YAAY1uB,GAAG,YAC7B,0BAAM2Q,EAAE,+TAGV,4BAAQ+d,QAAQ,YAAY1uB,GAAG,UAC7B,0BAAM2Q,EAAE,qUAGV,4BAAQ+d,QAAQ,YAAY1uB,GAAG,SAC7B,0BAAM2Q,EAAE,kVAGV,4BAAQ+d,QAAQ,YAAY1uB,GAAG,eAC7B,0BAAM2Q,EAAE,wLAGV,4BAAQ+d,QAAQ,YAAY1uB,GAAG,oBAC7B,0BAAM2Q,EAAE,qLAGV,4BAAQ+d,QAAQ,YAAY1uB,GAAG,kBAC7B,0BAAM2Q,EAAE,6RAGV,4BAAQ+d,QAAQ,YAAY1uB,GAAG,WAC7B,0BAAM2Q,EAAE,iEAGV,4BAAQ+d,QAAQ,YAAY1uB,GAAG,UAC7B,0BAAM2Q,EAAE,oDAGV,4BAAQ+d,QAAQ,YAAY1uB,GAAG,QAC7B,uBAAGrpB,UAAU,oBACX,0BAAMg4C,KAAK,UAAUC,SAAS,UAAUje,EAAE,wV,eCpCvC,MAAMke,WAAmB5uD,IAAAA,UAWtChC,SACE,IAAI,aAAC6f,EAAY,cAAEhgB,EAAa,aAAEI,GAAgBX,KAAKQ,MAEnD+wD,EAAY5wD,EAAa,aACzBqsD,EAAgBrsD,EAAa,iBAAiB,GAC9CmwD,EAAsBnwD,EAAa,uBACnCw/C,EAAax/C,EAAa,cAAc,GACxC0uD,EAAS1uD,EAAa,UAAU,GAChC8f,EAAM9f,EAAa,OACnB+f,EAAM/f,EAAa,OACnBipD,EAASjpD,EAAa,UAAU,GAEpC,MAAMygB,EAAmBzgB,EAAa,oBAAoB,GACpDutD,EAAmBvtD,EAAa,oBAAoB,GACpDs5C,EAAwBt5C,
EAAa,yBAAyB,GAC9DusD,EAAkBvsD,EAAa,mBAAmB,GACxD,IAAI2oB,EAAa/oB,EAAc+oB,aAC3BpnB,EAAS3B,EAAc2B,SAE3B,MAAMsvD,GAAejxD,EAAcq8B,UAE7BjlB,EAAgBpX,EAAcoX,gBAEpC,IAAI85C,EAAiB,KAmBrB,GAjBqB,YAAlB95C,IACD85C,EAAiB,yBAAK5vD,UAAU,QAC9B,yBAAKA,UAAU,qBACb,yBAAKA,UAAU,eAKA,WAAlB8V,IACD85C,EAAiB,yBAAK5vD,UAAU,QAC9B,yBAAKA,UAAU,qBACb,wBAAIA,UAAU,SAAO,kCACrB,kBAAC+nD,EAAM,SAKS,iBAAlBjyC,EAAkC,CACpC,MAAM+5C,EAAUnxC,EAAalG,YACvBs3C,EAAaD,EAAUA,EAAQhwD,IAAI,WAAa,GACtD+vD,EAAiB,yBAAK5vD,UAAU,sBAC9B,yBAAKA,UAAU,qBACb,wBAAIA,UAAU,SAAO,wCACrB,2BAAI8vD,IAGV,CAMA,IAJIF,GAAkBD,IACpBC,EAAiB,4DAGhBA,EACD,OAAO,yBAAK5vD,UAAU,cACpB,yBAAKA,UAAU,qBACZ4vD,IAKP,MAAM/uC,EAAUniB,EAAcmiB,UACxBsK,EAAUzsB,EAAcysB,UAExB4kC,EAAalvC,GAAWA,EAAQ9S,KAChCiiD,EAAa7kC,GAAWA,EAAQpd,KAChCkiD,IAA2BvxD,EAAc2O,sBAE/C,OACE,yBAAKrN,UAAU,cACb,kBAAC0vD,EAAS,MACV,kBAACT,EAAmB,CAACxnC,WAAYA,EAAYpnB,OAAQA,EAAQ8uD,SAAU,kBAACpH,EAAM,OAC5E,kBAACA,EAAM,MACP,kBAACnpC,EAAG,CAAC5e,UAAU,yBACb,kBAAC6e,EAAG,CAACsqC,OAAQ,IACX,kBAACgC,EAAa,QAIjB4E,GAAcC,GAAcC,EAC3B,yBAAKjwD,UAAU,oBACb,kBAAC6e,EAAG,CAAC7e,UAAU,kBAAkBmpD,OAAQ,IACtC4G,EAAc,kBAACxwC,EAAgB,MAAO,KACtCywC,EAAc,kBAAC3D,EAAgB,MAAO,KACtC4D,EAA0B,kBAAC7X,EAAqB,MAAO,OAG1D,KAEJ,kBAACiT,EAAe,MAEhB,kBAACzsC,EAAG,KACF,kBAACC,EAAG,CAACsqC,OAAQ,GAAIpM,QAAS,IACxB,kBAACuB,EAAU,QAGf,kBAAC1/B,EAAG,KACF,kBAACC,EAAG,CAACsqC,OAAQ,GAAIpM,QAAS,IACxB,kBAACyQ,EAAM,SAMnB,EC1HF,MAAM,GAA+BpvD,QAAQ,wB,eCS7C,MAeM8xD,GAAyB,CAC7BhkD,MAAO,GACPqS,SAjBW,OAkBXvf,OAAQ,CAAC,EACTmxD,QAAS,GACTlxD,UAAU,EACVwX,QAAQlJ,EAAAA,EAAAA,SAGH,MAAM2W,WAAuBtE,EAAAA,UAKlCpd,oBACE,MAAM,qBAAE6iB,EAAoB,MAAEnZ,EAAK,SAAEqS,GAAapgB,KAAKQ,MACpD0mB,EACD9G,EAASrS,IACwB,IAAzBmZ,GACR9G,EAAS,GAEb,CAEA1f,SACE,IAAI,OAAEG,EAAM,OAAEyX,EAAM,MAAEvK,EAAK,SAAEqS,EAAQ,aAAEzf,EAAY,GAAEgK,EAAE,SAAE2iB,GAAattB,KAAKQ,MAC3E,MAAM6lB,EAASxlB,GAAUA,EAAOa,IAAMb,EAAOa,IAAI,UAAY,KACvDF,EAAOX,GAAUA,EAAOa,IAAMb,EAAOa,IAAI,QAAU,KAEzD,IAAIuwD,EAAwBlxD,GAASJ,EAAaI,GAAM,EAAO,CAAEyoC,cAAc,IAC3E0oB,EAAO1wD,EACTywD,EADgB5rC,EACM,cAAa7kB,KAAQ6kB,IACrB,cAAa7kB,KACnCb,EAAa,qBAIf,OAHKuxD,IACHA,EAAOvxD,EAAa,sBAEf,kBAACuxD,EAAI,QAAMlyD,KAAKQ,MAAK,CAAG8X,OAAQA,EAAQ3N,GAAIA,EAAIhK,aAAcA,EAAcoN,MAAOA,EAAOqS,SAAUA,EAAUvf,OAAQA,EAAQysB,SAAUA,IACjJ,EACD,KA7BYvH,GAAc,eAGHgsC,IA4BjB,MAAM5kC,WAA0B1L,EAAAA,UAAW,cAAD,0CAGnCxV,IACV,MAAM8B,EAAQ/N,KAAKQ,MAAMK,QAA4C,SAAlCb,KAAKQ,MAAMK,OAAOa,IAAI,QAAqBuK,EAAEpI,OAAO+gB,MAAM,GAAK3Y,EAAEpI,OAAOkK,MAC3G/N,KAAKQ,MAAM4f,SAASrS,EAAO/N,KAAKQ,MAAMwxD,QAAQ,IAC/C,0BACe3iD,GAAQrP,KAAKQ,MAAM4f,SAAS/Q,IAAI,CAChD3O,SACE,IAAI,aAAEC,EAAY,MAAEoN,EAAK,OAAElN,EAAM,OAAEyX,EAAM,SAAExX,EAAQ,YAAEge,EAAW,SAAEwO,GAAattB,KAAKQ,MACpF,MAAM2oB,EAAYtoB,GAAUA,EAAOa,IAAMb,EAAOa,IAAI,QAAU,KACxD2kB,EAASxlB,GAAUA,EAAOa,IAAMb,EAAOa,IAAI,UAAY,KACvDF,EAAOX,GAAUA,EAAOa,IAAMb,EAAOa,IAAI,QAAU,KACnDywD,EAAWtxD,GAAUA,EAAOa,IAAMb,EAAOa,IAAI,MAAQ,KAM3D,GALKqM,IACHA,EAAQ,IAEVuK,EAASA,EAAOzL,KAAOyL,EAAOzL,OAAS,GAElCsc,EAAY,CACf,MAAMkiC,EAAS1qD,EAAa,UAC5B,OAAQ,kBAAC0qD,EAAM,CAACxpD,UAAYyW,EAAO3U,OAAS,UAAY,GACxC4f,MAAQjL,EAAO3U,OAAS2U,EAAS,GACjCkzC,cAAgB,IAAIriC,GACpBpb,MAAQA,EACR09C,iBAAmB3qD,EACnBwsB,SAAUA,EACVlN,SAAWpgB,KAAKoyD,cAClC,CAEA,MAAM/qC,EAAaiG,GAAa6kC,GAAyB,aAAbA,KAA6B,aAAcv/C,QACjF4N,EAAQ7f,EAAa,SAC3B,OAAIa,GAAiB,SAATA,EAER,kBAACgf,EAAK,CAAChf,KAAK,OACVK,UAAWyW,EAAO3U,OAAS,UAAY,GACvC4f,MAAOjL,EAAO3U,OAAS2U,EAAS,GAChC8H,SAAUpgB,KAAKogB,SACfkN,SAAUjG,IAKZ,kBAAC,KAAa,CACZ7lB,KAAM6kB,GAAqB,aAAXA,EAAwB,WAAa,OACrDxkB,UAAWyW,EAAO3U,OAAS,UAAY,GACvC4f,MAAOjL,EAAO3U,OAAS2U,EAAS,GAChCvK,MAAOA,EACPwsB,UAAW,EACX83B,gBAAiB,IACjB/E,YAAaxuC,EACbsB,SAAUpgB,KAAKogB,SACfkN,SAAUjG,GAGlB,EACD,KAxDY8F,GAAiB,eAEN4kC,IAw
DjB,MAAMO,WAAyBvvC,EAAAA,cAKpCpgB,YAAYnC,EAAOoC,GACjBC,MAAMrC,EAAOoC,GAAQ,sBAaZ,KACT5C,KAAKQ,MAAM4f,SAASpgB,KAAKmD,MAAM4K,MAAM,IACtC,0BAEc,CAACwkD,EAASj5C,KACvBtZ,KAAKuD,UAAU,IAAD,IAAC,MAAEwK,GAAO,QAAM,CAC5BA,MAAOA,EAAMC,IAAIsL,EAAGi5C,GACrB,GAAGvyD,KAAKogB,SAAS,IACnB,wBAEa9G,IACZtZ,KAAKuD,UAAU,IAAD,IAAC,MAAEwK,GAAO,QAAM,CAC5BA,MAAOA,EAAMc,OAAOyK,GACrB,GAAGtZ,KAAKogB,SAAS,IACnB,qBAES,KACR,IAAIC,EAAWmyC,GAAiBxyD,KAAKmD,MAAM4K,OAC3C/N,KAAKuD,UAAS,KAAM,CAClBwK,MAAOsS,EAAS/Q,MAAK6U,EAAAA,EAAAA,IAAgBnkB,KAAKmD,MAAMtC,OAAOa,IAAI,UAAU,EAAO,CAC1EN,kBAAkB,QAElBpB,KAAKogB,SAAS,IACnB,0BAEerS,IACd/N,KAAKuD,UAAS,KAAM,CAClBwK,MAAOA,KACL/N,KAAKogB,SAAS,IAxClBpgB,KAAKmD,MAAQ,CAAE4K,MAAOykD,GAAiBhyD,EAAMuN,OAAQlN,OAAQL,EAAMK,OACrE,CAEAwC,iCAAiC7C,GAC/B,MAAMuN,EAAQykD,GAAiBhyD,EAAMuN,OAClCA,IAAU/N,KAAKmD,MAAM4K,OACtB/N,KAAKuD,SAAS,CAAEwK,UAEfvN,EAAMK,SAAWb,KAAKmD,MAAMtC,QAC7Bb,KAAKuD,SAAS,CAAE1C,OAAQL,EAAMK,QAClC,CAiCAH,SAAU,IAAD,EACP,IAAI,aAAEC,EAAY,SAAEG,EAAQ,OAAED,EAAM,OAAEyX,EAAM,GAAE3N,EAAE,SAAE2iB,GAAattB,KAAKQ,MAEpE8X,EAASA,EAAOzL,KAAOyL,EAAOzL,OAAS,IAAcyL,GAAUA,EAAS,GACxE,MAAMm6C,EAAc,IAAAn6C,GAAM,KAANA,GAAcrM,GAAkB,iBAANA,IACxCymD,EAAmB,UAAAp6C,GAAM,KAANA,GAAcrM,QAAsB9J,IAAjB8J,EAAEwhC,cAAyB,QAChExhC,GAAKA,EAAE7H,QACR2J,EAAQ/N,KAAKmD,MAAM4K,MACnB4kD,KACJ5kD,GAASA,EAAMwe,OAASxe,EAAMwe,QAAU,GACpCqmC,EAAkB/xD,EAAOiN,MAAM,CAAC,QAAS,SACzC+kD,EAAkBhyD,EAAOiN,MAAM,CAAC,QAAS,SACzCglD,EAAoBjyD,EAAOiN,MAAM,CAAC,QAAS,WAC3CilD,EAAoBlyD,EAAOa,IAAI,SACrC,IAAIsxD,EACAC,GAAkB,EAClBC,EAAuC,SAApBL,GAAmD,WAApBA,GAAsD,WAAtBC,EAYtF,GAXID,GAAmBC,EACrBE,EAAsBryD,EAAc,cAAakyD,KAAmBC,KACvC,YAApBD,GAAqD,UAApBA,GAAmD,WAApBA,IACzEG,EAAsBryD,EAAc,cAAakyD,MAI9CG,GAAwBE,IAC3BD,GAAkB,GAGfL,EAAkB,CACrB,MAAMvH,EAAS1qD,EAAa,UAC5B,OAAQ,kBAAC0qD,EAAM,CAACxpD,UAAYyW,EAAO3U,OAAS,UAAY,GACxC4f,MAAQjL,EAAO3U,OAAS2U,EAAS,GACjCgzC,UAAW,EACXv9C,MAAQA,EACRuf,SAAUA,EACVk+B,cAAgBoH,EAChBnH,iBAAmB3qD,EACnBsf,SAAWpgB,KAAKoyD,cAClC,CAEA,MAAM7X,EAAS55C,EAAa,UAC5B,OACE,yBAAKkB,UAAU,qBACZ8wD,EACE,IAAA5kD,GAAK,KAALA,GAAU,CAAC8/B,EAAMv0B,KAAO,IAAD,EACtB,MAAM65C,GAAajlD,EAAAA,EAAAA,QAAO,IACrB,UAAAoK,GAAM,KAANA,GAAeH,GAAQA,EAAI41B,QAAUz0B,KAAE,QACrCrN,GAAKA,EAAE7H,UAEd,OACE,yBAAKuC,IAAK2S,EAAGzX,UAAU,yBAEnBqxD,EACE,kBAACE,GAAuB,CACxBrlD,MAAO8/B,EACPztB,SAAW/Q,GAAOrP,KAAKqzD,aAAahkD,EAAKiK,GACzCgU,SAAUA,EACVhV,OAAQ66C,EACRxyD,aAAcA,IAEZsyD,EACA,kBAACK,GAAuB,CACtBvlD,MAAO8/B,EACPztB,SAAW/Q,GAAQrP,KAAKqzD,aAAahkD,EAAKiK,GAC1CgU,SAAUA,EACVhV,OAAQ66C,IAER,kBAACH,EAAmB,QAAKhzD,KAAKQ,MAAK,CACnCuN,MAAO8/B,EACPztB,SAAW/Q,GAAQrP,KAAKqzD,aAAahkD,EAAKiK,GAC1CgU,SAAUA,EACVhV,OAAQ66C,EACRtyD,OAAQkyD,EACRpyD,aAAcA,EACdgK,GAAIA,KAGV2iB,EAOE,KANF,kBAACitB,EAAM,CACL14C,UAAY,2CAA0C6wD,EAAiB/uD,OAAS,UAAY,OAC5F4f,MAAOmvC,EAAiB/uD,OAAS+uD,EAAmB,GAEpD3/B,QAAS,IAAM/yB,KAAKuzD,WAAWj6C,IAAG,OAGlC,IAGN,KAEJgU,EAQE,KAPF,kBAACitB,EAAM,CACL14C,UAAY,wCAAuC4wD,EAAY9uD,OAAS,UAAY,OACpF4f,MAAOkvC,EAAY9uD,OAAS8uD,EAAc,GAC1C1/B,QAAS/yB,KAAKwzD,SAAQ,OAEjBX,EAAmB,GAAEA,KAAqB,GAAE,QAK3D,EACD,KAxJYP,GAAgB,eAGLP,IAuJjB,MAAMuB,WAAgC7xC,EAAAA,UAAW,cAAD,0CAIzCxV,IACV,MAAM8B,EAAQ9B,EAAEpI,OAAOkK,MACvB/N,KAAKQ,MAAM4f,SAASrS,EAAO/N,KAAKQ,MAAMwxD,QAAQ,GAC/C,CAEDtxD,SACE,IAAI,MAAEqN,EAAK,OAAEuK,EAAM,YAAEwG,EAAW,SAAEwO,GAAattB,KAAKQ,MAMpD,OALKuN,IACHA,EAAQ,IAEVuK,EAASA,EAAOzL,KAAOyL,EAAOzL,OAAS,GAE/B,kBAAC,KAAa,CACpBrL,KAAM,OACNK,UAAWyW,EAAO3U,OAAS,UAAY,GACvC4f,MAAOjL,EAAO3U,OAAS2U,EAAS,GAChCvK,MAAOA,EACPwsB,UAAW,EACX83B,gBAAiB,IACjB/E,YAAaxuC,EACbsB,SAAUpgB,KAAKogB,SACfkN,SAAUA,GACd,EACD,KA3BYgmC,GAAuB,eAEZvB,IA2BjB,MAAMqB,WAAgC3xC,EAAAA,UAAW,cAAD,8CAIrCxV,IACd,MAAM8B,EAAQ9B,EAAEpI,OAAO+gB,MAAM,GAC7B5kB,K
AAKQ,MAAM4f,SAASrS,EAAO/N,KAAKQ,MAAMwxD,QAAQ,GAC/C,CAEDtxD,SACE,IAAI,aAAEC,EAAY,OAAE2X,EAAM,SAAEgV,GAAattB,KAAKQ,MAC9C,MAAMggB,EAAQ7f,EAAa,SACrB0mB,EAAaiG,KAAc,aAAc1a,QAE/C,OAAQ,kBAAC4N,EAAK,CAAChf,KAAK,OAClBK,UAAWyW,EAAO3U,OAAS,UAAY,GACvC4f,MAAOjL,EAAO3U,OAAS2U,EAAS,GAChC8H,SAAUpgB,KAAKyzD,aACfnmC,SAAUjG,GACd,EACD,KApBY+rC,GAAuB,eAEZrB,IAoBjB,MAAM2B,WAA2BjyC,EAAAA,UAAW,cAAD,8CAIhCpS,GAAQrP,KAAKQ,MAAM4f,SAAS/Q,IAAI,CAChD3O,SACE,IAAI,aAAEC,EAAY,MAAEoN,EAAK,OAAEuK,EAAM,OAAEzX,EAAM,SAAEC,EAAQ,SAAEwsB,GAAattB,KAAKQ,MACvE8X,EAASA,EAAOzL,KAAOyL,EAAOzL,OAAS,GACvC,IAAIsc,EAAYtoB,GAAUA,EAAOa,IAAMb,EAAOa,IAAI,QAAU,KACxD+pD,GAAmBtiC,IAAcroB,EACjC6yD,GAAgBxqC,GAAa,CAAC,OAAQ,SAC1C,MAAMkiC,EAAS1qD,EAAa,UAE5B,OAAQ,kBAAC0qD,EAAM,CAACxpD,UAAYyW,EAAO3U,OAAS,UAAY,GACxC4f,MAAQjL,EAAO3U,OAAS2U,EAAS,GACjCvK,MAAQ+iC,OAAO/iC,GACfuf,SAAWA,EACXk+B,cAAgBriC,EAAY,IAAIA,GAAawqC,EAC7ClI,gBAAkBA,EAClBrrC,SAAWpgB,KAAKoyD,cAClC,EACD,KArBYsB,GAAkB,eAEP3B,IAqBxB,MAAM6B,GAAyBt7C,GACtB,IAAAA,GAAM,KAANA,GAAWH,IAChB,MAAMysB,OAAuBziC,IAAhBgW,EAAIg1B,QAAwBh1B,EAAIg1B,QAAUh1B,EAAI41B,MAC3D,IAAI8lB,EAA6B,iBAAR17C,EAAmBA,EAA2B,iBAAdA,EAAI/T,MAAqB+T,EAAI/T,MAAQ,KAE9F,IAAIwgC,GAAQivB,EACV,OAAOA,EAET,IAAIC,EAAe37C,EAAI/T,MACnBoM,EAAQ,IAAG2H,EAAIg1B,UACnB,KAA8B,iBAAjB2mB,GAA2B,CACtC,MAAMC,OAAgC5xD,IAAzB2xD,EAAa3mB,QAAwB2mB,EAAa3mB,QAAU2mB,EAAa/lB,MACtF,QAAY5rC,IAAT4xD,EACD,MAGF,GADAvjD,GAAS,IAAGujD,KACPD,EAAa1vD,MAChB,MAEF0vD,EAAeA,EAAa1vD,KAC9B,CACA,MAAQ,GAAEoM,MAASsjD,GAAc,IAI9B,MAAME,WAA0BjxC,EAAAA,cACrCpgB,cACEE,QAAO,sBAMGkL,IACV/N,KAAKQ,MAAM4f,SAASrS,EAAM,IAC3B,4BAEgB9B,IACf,MAAMiX,EAAajX,EAAEpI,OAAOkK,MAE5B/N,KAAKogB,SAAS8C,EAAW,GAZ3B,CAeAxiB,SACE,IAAI,aACFC,EAAY,MACZoN,EAAK,OACLuK,EAAM,SACNgV,GACEttB,KAAKQ,MAET,MAAM6iB,EAAW1iB,EAAa,YAG9B,OAFA2X,EAASA,EAAOzL,KAAOyL,EAAOzL,OAAS,IAAcyL,GAAUA,EAAS,GAGtE,6BACE,kBAAC+K,EAAQ,CACPxhB,UAAW6D,KAAG,CAAE4d,QAAShL,EAAO3U,SAChC4f,MAAQjL,EAAO3U,OAASiwD,GAAsBt7C,GAAQhP,KAAK,MAAQ,GACnEyE,OAAOkV,EAAAA,EAAAA,IAAUlV,GACjBuf,SAAUA,EACVlN,SAAWpgB,KAAK6tD,iBAGxB,EAGF,SAAS2E,GAAiBzkD,GACxB,OAAOqB,EAAAA,KAAAA,OAAYrB,GAASA,EAAQ,IAAcA,IAASG,EAAAA,EAAAA,QAAOH,IAASqB,EAAAA,EAAAA,OAC7E,CCpUe,cAEb,IAAI6kD,EAAiB,CACnBxqC,WAAY,CACV4f,IAAG,GACH6qB,mBAAoBra,GACpBsa,aAAcpa,GACdE,sBAAqB,GACrBma,sBAAuBja,GACvBE,MAAOP,GACP5sB,SAAUA,GACVmnC,UAAW1zC,GACX2zC,OAAQha,GACRia,WAAYzZ,GACZ0Z,UAAWzZ,GACXzjC,MAAO0nC,GACPyV,aAActV,GACdhB,iBAAgB,GAChB5hC,KAAMqwC,GACNI,cAAa,GACbpsC,WAAU,GACVyhC,mBAAkB,GAClBh1B,qBAAsB5qB,GAAAA,EACtB+/B,WAAY2d,GACZ1vC,UAAWwoC,GACX4I,iBAAgB,GAChBM,uBAAsB,GACtBC,qBAAoB,GACpBsS,cAAezvC,GACf0e,UAAW6d,GACX91C,SAAU23C,GACVgB,kBAAmBA,GACnBsQ,aAAchV,GACd/9B,WAAY6/B,GACZmT,aAAc9N,GACdx2C,QAASoxC,GACTn4C,QAAS01C,GACT3mC,OAAQsxC,GACRrlC,YAAa6+B,GACbyR,SAAUjJ,GACVkJ,OAAQ7H,GACRC,gBAAe,GACfhF,UAAWA,GACX6F,KAAMhO,GACN/yB,QAAS20B,GACTuM,iBAAgB,GAChB6G,aAAc/vC,GACd6pC,aAAY,GACZV,cAAa,GACbjuD,MAAK,KACLmvD,OAAM,GACNuB,UAAS,GACTvvD,YAAW,GACXC,WAAU,GACVC,eAAc,GACdioD,SAAQ,GACRzC,eAAc,GACdriD,SAAQ,KACR4sD,WAAU,GACVR,oBAAmB,GACnB1jC,aAAY,GACZw0B,aAAY,GACZgB,gBAAe,GACf58B,aAAY,GACZZ,sBAAqB,GACrB9R,aAAY,GACZqM,mBAAkB,GAClBihC,SAAQ,GACR+L,QAAO,GACPL,aAAY,GACZiF,UAAS,GACTpsC,QAAO,GACP+1B,eAAc,GACdh2B,4BAA2BA,KAI3B8vC,EAAiB,CACnBvrC,WAAYwrC,GAGVC,EAAuB,CACzBzrC,WAAY0rC,GAGd,MAAO,CACL1jD,GAAAA,QACA2jD,GAAAA,QACAC,EAAAA,QACAC,EAAAA,QACA9xD,EAAAA,QACA2U,EAAAA,QACApF,EAAAA,QACAwiD,EAAAA,QACAtB,EACAe,EACAQ,EAAAA,QACAN,EACAhtD,GAAAA,QACA0O,GAAAA,QACA6+C,GAAAA,QACA/8C,GAAAA,QACAqV,GAAAA,QACAyB,EAAAA,SACAkmC,EAAAA,GAAAA,WAEJ,CDsNC,KAxCY1B,GAAiB,eAMNjC,I,eExXT,SAAS4D,KAEtB,MAAO,CACLC,GACAC,GAAAA,QAEJ,C,eCFA,MAAM,UAAEC,GAAS,WAAEC,GAAU,gB
AAEC,GAAe,WAAEC,IAAeC,CAAAA,gBAAAA,SAAAA,WAAAA,WAAAA,WAAAA,EAAAA,WAAAA,iCAEhD,SAASC,GAAU/uB,GAAO,IAAD,EAEtCpkC,EAAAA,EAAAA,SAAeA,EAAAA,EAAAA,UAAgB,CAAC,EAChCA,EAAAA,EAAAA,SAAAA,UAAyB,CACvBo/B,QAAS4zB,GACTI,YAAaL,GACbM,SAAUP,GACVQ,eAAgBL,IAGlB,MAAMM,EAAW,CAEfC,OAAQ,KACRptB,QAAS,KACT5lC,KAAM,CAAC,EACPT,IAAK,GACL0zD,KAAM,KACN1jD,OAAQ,aACR2lC,aAAc,OACd18B,iBAAkB,KAClBtD,OAAQ,KACRxV,aAAc,yCACdm6C,kBAAoB,GAAEzqC,OAAOC,SAASqE,aAAatE,OAAOC,SAAS+Z,OAAOha,OAAOC,SAAS6jD,SAAShhC,UAAU,EAAG,MAAA9iB,OAAOC,SAAS6jD,UAAQ,OAAa,6BACrJjqD,sBAAsB,EACtBkF,QAAS,CAAC,EACVglD,OAAQ,CAAC,EACThe,oBAAoB,EACpBC,wBAAwB,EACxB/kC,aAAa,EACbykC,iBAAiB,EACjB/sC,mBAAqBiM,GAAKA,EAC1BhM,oBAAsBgM,GAAKA,EAC3B6nC,oBAAoB,EACpBsP,sBAAuB,UACvBC,wBAAyB,EACzBW,yBAA0B,EAC1BzN,gBAAgB,EAChBz8B,sBAAsB,EACtB0hB,qBAAiB5kC,EACjBm9C,wBAAwB,EACxB9vB,gBAAiB,CACfkE,WAAY,CACV,UAAa,CACXnQ,MAAO,cACPqzC,OAAQ,QAEV,gBAAmB,CACjBrzC,MAAO,oBACPqzC,OAAQ,cAEV,SAAY,CACVrzC,MAAO,aACPqzC,OAAQ,SAGZC,iBAAiB,EACjBC,UAAW,MAEbje,uBAAwB,CACtB,MACA,MACA,OACA,SACA,UACA,OACA,QACA,SAEFke,oBAAoB,EAIpBC,QAAS,CACPC,IAIFpjB,QAAS,GAGTC,eAAgB,CAId+D,eAAgB,UAIlBlE,aAAc,CAAE,EAGhBhpC,GAAI,CAAE,EACN8e,WAAY,CAAE,EAEdytC,gBAAiB,CACfC,WAAW,EACXC,MAAO,UAIX,IAAIC,EAAcjwB,EAAK2vB,oBAAqBlnB,EAAAA,EAAAA,MAAgB,CAAC,EAE7D,MAAMzG,EAAUhC,EAAKgC,eACdhC,EAAKgC,QAEZ,MAAMkuB,EAAoB1jB,IAAW,CAAC,EAAG2iB,EAAUnvB,EAAMiwB,GAEnDE,EAAe,CACnBtqD,OAAQ,CACN0E,QAAS2lD,EAAkB3lD,SAE7BkiC,QAASyjB,EAAkBN,QAC3BljB,eAAgBwjB,EAAkBxjB,eAClC3wC,MAAOywC,IAAW,CAChB7gC,OAAQ,CACNA,OAAQukD,EAAkBvkD,OAC1B2F,OAAQ,IAAA4+C,IAEV9zD,KAAM,CACJA,KAAM,GACNT,IAAKu0D,EAAkBv0D,KAEzBysB,gBAAiB8nC,EAAkB9nC,iBAClC8nC,EAAkB3jB,eAGvB,GAAG2jB,EAAkB3jB,aAInB,IAAK,IAAIhtC,KAAO2wD,EAAkB3jB,aAE9Bnd,OAAO1T,UAAU2T,eAAeC,KAAK4gC,EAAkB3jB,aAAchtC,SAC1BxE,IAAxCm1D,EAAkB3jB,aAAahtC,WAE3B4wD,EAAap0D,MAAMwD,GAahC,IAAI4hC,EAAQ,IAAIivB,EAAOD,GACvBhvB,EAAMgM,SAAS,CAAC+iB,EAAkBzjB,QATf,KACV,CACLlpC,GAAI2sD,EAAkB3sD,GACtB8e,WAAY6tC,EAAkB7tC,WAC9BtmB,MAAOm0D,EAAkBn0D,UAO7B,IAAI8J,EAASs7B,EAAMxsB,YAEnB,MAAM07C,EAAgBC,IACpB,IAAIC,EAAc1qD,EAAO1M,cAAcgR,eAAiBtE,EAAO1M,cAAcgR,iBAAmB,CAAC,EAC7FqmD,EAAehkB,IAAW,CAAC,EAAG+jB,EAAaL,EAAmBI,GAAiB,CAAC,EAAGL,GAqBvF,GAlBGjuB,IACDwuB,EAAaxuB,QAAUA,GAGzBb,EAAM8M,WAAWuiB,GACjB3qD,EAAO4qD,eAAe1zD,SAEA,OAAlBuzD,KACGL,EAAYt0D,KAAoC,iBAAtB60D,EAAap0D,MAAqB,IAAYo0D,EAAap0D,MAAMG,QAC9FsJ,EAAOyE,YAAYa,UAAU,IAC7BtF,EAAOyE,YAAYY,oBAAoB,WACvCrF,EAAOyE,YAAY2F,WAAW,IAAeugD,EAAap0D,QACjDyJ,EAAOyE,YAAYoF,UAAY8gD,EAAa70D,MAAQ60D,EAAanB,OAC1ExpD,EAAOyE,YAAYa,UAAUqlD,EAAa70D,KAC1CkK,EAAOyE,YAAYoF,SAAS8gD,EAAa70D,OAI1C60D,EAAaxuB,QACdn8B,EAAOvM,OAAOk3D,EAAaxuB,QAAS,YAC/B,GAAGwuB,EAAapB,OAAQ,CAC7B,IAAIptB,EAAU1zB,SAASoiD,cAAcF,EAAapB,QAClDvpD,EAAOvM,OAAO0oC,EAAS,MACzB,MAAkC,OAAxBwuB,EAAapB,QAA4C,OAAzBoB,EAAaxuB,SAIrD/iC,QAAQjC,MAAM,6DAGhB,OAAO6I,CAAM,EAGT8qD,EAAYV,EAAYtgD,QAAUugD,EAAkBS,UAE1D,OAAIA,GAAa9qD,EAAOyE,aAAezE,EAAOyE,YAAYO,gBACxDhF,EAAOyE,YAAYO,eAAe,CAChClP,IAAKg1D,EACLC,kBAAkB,EAClBzsD,mBAAoB+rD,EAAkB/rD,mBACtCC,oBAAqB8rD,EAAkB9rD,qBACtCisD,GAKExqD,GAHEwqD,GAIX,CAGAtB,GAAUa,QAAU,CAClBiB,KAAMhB,IAIRd,GAAUtiB,QAAUqkB,GAAAA,QC9NpB,W","sources":["webpack://SwaggerUICore/webpack/universalModuleDefinition","webpack://SwaggerUICore/external commonjs \"react-immutable-pure-component\"","webpack://SwaggerUICore/./src/core/components/model.jsx","webpack://SwaggerUICore/./src/core/components/online-validator-badge.jsx","webpack://SwaggerUICore/external commonjs \"remarkable/linkify\"","webpack://SwaggerUICore/external commonjs 
\"dompurify\"","webpack://SwaggerUICore/./src/core/components/providers/markdown.jsx","webpack://SwaggerUICore/./src/core/plugins/all.js","webpack://SwaggerUICore/./src/core/plugins/auth/actions.js","webpack://SwaggerUICore/./src/core/plugins/auth/index.js","webpack://SwaggerUICore/./src/core/plugins/auth/reducers.js","webpack://SwaggerUICore/./src/core/plugins/auth/selectors.js","webpack://SwaggerUICore/./src/core/plugins/auth/spec-wrap-actions.js","webpack://SwaggerUICore/./src/core/plugins/configs/actions.js","webpack://SwaggerUICore/./src/core/plugins/configs/helpers.js","webpack://SwaggerUICore/./src/core/plugins/configs/index.js","webpack://SwaggerUICore/./src/core/plugins/configs/reducers.js","webpack://SwaggerUICore/./src/core/plugins/configs/selectors.js","webpack://SwaggerUICore/./src/core/plugins/configs/spec-actions.js","webpack://SwaggerUICore/./src/core/plugins/deep-linking/helpers.js","webpack://SwaggerUICore/./src/core/plugins/deep-linking/index.js","webpack://SwaggerUICore/external commonjs \"zenscroll\"","webpack://SwaggerUICore/./src/core/plugins/deep-linking/layout.js","webpack://SwaggerUICore/./src/core/plugins/deep-linking/operation-tag-wrapper.jsx","webpack://SwaggerUICore/./src/core/plugins/deep-linking/operation-wrapper.jsx","webpack://SwaggerUICore/./src/core/plugins/download-url.js","webpack://SwaggerUICore/./src/core/plugins/err/actions.js","webpack://SwaggerUICore/external commonjs \"lodash/reduce\"","webpack://SwaggerUICore/./src/core/plugins/err/error-transformers/hook.js","webpack://SwaggerUICore/./src/core/plugins/err/error-transformers/transformers/not-of-type.js","webpack://SwaggerUICore/./src/core/plugins/err/error-transformers/transformers/parameter-oneof.js","webpack://SwaggerUICore/./src/core/plugins/err/index.js","webpack://SwaggerUICore/./src/core/plugins/err/reducers.js","webpack://SwaggerUICore/./src/core/plugins/err/selectors.js","webpack://SwaggerUICore/./src/core/plugins/filter/index.js","webpack://SwaggerUICore/./src/core/plugins/filter/opsFilter.js","webpack://SwaggerUICore/./src/core/plugins/layout/actions.js","webpack://SwaggerUICore/./src/core/plugins/layout/index.js","webpack://SwaggerUICore/./src/core/plugins/layout/reducers.js","webpack://SwaggerUICore/./src/core/plugins/layout/selectors.js","webpack://SwaggerUICore/./src/core/plugins/layout/spec-extensions/wrap-selector.js","webpack://SwaggerUICore/./src/core/plugins/logs/index.js","webpack://SwaggerUICore/./src/core/plugins/oas3/actions.js","webpack://SwaggerUICore/./src/core/plugins/oas3/auth-extensions/wrap-selectors.js","webpack://SwaggerUICore/./src/core/plugins/oas3/components/callbacks.jsx","webpack://SwaggerUICore/./src/core/plugins/oas3/components/http-auth.jsx","webpack://SwaggerUICore/./src/core/plugins/oas3/components/index.js","webpack://SwaggerUICore/./src/core/plugins/oas3/components/operation-link.jsx","webpack://SwaggerUICore/./src/core/plugins/oas3/components/operation-servers.jsx","webpack://SwaggerUICore/./src/core/plugins/oas3/components/request-body-editor.jsx","webpack://SwaggerUICore/./src/core/plugins/oas3/components/request-body.jsx","webpack://SwaggerUICore/./src/core/plugins/oas3/components/servers-container.jsx","webpack://SwaggerUICore/./src/core/plugins/oas3/components/servers.jsx","webpack://SwaggerUICore/./src/core/plugins/oas3/helpers.jsx","webpack://SwaggerUICore/./src/core/plugins/oas3/index.js","webpack://SwaggerUICore/./src/core/plugins/oas3/reducers.js","webpack://SwaggerUICore/./src/core/plugins/oas3/selectors.js","webpack://SwaggerUICore/./src/co
re/plugins/oas3/spec-extensions/selectors.js","webpack://SwaggerUICore/./src/core/plugins/oas3/spec-extensions/wrap-selectors.js","webpack://SwaggerUICore/./src/core/plugins/oas3/wrap-components/auth-item.jsx","webpack://SwaggerUICore/./src/core/plugins/oas3/wrap-components/index.js","webpack://SwaggerUICore/./src/core/plugins/oas3/wrap-components/json-schema-string.jsx","webpack://SwaggerUICore/./src/core/plugins/oas3/wrap-components/markdown.jsx","webpack://SwaggerUICore/./src/core/plugins/oas3/wrap-components/model.jsx","webpack://SwaggerUICore/./src/core/plugins/oas3/wrap-components/online-validator-badge.js","webpack://SwaggerUICore/./src/core/plugins/oas3/wrap-components/version-stamp.jsx","webpack://SwaggerUICore/./src/core/plugins/on-complete/index.js","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/repeat\"","webpack://SwaggerUICore/./src/core/plugins/request-snippets/fn.js","webpack://SwaggerUICore/./src/core/plugins/request-snippets/index.js","webpack://SwaggerUICore/./src/core/plugins/request-snippets/request-snippets.jsx","webpack://SwaggerUICore/./src/core/plugins/request-snippets/selectors.js","webpack://SwaggerUICore/./src/core/plugins/safe-render/components/error-boundary.jsx","webpack://SwaggerUICore/./src/core/plugins/safe-render/components/fallback.jsx","webpack://SwaggerUICore/./src/core/plugins/safe-render/fn.jsx","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/fill\"","webpack://SwaggerUICore/external commonjs \"lodash/zipObject\"","webpack://SwaggerUICore/./src/core/plugins/safe-render/index.js","webpack://SwaggerUICore/external commonjs \"xml\"","webpack://SwaggerUICore/external commonjs \"randexp\"","webpack://SwaggerUICore/external commonjs \"lodash/isEmpty\"","webpack://SwaggerUICore/./src/core/plugins/samples/fn.js","webpack://SwaggerUICore/./src/core/plugins/samples/index.js","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/object/define-property\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/promise\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/date/now\"","webpack://SwaggerUICore/external commonjs \"lodash/isString\"","webpack://SwaggerUICore/external commonjs \"lodash/debounce\"","webpack://SwaggerUICore/external commonjs \"lodash/set\"","webpack://SwaggerUICore/./src/core/plugins/spec/actions.js","webpack://SwaggerUICore/./src/core/plugins/spec/index.js","webpack://SwaggerUICore/./src/core/plugins/spec/reducers.js","webpack://SwaggerUICore/./src/core/plugins/spec/selectors.js","webpack://SwaggerUICore/./src/core/plugins/spec/wrap-actions.js","webpack://SwaggerUICore/./src/core/plugins/swagger-js/configs-wrap-actions.js","webpack://SwaggerUICore/external commonjs \"swagger-client/es/resolver\"","webpack://SwaggerUICore/external commonjs \"swagger-client/es/execute\"","webpack://SwaggerUICore/external commonjs \"swagger-client/es/http\"","webpack://SwaggerUICore/external commonjs \"swagger-client/es/subtree-resolver\"","webpack://SwaggerUICore/./src/core/plugins/swagger-js/index.js","webpack://SwaggerUICore/./src/core/plugins/util/index.js","webpack://SwaggerUICore/./src/core/plugins/view/fn.js","webpack://SwaggerUICore/./src/core/plugins/view/index.js","webpack://SwaggerUICore/external commonjs \"react-dom\"","webpack://SwaggerUICore/external commonjs \"react-redux\"","webpack://SwaggerUICore/external commonjs \"lodash/omit\"","webpack://SwaggerUICore/external 
commonjs \"lodash/identity\"","webpack://SwaggerUICore/./src/core/plugins/view/root-injects.jsx","webpack://SwaggerUICore/external commonjs \"react-syntax-highlighter/dist/esm/light\"","webpack://SwaggerUICore/external commonjs \"react-syntax-highlighter/dist/esm/languages/hljs/javascript\"","webpack://SwaggerUICore/external commonjs \"react-syntax-highlighter/dist/esm/languages/hljs/json\"","webpack://SwaggerUICore/external commonjs \"react-syntax-highlighter/dist/esm/languages/hljs/xml\"","webpack://SwaggerUICore/external commonjs \"react-syntax-highlighter/dist/esm/languages/hljs/bash\"","webpack://SwaggerUICore/external commonjs \"react-syntax-highlighter/dist/esm/languages/hljs/yaml\"","webpack://SwaggerUICore/external commonjs \"react-syntax-highlighter/dist/esm/languages/hljs/http\"","webpack://SwaggerUICore/external commonjs \"react-syntax-highlighter/dist/esm/languages/hljs/powershell\"","webpack://SwaggerUICore/external commonjs \"react-syntax-highlighter/dist/esm/styles/hljs/agate\"","webpack://SwaggerUICore/external commonjs \"react-syntax-highlighter/dist/esm/styles/hljs/arta\"","webpack://SwaggerUICore/external commonjs \"react-syntax-highlighter/dist/esm/styles/hljs/monokai\"","webpack://SwaggerUICore/external commonjs \"react-syntax-highlighter/dist/esm/styles/hljs/nord\"","webpack://SwaggerUICore/external commonjs \"react-syntax-highlighter/dist/esm/styles/hljs/obsidian\"","webpack://SwaggerUICore/external commonjs \"react-syntax-highlighter/dist/esm/styles/hljs/tomorrow-night\"","webpack://SwaggerUICore/./src/core/syntax-highlighting.js","webpack://SwaggerUICore/external commonjs \"@braintree/sanitize-url\"","webpack://SwaggerUICore/external commonjs \"lodash/camelCase\"","webpack://SwaggerUICore/external commonjs \"lodash/upperFirst\"","webpack://SwaggerUICore/external commonjs \"lodash/find\"","webpack://SwaggerUICore/external commonjs \"lodash/some\"","webpack://SwaggerUICore/external commonjs \"lodash/eq\"","webpack://SwaggerUICore/external commonjs \"css.escape\"","webpack://SwaggerUICore/external commonjs \"sha.js\"","webpack://SwaggerUICore/./src/core/utils.js","webpack://SwaggerUICore/./src/core/utils/jsonParse.js","webpack://SwaggerUICore/./src/core/window.js","webpack://SwaggerUICore/./src/helpers/get-parameter-schema.js","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/find-index\"","webpack://SwaggerUICore/./src/helpers/memoizeN.js","webpack://SwaggerUICore//home/ubuntu/workspace/oss-swagger-ui-release/src/core/plugins|sync|/\\.jsx","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/array/from\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/array/is-array\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/bind\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/concat\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/entries\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/every\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/filter\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/find\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/for-each\"","webpack://SwaggerUICore/external commonjs 
\"@babel/runtime-corejs3/core-js-stable/instance/includes\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/index-of\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/keys\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/map\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/reduce\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/slice\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/some\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/sort\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/starts-with\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/trim\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/json/stringify\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/map\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/object/assign\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/object/keys\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/object/values\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/set-timeout\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/url\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/helpers/defineProperty\"","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/helpers/extends\"","webpack://SwaggerUICore/external commonjs \"buffer\"","webpack://SwaggerUICore/external commonjs \"classnames\"","webpack://SwaggerUICore/external commonjs \"immutable\"","webpack://SwaggerUICore/external commonjs \"js-yaml\"","webpack://SwaggerUICore/external commonjs \"lodash/get\"","webpack://SwaggerUICore/external commonjs \"lodash/isFunction\"","webpack://SwaggerUICore/external commonjs \"lodash/memoize\"","webpack://SwaggerUICore/external commonjs \"prop-types\"","webpack://SwaggerUICore/external commonjs \"randombytes\"","webpack://SwaggerUICore/external commonjs \"react\"","webpack://SwaggerUICore/external commonjs \"react-copy-to-clipboard\"","webpack://SwaggerUICore/external commonjs \"react-immutable-proptypes\"","webpack://SwaggerUICore/external commonjs \"redux\"","webpack://SwaggerUICore/external commonjs \"remarkable\"","webpack://SwaggerUICore/external commonjs \"reselect\"","webpack://SwaggerUICore/external commonjs \"serialize-error\"","webpack://SwaggerUICore/external commonjs \"swagger-client/es/helpers\"","webpack://SwaggerUICore/external commonjs \"url-parse\"","webpack://SwaggerUICore/webpack/bootstrap","webpack://SwaggerUICore/webpack/runtime/compat get default export","webpack://SwaggerUICore/webpack/runtime/define property getters","webpack://SwaggerUICore/webpack/runtime/hasOwnProperty shorthand","webpack://SwaggerUICore/webpack/runtime/make namespace object","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/last-index-of\"","webpack://SwaggerUICore/external commonjs \"deep-extend\"","webpack://SwaggerUICore/external commonjs \"redux-immutable\"","webpack://SwaggerUICore/external commonjs 
\"lodash/merge\"","webpack://SwaggerUICore/./src/core/system.js","webpack://SwaggerUICore/./src/core/containers/OperationContainer.jsx","webpack://SwaggerUICore/./src/core/components/app.jsx","webpack://SwaggerUICore/./src/core/components/auth/authorization-popup.jsx","webpack://SwaggerUICore/./src/core/components/auth/authorize-btn.jsx","webpack://SwaggerUICore/./src/core/containers/authorize-btn.jsx","webpack://SwaggerUICore/./src/core/components/auth/authorize-operation-btn.jsx","webpack://SwaggerUICore/./src/core/components/auth/auths.jsx","webpack://SwaggerUICore/./src/core/components/auth/auth-item.jsx","webpack://SwaggerUICore/./src/core/components/auth/error.jsx","webpack://SwaggerUICore/./src/core/components/auth/api-key-auth.jsx","webpack://SwaggerUICore/./src/core/components/auth/basic-auth.jsx","webpack://SwaggerUICore/./src/core/components/example.jsx","webpack://SwaggerUICore/./src/core/components/examples-select.jsx","webpack://SwaggerUICore/./src/core/components/examples-select-value-retainer.jsx","webpack://SwaggerUICore/./src/core/components/auth/oauth2.jsx","webpack://SwaggerUICore/./src/core/oauth2-authorize.js","webpack://SwaggerUICore/./src/core/components/clear.jsx","webpack://SwaggerUICore/./src/core/components/live-response.jsx","webpack://SwaggerUICore/./src/core/components/operations.jsx","webpack://SwaggerUICore/./src/core/utils/url.js","webpack://SwaggerUICore/./src/core/components/operation-tag.jsx","webpack://SwaggerUICore/./src/core/components/operation.jsx","webpack://SwaggerUICore/external commonjs \"lodash/toString\"","webpack://SwaggerUICore/./src/core/components/operation-summary.jsx","webpack://SwaggerUICore/./src/core/components/operation-summary-method.jsx","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/splice\"","webpack://SwaggerUICore/./src/core/components/operation-summary-path.jsx","webpack://SwaggerUICore/./src/core/components/operation-extensions.jsx","webpack://SwaggerUICore/./src/core/components/operation-extension-row.jsx","webpack://SwaggerUICore/external commonjs \"js-file-download\"","webpack://SwaggerUICore/./src/core/components/highlight-code.jsx","webpack://SwaggerUICore/./src/core/components/responses.jsx","webpack://SwaggerUICore/./src/helpers/create-html-ready-id.js","webpack://SwaggerUICore/external commonjs \"@babel/runtime-corejs3/core-js-stable/instance/values\"","webpack://SwaggerUICore/./src/core/components/response.jsx","webpack://SwaggerUICore/./src/core/components/response-extension.jsx","webpack://SwaggerUICore/external commonjs \"xml-but-prettier\"","webpack://SwaggerUICore/external commonjs 
\"lodash/toLower\"","webpack://SwaggerUICore/./src/core/components/response-body.jsx","webpack://SwaggerUICore/./src/core/components/parameters/parameters.jsx","webpack://SwaggerUICore/./src/core/components/parameter-extension.jsx","webpack://SwaggerUICore/./src/core/components/parameter-include-empty.jsx","webpack://SwaggerUICore/./src/core/components/parameter-row.jsx","webpack://SwaggerUICore/./src/core/components/execute.jsx","webpack://SwaggerUICore/./src/core/components/headers.jsx","webpack://SwaggerUICore/./src/core/components/errors.jsx","webpack://SwaggerUICore/./src/core/components/content-type.jsx","webpack://SwaggerUICore/./src/core/components/layout-utils.jsx","webpack://SwaggerUICore/./src/core/components/overview.jsx","webpack://SwaggerUICore/./src/core/components/initialized-input.jsx","webpack://SwaggerUICore/./src/core/components/info.jsx","webpack://SwaggerUICore/./src/core/containers/info.jsx","webpack://SwaggerUICore/./src/core/components/jump-to-path.jsx","webpack://SwaggerUICore/./src/core/components/copy-to-clipboard-btn.jsx","webpack://SwaggerUICore/./src/core/components/footer.jsx","webpack://SwaggerUICore/./src/core/containers/filter.jsx","webpack://SwaggerUICore/./src/core/components/param-body.jsx","webpack://SwaggerUICore/./src/core/components/curl.jsx","webpack://SwaggerUICore/./src/core/components/schemes.jsx","webpack://SwaggerUICore/./src/core/containers/schemes.jsx","webpack://SwaggerUICore/./src/core/components/model-collapse.jsx","webpack://SwaggerUICore/./src/core/components/model-example.jsx","webpack://SwaggerUICore/./src/core/components/model-wrapper.jsx","webpack://SwaggerUICore/./src/core/components/models.jsx","webpack://SwaggerUICore/./src/core/components/enum-model.jsx","webpack://SwaggerUICore/./src/core/components/object-model.jsx","webpack://SwaggerUICore/./src/core/components/array-model.jsx","webpack://SwaggerUICore/./src/core/components/primitive-model.jsx","webpack://SwaggerUICore/./src/core/components/property.jsx","webpack://SwaggerUICore/./src/core/components/try-it-out-button.jsx","webpack://SwaggerUICore/./src/core/components/version-pragma-filter.jsx","webpack://SwaggerUICore/./src/core/components/version-stamp.jsx","webpack://SwaggerUICore/./src/core/components/deep-link.jsx","webpack://SwaggerUICore/./src/core/components/svg-assets.jsx","webpack://SwaggerUICore/./src/core/components/layouts/base.jsx","webpack://SwaggerUICore/external commonjs 
\"react-debounce-input\"","webpack://SwaggerUICore/./src/core/json-schema-components.jsx","webpack://SwaggerUICore/./src/core/presets/base.js","webpack://SwaggerUICore/./src/core/presets/apis.js","webpack://SwaggerUICore/./src/core/index.js","webpack://SwaggerUICore/./src/index.js"],"names":["root","factory","exports","module","define","amd","this","require","Model","ImmutablePureComponent","ref","replace","model","specSelectors","props","findDefinition","render","getComponent","getConfigs","schema","required","name","isRef","specPath","displayName","includeReadOnly","includeWriteOnly","ObjectModel","ArrayModel","PrimitiveModel","type","$$ref","get","getModelName","getRefSchema","className","src","height","width","deprecated","isOAS3","undefined","ImPropTypes","isRequired","PropTypes","expandDepth","depth","OnlineValidatorBadge","React","constructor","context","super","URL","url","win","toString","validatorUrl","state","getDefinitionUrl","UNSAFE_componentWillReceiveProps","nextProps","setState","spec","sanitizedValidatorUrl","sanitizeUrl","length","requiresValidationURL","target","rel","href","encodeURIComponent","ValidatorImage","alt","loaded","error","componentDidMount","img","Image","onload","onerror","Markdown","source","md","Remarkable","html","typographer","breaks","linkTarget","use","linkify","core","ruler","disable","useUnsafeMarkdown","sanitized","sanitizer","cx","dangerouslySetInnerHTML","__html","DomPurify","current","setAttribute","defaultProps","str","ALLOW_DATA_ATTR","FORBID_ATTR","hasWarnedAboutDeprecation","console","warn","ADD_ATTR","FORBID_TAGS","request","allPlugins","key","mod","pascalCaseFilename","default","SafeRender","SHOW_AUTH_POPUP","AUTHORIZE","LOGOUT","PRE_AUTHORIZE_OAUTH2","AUTHORIZE_OAUTH2","VALIDATE","CONFIGURE_AUTH","RESTORE_AUTHORIZATION","showDefinitions","payload","authorize","authorizeWithPersistOption","authActions","persistAuthorizationIfNeeded","logout","logoutWithPersistOption","preAuthorizeImplicit","errActions","auth","token","isValid","flow","newAuthErr","authId","level","message","authorizeOauth2WithPersistOption","authorizeOauth2","authorizePassword","username","password","passwordType","clientId","clientSecret","form","grant_type","scope","scopes","join","headers","client_id","client_secret","setClientIdAndSecret","Authorization","btoa","authorizeRequest","body","buildFormData","query","authorizeApplication","authorizeAccessCodeWithFormParams","redirectUrl","codeVerifier","code","redirect_uri","code_verifier","authorizeAccessCodeWithBasicAuthentication","data","parsedUrl","fn","oas3Selectors","authSelectors","additionalQueryStringParams","finalServerUrl","serverEffectiveValue","selectedServer","parseUrl","fetchUrl","_headers","fetch","method","requestInterceptor","responseInterceptor","then","response","JSON","parse","parseError","ok","statusText","catch","e","Error","errData","jsonResponse","error_description","jsonError","configureAuth","restoreAuthorization","persistAuthorization","authorized","localStorage","setItem","toJS","authPopup","swaggerUIRedirectOauth2","afterLoad","system","rootInjects","initOAuth","preauthorizeApiKey","preauthorizeBasic","statePlugins","reducers","actions","selectors","wrapActions","specWrapActionReplacements","specJson","definitionBase","getIn","value","set","securities","fromJS","map","Map","entrySeq","security","isFunc","setIn","header","parsedAuth","result","withMutations","delete","shownDefinitions","createSelector","definitionsToAuthorize","definitions","securityDefinitions","list","List","val","push","getDef
initionsByNames","valueSeq","names","allowedScopes","definition","size","keySeq","contains","definitionsForRequirements","allDefinitions","sec","first","securityScopes","definitionScopes","isAuthorized","execute","oriAction","path","operation","extras","specSecurity","UPDATE_CONFIGS","TOGGLE_CONFIGS","update","configName","configValue","toggle","getItem","parseYamlConfig","yaml","YAML","newThrownErr","getLocalConfig","yamlConfig","configsPlugin","specActions","configs","action","merge","oriVal","downloadConfig","req","getConfigByUrl","cb","next","res","status","updateLoadingStatus","updateUrl","text","setHash","history","pushState","window","location","hash","layout","ori","decodeURIComponent","layoutActions","parseDeepLinkHash","wrapComponents","OperationWrapper","OperationTag","OperationTagWrapper","SCROLL_TO","CLEAR_SCROLL_TO","show","layoutSelectors","args","deepLinking","tokenArray","shown","urlHashArray","urlHashArrayFromIsShownKey","assetName","createDeepLinkPath","scrollTo","rawHash","hashArray","split","isShownKey","isShownKeyFromUrlHashArray","tagId","maybeOperationId","tagIsShownKey","readyToScroll","scrollToKey","getScrollToKey","Im","scrollToElement","clearScrollTo","container","getScrollParent","zenscroll","to","element","includeHidden","LAST_RESORT","document","documentElement","style","getComputedStyle","excludeStaticParent","position","overflowRegex","parent","parentElement","test","overflow","overflowY","overflowX","tag","operationId","Ori","onLoad","toObject","downloadUrlPlugin","toolbox","download","config","specUrl","createElement","protocol","origin","checkPossibleFailReasons","updateSpec","clear","loadSpec","a","credentials","enums","loadingStatus","NEW_THROWN_ERR","NEW_THROWN_ERR_BATCH","NEW_SPEC_ERR","NEW_SPEC_ERR_BATCH","NEW_AUTH_ERR","CLEAR","CLEAR_BY","err","serializeError","newThrownErrBatch","errors","newSpecErr","newSpecErrBatch","errArray","filter","clearBy","errorTransformers","transformErrors","inputs","jsSpec","transformedErrors","reduce","transformer","newlyTransformedErrors","transform","seekStr","i","types","p","c","arr","makeNewMessage","makeReducers","DEFAULT_ERROR_STRUCTURE","line","sortBy","newErrors","k","errValue","filterValue","allErrors","lastError","all","last","opsFilter","taggedOps","phrase","tagObj","UPDATE_LAYOUT","UPDATE_FILTER","UPDATE_MODE","SHOW","updateLayout","updateFilter","thing","normalizeArray","changeMode","mode","wrapSelectors","isShown","thingToShow","currentFilter","def","whatMode","showSummary","taggedOperations","oriSelector","getSystem","maxDisplayedTags","isNaN","levels","getLevel","logLevel","logLevelInt","log","info","debug","UPDATE_SELECTED_SERVER","UPDATE_REQUEST_BODY_VALUE","UPDATE_REQUEST_BODY_VALUE_RETAIN_FLAG","UPDATE_REQUEST_BODY_INCLUSION","UPDATE_ACTIVE_EXAMPLES_MEMBER","UPDATE_REQUEST_CONTENT_TYPE","UPDATE_RESPONSE_CONTENT_TYPE","UPDATE_SERVER_VARIABLE_VALUE","SET_REQUEST_BODY_VALIDATE_ERROR","CLEAR_REQUEST_BODY_VALIDATE_ERROR","CLEAR_REQUEST_BODY_VALUE","setSelectedServer","selectedServerUrl","namespace","setRequestBodyValue","pathMethod","setRetainRequestBodyValueFlag","setRequestBodyInclusion","setActiveExamplesMember","contextType","contextName","setRequestContentType","setResponseContentType","setServerVariableValue","server","setRequestBodyValidateError","validationErrors","clearRequestBodyValidateError","initRequestBodyValidateError","clearRequestBodyValue","selector","defName","flowKey","flowVal","translatedDef","authorizationUrl","tokenUrl","description","v","oidcData","grants","grant","translatedScope
s","acc","cur","openIdConnectUrl","isOAS3Helper","resolvedSchemes","getState","callbacks","OperationContainer","callbackElements","callbackName","callback","pathItemName","pathItem","op","allowTryItOut","HttpAuth","onChange","newValue","getValue","errSelectors","Input","Row","Col","AuthError","JumpToPath","scheme","toLowerCase","autoFocus","autoComplete","Callbacks","RequestBody","Servers","ServersContainer","RequestBodyEditor","OperationServers","operationLink","OperationLink","Component","link","targetOp","parameters","n","string","Array","padString","forceUpdate","obj","getSelectedServer","getServerVariable","getEffectiveServerValue","operationServers","pathServers","serversToDisplay","displaying","servers","currentServer","NOOP","Function","prototype","PureComponent","defaultValue","stringify","inputValue","applyDefaultValue","isInvalid","TextArea","invalid","title","onDomChange","userHasEditedBody","getDefaultRequestBodyValue","requestBody","mediaType","activeExamplesKey","mediaTypeValue","hasExamplesKey","exampleSchema","mediaTypeExample","exampleValue","getSampleSchema","requestBodyValue","requestBodyInclusionSetting","requestBodyErrors","contentType","isExecute","onChangeIncludeEmpty","updateActiveExamplesKey","handleFile","files","setIsIncludedOptions","options","shouldDispatchInit","ModelExample","HighlightCode","ExamplesSelectValueRetainer","Example","ParameterIncludeEmpty","showCommonExtensions","requestBodyDescription","requestBodyContent","OrderedMap","schemaForMediaType","rawExamplesOfMediaType","sampleForMediaType","isObjectContent","isBinaryFormat","isBase64Format","JsonSchemaForm","ParameterExt","bodyProperties","prop","commonExt","getCommonExtensions","format","currentValue","currentErrors","included","useInitialValFromSchemaSamples","has","hasIn","useInitialValFromEnum","useInitialValue","initialValue","isFile","xKey","xVal","dispatchInitialValue","isIncluded","isIncludedOptions","isDisabled","isEmptyValue","sampleRequestBody","language","getKnownSyntaxHighlighterLanguage","examples","currentKey","currentUserInputValue","onSelect","updateValue","defaultToFirstExample","example","oas3Actions","serverVariableValue","setServer","variableName","getAttribute","newVariableValue","currentServerDefinition","prevServerDefinition","prevServerVariableDefs","prevServerVariableDefaultValue","currentServerVariableDefs","currentServerVariableDefaultValue","s","shouldShowVariableUI","htmlFor","onServerChange","toArray","onServerVariableValueChange","enumValue","selected","oasVersion","isSwagger2","swaggerVersion","OAS3ComponentWrapFactory","components","specWrapSelectors","authWrapSelectors","oas3","oas3Reducers","newVal","currentVal","valueKeys","valueKey","valueKeyVal","missingBodyValue","missingRequiredKeys","updateIn","missingKeyValues","bodyValue","currentMissingKey","bodyValues","curr","onlyOAS3","shouldRetainRequestBodyValue","selectDefaultRequestBodyValue","currentMediaType","requestContentType","specResolvedSubtree","activeExamplesMember","hasUserEditedBody","userEditedRequestBody","mapEntries","kv","currentMediaTypeDefaultBodyValue","responseContentType","locationData","serverVariables","varValues","serverValue","RegExp","validateBeforeExecute","validateRequestBodyValueExists","validateShallowRequired","oas3RequiredRequestBodyContentType","oas3RequestContentType","oas3RequestBodyValue","requiredKeys","contentTypeVal","requiredKey","specResolved","count","isSwagger2Helper","OAS3NullSelector","hasHost","specJsonWithResolvedSubtrees","host","basePath","consumes","produces","schem
es","onAuthChange","AuthItem","JsonSchema_string","VersionStamp","onlineValidatorBadge","disabled","parser","block","enable","trimmed","ModelComponent","classes","engaged","updateJsonSpec","onComplete","extractKey","hashIdx","escapeShell","escapeCMD","escapePowershell","getStringBodyOfMap","curlifyToJoin","extractedKey","curlify","escape","newLine","ext","isMultipartFormDataRequest","curlified","addWords","addWordsWithoutLeadingSpace","addNewLine","addIndent","h","reqBody","requestSnippetGenerator_curl_powershell","requestSnippetGenerator_curl_bash","requestSnippetGenerator_curl_cmd","RequestSnippets","requestSnippets","cursor","lineHeight","display","backgroundColor","paddingBottom","paddingTop","border","borderRadius","boxShadow","borderBottom","activeStyle","marginTop","marginRight","marginLeft","zIndex","requestSnippetsSelectors","isFunction","canSyntaxHighlight","rootRef","useRef","activeLanguage","setActiveLanguage","useState","getSnippetGenerators","isExpanded","setIsExpanded","getDefaultExpanded","useEffect","childNodes","node","nodeType","classList","addEventListener","handlePreventYScrollingBeyondElement","passive","removeEventListener","snippetGenerators","activeGenerator","snippet","handleSetIsExpanded","handleGetBtnStyle","deltaY","scrollHeight","contentHeight","offsetHeight","visibleHeight","scrollTop","preventDefault","SnippetComponent","getStyle","readOnly","justifyContent","alignItems","marginBottom","onClick","background","xlinkHref","paddingLeft","paddingRight","gen","handleGenChange","color","CopyToClipboard","getGenerators","languageKeys","generators","isEmpty","genFn","getGenFn","getActiveLanguage","ErrorBoundary","static","hasError","componentDidCatch","errorInfo","targetName","children","FallbackComponent","Fallback","withErrorBoundary","WrappedComponent","getDisplayName","WithErrorBoundary","component","isReactComponent","mapStateToProps","componentList","fullOverride","mergedComponentList","zipObject","Original","primitives","pattern","RandExp","generateStringFromRegex","Date","toISOString","substring","primitive","objectify","sanitizeRef","deeplyStripKey","objectContracts","arrayContracts","numberContracts","stringContracts","liftSampleHelper","oldSchema","setIfNotDefinedInTarget","properties","propName","Object","hasOwnProperty","call","writeOnly","items","sampleFromSchemaGeneric","exampleOverride","respectXML","usePlainValue","hasOneOf","oneOf","hasAnyOf","anyOf","schemaToAdd","xml","_attr","additionalProperties","prefix","schemaHasAny","keys","enum","handleMinMaxItems","sampleArray","maxItems","minItems","addPropertyToResult","propertyAddedCounter","hasExceededMaxProperties","maxProperties","requiredPropertiesToAdd","addedCount","x","isOptionalProperty","canAddProperty","overrideE","attribute","enumAttrVal","attrExample","attrDefault","t","discriminator","mapping","propertyName","pair","search","sample","itemSchema","itemSamples","wrapped","additionalProp","additionalProp1","additionalProps","additionalPropSample","toGenerateCount","minProperties","temp","min","minimum","exclusiveMinimum","max","maximum","exclusiveMaximum","maxLength","minLength","inferSchema","createXMLExample","o","json","XML","declaration","indent","sampleFromSchema","resolver","arg1","arg2","arg3","memoizedCreateXMLExample","memoizeN","memoizedSampleFromSchema","UPDATE_SPEC","UPDATE_URL","UPDATE_JSON","UPDATE_PARAM","UPDATE_EMPTY_PARAM_INCLUSION","VALIDATE_PARAMS","SET_RESPONSE","SET_REQUEST","SET_MUTATED_REQUEST","LOG_REQUEST","CLEAR_RESPONSE","CLEAR_REQUEST","CLEAR_VALIDATE_PARAMS","UPDAT
E_OPERATION_META_VALUE","UPDATE_RESOLVED","UPDATE_RESOLVED_SUBTREE","SET_SCHEME","cleanSpec","isString","updateResolved","parseToJson","specStr","JSON_SCHEMA","reason","mark","hasWarnedAboutResolveSpecDeprecation","resolveSpec","resolve","AST","modelPropertyMacro","parameterMacro","getLineNumberForPath","baseDoc","preparedErrors","fullPath","enumerable","requestBatch","debResolveSubtrees","debounce","async","resolveSubtree","batchResult","prev","resultMap","specWithCurrentSubtrees","oidcScheme","openIdConnectData","updateResolvedSubtree","requestResolvedSubtree","changeParam","paramName","paramIn","isXml","changeParamByIdentity","param","invalidateResolvedSubtreeCache","validateParams","updateEmptyParamInclusion","includeEmptyValue","clearValidateParams","changeConsumesValue","changeProducesValue","setResponse","setRequest","setMutatedRequest","logRequest","executeRequest","pathName","parameterInclusionSettingFor","paramValue","paramToValue","contextUrl","opId","namespaceVariables","globalVariables","parsedRequest","buildRequest","r","mutatedRequest","apply","parsedMutatedRequest","startTime","duration","operationScheme","contentTypeValues","parameterValues","clearResponse","clearRequest","setScheme","fromJSOrdered","paramKey","paramToIdentifier","paramValues","paramMeta","isEmptyValueIncluded","validateParam","bypassRequiredCheck","statusCode","newState","operationPath","metaPath","deleteIn","OPERATION_METHODS","specSource","mergerFn","oldVal","mergeWith","returnSelfOrNewMap","externalDocs","version","semver","exec","paths","operations","id","Set","resolvedRes","unresolvedRes","operationsWithRootInherited","ops","tags","tagDetails","currentTags","operationsWithTags","taggedMap","ar","tagsSorter","operationsSorter","tagA","tagB","sortFn","sorters","responses","requests","mutatedRequests","responseFor","requestFor","mutatedRequestFor","allowTryItOutFor","parameterWithMetaByIdentity","opParams","metaParams","mergedParams","currentParam","inNameKeyedMeta","hashKeyedMeta","hashCode","parameterWithMeta","operationWithMeta","meta","getParameter","inType","params","allowHashes","parametersIncludeIn","inValue","parametersIncludeType","typeValue","producesValue","currentProducesFor","currentProducesValue","firstProducesArrayItem","producesOptionsFor","operationProduces","pathItemProduces","globalProduces","consumesOptionsFor","operationConsumes","pathItemConsumes","globalConsumes","matchResult","match","urlScheme","canExecuteScheme","getOAS3RequiredRequestBodyContentType","requiredObj","isMediaTypeSchemaPropertiesEqual","targetMediaType","currentMediaTypeSchemaProperties","targetMediaTypeSchemaProperties","equals","pathItems","pathItemKeys","$ref","withCredentials","makeHttp","Http","preFetch","postFetch","opts","freshConfigs","rest","serializeRes","shallowEqualKeys","getComponents","getStore","memGetComponent","memoize","memMakeMappedContainer","memoizeForWithMappedContainer","withMappedContainer","makeMappedContainer","withSystem","WithSystem","withRoot","reduxStore","WithRoot","Provider","store","withConnect","compose","identity","connect","ownProps","customMapStateToProps","handleProps","oldProps","componentName","WithMappedContainer","cleanProps","omit","domNode","App","ReactDOM","TypeError","failSilently","SyntaxHighlighter","js","http","bash","powershell","javascript","styles","agate","arta","monokai","nord","obsidian","tomorrowNight","availableStyles","DEFAULT_RESPONSE_KEY","isImmutable","maybe","isObject","toList","objWithHashedKeys","fdObj","newObj","trackKeys","containsMultiple","createOb
jWithHashedKeys","isFn","isArray","_memoize","objMap","objReduce","systemThunkMiddleware","dispatch","defaultStatusCode","codes","getList","iterable","extractFileNameFromContentDispositionHeader","responseFilename","patterns","regex","filename","upperFirst","camelCase","validateValueBySchema","requiredByParam","parameterContentMediaType","nullable","requiredBySchema","uniqueItems","schemaRequiresValue","hasValue","stringCheck","arrayCheck","arrayListCheck","allChecks","passedAnyCheck","objectVal","isList","propKey","errs","rxPattern","validatePattern","validateMinItems","validateMaxItems","needRemove","errorPerItem","toSet","errorsPerIndex","item","add","index","validateUniqueItems","validateMaxLength","validateMinLength","validateMaximum","validateMinimum","validateDateTime","validateGuid","validateString","validateBoolean","validateNumber","validateInteger","validateFile","paramRequired","paramDetails","getParameterSchema","getXmlSampleSchema","shouldStringifyTypesConfig","when","shouldStringifyTypes","defaultStringifyTypes","getStringifiedSampleForSchema","resType","typesToStringify","nextConfig","some","getYamlSampleSchema","jsonExample","yamlString","lineWidth","parseSearch","substr","buffer","Buffer","from","alpha","b","localeCompare","formArr","find","eq","braintreeSanitizeUrl","uri","getAcceptControllingResponse","suitable2xxResponse","defaultResponse","suitableDefaultResponse","String","escapeDeepLinkPath","cssEscape","getExtensions","defObj","input","keyToStrip","predicate","numberToString","returnAll","generatedIdentifiers","allIdentifiers","generateCodeVerifier","b64toB64UrlEncoded","randomBytes","createCodeChallenge","shaJs","digest","canJsonParse","open","close","File","swagger2SchemaKeys","parameter","shallowArrayEquals","Cache","foundKey","OriginalCache","memoized","webpackContext","webpackContextResolve","__webpack_require__","__webpack_module_cache__","moduleId","cachedModule","__webpack_modules__","getter","__esModule","d","defineProperty","Symbol","toStringTag","idFn","Store","rootReducer","initialState","deepExtend","plugins","pluginsOptions","boundSystem","_getSystem","middlwares","composeEnhancers","createStore","applyMiddleware","createStoreWithMiddleware","buildSystem","register","rebuild","pluginSystem","combinePlugins","systemExtend","callAfterLoad","buildReducer","getRootInjects","getWrappedAndBoundActions","getWrappedAndBoundSelectors","getStateThunks","getFn","rebuildReducer","_getConfigs","setConfigs","states","replaceReducer","reducerSystem","reducerObj","redFn","wrapWithTryCatch","makeReducer","combineReducers","allReducers","getType","upName","toUpperCase","getSelectors","getActions","actionHolders","actionName","actionGroups","getBoundActions","actionGroupName","wrappers","wrap","newAction","selectorGroups","getBoundSelectors","selectorGroupName","stateName","selectorName","wrappedSelector","getStates","wrapper","getNestedState","process","creator","actionCreator","bindActionCreators","getMapStateToProps","getMapDispatchToProps","pluginOptions","dest","pluginLoadType","plugin","hasLoaded","calledSomething","wrapperFn","namespaceObj","logErrors","resolvedSubtree","getResolvedSubtree","tryItOutEnabled","defaultRequestBodyValue","executeInProgress","nextState","docExpansion","displayOperationId","displayRequestDuration","supportedSubmitMethods","isDeepLinkingEnabled","jumpToKey","unresolvedOp","Operation","operationProps","summary","originalOperationId","toggleShown","onTryoutClick","onResetClick","onCancelClick","onExecute","getLayout","layoutName","Layout",
"AuthorizationPopup","Auths","AuthorizeBtn","showPopup","AuthorizeBtnContainer","authorizableDefinitions","AuthorizeOperationBtn","stopPropagation","auths","Oauth2","Button","authorizedAuth","nonOauthDefinitions","oauthDefinitions","onSubmit","submitAuth","logoutClick","ApiKeyAuth","BasicAuth","authEl","showValue","ExamplesSelect","isSyntheticChange","selectedOptions","_onSelect","currentExampleKey","currentExamplePerProps","firstExamplesKey","firstExample","firstExampleKey","keyOf","isValueModified","isModifiedValueAvailable","showLabels","_onDomSelect","exampleName","stringifyUnlessList","currentNamespace","_setStateForNamespace","newStateForNamespace","mergeDeep","_getCurrentExampleValue","exampleKey","_getValueForExample","lastUserEditedValue","_getStateForCurrentNamespace","valueFromExample","_setStateForCurrentNamespace","isModifiedValueSelected","otherArgs","lastDownstreamValue","componentWillUnmount","valueFromCurrentExample","examplesMatchingNewValue","_onExamplesSelect","authConfigs","oauth2RedirectUrl","scopesArray","scopeSeparator","realm","usePkceWithAuthorizationCodeGrant","codeChallenge","sanitizedAuthorizationUrl","useBasicAuthenticationWithAccessCodeGrant","errCb","oauth2Authorize","checked","dataset","newScopes","appName","InitializedInput","oidcUrl","AUTH_FLOW_IMPLICIT","AUTH_FLOW_PASSWORD","AUTH_FLOW_ACCESS_CODE","AUTH_FLOW_APPLICATION","isPkceCodeGrant","flowToDisplay","tablet","desktop","onInputChange","selectScopes","onScopeChange","Clear","Headers","Duration","LiveResponse","shouldComponentUpdate","showMutatedRequest","requestSnippetsEnabled","curlRequest","notDocumented","isError","headersKeys","ResponseBody","returnObject","joinedHeaders","hasHeaders","Curl","content","SWAGGER2_OPERATION_METHODS","OAS3_OPERATION_METHODS","Operations","validMethods","renderOperationTag","isAbsoluteUrl","buildBaseUrl","buildUrl","baseUrl","safeBuildUrl","Collapse","DeepLink","Link","tagExternalDocsUrl","tagDescription","tagExternalDocsDescription","rawTagExternalDocsUrl","showTag","enabled","focusable","isOpened","externalDocsUrl","extensions","Responses","Parameters","Execute","Schemes","OperationExt","OperationSummary","showExtensions","onChangeKey","currentScheme","tryItOutResponse","resolvedSummary","OperationSummaryMethod","OperationSummaryPath","CopyToClipboardBtn","hasSecurity","securityIsOptional","allowAnonymous","applicableDefinitions","textToCopy","pathParts","OperationExtRow","xNormalizedValue","fileName","downloadable","canCopy","saveAs","controlsAcceptHeader","defaultCode","ContentType","Response","acceptControllingResponse","regionId","replacement","createHtmlReadyId","controlId","ariaControls","ariaLabel","contentTypes","onChangeProducesWrapper","role","isDefault","onContentTypeChange","onResponseContentTypeChange","activeContentType","links","ResponseExtension","specPathWithPossibleSchema","activeMediaType","examplesForMediaType","oas3SchemaForContentType","sampleSchema","shouldOverrideSchemaExample","sampleGenConfig","targetExamplesKey","getTargetExamplesKey","getMediaTypeExample","targetExample","oldOASMediaTypeExample","sampleResponse","getExampleComponent","Seq","_onContentTypeChange","omitValue","toSeq","parsedContent","prevContent","Blob","reader","FileReader","readAsText","updateParsedContent","componentDidUpdate","prevProps","downloadName","getTime","bodyEl","blob","disposition","formatXml","textNodesOnSameLine","indentor","toLower","controls","tab","parametersVisible","callbackVisible","ParameterRow","TryItOutButton","groupedParametersArr","toggleTab","rawPa
ram","onChangeConsumes","onChangeConsumesWrapper","onChangeMediaType","f","lastValue","usableValue","ParameterIncludeEmptyDefaultProps","onCheckboxChange","valueForUpstream","getParamKey","paramWithMeta","parameterMediaType","generatedSampleValue","onChangeWrapper","setDefaultValue","ParamBody","bodyParam","consumesValue","paramItems","paramEnum","paramDefaultValue","paramExample","itemType","isFormData","isFormDataSupported","isDisplayParamEnum","_onExampleSelect","oas3ValidateBeforeExecuteSuccess","missingKey","isPass","handleValidationResultPass","handleValidationResultFail","paramsResult","handleValidateParameters","requestBodyResult","handleValidateRequestBody","handleValidationResult","Property","schemaExample","propVal","propClass","Errors","editorActions","jumpToLine","allErrorsToDisplay","isVisible","sortedJSErrors","animated","ThrownErrorItem","SpecErrorItem","errorLine","toTitleCase","locationMessage","xclass","Container","fullscreen","full","containerClass","DEVICES","hide","keepContents","mobile","large","classesAr","device","deviceClass","Select","multiple","option","allowedValues","allowEmptyValue","NoMargin","renderNotAnimated","Overview","setTagShown","_setTagShown","showTagId","showOp","showOpIdPrefix","showOpId","_onClick","inputRef","otherProps","InfoBasePath","Contact","email","License","license","InfoUrl","Info","termsOfServiceUrl","contact","externalDocsDescription","InfoContainer","Footer","FilterContainer","isLoading","isFailed","classNames","placeholder","onFilterChange","isJson","isEditBox","_onChange","updateValues","defaultProp","handleOnChange","toggleIsEditBox","curl","curlBlock","UNSAFE_componentWillMount","SchemesContainer","ModelCollapse","onToggle","modelName","expanded","toggleCollapsed","collapsedContent","hideSelfOnExpand","activeTab","defaultModelRendering","defaultModelExpandDepth","ModelWrapper","exampleTabId","examplePanelId","modelTabId","modelPanelId","active","inactive","tabIndex","Models","getSchemaBasePath","defaultModelsExpandDepth","specPathBase","showModels","onLoadModels","schemaValue","rawSchemaValue","rawSchema","onLoadModel","getCollapsedContent","handleToggle","requiredProperties","infoProperties","JumpToPathSection","not","titleEl","isDeprecated","normalizedValue","Primitive","enumArray","_","filterNot","EnumModel","showReset","VersionPragmaFilter","bypass","alsoShow","xmlns","xmlnsXlink","viewBox","fill","fillRule","BaseLayout","SvgAssets","isSpecEmpty","loadingMessage","lastErr","lastErrMsg","hasServers","hasSchemes","hasSecurityDefinitions","JsonSchemaDefaultProps","keyName","getComponentSilently","Comp","schemaIn","onEnumChange","debounceTimeout","JsonSchema_array","itemVal","valueOrEmptyList","arrayErrors","needsRemoveError","shouldRenderValue","schemaItemsEnum","schemaItemsType","schemaItemsFormat","schemaItemsSchema","ArrayItemsComponent","isArrayItemText","isArrayItemFile","itemErrors","JsonSchemaArrayItemFile","onItemChange","JsonSchemaArrayItemText","removeItem","addItem","onFileChange","JsonSchema_boolean","booleanValue","stringifyObjectErrors","stringError","currentError","part","JsonSchema_object","coreComponents","authorizationPopup","authorizeBtn","authorizeOperationBtn","authError","oauth2","apiKeyAuth","basicAuth","liveResponse","highlightCode","responseBody","parameterRow","overview","footer","modelExample","formComponents","LayoutUtils","jsonSchemaComponents","JsonSchemaComponents","util","logs","view","samples","swaggerJs","deepLinkingPlugin","safeRender","PresetApis","BasePreset","OAS3Plugin","GIT_DIRTY","GIT_COMMI
T","PACKAGE_VERSION","BUILD_TIME","buildInfo","SwaggerUI","gitRevision","gitDirty","buildTimestamp","defaults","dom_id","urls","pathname","custom","syntax","defaultExpanded","languages","queryConfigEnabled","presets","ApisPreset","syntaxHighlight","activated","theme","queryConfig","constructorConfig","storeConfigs","System","downloadSpec","fetchedConfig","localConfig","mergedConfig","configsActions","querySelector","configUrl","loadRemoteConfig","apis","AllPlugins"],"sourceRoot":""} \ No newline at end of file diff --git a/vendor/github.com/swaggo/files/v2/files.go b/vendor/github.com/swaggo/files/v2/files.go new file mode 100644 index 0000000..d7b452e --- /dev/null +++ b/vendor/github.com/swaggo/files/v2/files.go @@ -0,0 +1,12 @@ +package swaggerFiles + +import ( + "embed" + "io/fs" +) + +//go:embed dist/* +var dist embed.FS + +// FS holds embedded swagger ui files +var FS, _ = fs.Sub(dist, "dist") diff --git a/vendor/github.com/swaggo/swag/.gitignore b/vendor/github.com/swaggo/swag/.gitignore new file mode 100644 index 0000000..865a6f3 --- /dev/null +++ b/vendor/github.com/swaggo/swag/.gitignore @@ -0,0 +1,24 @@ +dist +testdata/simple*/docs +testdata/quotes/docs +testdata/quotes/quotes.so +testdata/delims/docs +testdata/delims/delims.so +example/basic/docs/* +example/celler/docs/* +cover.out + + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out +.idea +.vscode + +# Etc +.DS_Store + +/swag +/swag.exe diff --git a/vendor/github.com/swaggo/swag/.goreleaser.yml b/vendor/github.com/swaggo/swag/.goreleaser.yml new file mode 100644 index 0000000..a431d5d --- /dev/null +++ b/vendor/github.com/swaggo/swag/.goreleaser.yml @@ -0,0 +1,30 @@ +build: + main: cmd/swag/main.go + goos: + - linux + - darwin + goarch: + - amd64 + - arm64 + - 386 + env: + - CGO_ENABLED=0 + +archives: + - id: foo + name_template: >- + {{ .ProjectName }}_ + {{- .Version }}_ + {{- if eq .Os "linux"}}Linux{{ else if eq .Os "darwin"}}Darwin{{ else }}{{ .Os }}{{ end }}_ + {{- if eq .Arch "386" }}i386{{ else if eq .Arch "amd64" }}x86_64{{ else }}{{ .Arch }}{{ end }} + +checksum: + name_template: 'checksums.txt' +snapshot: + name_template: "{{ .Tag }}-next" +changelog: + sort: asc + filters: + exclude: + - '^docs:' + - '^test:' diff --git a/vendor/github.com/swaggo/swag/CODE_OF_CONDUCT.md b/vendor/github.com/swaggo/swag/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..717d795 --- /dev/null +++ b/vendor/github.com/swaggo/swag/CODE_OF_CONDUCT.md @@ -0,0 +1,46 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at [gitter.im/swaggo/swag](https://gitter.im/swaggo/swag).The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/swaggo/swag/CONTRIBUTING.md b/vendor/github.com/swaggo/swag/CONTRIBUTING.md new file mode 100644 index 0000000..f3a3a15 --- /dev/null +++ b/vendor/github.com/swaggo/swag/CONTRIBUTING.md @@ -0,0 +1,16 @@ +# Contributing + +When contributing to this repository, please first discuss the change you wish to make via issue, +email, or any other method with the owners of this repository before making a change. + +Please note we have a code of conduct, please follow it in all your interactions with the project. + +## Pull Request Process + +1. Fork it +2. Create your feature branch (`git checkout -b my-new-feature`) +3. Commit your changes (`git commit -am 'Add some feature'`) +4. 
Push to the branch (`git push origin my-new-feature`) +5. Create new Pull Request + +Please make an issue first if the change is likely to increase. diff --git a/vendor/github.com/swaggo/swag/Dockerfile b/vendor/github.com/swaggo/swag/Dockerfile new file mode 100644 index 0000000..410fe31 --- /dev/null +++ b/vendor/github.com/swaggo/swag/Dockerfile @@ -0,0 +1,30 @@ +# Dockerfile References: https://docs.docker.com/engine/reference/builder/ + +# Start from the latest golang base image +FROM golang:1.20-alpine as builder + +# Set the Current Working Directory inside the container +WORKDIR /app + +# Copy go mod and sum files +COPY go.mod go.sum ./ + +# Download all dependencies. Dependencies will be cached if the go.mod and go.sum files are not changed +RUN go mod download + +# Copy the source from the current directory to the Working Directory inside the container +COPY . . + +# Build the Go app +RUN CGO_ENABLED=0 GOOS=linux go build -v -a -installsuffix cgo -o swag cmd/swag/main.go + + +######## Start a new stage from scratch ####### +FROM scratch + +WORKDIR /code/ + +# Copy the Pre-built binary file from the previous stage +COPY --from=builder /app/swag /bin/swag + +ENTRYPOINT ["/bin/swag"] diff --git a/vendor/github.com/swaggo/swag/Makefile b/vendor/github.com/swaggo/swag/Makefile new file mode 100644 index 0000000..0d8175d --- /dev/null +++ b/vendor/github.com/swaggo/swag/Makefile @@ -0,0 +1,89 @@ +GOCMD:=$(shell which go) +GOLINT:=$(shell which golint) +GOIMPORT:=$(shell which goimports) +GOFMT:=$(shell which gofmt) +GOBUILD:=$(GOCMD) build +GOINSTALL:=$(GOCMD) install +GOCLEAN:=$(GOCMD) clean +GOTEST:=$(GOCMD) test +GOMODTIDY:=$(GOCMD) mod tidy +GOGET:=$(GOCMD) get +GOLIST:=$(GOCMD) list +GOVET:=$(GOCMD) vet +GOPATH:=$(shell $(GOCMD) env GOPATH) +u := $(if $(update),-u) + +BINARY_NAME:=swag +PACKAGES:=$(shell $(GOLIST) github.com/swaggo/swag github.com/swaggo/swag/cmd/swag github.com/swaggo/swag/gen github.com/swaggo/swag/format) +GOFILES:=$(shell find . 
-name "*.go" -type f) + +export GO111MODULE := on + +all: test build + +.PHONY: build +build: deps + $(GOBUILD) -o $(BINARY_NAME) ./cmd/swag + +.PHONY: install +install: deps + $(GOINSTALL) ./cmd/swag + +.PHONY: test +test: + echo "mode: count" > coverage.out + for PKG in $(PACKAGES); do \ + $(GOCMD) test -v -covermode=count -coverprofile=profile.out $$PKG > tmp.out; \ + cat tmp.out; \ + if grep -q "^--- FAIL" tmp.out; then \ + rm tmp.out; \ + exit 1; \ + elif grep -q "build failed" tmp.out; then \ + rm tmp.out; \ + exit; \ + fi; \ + if [ -f profile.out ]; then \ + cat profile.out | grep -v "mode:" >> coverage.out; \ + rm profile.out; \ + fi; \ + done + +.PHONY: clean +clean: + $(GOCLEAN) + rm -f $(BINARY_NAME) + +.PHONY: deps +deps: + $(GOMODTIDY) + +.PHONY: devel-deps +devel-deps: + GO111MODULE=off $(GOGET) -v -u \ + golang.org/x/lint/golint + +.PHONY: lint +lint: devel-deps + for PKG in $(PACKAGES); do golint -set_exit_status $$PKG || exit 1; done; + +.PHONY: vet +vet: deps devel-deps + $(GOVET) $(PACKAGES) + +.PHONY: fmt +fmt: + $(GOFMT) -s -w $(GOFILES) + +.PHONY: fmt-check +fmt-check: + @diff=$$($(GOFMT) -s -d $(GOFILES)); \ + if [ -n "$$diff" ]; then \ + echo "Please run 'make fmt' and commit the result:"; \ + echo "$${diff}"; \ + exit 1; \ + fi; + +.PHONY: view-covered +view-covered: + $(GOTEST) -coverprofile=cover.out $(TARGET) + $(GOCMD) tool cover -html=cover.out diff --git a/vendor/github.com/swaggo/swag/PULL_REQUEST_TEMPLATE.md b/vendor/github.com/swaggo/swag/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..697de1f --- /dev/null +++ b/vendor/github.com/swaggo/swag/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,8 @@ +**Describe the PR** +e.g. add cool parser. + +**Relation issue** +e.g. https://github.com/swaggo/swag/pull/118/files + +**Additional context** +Add any other context about the problem here. diff --git a/vendor/github.com/swaggo/swag/README.md b/vendor/github.com/swaggo/swag/README.md new file mode 100644 index 0000000..054e36e --- /dev/null +++ b/vendor/github.com/swaggo/swag/README.md @@ -0,0 +1,983 @@ +# swag + +🌍 *[English](README.md) ∙ [简体中文](README_zh-CN.md) ∙ [Português](README_pt.md)* + + + +[![Build Status](https://github.com/swaggo/swag/actions/workflows/ci.yml/badge.svg?branch=master)](https://github.com/features/actions) +[![Coverage Status](https://img.shields.io/codecov/c/github/swaggo/swag/master.svg)](https://codecov.io/gh/swaggo/swag) +[![Go Report Card](https://goreportcard.com/badge/github.com/swaggo/swag)](https://goreportcard.com/report/github.com/swaggo/swag) +[![codebeat badge](https://codebeat.co/badges/71e2f5e5-9e6b-405d-baf9-7cc8b5037330)](https://codebeat.co/projects/github-com-swaggo-swag-master) +[![Go Doc](https://godoc.org/github.com/swaggo/swagg?status.svg)](https://godoc.org/github.com/swaggo/swag) +[![Backers on Open Collective](https://opencollective.com/swag/backers/badge.svg)](#backers) +[![Sponsors on Open Collective](https://opencollective.com/swag/sponsors/badge.svg)](#sponsors) [![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fswaggo%2Fswag.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2Fswaggo%2Fswag?ref=badge_shield) +[![Release](https://img.shields.io/github/release/swaggo/swag.svg?style=flat-square)](https://github.com/swaggo/swag/releases) + + +Swag converts Go annotations to Swagger Documentation 2.0. We've created a variety of plugins for popular [Go web frameworks](#supported-web-frameworks). This allows you to quickly integrate with an existing Go project (using Swagger UI). 
+ +## Contents + - [Getting started](#getting-started) + - [Supported Web Frameworks](#supported-web-frameworks) + - [How to use it with Gin](#how-to-use-it-with-gin) + - [The swag formatter](#the-swag-formatter) + - [Implementation Status](#implementation-status) + - [Declarative Comments Format](#declarative-comments-format) + - [General API Info](#general-api-info) + - [API Operation](#api-operation) + - [Security](#security) + - [Examples](#examples) + - [Descriptions over multiple lines](#descriptions-over-multiple-lines) + - [User defined structure with an array type](#user-defined-structure-with-an-array-type) + - [Function scoped struct declaration](#function-scoped-struct-declaration) + - [Model composition in response](#model-composition-in-response) + - [Add a headers in response](#add-a-headers-in-response) + - [Use multiple path params](#use-multiple-path-params) + - [Example value of struct](#example-value-of-struct) + - [SchemaExample of body](#schemaexample-of-body) + - [Description of struct](#description-of-struct) + - [Use swaggertype tag to supported custom type](#use-swaggertype-tag-to-supported-custom-type) + - [Use global overrides to support a custom type](#use-global-overrides-to-support-a-custom-type) + - [Use swaggerignore tag to exclude a field](#use-swaggerignore-tag-to-exclude-a-field) + - [Add extension info to struct field](#add-extension-info-to-struct-field) + - [Rename model to display](#rename-model-to-display) + - [How to use security annotations](#how-to-use-security-annotations) + - [Add a description for enum items](#add-a-description-for-enum-items) + - [Generate only specific docs file types](#generate-only-specific-docs-file-types) + - [How to use Go generic types](#how-to-use-generics) +- [About the Project](#about-the-project) + +## Getting started + +1. Add comments to your API source code, See [Declarative Comments Format](#declarative-comments-format). + +2. Install swag by using: +```sh +go install github.com/swaggo/swag/cmd/swag@latest +``` +To build from source you need [Go](https://golang.org/dl/) (1.18 or newer). + +Alternatively you can run the docker image: +```sh +docker run --rm -v $(pwd):/code ghcr.io/swaggo/swag:latest +``` + +Or download a pre-compiled binary from the [release page](https://github.com/swaggo/swag/releases). + +3. Run `swag init` in the project's root folder which contains the `main.go` file. This will parse your comments and generate the required files (`docs` folder and `docs/docs.go`). +```sh +swag init +``` + + Make sure to import the generated `docs/docs.go` so that your specific configuration gets `init`'ed. If your General API annotations do not live in `main.go`, you can let swag know with `-g` flag. + ```go + import _ "example-module-name/docs" + ``` + ```sh + swag init -g http/api.go + ``` + +4. (optional) Use `swag fmt` format the SWAG comment. (Please upgrade to the latest version) + + ```sh + swag fmt + ``` + +## swag cli + +```sh +swag init -h +NAME: + swag init - Create docs.go + +USAGE: + swag init [command options] [arguments...] + +OPTIONS: + --quiet, -q Make the logger quiet. 
(default: false) + --generalInfo value, -g value Go file path in which 'swagger general API Info' is written (default: "main.go") + --dir value, -d value Directories you want to parse,comma separated and general-info file must be in the first one (default: "./") + --exclude value Exclude directories and files when searching, comma separated + --propertyStrategy value, -p value Property Naming Strategy like snakecase,camelcase,pascalcase (default: "camelcase") + --output value, -o value Output directory for all the generated files(swagger.json, swagger.yaml and docs.go) (default: "./docs") + --outputTypes value, --ot value Output types of generated files (docs.go, swagger.json, swagger.yaml) like go,json,yaml (default: "go,json,yaml") + --parseVendor Parse go files in 'vendor' folder, disabled by default (default: false) + --parseDependency, --pd Parse go files inside dependency folder, disabled by default (default: false) + --markdownFiles value, --md value Parse folder containing markdown files to use as description, disabled by default + --codeExampleFiles value, --cef value Parse folder containing code example files to use for the x-codeSamples extension, disabled by default + --parseInternal Parse go files in internal packages, disabled by default (default: false) + --generatedTime Generate timestamp at the top of docs.go, disabled by default (default: false) + --parseDepth value Dependency parse depth (default: 100) + --requiredByDefault Set validation required for all fields by default (default: false) + --instanceName value This parameter can be used to name different swagger document instances. It is optional. + --overridesFile value File to read global type overrides from. (default: ".swaggo") + --parseGoList Parse dependency via 'go list' (default: true) + --tags value, -t value A comma-separated list of tags to filter the APIs for which the documentation is generated.Special case if the tag is prefixed with the '!' character then the APIs with that tag will be excluded + --templateDelims value, --td value Provide custom delimeters for Go template generation. The format is leftDelim,rightDelim. For example: "[[,]]" + --collectionFormat value, --cf value Set default collection format (default: "csv") + --state value Initial state for the state machine (default: ""), @HostState in root file, @State in other files + --help, -h show help (default: false) +``` + +```bash +swag fmt -h +NAME: + swag fmt - format swag comments + +USAGE: + swag fmt [command options] [arguments...] 
+ +OPTIONS: + --dir value, -d value Directories you want to parse,comma separated and general-info file must be in the first one (default: "./") + --exclude value Exclude directories and files when searching, comma separated + --generalInfo value, -g value Go file path in which 'swagger general API Info' is written (default: "main.go") + --help, -h show help (default: false) + +``` + +## Supported Web Frameworks + +- [gin](http://github.com/swaggo/gin-swagger) +- [echo](http://github.com/swaggo/echo-swagger) +- [buffalo](https://github.com/swaggo/buffalo-swagger) +- [net/http](https://github.com/swaggo/http-swagger) +- [gorilla/mux](https://github.com/swaggo/http-swagger) +- [go-chi/chi](https://github.com/swaggo/http-swagger) +- [flamingo](https://github.com/i-love-flamingo/swagger) +- [fiber](https://github.com/gofiber/swagger) +- [atreugo](https://github.com/Nerzal/atreugo-swagger) +- [hertz](https://github.com/hertz-contrib/swagger) + +## How to use it with Gin + +Find the example source code [here](https://github.com/swaggo/swag/tree/master/example/celler). + +Finish the steps in [Getting started](#getting-started) +1. After using `swag init` to generate Swagger 2.0 docs, import the following packages: +```go +import "github.com/swaggo/gin-swagger" // gin-swagger middleware +import "github.com/swaggo/files" // swagger embed files +``` + +2. Add [General API](#general-api-info) annotations in `main.go` code: + +```go +// @title Swagger Example API +// @version 1.0 +// @description This is a sample server celler server. +// @termsOfService http://swagger.io/terms/ + +// @contact.name API Support +// @contact.url http://www.swagger.io/support +// @contact.email support@swagger.io + +// @license.name Apache 2.0 +// @license.url http://www.apache.org/licenses/LICENSE-2.0.html + +// @host localhost:8080 +// @BasePath /api/v1 + +// @securityDefinitions.basic BasicAuth + +// @externalDocs.description OpenAPI +// @externalDocs.url https://swagger.io/resources/open-api/ +func main() { + r := gin.Default() + + c := controller.NewController() + + v1 := r.Group("/api/v1") + { + accounts := v1.Group("/accounts") + { + accounts.GET(":id", c.ShowAccount) + accounts.GET("", c.ListAccounts) + accounts.POST("", c.AddAccount) + accounts.DELETE(":id", c.DeleteAccount) + accounts.PATCH(":id", c.UpdateAccount) + accounts.POST(":id/images", c.UploadAccountImage) + } + //... + } + r.GET("/swagger/*any", ginSwagger.WrapHandler(swaggerFiles.Handler)) + r.Run(":8080") +} +//... +``` + +Additionally some general API info can be set dynamically. The generated code package `docs` exports `SwaggerInfo` variable which we can use to set the title, description, version, host and base path programmatically. Example using Gin: + +```go +package main + +import ( + "github.com/gin-gonic/gin" + "github.com/swaggo/files" + "github.com/swaggo/gin-swagger" + + "./docs" // docs is generated by Swag CLI, you have to import it. +) + +// @contact.name API Support +// @contact.url http://www.swagger.io/support +// @contact.email support@swagger.io + +// @license.name Apache 2.0 +// @license.url http://www.apache.org/licenses/LICENSE-2.0.html +func main() { + + // programmatically set swagger info + docs.SwaggerInfo.Title = "Swagger Example API" + docs.SwaggerInfo.Description = "This is a sample server Petstore server." 
+ docs.SwaggerInfo.Version = "1.0" + docs.SwaggerInfo.Host = "petstore.swagger.io" + docs.SwaggerInfo.BasePath = "/v2" + docs.SwaggerInfo.Schemes = []string{"http", "https"} + + r := gin.New() + + // use ginSwagger middleware to serve the API docs + r.GET("/swagger/*any", ginSwagger.WrapHandler(swaggerFiles.Handler)) + + r.Run() +} +``` + +3. Add [API Operation](#api-operation) annotations in `controller` code + +``` go +package controller + +import ( + "fmt" + "net/http" + "strconv" + + "github.com/gin-gonic/gin" + "github.com/swaggo/swag/example/celler/httputil" + "github.com/swaggo/swag/example/celler/model" +) + +// ShowAccount godoc +// @Summary Show an account +// @Description get string by ID +// @Tags accounts +// @Accept json +// @Produce json +// @Param id path int true "Account ID" +// @Success 200 {object} model.Account +// @Failure 400 {object} httputil.HTTPError +// @Failure 404 {object} httputil.HTTPError +// @Failure 500 {object} httputil.HTTPError +// @Router /accounts/{id} [get] +func (c *Controller) ShowAccount(ctx *gin.Context) { + id := ctx.Param("id") + aid, err := strconv.Atoi(id) + if err != nil { + httputil.NewError(ctx, http.StatusBadRequest, err) + return + } + account, err := model.AccountOne(aid) + if err != nil { + httputil.NewError(ctx, http.StatusNotFound, err) + return + } + ctx.JSON(http.StatusOK, account) +} + +// ListAccounts godoc +// @Summary List accounts +// @Description get accounts +// @Tags accounts +// @Accept json +// @Produce json +// @Param q query string false "name search by q" Format(email) +// @Success 200 {array} model.Account +// @Failure 400 {object} httputil.HTTPError +// @Failure 404 {object} httputil.HTTPError +// @Failure 500 {object} httputil.HTTPError +// @Router /accounts [get] +func (c *Controller) ListAccounts(ctx *gin.Context) { + q := ctx.Request.URL.Query().Get("q") + accounts, err := model.AccountsAll(q) + if err != nil { + httputil.NewError(ctx, http.StatusNotFound, err) + return + } + ctx.JSON(http.StatusOK, accounts) +} +//... +``` + +```console +swag init +``` + +4. Run your app, and browse to http://localhost:8080/swagger/index.html. You will see Swagger 2.0 Api documents as shown below: + +![swagger_index.html](https://raw.githubusercontent.com/swaggo/swag/master/assets/swagger-image.png) + +## The swag formatter + +The Swag Comments can be automatically formatted, just like 'go fmt'. +Find the result of formatting [here](https://github.com/swaggo/swag/tree/master/example/celler). + +Usage: +```shell +swag fmt +``` + +Exclude folder: +```shell +swag fmt -d ./ --exclude ./internal +``` + +When using `swag fmt`, you need to ensure that you have a doc comment for the function to ensure correct formatting. +This is due to `swag fmt` indenting swag comments with tabs, which is only allowed *after* a standard doc comment. 
+ +For example, use + +```go +// ListAccounts lists all existing accounts +// +// @Summary List accounts +// @Description get accounts +// @Tags accounts +// @Accept json +// @Produce json +// @Param q query string false "name search by q" Format(email) +// @Success 200 {array} model.Account +// @Failure 400 {object} httputil.HTTPError +// @Failure 404 {object} httputil.HTTPError +// @Failure 500 {object} httputil.HTTPError +// @Router /accounts [get] +func (c *Controller) ListAccounts(ctx *gin.Context) { +``` + +## Implementation Status + +[Swagger 2.0 document](https://swagger.io/docs/specification/2-0/basic-structure/) + +- [x] Basic Structure +- [x] API Host and Base Path +- [x] Paths and Operations +- [x] Describing Parameters +- [x] Describing Request Body +- [x] Describing Responses +- [x] MIME Types +- [x] Authentication + - [x] Basic Authentication + - [x] API Keys +- [x] Adding Examples +- [x] File Upload +- [x] Enums +- [x] Grouping Operations With Tags +- [ ] Swagger Extensions + +# Declarative Comments Format + +## General API Info + +**Example** +[celler/main.go](https://github.com/swaggo/swag/blob/master/example/celler/main.go) + +| annotation | description | example | +|-------------|--------------------------------------------|---------------------------------| +| title | **Required.** The title of the application.| // @title Swagger Example API | +| version | **Required.** Provides the version of the application API.| // @version 1.0 | +| description | A short description of the application. |// @description This is a sample server celler server. | +| tag.name | Name of a tag.| // @tag.name This is the name of the tag | +| tag.description | Description of the tag | // @tag.description Cool Description | +| tag.docs.url | Url of the external Documentation of the tag | // @tag.docs.url https://example.com| +| tag.docs.description | Description of the external Documentation of the tag| // @tag.docs.description Best example documentation | +| termsOfService | The Terms of Service for the API.| // @termsOfService http://swagger.io/terms/ | +| contact.name | The contact information for the exposed API.| // @contact.name API Support | +| contact.url | The URL pointing to the contact information. MUST be in the format of a URL. | // @contact.url http://www.swagger.io/support| +| contact.email| The email address of the contact person/organization. MUST be in the format of an email address.| // @contact.email support@swagger.io | +| license.name | **Required.** The license name used for the API.|// @license.name Apache 2.0| +| license.url | A URL to the license used for the API. MUST be in the format of a URL. | // @license.url http://www.apache.org/licenses/LICENSE-2.0.html | +| host | The host (name or ip) serving the API. | // @host localhost:8080 | +| BasePath | The base path on which the API is served. | // @BasePath /api/v1 | +| accept | A list of MIME types the APIs can consume. Note that Accept only affects operations with a request body, such as POST, PUT and PATCH. Value MUST be as described under [Mime Types](#mime-types). | // @accept json | +| produce | A list of MIME types the APIs can produce. Value MUST be as described under [Mime Types](#mime-types). | // @produce json | +| query.collection.format | The default collection(array) param format in query,enums:csv,multi,pipes,tsv,ssv. If not set, csv is the default.| // @query.collection.format multi +| schemes | The transfer protocol for the operation that separated by spaces. 
| // @schemes http https | +| externalDocs.description | Description of the external document. | // @externalDocs.description OpenAPI | +| externalDocs.url | URL of the external document. | // @externalDocs.url https://swagger.io/resources/open-api/ | +| x-name | The extension key, must be start by x- and take only json value | // @x-example-key {"key": "value"} | + +### Using markdown descriptions +When a short string in your documentation is insufficient, or you need images, code examples and things like that you may want to use markdown descriptions. In order to use markdown descriptions use the following annotations. + + +| annotation | description | example | +|-------------|--------------------------------------------|---------------------------------| +| title | **Required.** The title of the application.| // @title Swagger Example API | +| version | **Required.** Provides the version of the application API.| // @version 1.0 | +| description.markdown | A short description of the application. Parsed from the api.md file. This is an alternative to @description |// @description.markdown No value needed, this parses the description from api.md | +| tag.name | Name of a tag.| // @tag.name This is the name of the tag | +| tag.description.markdown | Description of the tag this is an alternative to tag.description. The description will be read from a file named like tagname.md | // @tag.description.markdown | + + +## API Operation + +**Example** +[celler/controller](https://github.com/swaggo/swag/tree/master/example/celler/controller) + + +| annotation | description | +|----------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| description | A verbose explanation of the operation behavior. | +| description.markdown | A short description of the application. The description will be read from a file. E.g. `@description.markdown details` will load `details.md` | // @description.file endpoint.description.markdown | +| id | A unique string used to identify the operation. Must be unique among all API operations. | +| tags | A list of tags to each API operation that separated by commas. | +| summary | A short summary of what the operation does. | +| accept | A list of MIME types the APIs can consume. Note that Accept only affects operations with a request body, such as POST, PUT and PATCH. Value MUST be as described under [Mime Types](#mime-types). | +| produce | A list of MIME types the APIs can produce. Value MUST be as described under [Mime Types](#mime-types). | +| param | Parameters that separated by spaces. `param name`,`param type`,`data type`,`is mandatory?`,`comment` `attribute(optional)` | +| security | [Security](#security) to each API operation. | +| success | Success response that separated by spaces. `return code or default`,`{param type}`,`data type`,`comment` | +| failure | Failure response that separated by spaces. `return code or default`,`{param type}`,`data type`,`comment` | +| response | As same as `success` and `failure` | +| header | Header in response that separated by spaces. `return code`,`{param type}`,`data type`,`comment` | +| router | Path definition that separated by spaces. `path`,`[httpMethod]` | +| deprecatedrouter | As same as router, but deprecated. | +| x-name | The extension key, must be start by x- and take only json value. | +| x-codeSample | Optional Markdown usage. take `file` as parameter. 
This will then search for a file named like the summary in the given folder. | +| deprecated | Mark endpoint as deprecated. | + + + +## Mime Types + +`swag` accepts all MIME Types which are in the correct format, that is, match `*/*`. +Besides that, `swag` also accepts aliases for some MIME Types as follows: + +| Alias | MIME Type | +|-----------------------|-----------------------------------| +| json | application/json | +| xml | text/xml | +| plain | text/plain | +| html | text/html | +| mpfd | multipart/form-data | +| x-www-form-urlencoded | application/x-www-form-urlencoded | +| json-api | application/vnd.api+json | +| json-stream | application/x-json-stream | +| octet-stream | application/octet-stream | +| png | image/png | +| jpeg | image/jpeg | +| gif | image/gif | + + + +## Param Type + +- query +- path +- header +- body +- formData + +## Data Type + +- string (string) +- integer (int, uint, uint32, uint64) +- number (float32) +- boolean (bool) +- file (param data type when uploading) +- user defined struct + +## Security +| annotation | description | parameters | example | +|------------|-------------|------------|---------| +| securitydefinitions.basic | [Basic](https://swagger.io/docs/specification/2-0/authentication/basic-authentication/) auth. | | // @securityDefinitions.basic BasicAuth | +| securitydefinitions.apikey | [API key](https://swagger.io/docs/specification/2-0/authentication/api-keys/) auth. | in, name, description | // @securityDefinitions.apikey ApiKeyAuth | +| securitydefinitions.oauth2.application | [OAuth2 application](https://swagger.io/docs/specification/authentication/oauth2/) auth. | tokenUrl, scope, description | // @securitydefinitions.oauth2.application OAuth2Application | +| securitydefinitions.oauth2.implicit | [OAuth2 implicit](https://swagger.io/docs/specification/authentication/oauth2/) auth. | authorizationUrl, scope, description | // @securitydefinitions.oauth2.implicit OAuth2Implicit | +| securitydefinitions.oauth2.password | [OAuth2 password](https://swagger.io/docs/specification/authentication/oauth2/) auth. | tokenUrl, scope, description | // @securitydefinitions.oauth2.password OAuth2Password | +| securitydefinitions.oauth2.accessCode | [OAuth2 access code](https://swagger.io/docs/specification/authentication/oauth2/) auth. 
| tokenUrl, authorizationUrl, scope, description | // @securitydefinitions.oauth2.accessCode OAuth2AccessCode | + + +| parameters annotation | example | +|---------------------------------|-------------------------------------------------------------------------| +| in | // @in header | +| name | // @name Authorization | +| tokenUrl | // @tokenUrl https://example.com/oauth/token | +| authorizationurl | // @authorizationurl https://example.com/oauth/authorize | +| scope.hoge | // @scope.write Grants write access | +| description | // @description OAuth protects our entity endpoints | + +## Attribute + +```go +// @Param enumstring query string false "string enums" Enums(A, B, C) +// @Param enumint query int false "int enums" Enums(1, 2, 3) +// @Param enumnumber query number false "int enums" Enums(1.1, 1.2, 1.3) +// @Param string query string false "string valid" minlength(5) maxlength(10) +// @Param int query int false "int valid" minimum(1) maximum(10) +// @Param default query string false "string default" default(A) +// @Param example query string false "string example" example(string) +// @Param collection query []string false "string collection" collectionFormat(multi) +// @Param extensions query []string false "string collection" extensions(x-example=test,x-nullable) +``` + +It also works for the struct fields: + +```go +type Foo struct { + Bar string `minLength:"4" maxLength:"16" example:"random string"` + Baz int `minimum:"10" maximum:"20" default:"15"` + Qux []string `enums:"foo,bar,baz"` +} +``` + +### Available + +Field Name | Type | Description +---|:---:|--- +validate | `string` | Determines the validation for the parameter. Possible values are: `required,optional`. +default | * | Declares the value of the parameter that the server will use if none is provided, for example a "count" to control the number of results per page might default to 100 if not supplied by the client in the request. (Note: "default" has no meaning for required parameters.) See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-6.2. Unlike JSON Schema this value MUST conform to the defined [`type`](#parameterType) for this parameter. +maximum | `number` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.1.2. +minimum | `number` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.1.3. +multipleOf | `number` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.1.1. +maxLength | `integer` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.2.1. +minLength | `integer` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.2.2. +enums | [\*] | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.5.1. +format | `string` | The extending format for the previously mentioned [`type`](#parameterType). See [Data Type Formats](https://swagger.io/specification/v2/#dataTypeFormat) for further details. +collectionFormat | `string` |Determines the format of the array if type array is used. Possible values are:
  • `csv` - comma separated values `foo,bar`. <br>
  • `ssv` - space separated values `foo bar`. <br>
  • `tsv` - tab separated values `foo\tbar`. <br>
  • `pipes` - pipe separated values <code>foo&#124;bar</code>. <br>
  • `multi` - corresponds to multiple parameter instances instead of multiple values for a single instance `foo=bar&foo=baz`. This is valid only for parameters [`in`](#parameterIn) "query" or "formData". <br>
Default value is `csv`. +example | * | Declares the example for the parameter value +extensions | `string` | Add extension to parameters. + +### Future + +Field Name | Type | Description +---|:---:|--- +pattern | `string` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.2.3. +maxItems | `integer` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.3.2. +minItems | `integer` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.3.3. +uniqueItems | `boolean` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.3.4. + +## Examples + +### Descriptions over multiple lines + +You can add descriptions spanning multiple lines in either the general api description or routes definitions like so: + +```go +// @description This is the first line +// @description This is the second line +// @description And so forth. +``` + +### User defined structure with an array type + +```go +// @Success 200 {array} model.Account <-- This is a user defined struct. +``` + +```go +package model + +type Account struct { + ID int `json:"id" example:"1"` + Name string `json:"name" example:"account name"` +} +``` + + +### Function scoped struct declaration + +You can declare your request response structs inside a function body. +You must have to follow the naming convention `.. `. + +```go +package main + +// @Param request body main.MyHandler.request true "query params" +// @Success 200 {object} main.MyHandler.response +// @Router /test [post] +func MyHandler() { + type request struct { + RequestField string + } + + type response struct { + ResponseField string + } +} +``` + + +### Model composition in response +```go +// JSONResult's data field will be overridden by the specific type proto.Order +@success 200 {object} jsonresult.JSONResult{data=proto.Order} "desc" +``` + +```go +type JSONResult struct { + Code int `json:"code" ` + Message string `json:"message"` + Data interface{} `json:"data"` +} + +type Order struct { //in `proto` package + Id uint `json:"id"` + Data interface{} `json:"data"` +} +``` + +- also support array of objects and primitive types as nested response +```go +@success 200 {object} jsonresult.JSONResult{data=[]proto.Order} "desc" +@success 200 {object} jsonresult.JSONResult{data=string} "desc" +@success 200 {object} jsonresult.JSONResult{data=[]string} "desc" +``` + +- overriding multiple fields. field will be added if not exists +```go +@success 200 {object} jsonresult.JSONResult{data1=string,data2=[]string,data3=proto.Order,data4=[]proto.Order} "desc" +``` +- overriding deep-level fields +```go +type DeepObject struct { //in `proto` package + ... +} +@success 200 {object} jsonresult.JSONResult{data1=proto.Order{data=proto.DeepObject},data2=[]proto.Order{data=[]proto.DeepObject}} "desc" +``` +### Add a headers in response + +```go +// @Success 200 {string} string "ok" +// @failure 400 {string} string "error" +// @response default {string} string "other error" +// @Header 200 {string} Location "/entity/1" +// @Header 200,400,default {string} Token "token" +// @Header all {string} Token2 "token2" +``` + +### Use multiple path params + +```go +/// ... +// @Param group_id path int true "Group ID" +// @Param account_id path int true "Account ID" +// ... +// @Router /examples/groups/{group_id}/accounts/{account_id} [get] +``` + +### Add multiple paths + +```go +/// ... +// @Param group_id path int true "Group ID" +// @Param user_id path int true "User ID" +// ... 
+// @Router /examples/groups/{group_id}/user/{user_id}/address [put] +// @Router /examples/user/{user_id}/address [put] +``` + +### Example value of struct + +```go +type Account struct { + ID int `json:"id" example:"1"` + Name string `json:"name" example:"account name"` + PhotoUrls []string `json:"photo_urls" example:"http://test/image/1.jpg,http://test/image/2.jpg"` +} +``` + +### SchemaExample of body + +```go +// @Param email body string true "message/rfc822" SchemaExample(Subject: Testmail\r\n\r\nBody Message\r\n) +``` + +### Description of struct + +```go +// Account model info +// @Description User account information +// @Description with user id and username +type Account struct { + // ID this is userid + ID int `json:"id"` + Name string `json:"name"` // This is Name +} +``` + +[#708](https://github.com/swaggo/swag/issues/708) The parser handles only struct comments starting with `@Description` attribute. +But it writes all struct field comments as is. + +So, generated swagger doc as follows: +```json +"Account": { + "type":"object", + "description": "User account information with user id and username" + "properties": { + "id": { + "type": "integer", + "description": "ID this is userid" + }, + "name": { + "type":"string", + "description": "This is Name" + } + } +} +``` + +### Use swaggertype tag to supported custom type +[#201](https://github.com/swaggo/swag/issues/201#issuecomment-475479409) + +```go +type TimestampTime struct { + time.Time +} + +///implement encoding.JSON.Marshaler interface +func (t *TimestampTime) MarshalJSON() ([]byte, error) { + bin := make([]byte, 16) + bin = strconv.AppendInt(bin[:0], t.Time.Unix(), 10) + return bin, nil +} + +func (t *TimestampTime) UnmarshalJSON(bin []byte) error { + v, err := strconv.ParseInt(string(bin), 10, 64) + if err != nil { + return err + } + t.Time = time.Unix(v, 0) + return nil +} +/// + +type Account struct { + // Override primitive type by simply specifying it via `swaggertype` tag + ID sql.NullInt64 `json:"id" swaggertype:"integer"` + + // Override struct type to a primitive type 'integer' by specifying it via `swaggertype` tag + RegisterTime TimestampTime `json:"register_time" swaggertype:"primitive,integer"` + + // Array types can be overridden using "array," format + Coeffs []big.Float `json:"coeffs" swaggertype:"array,number"` +} +``` + +[#379](https://github.com/swaggo/swag/issues/379) +```go +type CerticateKeyPair struct { + Crt []byte `json:"crt" swaggertype:"string" format:"base64" example:"U3dhZ2dlciByb2Nrcw=="` + Key []byte `json:"key" swaggertype:"string" format:"base64" example:"U3dhZ2dlciByb2Nrcw=="` +} +``` +generated swagger doc as follows: +```go +"api.MyBinding": { + "type":"object", + "properties":{ + "crt":{ + "type":"string", + "format":"base64", + "example":"U3dhZ2dlciByb2Nrcw==" + }, + "key":{ + "type":"string", + "format":"base64", + "example":"U3dhZ2dlciByb2Nrcw==" + } + } +} + +``` + +### Use global overrides to support a custom type + +If you are using generated files, the [`swaggertype`](#use-swaggertype-tag-to-supported-custom-type) or `swaggerignore` tags may not be possible. + +By passing a mapping to swag with `--overridesFile` you can tell swag to use one type in place of another wherever it appears. By default, if a `.swaggo` file is present in the current directory it will be used. 
+ +Go code: +```go +type MyStruct struct { + ID sql.NullInt64 `json:"id"` + Name sql.NullString `json:"name"` +} +``` + +`.swaggo`: +``` +// Replace all NullInt64 with int +replace database/sql.NullInt64 int + +// Don't include any fields of type database/sql.NullString in the swagger docs +skip database/sql.NullString +``` + +Possible directives are comments (beginning with `//`), `replace path/to/a.type path/to/b.type`, and `skip path/to/a.type`. + +(Note that the full paths to any named types must be provided to prevent problems when multiple packages define a type with the same name) + +Rendered: +```go +"types.MyStruct": { + "id": "integer" +} +``` + + +### Use swaggerignore tag to exclude a field + +```go +type Account struct { + ID string `json:"id"` + Name string `json:"name"` + Ignored int `swaggerignore:"true"` +} +``` + +### Add extension info to struct field + +```go +type Account struct { + ID string `json:"id" extensions:"x-nullable,x-abc=def,!x-omitempty"` // extensions fields must start with "x-" +} +``` + +generate swagger doc as follows: + +```go +"Account": { + "type": "object", + "properties": { + "id": { + "type": "string", + "x-nullable": true, + "x-abc": "def", + "x-omitempty": false + } + } +} +``` +### Rename model to display + +```golang +type Resp struct { + Code int +}//@name Response +``` + +### How to use security annotations + +General API info. + +```go +// @securityDefinitions.basic BasicAuth + +// @securitydefinitions.oauth2.application OAuth2Application +// @tokenUrl https://example.com/oauth/token +// @scope.write Grants write access +// @scope.admin Grants read and write access to administrative information +``` + +Each API operation. + +```go +// @Security ApiKeyAuth +``` + +Make it AND condition + +```go +// @Security ApiKeyAuth +// @Security OAuth2Application[write, admin] +``` + +Make it OR condition + +```go +// @Security ApiKeyAuth || firebase +// @Security OAuth2Application[write, admin] || APIKeyAuth +``` + + +### Add a description for enum items + +```go +type Example struct { + // Sort order: + // * asc - Ascending, from A to Z. + // * desc - Descending, from Z to A. + Order string `enums:"asc,desc"` +} +``` + +### Generate only specific docs file types + +By default `swag` command generates Swagger specification in three different files/file types: +- docs.go +- swagger.json +- swagger.yaml + +If you would like to limit a set of file types which should be generated you can use `--outputTypes` (short `-ot`) flag. Default value is `go,json,yaml` - output types separated with comma. To limit output only to `go` and `yaml` files, you would write `go,yaml`. With complete command that would be `swag init --outputTypes go,yaml`. + +### How to use Generics + +```go +// @Success 200 {object} web.GenericNestedResponse[types.Post] +// @Success 204 {object} web.GenericNestedResponse[types.Post, Types.AnotherOne] +// @Success 201 {object} web.GenericNestedResponse[web.GenericInnerType[types.Post]] +func GetPosts(w http.ResponseWriter, r *http.Request) { + _ = web.GenericNestedResponse[types.Post]{} +} +``` +See [this file](https://github.com/swaggo/swag/blob/master/testdata/generics_nested/api/api.go) for more details +and other examples. 
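+
+For reference, the `web.GenericNestedResponse` wrapper referenced above can be declared as an ordinary Go generic struct; swag resolves the instantiated type parameters when it generates the schema. A minimal sketch, with illustrative field names that are assumptions rather than the contents of the linked example:
+
+```go
+package web
+
+// GenericNestedResponse is a hypothetical generic wrapper, shown only to
+// illustrate the annotation syntax above; the field names are assumptions.
+type GenericNestedResponse[T any] struct {
+	Items []T `json:"items"` // typed payload, resolved per instantiation
+	Count int `json:"count"` // number of items returned
+}
+```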
+ +### Change the default Go Template action delimiters +[#980](https://github.com/swaggo/swag/issues/980) +[#1177](https://github.com/swaggo/swag/issues/1177) + +If your swagger annotations or struct fields contain "{{" or "}}", the template generation will most likely fail, as these are the default delimiters for [go templates](https://pkg.go.dev/text/template#Template.Delims). + +To make the generation work properly, you can change the default delimiters with `-td`. For example: +```console +swag init -g http/api.go -td "[[,]]" +``` +The new delimiter is a string with the format "``,``". + +## About the Project +This project was inspired by [yvasiyarov/swagger](https://github.com/yvasiyarov/swagger) but we simplified the usage and added support a variety of [web frameworks](#supported-web-frameworks). Gopher image source is [tenntenn/gopher-stickers](https://github.com/tenntenn/gopher-stickers). It has licenses [creative commons licensing](http://creativecommons.org/licenses/by/3.0/deed.en). +## Contributors + +This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)]. + + + +## Backers + +Thank you to all our backers! 🙏 [[Become a backer](https://opencollective.com/swag#backer)] + + + + +## Sponsors + +Support this project by becoming a sponsor. Your logo will show up here with a link to your website. [[Become a sponsor](https://opencollective.com/swag#sponsor)] + + + + + + + + + + + + + + + +## License +[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fswaggo%2Fswag.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2Fswaggo%2Fswag?ref=badge_large) diff --git a/vendor/github.com/swaggo/swag/README_pt.md b/vendor/github.com/swaggo/swag/README_pt.md new file mode 100644 index 0000000..7f95066 --- /dev/null +++ b/vendor/github.com/swaggo/swag/README_pt.md @@ -0,0 +1,968 @@ +# swag + +🌍 *[English](README.md) ∙ [简体中文](README_zh-CN.md) ∙ [Português](README_pt.md)* + + + +[![Build Status](https://github.com/swaggo/swag/actions/workflows/ci.yml/badge.svg?branch=master)](https://github.com/features/actions) +[![Coverage Status](https://img.shields.io/codecov/c/github/swaggo/swag/master.svg)](https://codecov.io/gh/swaggo/swag) +[![Go Report Card](https://goreportcard.com/badge/github.com/swaggo/swag)](https://goreportcard.com/report/github.com/swaggo/swag) +[![codebeat badge](https://codebeat.co/badges/71e2f5e5-9e6b-405d-baf9-7cc8b5037330)](https://codebeat.co/projects/github-com-swaggo-swag-master) +[![Go Doc](https://godoc.org/github.com/swaggo/swagg?status.svg)](https://godoc.org/github.com/swaggo/swag) +[![Backers on Open Collective](https://opencollective.com/swag/backers/badge.svg)](#backers) +[![Sponsors on Open Collective](https://opencollective.com/swag/sponsors/badge.svg)](#sponsors) [![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fswaggo%2Fswag.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2Fswaggo%2Fswag?ref=badge_shield) +[![Release](https://img.shields.io/github/release/swaggo/swag.svg?style=flat-square)](https://github.com/swaggo/swag/releases) + +Swag converte anotações Go para Documentação Swagger 2.0. Criámos uma variedade de plugins para populares [Go web frameworks](#supported-web-frameworks). Isto permite uma integração rápida com um projecto Go existente (utilizando a Swagger UI). 
+ +## Conteúdo +- [Começando](#começando) + - [Estruturas Web Suportadas](#estruturas-web-suportadas) + - [Como utilizá-lo com Gin](#como-como-ser-como-gin) + - [O formatador de swag](#a-formatação-de-swag) + - [Estado de Implementação](#implementação-estado) + - [Formato dos comentários declarativos](#formato-dos-comentarios-declarativos) + - [Informações Gerais API](#informações-gerais-api) + - [Operação API](#api-operacao) + - [Segurança](#seguranca) + - [Exemplos](#exemplos) + - [Descrições em múltiplas linhas](#descricoes-sobre-múltiplas-linhas) + - [Estrutura definida pelo utilizador com um tipo de matriz](#-estrutura-definida-pelo-utilizador-com-um-um-tipo) + - [Declaração de estruturação de funções](#function-scoped-struct-declaration) + - [Composição do modelo em resposta](#model-composição-em-resposta) + - [Adicionar um cabeçalho em resposta](#add-a-headers-in-response) + - [Utilizar parâmetros de caminhos múltiplos](#use-multiple-path-params) + - [Exemplo de valor de estrutura](#exemplo-do-valor-de-estrutura) + - [Schema Exemplo do corpo](#schemaexample-of-body) + - [Descrição da estrutura](#descrição-da-estrutura) + - [Usar etiqueta do tipo swaggertype para suportar o tipo personalizado](#use-swaggertype-tag-to-supported-custom-type) + - [Utilizar anulações globais para suportar um tipo personalizado](#use-global-overrides-to-support-a-custom-type) + - [Use swaggerignore tag para excluir um campo](#use-swaggerignore-tag-to-excluir-um-campo) + - [Adicionar informações de extensão ao campo de estruturação](#add-extension-info-to-struct-field) + - [Renomear modelo a expor](#renome-modelo-a-exibir) + - [Como utilizar as anotações de segurança](#como-utilizar-as-anotações-de-segurança) + - [Adicionar uma descrição para enumerar artigos](#add-a-description-for-enum-items) + - [Gerar apenas tipos de ficheiros de documentos específicos](#generate-only-specific-docs-file-file-types) + - [Como usar tipos genéricos](#como-usar-tipos-genéricos) +- [Sobre o projecto](#sobre-o-projecto) + +## Começando + +1. Adicione comentários ao código-fonte da API, consulte [Formato dos comentários declarativos](#declarative-comments-format). + +2. Descarregue o swag utilizando: +```sh +go install github.com/swaggo/swag/cmd/swag@latest +``` +Para construir a partir da fonte é necessário [Go](https://golang.org/dl/) (1.18 ou mais recente). + +Ou descarregar um binário pré-compilado a partir da [página de lançamento](https://github.com/swaggo/swag/releases). + +3. Executar `swag init` na pasta raiz do projecto que contém o ficheiro `main.go`. Isto irá analisar os seus comentários e gerar os ficheiros necessários (pasta `docs` e `docs/docs.go`). +```sh +swag init +``` + +Certifique-se de importar os `docs/docs.go` gerados para que a sua configuração específica fique "init" ed. Se as suas anotações API gerais não viverem em `main.go`, pode avisar a swag com a bandeira `-g`. +```sh +swag init -g http/api.go +``` + +4. (opcional) Utilizar o formato `swag fmt` no comentário SWAG. (Por favor, actualizar para a versão mais recente) + +```sh +swag fmt +``` + +## swag cli + +```sh +swag init -h +NOME: + swag init - Criar docs.go + +UTILIZAÇÃO: + swag init [opções de comando] [argumentos...] 
+ +OPÇÕES: + --quiet, -q Fazer o logger ficar quiet (por padrão: falso) + --generalInfo valor, -g valor Go caminho do ficheiro em que 'swagger general API Info' está escrito (por padrão: "main.go") + --dir valor, -d valor Os directórios que deseja analisar, separados por vírgulas e de informação geral devem estar no primeiro (por padrão: "./") + --exclude valor Excluir directórios e ficheiros ao pesquisar, separados por vírgulas + -propertyStrategy da estratégia, -p valor da propriedadeEstratégia de nomeação de propriedades como snakecase,camelcase,pascalcase (por padrão: "camelcase") + --output de saída, -o valor directório de saída para todos os ficheiros gerados(swagger.json, swagger.yaml e docs.go) (por padrão: "./docs") + --outputTypes valor de saídaTypes, -- valor de saída Tipos de ficheiros gerados (docs.go, swagger.json, swagger.yaml) como go,json,yaml (por padrão: "go,json,yaml") + --parseVendor ParseVendor Parse go files na pasta 'vendor', desactivado por padrão (padrão: falso) + --parseInternal Parse go ficheiros em pacotes internos, desactivados por padrão (padrão: falso) + --generatedTime Gerar timestamp no topo dos docs.go, desactivado por padrão (padrão: falso) + --parteDepth value Dependência profundidade parse (por padrão: 100) + --templateDelims value, --td value fornecem delimitadores personalizados para a geração de modelos Go. O formato é leftDelim,rightDelim. Por exemplo: "[[,]]" + ... + + --help, -h mostrar ajuda (por padrão: falso) +``` + +```bash +swag fmt -h +NOME: + swag fmt - formato swag comentários + +UTILIZAÇÃO: + swag fmt [opções de comando] [argumentos...] + +OPÇÕES: + --dir valor, -d valor Os directórios que pretende analisar, separados por vírgulas e de informação geral devem estar no primeiro (por padrão: "./") + --excluir valor Excluir directórios e ficheiros ao pesquisar, separados por vírgulas + --generalInfo value, -g value Go file path in which 'swagger general API Info' is written (por padrão: "main.go") + --ajuda, -h mostrar ajuda (por padrão: falso) + +``` + +## Estruturas Web Suportadas + +- [gin](http://github.com/swaggo/gin-swagger) +- [echo](http://github.com/swaggo/echo-swagger) +- [buffalo](https://github.com/swaggo/buffalo-swagger) +- [net/http](https://github.com/swaggo/http-swagger) +- [gorilla/mux](https://github.com/swaggo/http-swagger) +- [go-chi/chi](https://github.com/swaggo/http-swagger) +- [flamingo](https://github.com/i-love-flamingo/swagger) +- [fiber](https://github.com/gofiber/swagger) +- [atreugo](https://github.com/Nerzal/atreugo-swagger) +- [hertz](https://github.com/hertz-contrib/swagger) + +## Como utilizá-lo com Gin + +Encontrar o código fonte de exemplo [aqui](https://github.com/swaggo/swag/tree/master/example/celler). + +1. Depois de utilizar `swag init` para gerar os documentos Swagger 2.0, importar os seguintes pacotes: +```go +import "github.com/swaggo/gin-swagger" // gin-swagger middleware +import "github.com/swaggo/files" // swagger embed files +``` + +2. Adicionar [Informações Gerais API](#general-api-info) anotações em código `main.go`: + + +```go +// @title Swagger Example API +// @version 1.0 +// @description This is a sample server celler server. 
+// @termsOfService http://swagger.io/terms/ + +// @contact.name API Support +// @contact.url http://www.swagger.io/support +// @contact.email support@swagger.io + +// @license.name Apache 2.0 +// @license.url http://www.apache.org/licenses/LICENSE-2.0.html + +// @host localhost:8080 +// @BasePath /api/v1 + +// @securityDefinitions.basic BasicAuth + +// @externalDocs.description OpenAPI +// @externalDocs.url https://swagger.io/resources/open-api/ +func main() { + r := gin.Default() + + c := controller.NewController() + + v1 := r.Group("/api/v1") + { + accounts := v1.Group("/accounts") + { + accounts.GET(":id", c.ShowAccount) + accounts.GET("", c.ListAccounts) + accounts.POST("", c.AddAccount) + accounts.DELETE(":id", c.DeleteAccount) + accounts.PATCH(":id", c.UpdateAccount) + accounts.POST(":id/images", c.UploadAccountImage) + } + //... + } + r.GET("/swagger/*any", ginSwagger.WrapHandler(swaggerFiles.Handler)) + r.Run(":8080") +} +//... +``` + +Além disso, algumas informações API gerais podem ser definidas de forma dinâmica. O pacote de código gerado `docs` exporta a variável `SwaggerInfo` que podemos utilizar para definir programticamente o título, descrição, versão, hospedeiro e caminho base. Exemplo utilizando Gin: + +```go +package main + +import ( + "github.com/gin-gonic/gin" + "github.com/swaggo/files" + "github.com/swaggo/gin-swagger" + + "./docs" // docs is generated by Swag CLI, you have to import it. +) + +// @contact.name API Support +// @contact.url http://www.swagger.io/support +// @contact.email support@swagger.io + +// @license.name Apache 2.0 +// @license.url http://www.apache.org/licenses/LICENSE-2.0.html +func main() { + + // programmatically set swagger info + docs.SwaggerInfo.Title = "Swagger Example API" + docs.SwaggerInfo.Description = "This is a sample server Petstore server." + docs.SwaggerInfo.Version = "1.0" + docs.SwaggerInfo.Host = "petstore.swagger.io" + docs.SwaggerInfo.BasePath = "/v2" + docs.SwaggerInfo.Schemes = []string{"http", "https"} + + r := gin.New() + + // use ginSwagger middleware to serve the API docs + r.GET("/swagger/*any", ginSwagger.WrapHandler(swaggerFiles.Handler)) + + r.Run() +} +``` + +3. 
Adicionar [Operação API](#api-operacao) anotações em código `controller` + +```go +package controller + +import ( + "fmt" + "net/http" + "strconv" + + "github.com/gin-gonic/gin" + "github.com/swaggo/swag/example/celler/httputil" + "github.com/swaggo/swag/example/celler/model" +) + +// ShowAccount godoc +// @Summary Show an account +// @Description get string by ID +// @Tags accounts +// @Accept json +// @Produce json +// @Param id path int true "Account ID" +// @Success 200 {object} model.Account +// @Failure 400 {object} httputil.HTTPError +// @Failure 404 {object} httputil.HTTPError +// @Failure 500 {object} httputil.HTTPError +// @Router /accounts/{id} [get] +func (c *Controller) ShowAccount(ctx *gin.Context) { + id := ctx.Param("id") + aid, err := strconv.Atoi(id) + if err != nil { + httputil.NewError(ctx, http.StatusBadRequest, err) + return + } + account, err := model.AccountOne(aid) + if err != nil { + httputil.NewError(ctx, http.StatusNotFound, err) + return + } + ctx.JSON(http.StatusOK, account) +} + +// ListAccounts godoc +// @Summary List accounts +// @Description get accounts +// @Tags accounts +// @Accept json +// @Produce json +// @Param q query string false "name search by q" Format(email) +// @Success 200 {array} model.Account +// @Failure 400 {object} httputil.HTTPError +// @Failure 404 {object} httputil.HTTPError +// @Failure 500 {object} httputil.HTTPError +// @Router /accounts [get] +func (c *Controller) ListAccounts(ctx *gin.Context) { + q := ctx.Request.URL.Query().Get("q") + accounts, err := model.AccountsAll(q) + if err != nil { + httputil.NewError(ctx, http.StatusNotFound, err) + return + } + ctx.JSON(http.StatusOK, accounts) +} +//... +``` + +```console +swag init +``` + +4. Execute a sua aplicação, e navegue para http://localhost:8080/swagger/index.html. Verá os documentos Swagger 2.0 Api, como mostrado abaixo: + +![swagger_index.html](https://raw.githubusercontent.com/swaggo/swag/master/assets/swagger-image.png) + +## O formatador de swag + +Os Swag Comments podem ser formatados automaticamente, assim como 'go fmt'. +Encontre o resultado da formatação [aqui](https://github.com/swaggo/swag/tree/master/example/celler). + +Usage: +```shell +swag fmt +``` + +Exclude folder: +```shell +swag fmt -d ./ --exclude ./internal +``` + +Ao utilizar `swag fmt`, é necessário assegurar-se de que tem um comentário doc para a função a fim de assegurar uma formatação correcta. +Isto deve-se ao `swag fmt` que traça comentários swag com separadores, o que só é permitido *após* um comentário doc padrão. 
+ +Por exemplo, utilizar + +```go +// ListAccounts lists all existing accounts +// +// @Summary List accounts +// @Description get accounts +// @Tags accounts +// @Accept json +// @Produce json +// @Param q query string false "name search by q" Format(email) +// @Success 200 {array} model.Account +// @Failure 400 {object} httputil.HTTPError +// @Failure 404 {object} httputil.HTTPError +// @Failure 500 {object} httputil.HTTPError +// @Router /accounts [get] +func (c *Controller) ListAccounts(ctx *gin.Context) { +``` + +## Estado de Implementação + +[Documento Swagger 2.0](https://swagger.io/docs/specification/2-0/basic-structure/) + +- [x] Estrutura básica +- [x] Hospedeiro API e Caminho Base +- [x] Caminhos e operações +- [x] Descrição dos parâmetros +- [x] Descrever o corpo do pedido +- [x] Descrição das respostas +- [x] Tipos MIME +- [x] Autenticação + - [x] Autenticação básica + - [x] Chaves API +- [x] Acrescentar exemplos +- [x] Carregamento de ficheiros +- [x] Enums +- [x] Operações de Agrupamento com Etiquetas +- Extensões Swagger + +## Formato dos comentários declarativos + +## Informações Gerais API + +**Exemplo** +[celler/main.go](https://github.com/swaggo/swag/blob/master/example/celler/main.go) + +| anotação | descrição | exemplo | +|-------------|--------------------------------------------|---------------------------------| +| title | **Obrigatório.** O título da aplicação.| // @title Swagger Example API | +| version | **Obrigatório.** Fornece a versão da aplicação API.| // @version 1.0 | +| description | Uma breve descrição da candidatura. |// @descrição Este é um servidor servidor de celas de amostra. | +| tag.name | Nome de uma tag.| // @tag.name Este é o nome da tag | +| tag.description | Descrição da tag | // @tag.description Cool Description | +| tag.docs.url | Url da Documentação externa da tag | // @tag.docs.url https://example.com| +| tag.docs.description | Descrição da documentação externa da tag| // @tag.docs.description Melhor exemplo de documentação | +| TermsOfService | Os Termos de Serviço para o API.| // @termsOfService http://swagger.io/terms/ | +| contact.name | A informação de contacto para a API exposta.| // @contacto.name Suporte API | +| contact.url | O URL que aponta para as informações de contacto. DEVE estar no formato de um URL. | // @contact.url http://www.swagger.io/support| +| contact.email| O endereço de email da pessoa/organização de contacto. DEVE estar no formato de um endereço de correio electrónico.| // @contact.email support@swagger.io | +| license.name | **Obrigatório.** O nome da licença utilizada para a API.|// @licença.name Apache 2.0| +| license.url | Um URL para a licença utilizada para a API. DEVE estar no formato de um URL. | // @license.url http://www.apache.org/licenses/LICENSE-2.0.html | +| host | O anfitrião (nome ou ip) que serve o API. | // @host localhost:8080 | +| BasePath | O caminho de base sobre o qual o API é servido. | // @BasePath /api/v1 | +| accept | Uma lista de tipos de MIME que os APIs podem consumir. Note que accept só afecta operações com um organismo de pedido, tais como POST, PUT e PATCH. O valor DEVE ser o descrito em [Tipos de Mime](#mime-types). | // @accept json | +| produce | Uma lista de tipos de MIME que os APIs podem produce. O valor DEVE ser o descrito em [Tipos de Mime](#mime-types). | // @produce json | +| query.collection.format | O formato padrão de param de colecção(array) em query,enums:csv,multi,pipes,tsv,ssv. 
Se não definido, csv é o padrão.| // @query.collection.format multi +| schemes | O protocolo de transferência para a operação que separou por espaços. | // @schemes http https | +| externalDocs.description | Descrição do documento externo. | // @externalDocs.description OpenAPI | +| externalDocs.url | URL do documento externo. | // @externalDocs.url https://swagger.io/resources/open-api/ | +| x-name | A chave de extensão, deve ser iniciada por x- e tomar apenas o valor json | // @x-example-key {"chave": "valor"} | + +### Usando descrições de remarcação para baixo +Quando uma pequena sequência na sua documentação é insuficiente, ou precisa de imagens, exemplos de códigos e coisas do género, pode querer usar descrições de marcação. Para utilizar as descrições markdown, utilize as seguintes anotações. + +| anotação | descrição | exemplo | +|-------------|--------------------------------------------|---------------------------------| +| title | **Obrigatório.** O título da aplicação.| // @title Swagger Example API | +| version | **Obrigatório.** Fornece a versão da aplicação API.| // @versão 1.0 | +| description.markdown | Uma breve descrição da candidatura. Parsed a partir do ficheiro api.md. Esta é uma alternativa a @description |// @description.markdown Sem valor necessário, isto analisa a descrição do ficheiro api.md |. +| tag.name | Nome de uma tag.| // @tag.name Este é o nome da tag | +| tag.description.markdown | Descrição da tag esta é uma alternativa à tag.description. A descrição será lida a partir de um ficheiro nomeado como tagname.md | // @tag.description.markdown | + +## Operação API + +**Exemplo** +[celler/controller](https://github.com/swaggo/swag/tree/master/example/celler/controller) + +| anotação | descrição | +|-------------|----------------------------------------------------------------------------------------------------------------------------| +| descrição | Uma explicação verbosa do comportamento da operação. | +| description.markdown | Uma breve descrição da candidatura. A descrição será lida a partir de um ficheiro. Por exemplo, `@description.markdown details` irá carregar `details.md`| // @description.file endpoint.description.markdown | +| id | Um fio único utilizado para identificar a operação. Deve ser única entre todas as operações API. | +| tags | Uma lista de tags para cada operação API que separou por vírgulas. | +| summary | Um breve resumo do que a operação faz. | +| accept | Uma lista de tipos de MIME que os APIs podem consumir. Note que accept só afecta operações com um organismo de pedido, tais como POST, PUT e PATCH. O valor DEVE ser o descrito em [Tipos de Mime](#mime-types). | +| produce | Uma lista de tipos de MIME que os APIs podem produce. O valor DEVE ser o descrito em [Tipos de Mime](#mime-types). | +| param | Parâmetros que se separaram por espaços. `param name`,`param type`,`data type`,`is mandatory?`,`comment` `attribute(optional)` | +| security | [Segurança](#security) para cada operação API. | +| success | resposta de sucesso que separou por espaços. `return code or default`,`{param type}`,`data type`,`comment` |. +| failure | Resposta de falha que separou por espaços. `return code or default`,`{param type}`,`data type`,`comment` | +| response | Igual ao `sucesso` e `falha` | +| header | Cabeçalho em resposta que separou por espaços. `código de retorno`,`{tipo de parâmetro}`,`tipo de dados`,`comentário` |. +| router | Definição do caminho que separou por espaços. 
caminho",`path`,`[httpMethod]` |[httpMethod]` | +| x-name | A chave de extensão, deve ser iniciada por x- e tomar apenas o valor json. | +| x-codeSample | Optional Markdown use. tomar `file` como parâmetro. Isto irá então procurar um ficheiro nomeado como o resumo na pasta dada. | +| deprecated | Marcar o ponto final como depreciado. | + +## Mime Types + +`swag` aceita todos os tipos MIME que estão no formato correcto, ou seja, correspondem `*/*`. +Além disso, `swag` também aceita pseudónimos para alguns tipos de MIME, como se segue: + + +| Alias | MIME Type | +|-----------------------|-----------------------------------| +| json | application/json | +| xml | text/xml | +| plain | text/plain | +| html | text/html | +| mpfd | multipart/form-data | +| x-www-form-urlencoded | application/x-www-form-urlencoded | +| json-api | application/vnd.api+json | +| json-stream | application/x-json-stream | +| octet-stream | application/octet-stream | +| png | image/png | +| jpeg | image/jpeg | +| gif | image/gif | + + + +## Tipo de parâmetro + +- query +- path +- header +- body +- formData + +## Tipo de dados + +- string (string) +- integer (int, uint, uint32, uint64) +- number (float32) +- boolean (bool) +- file (param data type when uploading) +- user defined struct + +## Segurança +| anotação | descrição | parâmetros | exemplo | +|------------|-------------|------------|---------| +| securitydefinitions.basic | [Basic](https://swagger.io/docs/specification/2-0/authentication/basic-authentication/) auth. | | // @securityDefinitions.basicAuth | [Básico]() +| securitydefinitions.apikey | [chave API](https://swagger.io/docs/specification/2-0/authentication/api-keys/) auth. | in, name, description | // @securityDefinitions.apikey ApiKeyAuth | +| securitydefinitions.oauth2.application | [Aplicação OAuth2](https://swagger.io/docs/specification/authentication/oauth2/) auth. | tokenUrl, scope, description | // @securitydefinitions.oauth2.application OAuth2Application | +| securitydefinitions.oauth2.implicit | [OAuth2 implicit](https://swagger.io/docs/specification/authentication/oauth2/) auth. | authorizationUrl, scope, description | // @securitydefinitions.oauth2.implicit OAuth2Implicit | [OAuth2Implicit]() +| securitydefinitions.oauth2.password | [OAuth2 password](https://swagger.io/docs/specification/authentication/oauth2/) auth. | tokenUrl, scope, description | // @securitydefinitions.oauth2.password OAuth2Password | +| securitydefinitions.oauth2.accessCode | [código de acesso OAuth2](https://swagger.io/docs/specification/authentication/oauth2/) auth. 
| tokenUrl, authorizationUrl, scope, description | // @securitydefinitions.oauth2.accessCode OAuth2AccessCode | [código de acesso OAuth2.accessCode]() + + +| anotação de parâmetros | exemplo | +|---------------------------------|-------------------------------------------------------------------------| +| in | // @in header | +| name | // @name Authorization | +| tokenUrl | // @tokenUrl https://example.com/oauth/token | +| authorizationurl | // @authorizationurl https://example.com/oauth/authorize | +| scope.hoge | // @scope.write Grants write access | +| description | // @descrição OAuth protege os pontos finais da nossa entidade | + +## Atributo + +```go +// @Param enumstring query string false "string enums" Enums(A, B, C) +// @Param enumint query int false "int enums" Enums(1, 2, 3) +// @Param enumnumber query number false "int enums" Enums(1.1, 1.2, 1.3) +// @Param string query string false "string valid" minlength(5) maxlength(10) +// @Param int query int false "int valid" minimum(1) maximum(10) +// @Param default query string false "string default" default(A) +// @Param example query string false "string example" example(string) +// @Param collection query []string false "string collection" collectionFormat(multi) +// @Param extensions query []string false "string collection" extensions(x-example=test,x-nullable) +``` + +It also works for the struct fields: + +```go +type Foo struct { + Bar string `minLength:"4" maxLength:"16" example:"random string"` + Baz int `minimum:"10" maximum:"20" default:"15"` + Qux []string `enums:"foo,bar,baz"` +} +``` + +### Disponível + +Nome do campo | Tipo | Descrição +---|:---:|--- +validate | `string` | Determina a validação para o parâmetro. Os valores possíveis são: `required,optional`. +default | * | Declara o valor do parâmetro que o servidor utilizará se nenhum for fornecido, por exemplo, uma "contagem" para controlar o número de resultados por página poderá ser por defeito de 100 se não for fornecido pelo cliente no pedido. (Nota: "por defeito" não tem significado para os parâmetros requeridos). +See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-6.2. Ao contrário do esquema JSON, este valor DEVE estar em conformidade com o definido [`type`](#parameterType) para este parâmetro. +maximum | `number` | Ver https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.1.2. +minimum | `number` | Ver https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.1.3. +multipleOf | `number` | Ver https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.1.1. +maxLength | `integer` | Ver https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.2.1. +minLength | `integer` | Ver https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.2.2. +enums | [\*] | Ver https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.5.1. +format | `string` | O formato de extensão para o anteriormente mencionado [`type`](#parameterType). Ver [Data Type Formats](https://swagger.io/specification/v2/#dataTypeFormat) para mais detalhes. +collectionFormat | `string` |Determina o formato da matriz se for utilizada uma matriz de tipos. Os valores possíveis são:
  • `csv` - valores separados por vírgulas `foo,bar`.
  • `ssv` - valores separados por espaço `foo bar`.
  • `tsv` - valores separados por tabulação `foo\tbar`.
  • `pipes` - valores separados por tubo `foo|bar`.
  • `multi` - corresponde a múltiplas instâncias de parâmetros em vez de múltiplos valores para uma única instância `foo=bar&foo=baz`. This is valid only for parameters [`in`](#parameterIn) "query" or "formData".
Default value is `csv`. +example | * | Declara o exemplo para o valor do parâmetro +extensions | `string` | Acrescentar extensão aos parâmetros. + +### Futuro + +Nome do campo | Tipo | Description +---|:---:|--- +pattern | `string` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.2.3. +maxItems | `integer` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.3.2. +minItems | `integer` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.3.3. +uniqueItems | `boolean` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.3.4. + +## Exemplos + + +### Descrições em múltiplas linhas + +É possível acrescentar descrições que abranjam várias linhas tanto na descrição geral da api como em definições de rotas como esta: + +```go +// @description This is the first line +// @description This is the second line +// @description And so forth. +``` + +### Estrutura definida pelo utilizador com um tipo de matriz + +```go +// @Success 200 {array} model.Account <-- This is a user defined struct. +``` + +```go +package model + +type Account struct { + ID int `json:"id" example:"1"` + Name string `json:"name" example:"account name"` +} +``` + + +### Declaração de estruturação de funções + +Pode declarar as estruturas de resposta do seu pedido dentro de um corpo funcional. +Deve ter de seguir a convenção de nomeação +`.. `. + +```go +package main + +// @Param request body main.MyHandler.request true "query params" +// @Success 200 {object} main.MyHandler.response +// @Router /test [post] +func MyHandler() { + type request struct { + RequestField string + } + + type response struct { + ResponseField string + } +} +``` + + +### Composição do modelo em resposta +```go +// JSONResult's data field will be overridden by the specific type proto.Order +@success 200 {object} jsonresult.JSONResult{data=proto.Order} "desc" +``` + +```go +type JSONResult struct { + Code int `json:"code" ` + Message string `json:"message"` + Data interface{} `json:"data"` +} + +type Order struct { //in `proto` package + Id uint `json:"id"` + Data interface{} `json:"data"` +} +``` + +- também suportam uma variedade de objectos e tipos primitivos como resposta aninhada +```go +@success 200 {object} jsonresult.JSONResult{data=[]proto.Order} "desc" +@success 200 {object} jsonresult.JSONResult{data=string} "desc" +@success 200 {object} jsonresult.JSONResult{data=[]string} "desc" +``` + +- campos múltiplos que se sobrepõem. campo será adicionado se não existir +```go +@success 200 {object} jsonresult.JSONResult{data1=string,data2=[]string,data3=proto.Order,data4=[]proto.Order} "desc" +``` +- overriding deep-level fields +```go +type DeepObject struct { //in `proto` package + ... +} +@success 200 {object} jsonresult.JSONResult{data1=proto.Order{data=proto.DeepObject},data2=[]proto.Order{data=[]proto.DeepObject}} "desc" +``` + +### Adicionar um cabeçalho em resposta + +```go +// @Success 200 {string} string "ok" +// @failure 400 {string} string "error" +// @response default {string} string "other error" +// @Header 200 {string} Location "/entity/1" +// @Header 200,400,default {string} Token "token" +// @Header all {string} Token2 "token2" +``` + + +### Utilizar parâmetros de caminhos múltiplos + +```go +/// ... +// @Param group_id path int true "Group ID" +// @Param account_id path int true "Account ID" +// ... +// @Router /examples/groups/{group_id}/accounts/{account_id} [get] +``` + +### Adicionar múltiplos caminhos + +```go +/// ... 
+// @Param group_id path int true "Group ID" +// @Param user_id path int true "User ID" +// ... +// @Router /examples/groups/{group_id}/user/{user_id}/address [put] +// @Router /examples/user/{user_id}/address [put] +``` + +### Exemplo de valor de estrutura + +```go +type Account struct { + ID int `json:"id" example:"1"` + Name string `json:"name" example:"account name"` + PhotoUrls []string `json:"photo_urls" example:"http://test/image/1.jpg,http://test/image/2.jpg"` +} +``` + +### Schema Exemplo do corpo + +```go +// @Param email body string true "message/rfc822" SchemaExample(Subject: Testmail\r\n\r\nBody Message\r\n) +``` + +### Descrição da estrutura + +```go +// Account model info +// @Description User account information +// @Description with user id and username +type Account struct { + // ID this is userid + ID int `json:"id"` + Name string `json:"name"` // This is Name +} +``` + +[#708](https://github.com/swaggo/swag/issues/708) O analisador trata apenas de comentários estruturais a partir de `@Description` attribute. + +Assim, gerou o doc. de swagger como se segue: +```json +"Account": { + "type":"object", + "description": "User account information with user id and username" + "properties": { + "id": { + "type": "integer", + "description": "ID this is userid" + }, + "name": { + "type":"string", + "description": "This is Name" + } + } +} +``` + +### Usar etiqueta do tipo swaggertype para suportar o tipo personalizado +[#201](https://github.com/swaggo/swag/issues/201#issuecomment-475479409) + +```go +type TimestampTime struct { + time.Time +} + +///implement encoding.JSON.Marshaler interface +func (t *TimestampTime) MarshalJSON() ([]byte, error) { + bin := make([]byte, 16) + bin = strconv.AppendInt(bin[:0], t.Time.Unix(), 10) + return bin, nil +} + +func (t *TimestampTime) UnmarshalJSON(bin []byte) error { + v, err := strconv.ParseInt(string(bin), 10, 64) + if err != nil { + return err + } + t.Time = time.Unix(v, 0) + return nil +} +/// + +type Account struct { + // Override primitive type by simply specifying it via `swaggertype` tag + ID sql.NullInt64 `json:"id" swaggertype:"integer"` + + // Override struct type to a primitive type 'integer' by specifying it via `swaggertype` tag + RegisterTime TimestampTime `json:"register_time" swaggertype:"primitive,integer"` + + // Array types can be overridden using "array," format + Coeffs []big.Float `json:"coeffs" swaggertype:"array,number"` +} +``` + +[#379](https://github.com/swaggo/swag/issues/379) +```go +type CerticateKeyPair struct { + Crt []byte `json:"crt" swaggertype:"string" format:"base64" example:"U3dhZ2dlciByb2Nrcw=="` + Key []byte `json:"key" swaggertype:"string" format:"base64" example:"U3dhZ2dlciByb2Nrcw=="` +} +``` +generated swagger doc as follows: +```go +"api.MyBinding": { + "type":"object", + "properties":{ + "crt":{ + "type":"string", + "format":"base64", + "example":"U3dhZ2dlciByb2Nrcw==" + }, + "key":{ + "type":"string", + "format":"base64", + "example":"U3dhZ2dlciByb2Nrcw==" + } + } +} + +``` + +### Utilizar anulações globais para suportar um tipo personalizado + +Se estiver a utilizar ficheiros gerados, as etiquetas [`swaggertype`](#use-swaggertype-tag-to-supported-custom-type) ou `swaggerignore` podem não ser possíveis. + +Ao passar um mapeamento para swag com `--overridesFile` pode dizer swag para utilizar um tipo no lugar de outro onde quer que apareça. Por defeito, se um ficheiro `.swaggo` estiver presente no directório actual, será utilizado. 
+ +Go code: +```go +type MyStruct struct { + ID sql.NullInt64 `json:"id"` + Name sql.NullString `json:"name"` +} +``` + +`.swaggo`: +``` +// Substituir todos os NullInt64 por int +replace database/sql.NullInt64 int + +// Não inclua quaisquer campos do tipo base de database/sql. +NullString no swagger docs +skip database/sql.NullString +``` + +As directivas possíveis são comentários (começando por `//`), `replace path/to/a.type path/to/b.type`, e `skip path/to/a.type`. + +(Note que os caminhos completos para qualquer tipo nomeado devem ser fornecidos para evitar problemas quando vários pacotes definem um tipo com o mesmo nome) + +Entregue em: +```go +"types.MyStruct": { + "id": "integer" +} + +### Use swaggerignore tag para excluir um campo + +```go +type Account struct { + ID string `json:"id"` + Name string `json:"name"` + Ignored int `swaggerignore:"true"` +} +``` + + +### Adicionar informações de extensão ao campo de estruturação + +```go +type Account struct { + ID string `json:"id" extensions:"x-nullable,x-abc=def,!x-omitempty"` // extensions fields must start with "x-" +} +``` + +gerar doc. de swagger como se segue: + +```go +"Account": { + "type": "object", + "properties": { + "id": { + "type": "string", + "x-nullable": true, + "x-abc": "def", + "x-omitempty": false + } + } +} +``` + + +### Renomear modelo a expor + +```golang +type Resp struct { + Code int +}//@name Response +``` + +### Como utilizar as anotações de segurança + +Informações API gerais. + +```go +// @securityDefinitions.basic BasicAuth + +// @securitydefinitions.oauth2.application OAuth2Application +// @tokenUrl https://example.com/oauth/token +// @scope.write Grants write access +// @scope.admin Grants read and write access to administrative information +``` + +Cada operação API. + +```go +// @Security ApiKeyAuth +``` + +Faça-o AND condicione-o + +```go +// @Security ApiKeyAuth +// @Security OAuth2Application[write, admin] +``` + +Faça-o OR condição + +```go +// @Security ApiKeyAuth || firebase +// @Security OAuth2Application[write, admin] || APIKeyAuth +``` + + + +### Adicionar uma descrição para enumerar artigos + +```go +type Example struct { + // Sort order: + // * asc - Ascending, from A to Z. + // * desc - Descending, from Z to A. + Order string `enums:"asc,desc"` +} +``` + +### Gerar apenas tipos de ficheiros de documentos específicos + +Por defeito, o comando `swag` gera especificação Swagger em três tipos diferentes de ficheiros/arquivos: +- docs.go +- swagger.json +- swagger.yaml + +Se desejar limitar um conjunto de tipos de ficheiros que devem ser gerados pode utilizar a bandeira `--outputTypes` (short `-ot`). O valor por defeito é `go,json,yaml` - tipos de saída separados por vírgula. Para limitar a saída apenas a ficheiros `go` e `yaml`, escrever-se-ia `go,yaml'. Com comando completo que seria `swag init --outputTypes go,yaml`. 
+ +### Como usar tipos genéricos + +```go +// @Success 200 {object} web.GenericNestedResponse[types.Post] +// @Success 204 {object} web.GenericNestedResponse[types.Post, Types.AnotherOne] +// @Success 201 {object} web.GenericNestedResponse[web.GenericInnerType[types.Post]] +func GetPosts(w http.ResponseWriter, r *http.Request) { + _ = web.GenericNestedResponse[types.Post]{} +} +``` +Para mais detalhes e outros exemplos, veja [esse arquivo](https://github.com/swaggo/swag/blob/master/testdata/generics_nested/api/api.go) + +### Alterar os delimitadores de acção padrão Go Template +[#980](https://github.com/swaggo/swag/issues/980) +[#1177](https://github.com/swaggo/swag/issues/1177) + +Se as suas anotações ou campos estruturantes contêm "{{" or "}}", a geração de modelos irá muito provavelmente falhar, uma vez que estes são os delimitadores por defeito para [go templates](https://pkg.go.dev/text/template#Template.Delims). + +Para que a geração funcione correctamente, pode alterar os delimitadores por defeito com `-td'. Por exemplo: +``console +swag init -g http/api.go -td "[[,]" +``` + +O novo delimitador é um fio com o formato "``,``". + +## Sobre o projecto +Este projecto foi inspirado por [yvasiyarov/swagger](https://github.com/yvasiyarov/swagger) mas simplificámos a utilização e acrescentámos apoio a uma variedade de [frameworks web](#estruturas-web-suportadas). A fonte de imagem Gopher é [tenntenn/gopher-stickers](https://github.com/tenntenn/gopher-stickers). Tem licenças [creative commons licensing](http://creativecommons.org/licenses/by/3.0/deed.en). + +## Contribuidores + +Este projecto existe graças a todas as pessoas que contribuem. [[Contribute](CONTRIBUTING.md)]. + + + +## Apoios + +Obrigado a todos os nossos apoiantes! 🙏 [[Become a backer](https://opencollective.com/swag#backer)] + + + + +## Patrocinadores + +Apoiar este projecto tornando-se um patrocinador. O seu logótipo aparecerá aqui com um link para o seu website. 
[[Become a sponsor](https://opencollective.com/swag#sponsor)] + + + + + + + + + + + + + + +## Licença +[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fswaggo%2Fswag.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2Fswaggo%2Fswag?ref=badge_large) diff --git a/vendor/github.com/swaggo/swag/README_zh-CN.md b/vendor/github.com/swaggo/swag/README_zh-CN.md new file mode 100644 index 0000000..87d600b --- /dev/null +++ b/vendor/github.com/swaggo/swag/README_zh-CN.md @@ -0,0 +1,770 @@ +# swag + +🌍 *[English](README.md) ∙ [简体中文](README_zh-CN.md)* + + + +[![Travis Status](https://img.shields.io/travis/swaggo/swag/master.svg)](https://travis-ci.org/swaggo/swag) +[![Coverage Status](https://img.shields.io/codecov/c/github/swaggo/swag/master.svg)](https://codecov.io/gh/swaggo/swag) +[![Go Report Card](https://goreportcard.com/badge/github.com/swaggo/swag)](https://goreportcard.com/report/github.com/swaggo/swag) +[![codebeat badge](https://codebeat.co/badges/71e2f5e5-9e6b-405d-baf9-7cc8b5037330)](https://codebeat.co/projects/github-com-swaggo-swag-master) +[![Go Doc](https://godoc.org/github.com/swaggo/swagg?status.svg)](https://godoc.org/github.com/swaggo/swag) +[![Backers on Open Collective](https://opencollective.com/swag/backers/badge.svg)](#backers) +[![Sponsors on Open Collective](https://opencollective.com/swag/sponsors/badge.svg)](#sponsors) [![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fswaggo%2Fswag.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2Fswaggo%2Fswag?ref=badge_shield) +[![Release](https://img.shields.io/github/release/swaggo/swag.svg?style=flat-square)](https://github.com/swaggo/swag/releases) + +Swag将Go的注释转换为Swagger2.0文档。我们为流行的 [Go Web Framework](#支持的Web框架) 创建了各种插件,这样可以与现有Go项目快速集成(使用Swagger UI)。 + +## 目录 + +- [快速开始](#快速开始) +- [支持的Web框架](#支持的web框架) +- [如何与Gin集成](#如何与gin集成) +- [格式化说明](#格式化说明) +- [开发现状](#开发现状) +- [声明式注释格式](#声明式注释格式) + - [通用API信息](#通用api信息) + - [API操作](#api操作) + - [安全性](#安全性) +- [样例](#样例) + - [多行的描述](#多行的描述) + - [用户自定义的具有数组类型的结构](#用户自定义的具有数组类型的结构) + - [响应对象中的模型组合](#响应对象中的模型组合) + - [在响应中增加头字段](#在响应中增加头字段) + - [使用多路径参数](#使用多路径参数) + - [结构体的示例值](#结构体的示例值) + - [结构体描述](#结构体描述) + - [使用`swaggertype`标签更改字段类型](#使用`swaggertype`标签更改字段类型) + - [使用`swaggerignore`标签排除字段](#使用swaggerignore标签排除字段) + - [将扩展信息添加到结构字段](#将扩展信息添加到结构字段) + - [对展示的模型重命名](#对展示的模型重命名) + - [如何使用安全性注释](#如何使用安全性注释) +- [项目相关](#项目相关) + +## 快速开始 + +1. 将注释添加到API源代码中,请参阅声明性注释格式。 +2. 使用如下命令下载swag: + +```bash +go install github.com/swaggo/swag/cmd/swag@latest +``` + +从源码开始构建的话,需要有Go环境(1.18及以上版本)。 + +或者从github的release页面下载预编译好的二进制文件。 + +3. 在包含`main.go`文件的项目根目录运行`swag init`。这将会解析注释并生成需要的文件(`docs`文件夹和`docs/docs.go`)。 + +```bash +swag init +``` + +确保导入了生成的`docs/docs.go`文件,这样特定的配置文件才会被初始化。如果通用API注释没有写在`main.go`中,可以使用`-g`标识符来告知swag。 + +```bash +swag init -g http/api.go +``` + +4. (可选) 使用`fmt`格式化 SWAG 注释。(请先升级到最新版本) + +```bash +swag fmt +``` + +## swag cli + +```bash +swag init -h +NAME: + swag init - Create docs.go + +USAGE: + swag init [command options] [arguments...] 
+ +OPTIONS: + --generalInfo value, -g value API通用信息所在的go源文件路径,如果是相对路径则基于API解析目录 (默认: "main.go") + --dir value, -d value API解析目录 (默认: "./") + --exclude value 解析扫描时排除的目录,多个目录可用逗号分隔(默认:空) + --propertyStrategy value, -p value 结构体字段命名规则,三种:snakecase,camelcase,pascalcase (默认: "camelcase") + --output value, -o value 文件(swagger.json, swagger.yaml and doc.go)输出目录 (默认: "./docs") + --parseVendor 是否解析vendor目录里的go源文件,默认不 + --parseDependency 是否解析依赖目录中的go源文件,默认不 + --markdownFiles value, --md value 指定API的描述信息所使用的markdown文件所在的目录 + --generatedTime 是否输出时间到输出文件docs.go的顶部,默认是 + --codeExampleFiles value, --cef value 解析包含用于 x-codeSamples 扩展的代码示例文件的文件夹,默认禁用 + --parseInternal 解析 internal 包中的go文件,默认禁用 + --parseDepth value 依赖解析深度 (默认: 100) + --instanceName value 设置文档实例名 (默认: "swagger") +``` + +```bash +swag fmt -h +NAME: + swag fmt - format swag comments + +USAGE: + swag fmt [command options] [arguments...] + +OPTIONS: + --dir value, -d value API解析目录 (默认: "./") + --exclude value 解析扫描时排除的目录,多个目录可用逗号分隔(默认:空) + --generalInfo value, -g value API通用信息所在的go源文件路径,如果是相对路径则基于API解析目录 (默认: "main.go") + --help, -h show help (default: false) + +``` + +## 支持的Web框架 + +- [gin](http://github.com/swaggo/gin-swagger) +- [echo](http://github.com/swaggo/echo-swagger) +- [buffalo](https://github.com/swaggo/buffalo-swagger) +- [net/http](https://github.com/swaggo/http-swagger) +- [gorilla/mux](https://github.com/swaggo/http-swagger) +- [go-chi/chi](https://github.com/swaggo/http-swagger) +- [flamingo](https://github.com/i-love-flamingo/swagger) +- [fiber](https://github.com/gofiber/swagger) +- [atreugo](https://github.com/Nerzal/atreugo-swagger) +- [hertz](https://github.com/hertz-contrib/swagger) + +## 如何与Gin集成 + +[点击此处](https://github.com/swaggo/swag/tree/master/example/celler)查看示例源代码。 + +1. 使用`swag init`生成Swagger2.0文档后,导入如下代码包: + +```go +import "github.com/swaggo/gin-swagger" // gin-swagger middleware +import "github.com/swaggo/files" // swagger embed files +``` + +2. 在`main.go`源代码中添加通用的API注释: + +```go +// @title Swagger Example API +// @version 1.0 +// @description This is a sample server celler server. +// @termsOfService http://swagger.io/terms/ + +// @contact.name API Support +// @contact.url http://www.swagger.io/support +// @contact.email support@swagger.io + +// @license.name Apache 2.0 +// @license.url http://www.apache.org/licenses/LICENSE-2.0.html + +// @host localhost:8080 +// @BasePath /api/v1 + +// @securityDefinitions.basic BasicAuth + +// @externalDocs.description OpenAPI +// @externalDocs.url https://swagger.io/resources/open-api/ +func main() { + r := gin.Default() + + c := controller.NewController() + + v1 := r.Group("/api/v1") + { + accounts := v1.Group("/accounts") + { + accounts.GET(":id", c.ShowAccount) + accounts.GET("", c.ListAccounts) + accounts.POST("", c.AddAccount) + accounts.DELETE(":id", c.DeleteAccount) + accounts.PATCH(":id", c.UpdateAccount) + accounts.POST(":id/images", c.UploadAccountImage) + } + //... + } + r.GET("/swagger/*any", ginSwagger.WrapHandler(swaggerFiles.Handler)) + r.Run(":8080") +} +//... +``` + +此外,可以动态设置一些通用的API信息。生成的代码包`docs`导出`SwaggerInfo`变量,使用该变量可以通过编码的方式设置标题、描述、版本、主机和基础路径。使用Gin的示例: + +```go +package main + +import ( + "github.com/gin-gonic/gin" + "github.com/swaggo/files" + "github.com/swaggo/gin-swagger" + + "./docs" // docs is generated by Swag CLI, you have to import it. 
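+	// 注:在实际的 Go Modules 项目中,通常以空白导入的方式引入生成的 docs 包(例如 _ "your-module/docs",此处模块路径仅作示意)。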
+) + +// @contact.name API Support +// @contact.url http://www.swagger.io/support +// @contact.email support@swagger.io + +// @license.name Apache 2.0 +// @license.url http://www.apache.org/licenses/LICENSE-2.0.html +func main() { + + // programatically set swagger info + docs.SwaggerInfo.Title = "Swagger Example API" + docs.SwaggerInfo.Description = "This is a sample server Petstore server." + docs.SwaggerInfo.Version = "1.0" + docs.SwaggerInfo.Host = "petstore.swagger.io" + docs.SwaggerInfo.BasePath = "/v2" + docs.SwaggerInfo.Schemes = []string{"http", "https"} + + r := gin.New() + + // use ginSwagger middleware to serve the API docs + r.GET("/swagger/*any", ginSwagger.WrapHandler(swaggerFiles.Handler)) + + r.Run() +} +``` + +3. 在`controller`代码中添加API操作注释: + +```go +package controller + +import ( + "fmt" + "net/http" + "strconv" + + "github.com/gin-gonic/gin" + "github.com/swaggo/swag/example/celler/httputil" + "github.com/swaggo/swag/example/celler/model" +) + +// ShowAccount godoc +// @Summary Show an account +// @Description get string by ID +// @Tags accounts +// @Accept json +// @Produce json +// @Param id path int true "Account ID" +// @Success 200 {object} model.Account +// @Failure 400 {object} httputil.HTTPError +// @Failure 404 {object} httputil.HTTPError +// @Failure 500 {object} httputil.HTTPError +// @Router /accounts/{id} [get] +func (c *Controller) ShowAccount(ctx *gin.Context) { + id := ctx.Param("id") + aid, err := strconv.Atoi(id) + if err != nil { + httputil.NewError(ctx, http.StatusBadRequest, err) + return + } + account, err := model.AccountOne(aid) + if err != nil { + httputil.NewError(ctx, http.StatusNotFound, err) + return + } + ctx.JSON(http.StatusOK, account) +} + +// ListAccounts godoc +// @Summary List accounts +// @Description get accounts +// @Tags accounts +// @Accept json +// @Produce json +// @Param q query string false "name search by q" Format(email) +// @Success 200 {array} model.Account +// @Failure 400 {object} httputil.HTTPError +// @Failure 404 {object} httputil.HTTPError +// @Failure 500 {object} httputil.HTTPError +// @Router /accounts [get] +func (c *Controller) ListAccounts(ctx *gin.Context) { + q := ctx.Request.URL.Query().Get("q") + accounts, err := model.AccountsAll(q) + if err != nil { + httputil.NewError(ctx, http.StatusNotFound, err) + return + } + ctx.JSON(http.StatusOK, accounts) +} +//... +``` + +```bash +swag init +``` + +4. 运行程序,然后在浏览器中访问 http://localhost:8080/swagger/index.html 。将看到Swagger 2.0 Api文档,如下所示: + +![swagger_index.html](https://raw.githubusercontent.com/swaggo/swag/master/assets/swagger-image.png) + +## 格式化说明 + +可以针对Swag的注释自动格式化,就像`go fmt`。 +此处查看格式化结果 [here](https://github.com/swaggo/swag/tree/master/example/celler). 
+ +示例: +```shell +swag fmt +``` + +排除目录(不扫描)示例: +```shell +swag fmt -d ./ --exclude ./internal +``` + +## 开发现状 + +[Swagger 2.0 文档](https://swagger.io/docs/specification/2-0/basic-structure/) + +- [x] Basic Structure +- [x] API Host and Base Path +- [x] Paths and Operations +- [x] Describing Parameters +- [x] Describing Request Body +- [x] Describing Responses +- [x] MIME Types +- [x] Authentication + - [x] Basic Authentication + - [x] API Keys +- [x] Adding Examples +- [x] File Upload +- [x] Enums +- [x] Grouping Operations With Tags +- [ ] Swagger Extensions + +## 声明式注释格式 + +## 通用API信息 + +**示例** [`celler/main.go`](https://github.com/swaggo/swag/blob/master/example/celler/main.go) + +| 注释 | 说明 | 示例 | +| ----------------------- | ----------------------------------------------------------------------------------------------- | --------------------------------------------------------------- | +| title | **必填** 应用程序的名称。 | // @title Swagger Example API | +| version | **必填** 提供应用程序API的版本。 | // @version 1.0 | +| description | 应用程序的简短描述。 | // @description This is a sample server celler server. | +| tag.name | 标签的名称。 | // @tag.name This is the name of the tag | +| tag.description | 标签的描述。 | // @tag.description Cool Description | +| tag.docs.url | 标签的外部文档的URL。 | // @tag.docs.url https://example.com | +| tag.docs.description | 标签的外部文档说明。 | // @tag.docs.description Best example documentation | +| termsOfService | API的服务条款。 | // @termsOfService http://swagger.io/terms/ | +| contact.name | 公开的API的联系信息。 | // @contact.name API Support | +| contact.url | 联系信息的URL。 必须采用网址格式。 | // @contact.url http://www.swagger.io/support | +| contact.email | 联系人/组织的电子邮件地址。 必须采用电子邮件地址的格式。 | // @contact.email support@swagger.io | +| license.name | **必填** 用于API的许可证名称。 | // @license.name Apache 2.0 | +| license.url | 用于API的许可证的URL。 必须采用网址格式。 | // @license.url http://www.apache.org/licenses/LICENSE-2.0.html | +| host | 运行API的主机(主机名或IP地址)。 | // @host localhost:8080 | +| BasePath | 运行API的基本路径。 | // @BasePath /api/v1 | +| accept | API 可以使用的 MIME 类型列表。 请注意,Accept 仅影响具有请求正文的操作,例如 POST、PUT 和 PATCH。 值必须如“[Mime类型](#mime类型)”中所述。 | // @accept json | +| produce | API可以生成的MIME类型的列表。值必须如“[Mime类型](#mime类型)”中所述。 | // @produce json | +| query.collection.format | 请求URI query里数组参数的默认格式:csv,multi,pipes,tsv,ssv。 如果未设置,则默认为csv。 | // @query.collection.format multi | +| schemes | 用空格分隔的请求的传输协议。 | // @schemes http https | +| externalDocs.description | Description of the external document. | // @externalDocs.description OpenAPI | +| externalDocs.url | URL of the external document. 
| // @externalDocs.url https://swagger.io/resources/open-api/ | +| x-name | 扩展的键必须以x-开头,并且只能使用json值 | // @x-example-key {"key": "value"} | + +### 使用Markdown描述 + +如果文档中的短字符串不足以完整表达,或者需要展示图片,代码示例等类似的内容,则可能需要使用Markdown描述。要使用Markdown描述,请使用一下注释。 + +| 注释 | 说明 | 示例 | +| ------------------------ | ------------------------------------------------------------------------------------ | --------------------------------------------------------------------------------- | +| title | **必填** 应用程序的名称。 | // @title Swagger Example API | +| version | **必填** 提供应用程序API的版本。 | // @version 1.0 | +| description.markdown | 应用程序的简短描述。 从`api.md`文件中解析。 这是`@description`的替代用法。 | // @description.markdown No value needed, this parses the description from api.md | +| tag.name | 标签的名称。 | // @tag.name This is the name of the tag | +| tag.description.markdown | 标签说明,这是`tag.description`的替代用法。 该描述将从名为`tagname.md的`文件中读取。 | // @tag.description.markdown | + +## API操作 + +Example [celler/controller](https://github.com/swaggo/swag/tree/master/example/celler/controller) + +| 注释 | 描述 | +|----------------------|------------------------------------------------------------------------------------------------| +| description | 操作行为的详细说明。 | +| description.markdown | 应用程序的简短描述。该描述将从名为`endpointname.md`的文件中读取。 | +| id | 用于标识操作的唯一字符串。在所有API操作中必须唯一。 | +| tags | 每个API操作的标签列表,以逗号分隔。 | +| summary | 该操作的简短摘要。 | +| accept | API 可以使用的 MIME 类型列表。 请注意,Accept 仅影响具有请求正文的操作,例如 POST、PUT 和 PATCH。 值必须如“[Mime类型](#mime类型)”中所述。 | +| produce | API可以生成的MIME类型的列表。值必须如“[Mime类型](#mime类型)”中所述。 | +| param | 用空格分隔的参数。`param name`,`param type`,`data type`,`is mandatory?`,`comment` `attribute(optional)` | +| security | 每个API操作的[安全性](#安全性)。 | +| success | 以空格分隔的成功响应。`return code`,`{param type}`,`data type`,`comment` | +| failure | 以空格分隔的故障响应。`return code`,`{param type}`,`data type`,`comment` | +| response | 与success、failure作用相同 | +| header | 以空格分隔的头字段。 `return code`,`{param type}`,`data type`,`comment` | +| router | 以空格分隔的路径定义。 `path`,`[httpMethod]` | +| deprecatedrouter | 与router相同,但是是deprecated的。 | +| x-name | 扩展字段必须以`x-`开头,并且只能使用json值。 | +| deprecated | 将当前API操作的所有路径设置为deprecated | + +## Mime类型 + +`swag` 接受所有格式正确的MIME类型, 即使匹配 `*/*`。除此之外,`swag`还接受某些MIME类型的别名,如下所示: + +| Alias | MIME Type | +| --------------------- | --------------------------------- | +| json | application/json | +| xml | text/xml | +| plain | text/plain | +| html | text/html | +| mpfd | multipart/form-data | +| x-www-form-urlencoded | application/x-www-form-urlencoded | +| json-api | application/vnd.api+json | +| json-stream | application/x-json-stream | +| octet-stream | application/octet-stream | +| png | image/png | +| jpeg | image/jpeg | +| gif | image/gif | + +## 参数类型 + +- query +- path +- header +- body +- formData + +## 数据类型 + +- string (string) +- integer (int, uint, uint32, uint64) +- number (float32) +- boolean (bool) +- user defined struct + +## 安全性 + +| 注释 | 描述 | 参数 | 示例 | +| -------------------------------------- | --------------------------------------------------------------------------------------------- | --------------------------------- | ------------------------------------------------------------ | +| securitydefinitions.basic | [Basic](https://swagger.io/docs/specification/2-0/authentication/basic-authentication/) auth. | | // @securityDefinitions.basic BasicAuth | +| securitydefinitions.apikey | [API key](https://swagger.io/docs/specification/2-0/authentication/api-keys/) auth. 
| in, name | // @securityDefinitions.apikey ApiKeyAuth | +| securitydefinitions.oauth2.application | [OAuth2 application](https://swagger.io/docs/specification/authentication/oauth2/) auth. | tokenUrl, scope | // @securitydefinitions.oauth2.application OAuth2Application | +| securitydefinitions.oauth2.implicit | [OAuth2 implicit](https://swagger.io/docs/specification/authentication/oauth2/) auth. | authorizationUrl, scope | // @securitydefinitions.oauth2.implicit OAuth2Implicit | +| securitydefinitions.oauth2.password | [OAuth2 password](https://swagger.io/docs/specification/authentication/oauth2/) auth. | tokenUrl, scope | // @securitydefinitions.oauth2.password OAuth2Password | +| securitydefinitions.oauth2.accessCode | [OAuth2 access code](https://swagger.io/docs/specification/authentication/oauth2/) auth. | tokenUrl, authorizationUrl, scope | // @securitydefinitions.oauth2.accessCode OAuth2AccessCode | + +| 参数注释 | 示例 | +| ---------------- | -------------------------------------------------------- | +| in | // @in header | +| name | // @name Authorization | +| tokenUrl | // @tokenUrl https://example.com/oauth/token | +| authorizationurl | // @authorizationurl https://example.com/oauth/authorize | +| scope.hoge | // @scope.write Grants write access | + +## 属性 + +```go +// @Param enumstring query string false "string enums" Enums(A, B, C) +// @Param enumint query int false "int enums" Enums(1, 2, 3) +// @Param enumnumber query number false "int enums" Enums(1.1, 1.2, 1.3) +// @Param string query string false "string valid" minlength(5) maxlength(10) +// @Param int query int false "int valid" minimum(1) maximum(10) +// @Param default query string false "string default" default(A) +// @Param collection query []string false "string collection" collectionFormat(multi) +// @Param extensions query []string false "string collection" extensions(x-example=test,x-nullable) +``` + +也适用于结构体字段: + +```go +type Foo struct { + Bar string `minLength:"4" maxLength:"16"` + Baz int `minimum:"10" maximum:"20" default:"15"` + Qux []string `enums:"foo,bar,baz"` +} +``` + +### 当前可用的 + +| 字段名 | 类型 | 描述 | +| ---------------- | --------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| default | * | 声明如果未提供任何参数,则服务器将使用的默认参数值,例如,如果请求中的客户端未提供该参数,则用于控制每页结果数的“计数”可能默认为100。 (注意:“default”对于必需的参数没有意义)。参看 https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-6.2。 与JSON模式不同,此值务必符合此参数的定义[类型](#parameterType)。 | +| maximum | `number` | 参看 https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.1.2. | +| minimum | `number` | 参看 https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.1.3. | +| maxLength | `integer` | 参看 https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.2.1. | +| minLength | `integer` | 参看 https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.2.2. | +| enums | [\*] | 参看 https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.5.1. | +| format | `string` | 上面提到的[类型](#parameterType)的扩展格式。有关更多详细信息,请参见[数据类型格式](https://swagger.io/specification/v2/#dataTypeFormat)。 | +| collectionFormat | `string` | 指定query数组参数的格式。 可能的值为:
  • `csv` - 逗号分隔值 `foo,bar`.
  • `ssv` - 空格分隔值 `foo bar`.
  • `tsv` - 制表符分隔值 `foo\tbar`.
  • `pipes` - 管道符分隔值 `foo|bar`.
  • `multi` - 对应于多个参数实例,而不是单个实例 `foo=bar&foo=baz` 的多个值。这仅对“`query`”或“`formData`”中的参数有效。
默认值是 `csv`。 | + +### 进一步的 + +| 字段名 | 类型 | 描述 | +| ----------- | :-------: | ---------------------------------------------------------------------------------- | +| multipleOf | `number` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.1.1. | +| pattern | `string` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.2.3. | +| maxItems | `integer` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.3.2. | +| minItems | `integer` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.3.3. | +| uniqueItems | `boolean` | See https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-5.3.4. | + +## 样例 + +### 多行的描述 + +可以在常规api描述或路由定义中添加跨越多行的描述,如下所示: + +```go +// @description This is the first line +// @description This is the second line +// @description And so forth. +``` + +### 用户自定义的具有数组类型的结构 + +```go +// @Success 200 {array} model.Account <-- This is a user defined struct. +``` + +```go +package model + +type Account struct { + ID int `json:"id" example:"1"` + Name string `json:"name" example:"account name"` +} +``` + +### 响应对象中的模型组合 + +```go +// JSONResult的data字段类型将被proto.Order类型替换 +@success 200 {object} jsonresult.JSONResult{data=proto.Order} "desc" +``` + +```go +type JSONResult struct { + Code int `json:"code" ` + Message string `json:"message"` + Data interface{} `json:"data"` +} + +type Order struct { //in `proto` package + ... +} +``` + +- 还支持对象数组和原始类型作为嵌套响应 + +```go +@success 200 {object} jsonresult.JSONResult{data=[]proto.Order} "desc" +@success 200 {object} jsonresult.JSONResult{data=string} "desc" +@success 200 {object} jsonresult.JSONResult{data=[]string} "desc" +``` + +- 替换多个字段的类型。如果某字段不存在,将添加该字段。 + +```go +@success 200 {object} jsonresult.JSONResult{data1=string,data2=[]string,data3=proto.Order,data4=[]proto.Order} "desc" +``` + +### 在响应中增加头字段 + +```go +// @Success 200 {string} string "ok" +// @failure 400 {string} string "error" +// @response default {string} string "other error" +// @Header 200 {string} Location "/entity/1" +// @Header 200,400,default {string} Token "token" +// @Header all {string} Token2 "token2" +``` + +### 使用多路径参数 + +```go +/// ... +// @Param group_id path int true "Group ID" +// @Param account_id path int true "Account ID" +// ... 
+// @Router /examples/groups/{group_id}/accounts/{account_id} [get] +``` + +### 结构体的示例值 + +```go +type Account struct { + ID int `json:"id" example:"1"` + Name string `json:"name" example:"account name"` + PhotoUrls []string `json:"photo_urls" example:"http://test/image/1.jpg,http://test/image/2.jpg"` +} +``` + +### 结构体描述 + +```go +type Account struct { + // ID this is userid + ID int `json:"id"` + Name string `json:"name"` // This is Name +} +``` + +### 使用`swaggertype`标签更改字段类型 + +[#201](https://github.com/swaggo/swag/issues/201#issuecomment-475479409) + +```go +type TimestampTime struct { + time.Time +} + +///实现encoding.JSON.Marshaler接口 +func (t *TimestampTime) MarshalJSON() ([]byte, error) { + bin := make([]byte, 16) + bin = strconv.AppendInt(bin[:0], t.Time.Unix(), 10) + return bin, nil +} + +///实现encoding.JSON.Unmarshaler接口 +func (t *TimestampTime) UnmarshalJSON(bin []byte) error { + v, err := strconv.ParseInt(string(bin), 10, 64) + if err != nil { + return err + } + t.Time = time.Unix(v, 0) + return nil +} +/// + +type Account struct { + // 使用`swaggertype`标签将别名类型更改为内置类型integer + ID sql.NullInt64 `json:"id" swaggertype:"integer"` + + // 使用`swaggertype`标签更改struct类型为内置类型integer + RegisterTime TimestampTime `json:"register_time" swaggertype:"primitive,integer"` + + // Array types can be overridden using "array," format + Coeffs []big.Float `json:"coeffs" swaggertype:"array,number"` +} +``` + +[#379](https://github.com/swaggo/swag/issues/379) + +```go +type CerticateKeyPair struct { + Crt []byte `json:"crt" swaggertype:"string" format:"base64" example:"U3dhZ2dlciByb2Nrcw=="` + Key []byte `json:"key" swaggertype:"string" format:"base64" example:"U3dhZ2dlciByb2Nrcw=="` +} +``` + +生成的swagger文档如下: + +```go +"api.MyBinding": { + "type":"object", + "properties":{ + "crt":{ + "type":"string", + "format":"base64", + "example":"U3dhZ2dlciByb2Nrcw==" + }, + "key":{ + "type":"string", + "format":"base64", + "example":"U3dhZ2dlciByb2Nrcw==" + } + } +} +``` + +### 使用`swaggerignore`标签排除字段 + +```go +type Account struct { + ID string `json:"id"` + Name string `json:"name"` + Ignored int `swaggerignore:"true"` +} +``` + +### 将扩展信息添加到结构字段 + +```go +type Account struct { + ID string `json:"id" extensions:"x-nullable,x-abc=def,!x-omitempty"` // 扩展字段必须以"x-"开头 +} +``` + +生成swagger文档,如下所示: + +```go +"Account": { + "type": "object", + "properties": { + "id": { + "type": "string", + "x-nullable": true, + "x-abc": "def", + "x-omitempty": false + } + } +} +``` + +### 对展示的模型重命名 + +```go +type Resp struct { + Code int +}//@name Response +``` + +### 如何使用安全性注释 + +通用API信息。 + +```go +// @securityDefinitions.basic BasicAuth + +// @securitydefinitions.oauth2.application OAuth2Application +// @tokenUrl https://example.com/oauth/token +// @scope.write Grants write access +// @scope.admin Grants read and write access to administrative information +``` + +每个API操作。 + +```go +// @Security ApiKeyAuth +``` + +使用AND条件。 + +```go +// @Security ApiKeyAuth +// @Security OAuth2Application[write, admin] +``` + +## 项目相关 + +This project was inspired by [yvasiyarov/swagger](https://github.com/yvasiyarov/swagger) but we simplified the usage and added support a variety of [web frameworks](#supported-web-frameworks). Gopher image source is [tenntenn/gopher-stickers](https://github.com/tenntenn/gopher-stickers). It has licenses [creative commons licensing](http://creativecommons.org/licenses/by/3.0/deed.en). + +## 贡献者 + +This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)]. 
+ + +## 支持者 + +Thank you to all our backers! 🙏 [[Become a backer](https://opencollective.com/swag#backer)] + + + +## 赞助商 + +Support this project by becoming a sponsor. Your logo will show up here with a link to your website. [[Become a sponsor](https://opencollective.com/swag#sponsor)] + + + + + + + + + + + + +## License + +[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fswaggo%2Fswag.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2Fswaggo%2Fswag?ref=badge_large) diff --git a/vendor/github.com/swaggo/swag/const.go b/vendor/github.com/swaggo/swag/const.go new file mode 100644 index 0000000..8375510 --- /dev/null +++ b/vendor/github.com/swaggo/swag/const.go @@ -0,0 +1,567 @@ +package swag + +import ( + "go/ast" + "go/token" + "reflect" + "strconv" + "strings" + "unicode/utf8" +) + +// ConstVariable a model to record a const variable +type ConstVariable struct { + Name *ast.Ident + Type ast.Expr + Value interface{} + Comment *ast.CommentGroup + File *ast.File + Pkg *PackageDefinitions +} + +var escapedChars = map[uint8]uint8{ + 'n': '\n', + 'r': '\r', + 't': '\t', + 'v': '\v', + '\\': '\\', + '"': '"', +} + +// EvaluateEscapedChar parse escaped character +func EvaluateEscapedChar(text string) rune { + if len(text) == 1 { + return rune(text[0]) + } + + if len(text) == 2 && text[0] == '\\' { + return rune(escapedChars[text[1]]) + } + + if len(text) == 6 && text[0:2] == "\\u" { + n, err := strconv.ParseInt(text[2:], 16, 32) + if err == nil { + return rune(n) + } + } + + return 0 +} + +// EvaluateEscapedString parse escaped characters in string +func EvaluateEscapedString(text string) string { + if !strings.ContainsRune(text, '\\') { + return text + } + result := make([]byte, 0, len(text)) + for i := 0; i < len(text); i++ { + if text[i] == '\\' { + i++ + if text[i] == 'u' { + i++ + char, err := strconv.ParseInt(text[i:i+4], 16, 32) + if err == nil { + result = utf8.AppendRune(result, rune(char)) + } + i += 3 + } else if c, ok := escapedChars[text[i]]; ok { + result = append(result, c) + } + } else { + result = append(result, text[i]) + } + } + return string(result) +} + +// EvaluateDataConversion evaluate the type a explicit type conversion +func EvaluateDataConversion(x interface{}, typeName string) interface{} { + switch value := x.(type) { + case int: + switch typeName { + case "int": + return int(value) + case "byte": + return byte(value) + case "int8": + return int8(value) + case "int16": + return int16(value) + case "int32": + return int32(value) + case "int64": + return int64(value) + case "uint": + return uint(value) + case "uint8": + return uint8(value) + case "uint16": + return uint16(value) + case "uint32": + return uint32(value) + case "uint64": + return uint64(value) + case "rune": + return rune(value) + } + case uint: + switch typeName { + case "int": + return int(value) + case "byte": + return byte(value) + case "int8": + return int8(value) + case "int16": + return int16(value) + case "int32": + return int32(value) + case "int64": + return int64(value) + case "uint": + return uint(value) + case "uint8": + return uint8(value) + case "uint16": + return uint16(value) + case "uint32": + return uint32(value) + case "uint64": + return uint64(value) + case "rune": + return rune(value) + } + case int8: + switch typeName { + case "int": + return int(value) + case "byte": + return byte(value) + case "int8": + return int8(value) + case "int16": + return int16(value) + case "int32": + return int32(value) + case "int64": + return int64(value) + case 
"uint": + return uint(value) + case "uint8": + return uint8(value) + case "uint16": + return uint16(value) + case "uint32": + return uint32(value) + case "uint64": + return uint64(value) + case "rune": + return rune(value) + } + case uint8: + switch typeName { + case "int": + return int(value) + case "byte": + return byte(value) + case "int8": + return int8(value) + case "int16": + return int16(value) + case "int32": + return int32(value) + case "int64": + return int64(value) + case "uint": + return uint(value) + case "uint8": + return uint8(value) + case "uint16": + return uint16(value) + case "uint32": + return uint32(value) + case "uint64": + return uint64(value) + case "rune": + return rune(value) + } + case int16: + switch typeName { + case "int": + return int(value) + case "byte": + return byte(value) + case "int8": + return int8(value) + case "int16": + return int16(value) + case "int32": + return int32(value) + case "int64": + return int64(value) + case "uint": + return uint(value) + case "uint8": + return uint8(value) + case "uint16": + return uint16(value) + case "uint32": + return uint32(value) + case "uint64": + return uint64(value) + case "rune": + return rune(value) + } + case uint16: + switch typeName { + case "int": + return int(value) + case "byte": + return byte(value) + case "int8": + return int8(value) + case "int16": + return int16(value) + case "int32": + return int32(value) + case "int64": + return int64(value) + case "uint": + return uint(value) + case "uint8": + return uint8(value) + case "uint16": + return uint16(value) + case "uint32": + return uint32(value) + case "uint64": + return uint64(value) + case "rune": + return rune(value) + } + case int32: + switch typeName { + case "int": + return int(value) + case "byte": + return byte(value) + case "int8": + return int8(value) + case "int16": + return int16(value) + case "int32": + return int32(value) + case "int64": + return int64(value) + case "uint": + return uint(value) + case "uint8": + return uint8(value) + case "uint16": + return uint16(value) + case "uint32": + return uint32(value) + case "uint64": + return uint64(value) + case "rune": + return rune(value) + case "string": + return string(value) + } + case uint32: + switch typeName { + case "int": + return int(value) + case "byte": + return byte(value) + case "int8": + return int8(value) + case "int16": + return int16(value) + case "int32": + return int32(value) + case "int64": + return int64(value) + case "uint": + return uint(value) + case "uint8": + return uint8(value) + case "uint16": + return uint16(value) + case "uint32": + return uint32(value) + case "uint64": + return uint64(value) + case "rune": + return rune(value) + } + case int64: + switch typeName { + case "int": + return int(value) + case "byte": + return byte(value) + case "int8": + return int8(value) + case "int16": + return int16(value) + case "int32": + return int32(value) + case "int64": + return int64(value) + case "uint": + return uint(value) + case "uint8": + return uint8(value) + case "uint16": + return uint16(value) + case "uint32": + return uint32(value) + case "uint64": + return uint64(value) + case "rune": + return rune(value) + } + case uint64: + switch typeName { + case "int": + return int(value) + case "byte": + return byte(value) + case "int8": + return int8(value) + case "int16": + return int16(value) + case "int32": + return int32(value) + case "int64": + return int64(value) + case "uint": + return uint(value) + case "uint8": + return uint8(value) + case "uint16": + return 
uint16(value) + case "uint32": + return uint32(value) + case "uint64": + return uint64(value) + case "rune": + return rune(value) + } + case string: + switch typeName { + case "string": + return value + } + } + return nil +} + +// EvaluateUnary evaluate the type and value of a unary expression +func EvaluateUnary(x interface{}, operator token.Token, xtype ast.Expr) (interface{}, ast.Expr) { + switch operator { + case token.SUB: + switch value := x.(type) { + case int: + return -value, xtype + case int8: + return -value, xtype + case int16: + return -value, xtype + case int32: + return -value, xtype + case int64: + return -value, xtype + } + case token.XOR: + switch value := x.(type) { + case int: + return ^value, xtype + case int8: + return ^value, xtype + case int16: + return ^value, xtype + case int32: + return ^value, xtype + case int64: + return ^value, xtype + case uint: + return ^value, xtype + case uint8: + return ^value, xtype + case uint16: + return ^value, xtype + case uint32: + return ^value, xtype + case uint64: + return ^value, xtype + } + } + return nil, nil +} + +// EvaluateBinary evaluate the type and value of a binary expression +func EvaluateBinary(x, y interface{}, operator token.Token, xtype, ytype ast.Expr) (interface{}, ast.Expr) { + if operator == token.SHR || operator == token.SHL { + var rightOperand uint64 + yValue := reflect.ValueOf(y) + if yValue.CanUint() { + rightOperand = yValue.Uint() + } else if yValue.CanInt() { + rightOperand = uint64(yValue.Int()) + } + + switch operator { + case token.SHL: + switch xValue := x.(type) { + case int: + return xValue << rightOperand, xtype + case int8: + return xValue << rightOperand, xtype + case int16: + return xValue << rightOperand, xtype + case int32: + return xValue << rightOperand, xtype + case int64: + return xValue << rightOperand, xtype + case uint: + return xValue << rightOperand, xtype + case uint8: + return xValue << rightOperand, xtype + case uint16: + return xValue << rightOperand, xtype + case uint32: + return xValue << rightOperand, xtype + case uint64: + return xValue << rightOperand, xtype + } + case token.SHR: + switch xValue := x.(type) { + case int: + return xValue >> rightOperand, xtype + case int8: + return xValue >> rightOperand, xtype + case int16: + return xValue >> rightOperand, xtype + case int32: + return xValue >> rightOperand, xtype + case int64: + return xValue >> rightOperand, xtype + case uint: + return xValue >> rightOperand, xtype + case uint8: + return xValue >> rightOperand, xtype + case uint16: + return xValue >> rightOperand, xtype + case uint32: + return xValue >> rightOperand, xtype + case uint64: + return xValue >> rightOperand, xtype + } + } + return nil, nil + } + + evalType := xtype + if evalType == nil { + evalType = ytype + } + + xValue := reflect.ValueOf(x) + yValue := reflect.ValueOf(y) + if xValue.Kind() == reflect.String && yValue.Kind() == reflect.String { + return xValue.String() + yValue.String(), evalType + } + + var targetValue reflect.Value + if xValue.Kind() != reflect.Int { + targetValue = reflect.New(xValue.Type()).Elem() + } else { + targetValue = reflect.New(yValue.Type()).Elem() + } + + switch operator { + case token.ADD: + if xValue.CanInt() && yValue.CanInt() { + targetValue.SetInt(xValue.Int() + yValue.Int()) + } else if xValue.CanUint() && yValue.CanUint() { + targetValue.SetUint(xValue.Uint() + yValue.Uint()) + } else if xValue.CanInt() && yValue.CanUint() { + targetValue.SetUint(uint64(xValue.Int()) + yValue.Uint()) + } else if xValue.CanUint() && 
yValue.CanInt() { + targetValue.SetUint(xValue.Uint() + uint64(yValue.Int())) + } + case token.SUB: + if xValue.CanInt() && yValue.CanInt() { + targetValue.SetInt(xValue.Int() - yValue.Int()) + } else if xValue.CanUint() && yValue.CanUint() { + targetValue.SetUint(xValue.Uint() - yValue.Uint()) + } else if xValue.CanInt() && yValue.CanUint() { + targetValue.SetUint(uint64(xValue.Int()) - yValue.Uint()) + } else if xValue.CanUint() && yValue.CanInt() { + targetValue.SetUint(xValue.Uint() - uint64(yValue.Int())) + } + case token.MUL: + if xValue.CanInt() && yValue.CanInt() { + targetValue.SetInt(xValue.Int() * yValue.Int()) + } else if xValue.CanUint() && yValue.CanUint() { + targetValue.SetUint(xValue.Uint() * yValue.Uint()) + } else if xValue.CanInt() && yValue.CanUint() { + targetValue.SetUint(uint64(xValue.Int()) * yValue.Uint()) + } else if xValue.CanUint() && yValue.CanInt() { + targetValue.SetUint(xValue.Uint() * uint64(yValue.Int())) + } + case token.QUO: + if xValue.CanInt() && yValue.CanInt() { + targetValue.SetInt(xValue.Int() / yValue.Int()) + } else if xValue.CanUint() && yValue.CanUint() { + targetValue.SetUint(xValue.Uint() / yValue.Uint()) + } else if xValue.CanInt() && yValue.CanUint() { + targetValue.SetUint(uint64(xValue.Int()) / yValue.Uint()) + } else if xValue.CanUint() && yValue.CanInt() { + targetValue.SetUint(xValue.Uint() / uint64(yValue.Int())) + } + case token.REM: + if xValue.CanInt() && yValue.CanInt() { + targetValue.SetInt(xValue.Int() % yValue.Int()) + } else if xValue.CanUint() && yValue.CanUint() { + targetValue.SetUint(xValue.Uint() % yValue.Uint()) + } else if xValue.CanInt() && yValue.CanUint() { + targetValue.SetUint(uint64(xValue.Int()) % yValue.Uint()) + } else if xValue.CanUint() && yValue.CanInt() { + targetValue.SetUint(xValue.Uint() % uint64(yValue.Int())) + } + case token.AND: + if xValue.CanInt() && yValue.CanInt() { + targetValue.SetInt(xValue.Int() & yValue.Int()) + } else if xValue.CanUint() && yValue.CanUint() { + targetValue.SetUint(xValue.Uint() & yValue.Uint()) + } else if xValue.CanInt() && yValue.CanUint() { + targetValue.SetUint(uint64(xValue.Int()) & yValue.Uint()) + } else if xValue.CanUint() && yValue.CanInt() { + targetValue.SetUint(xValue.Uint() & uint64(yValue.Int())) + } + case token.OR: + if xValue.CanInt() && yValue.CanInt() { + targetValue.SetInt(xValue.Int() | yValue.Int()) + } else if xValue.CanUint() && yValue.CanUint() { + targetValue.SetUint(xValue.Uint() | yValue.Uint()) + } else if xValue.CanInt() && yValue.CanUint() { + targetValue.SetUint(uint64(xValue.Int()) | yValue.Uint()) + } else if xValue.CanUint() && yValue.CanInt() { + targetValue.SetUint(xValue.Uint() | uint64(yValue.Int())) + } + case token.XOR: + if xValue.CanInt() && yValue.CanInt() { + targetValue.SetInt(xValue.Int() ^ yValue.Int()) + } else if xValue.CanUint() && yValue.CanUint() { + targetValue.SetUint(xValue.Uint() ^ yValue.Uint()) + } else if xValue.CanInt() && yValue.CanUint() { + targetValue.SetUint(uint64(xValue.Int()) ^ yValue.Uint()) + } else if xValue.CanUint() && yValue.CanInt() { + targetValue.SetUint(xValue.Uint() ^ uint64(yValue.Int())) + } + } + return targetValue.Interface(), evalType +} diff --git a/vendor/github.com/swaggo/swag/doc.go b/vendor/github.com/swaggo/swag/doc.go new file mode 100644 index 0000000..564b5c6 --- /dev/null +++ b/vendor/github.com/swaggo/swag/doc.go @@ -0,0 +1,5 @@ +/* +Package swag converts Go annotations to Swagger Documentation 2.0. +See https://github.com/swaggo/swag for more information about swag. 
+*/ +package swag // import "github.com/swaggo/swag" diff --git a/vendor/github.com/swaggo/swag/enums.go b/vendor/github.com/swaggo/swag/enums.go new file mode 100644 index 0000000..38658f2 --- /dev/null +++ b/vendor/github.com/swaggo/swag/enums.go @@ -0,0 +1,13 @@ +package swag + +const ( + enumVarNamesExtension = "x-enum-varnames" + enumCommentsExtension = "x-enum-comments" +) + +// EnumValue a model to record an enum consts variable +type EnumValue struct { + key string + Value interface{} + Comment string +} diff --git a/vendor/github.com/swaggo/swag/field_parser.go b/vendor/github.com/swaggo/swag/field_parser.go new file mode 100644 index 0000000..9b24e78 --- /dev/null +++ b/vendor/github.com/swaggo/swag/field_parser.go @@ -0,0 +1,680 @@ +package swag + +import ( + "fmt" + "go/ast" + "reflect" + "regexp" + "strconv" + "strings" + "sync" + "unicode" + + "github.com/go-openapi/spec" +) + +var _ FieldParser = &tagBaseFieldParser{p: nil, field: nil, tag: ""} + +const ( + requiredLabel = "required" + optionalLabel = "optional" + swaggerTypeTag = "swaggertype" + swaggerIgnoreTag = "swaggerignore" +) + +type tagBaseFieldParser struct { + p *Parser + field *ast.Field + tag reflect.StructTag +} + +func newTagBaseFieldParser(p *Parser, field *ast.Field) FieldParser { + fieldParser := tagBaseFieldParser{ + p: p, + field: field, + tag: "", + } + if fieldParser.field.Tag != nil { + fieldParser.tag = reflect.StructTag(strings.ReplaceAll(field.Tag.Value, "`", "")) + } + + return &fieldParser +} + +func (ps *tagBaseFieldParser) ShouldSkip() bool { + // Skip non-exported fields. + if ps.field.Names != nil && !ast.IsExported(ps.field.Names[0].Name) { + return true + } + + if ps.field.Tag == nil { + return false + } + + ignoreTag := ps.tag.Get(swaggerIgnoreTag) + if strings.EqualFold(ignoreTag, "true") { + return true + } + + // json:"tag,hoge" + name := strings.TrimSpace(strings.Split(ps.tag.Get(jsonTag), ",")[0]) + if name == "-" { + return true + } + + return false +} + +func (ps *tagBaseFieldParser) FieldName() (string, error) { + var name string + + if ps.field.Tag != nil { + // json:"tag,hoge" + name = strings.TrimSpace(strings.Split(ps.tag.Get(jsonTag), ",")[0]) + if name != "" { + return name, nil + } + + // use "form" tag over json tag + name = ps.FormName() + if name != "" { + return name, nil + } + } + + if ps.field.Names == nil { + return "", nil + } + + switch ps.p.PropNamingStrategy { + case SnakeCase: + return toSnakeCase(ps.field.Names[0].Name), nil + case PascalCase: + return ps.field.Names[0].Name, nil + default: + return toLowerCamelCase(ps.field.Names[0].Name), nil + } +} + +func (ps *tagBaseFieldParser) firstTagValue(tag string) string { + if ps.field.Tag != nil { + return strings.TrimRight(strings.TrimSpace(strings.Split(ps.tag.Get(tag), ",")[0]), "[]") + } + return "" +} + +func (ps *tagBaseFieldParser) FormName() string { + return ps.firstTagValue(formTag) +} + +func (ps *tagBaseFieldParser) HeaderName() string { + return ps.firstTagValue(headerTag) +} + +func (ps *tagBaseFieldParser) PathName() string { + return ps.firstTagValue(uriTag) +} + +func toSnakeCase(in string) string { + var ( + runes = []rune(in) + length = len(runes) + out []rune + ) + + for idx := 0; idx < length; idx++ { + if idx > 0 && unicode.IsUpper(runes[idx]) && + ((idx+1 < length && unicode.IsLower(runes[idx+1])) || unicode.IsLower(runes[idx-1])) { + out = append(out, '_') + } + + out = append(out, unicode.ToLower(runes[idx])) + } + + return string(out) +} + +func toLowerCamelCase(in string) string { + var 
flag bool + + out := make([]rune, len(in)) + + runes := []rune(in) + for i, curr := range runes { + if (i == 0 && unicode.IsUpper(curr)) || (flag && unicode.IsUpper(curr)) { + out[i] = unicode.ToLower(curr) + flag = true + + continue + } + + out[i] = curr + flag = false + } + + return string(out) +} + +func (ps *tagBaseFieldParser) CustomSchema() (*spec.Schema, error) { + if ps.field.Tag == nil { + return nil, nil + } + + typeTag := ps.tag.Get(swaggerTypeTag) + if typeTag != "" { + return BuildCustomSchema(strings.Split(typeTag, ",")) + } + + return nil, nil +} + +type structField struct { + schemaType string + arrayType string + formatType string + maximum *float64 + minimum *float64 + multipleOf *float64 + maxLength *int64 + minLength *int64 + maxItems *int64 + minItems *int64 + exampleValue interface{} + enums []interface{} + enumVarNames []interface{} + unique bool +} + +// splitNotWrapped slices s into all substrings separated by sep if sep is not +// wrapped by brackets and returns a slice of the substrings between those separators. +func splitNotWrapped(s string, sep rune) []string { + openCloseMap := map[rune]rune{ + '(': ')', + '[': ']', + '{': '}', + } + + var ( + result = make([]string, 0) + current = strings.Builder{} + openCount = 0 + openChar rune + ) + + for _, char := range s { + switch { + case openChar == 0 && openCloseMap[char] != 0: + openChar = char + + openCount++ + + current.WriteRune(char) + case char == openChar: + openCount++ + + current.WriteRune(char) + case openCount > 0 && char == openCloseMap[openChar]: + openCount-- + + current.WriteRune(char) + case openCount == 0 && char == sep: + result = append(result, current.String()) + + openChar = 0 + + current = strings.Builder{} + default: + current.WriteRune(char) + } + } + + if current.String() != "" { + result = append(result, current.String()) + } + + return result +} + +// ComplementSchema complement schema with field properties +func (ps *tagBaseFieldParser) ComplementSchema(schema *spec.Schema) error { + types := ps.p.GetSchemaTypePath(schema, 2) + if len(types) == 0 { + return fmt.Errorf("invalid type for field: %s", ps.field.Names[0]) + } + + if IsRefSchema(schema) { + var newSchema = spec.Schema{} + err := ps.complementSchema(&newSchema, types) + if err != nil { + return err + } + if !reflect.ValueOf(newSchema).IsZero() { + *schema = *(newSchema.WithAllOf(*schema)) + } + return nil + } + + return ps.complementSchema(schema, types) +} + +// complementSchema complement schema with field properties +func (ps *tagBaseFieldParser) complementSchema(schema *spec.Schema, types []string) error { + if ps.field.Tag == nil { + if ps.field.Doc != nil { + schema.Description = strings.TrimSpace(ps.field.Doc.Text()) + } + + if schema.Description == "" && ps.field.Comment != nil { + schema.Description = strings.TrimSpace(ps.field.Comment.Text()) + } + + return nil + } + + field := &structField{ + schemaType: types[0], + formatType: ps.tag.Get(formatTag), + } + + if len(types) > 1 && (types[0] == ARRAY || types[0] == OBJECT) { + field.arrayType = types[1] + } + + jsonTagValue := ps.tag.Get(jsonTag) + + bindingTagValue := ps.tag.Get(bindingTag) + if bindingTagValue != "" { + parseValidTags(bindingTagValue, field) + } + + validateTagValue := ps.tag.Get(validateTag) + if validateTagValue != "" { + parseValidTags(validateTagValue, field) + } + + enumsTagValue := ps.tag.Get(enumsTag) + if enumsTagValue != "" { + err := parseEnumTags(enumsTagValue, field) + if err != nil { + return err + } + } + + if 
IsNumericType(field.schemaType) || IsNumericType(field.arrayType) { + maximum, err := getFloatTag(ps.tag, maximumTag) + if err != nil { + return err + } + + if maximum != nil { + field.maximum = maximum + } + + minimum, err := getFloatTag(ps.tag, minimumTag) + if err != nil { + return err + } + + if minimum != nil { + field.minimum = minimum + } + + multipleOf, err := getFloatTag(ps.tag, multipleOfTag) + if err != nil { + return err + } + + if multipleOf != nil { + field.multipleOf = multipleOf + } + } + + if field.schemaType == STRING || field.arrayType == STRING { + maxLength, err := getIntTag(ps.tag, maxLengthTag) + if err != nil { + return err + } + + if maxLength != nil { + field.maxLength = maxLength + } + + minLength, err := getIntTag(ps.tag, minLengthTag) + if err != nil { + return err + } + + if minLength != nil { + field.minLength = minLength + } + } + + // json:"name,string" or json:",string" + exampleTagValue, ok := ps.tag.Lookup(exampleTag) + if ok { + field.exampleValue = exampleTagValue + + if !strings.Contains(jsonTagValue, ",string") { + example, err := defineTypeOfExample(field.schemaType, field.arrayType, exampleTagValue) + if err != nil { + return err + } + + field.exampleValue = example + } + } + + // perform this after setting everything else (min, max, etc...) + if strings.Contains(jsonTagValue, ",string") { + // @encoding/json: "It applies only to fields of string, floating point, integer, or boolean types." + defaultValues := map[string]string{ + // Zero Values as string + STRING: "", + INTEGER: "0", + BOOLEAN: "false", + NUMBER: "0", + } + + defaultValue, ok := defaultValues[field.schemaType] + if ok { + field.schemaType = STRING + *schema = *PrimitiveSchema(field.schemaType) + + if field.exampleValue == nil { + // if exampleValue is not defined by the user, + // we will force an example with a correct value + // (eg: int->"0", bool:"false") + field.exampleValue = defaultValue + } + } + } + + if ps.field.Doc != nil { + schema.Description = strings.TrimSpace(ps.field.Doc.Text()) + } + + if schema.Description == "" && ps.field.Comment != nil { + schema.Description = strings.TrimSpace(ps.field.Comment.Text()) + } + + schema.ReadOnly = ps.tag.Get(readOnlyTag) == "true" + + defaultTagValue := ps.tag.Get(defaultTag) + if defaultTagValue != "" { + value, err := defineType(field.schemaType, defaultTagValue) + if err != nil { + return err + } + + schema.Default = value + } + + schema.Example = field.exampleValue + + if field.schemaType != ARRAY { + schema.Format = field.formatType + } + + extensionsTagValue := ps.tag.Get(extensionsTag) + if extensionsTagValue != "" { + schema.Extensions = setExtensionParam(extensionsTagValue) + } + + varNamesTag := ps.tag.Get("x-enum-varnames") + if varNamesTag != "" { + varNames := strings.Split(varNamesTag, ",") + if len(varNames) != len(field.enums) { + return fmt.Errorf("invalid count of x-enum-varnames. 
expected %d, got %d", len(field.enums), len(varNames)) + } + + field.enumVarNames = nil + + for _, v := range varNames { + field.enumVarNames = append(field.enumVarNames, v) + } + + if field.schemaType == ARRAY { + // Add the var names in the items schema + if schema.Items.Schema.Extensions == nil { + schema.Items.Schema.Extensions = map[string]interface{}{} + } + schema.Items.Schema.Extensions[enumVarNamesExtension] = field.enumVarNames + } else { + // Add to top level schema + if schema.Extensions == nil { + schema.Extensions = map[string]interface{}{} + } + schema.Extensions[enumVarNamesExtension] = field.enumVarNames + } + } + + eleSchema := schema + + if field.schemaType == ARRAY { + // For Array only + schema.MaxItems = field.maxItems + schema.MinItems = field.minItems + schema.UniqueItems = field.unique + + eleSchema = schema.Items.Schema + eleSchema.Format = field.formatType + } + + eleSchema.Maximum = field.maximum + eleSchema.Minimum = field.minimum + eleSchema.MultipleOf = field.multipleOf + eleSchema.MaxLength = field.maxLength + eleSchema.MinLength = field.minLength + eleSchema.Enum = field.enums + + return nil +} + +func getFloatTag(structTag reflect.StructTag, tagName string) (*float64, error) { + strValue := structTag.Get(tagName) + if strValue == "" { + return nil, nil + } + + value, err := strconv.ParseFloat(strValue, 64) + if err != nil { + return nil, fmt.Errorf("can't parse numeric value of %q tag: %v", tagName, err) + } + + return &value, nil +} + +func getIntTag(structTag reflect.StructTag, tagName string) (*int64, error) { + strValue := structTag.Get(tagName) + if strValue == "" { + return nil, nil + } + + value, err := strconv.ParseInt(strValue, 10, 64) + if err != nil { + return nil, fmt.Errorf("can't parse numeric value of %q tag: %v", tagName, err) + } + + return &value, nil +} + +func (ps *tagBaseFieldParser) IsRequired() (bool, error) { + if ps.field.Tag == nil { + return false, nil + } + + bindingTag := ps.tag.Get(bindingTag) + if bindingTag != "" { + for _, val := range strings.Split(bindingTag, ",") { + switch val { + case requiredLabel: + return true, nil + case optionalLabel: + return false, nil + } + } + } + + validateTag := ps.tag.Get(validateTag) + if validateTag != "" { + for _, val := range strings.Split(validateTag, ",") { + switch val { + case requiredLabel: + return true, nil + case optionalLabel: + return false, nil + } + } + } + + return ps.p.RequiredByDefault, nil +} + +func parseValidTags(validTag string, sf *structField) { + // `validate:"required,max=10,min=1"` + // ps. required checked by IsRequired(). 
+ for _, val := range strings.Split(validTag, ",") { + var ( + valValue string + keyVal = strings.Split(val, "=") + ) + + switch len(keyVal) { + case 1: + case 2: + valValue = strings.ReplaceAll(strings.ReplaceAll(keyVal[1], utf8HexComma, ","), utf8Pipe, "|") + default: + continue + } + + switch keyVal[0] { + case "max", "lte": + sf.setMax(valValue) + case "min", "gte": + sf.setMin(valValue) + case "oneof": + sf.setOneOf(valValue) + case "unique": + if sf.schemaType == ARRAY { + sf.unique = true + } + case "dive": + // ignore dive + return + default: + continue + } + } +} + +func parseEnumTags(enumTag string, field *structField) error { + enumType := field.schemaType + if field.schemaType == ARRAY { + enumType = field.arrayType + } + + field.enums = nil + + for _, e := range strings.Split(enumTag, ",") { + value, err := defineType(enumType, e) + if err != nil { + return err + } + + field.enums = append(field.enums, value) + } + + return nil +} + +func (sf *structField) setOneOf(valValue string) { + if len(sf.enums) != 0 { + return + } + + enumType := sf.schemaType + if sf.schemaType == ARRAY { + enumType = sf.arrayType + } + + valValues := parseOneOfParam2(valValue) + for i := range valValues { + value, err := defineType(enumType, valValues[i]) + if err != nil { + continue + } + + sf.enums = append(sf.enums, value) + } +} + +func (sf *structField) setMin(valValue string) { + value, err := strconv.ParseFloat(valValue, 64) + if err != nil { + return + } + + switch sf.schemaType { + case INTEGER, NUMBER: + sf.minimum = &value + case STRING: + intValue := int64(value) + sf.minLength = &intValue + case ARRAY: + intValue := int64(value) + sf.minItems = &intValue + } +} + +func (sf *structField) setMax(valValue string) { + value, err := strconv.ParseFloat(valValue, 64) + if err != nil { + return + } + + switch sf.schemaType { + case INTEGER, NUMBER: + sf.maximum = &value + case STRING: + intValue := int64(value) + sf.maxLength = &intValue + case ARRAY: + intValue := int64(value) + sf.maxItems = &intValue + } +} + +const ( + utf8HexComma = "0x2C" + utf8Pipe = "0x7C" +) + +// These code copy from +// https://github.com/go-playground/validator/blob/d4271985b44b735c6f76abc7a06532ee997f9476/baked_in.go#L207 +// ---. +var oneofValsCache = map[string][]string{} +var oneofValsCacheRWLock = sync.RWMutex{} +var splitParamsRegex = regexp.MustCompile(`'[^']*'|\S+`) + +func parseOneOfParam2(param string) []string { + oneofValsCacheRWLock.RLock() + values, ok := oneofValsCache[param] + oneofValsCacheRWLock.RUnlock() + + if !ok { + oneofValsCacheRWLock.Lock() + values = splitParamsRegex.FindAllString(param, -1) + + for i := 0; i < len(values); i++ { + values[i] = strings.ReplaceAll(values[i], "'", "") + } + + oneofValsCache[param] = values + + oneofValsCacheRWLock.Unlock() + } + + return values +} + +// ---. 
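
As a rough illustration of how the tag parsing above maps `validate`/`binding` values onto schema constraints (the struct and field names below are hypothetical and not part of swag), consider:

```go
type CreateUserRequest struct {
	// "gte"/"lte" (like "min"/"max") become minimum/maximum on numeric fields
	Age int `json:"age" validate:"gte=1,lte=120"`
	// on string fields the same tags become minLength/maxLength
	Name string `json:"name" validate:"min=2,max=32"`
	// "oneof" values are emitted as an enum
	Role string `json:"role" validate:"oneof=admin user"`
	// on arrays "unique" sets uniqueItems, and min/max become minItems/maxItems
	Tags []string `json:"tags" validate:"unique,min=1,max=5"`
}
```

Per `parseValidTags`, `setMin`, `setMax` and `setOneOf` above, the generated schema for such a struct would carry the corresponding `minimum`/`maximum`, `minLength`/`maxLength`, `enum` and `uniqueItems` properties.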
diff --git a/vendor/github.com/swaggo/swag/formatter.go b/vendor/github.com/swaggo/swag/formatter.go new file mode 100644 index 0000000..511e3a8 --- /dev/null +++ b/vendor/github.com/swaggo/swag/formatter.go @@ -0,0 +1,182 @@ +package swag + +import ( + "bytes" + "fmt" + "go/ast" + goparser "go/parser" + "go/token" + "log" + "os" + "regexp" + "sort" + "strings" + "text/tabwriter" +) + +// Check of @Param @Success @Failure @Response @Header +var specialTagForSplit = map[string]bool{ + paramAttr: true, + successAttr: true, + failureAttr: true, + responseAttr: true, + headerAttr: true, +} + +var skipChar = map[byte]byte{ + '"': '"', + '(': ')', + '{': '}', + '[': ']', +} + +// Formatter implements a formatter for Go source files. +type Formatter struct { + // debugging output goes here + debug Debugger +} + +// NewFormatter create a new formatter instance. +func NewFormatter() *Formatter { + formatter := &Formatter{ + debug: log.New(os.Stdout, "", log.LstdFlags), + } + return formatter +} + +// Format formats swag comments in contents. It uses fileName to report errors +// that happen during parsing of contents. +func (f *Formatter) Format(fileName string, contents []byte) ([]byte, error) { + fileSet := token.NewFileSet() + ast, err := goparser.ParseFile(fileSet, fileName, contents, goparser.ParseComments) + if err != nil { + return nil, err + } + + // Formatting changes are described as an edit list of byte range + // replacements. We make these content-level edits directly rather than + // changing the AST nodes and writing those out (via [go/printer] or + // [go/format]) so that we only change the formatting of Swag attribute + // comments. This won't touch the formatting of any other comments, or of + // functions, etc. + maxEdits := 0 + for _, comment := range ast.Comments { + maxEdits += len(comment.List) + } + edits := make(edits, 0, maxEdits) + + for _, comment := range ast.Comments { + formatFuncDoc(fileSet, comment.List, &edits) + } + + return edits.apply(contents), nil +} + +type edit struct { + begin int + end int + replacement []byte +} + +type edits []edit + +func (edits edits) apply(contents []byte) []byte { + // Apply the edits with the highest offset first, so that earlier edits + // don't affect the offsets of later edits. + sort.Slice(edits, func(i, j int) bool { + return edits[i].begin > edits[j].begin + }) + + for _, edit := range edits { + prefix := contents[:edit.begin] + suffix := contents[edit.end:] + contents = append(prefix, append(edit.replacement, suffix...)...) + } + + return contents +} + +// formatFuncDoc reformats the comment lines in commentList, and appends any +// changes to the edit list. +func formatFuncDoc(fileSet *token.FileSet, commentList []*ast.Comment, edits *edits) { + // Building the edit list to format a comment block is a two-step process. + // First, we iterate over each comment line looking for Swag attributes. In + // each one we find, we replace alignment whitespace with a tab character, + // then write the result into a tab writer. 
+ + linesToComments := make(map[int]int, len(commentList)) + + buffer := &bytes.Buffer{} + w := tabwriter.NewWriter(buffer, 1, 4, 1, '\t', 0) + + for commentIndex, comment := range commentList { + text := comment.Text + if attr, body, found := swagComment(text); found { + formatted := "//\t" + attr + if body != "" { + formatted += "\t" + splitComment2(attr, body) + } + _, _ = fmt.Fprintln(w, formatted) + linesToComments[len(linesToComments)] = commentIndex + } + } + + // Once we've loaded all of the comment lines to be aligned into the tab + // writer, flushing it causes the aligned text to be written out to the + // backing buffer. + _ = w.Flush() + + // Now the second step: we iterate over the aligned comment lines that were + // written into the backing buffer, pair each one up to its original + // comment line, and use the combination to describe the edit that needs to + // be made to the original input. + formattedComments := bytes.Split(buffer.Bytes(), []byte("\n")) + for lineIndex, commentIndex := range linesToComments { + comment := commentList[commentIndex] + *edits = append(*edits, edit{ + begin: fileSet.Position(comment.Pos()).Offset, + end: fileSet.Position(comment.End()).Offset, + replacement: formattedComments[lineIndex], + }) + } +} + +func splitComment2(attr, body string) string { + if specialTagForSplit[strings.ToLower(attr)] { + for i := 0; i < len(body); i++ { + if skipEnd, ok := skipChar[body[i]]; ok { + skipStart, n := body[i], 1 + for i++; i < len(body); i++ { + if skipStart != skipEnd && body[i] == skipStart { + n++ + } else if body[i] == skipEnd { + n-- + if n == 0 { + break + } + } + } + } else if body[i] == ' ' || body[i] == '\t' { + j := i + for ; j < len(body) && (body[j] == ' ' || body[j] == '\t'); j++ { + } + body = replaceRange(body, i, j, "\t") + } + } + } + return body +} + +func replaceRange(s string, start, end int, new string) string { + return s[:start] + new + s[end:] +} + +var swagCommentLineExpression = regexp.MustCompile(`^\/\/\s+(@[\S.]+)\s*(.*)`) + +func swagComment(comment string) (string, string, bool) { + matches := swagCommentLineExpression.FindStringSubmatch(comment) + if matches == nil { + return "", "", false + } + return matches[1], matches[2], true +} diff --git a/vendor/github.com/swaggo/swag/generics.go b/vendor/github.com/swaggo/swag/generics.go new file mode 100644 index 0000000..07344bb --- /dev/null +++ b/vendor/github.com/swaggo/swag/generics.go @@ -0,0 +1,432 @@ +//go:build go1.18 +// +build go1.18 + +package swag + +import ( + "errors" + "fmt" + "go/ast" + "strings" + "unicode" + + "github.com/go-openapi/spec" +) + +type genericTypeSpec struct { + TypeSpec *TypeSpecDef + Name string +} + +type formalParamType struct { + Name string + Type string +} + +func (t *genericTypeSpec) TypeName() string { + if t.TypeSpec != nil { + return t.TypeSpec.TypeName() + } + return t.Name +} + +func normalizeGenericTypeName(name string) string { + return strings.Replace(name, ".", "_", -1) +} + +func (pkgDefs *PackagesDefinitions) getTypeFromGenericParam(genericParam string, file *ast.File) (typeSpecDef *TypeSpecDef) { + if strings.HasPrefix(genericParam, "[]") { + typeSpecDef = pkgDefs.getTypeFromGenericParam(genericParam[2:], file) + if typeSpecDef == nil { + return nil + } + var expr ast.Expr + switch typeSpecDef.TypeSpec.Type.(type) { + case *ast.ArrayType, *ast.MapType: + expr = typeSpecDef.TypeSpec.Type + default: + name := typeSpecDef.TypeName() + expr = ast.NewIdent(name) + if _, ok := pkgDefs.uniqueDefinitions[name]; !ok { + 
pkgDefs.uniqueDefinitions[name] = typeSpecDef + } + } + return &TypeSpecDef{ + TypeSpec: &ast.TypeSpec{ + Name: ast.NewIdent(string(IgnoreNameOverridePrefix) + "array_" + typeSpecDef.TypeName()), + Type: &ast.ArrayType{ + Elt: expr, + }, + }, + Enums: typeSpecDef.Enums, + PkgPath: typeSpecDef.PkgPath, + ParentSpec: typeSpecDef.ParentSpec, + NotUnique: false, + } + } + + if strings.HasPrefix(genericParam, "map[") { + parts := strings.SplitN(genericParam[4:], "]", 2) + if len(parts) != 2 { + return nil + } + typeSpecDef = pkgDefs.getTypeFromGenericParam(parts[1], file) + if typeSpecDef == nil { + return nil + } + var expr ast.Expr + switch typeSpecDef.TypeSpec.Type.(type) { + case *ast.ArrayType, *ast.MapType: + expr = typeSpecDef.TypeSpec.Type + default: + name := typeSpecDef.TypeName() + expr = ast.NewIdent(name) + if _, ok := pkgDefs.uniqueDefinitions[name]; !ok { + pkgDefs.uniqueDefinitions[name] = typeSpecDef + } + } + return &TypeSpecDef{ + TypeSpec: &ast.TypeSpec{ + Name: ast.NewIdent(string(IgnoreNameOverridePrefix) + "map_" + parts[0] + "_" + typeSpecDef.TypeName()), + Type: &ast.MapType{ + Key: ast.NewIdent(parts[0]), //assume key is string or integer + Value: expr, + }, + }, + Enums: typeSpecDef.Enums, + PkgPath: typeSpecDef.PkgPath, + ParentSpec: typeSpecDef.ParentSpec, + NotUnique: false, + } + + } + if IsGolangPrimitiveType(genericParam) { + return &TypeSpecDef{ + TypeSpec: &ast.TypeSpec{ + Name: ast.NewIdent(genericParam), + Type: ast.NewIdent(genericParam), + }, + } + } + return pkgDefs.FindTypeSpec(genericParam, file) +} + +func (pkgDefs *PackagesDefinitions) parametrizeGenericType(file *ast.File, original *TypeSpecDef, fullGenericForm string) *TypeSpecDef { + if original == nil || original.TypeSpec.TypeParams == nil || len(original.TypeSpec.TypeParams.List) == 0 { + return original + } + + name, genericParams := splitGenericsTypeName(fullGenericForm) + if genericParams == nil { + return nil + } + + //generic[x,y any,z any] considered, TODO what if the type is not `any`, but a concrete one, such as `int32|int64` or an certain interface{} + var formals []formalParamType + for _, field := range original.TypeSpec.TypeParams.List { + for _, ident := range field.Names { + formal := formalParamType{Name: ident.Name} + if ident, ok := field.Type.(*ast.Ident); ok { + formal.Type = ident.Name + } + formals = append(formals, formal) + } + } + if len(genericParams) != len(formals) { + return nil + } + genericParamTypeDefs := map[string]*genericTypeSpec{} + + for i, genericParam := range genericParams { + var typeDef *TypeSpecDef + if !IsGolangPrimitiveType(genericParam) { + typeDef = pkgDefs.getTypeFromGenericParam(genericParam, file) + if typeDef != nil { + genericParam = typeDef.TypeName() + if _, ok := pkgDefs.uniqueDefinitions[genericParam]; !ok { + pkgDefs.uniqueDefinitions[genericParam] = typeDef + } + } + } + genericParamTypeDefs[formals[i].Name] = &genericTypeSpec{ + TypeSpec: typeDef, + Name: genericParam, + } + } + + name = fmt.Sprintf("%s%s-", string(IgnoreNameOverridePrefix), original.TypeName()) + var nameParts []string + for _, def := range formals { + if specDef, ok := genericParamTypeDefs[def.Name]; ok { + nameParts = append(nameParts, specDef.TypeName()) + } + } + + name += normalizeGenericTypeName(strings.Join(nameParts, "-")) + + if typeSpec, ok := pkgDefs.uniqueDefinitions[name]; ok { + return typeSpec + } + + parametrizedTypeSpec := &TypeSpecDef{ + File: original.File, + PkgPath: original.PkgPath, + TypeSpec: &ast.TypeSpec{ + Name: &ast.Ident{ + Name: name, + 
NamePos: original.TypeSpec.Name.NamePos, + Obj: original.TypeSpec.Name.Obj, + }, + Doc: original.TypeSpec.Doc, + Assign: original.TypeSpec.Assign, + }, + } + pkgDefs.uniqueDefinitions[name] = parametrizedTypeSpec + + parametrizedTypeSpec.TypeSpec.Type = pkgDefs.resolveGenericType(original.File, original.TypeSpec.Type, genericParamTypeDefs) + + return parametrizedTypeSpec +} + +// splitGenericsTypeName splits a generic struct name in his parts +func splitGenericsTypeName(fullGenericForm string) (string, []string) { + //remove all spaces character + fullGenericForm = strings.Map(func(r rune) rune { + if unicode.IsSpace(r) { + return -1 + } + return r + }, fullGenericForm) + + // split only at the first '[' and remove the last ']' + if fullGenericForm[len(fullGenericForm)-1] != ']' { + return "", nil + } + + genericParams := strings.SplitN(fullGenericForm[:len(fullGenericForm)-1], "[", 2) + if len(genericParams) == 1 { + return "", nil + } + + // generic type name + genericTypeName := genericParams[0] + + depth := 0 + genericParams = strings.FieldsFunc(genericParams[1], func(r rune) bool { + if r == '[' { + depth++ + } else if r == ']' { + depth-- + } else if r == ',' && depth == 0 { + return true + } + return false + }) + if depth != 0 { + return "", nil + } + + return genericTypeName, genericParams +} + +func (pkgDefs *PackagesDefinitions) getParametrizedType(genTypeSpec *genericTypeSpec) ast.Expr { + if genTypeSpec.TypeSpec != nil && strings.Contains(genTypeSpec.Name, ".") { + parts := strings.SplitN(genTypeSpec.Name, ".", 2) + return &ast.SelectorExpr{ + X: &ast.Ident{Name: parts[0]}, + Sel: &ast.Ident{Name: parts[1]}, + } + } + + //a primitive type name or a type name in current package + return &ast.Ident{Name: genTypeSpec.Name} +} + +func (pkgDefs *PackagesDefinitions) resolveGenericType(file *ast.File, expr ast.Expr, genericParamTypeDefs map[string]*genericTypeSpec) ast.Expr { + switch astExpr := expr.(type) { + case *ast.Ident: + if genTypeSpec, ok := genericParamTypeDefs[astExpr.Name]; ok { + return pkgDefs.getParametrizedType(genTypeSpec) + } + case *ast.ArrayType: + return &ast.ArrayType{ + Elt: pkgDefs.resolveGenericType(file, astExpr.Elt, genericParamTypeDefs), + Len: astExpr.Len, + Lbrack: astExpr.Lbrack, + } + case *ast.MapType: + return &ast.MapType{ + Map: astExpr.Map, + Key: pkgDefs.resolveGenericType(file, astExpr.Key, genericParamTypeDefs), + Value: pkgDefs.resolveGenericType(file, astExpr.Value, genericParamTypeDefs), + } + case *ast.StarExpr: + return &ast.StarExpr{ + Star: astExpr.Star, + X: pkgDefs.resolveGenericType(file, astExpr.X, genericParamTypeDefs), + } + case *ast.IndexExpr, *ast.IndexListExpr: + fullGenericName, _ := getGenericFieldType(file, expr, genericParamTypeDefs) + typeDef := pkgDefs.FindTypeSpec(fullGenericName, file) + if typeDef != nil { + return typeDef.TypeSpec.Name + } + case *ast.StructType: + newStructTypeDef := &ast.StructType{ + Struct: astExpr.Struct, + Incomplete: astExpr.Incomplete, + Fields: &ast.FieldList{ + Opening: astExpr.Fields.Opening, + Closing: astExpr.Fields.Closing, + }, + } + + for _, field := range astExpr.Fields.List { + newField := &ast.Field{ + Type: field.Type, + Doc: field.Doc, + Names: field.Names, + Tag: field.Tag, + Comment: field.Comment, + } + + newField.Type = pkgDefs.resolveGenericType(file, field.Type, genericParamTypeDefs) + + newStructTypeDef.Fields.List = append(newStructTypeDef.Fields.List, newField) + } + return newStructTypeDef + } + return expr +} + +func getExtendedGenericFieldType(file *ast.File, field 
ast.Expr, genericParamTypeDefs map[string]*genericTypeSpec) (string, error) { + switch fieldType := field.(type) { + case *ast.ArrayType: + fieldName, err := getExtendedGenericFieldType(file, fieldType.Elt, genericParamTypeDefs) + return "[]" + fieldName, err + case *ast.StarExpr: + return getExtendedGenericFieldType(file, fieldType.X, genericParamTypeDefs) + case *ast.Ident: + if genericParamTypeDefs != nil { + if typeSpec, ok := genericParamTypeDefs[fieldType.Name]; ok { + return typeSpec.Name, nil + } + } + if fieldType.Obj == nil { + return fieldType.Name, nil + } + + tSpec := &TypeSpecDef{ + File: file, + TypeSpec: fieldType.Obj.Decl.(*ast.TypeSpec), + PkgPath: file.Name.Name, + } + return tSpec.TypeName(), nil + default: + return getFieldType(file, field, genericParamTypeDefs) + } +} + +func getGenericFieldType(file *ast.File, field ast.Expr, genericParamTypeDefs map[string]*genericTypeSpec) (string, error) { + var fullName string + var baseName string + var err error + switch fieldType := field.(type) { + case *ast.IndexListExpr: + baseName, err = getGenericTypeName(file, fieldType.X) + if err != nil { + return "", err + } + fullName = baseName + "[" + + for _, index := range fieldType.Indices { + fieldName, err := getExtendedGenericFieldType(file, index, genericParamTypeDefs) + if err != nil { + return "", err + } + + fullName += fieldName + "," + } + + fullName = strings.TrimRight(fullName, ",") + "]" + case *ast.IndexExpr: + baseName, err = getGenericTypeName(file, fieldType.X) + if err != nil { + return "", err + } + + indexName, err := getExtendedGenericFieldType(file, fieldType.Index, genericParamTypeDefs) + if err != nil { + return "", err + } + + fullName = fmt.Sprintf("%s[%s]", baseName, indexName) + } + + if fullName == "" { + return "", fmt.Errorf("unknown field type %#v", field) + } + + var packageName string + if !strings.Contains(baseName, ".") { + if file.Name == nil { + return "", errors.New("file name is nil") + } + packageName, _ = getFieldType(file, file.Name, genericParamTypeDefs) + } + + return strings.TrimLeft(fmt.Sprintf("%s.%s", packageName, fullName), "."), nil +} + +func getGenericTypeName(file *ast.File, field ast.Expr) (string, error) { + switch fieldType := field.(type) { + case *ast.Ident: + if fieldType.Obj == nil { + return fieldType.Name, nil + } + + tSpec := &TypeSpecDef{ + File: file, + TypeSpec: fieldType.Obj.Decl.(*ast.TypeSpec), + PkgPath: file.Name.Name, + } + return tSpec.TypeName(), nil + case *ast.ArrayType: + tSpec := &TypeSpecDef{ + File: file, + TypeSpec: fieldType.Elt.(*ast.Ident).Obj.Decl.(*ast.TypeSpec), + PkgPath: file.Name.Name, + } + return tSpec.TypeName(), nil + case *ast.SelectorExpr: + return fmt.Sprintf("%s.%s", fieldType.X.(*ast.Ident).Name, fieldType.Sel.Name), nil + } + return "", fmt.Errorf("unknown type %#v", field) +} + +func (parser *Parser) parseGenericTypeExpr(file *ast.File, typeExpr ast.Expr) (*spec.Schema, error) { + switch expr := typeExpr.(type) { + // suppress debug messages for these types + case *ast.InterfaceType: + case *ast.StructType: + case *ast.Ident: + case *ast.StarExpr: + case *ast.SelectorExpr: + case *ast.ArrayType: + case *ast.MapType: + case *ast.FuncType: + case *ast.IndexExpr, *ast.IndexListExpr: + name, err := getExtendedGenericFieldType(file, expr, nil) + if err == nil { + if schema, err := parser.getTypeSchema(name, file, false); err == nil { + return schema, nil + } + } + + parser.debug.Printf("Type definition of type '%T' is not supported yet. Using 'object' instead. 
(%s)\n", typeExpr, err) + default: + parser.debug.Printf("Type definition of type '%T' is not supported yet. Using 'object' instead.\n", typeExpr) + } + + return PrimitiveSchema(OBJECT), nil +} diff --git a/vendor/github.com/swaggo/swag/golist.go b/vendor/github.com/swaggo/swag/golist.go new file mode 100644 index 0000000..fa0b2cd --- /dev/null +++ b/vendor/github.com/swaggo/swag/golist.go @@ -0,0 +1,78 @@ +package swag + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "go/build" + "os/exec" + "path/filepath" +) + +func listPackages(ctx context.Context, dir string, env []string, args ...string) (pkgs []*build.Package, finalErr error) { + cmd := exec.CommandContext(ctx, "go", append([]string{"list", "-json", "-e"}, args...)...) + cmd.Env = env + cmd.Dir = dir + + stdout, err := cmd.StdoutPipe() + if err != nil { + return nil, err + } + var stderrBuf bytes.Buffer + cmd.Stderr = &stderrBuf + defer func() { + if stderrBuf.Len() > 0 { + finalErr = fmt.Errorf("%v\n%s", finalErr, stderrBuf.Bytes()) + } + }() + + err = cmd.Start() + if err != nil { + return nil, err + } + dec := json.NewDecoder(stdout) + for dec.More() { + var pkg build.Package + err = dec.Decode(&pkg) + if err != nil { + return nil, err + } + pkgs = append(pkgs, &pkg) + } + err = cmd.Wait() + if err != nil { + return nil, err + } + return pkgs, nil +} + +func (parser *Parser) getAllGoFileInfoFromDepsByList(pkg *build.Package, parseFlag ParseFlag) error { + ignoreInternal := pkg.Goroot && !parser.ParseInternal + if ignoreInternal { // ignored internal + return nil + } + + if parser.skipPackageByPrefix(pkg.ImportPath) { + return nil // ignored by user-defined package path prefixes + } + + srcDir := pkg.Dir + var err error + for i := range pkg.GoFiles { + err = parser.parseFile(pkg.ImportPath, filepath.Join(srcDir, pkg.GoFiles[i]), nil, parseFlag) + if err != nil { + return err + } + } + + // parse .go source files that import "C" + for i := range pkg.CgoFiles { + err = parser.parseFile(pkg.ImportPath, filepath.Join(srcDir, pkg.CgoFiles[i]), nil, parseFlag) + if err != nil { + return err + } + } + + return nil +} diff --git a/vendor/github.com/swaggo/swag/license b/vendor/github.com/swaggo/swag/license new file mode 100644 index 0000000..a97865b --- /dev/null +++ b/vendor/github.com/swaggo/swag/license @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Eason Lin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/swaggo/swag/operation.go b/vendor/github.com/swaggo/swag/operation.go new file mode 100644 index 0000000..169510f --- /dev/null +++ b/vendor/github.com/swaggo/swag/operation.go @@ -0,0 +1,1245 @@ +package swag + +import ( + "encoding/json" + "fmt" + "go/ast" + goparser "go/parser" + "go/token" + "net/http" + "os" + "path/filepath" + "regexp" + "strconv" + "strings" + + "github.com/go-openapi/spec" + "golang.org/x/tools/go/loader" +) + +// RouteProperties describes HTTP properties of a single router comment. +type RouteProperties struct { + HTTPMethod string + Path string + Deprecated bool +} + +// Operation describes a single API operation on a path. +// For more information: https://github.com/swaggo/swag#api-operation +type Operation struct { + parser *Parser + codeExampleFilesDir string + spec.Operation + RouterProperties []RouteProperties + State string +} + +var mimeTypeAliases = map[string]string{ + "json": "application/json", + "xml": "text/xml", + "plain": "text/plain", + "html": "text/html", + "mpfd": "multipart/form-data", + "x-www-form-urlencoded": "application/x-www-form-urlencoded", + "json-api": "application/vnd.api+json", + "json-stream": "application/x-json-stream", + "octet-stream": "application/octet-stream", + "png": "image/png", + "jpeg": "image/jpeg", + "gif": "image/gif", +} + +var mimeTypePattern = regexp.MustCompile("^[^/]+/[^/]+$") + +// NewOperation creates a new Operation with default properties. +// map[int]Response. +func NewOperation(parser *Parser, options ...func(*Operation)) *Operation { + if parser == nil { + parser = New() + } + + result := &Operation{ + parser: parser, + RouterProperties: []RouteProperties{}, + Operation: spec.Operation{ + OperationProps: spec.OperationProps{ + ID: "", + Description: "", + Summary: "", + Security: nil, + ExternalDocs: nil, + Deprecated: false, + Tags: []string{}, + Consumes: []string{}, + Produces: []string{}, + Schemes: []string{}, + Parameters: []spec.Parameter{}, + Responses: &spec.Responses{ + VendorExtensible: spec.VendorExtensible{ + Extensions: spec.Extensions{}, + }, + ResponsesProps: spec.ResponsesProps{ + Default: nil, + StatusCodeResponses: make(map[int]spec.Response), + }, + }, + }, + VendorExtensible: spec.VendorExtensible{ + Extensions: spec.Extensions{}, + }, + }, + codeExampleFilesDir: "", + } + + for _, option := range options { + option(result) + } + + return result +} + +// SetCodeExampleFilesDirectory sets the directory to search for codeExamples. +func SetCodeExampleFilesDirectory(directoryPath string) func(*Operation) { + return func(o *Operation) { + o.codeExampleFilesDir = directoryPath + } +} + +// ParseComment parses comment for given comment string and returns error if error occurs. 
+func (operation *Operation) ParseComment(comment string, astFile *ast.File) error { + commentLine := strings.TrimSpace(strings.TrimLeft(comment, "/")) + if len(commentLine) == 0 { + return nil + } + + fields := FieldsByAnySpace(commentLine, 2) + attribute := fields[0] + lowerAttribute := strings.ToLower(attribute) + var lineRemainder string + if len(fields) > 1 { + lineRemainder = fields[1] + } + switch lowerAttribute { + case stateAttr: + operation.ParseStateComment(lineRemainder) + case descriptionAttr: + operation.ParseDescriptionComment(lineRemainder) + case descriptionMarkdownAttr: + commentInfo, err := getMarkdownForTag(lineRemainder, operation.parser.markdownFileDir) + if err != nil { + return err + } + + operation.ParseDescriptionComment(string(commentInfo)) + case summaryAttr: + operation.Summary = lineRemainder + case idAttr: + operation.ID = lineRemainder + case tagsAttr: + operation.ParseTagsComment(lineRemainder) + case acceptAttr: + return operation.ParseAcceptComment(lineRemainder) + case produceAttr: + return operation.ParseProduceComment(lineRemainder) + case paramAttr: + return operation.ParseParamComment(lineRemainder, astFile) + case successAttr, failureAttr, responseAttr: + return operation.ParseResponseComment(lineRemainder, astFile) + case headerAttr: + return operation.ParseResponseHeaderComment(lineRemainder, astFile) + case routerAttr: + return operation.ParseRouterComment(lineRemainder, false) + case deprecatedRouterAttr: + return operation.ParseRouterComment(lineRemainder, true) + case securityAttr: + return operation.ParseSecurityComment(lineRemainder) + case deprecatedAttr: + operation.Deprecate() + case xCodeSamplesAttr: + return operation.ParseCodeSample(attribute, commentLine, lineRemainder) + default: + return operation.ParseMetadata(attribute, lowerAttribute, lineRemainder) + } + + return nil +} + +// ParseCodeSample parse code sample. +func (operation *Operation) ParseCodeSample(attribute, _, lineRemainder string) error { + if lineRemainder == "file" { + data, err := getCodeExampleForSummary(operation.Summary, operation.codeExampleFilesDir) + if err != nil { + return err + } + + var valueJSON interface{} + + err = json.Unmarshal(data, &valueJSON) + if err != nil { + return fmt.Errorf("annotation %s need a valid json value", attribute) + } + + // don't use the method provided by spec lib, because it will call toLower() on attribute names, which is wrongly + operation.Extensions[attribute[1:]] = valueJSON + + return nil + } + + // Fallback into existing logic + return operation.ParseMetadata(attribute, strings.ToLower(attribute), lineRemainder) +} + +// ParseStateComment parse state comment. +func (operation *Operation) ParseStateComment(lineRemainder string) { + operation.State = lineRemainder +} + +// ParseDescriptionComment parse description comment. +func (operation *Operation) ParseDescriptionComment(lineRemainder string) { + if operation.Description == "" { + operation.Description = lineRemainder + + return + } + + operation.Description += "\n" + lineRemainder +} + +// ParseMetadata parse metadata. 
+func (operation *Operation) ParseMetadata(attribute, lowerAttribute, lineRemainder string) error { + // parsing specific meta data extensions + if strings.HasPrefix(lowerAttribute, "@x-") { + if len(lineRemainder) == 0 { + return fmt.Errorf("annotation %s need a value", attribute) + } + + var valueJSON interface{} + + err := json.Unmarshal([]byte(lineRemainder), &valueJSON) + if err != nil { + return fmt.Errorf("annotation %s need a valid json value", attribute) + } + + // don't use the method provided by spec lib, because it will call toLower() on attribute names, which is wrongly + operation.Extensions[attribute[1:]] = valueJSON + } + + return nil +} + +var paramPattern = regexp.MustCompile(`(\S+)\s+(\w+)\s+([\S. ]+?)\s+(\w+)\s+"([^"]+)"`) + +func findInSlice(arr []string, target string) bool { + for _, str := range arr { + if str == target { + return true + } + } + + return false +} + +// ParseParamComment parses params return []string of param properties +// E.g. @Param queryText formData string true "The email for login" +// +// [param name] [paramType] [data type] [is mandatory?] [Comment] +// +// E.g. @Param some_id path int true "Some ID". +func (operation *Operation) ParseParamComment(commentLine string, astFile *ast.File) error { + matches := paramPattern.FindStringSubmatch(commentLine) + if len(matches) != 6 { + return fmt.Errorf("missing required param comment parameters \"%s\"", commentLine) + } + + name := matches[1] + paramType := matches[2] + refType := TransToValidSchemeType(matches[3]) + + // Detect refType + objectType := OBJECT + + if strings.HasPrefix(refType, "[]") { + objectType = ARRAY + refType = strings.TrimPrefix(refType, "[]") + refType = TransToValidSchemeType(refType) + } else if IsPrimitiveType(refType) || + paramType == "formData" && refType == "file" { + objectType = PRIMITIVE + } + + var enums []interface{} + if !IsPrimitiveType(refType) { + schema, _ := operation.parser.getTypeSchema(refType, astFile, false) + if schema != nil && len(schema.Type) == 1 && schema.Enum != nil { + if objectType == OBJECT { + objectType = PRIMITIVE + } + refType = TransToValidSchemeType(schema.Type[0]) + enums = schema.Enum + } + } + + requiredText := strings.ToLower(matches[4]) + required := requiredText == "true" || requiredText == requiredLabel + description := matches[5] + + param := createParameter(paramType, description, name, objectType, refType, required, enums, operation.parser.collectionFormatInQuery) + + switch paramType { + case "path", "header", "query", "formData": + switch objectType { + case ARRAY: + if !IsPrimitiveType(refType) && !(refType == "file" && paramType == "formData") { + return fmt.Errorf("%s is not supported array type for %s", refType, paramType) + } + case PRIMITIVE: + break + case OBJECT: + schema, err := operation.parser.getTypeSchema(refType, astFile, false) + if err != nil { + return err + } + + if len(schema.Properties) == 0 { + return nil + } + + items := schema.Properties.ToOrderedSchemaItems() + + for _, item := range items { + name, prop := item.Name, &item.Schema + if len(prop.Type) == 0 { + prop = operation.parser.getUnderlyingSchema(prop) + if len(prop.Type) == 0 { + continue + } + } + + nameOverrideType := paramType + // query also uses formData tags + if paramType == "query" { + nameOverrideType = "formData" + } + // load overridden type specific name from extensions if exists + if nameVal, ok := item.Schema.Extensions[nameOverrideType]; ok { + name = nameVal.(string) + } + + switch { + case prop.Type[0] == ARRAY: + if 
prop.Items.Schema == nil { + continue + } + itemSchema := prop.Items.Schema + if len(itemSchema.Type) == 0 { + itemSchema = operation.parser.getUnderlyingSchema(prop.Items.Schema) + } + if len(itemSchema.Type) == 0 { + continue + } + if !IsSimplePrimitiveType(itemSchema.Type[0]) { + continue + } + param = createParameter(paramType, prop.Description, name, prop.Type[0], itemSchema.Type[0], findInSlice(schema.Required, item.Name), itemSchema.Enum, operation.parser.collectionFormatInQuery) + + case IsSimplePrimitiveType(prop.Type[0]): + param = createParameter(paramType, prop.Description, name, PRIMITIVE, prop.Type[0], findInSlice(schema.Required, item.Name), nil, operation.parser.collectionFormatInQuery) + default: + operation.parser.debug.Printf("skip field [%s] in %s is not supported type for %s", name, refType, paramType) + continue + } + + param.Nullable = prop.Nullable + param.Format = prop.Format + param.Default = prop.Default + param.Example = prop.Example + param.Extensions = prop.Extensions + param.CommonValidations.Maximum = prop.Maximum + param.CommonValidations.Minimum = prop.Minimum + param.CommonValidations.ExclusiveMaximum = prop.ExclusiveMaximum + param.CommonValidations.ExclusiveMinimum = prop.ExclusiveMinimum + param.CommonValidations.MaxLength = prop.MaxLength + param.CommonValidations.MinLength = prop.MinLength + param.CommonValidations.Pattern = prop.Pattern + param.CommonValidations.MaxItems = prop.MaxItems + param.CommonValidations.MinItems = prop.MinItems + param.CommonValidations.UniqueItems = prop.UniqueItems + param.CommonValidations.MultipleOf = prop.MultipleOf + param.CommonValidations.Enum = prop.Enum + operation.Operation.Parameters = append(operation.Operation.Parameters, param) + } + + return nil + } + case "body": + if objectType == PRIMITIVE { + param.Schema = PrimitiveSchema(refType) + } else { + schema, err := operation.parseAPIObjectSchema(commentLine, objectType, refType, astFile) + if err != nil { + return err + } + + param.Schema = schema + } + default: + return fmt.Errorf("%s is not supported paramType", paramType) + } + + err := operation.parseParamAttribute(commentLine, objectType, refType, paramType, ¶m) + + if err != nil { + return err + } + + operation.Operation.Parameters = append(operation.Operation.Parameters, param) + + return nil +} + +const ( + formTag = "form" + jsonTag = "json" + uriTag = "uri" + headerTag = "header" + bindingTag = "binding" + defaultTag = "default" + enumsTag = "enums" + exampleTag = "example" + schemaExampleTag = "schemaExample" + formatTag = "format" + validateTag = "validate" + minimumTag = "minimum" + maximumTag = "maximum" + minLengthTag = "minLength" + maxLengthTag = "maxLength" + multipleOfTag = "multipleOf" + readOnlyTag = "readonly" + extensionsTag = "extensions" + collectionFormatTag = "collectionFormat" +) + +var regexAttributes = map[string]*regexp.Regexp{ + // for Enums(A, B) + enumsTag: regexp.MustCompile(`(?i)\s+enums\(.*\)`), + // for maximum(0) + maximumTag: regexp.MustCompile(`(?i)\s+maxinum|maximum\(.*\)`), + // for minimum(0) + minimumTag: regexp.MustCompile(`(?i)\s+mininum|minimum\(.*\)`), + // for default(0) + defaultTag: regexp.MustCompile(`(?i)\s+default\(.*\)`), + // for minlength(0) + minLengthTag: regexp.MustCompile(`(?i)\s+minlength\(.*\)`), + // for maxlength(0) + maxLengthTag: regexp.MustCompile(`(?i)\s+maxlength\(.*\)`), + // for format(email) + formatTag: regexp.MustCompile(`(?i)\s+format\(.*\)`), + // for extensions(x-example=test) + extensionsTag: 
regexp.MustCompile(`(?i)\s+extensions\(.*\)`), + // for collectionFormat(csv) + collectionFormatTag: regexp.MustCompile(`(?i)\s+collectionFormat\(.*\)`), + // example(0) + exampleTag: regexp.MustCompile(`(?i)\s+example\(.*\)`), + // schemaExample(0) + schemaExampleTag: regexp.MustCompile(`(?i)\s+schemaExample\(.*\)`), +} + +func (operation *Operation) parseParamAttribute(comment, objectType, schemaType, paramType string, param *spec.Parameter) error { + schemaType = TransToValidSchemeType(schemaType) + + for attrKey, re := range regexAttributes { + attr, err := findAttr(re, comment) + if err != nil { + continue + } + + switch attrKey { + case enumsTag: + err = setEnumParam(param, attr, objectType, schemaType, paramType) + case minimumTag, maximumTag: + err = setNumberParam(param, attrKey, schemaType, attr, comment) + case defaultTag: + err = setDefault(param, schemaType, attr) + case minLengthTag, maxLengthTag: + err = setStringParam(param, attrKey, schemaType, attr, comment) + case formatTag: + param.Format = attr + case exampleTag: + err = setExample(param, schemaType, attr) + case schemaExampleTag: + err = setSchemaExample(param, schemaType, attr) + case extensionsTag: + param.Extensions = setExtensionParam(attr) + case collectionFormatTag: + err = setCollectionFormatParam(param, attrKey, objectType, attr, comment) + } + + if err != nil { + return err + } + } + + return nil +} + +func findAttr(re *regexp.Regexp, commentLine string) (string, error) { + attr := re.FindString(commentLine) + + l, r := strings.Index(attr, "("), strings.Index(attr, ")") + if l == -1 || r == -1 { + return "", fmt.Errorf("can not find regex=%s, comment=%s", re.String(), commentLine) + } + + return strings.TrimSpace(attr[l+1 : r]), nil +} + +func setStringParam(param *spec.Parameter, name, schemaType, attr, commentLine string) error { + if schemaType != STRING { + return fmt.Errorf("%s is attribute to set to a number. comment=%s got=%s", name, commentLine, schemaType) + } + + n, err := strconv.ParseInt(attr, 10, 64) + if err != nil { + return fmt.Errorf("%s is allow only a number got=%s", name, attr) + } + + switch name { + case minLengthTag: + param.MinLength = &n + case maxLengthTag: + param.MaxLength = &n + } + + return nil +} + +func setNumberParam(param *spec.Parameter, name, schemaType, attr, commentLine string) error { + switch schemaType { + case INTEGER, NUMBER: + n, err := strconv.ParseFloat(attr, 64) + if err != nil { + return fmt.Errorf("maximum is allow only a number. comment=%s got=%s", commentLine, attr) + } + + switch name { + case minimumTag: + param.Minimum = &n + case maximumTag: + param.Maximum = &n + } + + return nil + default: + return fmt.Errorf("%s is attribute to set to a number. 
comment=%s got=%s", name, commentLine, schemaType) + } +} + +func setEnumParam(param *spec.Parameter, attr, objectType, schemaType, paramType string) error { + for _, e := range strings.Split(attr, ",") { + e = strings.TrimSpace(e) + + value, err := defineType(schemaType, e) + if err != nil { + return err + } + + switch objectType { + case ARRAY: + param.Items.Enum = append(param.Items.Enum, value) + default: + switch paramType { + case "body": + param.Schema.Enum = append(param.Schema.Enum, value) + default: + param.Enum = append(param.Enum, value) + } + } + } + + return nil +} + +func setExtensionParam(attr string) spec.Extensions { + extensions := spec.Extensions{} + + for _, val := range splitNotWrapped(attr, ',') { + parts := strings.SplitN(val, "=", 2) + if len(parts) == 2 { + extensions.Add(parts[0], parts[1]) + + continue + } + + if len(parts[0]) > 0 && string(parts[0][0]) == "!" { + extensions.Add(parts[0][1:], false) + + continue + } + + extensions.Add(parts[0], true) + } + + return extensions +} + +func setCollectionFormatParam(param *spec.Parameter, name, schemaType, attr, commentLine string) error { + if schemaType == ARRAY { + param.CollectionFormat = TransToValidCollectionFormat(attr) + + return nil + } + + return fmt.Errorf("%s is attribute to set to an array. comment=%s got=%s", name, commentLine, schemaType) +} + +func setDefault(param *spec.Parameter, schemaType string, value string) error { + val, err := defineType(schemaType, value) + if err != nil { + return nil // Don't set a default value if it's not valid + } + + param.Default = val + + return nil +} + +func setSchemaExample(param *spec.Parameter, schemaType string, value string) error { + val, err := defineType(schemaType, value) + if err != nil { + return nil // Don't set a example value if it's not valid + } + // skip schema + if param.Schema == nil { + return nil + } + + switch v := val.(type) { + case string: + // replaces \r \n \t in example string values. + param.Schema.Example = strings.NewReplacer(`\r`, "\r", `\n`, "\n", `\t`, "\t").Replace(v) + default: + param.Schema.Example = val + } + + return nil +} + +func setExample(param *spec.Parameter, schemaType string, value string) error { + val, err := defineType(schemaType, value) + if err != nil { + return nil // Don't set a example value if it's not valid + } + + param.Example = val + + return nil +} + +// defineType enum value define the type (object and array unsupported). +func defineType(schemaType string, value string) (v interface{}, err error) { + schemaType = TransToValidSchemeType(schemaType) + + switch schemaType { + case STRING: + return value, nil + case NUMBER: + v, err = strconv.ParseFloat(value, 64) + if err != nil { + return nil, fmt.Errorf("enum value %s can't convert to %s err: %s", value, schemaType, err) + } + case INTEGER: + v, err = strconv.Atoi(value) + if err != nil { + return nil, fmt.Errorf("enum value %s can't convert to %s err: %s", value, schemaType, err) + } + case BOOLEAN: + v, err = strconv.ParseBool(value) + if err != nil { + return nil, fmt.Errorf("enum value %s can't convert to %s err: %s", value, schemaType, err) + } + default: + return nil, fmt.Errorf("%s is unsupported type in enum value %s", schemaType, value) + } + + return v, nil +} + +// ParseTagsComment parses comment for given `tag` comment string. 
+func (operation *Operation) ParseTagsComment(commentLine string) { + for _, tag := range strings.Split(commentLine, ",") { + operation.Tags = append(operation.Tags, strings.TrimSpace(tag)) + } +} + +// ParseAcceptComment parses comment for given `accept` comment string. +func (operation *Operation) ParseAcceptComment(commentLine string) error { + return parseMimeTypeList(commentLine, &operation.Consumes, "%v accept type can't be accepted") +} + +// ParseProduceComment parses comment for given `produce` comment string. +func (operation *Operation) ParseProduceComment(commentLine string) error { + return parseMimeTypeList(commentLine, &operation.Produces, "%v produce type can't be accepted") +} + +// parseMimeTypeList parses a list of MIME Types for a comment like +// `produce` (`Content-Type:` response header) or +// `accept` (`Accept:` request header). +func parseMimeTypeList(mimeTypeList string, typeList *[]string, format string) error { + for _, typeName := range strings.Split(mimeTypeList, ",") { + if mimeTypePattern.MatchString(typeName) { + *typeList = append(*typeList, typeName) + + continue + } + + aliasMimeType, ok := mimeTypeAliases[typeName] + if !ok { + return fmt.Errorf(format, typeName) + } + + *typeList = append(*typeList, aliasMimeType) + } + + return nil +} + +var routerPattern = regexp.MustCompile(`^(/[\w./\-{}+:$]*)[[:blank:]]+\[(\w+)]`) + +// ParseRouterComment parses comment for given `router` comment string. +func (operation *Operation) ParseRouterComment(commentLine string, deprecated bool) error { + matches := routerPattern.FindStringSubmatch(commentLine) + if len(matches) != 3 { + return fmt.Errorf("can not parse router comment \"%s\"", commentLine) + } + + signature := RouteProperties{ + Path: matches[1], + HTTPMethod: strings.ToUpper(matches[2]), + Deprecated: deprecated, + } + + if _, ok := allMethod[signature.HTTPMethod]; !ok { + return fmt.Errorf("invalid method: %s", signature.HTTPMethod) + } + + operation.RouterProperties = append(operation.RouterProperties, signature) + + return nil +} + +// ParseSecurityComment parses comment for given `security` comment string. +func (operation *Operation) ParseSecurityComment(commentLine string) error { + if len(commentLine) == 0 { + operation.Security = []map[string][]string{} + return nil + } + + var ( + securityMap = make(map[string][]string) + securitySource = commentLine[strings.Index(commentLine, "@Security")+1:] + ) + + for _, securityOption := range strings.Split(securitySource, "||") { + securityOption = strings.TrimSpace(securityOption) + + left, right := strings.Index(securityOption, "["), strings.Index(securityOption, "]") + + if !(left == -1 && right == -1) { + scopes := securityOption[left+1 : right] + + var options []string + + for _, scope := range strings.Split(scopes, ",") { + options = append(options, strings.TrimSpace(scope)) + } + + securityKey := securityOption[0:left] + securityMap[securityKey] = append(securityMap[securityKey], options...) + } else { + securityKey := strings.TrimSpace(securityOption) + securityMap[securityKey] = []string{} + } + } + + operation.Security = append(operation.Security, securityMap) + + return nil +} + +// findTypeDef attempts to find the *ast.TypeSpec for a specific type given the +// type's name and the package's import path. +// TODO: improve finding external pkg. 
+func findTypeDef(importPath, typeName string) (*ast.TypeSpec, error) { + cwd, err := os.Getwd() + if err != nil { + return nil, err + } + + conf := loader.Config{ + ParserMode: goparser.SpuriousErrors, + Cwd: cwd, + } + + conf.Import(importPath) + + lprog, err := conf.Load() + if err != nil { + return nil, err + } + + // If the pkg is vendored, the actual pkg path is going to resemble + // something like "{importPath}/vendor/{importPath}" + for k := range lprog.AllPackages { + realPkgPath := k.Path() + + if strings.Contains(realPkgPath, "vendor/"+importPath) { + importPath = realPkgPath + } + } + + pkgInfo := lprog.Package(importPath) + + if pkgInfo == nil { + return nil, fmt.Errorf("package was nil") + } + + // TODO: possibly cache pkgInfo since it's an expensive operation + for i := range pkgInfo.Files { + for _, astDeclaration := range pkgInfo.Files[i].Decls { + generalDeclaration, ok := astDeclaration.(*ast.GenDecl) + if ok && generalDeclaration.Tok == token.TYPE { + for _, astSpec := range generalDeclaration.Specs { + typeSpec, ok := astSpec.(*ast.TypeSpec) + if ok { + if typeSpec.Name.String() == typeName { + return typeSpec, nil + } + } + } + } + } + } + + return nil, fmt.Errorf("type spec not found") +} + +var responsePattern = regexp.MustCompile(`^([\w,]+)\s+([\w{}]+)\s+([\w\-.\\{}=,\[\s\]]+)\s*(".*)?`) + +// ResponseType{data1=Type1,data2=Type2}. +var combinedPattern = regexp.MustCompile(`^([\w\-./\[\]]+){(.*)}$`) + +func (operation *Operation) parseObjectSchema(refType string, astFile *ast.File) (*spec.Schema, error) { + return parseObjectSchema(operation.parser, refType, astFile) +} + +func parseObjectSchema(parser *Parser, refType string, astFile *ast.File) (*spec.Schema, error) { + switch { + case refType == NIL: + return nil, nil + case refType == INTERFACE: + return PrimitiveSchema(OBJECT), nil + case refType == ANY: + return PrimitiveSchema(OBJECT), nil + case IsGolangPrimitiveType(refType): + refType = TransToValidSchemeType(refType) + + return PrimitiveSchema(refType), nil + case IsPrimitiveType(refType): + return PrimitiveSchema(refType), nil + case strings.HasPrefix(refType, "[]"): + schema, err := parseObjectSchema(parser, refType[2:], astFile) + if err != nil { + return nil, err + } + + return spec.ArrayProperty(schema), nil + case strings.HasPrefix(refType, "map["): + // ignore key type + idx := strings.Index(refType, "]") + if idx < 0 { + return nil, fmt.Errorf("invalid type: %s", refType) + } + + refType = refType[idx+1:] + if refType == INTERFACE || refType == ANY { + return spec.MapProperty(nil), nil + } + + schema, err := parseObjectSchema(parser, refType, astFile) + if err != nil { + return nil, err + } + + return spec.MapProperty(schema), nil + case strings.Contains(refType, "{"): + return parseCombinedObjectSchema(parser, refType, astFile) + default: + if parser != nil { // checking refType has existing in 'TypeDefinitions' + schema, err := parser.getTypeSchema(refType, astFile, true) + if err != nil { + return nil, err + } + + return schema, nil + } + + return RefSchema(refType), nil + } +} + +func parseFields(s string) []string { + nestLevel := 0 + + return strings.FieldsFunc(s, func(char rune) bool { + if char == '{' { + nestLevel++ + + return false + } else if char == '}' { + nestLevel-- + + return false + } + + return char == ',' && nestLevel == 0 + }) +} + +func parseCombinedObjectSchema(parser *Parser, refType string, astFile *ast.File) (*spec.Schema, error) { + matches := combinedPattern.FindStringSubmatch(refType) + if len(matches) != 3 { + return 
nil, fmt.Errorf("invalid type: %s", refType) + } + + schema, err := parseObjectSchema(parser, matches[1], astFile) + if err != nil { + return nil, err + } + + fields, props := parseFields(matches[2]), map[string]spec.Schema{} + + for _, field := range fields { + keyVal := strings.SplitN(field, "=", 2) + if len(keyVal) == 2 { + schema, err := parseObjectSchema(parser, keyVal[1], astFile) + if err != nil { + return nil, err + } + + if schema == nil { + schema = PrimitiveSchema(OBJECT) + } + + props[keyVal[0]] = *schema + } + } + + if len(props) == 0 { + return schema, nil + } + + if schema.Ref.GetURL() == nil && len(schema.Type) > 0 && schema.Type[0] == OBJECT && len(schema.Properties) == 0 && schema.AdditionalProperties == nil { + schema.Properties = props + return schema, nil + } + + return spec.ComposedSchema(*schema, spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{OBJECT}, + Properties: props, + }, + }), nil +} + +func (operation *Operation) parseAPIObjectSchema(commentLine, schemaType, refType string, astFile *ast.File) (*spec.Schema, error) { + if strings.HasSuffix(refType, ",") && strings.Contains(refType, "[") { + // regexp may have broken generic syntax. find closing bracket and add it back + allMatchesLenOffset := strings.Index(commentLine, refType) + len(refType) + lostPartEndIdx := strings.Index(commentLine[allMatchesLenOffset:], "]") + if lostPartEndIdx >= 0 { + refType += commentLine[allMatchesLenOffset : allMatchesLenOffset+lostPartEndIdx+1] + } + } + + switch schemaType { + case OBJECT: + if !strings.HasPrefix(refType, "[]") { + return operation.parseObjectSchema(refType, astFile) + } + + refType = refType[2:] + + fallthrough + case ARRAY: + schema, err := operation.parseObjectSchema(refType, astFile) + if err != nil { + return nil, err + } + + return spec.ArrayProperty(schema), nil + default: + return PrimitiveSchema(schemaType), nil + } +} + +// ParseResponseComment parses comment for given `response` comment string. 
+func (operation *Operation) ParseResponseComment(commentLine string, astFile *ast.File) error { + matches := responsePattern.FindStringSubmatch(commentLine) + if len(matches) != 5 { + err := operation.ParseEmptyResponseComment(commentLine) + if err != nil { + return operation.ParseEmptyResponseOnly(commentLine) + } + + return err + } + + description := strings.Trim(matches[4], "\"") + + schema, err := operation.parseAPIObjectSchema(commentLine, strings.Trim(matches[2], "{}"), strings.TrimSpace(matches[3]), astFile) + if err != nil { + return err + } + + for _, codeStr := range strings.Split(matches[1], ",") { + if strings.EqualFold(codeStr, defaultTag) { + operation.DefaultResponse().WithSchema(schema).WithDescription(description) + + continue + } + + code, err := strconv.Atoi(codeStr) + if err != nil { + return fmt.Errorf("can not parse response comment \"%s\"", commentLine) + } + + resp := spec.NewResponse().WithSchema(schema).WithDescription(description) + if description == "" { + resp.WithDescription(http.StatusText(code)) + } + + operation.AddResponse(code, resp) + } + + return nil +} + +func newHeaderSpec(schemaType, description string) spec.Header { + return spec.Header{ + SimpleSchema: spec.SimpleSchema{ + Type: schemaType, + }, + HeaderProps: spec.HeaderProps{ + Description: description, + }, + VendorExtensible: spec.VendorExtensible{ + Extensions: nil, + }, + CommonValidations: spec.CommonValidations{ + Maximum: nil, + ExclusiveMaximum: false, + Minimum: nil, + ExclusiveMinimum: false, + MaxLength: nil, + MinLength: nil, + Pattern: "", + MaxItems: nil, + MinItems: nil, + UniqueItems: false, + MultipleOf: nil, + Enum: nil, + }, + } +} + +// ParseResponseHeaderComment parses comment for given `response header` comment string. +func (operation *Operation) ParseResponseHeaderComment(commentLine string, _ *ast.File) error { + matches := responsePattern.FindStringSubmatch(commentLine) + if len(matches) != 5 { + return fmt.Errorf("can not parse response comment \"%s\"", commentLine) + } + + header := newHeaderSpec(strings.Trim(matches[2], "{}"), strings.Trim(matches[4], "\"")) + + headerKey := strings.TrimSpace(matches[3]) + + if strings.EqualFold(matches[1], "all") { + if operation.Responses.Default != nil { + operation.Responses.Default.Headers[headerKey] = header + } + + if operation.Responses.StatusCodeResponses != nil { + for code, response := range operation.Responses.StatusCodeResponses { + response.Headers[headerKey] = header + operation.Responses.StatusCodeResponses[code] = response + } + } + + return nil + } + + for _, codeStr := range strings.Split(matches[1], ",") { + if strings.EqualFold(codeStr, defaultTag) { + if operation.Responses.Default != nil { + operation.Responses.Default.Headers[headerKey] = header + } + + continue + } + + code, err := strconv.Atoi(codeStr) + if err != nil { + return fmt.Errorf("can not parse response comment \"%s\"", commentLine) + } + + if operation.Responses.StatusCodeResponses != nil { + response, responseExist := operation.Responses.StatusCodeResponses[code] + if responseExist { + response.Headers[headerKey] = header + + operation.Responses.StatusCodeResponses[code] = response + } + } + } + + return nil +} + +var emptyResponsePattern = regexp.MustCompile(`([\w,]+)\s+"(.*)"`) + +// ParseEmptyResponseComment parse only comment out status code and description,eg: @Success 200 "it's ok". 
+func (operation *Operation) ParseEmptyResponseComment(commentLine string) error { + matches := emptyResponsePattern.FindStringSubmatch(commentLine) + if len(matches) != 3 { + return fmt.Errorf("can not parse response comment \"%s\"", commentLine) + } + + description := strings.Trim(matches[2], "\"") + + for _, codeStr := range strings.Split(matches[1], ",") { + if strings.EqualFold(codeStr, defaultTag) { + operation.DefaultResponse().WithDescription(description) + + continue + } + + code, err := strconv.Atoi(codeStr) + if err != nil { + return fmt.Errorf("can not parse response comment \"%s\"", commentLine) + } + + operation.AddResponse(code, spec.NewResponse().WithDescription(description)) + } + + return nil +} + +// ParseEmptyResponseOnly parse only comment out status code ,eg: @Success 200. +func (operation *Operation) ParseEmptyResponseOnly(commentLine string) error { + for _, codeStr := range strings.Split(commentLine, ",") { + if strings.EqualFold(codeStr, defaultTag) { + _ = operation.DefaultResponse() + + continue + } + + code, err := strconv.Atoi(codeStr) + if err != nil { + return fmt.Errorf("can not parse response comment \"%s\"", commentLine) + } + + operation.AddResponse(code, spec.NewResponse().WithDescription(http.StatusText(code))) + } + + return nil +} + +// DefaultResponse return the default response member pointer. +func (operation *Operation) DefaultResponse() *spec.Response { + if operation.Responses.Default == nil { + operation.Responses.Default = &spec.Response{ + ResponseProps: spec.ResponseProps{ + Description: "", + Headers: make(map[string]spec.Header), + }, + } + } + + return operation.Responses.Default +} + +// AddResponse add a response for a code. +func (operation *Operation) AddResponse(code int, response *spec.Response) { + if response.Headers == nil { + response.Headers = make(map[string]spec.Header) + } + + operation.Responses.StatusCodeResponses[code] = *response +} + +// createParameter returns swagger spec.Parameter for given paramType, description, paramName, schemaType, required. +func createParameter(paramType, description, paramName, objectType, schemaType string, required bool, enums []interface{}, collectionFormat string) spec.Parameter { + // //five possible parameter types. 
query, path, body, header, form + result := spec.Parameter{ + ParamProps: spec.ParamProps{ + Name: paramName, + Description: description, + Required: required, + In: paramType, + }, + } + + if paramType == "body" { + return result + } + + switch objectType { + case ARRAY: + result.Type = objectType + result.CollectionFormat = collectionFormat + result.Items = &spec.Items{ + CommonValidations: spec.CommonValidations{ + Enum: enums, + }, + SimpleSchema: spec.SimpleSchema{ + Type: schemaType, + }, + } + case PRIMITIVE, OBJECT: + result.Type = schemaType + result.Enum = enums + } + return result +} + +func getCodeExampleForSummary(summaryName string, dirPath string) ([]byte, error) { + dirEntries, err := os.ReadDir(dirPath) + if err != nil { + return nil, err + } + + for _, entry := range dirEntries { + if entry.IsDir() { + continue + } + + fileName := entry.Name() + + if !strings.Contains(fileName, ".json") { + continue + } + + if strings.Contains(fileName, summaryName) { + fullPath := filepath.Join(dirPath, fileName) + + commentInfo, err := os.ReadFile(fullPath) + if err != nil { + return nil, fmt.Errorf("Failed to read code example file %s error: %s ", fullPath, err) + } + + return commentInfo, nil + } + } + + return nil, fmt.Errorf("unable to find code example file for tag %s in the given directory", summaryName) +} diff --git a/vendor/github.com/swaggo/swag/package.go b/vendor/github.com/swaggo/swag/package.go new file mode 100644 index 0000000..6c3129e --- /dev/null +++ b/vendor/github.com/swaggo/swag/package.go @@ -0,0 +1,187 @@ +package swag + +import ( + "go/ast" + "go/token" + "reflect" + "strconv" + "strings" +) + +// PackageDefinitions files and definition in a package. +type PackageDefinitions struct { + // files in this package, map key is file's relative path starting package path + Files map[string]*ast.File + + // definitions in this package, map key is typeName + TypeDefinitions map[string]*TypeSpecDef + + // const variables in this package, map key is the name + ConstTable map[string]*ConstVariable + + // const variables in order in this package + OrderedConst []*ConstVariable + + // package name + Name string + + // package path + Path string +} + +// ConstVariableGlobalEvaluator an interface used to evaluate enums across packages +type ConstVariableGlobalEvaluator interface { + EvaluateConstValue(pkg *PackageDefinitions, cv *ConstVariable, recursiveStack map[string]struct{}) (interface{}, ast.Expr) + EvaluateConstValueByName(file *ast.File, pkgPath, constVariableName string, recursiveStack map[string]struct{}) (interface{}, ast.Expr) + FindTypeSpec(typeName string, file *ast.File) *TypeSpecDef +} + +// NewPackageDefinitions new a PackageDefinitions object +func NewPackageDefinitions(name, pkgPath string) *PackageDefinitions { + return &PackageDefinitions{ + Name: name, + Path: pkgPath, + Files: make(map[string]*ast.File), + TypeDefinitions: make(map[string]*TypeSpecDef), + ConstTable: make(map[string]*ConstVariable), + } +} + +// AddFile add a file +func (pkg *PackageDefinitions) AddFile(pkgPath string, file *ast.File) *PackageDefinitions { + pkg.Files[pkgPath] = file + return pkg +} + +// AddTypeSpec add a type spec. +func (pkg *PackageDefinitions) AddTypeSpec(name string, typeSpec *TypeSpecDef) *PackageDefinitions { + pkg.TypeDefinitions[name] = typeSpec + return pkg +} + +// AddConst add a const variable. 
+func (pkg *PackageDefinitions) AddConst(astFile *ast.File, valueSpec *ast.ValueSpec) *PackageDefinitions { + for i := 0; i < len(valueSpec.Names) && i < len(valueSpec.Values); i++ { + variable := &ConstVariable{ + Name: valueSpec.Names[i], + Type: valueSpec.Type, + Value: valueSpec.Values[i], + Comment: valueSpec.Comment, + File: astFile, + } + pkg.ConstTable[valueSpec.Names[i].Name] = variable + pkg.OrderedConst = append(pkg.OrderedConst, variable) + } + return pkg +} + +func (pkg *PackageDefinitions) evaluateConstValue(file *ast.File, iota int, expr ast.Expr, globalEvaluator ConstVariableGlobalEvaluator, recursiveStack map[string]struct{}) (interface{}, ast.Expr) { + switch valueExpr := expr.(type) { + case *ast.Ident: + if valueExpr.Name == "iota" { + return iota, nil + } + if pkg.ConstTable != nil { + if cv, ok := pkg.ConstTable[valueExpr.Name]; ok { + return globalEvaluator.EvaluateConstValue(pkg, cv, recursiveStack) + } + } + case *ast.SelectorExpr: + pkgIdent, ok := valueExpr.X.(*ast.Ident) + if !ok { + return nil, nil + } + return globalEvaluator.EvaluateConstValueByName(file, pkgIdent.Name, valueExpr.Sel.Name, recursiveStack) + case *ast.BasicLit: + switch valueExpr.Kind { + case token.INT: + // handle underscored number, such as 1_000_000 + if strings.ContainsRune(valueExpr.Value, '_') { + valueExpr.Value = strings.Replace(valueExpr.Value, "_", "", -1) + } + if len(valueExpr.Value) >= 2 && valueExpr.Value[0] == '0' { + var start, base = 2, 8 + switch valueExpr.Value[1] { + case 'x', 'X': + //hex + base = 16 + case 'b', 'B': + //binary + base = 2 + default: + //octet + start = 1 + } + if x, err := strconv.ParseInt(valueExpr.Value[start:], base, 64); err == nil { + return int(x), nil + } else if x, err := strconv.ParseUint(valueExpr.Value[start:], base, 64); err == nil { + return x, nil + } else { + panic(err) + } + } + + //a basic literal integer is int type in default, or must have an explicit converting type in front + if x, err := strconv.ParseInt(valueExpr.Value, 10, 64); err == nil { + return int(x), nil + } else if x, err := strconv.ParseUint(valueExpr.Value, 10, 64); err == nil { + return x, nil + } else { + panic(err) + } + case token.STRING: + if valueExpr.Value[0] == '`' { + return valueExpr.Value[1 : len(valueExpr.Value)-1], nil + } + return EvaluateEscapedString(valueExpr.Value[1 : len(valueExpr.Value)-1]), nil + case token.CHAR: + return EvaluateEscapedChar(valueExpr.Value[1 : len(valueExpr.Value)-1]), nil + } + case *ast.UnaryExpr: + x, evalType := pkg.evaluateConstValue(file, iota, valueExpr.X, globalEvaluator, recursiveStack) + if x == nil { + return x, evalType + } + return EvaluateUnary(x, valueExpr.Op, evalType) + case *ast.BinaryExpr: + x, evalTypex := pkg.evaluateConstValue(file, iota, valueExpr.X, globalEvaluator, recursiveStack) + y, evalTypey := pkg.evaluateConstValue(file, iota, valueExpr.Y, globalEvaluator, recursiveStack) + if x == nil || y == nil { + return nil, nil + } + return EvaluateBinary(x, y, valueExpr.Op, evalTypex, evalTypey) + case *ast.ParenExpr: + return pkg.evaluateConstValue(file, iota, valueExpr.X, globalEvaluator, recursiveStack) + case *ast.CallExpr: + //data conversion + if len(valueExpr.Args) != 1 { + return nil, nil + } + arg := valueExpr.Args[0] + if ident, ok := valueExpr.Fun.(*ast.Ident); ok { + name := ident.Name + if name == "uintptr" { + name = "uint" + } + value, _ := pkg.evaluateConstValue(file, iota, arg, globalEvaluator, recursiveStack) + if IsGolangPrimitiveType(name) { + value = EvaluateDataConversion(value, name) + 
return value, nil + } else if name == "len" { + return reflect.ValueOf(value).Len(), nil + } + typeDef := globalEvaluator.FindTypeSpec(name, file) + if typeDef == nil { + return nil, nil + } + return value, valueExpr.Fun + } else if selector, ok := valueExpr.Fun.(*ast.SelectorExpr); ok { + typeDef := globalEvaluator.FindTypeSpec(fullTypeName(selector.X.(*ast.Ident).Name, selector.Sel.Name), file) + if typeDef == nil { + return nil, nil + } + return arg, typeDef.TypeSpec.Type + } + } + return nil, nil +} diff --git a/vendor/github.com/swaggo/swag/packages.go b/vendor/github.com/swaggo/swag/packages.go new file mode 100644 index 0000000..69a1b05 --- /dev/null +++ b/vendor/github.com/swaggo/swag/packages.go @@ -0,0 +1,574 @@ +package swag + +import ( + "fmt" + "go/ast" + goparser "go/parser" + "go/token" + "os" + "path/filepath" + "runtime" + "sort" + "strings" + + "golang.org/x/tools/go/loader" +) + +// PackagesDefinitions map[package import path]*PackageDefinitions. +type PackagesDefinitions struct { + files map[*ast.File]*AstFileInfo + packages map[string]*PackageDefinitions + uniqueDefinitions map[string]*TypeSpecDef + parseDependency ParseFlag + debug Debugger +} + +// NewPackagesDefinitions create object PackagesDefinitions. +func NewPackagesDefinitions() *PackagesDefinitions { + return &PackagesDefinitions{ + files: make(map[*ast.File]*AstFileInfo), + packages: make(map[string]*PackageDefinitions), + uniqueDefinitions: make(map[string]*TypeSpecDef), + } +} + +// ParseFile parse a source file. +func (pkgDefs *PackagesDefinitions) ParseFile(packageDir, path string, src interface{}, flag ParseFlag) error { + // positions are relative to FileSet + fileSet := token.NewFileSet() + astFile, err := goparser.ParseFile(fileSet, path, src, goparser.ParseComments) + if err != nil { + return fmt.Errorf("failed to parse file %s, error:%+v", path, err) + } + return pkgDefs.collectAstFile(fileSet, packageDir, path, astFile, flag) +} + +// collectAstFile collect ast.file. +func (pkgDefs *PackagesDefinitions) collectAstFile(fileSet *token.FileSet, packageDir, path string, astFile *ast.File, flag ParseFlag) error { + if pkgDefs.files == nil { + pkgDefs.files = make(map[*ast.File]*AstFileInfo) + } + + if pkgDefs.packages == nil { + pkgDefs.packages = make(map[string]*PackageDefinitions) + } + + // return without storing the file if we lack a packageDir + if packageDir == "" { + return nil + } + + path, err := filepath.Abs(path) + if err != nil { + return err + } + + dependency, ok := pkgDefs.packages[packageDir] + if ok { + // return without storing the file if it already exists + _, exists := dependency.Files[path] + if exists { + return nil + } + + dependency.Files[path] = astFile + } else { + pkgDefs.packages[packageDir] = NewPackageDefinitions(astFile.Name.Name, packageDir).AddFile(path, astFile) + } + + pkgDefs.files[astFile] = &AstFileInfo{ + FileSet: fileSet, + File: astFile, + Path: path, + PackagePath: packageDir, + ParseFlag: flag, + } + + return nil +} + +// RangeFiles for range the collection of ast.File in alphabetic order. +func (pkgDefs *PackagesDefinitions) RangeFiles(handle func(info *AstFileInfo) error) error { + sortedFiles := make([]*AstFileInfo, 0, len(pkgDefs.files)) + for _, info := range pkgDefs.files { + // ignore package path prefix with 'vendor' or $GOROOT, + // because the router info of api will not be included these files. 
+ if strings.HasPrefix(info.PackagePath, "vendor") || strings.HasPrefix(info.Path, runtime.GOROOT()) { + continue + } + sortedFiles = append(sortedFiles, info) + } + + sort.Slice(sortedFiles, func(i, j int) bool { + return strings.Compare(sortedFiles[i].Path, sortedFiles[j].Path) < 0 + }) + + for _, info := range sortedFiles { + err := handle(info) + if err != nil { + return err + } + } + + return nil +} + +// ParseTypes parse types +// @Return parsed definitions. +func (pkgDefs *PackagesDefinitions) ParseTypes() (map[*TypeSpecDef]*Schema, error) { + parsedSchemas := make(map[*TypeSpecDef]*Schema) + for astFile, info := range pkgDefs.files { + pkgDefs.parseTypesFromFile(astFile, info.PackagePath, parsedSchemas) + pkgDefs.parseFunctionScopedTypesFromFile(astFile, info.PackagePath, parsedSchemas) + } + pkgDefs.removeAllNotUniqueTypes() + pkgDefs.evaluateAllConstVariables() + pkgDefs.collectConstEnums(parsedSchemas) + return parsedSchemas, nil +} + +func (pkgDefs *PackagesDefinitions) parseTypesFromFile(astFile *ast.File, packagePath string, parsedSchemas map[*TypeSpecDef]*Schema) { + for _, astDeclaration := range astFile.Decls { + generalDeclaration, ok := astDeclaration.(*ast.GenDecl) + if !ok { + continue + } + if generalDeclaration.Tok == token.TYPE { + for _, astSpec := range generalDeclaration.Specs { + if typeSpec, ok := astSpec.(*ast.TypeSpec); ok { + typeSpecDef := &TypeSpecDef{ + PkgPath: packagePath, + File: astFile, + TypeSpec: typeSpec, + } + + if idt, ok := typeSpec.Type.(*ast.Ident); ok && IsGolangPrimitiveType(idt.Name) && parsedSchemas != nil { + parsedSchemas[typeSpecDef] = &Schema{ + PkgPath: typeSpecDef.PkgPath, + Name: astFile.Name.Name, + Schema: PrimitiveSchema(TransToValidSchemeType(idt.Name)), + } + } + + if pkgDefs.uniqueDefinitions == nil { + pkgDefs.uniqueDefinitions = make(map[string]*TypeSpecDef) + } + + fullName := typeSpecDef.TypeName() + + anotherTypeDef, ok := pkgDefs.uniqueDefinitions[fullName] + if ok { + if anotherTypeDef == nil { + typeSpecDef.NotUnique = true + fullName = typeSpecDef.TypeName() + pkgDefs.uniqueDefinitions[fullName] = typeSpecDef + } else if typeSpecDef.PkgPath != anotherTypeDef.PkgPath { + pkgDefs.uniqueDefinitions[fullName] = nil + anotherTypeDef.NotUnique = true + pkgDefs.uniqueDefinitions[anotherTypeDef.TypeName()] = anotherTypeDef + typeSpecDef.NotUnique = true + fullName = typeSpecDef.TypeName() + pkgDefs.uniqueDefinitions[fullName] = typeSpecDef + } + } else { + pkgDefs.uniqueDefinitions[fullName] = typeSpecDef + } + + if pkgDefs.packages[typeSpecDef.PkgPath] == nil { + pkgDefs.packages[typeSpecDef.PkgPath] = NewPackageDefinitions(astFile.Name.Name, typeSpecDef.PkgPath).AddTypeSpec(typeSpecDef.Name(), typeSpecDef) + } else if _, ok = pkgDefs.packages[typeSpecDef.PkgPath].TypeDefinitions[typeSpecDef.Name()]; !ok { + pkgDefs.packages[typeSpecDef.PkgPath].AddTypeSpec(typeSpecDef.Name(), typeSpecDef) + } + } + } + } else if generalDeclaration.Tok == token.CONST { + // collect consts + pkgDefs.collectConstVariables(astFile, packagePath, generalDeclaration) + } + } +} + +func (pkgDefs *PackagesDefinitions) parseFunctionScopedTypesFromFile(astFile *ast.File, packagePath string, parsedSchemas map[*TypeSpecDef]*Schema) { + for _, astDeclaration := range astFile.Decls { + funcDeclaration, ok := astDeclaration.(*ast.FuncDecl) + if ok && funcDeclaration.Body != nil { + for _, stmt := range funcDeclaration.Body.List { + if declStmt, ok := (stmt).(*ast.DeclStmt); ok { + if genDecl, ok := (declStmt.Decl).(*ast.GenDecl); ok && genDecl.Tok == 
token.TYPE { + for _, astSpec := range genDecl.Specs { + if typeSpec, ok := astSpec.(*ast.TypeSpec); ok { + typeSpecDef := &TypeSpecDef{ + PkgPath: packagePath, + File: astFile, + TypeSpec: typeSpec, + ParentSpec: astDeclaration, + } + + if idt, ok := typeSpec.Type.(*ast.Ident); ok && IsGolangPrimitiveType(idt.Name) && parsedSchemas != nil { + parsedSchemas[typeSpecDef] = &Schema{ + PkgPath: typeSpecDef.PkgPath, + Name: astFile.Name.Name, + Schema: PrimitiveSchema(TransToValidSchemeType(idt.Name)), + } + } + + if pkgDefs.uniqueDefinitions == nil { + pkgDefs.uniqueDefinitions = make(map[string]*TypeSpecDef) + } + + fullName := typeSpecDef.TypeName() + + anotherTypeDef, ok := pkgDefs.uniqueDefinitions[fullName] + if ok { + if anotherTypeDef == nil { + typeSpecDef.NotUnique = true + fullName = typeSpecDef.TypeName() + pkgDefs.uniqueDefinitions[fullName] = typeSpecDef + } else if typeSpecDef.PkgPath != anotherTypeDef.PkgPath { + pkgDefs.uniqueDefinitions[fullName] = nil + anotherTypeDef.NotUnique = true + pkgDefs.uniqueDefinitions[anotherTypeDef.TypeName()] = anotherTypeDef + typeSpecDef.NotUnique = true + fullName = typeSpecDef.TypeName() + pkgDefs.uniqueDefinitions[fullName] = typeSpecDef + } + } else { + pkgDefs.uniqueDefinitions[fullName] = typeSpecDef + } + + if pkgDefs.packages[typeSpecDef.PkgPath] == nil { + pkgDefs.packages[typeSpecDef.PkgPath] = NewPackageDefinitions(astFile.Name.Name, typeSpecDef.PkgPath).AddTypeSpec(fullName, typeSpecDef) + } else if _, ok = pkgDefs.packages[typeSpecDef.PkgPath].TypeDefinitions[fullName]; !ok { + pkgDefs.packages[typeSpecDef.PkgPath].AddTypeSpec(fullName, typeSpecDef) + } + } + } + + } + } + } + } + } +} + +func (pkgDefs *PackagesDefinitions) collectConstVariables(astFile *ast.File, packagePath string, generalDeclaration *ast.GenDecl) { + pkg, ok := pkgDefs.packages[packagePath] + if !ok { + pkg = NewPackageDefinitions(astFile.Name.Name, packagePath) + pkgDefs.packages[packagePath] = pkg + } + + var lastValueSpec *ast.ValueSpec + for _, astSpec := range generalDeclaration.Specs { + valueSpec, ok := astSpec.(*ast.ValueSpec) + if !ok { + continue + } + if len(valueSpec.Names) == 1 && len(valueSpec.Values) == 1 { + lastValueSpec = valueSpec + } else if len(valueSpec.Names) == 1 && len(valueSpec.Values) == 0 && valueSpec.Type == nil && lastValueSpec != nil { + valueSpec.Type = lastValueSpec.Type + valueSpec.Values = lastValueSpec.Values + } + pkg.AddConst(astFile, valueSpec) + } +} + +func (pkgDefs *PackagesDefinitions) evaluateAllConstVariables() { + for _, pkg := range pkgDefs.packages { + for _, constVar := range pkg.OrderedConst { + pkgDefs.EvaluateConstValue(pkg, constVar, nil) + } + } +} + +// EvaluateConstValue evaluate a const variable. 
+func (pkgDefs *PackagesDefinitions) EvaluateConstValue(pkg *PackageDefinitions, cv *ConstVariable, recursiveStack map[string]struct{}) (interface{}, ast.Expr) { + if expr, ok := cv.Value.(ast.Expr); ok { + defer func() { + if err := recover(); err != nil { + if fi, ok := pkgDefs.files[cv.File]; ok { + pos := fi.FileSet.Position(cv.Name.NamePos) + pkgDefs.debug.Printf("warning: failed to evaluate const %s at %s:%d:%d, %v", cv.Name.Name, fi.Path, pos.Line, pos.Column, err) + } + } + }() + if recursiveStack == nil { + recursiveStack = make(map[string]struct{}) + } + fullConstName := fullTypeName(pkg.Path, cv.Name.Name) + if _, ok = recursiveStack[fullConstName]; ok { + return nil, nil + } + recursiveStack[fullConstName] = struct{}{} + + value, evalType := pkg.evaluateConstValue(cv.File, cv.Name.Obj.Data.(int), expr, pkgDefs, recursiveStack) + if cv.Type == nil && evalType != nil { + cv.Type = evalType + } + if value != nil { + cv.Value = value + } + return value, cv.Type + } + return cv.Value, cv.Type +} + +// EvaluateConstValueByName evaluate a const variable by name. +func (pkgDefs *PackagesDefinitions) EvaluateConstValueByName(file *ast.File, pkgName, constVariableName string, recursiveStack map[string]struct{}) (interface{}, ast.Expr) { + matchedPkgPaths, externalPkgPaths := pkgDefs.findPackagePathFromImports(pkgName, file) + for _, pkgPath := range matchedPkgPaths { + if pkg, ok := pkgDefs.packages[pkgPath]; ok { + if cv, ok := pkg.ConstTable[constVariableName]; ok { + return pkgDefs.EvaluateConstValue(pkg, cv, recursiveStack) + } + } + } + if pkgDefs.parseDependency > 0 { + for _, pkgPath := range externalPkgPaths { + if err := pkgDefs.loadExternalPackage(pkgPath); err == nil { + if pkg, ok := pkgDefs.packages[pkgPath]; ok { + if cv, ok := pkg.ConstTable[constVariableName]; ok { + return pkgDefs.EvaluateConstValue(pkg, cv, recursiveStack) + } + } + } + } + } + return nil, nil +} + +func (pkgDefs *PackagesDefinitions) collectConstEnums(parsedSchemas map[*TypeSpecDef]*Schema) { + for _, pkg := range pkgDefs.packages { + for _, constVar := range pkg.OrderedConst { + if constVar.Type == nil { + continue + } + ident, ok := constVar.Type.(*ast.Ident) + if !ok || IsGolangPrimitiveType(ident.Name) { + continue + } + typeDef, ok := pkg.TypeDefinitions[ident.Name] + if !ok { + continue + } + + //delete it from parsed schemas, and will parse it again + if _, ok = parsedSchemas[typeDef]; ok { + delete(parsedSchemas, typeDef) + } + + if typeDef.Enums == nil { + typeDef.Enums = make([]EnumValue, 0) + } + + name := constVar.Name.Name + if _, ok = constVar.Value.(ast.Expr); ok { + continue + } + + enumValue := EnumValue{ + key: name, + Value: constVar.Value, + } + if constVar.Comment != nil && len(constVar.Comment.List) > 0 { + enumValue.Comment = constVar.Comment.List[0].Text + enumValue.Comment = strings.TrimPrefix(enumValue.Comment, "//") + enumValue.Comment = strings.TrimPrefix(enumValue.Comment, "/*") + enumValue.Comment = strings.TrimSuffix(enumValue.Comment, "*/") + enumValue.Comment = strings.TrimSpace(enumValue.Comment) + } + typeDef.Enums = append(typeDef.Enums, enumValue) + } + } +} + +func (pkgDefs *PackagesDefinitions) removeAllNotUniqueTypes() { + for key, ud := range pkgDefs.uniqueDefinitions { + if ud == nil { + delete(pkgDefs.uniqueDefinitions, key) + } + } +} + +func (pkgDefs *PackagesDefinitions) findTypeSpec(pkgPath string, typeName string) *TypeSpecDef { + if pkgDefs.packages == nil { + return nil + } + + pd, found := pkgDefs.packages[pkgPath] + if found { + typeSpec, ok := 
pd.TypeDefinitions[typeName] + if ok { + return typeSpec + } + } + + return nil +} + +func (pkgDefs *PackagesDefinitions) loadExternalPackage(importPath string) error { + cwd, err := os.Getwd() + if err != nil { + return err + } + + conf := loader.Config{ + ParserMode: goparser.ParseComments, + Cwd: cwd, + } + + conf.Import(importPath) + + loaderProgram, err := conf.Load() + if err != nil { + return err + } + + for _, info := range loaderProgram.AllPackages { + pkgPath := strings.TrimPrefix(info.Pkg.Path(), "vendor/") + for _, astFile := range info.Files { + pkgDefs.parseTypesFromFile(astFile, pkgPath, nil) + } + } + + return nil +} + +// findPackagePathFromImports finds out the package path of a package via ranging imports of an ast.File +// @pkg the name of the target package +// @file current ast.File in which to search imports +// @return the package paths of a package of @pkg. +func (pkgDefs *PackagesDefinitions) findPackagePathFromImports(pkg string, file *ast.File) (matchedPkgPaths, externalPkgPaths []string) { + if file == nil { + return + } + + if strings.ContainsRune(pkg, '.') { + pkg = strings.Split(pkg, ".")[0] + } + + matchLastPathPart := func(pkgPath string) bool { + paths := strings.Split(pkgPath, "/") + return paths[len(paths)-1] == pkg + } + + // prior to match named package + for _, imp := range file.Imports { + path := strings.Trim(imp.Path.Value, `"`) + if imp.Name != nil { + if imp.Name.Name == pkg { + // if name match, break loop and return + _, ok := pkgDefs.packages[path] + if ok { + matchedPkgPaths = []string{path} + externalPkgPaths = nil + } else { + externalPkgPaths = []string{path} + matchedPkgPaths = nil + } + break + } else if imp.Name.Name == "_" && len(pkg) > 0 { + //for unused types + pd, ok := pkgDefs.packages[path] + if ok { + if pd.Name == pkg { + matchedPkgPaths = append(matchedPkgPaths, path) + } + } else if matchLastPathPart(path) { + externalPkgPaths = append(externalPkgPaths, path) + } + } else if imp.Name.Name == "." && len(pkg) == 0 { + _, ok := pkgDefs.packages[path] + if ok { + matchedPkgPaths = append(matchedPkgPaths, path) + } else if len(pkg) == 0 || matchLastPathPart(path) { + externalPkgPaths = append(externalPkgPaths, path) + } + } + } else if pkgDefs.packages != nil && len(pkg) > 0 { + pd, ok := pkgDefs.packages[path] + if ok { + if pd.Name == pkg { + matchedPkgPaths = append(matchedPkgPaths, path) + } + } else if matchLastPathPart(path) { + externalPkgPaths = append(externalPkgPaths, path) + } + } + } + + if len(pkg) == 0 || file.Name.Name == pkg { + matchedPkgPaths = append(matchedPkgPaths, pkgDefs.files[file].PackagePath) + } + + return +} + +func (pkgDefs *PackagesDefinitions) findTypeSpecFromPackagePaths(matchedPkgPaths, externalPkgPaths []string, name string) (typeDef *TypeSpecDef) { + if pkgDefs.parseDependency > 0 { + for _, pkgPath := range externalPkgPaths { + if err := pkgDefs.loadExternalPackage(pkgPath); err == nil { + typeDef = pkgDefs.findTypeSpec(pkgPath, name) + if typeDef != nil { + return typeDef + } + } + } + } + + for _, pkgPath := range matchedPkgPaths { + typeDef = pkgDefs.findTypeSpec(pkgPath, name) + if typeDef != nil { + return typeDef + } + } + + return typeDef +} + +// FindTypeSpec finds out TypeSpecDef of a type by typeName +// @typeName the name of the target type, if it starts with a package name, find its own package path from imports on top of @file +// @file the ast.file in which @typeName is used +// @pkgPath the package path of @file. 
+func (pkgDefs *PackagesDefinitions) FindTypeSpec(typeName string, file *ast.File) *TypeSpecDef { + if IsGolangPrimitiveType(typeName) { + return nil + } + + if file == nil { // for test + return pkgDefs.uniqueDefinitions[typeName] + } + + parts := strings.Split(strings.Split(typeName, "[")[0], ".") + if len(parts) > 1 { + pkgPaths, externalPkgPaths := pkgDefs.findPackagePathFromImports(parts[0], file) + if len(externalPkgPaths) == 0 || pkgDefs.parseDependency == ParseNone { + typeDef, ok := pkgDefs.uniqueDefinitions[typeName] + if ok { + return typeDef + } + } + typeDef := pkgDefs.findTypeSpecFromPackagePaths(pkgPaths, externalPkgPaths, parts[1]) + return pkgDefs.parametrizeGenericType(file, typeDef, typeName) + } + + typeDef, ok := pkgDefs.uniqueDefinitions[fullTypeName(file.Name.Name, typeName)] + if ok { + return typeDef + } + + //in case that comment //@name renamed the type with a name without a dot + typeDef, ok = pkgDefs.uniqueDefinitions[typeName] + if ok { + return typeDef + } + + name := parts[0] + typeDef, ok = pkgDefs.uniqueDefinitions[fullTypeName(file.Name.Name, name)] + if !ok { + pkgPaths, externalPkgPaths := pkgDefs.findPackagePathFromImports("", file) + typeDef = pkgDefs.findTypeSpecFromPackagePaths(pkgPaths, externalPkgPaths, name) + } + return pkgDefs.parametrizeGenericType(file, typeDef, typeName) +} diff --git a/vendor/github.com/swaggo/swag/parser.go b/vendor/github.com/swaggo/swag/parser.go new file mode 100644 index 0000000..604f827 --- /dev/null +++ b/vendor/github.com/swaggo/swag/parser.go @@ -0,0 +1,1835 @@ +package swag + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "go/ast" + "go/build" + goparser "go/parser" + "go/token" + "log" + "net/http" + "os" + "os/exec" + "path/filepath" + "reflect" + "sort" + "strconv" + "strings" + + "github.com/KyleBanks/depth" + "github.com/go-openapi/spec" +) + +const ( + // CamelCase indicates using CamelCase strategy for struct field. + CamelCase = "camelcase" + + // PascalCase indicates using PascalCase strategy for struct field. + PascalCase = "pascalcase" + + // SnakeCase indicates using SnakeCase strategy for struct field. + SnakeCase = "snakecase" + + idAttr = "@id" + acceptAttr = "@accept" + produceAttr = "@produce" + paramAttr = "@param" + successAttr = "@success" + failureAttr = "@failure" + responseAttr = "@response" + headerAttr = "@header" + tagsAttr = "@tags" + routerAttr = "@router" + deprecatedRouterAttr = "@deprecatedrouter" + summaryAttr = "@summary" + deprecatedAttr = "@deprecated" + securityAttr = "@security" + titleAttr = "@title" + conNameAttr = "@contact.name" + conURLAttr = "@contact.url" + conEmailAttr = "@contact.email" + licNameAttr = "@license.name" + licURLAttr = "@license.url" + versionAttr = "@version" + descriptionAttr = "@description" + descriptionMarkdownAttr = "@description.markdown" + secBasicAttr = "@securitydefinitions.basic" + secAPIKeyAttr = "@securitydefinitions.apikey" + secApplicationAttr = "@securitydefinitions.oauth2.application" + secImplicitAttr = "@securitydefinitions.oauth2.implicit" + secPasswordAttr = "@securitydefinitions.oauth2.password" + secAccessCodeAttr = "@securitydefinitions.oauth2.accesscode" + tosAttr = "@termsofservice" + extDocsDescAttr = "@externaldocs.description" + extDocsURLAttr = "@externaldocs.url" + xCodeSamplesAttr = "@x-codesamples" + scopeAttrPrefix = "@scope." 
+ stateAttr = "@state" +) + +// ParseFlag determine what to parse +type ParseFlag int + +const ( + // ParseNone parse nothing + ParseNone ParseFlag = 0x00 + // ParseModels parse models + ParseModels = 0x01 + // ParseOperations parse operations + ParseOperations = 0x02 + // ParseAll parse operations and models + ParseAll = ParseOperations | ParseModels +) + +var ( + // ErrRecursiveParseStruct recursively parsing struct. + ErrRecursiveParseStruct = errors.New("recursively parsing struct") + + // ErrFuncTypeField field type is func. + ErrFuncTypeField = errors.New("field type is func") + + // ErrFailedConvertPrimitiveType Failed to convert for swag to interpretable type. + ErrFailedConvertPrimitiveType = errors.New("swag property: failed convert primitive type") + + // ErrSkippedField .swaggo specifies field should be skipped. + ErrSkippedField = errors.New("field is skipped by global overrides") +) + +var allMethod = map[string]struct{}{ + http.MethodGet: {}, + http.MethodPut: {}, + http.MethodPost: {}, + http.MethodDelete: {}, + http.MethodOptions: {}, + http.MethodHead: {}, + http.MethodPatch: {}, +} + +// Parser implements a parser for Go source files. +type Parser struct { + // swagger represents the root document object for the API specification + swagger *spec.Swagger + + // packages store entities of APIs, definitions, file, package path etc. and their relations + packages *PackagesDefinitions + + // parsedSchemas store schemas which have been parsed from ast.TypeSpec + parsedSchemas map[*TypeSpecDef]*Schema + + // outputSchemas store schemas which will be export to swagger + outputSchemas map[*TypeSpecDef]*Schema + + // PropNamingStrategy naming strategy + PropNamingStrategy string + + // ParseVendor parse vendor folder + ParseVendor bool + + // ParseDependencies whether swag should be parse outside dependency folder: 0 none, 1 models, 2 operations, 3 all + ParseDependency ParseFlag + + // ParseInternal whether swag should parse internal packages + ParseInternal bool + + // Strict whether swag should error or warn when it detects cases which are most likely user errors + Strict bool + + // RequiredByDefault set validation required for all fields by default + RequiredByDefault bool + + // structStack stores full names of the structures that were already parsed or are being parsed now + structStack []*TypeSpecDef + + // markdownFileDir holds the path to the folder, where markdown files are stored + markdownFileDir string + + // codeExampleFilesDir holds path to the folder, where code example files are stored + codeExampleFilesDir string + + // collectionFormatInQuery set the default collectionFormat otherwise then 'csv' for array in query params + collectionFormatInQuery string + + // excludes excludes dirs and files in SearchDir + excludes map[string]struct{} + + // packagePrefix is a list of package path prefixes, packages that do not + // match any one of them will be excluded when searching. + packagePrefix []string + + // tells parser to include only specific extension + parseExtension string + + // debugging output goes here + debug Debugger + + // fieldParserFactory create FieldParser + fieldParserFactory FieldParserFactory + + // Overrides allows global replacements of types. A blank replacement will be skipped. 
+ Overrides map[string]string + + // parseGoList whether swag use go list to parse dependency + parseGoList bool + + // tags to filter the APIs after + tags map[string]struct{} + + // HostState is the state of the host + HostState string +} + +// FieldParserFactory create FieldParser. +type FieldParserFactory func(ps *Parser, field *ast.Field) FieldParser + +// FieldParser parse struct field. +type FieldParser interface { + ShouldSkip() bool + FieldName() (string, error) + FormName() string + HeaderName() string + PathName() string + CustomSchema() (*spec.Schema, error) + ComplementSchema(schema *spec.Schema) error + IsRequired() (bool, error) +} + +// Debugger is the interface that wraps the basic Printf method. +type Debugger interface { + Printf(format string, v ...interface{}) +} + +// New creates a new Parser with default properties. +func New(options ...func(*Parser)) *Parser { + parser := &Parser{ + swagger: &spec.Swagger{ + SwaggerProps: spec.SwaggerProps{ + Info: &spec.Info{ + InfoProps: spec.InfoProps{ + Contact: &spec.ContactInfo{}, + License: nil, + }, + VendorExtensible: spec.VendorExtensible{ + Extensions: spec.Extensions{}, + }, + }, + Paths: &spec.Paths{ + Paths: make(map[string]spec.PathItem), + VendorExtensible: spec.VendorExtensible{ + Extensions: nil, + }, + }, + Definitions: make(map[string]spec.Schema), + SecurityDefinitions: make(map[string]*spec.SecurityScheme), + }, + VendorExtensible: spec.VendorExtensible{ + Extensions: nil, + }, + }, + packages: NewPackagesDefinitions(), + debug: log.New(os.Stdout, "", log.LstdFlags), + parsedSchemas: make(map[*TypeSpecDef]*Schema), + outputSchemas: make(map[*TypeSpecDef]*Schema), + excludes: make(map[string]struct{}), + tags: make(map[string]struct{}), + fieldParserFactory: newTagBaseFieldParser, + Overrides: make(map[string]string), + } + + for _, option := range options { + option(parser) + } + + parser.packages.debug = parser.debug + + return parser +} + +// SetParseDependency sets whether to parse the dependent packages. +func SetParseDependency(parseDependency int) func(*Parser) { + return func(p *Parser) { + p.ParseDependency = ParseFlag(parseDependency) + if p.packages != nil { + p.packages.parseDependency = p.ParseDependency + } + } +} + +// SetMarkdownFileDirectory sets the directory to search for markdown files. +func SetMarkdownFileDirectory(directoryPath string) func(*Parser) { + return func(p *Parser) { + p.markdownFileDir = directoryPath + } +} + +// SetCodeExamplesDirectory sets the directory to search for code example files. +func SetCodeExamplesDirectory(directoryPath string) func(*Parser) { + return func(p *Parser) { + p.codeExampleFilesDir = directoryPath + } +} + +// SetExcludedDirsAndFiles sets directories and files to be excluded when searching. +func SetExcludedDirsAndFiles(excludes string) func(*Parser) { + return func(p *Parser) { + for _, f := range strings.Split(excludes, ",") { + f = strings.TrimSpace(f) + if f != "" { + f = filepath.Clean(f) + p.excludes[f] = struct{}{} + } + } + } +} + +// SetPackagePrefix sets a list of package path prefixes from a comma-separated +// string, packages that do not match any one of them will be excluded when +// searching. 
+func SetPackagePrefix(packagePrefix string) func(*Parser) { + return func(p *Parser) { + for _, f := range strings.Split(packagePrefix, ",") { + f = strings.TrimSpace(f) + if f != "" { + p.packagePrefix = append(p.packagePrefix, f) + } + } + } +} + +// SetTags sets the tags to be included +func SetTags(include string) func(*Parser) { + return func(p *Parser) { + for _, f := range strings.Split(include, ",") { + f = strings.TrimSpace(f) + if f != "" { + p.tags[f] = struct{}{} + } + } + } +} + +// SetParseExtension parses only those operations which match given extension +func SetParseExtension(parseExtension string) func(*Parser) { + return func(p *Parser) { + p.parseExtension = parseExtension + } +} + +// SetStrict sets whether swag should error or warn when it detects cases which are most likely user errors. +func SetStrict(strict bool) func(*Parser) { + return func(p *Parser) { + p.Strict = strict + } +} + +// SetDebugger allows the use of user-defined implementations. +func SetDebugger(logger Debugger) func(parser *Parser) { + return func(p *Parser) { + if logger != nil { + p.debug = logger + } + } +} + +// SetFieldParserFactory allows the use of user-defined implementations. +func SetFieldParserFactory(factory FieldParserFactory) func(parser *Parser) { + return func(p *Parser) { + p.fieldParserFactory = factory + } +} + +// SetOverrides allows the use of user-defined global type overrides. +func SetOverrides(overrides map[string]string) func(parser *Parser) { + return func(p *Parser) { + for k, v := range overrides { + p.Overrides[k] = v + } + } +} + +// SetCollectionFormat set default collection format +func SetCollectionFormat(collectionFormat string) func(*Parser) { + return func(p *Parser) { + p.collectionFormatInQuery = collectionFormat + } +} + +// ParseUsingGoList sets whether swag use go list to parse dependency +func ParseUsingGoList(enabled bool) func(parser *Parser) { + return func(p *Parser) { + p.parseGoList = enabled + } +} + +// ParseAPI parses general api info for given searchDir and mainAPIFile. +func (parser *Parser) ParseAPI(searchDir string, mainAPIFile string, parseDepth int) error { + return parser.ParseAPIMultiSearchDir([]string{searchDir}, mainAPIFile, parseDepth) +} + +// skipPackageByPrefix returns true the given pkgpath does not match +// any user-defined package path prefixes. +func (parser *Parser) skipPackageByPrefix(pkgpath string) bool { + if len(parser.packagePrefix) == 0 { + return false + } + for _, prefix := range parser.packagePrefix { + if strings.HasPrefix(pkgpath, prefix) { + return false + } + } + return true +} + +// ParseAPIMultiSearchDir is like ParseAPI but for multiple search dirs. 
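// A minimal usage sketch of the constructor options and ParseAPI defined above; the
// search dir, main file, and option values here are illustrative assumptions, not
// taken from this repository.
package main

import (
	"log"

	"github.com/swaggo/swag"
)

func main() {
	p := swag.New(
		swag.SetStrict(true),                        // error instead of warn on likely user errors
		swag.SetExcludedDirsAndFiles("vendor,logs"), // comma-separated excludes
		swag.SetTags("admin,!internal"),             // keep only "admin" operations; "!"-prefixed tags are excluded
	)

	// searchDir, mainAPIFile, parseDepth (depth used when resolving dependencies).
	if err := p.ParseAPI("./", "main.go", 100); err != nil {
		log.Fatal(err)
	}

	doc := p.GetSwagger() // *spec.Swagger root document (GetSwagger is defined further below)
	_ = doc
}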
+func (parser *Parser) ParseAPIMultiSearchDir(searchDirs []string, mainAPIFile string, parseDepth int) error { + for _, searchDir := range searchDirs { + parser.debug.Printf("Generate general API Info, search dir:%s", searchDir) + + packageDir, err := getPkgName(searchDir) + if err != nil { + parser.debug.Printf("warning: failed to get package name in dir: %s, error: %s", searchDir, err.Error()) + } + + err = parser.getAllGoFileInfo(packageDir, searchDir) + if err != nil { + return err + } + } + + absMainAPIFilePath, err := filepath.Abs(filepath.Join(searchDirs[0], mainAPIFile)) + if err != nil { + return err + } + + // Use 'go list' command instead of depth.Resolve() + if parser.ParseDependency > 0 { + if parser.parseGoList { + pkgs, err := listPackages(context.Background(), filepath.Dir(absMainAPIFilePath), nil, "-deps") + if err != nil { + return fmt.Errorf("pkg %s cannot find all dependencies, %s", filepath.Dir(absMainAPIFilePath), err) + } + + length := len(pkgs) + for i := 0; i < length; i++ { + err := parser.getAllGoFileInfoFromDepsByList(pkgs[i], parser.ParseDependency) + if err != nil { + return err + } + } + } else { + var t depth.Tree + t.ResolveInternal = true + t.MaxDepth = parseDepth + + pkgName, err := getPkgName(filepath.Dir(absMainAPIFilePath)) + if err != nil { + return err + } + + err = t.Resolve(pkgName) + if err != nil { + return fmt.Errorf("pkg %s cannot find all dependencies, %s", pkgName, err) + } + for i := 0; i < len(t.Root.Deps); i++ { + err := parser.getAllGoFileInfoFromDeps(&t.Root.Deps[i], parser.ParseDependency) + if err != nil { + return err + } + } + } + } + + err = parser.ParseGeneralAPIInfo(absMainAPIFilePath) + if err != nil { + return err + } + + parser.parsedSchemas, err = parser.packages.ParseTypes() + if err != nil { + return err + } + + err = parser.packages.RangeFiles(parser.ParseRouterAPIInfo) + if err != nil { + return err + } + + return parser.checkOperationIDUniqueness() +} + +func getPkgName(searchDir string) (string, error) { + cmd := exec.Command("go", "list", "-f={{.ImportPath}}") + cmd.Dir = searchDir + + var stdout, stderr strings.Builder + + cmd.Stdout = &stdout + cmd.Stderr = &stderr + + if err := cmd.Run(); err != nil { + return "", fmt.Errorf("execute go list command, %s, stdout:%s, stderr:%s", err, stdout.String(), stderr.String()) + } + + outStr, _ := stdout.String(), stderr.String() + + if outStr[0] == '_' { // will shown like _/{GOPATH}/src/{YOUR_PACKAGE} when NOT enable GO MODULE. + outStr = strings.TrimPrefix(outStr, "_"+build.Default.GOPATH+"/src/") + } + + f := strings.Split(outStr, "\n") + + outStr = f[0] + + return outStr, nil +} + +// ParseGeneralAPIInfo parses general api info for given mainAPIFile path. 
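// A sketch of the general API comment block that ParseGeneralAPIInfo reads from the
// main API file; every value below is an illustrative assumption. Attributes are
// matched case-insensitively against the @-annotations handled in the switch below.

// @title            Example API
// @version          1.0
// @description      A sample server.
// @host             localhost:8080
// @BasePath         /v1
// @schemes          http https
// @tag.name         items
// @tag.description  Operations on items
package main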
+func (parser *Parser) ParseGeneralAPIInfo(mainAPIFile string) error { + fileTree, err := goparser.ParseFile(token.NewFileSet(), mainAPIFile, nil, goparser.ParseComments) + if err != nil { + return fmt.Errorf("cannot parse source files %s: %s", mainAPIFile, err) + } + + parser.swagger.Swagger = "2.0" + + for _, comment := range fileTree.Comments { + comments := strings.Split(comment.Text(), "\n") + if !isGeneralAPIComment(comments) { + continue + } + + err = parseGeneralAPIInfo(parser, comments) + if err != nil { + return err + } + } + + return nil +} + +func parseGeneralAPIInfo(parser *Parser, comments []string) error { + previousAttribute := "" + + // parsing classic meta data model + for line := 0; line < len(comments); line++ { + commentLine := comments[line] + commentLine = strings.TrimSpace(commentLine) + if len(commentLine) == 0 { + continue + } + fields := FieldsByAnySpace(commentLine, 2) + + attribute := fields[0] + var value string + if len(fields) > 1 { + value = fields[1] + } + + switch attr := strings.ToLower(attribute); attr { + case versionAttr, titleAttr, tosAttr, licNameAttr, licURLAttr, conNameAttr, conURLAttr, conEmailAttr: + setSwaggerInfo(parser.swagger, attr, value) + case descriptionAttr: + if previousAttribute == attribute { + parser.swagger.Info.Description += "\n" + value + + continue + } + + setSwaggerInfo(parser.swagger, attr, value) + case descriptionMarkdownAttr: + commentInfo, err := getMarkdownForTag("api", parser.markdownFileDir) + if err != nil { + return err + } + + setSwaggerInfo(parser.swagger, descriptionAttr, string(commentInfo)) + + case "@host": + parser.swagger.Host = value + case "@hoststate": + fields = FieldsByAnySpace(commentLine, 3) + if len(fields) != 3 { + return fmt.Errorf("%s needs 3 arguments", attribute) + } + if parser.HostState == fields[1] { + parser.swagger.Host = fields[2] + } + case "@basepath": + parser.swagger.BasePath = value + + case acceptAttr: + err := parser.ParseAcceptComment(value) + if err != nil { + return err + } + case produceAttr: + err := parser.ParseProduceComment(value) + if err != nil { + return err + } + case "@schemes": + parser.swagger.Schemes = strings.Split(value, " ") + case "@tag.name": + parser.swagger.Tags = append(parser.swagger.Tags, spec.Tag{ + TagProps: spec.TagProps{ + Name: value, + }, + }) + case "@tag.description": + tag := parser.swagger.Tags[len(parser.swagger.Tags)-1] + tag.TagProps.Description = value + replaceLastTag(parser.swagger.Tags, tag) + case "@tag.description.markdown": + tag := parser.swagger.Tags[len(parser.swagger.Tags)-1] + + commentInfo, err := getMarkdownForTag(tag.TagProps.Name, parser.markdownFileDir) + if err != nil { + return err + } + + tag.TagProps.Description = string(commentInfo) + replaceLastTag(parser.swagger.Tags, tag) + case "@tag.docs.url": + tag := parser.swagger.Tags[len(parser.swagger.Tags)-1] + tag.TagProps.ExternalDocs = &spec.ExternalDocumentation{ + URL: value, + Description: "", + } + + replaceLastTag(parser.swagger.Tags, tag) + case "@tag.docs.description": + tag := parser.swagger.Tags[len(parser.swagger.Tags)-1] + if tag.TagProps.ExternalDocs == nil { + return fmt.Errorf("%s needs to come after a @tags.docs.url", attribute) + } + + tag.TagProps.ExternalDocs.Description = value + replaceLastTag(parser.swagger.Tags, tag) + + case secBasicAttr, secAPIKeyAttr, secApplicationAttr, secImplicitAttr, secPasswordAttr, secAccessCodeAttr: + scheme, err := parseSecAttributes(attribute, comments, &line) + if err != nil { + return err + } + + 
parser.swagger.SecurityDefinitions[value] = scheme + + case securityAttr: + parser.swagger.Security = append(parser.swagger.Security, parseSecurity(value)) + + case "@query.collection.format": + parser.collectionFormatInQuery = TransToValidCollectionFormat(value) + + case extDocsDescAttr, extDocsURLAttr: + if parser.swagger.ExternalDocs == nil { + parser.swagger.ExternalDocs = new(spec.ExternalDocumentation) + } + switch attr { + case extDocsDescAttr: + parser.swagger.ExternalDocs.Description = value + case extDocsURLAttr: + parser.swagger.ExternalDocs.URL = value + } + + default: + if strings.HasPrefix(attribute, "@x-") { + extensionName := attribute[1:] + + extExistsInSecurityDef := false + // for each security definition + for _, v := range parser.swagger.SecurityDefinitions { + // check if extension exists + _, extExistsInSecurityDef = v.VendorExtensible.Extensions.GetString(extensionName) + // if it exists in at least one, then we stop iterating + if extExistsInSecurityDef { + break + } + } + + // if it is present on security def, don't add it again + if extExistsInSecurityDef { + break + } + + if len(value) == 0 { + return fmt.Errorf("annotation %s need a value", attribute) + } + + var valueJSON interface{} + err := json.Unmarshal([]byte(value), &valueJSON) + if err != nil { + return fmt.Errorf("annotation %s need a valid json value", attribute) + } + + if strings.Contains(extensionName, "logo") { + parser.swagger.Info.Extensions.Add(extensionName, valueJSON) + } else { + if parser.swagger.Extensions == nil { + parser.swagger.Extensions = make(map[string]interface{}) + } + + parser.swagger.Extensions[attribute[1:]] = valueJSON + } + } + } + + previousAttribute = attribute + } + + return nil +} + +func setSwaggerInfo(swagger *spec.Swagger, attribute, value string) { + switch attribute { + case versionAttr: + swagger.Info.Version = value + case titleAttr: + swagger.Info.Title = value + case tosAttr: + swagger.Info.TermsOfService = value + case descriptionAttr: + swagger.Info.Description = value + case conNameAttr: + swagger.Info.Contact.Name = value + case conEmailAttr: + swagger.Info.Contact.Email = value + case conURLAttr: + swagger.Info.Contact.URL = value + case licNameAttr: + swagger.Info.License = initIfEmpty(swagger.Info.License) + swagger.Info.License.Name = value + case licURLAttr: + swagger.Info.License = initIfEmpty(swagger.Info.License) + swagger.Info.License.URL = value + } +} + +func parseSecAttributes(context string, lines []string, index *int) (*spec.SecurityScheme, error) { + const ( + in = "@in" + name = "@name" + descriptionAttr = "@description" + tokenURL = "@tokenurl" + authorizationURL = "@authorizationurl" + ) + + var search []string + + attribute := strings.ToLower(FieldsByAnySpace(lines[*index], 2)[0]) + switch attribute { + case secBasicAttr: + return spec.BasicAuth(), nil + case secAPIKeyAttr: + search = []string{in, name} + case secApplicationAttr, secPasswordAttr: + search = []string{tokenURL} + case secImplicitAttr: + search = []string{authorizationURL} + case secAccessCodeAttr: + search = []string{tokenURL, authorizationURL} + } + + // For the first line we get the attributes in the context parameter, so we skip to the next one + *index++ + + attrMap, scopes := make(map[string]string), make(map[string]string) + extensions, description := make(map[string]interface{}), "" + +loopline: + for ; *index < len(lines); *index++ { + v := strings.TrimSpace(lines[*index]) + if len(v) == 0 { + continue + } + + fields := FieldsByAnySpace(v, 2) + securityAttr := 
strings.ToLower(fields[0]) + var value string + if len(fields) > 1 { + value = fields[1] + } + + for _, findterm := range search { + if securityAttr == findterm { + attrMap[securityAttr] = value + continue loopline + } + } + + if isExists, err := isExistsScope(securityAttr); err != nil { + return nil, err + } else if isExists { + scopes[securityAttr[len(scopeAttrPrefix):]] = value + continue + } + + if strings.HasPrefix(securityAttr, "@x-") { + // Add the custom attribute without the @ + extensions[securityAttr[1:]] = value + continue + } + + // Not mandatory field + if securityAttr == descriptionAttr { + description = value + } + + // next securityDefinitions + if strings.Index(securityAttr, "@securitydefinitions.") == 0 { + // Go back to the previous line and break + *index-- + + break + } + } + + if len(attrMap) != len(search) { + return nil, fmt.Errorf("%s is %v required", context, search) + } + + var scheme *spec.SecurityScheme + + switch attribute { + case secAPIKeyAttr: + scheme = spec.APIKeyAuth(attrMap[name], attrMap[in]) + case secApplicationAttr: + scheme = spec.OAuth2Application(attrMap[tokenURL]) + case secImplicitAttr: + scheme = spec.OAuth2Implicit(attrMap[authorizationURL]) + case secPasswordAttr: + scheme = spec.OAuth2Password(attrMap[tokenURL]) + case secAccessCodeAttr: + scheme = spec.OAuth2AccessToken(attrMap[authorizationURL], attrMap[tokenURL]) + } + + scheme.Description = description + + for extKey, extValue := range extensions { + scheme.AddExtension(extKey, extValue) + } + + for scope, scopeDescription := range scopes { + scheme.AddScope(scope, scopeDescription) + } + + return scheme, nil +} + +func parseSecurity(commentLine string) map[string][]string { + securityMap := make(map[string][]string) + + for _, securityOption := range strings.Split(commentLine, "||") { + securityOption = strings.TrimSpace(securityOption) + + left, right := strings.Index(securityOption, "["), strings.Index(securityOption, "]") + + if !(left == -1 && right == -1) { + scopes := securityOption[left+1 : right] + + var options []string + + for _, scope := range strings.Split(scopes, ",") { + options = append(options, strings.TrimSpace(scope)) + } + + securityKey := securityOption[0:left] + securityMap[securityKey] = append(securityMap[securityKey], options...) + } else { + securityKey := strings.TrimSpace(securityOption) + securityMap[securityKey] = []string{} + } + } + + return securityMap +} + +func initIfEmpty(license *spec.License) *spec.License { + if license == nil { + return new(spec.License) + } + + return license +} + +// ParseAcceptComment parses comment for given `accept` comment string. +func (parser *Parser) ParseAcceptComment(commentLine string) error { + return parseMimeTypeList(commentLine, &parser.swagger.Consumes, "%v accept type can't be accepted") +} + +// ParseProduceComment parses comment for given `produce` comment string. 
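// A sketch of the security annotations handled by parseSecAttributes and parseSecurity
// above; the scheme names are illustrative assumptions. For @securitydefinitions.apikey
// the mandatory attributes are @in and @name; an optional @description line,
// "@scope.<name> <description>" lines, and @x-... extensions are also picked up.

// @securityDefinitions.apikey ApiKeyAuth
// @in          header
// @name        Authorization
// @description API key passed in the Authorization header
package main

// Global requirement lines (@security) are parsed by parseSecurity: scopes go in
// brackets, and several schemes may appear on one line separated by "||", e.g.
//
//	// @security ApiKeyAuth
//	// @security OAuth2Password[read, write] || ApiKeyAuth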
+func (parser *Parser) ParseProduceComment(commentLine string) error { + return parseMimeTypeList(commentLine, &parser.swagger.Produces, "%v produce type can't be accepted") +} + +func isGeneralAPIComment(comments []string) bool { + for _, commentLine := range comments { + commentLine = strings.TrimSpace(commentLine) + if len(commentLine) == 0 { + continue + } + attribute := strings.ToLower(FieldsByAnySpace(commentLine, 2)[0]) + switch attribute { + // The @summary, @router, @success, @failure annotation belongs to Operation + case summaryAttr, routerAttr, successAttr, failureAttr, responseAttr: + return false + } + } + + return true +} + +func getMarkdownForTag(tagName string, dirPath string) ([]byte, error) { + dirEntries, err := os.ReadDir(dirPath) + if err != nil { + return nil, err + } + + for _, entry := range dirEntries { + if entry.IsDir() { + continue + } + + fileName := entry.Name() + + if !strings.Contains(fileName, ".md") { + continue + } + + if strings.Contains(fileName, tagName) { + fullPath := filepath.Join(dirPath, fileName) + + commentInfo, err := os.ReadFile(fullPath) + if err != nil { + return nil, fmt.Errorf("Failed to read markdown file %s error: %s ", fullPath, err) + } + + return commentInfo, nil + } + } + + return nil, fmt.Errorf("Unable to find markdown file for tag %s in the given directory", tagName) +} + +func isExistsScope(scope string) (bool, error) { + s := strings.Fields(scope) + for _, v := range s { + if strings.HasPrefix(v, scopeAttrPrefix) { + if strings.Contains(v, ",") { + return false, fmt.Errorf("@scope can't use comma(,) get=" + v) + } + } + } + + return strings.HasPrefix(scope, scopeAttrPrefix), nil +} + +func getTagsFromComment(comment string) (tags []string) { + commentLine := strings.TrimSpace(strings.TrimLeft(comment, "/")) + if len(commentLine) == 0 { + return nil + } + + attribute := strings.Fields(commentLine)[0] + lineRemainder, lowerAttribute := strings.TrimSpace(commentLine[len(attribute):]), strings.ToLower(attribute) + + if lowerAttribute == tagsAttr { + for _, tag := range strings.Split(lineRemainder, ",") { + tags = append(tags, strings.TrimSpace(tag)) + } + } + return + +} + +func (parser *Parser) matchTags(comments []*ast.Comment) (match bool) { + if len(parser.tags) == 0 { + return true + } + + match = false + for _, comment := range comments { + for _, tag := range getTagsFromComment(comment.Text) { + if _, has := parser.tags["!"+tag]; has { + return false + } + if _, has := parser.tags[tag]; has { + match = true // keep iterating as it may contain a tag that is excluded + } + } + } + + if !match { + // If all tags are negation then we should return true + for key := range parser.tags { + if key[0] != '!' { + return false + } + } + } + return true +} + +func matchExtension(extensionToMatch string, comments []*ast.Comment) (match bool) { + if len(extensionToMatch) != 0 { + for _, comment := range comments { + commentLine := strings.TrimSpace(strings.TrimLeft(comment.Text, "/")) + fields := FieldsByAnySpace(commentLine, 2) + if len(fields) > 0 { + lowerAttribute := strings.ToLower(fields[0]) + + if lowerAttribute == fmt.Sprintf("@x-%s", strings.ToLower(extensionToMatch)) { + return true + } + } + } + return false + } + return true +} + +// ParseRouterAPIInfo parses router api info for given astFile. 
+func (parser *Parser) ParseRouterAPIInfo(fileInfo *AstFileInfo) error { +DeclsLoop: + for _, astDescription := range fileInfo.File.Decls { + if (fileInfo.ParseFlag & ParseOperations) == ParseNone { + continue + } + astDeclaration, ok := astDescription.(*ast.FuncDecl) + if ok && astDeclaration.Doc != nil && astDeclaration.Doc.List != nil { + if parser.matchTags(astDeclaration.Doc.List) && + matchExtension(parser.parseExtension, astDeclaration.Doc.List) { + // for per 'function' comment, create a new 'Operation' object + operation := NewOperation(parser, SetCodeExampleFilesDirectory(parser.codeExampleFilesDir)) + for _, comment := range astDeclaration.Doc.List { + err := operation.ParseComment(comment.Text, fileInfo.File) + if err != nil { + return fmt.Errorf("ParseComment error in file %s :%+v", fileInfo.Path, err) + } + if operation.State != "" && operation.State != parser.HostState { + continue DeclsLoop + } + } + err := processRouterOperation(parser, operation) + if err != nil { + return err + } + } + } + } + + return nil +} + +func refRouteMethodOp(item *spec.PathItem, method string) (op **spec.Operation) { + switch method { + case http.MethodGet: + op = &item.Get + case http.MethodPost: + op = &item.Post + case http.MethodDelete: + op = &item.Delete + case http.MethodPut: + op = &item.Put + case http.MethodPatch: + op = &item.Patch + case http.MethodHead: + op = &item.Head + case http.MethodOptions: + op = &item.Options + } + + return +} + +func processRouterOperation(parser *Parser, operation *Operation) error { + for _, routeProperties := range operation.RouterProperties { + var ( + pathItem spec.PathItem + ok bool + ) + + pathItem, ok = parser.swagger.Paths.Paths[routeProperties.Path] + if !ok { + pathItem = spec.PathItem{} + } + + op := refRouteMethodOp(&pathItem, routeProperties.HTTPMethod) + + // check if we already have an operation for this path and method + if *op != nil { + err := fmt.Errorf("route %s %s is declared multiple times", routeProperties.HTTPMethod, routeProperties.Path) + if parser.Strict { + return err + } + + parser.debug.Printf("warning: %s\n", err) + } + + if len(operation.RouterProperties) > 1 { + newOp := *operation + var validParams []spec.Parameter + for _, param := range newOp.Operation.OperationProps.Parameters { + if param.In == "path" && !strings.Contains(routeProperties.Path, param.Name) { + // This path param is not actually contained in the path, skip adding it to the final params + continue + } + validParams = append(validParams, param) + } + newOp.Operation.OperationProps.Parameters = validParams + *op = &newOp.Operation + } else { + *op = &operation.Operation + } + + if routeProperties.Deprecated { + (*op).Deprecated = routeProperties.Deprecated + } + + parser.swagger.Paths.Paths[routeProperties.Path] = pathItem + } + + return nil +} + +func convertFromSpecificToPrimitive(typeName string) (string, error) { + name := typeName + if strings.ContainsRune(name, '.') { + name = strings.Split(name, ".")[1] + } + + switch strings.ToUpper(name) { + case "TIME", "OBJECTID", "UUID": + return STRING, nil + case "DECIMAL": + return NUMBER, nil + } + + return typeName, ErrFailedConvertPrimitiveType +} + +func (parser *Parser) getTypeSchema(typeName string, file *ast.File, ref bool) (*spec.Schema, error) { + if override, ok := parser.Overrides[typeName]; ok { + parser.debug.Printf("Override detected for %s: using %s instead", typeName, override) + return parseObjectSchema(parser, override, file) + } + + if IsInterfaceLike(typeName) { + return &spec.Schema{}, 
nil + } + if IsGolangPrimitiveType(typeName) { + return PrimitiveSchema(TransToValidSchemeType(typeName)), nil + } + + schemaType, err := convertFromSpecificToPrimitive(typeName) + if err == nil { + return PrimitiveSchema(schemaType), nil + } + + typeSpecDef := parser.packages.FindTypeSpec(typeName, file) + if typeSpecDef == nil { + return nil, fmt.Errorf("cannot find type definition: %s", typeName) + } + + if override, ok := parser.Overrides[typeSpecDef.FullPath()]; ok { + if override == "" { + parser.debug.Printf("Override detected for %s: ignoring", typeSpecDef.FullPath()) + + return nil, ErrSkippedField + } + + parser.debug.Printf("Override detected for %s: using %s instead", typeSpecDef.FullPath(), override) + + separator := strings.LastIndex(override, ".") + if separator == -1 { + // treat as a swaggertype tag + parts := strings.Split(override, ",") + + return BuildCustomSchema(parts) + } + + typeSpecDef = parser.packages.findTypeSpec(override[0:separator], override[separator+1:]) + } + + schema, ok := parser.parsedSchemas[typeSpecDef] + if !ok { + var err error + + schema, err = parser.ParseDefinition(typeSpecDef) + if err != nil { + if err == ErrRecursiveParseStruct && ref { + return parser.getRefTypeSchema(typeSpecDef, schema), nil + } + return nil, fmt.Errorf("%s: %w", typeName, err) + } + } + + if ref { + if IsComplexSchema(schema.Schema) { + return parser.getRefTypeSchema(typeSpecDef, schema), nil + } + // if it is a simple schema, just return a copy + newSchema := *schema.Schema + return &newSchema, nil + } + + return schema.Schema, nil +} + +func (parser *Parser) getRefTypeSchema(typeSpecDef *TypeSpecDef, schema *Schema) *spec.Schema { + _, ok := parser.outputSchemas[typeSpecDef] + if !ok { + parser.swagger.Definitions[schema.Name] = spec.Schema{} + + if schema.Schema != nil { + parser.swagger.Definitions[schema.Name] = *schema.Schema + } + + parser.outputSchemas[typeSpecDef] = schema + } + + refSchema := RefSchema(schema.Name) + + return refSchema +} + +func (parser *Parser) isInStructStack(typeSpecDef *TypeSpecDef) bool { + for _, specDef := range parser.structStack { + if typeSpecDef == specDef { + return true + } + } + + return false +} + +// ParseDefinition parses given type spec that corresponds to the type under +// given name and package, and populates swagger schema definitions registry +// with a schema for the given type +func (parser *Parser) ParseDefinition(typeSpecDef *TypeSpecDef) (*Schema, error) { + typeName := typeSpecDef.TypeName() + schema, found := parser.parsedSchemas[typeSpecDef] + if found { + parser.debug.Printf("Skipping '%s', already parsed.", typeName) + + return schema, nil + } + + if parser.isInStructStack(typeSpecDef) { + parser.debug.Printf("Skipping '%s', recursion detected.", typeName) + + return &Schema{ + Name: typeName, + PkgPath: typeSpecDef.PkgPath, + Schema: PrimitiveSchema(OBJECT), + }, + ErrRecursiveParseStruct + } + + parser.structStack = append(parser.structStack, typeSpecDef) + + parser.debug.Printf("Generating %s", typeName) + + definition, err := parser.parseTypeExpr(typeSpecDef.File, typeSpecDef.TypeSpec.Type, false) + if err != nil { + parser.debug.Printf("Error parsing type definition '%s': %s", typeName, err) + return nil, err + } + + if definition.Description == "" { + fillDefinitionDescription(definition, typeSpecDef.File, typeSpecDef) + } + + if len(typeSpecDef.Enums) > 0 { + var varnames []string + var enumComments = make(map[string]string) + for _, value := range typeSpecDef.Enums { + definition.Enum = 
append(definition.Enum, value.Value) + varnames = append(varnames, value.key) + if len(value.Comment) > 0 { + enumComments[value.key] = value.Comment + } + } + if definition.Extensions == nil { + definition.Extensions = make(spec.Extensions) + } + definition.Extensions[enumVarNamesExtension] = varnames + if len(enumComments) > 0 { + definition.Extensions[enumCommentsExtension] = enumComments + } + } + + sch := Schema{ + Name: typeName, + PkgPath: typeSpecDef.PkgPath, + Schema: definition, + } + parser.parsedSchemas[typeSpecDef] = &sch + + // update an empty schema as a result of recursion + s2, found := parser.outputSchemas[typeSpecDef] + if found { + parser.swagger.Definitions[s2.Name] = *definition + } + + return &sch, nil +} + +func fullTypeName(parts ...string) string { + return strings.Join(parts, ".") +} + +// fillDefinitionDescription additionally fills fields in definition (spec.Schema) +// TODO: If .go file contains many types, it may work for a long time +func fillDefinitionDescription(definition *spec.Schema, file *ast.File, typeSpecDef *TypeSpecDef) { + if file == nil { + return + } + for _, astDeclaration := range file.Decls { + generalDeclaration, ok := astDeclaration.(*ast.GenDecl) + if !ok || generalDeclaration.Tok != token.TYPE { + continue + } + + for _, astSpec := range generalDeclaration.Specs { + typeSpec, ok := astSpec.(*ast.TypeSpec) + if !ok || typeSpec != typeSpecDef.TypeSpec { + continue + } + + definition.Description = + extractDeclarationDescription(typeSpec.Doc, typeSpec.Comment, generalDeclaration.Doc) + } + } +} + +// extractDeclarationDescription gets first description +// from attribute descriptionAttr in commentGroups (ast.CommentGroup) +func extractDeclarationDescription(commentGroups ...*ast.CommentGroup) string { + var description string + + for _, commentGroup := range commentGroups { + if commentGroup == nil { + continue + } + + isHandlingDescription := false + + for _, comment := range commentGroup.List { + commentText := strings.TrimSpace(strings.TrimLeft(comment.Text, "/")) + if len(commentText) == 0 { + continue + } + attribute := FieldsByAnySpace(commentText, 2)[0] + + if strings.ToLower(attribute) != descriptionAttr { + if !isHandlingDescription { + continue + } + + break + } + + isHandlingDescription = true + description += " " + strings.TrimSpace(commentText[len(attribute):]) + } + } + + return strings.TrimLeft(description, " ") +} + +// parseTypeExpr parses given type expression that corresponds to the type under +// given name and package, and returns swagger schema for it. 
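// A sketch of the declaration comments extractDeclarationDescription collects for a
// model (package, type, and field names are illustrative assumptions): consecutive
// @Description lines from the type's doc comment are joined with spaces and stored
// on the schema by fillDefinitionDescription above.
package model

// @Description Account model
// @Description returned by the read endpoints.
type Account struct {
	ID   int    `json:"id"`
	Name string `json:"name"`
}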
+func (parser *Parser) parseTypeExpr(file *ast.File, typeExpr ast.Expr, ref bool) (*spec.Schema, error) { + switch expr := typeExpr.(type) { + // type Foo interface{} + case *ast.InterfaceType: + return &spec.Schema{}, nil + + // type Foo struct {...} + case *ast.StructType: + return parser.parseStruct(file, expr.Fields) + + // type Foo Baz + case *ast.Ident: + return parser.getTypeSchema(expr.Name, file, ref) + + // type Foo *Baz + case *ast.StarExpr: + return parser.parseTypeExpr(file, expr.X, ref) + + // type Foo pkg.Bar + case *ast.SelectorExpr: + if xIdent, ok := expr.X.(*ast.Ident); ok { + return parser.getTypeSchema(fullTypeName(xIdent.Name, expr.Sel.Name), file, ref) + } + // type Foo []Baz + case *ast.ArrayType: + itemSchema, err := parser.parseTypeExpr(file, expr.Elt, true) + if err != nil { + return nil, err + } + + return spec.ArrayProperty(itemSchema), nil + // type Foo map[string]Bar + case *ast.MapType: + if _, ok := expr.Value.(*ast.InterfaceType); ok { + return spec.MapProperty(nil), nil + } + schema, err := parser.parseTypeExpr(file, expr.Value, true) + if err != nil { + return nil, err + } + + return spec.MapProperty(schema), nil + + case *ast.FuncType: + return nil, ErrFuncTypeField + // ... + } + + return parser.parseGenericTypeExpr(file, typeExpr) +} + +func (parser *Parser) parseStruct(file *ast.File, fields *ast.FieldList) (*spec.Schema, error) { + required, properties := make([]string, 0), make(map[string]spec.Schema) + + for _, field := range fields.List { + fieldProps, requiredFromAnon, err := parser.parseStructField(file, field) + if err != nil { + if errors.Is(err, ErrFuncTypeField) || errors.Is(err, ErrSkippedField) { + continue + } + + return nil, err + } + + if len(fieldProps) == 0 { + continue + } + + required = append(required, requiredFromAnon...) + + for k, v := range fieldProps { + properties[k] = v + } + } + + sort.Strings(required) + + return &spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{OBJECT}, + Properties: properties, + Required: required, + }, + }, nil +} + +func (parser *Parser) parseStructField(file *ast.File, field *ast.Field) (map[string]spec.Schema, []string, error) { + if field.Tag != nil { + skip, ok := reflect.StructTag(strings.ReplaceAll(field.Tag.Value, "`", "")).Lookup("swaggerignore") + if ok && strings.EqualFold(skip, "true") { + return nil, nil, nil + } + } + + ps := parser.fieldParserFactory(parser, field) + + if ps.ShouldSkip() { + return nil, nil, nil + } + + fieldName, err := ps.FieldName() + if err != nil { + return nil, nil, err + } + + if fieldName == "" { + typeName, err := getFieldType(file, field.Type, nil) + if err != nil { + return nil, nil, fmt.Errorf("%s: %w", fieldName, err) + } + + schema, err := parser.getTypeSchema(typeName, file, false) + if err != nil { + return nil, nil, fmt.Errorf("%s: %w", fieldName, err) + } + + if len(schema.Type) > 0 && schema.Type[0] == OBJECT { + if len(schema.Properties) == 0 { + return nil, nil, nil + } + + properties := map[string]spec.Schema{} + for k, v := range schema.Properties { + properties[k] = v + } + + return properties, schema.SchemaProps.Required, nil + } + // for alias type of non-struct types ,such as array,map, etc. ignore field tag. 
+ return map[string]spec.Schema{typeName: *schema}, nil, nil + + } + + schema, err := ps.CustomSchema() + if err != nil { + return nil, nil, fmt.Errorf("%s: %w", fieldName, err) + } + + if schema == nil { + typeName, err := getFieldType(file, field.Type, nil) + if err == nil { + // named type + schema, err = parser.getTypeSchema(typeName, file, true) + } else { + // unnamed type + schema, err = parser.parseTypeExpr(file, field.Type, false) + } + + if err != nil { + return nil, nil, fmt.Errorf("%s: %w", fieldName, err) + } + } + + err = ps.ComplementSchema(schema) + if err != nil { + return nil, nil, fmt.Errorf("%s: %w", fieldName, err) + } + + var tagRequired []string + + required, err := ps.IsRequired() + if err != nil { + return nil, nil, fmt.Errorf("%s: %w", fieldName, err) + } + + if required { + tagRequired = append(tagRequired, fieldName) + } + + if schema.Extensions == nil { + schema.Extensions = make(spec.Extensions) + } + if formName := ps.FormName(); len(formName) > 0 { + schema.Extensions["formData"] = formName + } + if headerName := ps.HeaderName(); len(headerName) > 0 { + schema.Extensions["header"] = headerName + } + if pathName := ps.PathName(); len(pathName) > 0 { + schema.Extensions["path"] = pathName + } + + return map[string]spec.Schema{fieldName: *schema}, tagRequired, nil +} + +func getFieldType(file *ast.File, field ast.Expr, genericParamTypeDefs map[string]*genericTypeSpec) (string, error) { + switch fieldType := field.(type) { + case *ast.Ident: + return fieldType.Name, nil + case *ast.SelectorExpr: + packageName, err := getFieldType(file, fieldType.X, genericParamTypeDefs) + if err != nil { + return "", err + } + + return fullTypeName(packageName, fieldType.Sel.Name), nil + case *ast.StarExpr: + fullName, err := getFieldType(file, fieldType.X, genericParamTypeDefs) + if err != nil { + return "", err + } + + return fullName, nil + default: + return getGenericFieldType(file, field, genericParamTypeDefs) + } +} + +func (parser *Parser) getUnderlyingSchema(schema *spec.Schema) *spec.Schema { + if schema == nil { + return nil + } + + if url := schema.Ref.GetURL(); url != nil { + if pos := strings.LastIndexByte(url.Fragment, '/'); pos >= 0 { + name := url.Fragment[pos+1:] + if schema, ok := parser.swagger.Definitions[name]; ok { + return &schema + } + } + } + + if len(schema.AllOf) > 0 { + merged := &spec.Schema{} + MergeSchema(merged, schema) + for _, s := range schema.AllOf { + MergeSchema(merged, parser.getUnderlyingSchema(&s)) + } + return merged + } + return nil +} + +// GetSchemaTypePath get path of schema type. +func (parser *Parser) GetSchemaTypePath(schema *spec.Schema, depth int) []string { + if schema == nil || depth == 0 { + return nil + } + + if underlying := parser.getUnderlyingSchema(schema); underlying != nil { + return parser.GetSchemaTypePath(underlying, depth) + } + + if len(schema.Type) > 0 { + switch schema.Type[0] { + case ARRAY: + depth-- + + s := []string{schema.Type[0]} + + return append(s, parser.GetSchemaTypePath(schema.Items.Schema, depth)...) + case OBJECT: + if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil { + // for map + depth-- + + s := []string{schema.Type[0]} + + return append(s, parser.GetSchemaTypePath(schema.AdditionalProperties.Schema, depth)...) 
+ } + } + + return []string{schema.Type[0]} + } + + return []string{ANY} +} + +func replaceLastTag(slice []spec.Tag, element spec.Tag) { + slice = append(slice[:len(slice)-1], element) +} + +// defineTypeOfExample example value define the type (object and array unsupported). +func defineTypeOfExample(schemaType, arrayType, exampleValue string) (interface{}, error) { + switch schemaType { + case STRING: + return exampleValue, nil + case NUMBER: + v, err := strconv.ParseFloat(exampleValue, 64) + if err != nil { + return nil, fmt.Errorf("example value %s can't convert to %s err: %s", exampleValue, schemaType, err) + } + + return v, nil + case INTEGER: + v, err := strconv.Atoi(exampleValue) + if err != nil { + return nil, fmt.Errorf("example value %s can't convert to %s err: %s", exampleValue, schemaType, err) + } + + return v, nil + case BOOLEAN: + v, err := strconv.ParseBool(exampleValue) + if err != nil { + return nil, fmt.Errorf("example value %s can't convert to %s err: %s", exampleValue, schemaType, err) + } + + return v, nil + case ARRAY: + values := strings.Split(exampleValue, ",") + result := make([]interface{}, 0) + for _, value := range values { + v, err := defineTypeOfExample(arrayType, "", value) + if err != nil { + return nil, err + } + + result = append(result, v) + } + + return result, nil + case OBJECT: + if arrayType == "" { + return nil, fmt.Errorf("%s is unsupported type in example value `%s`", schemaType, exampleValue) + } + + values := strings.Split(exampleValue, ",") + + result := map[string]interface{}{} + + for _, value := range values { + mapData := strings.SplitN(value, ":", 2) + + if len(mapData) == 2 { + v, err := defineTypeOfExample(arrayType, "", mapData[1]) + if err != nil { + return nil, err + } + + result[mapData[0]] = v + + continue + } + + return nil, fmt.Errorf("example value %s should format: key:value", exampleValue) + } + + return result, nil + } + + return nil, fmt.Errorf("%s is unsupported type in example value %s", schemaType, exampleValue) +} + +// GetAllGoFileInfo gets all Go source files information for given searchDir. 
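// A behavioural sketch of defineTypeOfExample above, written as if it lived in
// package swag (inputs are illustrative; the expected results follow directly from
// the switch on schemaType).
package swag

import "fmt"

func exampleDefineTypeOfExample() {
	n, _ := defineTypeOfExample(INTEGER, "", "42")
	fmt.Println(n) // 42

	arr, _ := defineTypeOfExample(ARRAY, INTEGER, "1,2,3")
	fmt.Println(arr) // [1 2 3]

	obj, _ := defineTypeOfExample(OBJECT, STRING, "k1:v1,k2:v2")
	fmt.Println(obj) // map[k1:v1 k2:v2]
}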
+func (parser *Parser) getAllGoFileInfo(packageDir, searchDir string) error { + if parser.skipPackageByPrefix(packageDir) { + return nil // ignored by user-defined package path prefixes + } + return filepath.Walk(searchDir, func(path string, f os.FileInfo, _ error) error { + err := parser.Skip(path, f) + if err != nil { + return err + } + + if f.IsDir() { + return nil + } + + relPath, err := filepath.Rel(searchDir, path) + if err != nil { + return err + } + + return parser.parseFile(filepath.ToSlash(filepath.Dir(filepath.Clean(filepath.Join(packageDir, relPath)))), path, nil, ParseAll) + }) +} + +func (parser *Parser) getAllGoFileInfoFromDeps(pkg *depth.Pkg, parseFlag ParseFlag) error { + ignoreInternal := pkg.Internal && !parser.ParseInternal + if ignoreInternal || !pkg.Resolved { // ignored internal and not resolved dependencies + return nil + } + + if pkg.Raw != nil && parser.skipPackageByPrefix(pkg.Raw.ImportPath) { + return nil // ignored by user-defined package path prefixes + } + + // Skip cgo + if pkg.Raw == nil && pkg.Name == "C" { + return nil + } + + srcDir := pkg.Raw.Dir + + files, err := os.ReadDir(srcDir) // only parsing files in the dir(don't contain sub dir files) + if err != nil { + return err + } + + for _, f := range files { + if f.IsDir() { + continue + } + + path := filepath.Join(srcDir, f.Name()) + if err := parser.parseFile(pkg.Name, path, nil, parseFlag); err != nil { + return err + } + } + + for i := 0; i < len(pkg.Deps); i++ { + if err := parser.getAllGoFileInfoFromDeps(&pkg.Deps[i], parseFlag); err != nil { + return err + } + } + + return nil +} + +func (parser *Parser) parseFile(packageDir, path string, src interface{}, flag ParseFlag) error { + if strings.HasSuffix(strings.ToLower(path), "_test.go") || filepath.Ext(path) != ".go" { + return nil + } + + return parser.packages.ParseFile(packageDir, path, src, flag) +} + +func (parser *Parser) checkOperationIDUniqueness() error { + // operationsIds contains all operationId annotations to check it's unique + operationsIds := make(map[string]string) + + for path, item := range parser.swagger.Paths.Paths { + var method, id string + + for method = range allMethod { + op := refRouteMethodOp(&item, method) + if *op != nil { + id = (**op).ID + + break + } + } + + if id == "" { + continue + } + + current := fmt.Sprintf("%s %s", method, path) + + previous, ok := operationsIds[id] + if ok { + return fmt.Errorf( + "duplicated @id annotation '%s' found in '%s', previously declared in: '%s'", + id, current, previous) + } + + operationsIds[id] = current + } + + return nil +} + +// Skip returns filepath.SkipDir error if match vendor and hidden folder. +func (parser *Parser) Skip(path string, f os.FileInfo) error { + return walkWith(parser.excludes, parser.ParseVendor)(path, f) +} + +func walkWith(excludes map[string]struct{}, parseVendor bool) func(path string, fileInfo os.FileInfo) error { + return func(path string, f os.FileInfo) error { + if f.IsDir() { + if !parseVendor && f.Name() == "vendor" || // ignore "vendor" + f.Name() == "docs" || // exclude docs + len(f.Name()) > 1 && f.Name()[0] == '.' && f.Name() != ".." { // exclude all hidden folder + return filepath.SkipDir + } + + if excludes != nil { + if _, ok := excludes[path]; ok { + return filepath.SkipDir + } + } + } + + return nil + } +} + +// GetSwagger returns *spec.Swagger which is the root document object for the API specification. +func (parser *Parser) GetSwagger() *spec.Swagger { + return parser.swagger +} + +// addTestType just for tests. 
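// A sketch of what checkOperationIDUniqueness above rejects: two routes sharing one
// @id annotation (package, handler, and route names are illustrative assumptions).
// ParseAPI then fails with a "duplicated @id annotation" error naming both routes.
package handlers

import "net/http"

// @Id     listItems
// @Router /items [get]
func ListItems(w http.ResponseWriter, r *http.Request) {}

// @Id     listItems
// @Router /items/archived [get]
func ListArchived(w http.ResponseWriter, r *http.Request) {}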
+func (parser *Parser) addTestType(typename string) { + typeDef := &TypeSpecDef{} + parser.packages.uniqueDefinitions[typename] = typeDef + parser.parsedSchemas[typeDef] = &Schema{ + PkgPath: "", + Name: typename, + Schema: PrimitiveSchema(OBJECT), + } +} diff --git a/vendor/github.com/swaggo/swag/schema.go b/vendor/github.com/swaggo/swag/schema.go new file mode 100644 index 0000000..b3a5b38 --- /dev/null +++ b/vendor/github.com/swaggo/swag/schema.go @@ -0,0 +1,293 @@ +package swag + +import ( + "errors" + "fmt" + "github.com/go-openapi/spec" +) + +const ( + // ARRAY represent a array value. + ARRAY = "array" + // OBJECT represent a object value. + OBJECT = "object" + // PRIMITIVE represent a primitive value. + PRIMITIVE = "primitive" + // BOOLEAN represent a boolean value. + BOOLEAN = "boolean" + // INTEGER represent a integer value. + INTEGER = "integer" + // NUMBER represent a number value. + NUMBER = "number" + // STRING represent a string value. + STRING = "string" + // FUNC represent a function value. + FUNC = "func" + // ERROR represent a error value. + ERROR = "error" + // INTERFACE represent a interface value. + INTERFACE = "interface{}" + // ANY represent a any value. + ANY = "any" + // NIL represent a empty value. + NIL = "nil" + + // IgnoreNameOverridePrefix Prepend to model to avoid renaming based on comment. + IgnoreNameOverridePrefix = '$' +) + +// CheckSchemaType checks if typeName is not a name of primitive type. +func CheckSchemaType(typeName string) error { + if !IsPrimitiveType(typeName) { + return fmt.Errorf("%s is not basic types", typeName) + } + + return nil +} + +// IsSimplePrimitiveType determine whether the type name is a simple primitive type. +func IsSimplePrimitiveType(typeName string) bool { + switch typeName { + case STRING, NUMBER, INTEGER, BOOLEAN: + return true + } + + return false +} + +// IsPrimitiveType determine whether the type name is a primitive type. +func IsPrimitiveType(typeName string) bool { + switch typeName { + case STRING, NUMBER, INTEGER, BOOLEAN, ARRAY, OBJECT, FUNC: + return true + } + + return false +} + +// IsInterfaceLike determines whether the swagger type name is an go named interface type like error type. +func IsInterfaceLike(typeName string) bool { + return typeName == ERROR || typeName == ANY +} + +// IsNumericType determines whether the swagger type name is a numeric type. +func IsNumericType(typeName string) bool { + return typeName == INTEGER || typeName == NUMBER +} + +// TransToValidSchemeType indicates type will transfer golang basic type to swagger supported type. +func TransToValidSchemeType(typeName string) string { + switch typeName { + case "uint", "int", "uint8", "int8", "uint16", "int16", "byte": + return INTEGER + case "uint32", "int32", "rune": + return INTEGER + case "uint64", "int64": + return INTEGER + case "float32", "float64": + return NUMBER + case "bool": + return BOOLEAN + case "string": + return STRING + } + + return typeName +} + +// IsGolangPrimitiveType determine whether the type name is a golang primitive type. +func IsGolangPrimitiveType(typeName string) bool { + switch typeName { + case "uint", + "int", + "uint8", + "int8", + "uint16", + "int16", + "byte", + "uint32", + "int32", + "rune", + "uint64", + "int64", + "float32", + "float64", + "bool", + "string": + return true + } + + return false +} + +// TransToValidCollectionFormat determine valid collection format. 
+func TransToValidCollectionFormat(format string) string { + switch format { + case "csv", "multi", "pipes", "tsv", "ssv": + return format + } + + return "" +} + +func ignoreNameOverride(name string) bool { + return len(name) != 0 && name[0] == IgnoreNameOverridePrefix +} + +// IsComplexSchema whether a schema is complex and should be a ref schema +func IsComplexSchema(schema *spec.Schema) bool { + // a enum type should be complex + if len(schema.Enum) > 0 { + return true + } + + // a deep array type is complex, how to determine deep? here more than 2 ,for example: [][]object,[][][]int + if len(schema.Type) > 2 { + return true + } + + //Object included, such as Object or []Object + for _, st := range schema.Type { + if st == OBJECT { + return true + } + } + return false +} + +// IsRefSchema whether a schema is a reference schema. +func IsRefSchema(schema *spec.Schema) bool { + return schema.Ref.Ref.GetURL() != nil +} + +// RefSchema build a reference schema. +func RefSchema(refType string) *spec.Schema { + return spec.RefSchema("#/definitions/" + refType) +} + +// PrimitiveSchema build a primitive schema. +func PrimitiveSchema(refType string) *spec.Schema { + return &spec.Schema{SchemaProps: spec.SchemaProps{Type: []string{refType}}} +} + +// BuildCustomSchema build custom schema specified by tag swaggertype. +func BuildCustomSchema(types []string) (*spec.Schema, error) { + if len(types) == 0 { + return nil, nil + } + + switch types[0] { + case PRIMITIVE: + if len(types) == 1 { + return nil, errors.New("need primitive type after primitive") + } + + return BuildCustomSchema(types[1:]) + case ARRAY: + if len(types) == 1 { + return nil, errors.New("need array item type after array") + } + + schema, err := BuildCustomSchema(types[1:]) + if err != nil { + return nil, err + } + + return spec.ArrayProperty(schema), nil + case OBJECT: + if len(types) == 1 { + return PrimitiveSchema(types[0]), nil + } + + schema, err := BuildCustomSchema(types[1:]) + if err != nil { + return nil, err + } + + return spec.MapProperty(schema), nil + default: + err := CheckSchemaType(types[0]) + if err != nil { + return nil, err + } + + return PrimitiveSchema(types[0]), nil + } +} + +// MergeSchema merge schemas +func MergeSchema(dst *spec.Schema, src *spec.Schema) *spec.Schema { + if len(src.Type) > 0 { + dst.Type = src.Type + } + if len(src.Properties) > 0 { + dst.Properties = src.Properties + } + if src.Items != nil { + dst.Items = src.Items + } + if src.AdditionalProperties != nil { + dst.AdditionalProperties = src.AdditionalProperties + } + if len(src.Description) > 0 { + dst.Description = src.Description + } + if src.Nullable { + dst.Nullable = src.Nullable + } + if len(src.Format) > 0 { + dst.Format = src.Format + } + if src.Default != nil { + dst.Default = src.Default + } + if src.Example != nil { + dst.Example = src.Example + } + if len(src.Extensions) > 0 { + dst.Extensions = src.Extensions + } + if src.Maximum != nil { + dst.Maximum = src.Maximum + } + if src.Minimum != nil { + dst.Minimum = src.Minimum + } + if src.ExclusiveMaximum { + dst.ExclusiveMaximum = src.ExclusiveMaximum + } + if src.ExclusiveMinimum { + dst.ExclusiveMinimum = src.ExclusiveMinimum + } + if src.MaxLength != nil { + dst.MaxLength = src.MaxLength + } + if src.MinLength != nil { + dst.MinLength = src.MinLength + } + if len(src.Pattern) > 0 { + dst.Pattern = src.Pattern + } + if src.MaxItems != nil { + dst.MaxItems = src.MaxItems + } + if src.MinItems != nil { + dst.MinItems = src.MinItems + } + if src.UniqueItems { + dst.UniqueItems = 
src.UniqueItems + } + if src.MultipleOf != nil { + dst.MultipleOf = src.MultipleOf + } + if len(src.Enum) > 0 { + dst.Enum = src.Enum + } + if len(src.Extensions) > 0 { + dst.Extensions = src.Extensions + } + if len(src.ExtraProps) > 0 { + dst.ExtraProps = src.ExtraProps + } + return dst +} diff --git a/vendor/github.com/swaggo/swag/spec.go b/vendor/github.com/swaggo/swag/spec.go new file mode 100644 index 0000000..c18a365 --- /dev/null +++ b/vendor/github.com/swaggo/swag/spec.go @@ -0,0 +1,64 @@ +package swag + +import ( + "bytes" + "encoding/json" + "strings" + "text/template" +) + +// Spec holds exported Swagger Info so clients can modify it. +type Spec struct { + Version string + Host string + BasePath string + Schemes []string + Title string + Description string + InfoInstanceName string + SwaggerTemplate string + LeftDelim string + RightDelim string +} + +// ReadDoc parses SwaggerTemplate into swagger document. +func (i *Spec) ReadDoc() string { + i.Description = strings.ReplaceAll(i.Description, "\n", "\\n") + + tpl := template.New("swagger_info").Funcs(template.FuncMap{ + "marshal": func(v interface{}) string { + a, _ := json.Marshal(v) + + return string(a) + }, + "escape": func(v interface{}) string { + // escape tabs + var str = strings.ReplaceAll(v.(string), "\t", "\\t") + // replace " with \", and if that results in \\", replace that with \\\" + str = strings.ReplaceAll(str, "\"", "\\\"") + + return strings.ReplaceAll(str, "\\\\\"", "\\\\\\\"") + }, + }) + + if i.LeftDelim != "" && i.RightDelim != "" { + tpl = tpl.Delims(i.LeftDelim, i.RightDelim) + } + + parsed, err := tpl.Parse(i.SwaggerTemplate) + if err != nil { + return i.SwaggerTemplate + } + + var doc bytes.Buffer + if err = parsed.Execute(&doc, i); err != nil { + return i.SwaggerTemplate + } + + return doc.String() +} + +// InstanceName returns Spec instance name. +func (i *Spec) InstanceName() string { + return i.InfoInstanceName +} diff --git a/vendor/github.com/swaggo/swag/swagger.go b/vendor/github.com/swaggo/swag/swagger.go new file mode 100644 index 0000000..74c162c --- /dev/null +++ b/vendor/github.com/swaggo/swag/swagger.go @@ -0,0 +1,72 @@ +package swag + +import ( + "errors" + "fmt" + "sync" +) + +// Name is a unique name be used to register swag instance. +const Name = "swagger" + +var ( + swaggerMu sync.RWMutex + swags map[string]Swagger +) + +// Swagger is an interface to read swagger document. +type Swagger interface { + ReadDoc() string +} + +// Register registers swagger for given name. +func Register(name string, swagger Swagger) { + swaggerMu.Lock() + defer swaggerMu.Unlock() + + if swagger == nil { + panic("swagger is nil") + } + + if swags == nil { + swags = make(map[string]Swagger) + } + + if _, ok := swags[name]; ok { + panic("Register called twice for swag: " + name) + } + + swags[name] = swagger +} + +// GetSwagger returns the swagger instance for given name. +// If not found, returns nil. +func GetSwagger(name string) Swagger { + swaggerMu.RLock() + defer swaggerMu.RUnlock() + + return swags[name] +} + +// ReadDoc reads swagger document. An optional name parameter can be passed to read a specific document. +// The default name is "swagger". 
+func ReadDoc(optionalName ...string) (string, error) { + swaggerMu.RLock() + defer swaggerMu.RUnlock() + + if swags == nil { + return "", errors.New("no swag has yet been registered") + } + + name := Name + if len(optionalName) != 0 && optionalName[0] != "" { + name = optionalName[0] + } + + swag, ok := swags[name] + if !ok { + return "", fmt.Errorf("no swag named \"%s\" was registered", name) + } + + return swag.ReadDoc(), nil +} diff --git a/vendor/github.com/swaggo/swag/types.go b/vendor/github.com/swaggo/swag/types.go new file mode 100644 index 0000000..0076a6b --- /dev/null +++ b/vendor/github.com/swaggo/swag/types.go @@ -0,0 +1,105 @@ +package swag + +import ( + "go/ast" + "go/token" + "regexp" + "strings" + + "github.com/go-openapi/spec" +) + +// Schema parsed schema. +type Schema struct { + *spec.Schema // + PkgPath string // package import path used to rename Name of a definition int case of conflict + Name string // Name in definitions +} + +// TypeSpecDef the whole information of a typeSpec. +type TypeSpecDef struct { + // ast file where TypeSpec is + File *ast.File + + // the TypeSpec of this type definition + TypeSpec *ast.TypeSpec + + Enums []EnumValue + + // path of package starting from under ${GOPATH}/src or from module path in go.mod + PkgPath string + ParentSpec ast.Decl + + NotUnique bool +} + +// Name the name of the typeSpec. +func (t *TypeSpecDef) Name() string { + if t.TypeSpec != nil && t.TypeSpec.Name != nil { + return t.TypeSpec.Name.Name + } + + return "" +} + +// TypeName the type name of the typeSpec. +func (t *TypeSpecDef) TypeName() string { + if ignoreNameOverride(t.TypeSpec.Name.Name) { + return t.TypeSpec.Name.Name[1:] + } else if t.TypeSpec.Comment != nil { + // get alias from comment '// @name ' + const regexCaseInsensitive = "(?i)" + reTypeName, err := regexp.Compile(regexCaseInsensitive + `^@name\s+(\S+)`) + if err != nil { + panic(err) + } + for _, comment := range t.TypeSpec.Comment.List { + trimmedComment := strings.TrimSpace(strings.TrimLeft(comment.Text, "/")) + texts := reTypeName.FindStringSubmatch(trimmedComment) + if len(texts) > 1 { + return texts[1] + } + } + } + + var names []string + if t.NotUnique { + pkgPath := strings.Map(func(r rune) rune { + if r == '\\' || r == '/' || r == '.' { + return '_' + } + return r + }, t.PkgPath) + names = append(names, pkgPath) + } else if t.File != nil { + names = append(names, t.File.Name.Name) + } + if parentFun, ok := (t.ParentSpec).(*ast.FuncDecl); ok && parentFun != nil { + names = append(names, parentFun.Name.Name) + } + names = append(names, t.TypeSpec.Name.Name) + return fullTypeName(names...) +} + +// FullPath return the full path of the typeSpec. +func (t *TypeSpecDef) FullPath() string { + return t.PkgPath + "." + t.Name() +} + +// AstFileInfo information of an ast.File. 
+type AstFileInfo struct { + //FileSet the FileSet object which is used to parse this go source file + FileSet *token.FileSet + + // File ast.File + File *ast.File + + // Path the path of the ast.File + Path string + + // PackagePath package import path of the ast.File + PackagePath string + + // ParseFlag determine what to parse + ParseFlag ParseFlag +} diff --git a/vendor/github.com/swaggo/swag/utils.go b/vendor/github.com/swaggo/swag/utils.go new file mode 100644 index 0000000..df31ff2 --- /dev/null +++ b/vendor/github.com/swaggo/swag/utils.go @@ -0,0 +1,55 @@ +package swag + +import "unicode" + +// FieldsFunc split a string s by a func splitter into max n parts +func FieldsFunc(s string, f func(rune2 rune) bool, n int) []string { + // A span is used to record a slice of s of the form s[start:end]. + // The start index is inclusive and the end index is exclusive. + type span struct { + start int + end int + } + spans := make([]span, 0, 32) + + // Find the field start and end indices. + // Doing this in a separate pass (rather than slicing the string s + // and collecting the result substrings right away) is significantly + // more efficient, possibly due to cache effects. + start := -1 // valid span start if >= 0 + for end, rune := range s { + if f(rune) { + if start >= 0 { + spans = append(spans, span{start, end}) + // Set start to a negative value. + // Note: using -1 here consistently and reproducibly + // slows down this code by a several percent on amd64. + start = ^start + } + } else { + if start < 0 { + start = end + if n > 0 && len(spans)+1 >= n { + break + } + } + } + } + + // Last field might end at EOF. + if start >= 0 { + spans = append(spans, span{start, len(s)}) + } + + // Create strings from recorded field indices. + a := make([]string, len(spans)) + for i, span := range spans { + a[i] = s[span.start:span.end] + } + return a +} + +// FieldsByAnySpace split a string s by any space character into max n parts +func FieldsByAnySpace(s string, n int) []string { + return FieldsFunc(s, unicode.IsSpace, n) +} diff --git a/vendor/github.com/swaggo/swag/version.go b/vendor/github.com/swaggo/swag/version.go new file mode 100644 index 0000000..ff2810e --- /dev/null +++ b/vendor/github.com/swaggo/swag/version.go @@ -0,0 +1,4 @@ +package swag + +// Version of swag. +const Version = "v1.16.3" diff --git a/vendor/github.com/valyala/fasttemplate/template.go b/vendor/github.com/valyala/fasttemplate/template.go index c2df361..f2d3261 100644 --- a/vendor/github.com/valyala/fasttemplate/template.go +++ b/vendor/github.com/valyala/fasttemplate/template.go @@ -65,9 +65,9 @@ func ExecuteFunc(template, startTag, endTag string, w io.Writer, f TagFunc) (int // values from the map m and writes the result to the given writer w. // // Substitution map m may contain values with the following types: -// - []byte - the fastest value type -// - string - convenient value type -// - TagFunc - flexible value type +// * []byte - the fastest value type +// * string - convenient value type +// * TagFunc - flexible value type // // Returns the number of bytes written to w. 
// @@ -81,9 +81,9 @@ func Execute(template, startTag, endTag string, w io.Writer, m map[string]interf // This can be used as a drop-in replacement for strings.Replacer // // Substitution map m may contain values with the following types: -// - []byte - the fastest value type -// - string - convenient value type -// - TagFunc - flexible value type +// * []byte - the fastest value type +// * string - convenient value type +// * TagFunc - flexible value type // // Returns the number of bytes written to w. // @@ -134,9 +134,9 @@ var byteBufferPool bytebufferpool.Pool // values from the map m and returns the result. // // Substitution map m may contain values with the following types: -// - []byte - the fastest value type -// - string - convenient value type -// - TagFunc - flexible value type +// * []byte - the fastest value type +// * string - convenient value type +// * TagFunc - flexible value type // // This function is optimized for constantly changing templates. // Use Template.ExecuteString for frozen templates. @@ -148,9 +148,9 @@ func ExecuteString(template, startTag, endTag string, m map[string]interface{}) // This can be used as a drop-in replacement for strings.Replacer // // Substitution map m may contain values with the following types: -// - []byte - the fastest value type -// - string - convenient value type -// - TagFunc - flexible value type +// * []byte - the fastest value type +// * string - convenient value type +// * TagFunc - flexible value type // // This function is optimized for constantly changing templates. // Use Template.ExecuteStringStd for frozen templates. @@ -304,9 +304,9 @@ func (t *Template) ExecuteFunc(w io.Writer, f TagFunc) (int64, error) { // values from the map m and writes the result to the given writer w. // // Substitution map m may contain values with the following types: -// - []byte - the fastest value type -// - string - convenient value type -// - TagFunc - flexible value type +// * []byte - the fastest value type +// * string - convenient value type +// * TagFunc - flexible value type // // Returns the number of bytes written to w. func (t *Template) Execute(w io.Writer, m map[string]interface{}) (int64, error) { @@ -317,9 +317,9 @@ func (t *Template) Execute(w io.Writer, m map[string]interface{}) (int64, error) // This can be used as a drop-in replacement for strings.Replacer // // Substitution map m may contain values with the following types: -// - []byte - the fastest value type -// - string - convenient value type -// - TagFunc - flexible value type +// * []byte - the fastest value type +// * string - convenient value type +// * TagFunc - flexible value type // // Returns the number of bytes written to w. func (t *Template) ExecuteStd(w io.Writer, m map[string]interface{}) (int64, error) { @@ -365,9 +365,9 @@ func (t *Template) ExecuteFuncStringWithErr(f TagFunc) (string, error) { // values from the map m and returns the result. // // Substitution map m may contain values with the following types: -// - []byte - the fastest value type -// - string - convenient value type -// - TagFunc - flexible value type +// * []byte - the fastest value type +// * string - convenient value type +// * TagFunc - flexible value type // // This function is optimized for frozen templates. // Use ExecuteString for constantly changing templates. 
@@ -379,9 +379,9 @@ func (t *Template) ExecuteString(m map[string]interface{}) string { // This can be used as a drop-in replacement for strings.Replacer // // Substitution map m may contain values with the following types: -// - []byte - the fastest value type -// - string - convenient value type -// - TagFunc - flexible value type +// * []byte - the fastest value type +// * string - convenient value type +// * TagFunc - flexible value type // // This function is optimized for frozen templates. // Use ExecuteStringStd for constantly changing templates. diff --git a/vendor/github.com/valyala/fasttemplate/unsafe.go b/vendor/github.com/valyala/fasttemplate/unsafe.go index 1d0bc9e..1020ca3 100644 --- a/vendor/github.com/valyala/fasttemplate/unsafe.go +++ b/vendor/github.com/valyala/fasttemplate/unsafe.go @@ -1,4 +1,3 @@ -//go:build !appengine // +build !appengine package fasttemplate diff --git a/vendor/golang.org/x/net/http/httpguts/httplex.go b/vendor/golang.org/x/net/http/httpguts/httplex.go index 6e071e8..9b4de94 100644 --- a/vendor/golang.org/x/net/http/httpguts/httplex.go +++ b/vendor/golang.org/x/net/http/httpguts/httplex.go @@ -12,7 +12,7 @@ import ( "golang.org/x/net/idna" ) -var isTokenTable = [127]bool{ +var isTokenTable = [256]bool{ '!': true, '#': true, '$': true, @@ -93,12 +93,7 @@ var isTokenTable = [127]bool{ } func IsTokenRune(r rune) bool { - i := int(r) - return i < len(isTokenTable) && isTokenTable[i] -} - -func isNotToken(r rune) bool { - return !IsTokenRune(r) + return r < utf8.RuneSelf && isTokenTable[byte(r)] } // HeaderValuesContainsToken reports whether any string in values @@ -202,8 +197,8 @@ func ValidHeaderFieldName(v string) bool { if len(v) == 0 { return false } - for _, r := range v { - if !IsTokenRune(r) { + for i := 0; i < len(v); i++ { + if !isTokenTable[v[i]] { return false } } diff --git a/vendor/golang.org/x/net/http2/frame.go b/vendor/golang.org/x/net/http2/frame.go index c1f6b90..105c3b2 100644 --- a/vendor/golang.org/x/net/http2/frame.go +++ b/vendor/golang.org/x/net/http2/frame.go @@ -490,6 +490,9 @@ func terminalReadFrameError(err error) bool { // returned error is ErrFrameTooLarge. Other errors may be of type // ConnectionError, StreamError, or anything else from the underlying // reader. +// +// If ReadFrame returns an error and a non-nil Frame, the Frame's StreamID +// indicates the stream responsible for the error. func (fr *Framer) ReadFrame() (Frame, error) { fr.errDetail = nil if fr.lastFrame != nil { @@ -1510,19 +1513,18 @@ func (mh *MetaHeadersFrame) checkPseudos() error { } func (fr *Framer) maxHeaderStringLen() int { - v := fr.maxHeaderListSize() - if uint32(int(v)) == v { - return int(v) + v := int(fr.maxHeaderListSize()) + if v < 0 { + // If maxHeaderListSize overflows an int, use no limit (0). + return 0 } - // They had a crazy big number for MaxHeaderBytes anyway, - // so give them unlimited header lengths: - return 0 + return v } // readMetaFrame returns 0 or more CONTINUATION frames from fr and // merge them into the provided hf and returns a MetaHeadersFrame // with the decoded hpack values. 
-func (fr *Framer) readMetaFrame(hf *HeadersFrame) (*MetaHeadersFrame, error) { +func (fr *Framer) readMetaFrame(hf *HeadersFrame) (Frame, error) { if fr.AllowIllegalReads { return nil, errors.New("illegal use of AllowIllegalReads with ReadMetaHeaders") } @@ -1565,6 +1567,7 @@ func (fr *Framer) readMetaFrame(hf *HeadersFrame) (*MetaHeadersFrame, error) { if size > remainSize { hdec.SetEmitEnabled(false) mh.Truncated = true + remainSize = 0 return } remainSize -= size @@ -1577,8 +1580,38 @@ func (fr *Framer) readMetaFrame(hf *HeadersFrame) (*MetaHeadersFrame, error) { var hc headersOrContinuation = hf for { frag := hc.HeaderBlockFragment() + + // Avoid parsing large amounts of headers that we will then discard. + // If the sender exceeds the max header list size by too much, + // skip parsing the fragment and close the connection. + // + // "Too much" is either any CONTINUATION frame after we've already + // exceeded the max header list size (in which case remainSize is 0), + // or a frame whose encoded size is more than twice the remaining + // header list bytes we're willing to accept. + if int64(len(frag)) > int64(2*remainSize) { + if VerboseLogs { + log.Printf("http2: header list too large") + } + // It would be nice to send a RST_STREAM before sending the GOAWAY, + // but the structure of the server's frame writer makes this difficult. + return mh, ConnectionError(ErrCodeProtocol) + } + + // Also close the connection after any CONTINUATION frame following an + // invalid header, since we stop tracking the size of the headers after + // an invalid one. + if invalid != nil { + if VerboseLogs { + log.Printf("http2: invalid header: %v", invalid) + } + // It would be nice to send a RST_STREAM before sending the GOAWAY, + // but the structure of the server's frame writer makes this difficult. + return mh, ConnectionError(ErrCodeProtocol) + } + if _, err := hdec.Write(frag); err != nil { - return nil, ConnectionError(ErrCodeCompression) + return mh, ConnectionError(ErrCodeCompression) } if hc.HeadersEnded() { @@ -1595,7 +1628,7 @@ func (fr *Framer) readMetaFrame(hf *HeadersFrame) (*MetaHeadersFrame, error) { mh.HeadersFrame.invalidate() if err := hdec.Close(); err != nil { - return nil, ConnectionError(ErrCodeCompression) + return mh, ConnectionError(ErrCodeCompression) } if invalid != nil { fr.errDetail = invalid diff --git a/vendor/golang.org/x/net/http2/pipe.go b/vendor/golang.org/x/net/http2/pipe.go index 684d984..3b9f06b 100644 --- a/vendor/golang.org/x/net/http2/pipe.go +++ b/vendor/golang.org/x/net/http2/pipe.go @@ -77,7 +77,10 @@ func (p *pipe) Read(d []byte) (n int, err error) { } } -var errClosedPipeWrite = errors.New("write on closed buffer") +var ( + errClosedPipeWrite = errors.New("write on closed buffer") + errUninitializedPipeWrite = errors.New("write on uninitialized buffer") +) // Write copies bytes from p into the buffer and wakes a reader. // It is an error to write more data than the buffer can hold. @@ -91,6 +94,12 @@ func (p *pipe) Write(d []byte) (n int, err error) { if p.err != nil || p.breakErr != nil { return 0, errClosedPipeWrite } + // pipe.setBuffer is never invoked, leaving the buffer uninitialized. + // We shouldn't try to write to an uninitialized pipe, + // but returning an error is better than panicking. 
+ if p.b == nil { + return 0, errUninitializedPipeWrite + } return p.b.Write(d) } diff --git a/vendor/golang.org/x/net/http2/server.go b/vendor/golang.org/x/net/http2/server.go index ae94c64..c5d0810 100644 --- a/vendor/golang.org/x/net/http2/server.go +++ b/vendor/golang.org/x/net/http2/server.go @@ -124,6 +124,7 @@ type Server struct { // IdleTimeout specifies how long until idle clients should be // closed with a GOAWAY frame. PING frames are not considered // activity for the purposes of IdleTimeout. + // If zero or negative, there is no timeout. IdleTimeout time.Duration // MaxUploadBufferPerConnection is the size of the initial flow @@ -434,7 +435,7 @@ func (s *Server) ServeConn(c net.Conn, opts *ServeConnOpts) { // passes the connection off to us with the deadline already set. // Write deadlines are set per stream in serverConn.newStream. // Disarm the net.Conn write deadline here. - if sc.hs.WriteTimeout != 0 { + if sc.hs.WriteTimeout > 0 { sc.conn.SetWriteDeadline(time.Time{}) } @@ -731,11 +732,7 @@ func isClosedConnError(err error) bool { return false } - // TODO: remove this string search and be more like the Windows - // case below. That might involve modifying the standard library - // to return better error types. - str := err.Error() - if strings.Contains(str, "use of closed network connection") { + if errors.Is(err, net.ErrClosed) { return true } @@ -924,7 +921,7 @@ func (sc *serverConn) serve() { sc.setConnState(http.StateActive) sc.setConnState(http.StateIdle) - if sc.srv.IdleTimeout != 0 { + if sc.srv.IdleTimeout > 0 { sc.idleTimer = time.AfterFunc(sc.srv.IdleTimeout, sc.onIdleTimer) defer sc.idleTimer.Stop() } @@ -1481,6 +1478,11 @@ func (sc *serverConn) processFrameFromReader(res readFrameResult) bool { sc.goAway(ErrCodeFlowControl) return true case ConnectionError: + if res.f != nil { + if id := res.f.Header().StreamID; id > sc.maxClientStreamID { + sc.maxClientStreamID = id + } + } sc.logf("http2: server connection error from %v: %v", sc.conn.RemoteAddr(), ev) sc.goAway(ErrCode(ev)) return true // goAway will handle shutdown @@ -1637,7 +1639,7 @@ func (sc *serverConn) closeStream(st *stream, err error) { delete(sc.streams, st.id) if len(sc.streams) == 0 { sc.setConnState(http.StateIdle) - if sc.srv.IdleTimeout != 0 { + if sc.srv.IdleTimeout > 0 { sc.idleTimer.Reset(sc.srv.IdleTimeout) } if h1ServerKeepAlivesDisabled(sc.hs) { @@ -2017,7 +2019,7 @@ func (sc *serverConn) processHeaders(f *MetaHeadersFrame) error { // similar to how the http1 server works. Here it's // technically more like the http1 Server's ReadHeaderTimeout // (in Go 1.8), though. That's a more sane option anyway. - if sc.hs.ReadTimeout != 0 { + if sc.hs.ReadTimeout > 0 { sc.conn.SetReadDeadline(time.Time{}) st.readDeadline = time.AfterFunc(sc.hs.ReadTimeout, st.onReadTimeout) } @@ -2038,7 +2040,7 @@ func (sc *serverConn) upgradeRequest(req *http.Request) { // Disable any read deadline set by the net/http package // prior to the upgrade. 
- if sc.hs.ReadTimeout != 0 { + if sc.hs.ReadTimeout > 0 { sc.conn.SetReadDeadline(time.Time{}) } @@ -2116,7 +2118,7 @@ func (sc *serverConn) newStream(id, pusherID uint32, state streamState) *stream st.flow.conn = &sc.flow // link to conn-level counter st.flow.add(sc.initialStreamSendWindowSize) st.inflow.init(sc.srv.initialStreamRecvWindowSize()) - if sc.hs.WriteTimeout != 0 { + if sc.hs.WriteTimeout > 0 { st.writeDeadline = time.AfterFunc(sc.hs.WriteTimeout, st.onWriteTimeout) } diff --git a/vendor/golang.org/x/net/http2/testsync.go b/vendor/golang.org/x/net/http2/testsync.go new file mode 100644 index 0000000..61075bd --- /dev/null +++ b/vendor/golang.org/x/net/http2/testsync.go @@ -0,0 +1,331 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +package http2 + +import ( + "context" + "sync" + "time" +) + +// testSyncHooks coordinates goroutines in tests. +// +// For example, a call to ClientConn.RoundTrip involves several goroutines, including: +// - the goroutine running RoundTrip; +// - the clientStream.doRequest goroutine, which writes the request; and +// - the clientStream.readLoop goroutine, which reads the response. +// +// Using testSyncHooks, a test can start a RoundTrip and identify when all these goroutines +// are blocked waiting for some condition such as reading the Request.Body or waiting for +// flow control to become available. +// +// The testSyncHooks also manage timers and synthetic time in tests. +// This permits us to, for example, start a request and cause it to time out waiting for +// response headers without resorting to time.Sleep calls. +type testSyncHooks struct { + // active/inactive act as a mutex and condition variable. + // + // - neither chan contains a value: testSyncHooks is locked. + // - active contains a value: unlocked, and at least one goroutine is not blocked + // - inactive contains a value: unlocked, and all goroutines are blocked + active chan struct{} + inactive chan struct{} + + // goroutine counts + total int // total goroutines + condwait map[*sync.Cond]int // blocked in sync.Cond.Wait + blocked []*testBlockedGoroutine // otherwise blocked + + // fake time + now time.Time + timers []*fakeTimer + + // Transport testing: Report various events. + newclientconn func(*ClientConn) + newstream func(*clientStream) +} + +// testBlockedGoroutine is a blocked goroutine. +type testBlockedGoroutine struct { + f func() bool // blocked until f returns true + ch chan struct{} // closed when unblocked +} + +func newTestSyncHooks() *testSyncHooks { + h := &testSyncHooks{ + active: make(chan struct{}, 1), + inactive: make(chan struct{}, 1), + condwait: map[*sync.Cond]int{}, + } + h.inactive <- struct{}{} + h.now = time.Date(2000, 1, 1, 0, 0, 0, 0, time.UTC) + return h +} + +// lock acquires the testSyncHooks mutex. +func (h *testSyncHooks) lock() { + select { + case <-h.active: + case <-h.inactive: + } +} + +// waitInactive waits for all goroutines to become inactive. +func (h *testSyncHooks) waitInactive() { + for { + <-h.inactive + if !h.unlock() { + break + } + } +} + +// unlock releases the testSyncHooks mutex. +// It reports whether any goroutines are active. +func (h *testSyncHooks) unlock() (active bool) { + // Look for a blocked goroutine which can be unblocked. 
+ blocked := h.blocked[:0] + unblocked := false + for _, b := range h.blocked { + if !unblocked && b.f() { + unblocked = true + close(b.ch) + } else { + blocked = append(blocked, b) + } + } + h.blocked = blocked + + // Count goroutines blocked on condition variables. + condwait := 0 + for _, count := range h.condwait { + condwait += count + } + + if h.total > condwait+len(blocked) { + h.active <- struct{}{} + return true + } else { + h.inactive <- struct{}{} + return false + } +} + +// goRun starts a new goroutine. +func (h *testSyncHooks) goRun(f func()) { + h.lock() + h.total++ + h.unlock() + go func() { + defer func() { + h.lock() + h.total-- + h.unlock() + }() + f() + }() +} + +// blockUntil indicates that a goroutine is blocked waiting for some condition to become true. +// It waits until f returns true before proceeding. +// +// Example usage: +// +// h.blockUntil(func() bool { +// // Is the context done yet? +// select { +// case <-ctx.Done(): +// default: +// return false +// } +// return true +// }) +// // Wait for the context to become done. +// <-ctx.Done() +// +// The function f passed to blockUntil must be non-blocking and idempotent. +func (h *testSyncHooks) blockUntil(f func() bool) { + if f() { + return + } + ch := make(chan struct{}) + h.lock() + h.blocked = append(h.blocked, &testBlockedGoroutine{ + f: f, + ch: ch, + }) + h.unlock() + <-ch +} + +// broadcast is sync.Cond.Broadcast. +func (h *testSyncHooks) condBroadcast(cond *sync.Cond) { + h.lock() + delete(h.condwait, cond) + h.unlock() + cond.Broadcast() +} + +// broadcast is sync.Cond.Wait. +func (h *testSyncHooks) condWait(cond *sync.Cond) { + h.lock() + h.condwait[cond]++ + h.unlock() +} + +// newTimer creates a new fake timer. +func (h *testSyncHooks) newTimer(d time.Duration) timer { + h.lock() + defer h.unlock() + t := &fakeTimer{ + hooks: h, + when: h.now.Add(d), + c: make(chan time.Time), + } + h.timers = append(h.timers, t) + return t +} + +// afterFunc creates a new fake AfterFunc timer. +func (h *testSyncHooks) afterFunc(d time.Duration, f func()) timer { + h.lock() + defer h.unlock() + t := &fakeTimer{ + hooks: h, + when: h.now.Add(d), + f: f, + } + h.timers = append(h.timers, t) + return t +} + +func (h *testSyncHooks) contextWithTimeout(ctx context.Context, d time.Duration) (context.Context, context.CancelFunc) { + ctx, cancel := context.WithCancel(ctx) + t := h.afterFunc(d, cancel) + return ctx, func() { + t.Stop() + cancel() + } +} + +func (h *testSyncHooks) timeUntilEvent() time.Duration { + h.lock() + defer h.unlock() + var next time.Time + for _, t := range h.timers { + if next.IsZero() || t.when.Before(next) { + next = t.when + } + } + if d := next.Sub(h.now); d > 0 { + return d + } + return 0 +} + +// advance advances time and causes synthetic timers to fire. +func (h *testSyncHooks) advance(d time.Duration) { + h.lock() + defer h.unlock() + h.now = h.now.Add(d) + timers := h.timers[:0] + for _, t := range h.timers { + t := t // remove after go.mod depends on go1.22 + t.mu.Lock() + switch { + case t.when.After(h.now): + timers = append(timers, t) + case t.when.IsZero(): + // stopped timer + default: + t.when = time.Time{} + if t.c != nil { + close(t.c) + } + if t.f != nil { + h.total++ + go func() { + defer func() { + h.lock() + h.total-- + h.unlock() + }() + t.f() + }() + } + } + t.mu.Unlock() + } + h.timers = timers +} + +// A timer wraps a time.Timer, or a synthetic equivalent in tests. +// Unlike time.Timer, timer is single-use: The timer channel is closed when the timer expires. 
+type timer interface { + C() <-chan time.Time + Stop() bool + Reset(d time.Duration) bool +} + +// timeTimer implements timer using real time. +type timeTimer struct { + t *time.Timer + c chan time.Time +} + +// newTimeTimer creates a new timer using real time. +func newTimeTimer(d time.Duration) timer { + ch := make(chan time.Time) + t := time.AfterFunc(d, func() { + close(ch) + }) + return &timeTimer{t, ch} +} + +// newTimeAfterFunc creates an AfterFunc timer using real time. +func newTimeAfterFunc(d time.Duration, f func()) timer { + return &timeTimer{ + t: time.AfterFunc(d, f), + } +} + +func (t timeTimer) C() <-chan time.Time { return t.c } +func (t timeTimer) Stop() bool { return t.t.Stop() } +func (t timeTimer) Reset(d time.Duration) bool { return t.t.Reset(d) } + +// fakeTimer implements timer using fake time. +type fakeTimer struct { + hooks *testSyncHooks + + mu sync.Mutex + when time.Time // when the timer will fire + c chan time.Time // closed when the timer fires; mutually exclusive with f + f func() // called when the timer fires; mutually exclusive with c +} + +func (t *fakeTimer) C() <-chan time.Time { return t.c } + +func (t *fakeTimer) Stop() bool { + t.mu.Lock() + defer t.mu.Unlock() + stopped := t.when.IsZero() + t.when = time.Time{} + return stopped +} + +func (t *fakeTimer) Reset(d time.Duration) bool { + if t.c != nil || t.f == nil { + panic("fakeTimer only supports Reset on AfterFunc timers") + } + t.mu.Lock() + defer t.mu.Unlock() + t.hooks.lock() + defer t.hooks.unlock() + active := !t.when.IsZero() + t.when = t.hooks.now.Add(d) + if !active { + t.hooks.timers = append(t.hooks.timers, t) + } + return active +} diff --git a/vendor/golang.org/x/net/http2/transport.go b/vendor/golang.org/x/net/http2/transport.go index df578b8..2fa4949 100644 --- a/vendor/golang.org/x/net/http2/transport.go +++ b/vendor/golang.org/x/net/http2/transport.go @@ -147,6 +147,12 @@ type Transport struct { // waiting for their turn. StrictMaxConcurrentStreams bool + // IdleConnTimeout is the maximum amount of time an idle + // (keep-alive) connection will remain idle before closing + // itself. + // Zero means no limit. + IdleConnTimeout time.Duration + // ReadIdleTimeout is the timeout after which a health check using ping // frame will be carried out if no frame is received on the connection. // Note that a ping response will is considered a received frame, so if @@ -178,6 +184,8 @@ type Transport struct { connPoolOnce sync.Once connPoolOrDef ClientConnPool // non-nil version of ConnPool + + syncHooks *testSyncHooks } func (t *Transport) maxHeaderListSize() uint32 { @@ -302,7 +310,7 @@ type ClientConn struct { readerErr error // set before readerDone is closed idleTimeout time.Duration // or 0 for never - idleTimer *time.Timer + idleTimer timer mu sync.Mutex // guards following cond *sync.Cond // hold mu; broadcast on flow/closed changes @@ -344,6 +352,60 @@ type ClientConn struct { werr error // first write error that has occurred hbuf bytes.Buffer // HPACK encoder writes into this henc *hpack.Encoder + + syncHooks *testSyncHooks // can be nil +} + +// Hook points used for testing. +// Outside of tests, cc.syncHooks is nil and these all have minimal implementations. +// Inside tests, see the testSyncHooks function docs. + +// goRun starts a new goroutine. +func (cc *ClientConn) goRun(f func()) { + if cc.syncHooks != nil { + cc.syncHooks.goRun(f) + return + } + go f() +} + +// condBroadcast is cc.cond.Broadcast. 
+func (cc *ClientConn) condBroadcast() { + if cc.syncHooks != nil { + cc.syncHooks.condBroadcast(cc.cond) + } + cc.cond.Broadcast() +} + +// condWait is cc.cond.Wait. +func (cc *ClientConn) condWait() { + if cc.syncHooks != nil { + cc.syncHooks.condWait(cc.cond) + } + cc.cond.Wait() +} + +// newTimer creates a new time.Timer, or a synthetic timer in tests. +func (cc *ClientConn) newTimer(d time.Duration) timer { + if cc.syncHooks != nil { + return cc.syncHooks.newTimer(d) + } + return newTimeTimer(d) +} + +// afterFunc creates a new time.AfterFunc timer, or a synthetic timer in tests. +func (cc *ClientConn) afterFunc(d time.Duration, f func()) timer { + if cc.syncHooks != nil { + return cc.syncHooks.afterFunc(d, f) + } + return newTimeAfterFunc(d, f) +} + +func (cc *ClientConn) contextWithTimeout(ctx context.Context, d time.Duration) (context.Context, context.CancelFunc) { + if cc.syncHooks != nil { + return cc.syncHooks.contextWithTimeout(ctx, d) + } + return context.WithTimeout(ctx, d) } // clientStream is the state for a single HTTP/2 stream. One of these @@ -425,7 +487,7 @@ func (cs *clientStream) abortStreamLocked(err error) { // TODO(dneil): Clean up tests where cs.cc.cond is nil. if cs.cc.cond != nil { // Wake up writeRequestBody if it is waiting on flow control. - cs.cc.cond.Broadcast() + cs.cc.condBroadcast() } } @@ -435,7 +497,7 @@ func (cs *clientStream) abortRequestBodyWrite() { defer cc.mu.Unlock() if cs.reqBody != nil && cs.reqBodyClosed == nil { cs.closeReqBodyLocked() - cc.cond.Broadcast() + cc.condBroadcast() } } @@ -445,10 +507,10 @@ func (cs *clientStream) closeReqBodyLocked() { } cs.reqBodyClosed = make(chan struct{}) reqBodyClosed := cs.reqBodyClosed - go func() { + cs.cc.goRun(func() { cs.reqBody.Close() close(reqBodyClosed) - }() + }) } type stickyErrWriter struct { @@ -537,15 +599,6 @@ func authorityAddr(scheme string, authority string) (addr string) { return net.JoinHostPort(host, port) } -var retryBackoffHook func(time.Duration) *time.Timer - -func backoffNewTimer(d time.Duration) *time.Timer { - if retryBackoffHook != nil { - return retryBackoffHook(d) - } - return time.NewTimer(d) -} - // RoundTripOpt is like RoundTrip, but takes options. 
func (t *Transport) RoundTripOpt(req *http.Request, opt RoundTripOpt) (*http.Response, error) { if !(req.URL.Scheme == "https" || (req.URL.Scheme == "http" && t.AllowHTTP)) { @@ -573,13 +626,27 @@ func (t *Transport) RoundTripOpt(req *http.Request, opt RoundTripOpt) (*http.Res backoff := float64(uint(1) << (uint(retry) - 1)) backoff += backoff * (0.1 * mathrand.Float64()) d := time.Second * time.Duration(backoff) - timer := backoffNewTimer(d) + var tm timer + if t.syncHooks != nil { + tm = t.syncHooks.newTimer(d) + t.syncHooks.blockUntil(func() bool { + select { + case <-tm.C(): + case <-req.Context().Done(): + default: + return false + } + return true + }) + } else { + tm = newTimeTimer(d) + } select { - case <-timer.C: + case <-tm.C(): t.vlogf("RoundTrip retrying after failure: %v", roundTripErr) continue case <-req.Context().Done(): - timer.Stop() + tm.Stop() err = req.Context().Err() } } @@ -658,6 +725,9 @@ func canRetryError(err error) bool { } func (t *Transport) dialClientConn(ctx context.Context, addr string, singleUse bool) (*ClientConn, error) { + if t.syncHooks != nil { + return t.newClientConn(nil, singleUse, t.syncHooks) + } host, _, err := net.SplitHostPort(addr) if err != nil { return nil, err @@ -666,7 +736,7 @@ func (t *Transport) dialClientConn(ctx context.Context, addr string, singleUse b if err != nil { return nil, err } - return t.newClientConn(tconn, singleUse) + return t.newClientConn(tconn, singleUse, nil) } func (t *Transport) newTLSConfig(host string) *tls.Config { @@ -732,10 +802,10 @@ func (t *Transport) maxEncoderHeaderTableSize() uint32 { } func (t *Transport) NewClientConn(c net.Conn) (*ClientConn, error) { - return t.newClientConn(c, t.disableKeepAlives()) + return t.newClientConn(c, t.disableKeepAlives(), nil) } -func (t *Transport) newClientConn(c net.Conn, singleUse bool) (*ClientConn, error) { +func (t *Transport) newClientConn(c net.Conn, singleUse bool, hooks *testSyncHooks) (*ClientConn, error) { cc := &ClientConn{ t: t, tconn: c, @@ -750,10 +820,15 @@ func (t *Transport) newClientConn(c net.Conn, singleUse bool) (*ClientConn, erro wantSettingsAck: true, pings: make(map[[8]byte]chan struct{}), reqHeaderMu: make(chan struct{}, 1), + syncHooks: hooks, + } + if hooks != nil { + hooks.newclientconn(cc) + c = cc.tconn } if d := t.idleConnTimeout(); d != 0 { cc.idleTimeout = d - cc.idleTimer = time.AfterFunc(d, cc.onIdleTimeout) + cc.idleTimer = cc.afterFunc(d, cc.onIdleTimeout) } if VerboseLogs { t.vlogf("http2: Transport creating client conn %p to %v", cc, c.RemoteAddr()) @@ -818,7 +893,7 @@ func (t *Transport) newClientConn(c net.Conn, singleUse bool) (*ClientConn, erro return nil, cc.werr } - go cc.readLoop() + cc.goRun(cc.readLoop) return cc, nil } @@ -826,7 +901,7 @@ func (cc *ClientConn) healthCheck() { pingTimeout := cc.t.pingTimeout() // We don't need to periodically ping in the health check, because the readLoop of ClientConn will // trigger the healthCheck again if there is no frame received. - ctx, cancel := context.WithTimeout(context.Background(), pingTimeout) + ctx, cancel := cc.contextWithTimeout(context.Background(), pingTimeout) defer cancel() cc.vlogf("http2: Transport sending health check") err := cc.Ping(ctx) @@ -861,7 +936,20 @@ func (cc *ClientConn) setGoAway(f *GoAwayFrame) { } last := f.LastStreamID for streamID, cs := range cc.streams { - if streamID > last { + if streamID <= last { + // The server's GOAWAY indicates that it received this stream. 
+ // It will either finish processing it, or close the connection + // without doing so. Either way, leave the stream alone for now. + continue + } + if streamID == 1 && cc.goAway.ErrCode != ErrCodeNo { + // Don't retry the first stream on a connection if we get a non-NO error. + // If the server is sending an error on a new connection, + // retrying the request on a new one probably isn't going to work. + cs.abortStreamLocked(fmt.Errorf("http2: Transport received GOAWAY from server ErrCode:%v", cc.goAway.ErrCode)) + } else { + // Aborting the stream with errClentConnGotGoAway indicates that + // the request should be retried on a new connection. cs.abortStreamLocked(errClientConnGotGoAway) } } @@ -1056,7 +1144,7 @@ func (cc *ClientConn) Shutdown(ctx context.Context) error { // Wait for all in-flight streams to complete or connection to close done := make(chan struct{}) cancelled := false // guarded by cc.mu - go func() { + cc.goRun(func() { cc.mu.Lock() defer cc.mu.Unlock() for { @@ -1068,9 +1156,9 @@ func (cc *ClientConn) Shutdown(ctx context.Context) error { if cancelled { break } - cc.cond.Wait() + cc.condWait() } - }() + }) shutdownEnterWaitStateHook() select { case <-done: @@ -1080,7 +1168,7 @@ func (cc *ClientConn) Shutdown(ctx context.Context) error { cc.mu.Lock() // Free the goroutine above cancelled = true - cc.cond.Broadcast() + cc.condBroadcast() cc.mu.Unlock() return ctx.Err() } @@ -1118,7 +1206,7 @@ func (cc *ClientConn) closeForError(err error) { for _, cs := range cc.streams { cs.abortStreamLocked(err) } - cc.cond.Broadcast() + cc.condBroadcast() cc.mu.Unlock() cc.closeConn() } @@ -1215,6 +1303,10 @@ func (cc *ClientConn) decrStreamReservationsLocked() { } func (cc *ClientConn) RoundTrip(req *http.Request) (*http.Response, error) { + return cc.roundTrip(req, nil) +} + +func (cc *ClientConn) roundTrip(req *http.Request, streamf func(*clientStream)) (*http.Response, error) { ctx := req.Context() cs := &clientStream{ cc: cc, @@ -1229,9 +1321,23 @@ func (cc *ClientConn) RoundTrip(req *http.Request) (*http.Response, error) { respHeaderRecv: make(chan struct{}), donec: make(chan struct{}), } - go cs.doRequest(req) + cc.goRun(func() { + cs.doRequest(req) + }) waitDone := func() error { + if cc.syncHooks != nil { + cc.syncHooks.blockUntil(func() bool { + select { + case <-cs.donec: + case <-ctx.Done(): + case <-cs.reqCancel: + default: + return false + } + return true + }) + } select { case <-cs.donec: return nil @@ -1292,7 +1398,24 @@ func (cc *ClientConn) RoundTrip(req *http.Request) (*http.Response, error) { return err } + if streamf != nil { + streamf(cs) + } + for { + if cc.syncHooks != nil { + cc.syncHooks.blockUntil(func() bool { + select { + case <-cs.respHeaderRecv: + case <-cs.abort: + case <-ctx.Done(): + case <-cs.reqCancel: + default: + return false + } + return true + }) + } select { case <-cs.respHeaderRecv: return handleResponseHeaders() @@ -1348,6 +1471,21 @@ func (cs *clientStream) writeRequest(req *http.Request) (err error) { if cc.reqHeaderMu == nil { panic("RoundTrip on uninitialized ClientConn") // for tests } + var newStreamHook func(*clientStream) + if cc.syncHooks != nil { + newStreamHook = cc.syncHooks.newstream + cc.syncHooks.blockUntil(func() bool { + select { + case cc.reqHeaderMu <- struct{}{}: + <-cc.reqHeaderMu + case <-cs.reqCancel: + case <-ctx.Done(): + default: + return false + } + return true + }) + } select { case cc.reqHeaderMu <- struct{}{}: case <-cs.reqCancel: @@ -1372,6 +1510,10 @@ func (cs *clientStream) writeRequest(req *http.Request) 
(err error) { } cc.mu.Unlock() + if newStreamHook != nil { + newStreamHook(cs) + } + // TODO(bradfitz): this is a copy of the logic in net/http. Unify somewhere? if !cc.t.disableCompression() && req.Header.Get("Accept-Encoding") == "" && @@ -1452,15 +1594,30 @@ func (cs *clientStream) writeRequest(req *http.Request) (err error) { var respHeaderTimer <-chan time.Time var respHeaderRecv chan struct{} if d := cc.responseHeaderTimeout(); d != 0 { - timer := time.NewTimer(d) + timer := cc.newTimer(d) defer timer.Stop() - respHeaderTimer = timer.C + respHeaderTimer = timer.C() respHeaderRecv = cs.respHeaderRecv } // Wait until the peer half-closes its end of the stream, // or until the request is aborted (via context, error, or otherwise), // whichever comes first. for { + if cc.syncHooks != nil { + cc.syncHooks.blockUntil(func() bool { + select { + case <-cs.peerClosed: + case <-respHeaderTimer: + case <-respHeaderRecv: + case <-cs.abort: + case <-ctx.Done(): + case <-cs.reqCancel: + default: + return false + } + return true + }) + } select { case <-cs.peerClosed: return nil @@ -1609,7 +1766,7 @@ func (cc *ClientConn) awaitOpenSlotForStreamLocked(cs *clientStream) error { return nil } cc.pendingRequests++ - cc.cond.Wait() + cc.condWait() cc.pendingRequests-- select { case <-cs.abort: @@ -1871,10 +2028,26 @@ func (cs *clientStream) awaitFlowControl(maxBytes int) (taken int32, err error) cs.flow.take(take) return take, nil } - cc.cond.Wait() + cc.condWait() } } +func validateHeaders(hdrs http.Header) string { + for k, vv := range hdrs { + if !httpguts.ValidHeaderFieldName(k) { + return fmt.Sprintf("name %q", k) + } + for _, v := range vv { + if !httpguts.ValidHeaderFieldValue(v) { + // Don't include the value in the error, + // because it may be sensitive. + return fmt.Sprintf("value for header %q", k) + } + } + } + return "" +} + var errNilRequestURL = errors.New("http2: Request.URI is nil") // requires cc.wmu be held. @@ -1912,19 +2085,14 @@ func (cc *ClientConn) encodeHeaders(req *http.Request, addGzipHeader bool, trail } } - // Check for any invalid headers and return an error before we + // Check for any invalid headers+trailers and return an error before we // potentially pollute our hpack state. (We want to be able to // continue to reuse the hpack encoder for future requests) - for k, vv := range req.Header { - if !httpguts.ValidHeaderFieldName(k) { - return nil, fmt.Errorf("invalid HTTP header name %q", k) - } - for _, v := range vv { - if !httpguts.ValidHeaderFieldValue(v) { - // Don't include the value in the error, because it may be sensitive. - return nil, fmt.Errorf("invalid HTTP header value for header %q", k) - } - } + if err := validateHeaders(req.Header); err != "" { + return nil, fmt.Errorf("invalid HTTP header %s", err) + } + if err := validateHeaders(req.Trailer); err != "" { + return nil, fmt.Errorf("invalid HTTP trailer %s", err) } enumerateHeaders := func(f func(name, value string)) { @@ -2143,7 +2311,7 @@ func (cc *ClientConn) forgetStreamID(id uint32) { } // Wake up writeRequestBody via clientStream.awaitFlowControl and // wake up RoundTrip if there is a pending request. 
- cc.cond.Broadcast() + cc.condBroadcast() closeOnIdle := cc.singleUse || cc.doNotReuse || cc.t.disableKeepAlives() || cc.goAway != nil if closeOnIdle && cc.streamsReserved == 0 && len(cc.streams) == 0 { @@ -2231,7 +2399,7 @@ func (rl *clientConnReadLoop) cleanup() { cs.abortStreamLocked(err) } } - cc.cond.Broadcast() + cc.condBroadcast() cc.mu.Unlock() } @@ -2266,10 +2434,9 @@ func (rl *clientConnReadLoop) run() error { cc := rl.cc gotSettings := false readIdleTimeout := cc.t.ReadIdleTimeout - var t *time.Timer + var t timer if readIdleTimeout != 0 { - t = time.AfterFunc(readIdleTimeout, cc.healthCheck) - defer t.Stop() + t = cc.afterFunc(readIdleTimeout, cc.healthCheck) } for { f, err := cc.fr.ReadFrame() @@ -2684,7 +2851,7 @@ func (rl *clientConnReadLoop) processData(f *DataFrame) error { }) return nil } - if !cs.firstByte { + if !cs.pastHeaders { cc.logf("protocol error: received DATA before a HEADERS frame") rl.endStreamError(cs, StreamError{ StreamID: f.StreamID, @@ -2867,7 +3034,7 @@ func (rl *clientConnReadLoop) processSettingsNoWrite(f *SettingsFrame) error { for _, cs := range cc.streams { cs.flow.add(delta) } - cc.cond.Broadcast() + cc.condBroadcast() cc.initialWindowSize = s.Val case SettingHeaderTableSize: @@ -2911,9 +3078,18 @@ func (rl *clientConnReadLoop) processWindowUpdate(f *WindowUpdateFrame) error { fl = &cs.flow } if !fl.add(int32(f.Increment)) { + // For stream, the sender sends RST_STREAM with an error code of FLOW_CONTROL_ERROR + if cs != nil { + rl.endStreamError(cs, StreamError{ + StreamID: f.StreamID, + Code: ErrCodeFlowControl, + }) + return nil + } + return ConnectionError(ErrCodeFlowControl) } - cc.cond.Broadcast() + cc.condBroadcast() return nil } @@ -2955,24 +3131,38 @@ func (cc *ClientConn) Ping(ctx context.Context) error { } cc.mu.Unlock() } - errc := make(chan error, 1) - go func() { + var pingError error + errc := make(chan struct{}) + cc.goRun(func() { cc.wmu.Lock() defer cc.wmu.Unlock() - if err := cc.fr.WritePing(false, p); err != nil { - errc <- err + if pingError = cc.fr.WritePing(false, p); pingError != nil { + close(errc) return } - if err := cc.bw.Flush(); err != nil { - errc <- err + if pingError = cc.bw.Flush(); pingError != nil { + close(errc) return } - }() + }) + if cc.syncHooks != nil { + cc.syncHooks.blockUntil(func() bool { + select { + case <-c: + case <-errc: + case <-ctx.Done(): + case <-cc.readerDone: + default: + return false + } + return true + }) + } select { case <-c: return nil - case err := <-errc: - return err + case <-errc: + return pingError case <-ctx.Done(): return ctx.Err() case <-cc.readerDone: @@ -3141,9 +3331,17 @@ func (rt noDialH2RoundTripper) RoundTrip(req *http.Request) (*http.Response, err } func (t *Transport) idleConnTimeout() time.Duration { + // to keep things backwards compatible, we use non-zero values of + // IdleConnTimeout, followed by using the IdleConnTimeout on the underlying + // http1 transport, followed by 0 + if t.IdleConnTimeout != 0 { + return t.IdleConnTimeout + } + if t.t1 != nil { return t.t1.IdleConnTimeout } + return 0 } diff --git a/vendor/golang.org/x/sys/unix/aliases.go b/vendor/golang.org/x/sys/unix/aliases.go index e7d3df4..b0e4198 100644 --- a/vendor/golang.org/x/sys/unix/aliases.go +++ b/vendor/golang.org/x/sys/unix/aliases.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-//go:build (aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || zos) && go1.9 +//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || zos package unix diff --git a/vendor/golang.org/x/sys/unix/asm_zos_s390x.s b/vendor/golang.org/x/sys/unix/asm_zos_s390x.s index 2f67ba8..813dfad 100644 --- a/vendor/golang.org/x/sys/unix/asm_zos_s390x.s +++ b/vendor/golang.org/x/sys/unix/asm_zos_s390x.s @@ -9,9 +9,11 @@ #define PSALAA 1208(R0) #define GTAB64(x) 80(x) #define LCA64(x) 88(x) +#define SAVSTACK_ASYNC(x) 336(x) // in the LCA #define CAA(x) 8(x) -#define EDCHPXV(x) 1016(x) // in the CAA -#define SAVSTACK_ASYNC(x) 336(x) // in the LCA +#define CEECAATHDID(x) 976(x) // in the CAA +#define EDCHPXV(x) 1016(x) // in the CAA +#define GOCB(x) 1104(x) // in the CAA // SS_*, where x=SAVSTACK_ASYNC #define SS_LE(x) 0(x) @@ -19,394 +21,125 @@ #define SS_ERRNO(x) 16(x) #define SS_ERRNOJR(x) 20(x) -#define LE_CALL BYTE $0x0D; BYTE $0x76; // BL R7, R6 +// Function Descriptor Offsets +#define __errno 0x156*16 +#define __err2ad 0x16C*16 -TEXT ·clearErrno(SB),NOSPLIT,$0-0 - BL addrerrno<>(SB) - MOVD $0, 0(R3) +// Call Instructions +#define LE_CALL BYTE $0x0D; BYTE $0x76 // BL R7, R6 +#define SVC_LOAD BYTE $0x0A; BYTE $0x08 // SVC 08 LOAD +#define SVC_DELETE BYTE $0x0A; BYTE $0x09 // SVC 09 DELETE + +DATA zosLibVec<>(SB)/8, $0 +GLOBL zosLibVec<>(SB), NOPTR, $8 + +TEXT ·initZosLibVec(SB), NOSPLIT|NOFRAME, $0-0 + MOVW PSALAA, R8 + MOVD LCA64(R8), R8 + MOVD CAA(R8), R8 + MOVD EDCHPXV(R8), R8 + MOVD R8, zosLibVec<>(SB) + RET + +TEXT ·GetZosLibVec(SB), NOSPLIT|NOFRAME, $0-0 + MOVD zosLibVec<>(SB), R8 + MOVD R8, ret+0(FP) + RET + +TEXT ·clearErrno(SB), NOSPLIT, $0-0 + BL addrerrno<>(SB) + MOVD $0, 0(R3) RET // Returns the address of errno in R3. -TEXT addrerrno<>(SB),NOSPLIT|NOFRAME,$0-0 +TEXT addrerrno<>(SB), NOSPLIT|NOFRAME, $0-0 // Get library control area (LCA). - MOVW PSALAA, R8 - MOVD LCA64(R8), R8 + MOVW PSALAA, R8 + MOVD LCA64(R8), R8 // Get __errno FuncDesc. - MOVD CAA(R8), R9 - MOVD EDCHPXV(R9), R9 - ADD $(0x156*16), R9 - LMG 0(R9), R5, R6 + MOVD CAA(R8), R9 + MOVD EDCHPXV(R9), R9 + ADD $(__errno), R9 + LMG 0(R9), R5, R6 // Switch to saved LE stack. - MOVD SAVSTACK_ASYNC(R8), R9 - MOVD 0(R9), R4 - MOVD $0, 0(R9) + MOVD SAVSTACK_ASYNC(R8), R9 + MOVD 0(R9), R4 + MOVD $0, 0(R9) // Call __errno function. LE_CALL NOPH // Switch back to Go stack. - XOR R0, R0 // Restore R0 to $0. - MOVD R4, 0(R9) // Save stack pointer. - RET - -TEXT ·syscall_syscall(SB),NOSPLIT,$0-56 - BL runtime·entersyscall(SB) - MOVD a1+8(FP), R1 - MOVD a2+16(FP), R2 - MOVD a3+24(FP), R3 - - // Get library control area (LCA). - MOVW PSALAA, R8 - MOVD LCA64(R8), R8 - - // Get function. - MOVD CAA(R8), R9 - MOVD EDCHPXV(R9), R9 - MOVD trap+0(FP), R5 - SLD $4, R5 - ADD R5, R9 - LMG 0(R9), R5, R6 - - // Restore LE stack. - MOVD SAVSTACK_ASYNC(R8), R9 - MOVD 0(R9), R4 - MOVD $0, 0(R9) - - // Call function. - LE_CALL - NOPH - XOR R0, R0 // Restore R0 to $0. - MOVD R4, 0(R9) // Save stack pointer. - - MOVD R3, r1+32(FP) - MOVD R0, r2+40(FP) - MOVD R0, err+48(FP) - MOVW R3, R4 - CMP R4, $-1 - BNE done - BL addrerrno<>(SB) - MOVWZ 0(R3), R3 - MOVD R3, err+48(FP) -done: - BL runtime·exitsyscall(SB) - RET - -TEXT ·syscall_rawsyscall(SB),NOSPLIT,$0-56 - MOVD a1+8(FP), R1 - MOVD a2+16(FP), R2 - MOVD a3+24(FP), R3 - - // Get library control area (LCA). - MOVW PSALAA, R8 - MOVD LCA64(R8), R8 - - // Get function. 
- MOVD CAA(R8), R9 - MOVD EDCHPXV(R9), R9 - MOVD trap+0(FP), R5 - SLD $4, R5 - ADD R5, R9 - LMG 0(R9), R5, R6 - - // Restore LE stack. - MOVD SAVSTACK_ASYNC(R8), R9 - MOVD 0(R9), R4 - MOVD $0, 0(R9) - - // Call function. - LE_CALL - NOPH - XOR R0, R0 // Restore R0 to $0. - MOVD R4, 0(R9) // Save stack pointer. - - MOVD R3, r1+32(FP) - MOVD R0, r2+40(FP) - MOVD R0, err+48(FP) - MOVW R3, R4 - CMP R4, $-1 - BNE done - BL addrerrno<>(SB) - MOVWZ 0(R3), R3 - MOVD R3, err+48(FP) -done: - RET - -TEXT ·syscall_syscall6(SB),NOSPLIT,$0-80 - BL runtime·entersyscall(SB) - MOVD a1+8(FP), R1 - MOVD a2+16(FP), R2 - MOVD a3+24(FP), R3 - - // Get library control area (LCA). - MOVW PSALAA, R8 - MOVD LCA64(R8), R8 - - // Get function. - MOVD CAA(R8), R9 - MOVD EDCHPXV(R9), R9 - MOVD trap+0(FP), R5 - SLD $4, R5 - ADD R5, R9 - LMG 0(R9), R5, R6 - - // Restore LE stack. - MOVD SAVSTACK_ASYNC(R8), R9 - MOVD 0(R9), R4 - MOVD $0, 0(R9) - - // Fill in parameter list. - MOVD a4+32(FP), R12 - MOVD R12, (2176+24)(R4) - MOVD a5+40(FP), R12 - MOVD R12, (2176+32)(R4) - MOVD a6+48(FP), R12 - MOVD R12, (2176+40)(R4) - - // Call function. - LE_CALL - NOPH - XOR R0, R0 // Restore R0 to $0. - MOVD R4, 0(R9) // Save stack pointer. - - MOVD R3, r1+56(FP) - MOVD R0, r2+64(FP) - MOVD R0, err+72(FP) - MOVW R3, R4 - CMP R4, $-1 - BNE done - BL addrerrno<>(SB) - MOVWZ 0(R3), R3 - MOVD R3, err+72(FP) -done: - BL runtime·exitsyscall(SB) - RET - -TEXT ·syscall_rawsyscall6(SB),NOSPLIT,$0-80 - MOVD a1+8(FP), R1 - MOVD a2+16(FP), R2 - MOVD a3+24(FP), R3 - - // Get library control area (LCA). - MOVW PSALAA, R8 - MOVD LCA64(R8), R8 - - // Get function. - MOVD CAA(R8), R9 - MOVD EDCHPXV(R9), R9 - MOVD trap+0(FP), R5 - SLD $4, R5 - ADD R5, R9 - LMG 0(R9), R5, R6 - - // Restore LE stack. - MOVD SAVSTACK_ASYNC(R8), R9 - MOVD 0(R9), R4 - MOVD $0, 0(R9) - - // Fill in parameter list. - MOVD a4+32(FP), R12 - MOVD R12, (2176+24)(R4) - MOVD a5+40(FP), R12 - MOVD R12, (2176+32)(R4) - MOVD a6+48(FP), R12 - MOVD R12, (2176+40)(R4) - - // Call function. - LE_CALL - NOPH - XOR R0, R0 // Restore R0 to $0. - MOVD R4, 0(R9) // Save stack pointer. - - MOVD R3, r1+56(FP) - MOVD R0, r2+64(FP) - MOVD R0, err+72(FP) - MOVW R3, R4 - CMP R4, $-1 - BNE done - BL ·rrno<>(SB) - MOVWZ 0(R3), R3 - MOVD R3, err+72(FP) -done: - RET - -TEXT ·syscall_syscall9(SB),NOSPLIT,$0 - BL runtime·entersyscall(SB) - MOVD a1+8(FP), R1 - MOVD a2+16(FP), R2 - MOVD a3+24(FP), R3 - - // Get library control area (LCA). - MOVW PSALAA, R8 - MOVD LCA64(R8), R8 - - // Get function. - MOVD CAA(R8), R9 - MOVD EDCHPXV(R9), R9 - MOVD trap+0(FP), R5 - SLD $4, R5 - ADD R5, R9 - LMG 0(R9), R5, R6 - - // Restore LE stack. - MOVD SAVSTACK_ASYNC(R8), R9 - MOVD 0(R9), R4 - MOVD $0, 0(R9) - - // Fill in parameter list. - MOVD a4+32(FP), R12 - MOVD R12, (2176+24)(R4) - MOVD a5+40(FP), R12 - MOVD R12, (2176+32)(R4) - MOVD a6+48(FP), R12 - MOVD R12, (2176+40)(R4) - MOVD a7+56(FP), R12 - MOVD R12, (2176+48)(R4) - MOVD a8+64(FP), R12 - MOVD R12, (2176+56)(R4) - MOVD a9+72(FP), R12 - MOVD R12, (2176+64)(R4) - - // Call function. - LE_CALL - NOPH - XOR R0, R0 // Restore R0 to $0. - MOVD R4, 0(R9) // Save stack pointer. - - MOVD R3, r1+80(FP) - MOVD R0, r2+88(FP) - MOVD R0, err+96(FP) - MOVW R3, R4 - CMP R4, $-1 - BNE done - BL addrerrno<>(SB) - MOVWZ 0(R3), R3 - MOVD R3, err+96(FP) -done: - BL runtime·exitsyscall(SB) - RET - -TEXT ·syscall_rawsyscall9(SB),NOSPLIT,$0 - MOVD a1+8(FP), R1 - MOVD a2+16(FP), R2 - MOVD a3+24(FP), R3 - - // Get library control area (LCA). 
- MOVW PSALAA, R8 - MOVD LCA64(R8), R8 - - // Get function. - MOVD CAA(R8), R9 - MOVD EDCHPXV(R9), R9 - MOVD trap+0(FP), R5 - SLD $4, R5 - ADD R5, R9 - LMG 0(R9), R5, R6 - - // Restore LE stack. - MOVD SAVSTACK_ASYNC(R8), R9 - MOVD 0(R9), R4 - MOVD $0, 0(R9) - - // Fill in parameter list. - MOVD a4+32(FP), R12 - MOVD R12, (2176+24)(R4) - MOVD a5+40(FP), R12 - MOVD R12, (2176+32)(R4) - MOVD a6+48(FP), R12 - MOVD R12, (2176+40)(R4) - MOVD a7+56(FP), R12 - MOVD R12, (2176+48)(R4) - MOVD a8+64(FP), R12 - MOVD R12, (2176+56)(R4) - MOVD a9+72(FP), R12 - MOVD R12, (2176+64)(R4) - - // Call function. - LE_CALL - NOPH - XOR R0, R0 // Restore R0 to $0. - MOVD R4, 0(R9) // Save stack pointer. - - MOVD R3, r1+80(FP) - MOVD R0, r2+88(FP) - MOVD R0, err+96(FP) - MOVW R3, R4 - CMP R4, $-1 - BNE done - BL addrerrno<>(SB) - MOVWZ 0(R3), R3 - MOVD R3, err+96(FP) -done: + XOR R0, R0 // Restore R0 to $0. + MOVD R4, 0(R9) // Save stack pointer. RET // func svcCall(fnptr unsafe.Pointer, argv *unsafe.Pointer, dsa *uint64) -TEXT ·svcCall(SB),NOSPLIT,$0 - BL runtime·save_g(SB) // Save g and stack pointer - MOVW PSALAA, R8 - MOVD LCA64(R8), R8 - MOVD SAVSTACK_ASYNC(R8), R9 - MOVD R15, 0(R9) +TEXT ·svcCall(SB), NOSPLIT, $0 + BL runtime·save_g(SB) // Save g and stack pointer + MOVW PSALAA, R8 + MOVD LCA64(R8), R8 + MOVD SAVSTACK_ASYNC(R8), R9 + MOVD R15, 0(R9) - MOVD argv+8(FP), R1 // Move function arguments into registers - MOVD dsa+16(FP), g - MOVD fnptr+0(FP), R15 + MOVD argv+8(FP), R1 // Move function arguments into registers + MOVD dsa+16(FP), g + MOVD fnptr+0(FP), R15 - BYTE $0x0D // Branch to function - BYTE $0xEF + BYTE $0x0D // Branch to function + BYTE $0xEF - BL runtime·load_g(SB) // Restore g and stack pointer - MOVW PSALAA, R8 - MOVD LCA64(R8), R8 - MOVD SAVSTACK_ASYNC(R8), R9 - MOVD 0(R9), R15 + BL runtime·load_g(SB) // Restore g and stack pointer + MOVW PSALAA, R8 + MOVD LCA64(R8), R8 + MOVD SAVSTACK_ASYNC(R8), R9 + MOVD 0(R9), R15 RET // func svcLoad(name *byte) unsafe.Pointer -TEXT ·svcLoad(SB),NOSPLIT,$0 - MOVD R15, R2 // Save go stack pointer - MOVD name+0(FP), R0 // Move SVC args into registers - MOVD $0x80000000, R1 - MOVD $0, R15 - BYTE $0x0A // SVC 08 LOAD - BYTE $0x08 - MOVW R15, R3 // Save return code from SVC - MOVD R2, R15 // Restore go stack pointer - CMP R3, $0 // Check SVC return code - BNE error +TEXT ·svcLoad(SB), NOSPLIT, $0 + MOVD R15, R2 // Save go stack pointer + MOVD name+0(FP), R0 // Move SVC args into registers + MOVD $0x80000000, R1 + MOVD $0, R15 + SVC_LOAD + MOVW R15, R3 // Save return code from SVC + MOVD R2, R15 // Restore go stack pointer + CMP R3, $0 // Check SVC return code + BNE error - MOVD $-2, R3 // Reset last bit of entry point to zero - AND R0, R3 - MOVD R3, addr+8(FP) // Return entry point returned by SVC - CMP R0, R3 // Check if last bit of entry point was set - BNE done + MOVD $-2, R3 // Reset last bit of entry point to zero + AND R0, R3 + MOVD R3, ret+8(FP) // Return entry point returned by SVC + CMP R0, R3 // Check if last bit of entry point was set + BNE done - MOVD R15, R2 // Save go stack pointer - MOVD $0, R15 // Move SVC args into registers (entry point still in r0 from SVC 08) - BYTE $0x0A // SVC 09 DELETE - BYTE $0x09 - MOVD R2, R15 // Restore go stack pointer + MOVD R15, R2 // Save go stack pointer + MOVD $0, R15 // Move SVC args into registers (entry point still in r0 from SVC 08) + SVC_DELETE + MOVD R2, R15 // Restore go stack pointer error: - MOVD $0, addr+8(FP) // Return 0 on failure + MOVD $0, ret+8(FP) // Return 0 on failure + done: - XOR R0, R0 
// Reset r0 to 0 + XOR R0, R0 // Reset r0 to 0 RET // func svcUnload(name *byte, fnptr unsafe.Pointer) int64 -TEXT ·svcUnload(SB),NOSPLIT,$0 - MOVD R15, R2 // Save go stack pointer - MOVD name+0(FP), R0 // Move SVC args into registers - MOVD addr+8(FP), R15 - BYTE $0x0A // SVC 09 - BYTE $0x09 - XOR R0, R0 // Reset r0 to 0 - MOVD R15, R1 // Save SVC return code - MOVD R2, R15 // Restore go stack pointer - MOVD R1, rc+0(FP) // Return SVC return code +TEXT ·svcUnload(SB), NOSPLIT, $0 + MOVD R15, R2 // Save go stack pointer + MOVD name+0(FP), R0 // Move SVC args into registers + MOVD fnptr+8(FP), R15 + SVC_DELETE + XOR R0, R0 // Reset r0 to 0 + MOVD R15, R1 // Save SVC return code + MOVD R2, R15 // Restore go stack pointer + MOVD R1, ret+16(FP) // Return SVC return code RET // func gettid() uint64 @@ -417,7 +150,233 @@ TEXT ·gettid(SB), NOSPLIT, $0 // Get CEECAATHDID MOVD CAA(R8), R9 - MOVD 0x3D0(R9), R9 + MOVD CEECAATHDID(R9), R9 MOVD R9, ret+0(FP) RET + +// +// Call LE function, if the return is -1 +// errno and errno2 is retrieved +// +TEXT ·CallLeFuncWithErr(SB), NOSPLIT, $0 + MOVW PSALAA, R8 + MOVD LCA64(R8), R8 + MOVD CAA(R8), R9 + MOVD g, GOCB(R9) + + // Restore LE stack. + MOVD SAVSTACK_ASYNC(R8), R9 // R9-> LE stack frame saving address + MOVD 0(R9), R4 // R4-> restore previously saved stack frame pointer + + MOVD parms_base+8(FP), R7 // R7 -> argument array + MOVD parms_len+16(FP), R8 // R8 number of arguments + + // arg 1 ---> R1 + CMP R8, $0 + BEQ docall + SUB $1, R8 + MOVD 0(R7), R1 + + // arg 2 ---> R2 + CMP R8, $0 + BEQ docall + SUB $1, R8 + ADD $8, R7 + MOVD 0(R7), R2 + + // arg 3 --> R3 + CMP R8, $0 + BEQ docall + SUB $1, R8 + ADD $8, R7 + MOVD 0(R7), R3 + + CMP R8, $0 + BEQ docall + MOVD $2176+16, R6 // starting LE stack address-8 to store 4th argument + +repeat: + ADD $8, R7 + MOVD 0(R7), R0 // advance arg pointer by 8 byte + ADD $8, R6 // advance LE argument address by 8 byte + MOVD R0, (R4)(R6*1) // copy argument from go-slice to le-frame + SUB $1, R8 + CMP R8, $0 + BNE repeat + +docall: + MOVD funcdesc+0(FP), R8 // R8-> function descriptor + LMG 0(R8), R5, R6 + MOVD $0, 0(R9) // R9 address of SAVSTACK_ASYNC + LE_CALL // balr R7, R6 (return #1) + NOPH + MOVD R3, ret+32(FP) + CMP R3, $-1 // compare result to -1 + BNE done + + // retrieve errno and errno2 + MOVD zosLibVec<>(SB), R8 + ADD $(__errno), R8 + LMG 0(R8), R5, R6 + LE_CALL // balr R7, R6 __errno (return #3) + NOPH + MOVWZ 0(R3), R3 + MOVD R3, err+48(FP) + MOVD zosLibVec<>(SB), R8 + ADD $(__err2ad), R8 + LMG 0(R8), R5, R6 + LE_CALL // balr R7, R6 __err2ad (return #2) + NOPH + MOVW (R3), R2 // retrieve errno2 + MOVD R2, errno2+40(FP) // store in return area + +done: + MOVD R4, 0(R9) // Save stack pointer. + RET + +// +// Call LE function, if the return is 0 +// errno and errno2 is retrieved +// +TEXT ·CallLeFuncWithPtrReturn(SB), NOSPLIT, $0 + MOVW PSALAA, R8 + MOVD LCA64(R8), R8 + MOVD CAA(R8), R9 + MOVD g, GOCB(R9) + + // Restore LE stack. 
+ MOVD SAVSTACK_ASYNC(R8), R9 // R9-> LE stack frame saving address + MOVD 0(R9), R4 // R4-> restore previously saved stack frame pointer + + MOVD parms_base+8(FP), R7 // R7 -> argument array + MOVD parms_len+16(FP), R8 // R8 number of arguments + + // arg 1 ---> R1 + CMP R8, $0 + BEQ docall + SUB $1, R8 + MOVD 0(R7), R1 + + // arg 2 ---> R2 + CMP R8, $0 + BEQ docall + SUB $1, R8 + ADD $8, R7 + MOVD 0(R7), R2 + + // arg 3 --> R3 + CMP R8, $0 + BEQ docall + SUB $1, R8 + ADD $8, R7 + MOVD 0(R7), R3 + + CMP R8, $0 + BEQ docall + MOVD $2176+16, R6 // starting LE stack address-8 to store 4th argument + +repeat: + ADD $8, R7 + MOVD 0(R7), R0 // advance arg pointer by 8 byte + ADD $8, R6 // advance LE argument address by 8 byte + MOVD R0, (R4)(R6*1) // copy argument from go-slice to le-frame + SUB $1, R8 + CMP R8, $0 + BNE repeat + +docall: + MOVD funcdesc+0(FP), R8 // R8-> function descriptor + LMG 0(R8), R5, R6 + MOVD $0, 0(R9) // R9 address of SAVSTACK_ASYNC + LE_CALL // balr R7, R6 (return #1) + NOPH + MOVD R3, ret+32(FP) + CMP R3, $0 // compare result to 0 + BNE done + + // retrieve errno and errno2 + MOVD zosLibVec<>(SB), R8 + ADD $(__errno), R8 + LMG 0(R8), R5, R6 + LE_CALL // balr R7, R6 __errno (return #3) + NOPH + MOVWZ 0(R3), R3 + MOVD R3, err+48(FP) + MOVD zosLibVec<>(SB), R8 + ADD $(__err2ad), R8 + LMG 0(R8), R5, R6 + LE_CALL // balr R7, R6 __err2ad (return #2) + NOPH + MOVW (R3), R2 // retrieve errno2 + MOVD R2, errno2+40(FP) // store in return area + XOR R2, R2 + MOVWZ R2, (R3) // clear errno2 + +done: + MOVD R4, 0(R9) // Save stack pointer. + RET + +// +// function to test if a pointer can be safely dereferenced (content read) +// return 0 for succces +// +TEXT ·ptrtest(SB), NOSPLIT, $0-16 + MOVD arg+0(FP), R10 // test pointer in R10 + + // set up R2 to point to CEECAADMC + BYTE $0xE3; BYTE $0x20; BYTE $0x04; BYTE $0xB8; BYTE $0x00; BYTE $0x17 // llgt 2,1208 + BYTE $0xB9; BYTE $0x17; BYTE $0x00; BYTE $0x22 // llgtr 2,2 + BYTE $0xA5; BYTE $0x26; BYTE $0x7F; BYTE $0xFF // nilh 2,32767 + BYTE $0xE3; BYTE $0x22; BYTE $0x00; BYTE $0x58; BYTE $0x00; BYTE $0x04 // lg 2,88(2) + BYTE $0xE3; BYTE $0x22; BYTE $0x00; BYTE $0x08; BYTE $0x00; BYTE $0x04 // lg 2,8(2) + BYTE $0x41; BYTE $0x22; BYTE $0x03; BYTE $0x68 // la 2,872(2) + + // set up R5 to point to the "shunt" path which set 1 to R3 (failure) + BYTE $0xB9; BYTE $0x82; BYTE $0x00; BYTE $0x33 // xgr 3,3 + BYTE $0xA7; BYTE $0x55; BYTE $0x00; BYTE $0x04 // bras 5,lbl1 + BYTE $0xA7; BYTE $0x39; BYTE $0x00; BYTE $0x01 // lghi 3,1 + + // if r3 is not zero (failed) then branch to finish + BYTE $0xB9; BYTE $0x02; BYTE $0x00; BYTE $0x33 // lbl1 ltgr 3,3 + BYTE $0xA7; BYTE $0x74; BYTE $0x00; BYTE $0x08 // brc b'0111',lbl2 + + // stomic store shunt address in R5 into CEECAADMC + BYTE $0xE3; BYTE $0x52; BYTE $0x00; BYTE $0x00; BYTE $0x00; BYTE $0x24 // stg 5,0(2) + + // now try reading from the test pointer in R10, if it fails it branches to the "lghi" instruction above + BYTE $0xE3; BYTE $0x9A; BYTE $0x00; BYTE $0x00; BYTE $0x00; BYTE $0x04 // lg 9,0(10) + + // finish here, restore 0 into CEECAADMC + BYTE $0xB9; BYTE $0x82; BYTE $0x00; BYTE $0x99 // lbl2 xgr 9,9 + BYTE $0xE3; BYTE $0x92; BYTE $0x00; BYTE $0x00; BYTE $0x00; BYTE $0x24 // stg 9,0(2) + MOVD R3, ret+8(FP) // result in R3 + RET + +// +// function to test if a untptr can be loaded from a pointer +// return 1: the 8-byte content +// 2: 0 for success, 1 for failure +// +// func safeload(ptr uintptr) ( value uintptr, error uintptr) +TEXT ·safeload(SB), NOSPLIT, $0-24 + MOVD ptr+0(FP), R10 
// test pointer in R10 + MOVD $0x0, R6 + BYTE $0xE3; BYTE $0x20; BYTE $0x04; BYTE $0xB8; BYTE $0x00; BYTE $0x17 // llgt 2,1208 + BYTE $0xB9; BYTE $0x17; BYTE $0x00; BYTE $0x22 // llgtr 2,2 + BYTE $0xA5; BYTE $0x26; BYTE $0x7F; BYTE $0xFF // nilh 2,32767 + BYTE $0xE3; BYTE $0x22; BYTE $0x00; BYTE $0x58; BYTE $0x00; BYTE $0x04 // lg 2,88(2) + BYTE $0xE3; BYTE $0x22; BYTE $0x00; BYTE $0x08; BYTE $0x00; BYTE $0x04 // lg 2,8(2) + BYTE $0x41; BYTE $0x22; BYTE $0x03; BYTE $0x68 // la 2,872(2) + BYTE $0xB9; BYTE $0x82; BYTE $0x00; BYTE $0x33 // xgr 3,3 + BYTE $0xA7; BYTE $0x55; BYTE $0x00; BYTE $0x04 // bras 5,lbl1 + BYTE $0xA7; BYTE $0x39; BYTE $0x00; BYTE $0x01 // lghi 3,1 + BYTE $0xB9; BYTE $0x02; BYTE $0x00; BYTE $0x33 // lbl1 ltgr 3,3 + BYTE $0xA7; BYTE $0x74; BYTE $0x00; BYTE $0x08 // brc b'0111',lbl2 + BYTE $0xE3; BYTE $0x52; BYTE $0x00; BYTE $0x00; BYTE $0x00; BYTE $0x24 // stg 5,0(2) + BYTE $0xE3; BYTE $0x6A; BYTE $0x00; BYTE $0x00; BYTE $0x00; BYTE $0x04 // lg 6,0(10) + BYTE $0xB9; BYTE $0x82; BYTE $0x00; BYTE $0x99 // lbl2 xgr 9,9 + BYTE $0xE3; BYTE $0x92; BYTE $0x00; BYTE $0x00; BYTE $0x00; BYTE $0x24 // stg 9,0(2) + MOVD R6, value+8(FP) // result in R6 + MOVD R3, error+16(FP) // error in R3 + RET diff --git a/vendor/golang.org/x/sys/unix/bpxsvc_zos.go b/vendor/golang.org/x/sys/unix/bpxsvc_zos.go new file mode 100644 index 0000000..39d647d --- /dev/null +++ b/vendor/golang.org/x/sys/unix/bpxsvc_zos.go @@ -0,0 +1,657 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build zos + +package unix + +import ( + "bytes" + "fmt" + "unsafe" +) + +//go:noescape +func bpxcall(plist []unsafe.Pointer, bpx_offset int64) + +//go:noescape +func A2e([]byte) + +//go:noescape +func E2a([]byte) + +const ( + BPX4STA = 192 // stat + BPX4FST = 104 // fstat + BPX4LST = 132 // lstat + BPX4OPN = 156 // open + BPX4CLO = 72 // close + BPX4CHR = 500 // chattr + BPX4FCR = 504 // fchattr + BPX4LCR = 1180 // lchattr + BPX4CTW = 492 // cond_timed_wait + BPX4GTH = 1056 // __getthent + BPX4PTQ = 412 // pthread_quiesc + BPX4PTR = 320 // ptrace +) + +const ( + //options + //byte1 + BPX_OPNFHIGH = 0x80 + //byte2 + BPX_OPNFEXEC = 0x80 + //byte3 + BPX_O_NOLARGEFILE = 0x08 + BPX_O_LARGEFILE = 0x04 + BPX_O_ASYNCSIG = 0x02 + BPX_O_SYNC = 0x01 + //byte4 + BPX_O_CREXCL = 0xc0 + BPX_O_CREAT = 0x80 + BPX_O_EXCL = 0x40 + BPX_O_NOCTTY = 0x20 + BPX_O_TRUNC = 0x10 + BPX_O_APPEND = 0x08 + BPX_O_NONBLOCK = 0x04 + BPX_FNDELAY = 0x04 + BPX_O_RDWR = 0x03 + BPX_O_RDONLY = 0x02 + BPX_O_WRONLY = 0x01 + BPX_O_ACCMODE = 0x03 + BPX_O_GETFL = 0x0f + + //mode + // byte1 (file type) + BPX_FT_DIR = 1 + BPX_FT_CHARSPEC = 2 + BPX_FT_REGFILE = 3 + BPX_FT_FIFO = 4 + BPX_FT_SYMLINK = 5 + BPX_FT_SOCKET = 6 + //byte3 + BPX_S_ISUID = 0x08 + BPX_S_ISGID = 0x04 + BPX_S_ISVTX = 0x02 + BPX_S_IRWXU1 = 0x01 + BPX_S_IRUSR = 0x01 + //byte4 + BPX_S_IRWXU2 = 0xc0 + BPX_S_IWUSR = 0x80 + BPX_S_IXUSR = 0x40 + BPX_S_IRWXG = 0x38 + BPX_S_IRGRP = 0x20 + BPX_S_IWGRP = 0x10 + BPX_S_IXGRP = 0x08 + BPX_S_IRWXOX = 0x07 + BPX_S_IROTH = 0x04 + BPX_S_IWOTH = 0x02 + BPX_S_IXOTH = 0x01 + + CW_INTRPT = 1 + CW_CONDVAR = 32 + CW_TIMEOUT = 64 + + PGTHA_NEXT = 2 + PGTHA_CURRENT = 1 + PGTHA_FIRST = 0 + PGTHA_LAST = 3 + PGTHA_PROCESS = 0x80 + PGTHA_CONTTY = 0x40 + PGTHA_PATH = 0x20 + PGTHA_COMMAND = 0x10 + PGTHA_FILEDATA = 0x08 + PGTHA_THREAD = 0x04 + PGTHA_PTAG = 0x02 + PGTHA_COMMANDLONG = 0x01 + PGTHA_THREADFAST = 0x80 + PGTHA_FILEPATH = 0x40 + PGTHA_THDSIGMASK = 0x20 
+ // thread quiece mode + QUIESCE_TERM int32 = 1 + QUIESCE_FORCE int32 = 2 + QUIESCE_QUERY int32 = 3 + QUIESCE_FREEZE int32 = 4 + QUIESCE_UNFREEZE int32 = 5 + FREEZE_THIS_THREAD int32 = 6 + FREEZE_EXIT int32 = 8 + QUIESCE_SRB int32 = 9 +) + +type Pgtha struct { + Pid uint32 // 0 + Tid0 uint32 // 4 + Tid1 uint32 + Accesspid byte // C + Accesstid byte // D + Accessasid uint16 // E + Loginname [8]byte // 10 + Flag1 byte // 18 + Flag1b2 byte // 19 +} + +type Bpxystat_t struct { // DSECT BPXYSTAT + St_id [4]uint8 // 0 + St_length uint16 // 0x4 + St_version uint16 // 0x6 + St_mode uint32 // 0x8 + St_ino uint32 // 0xc + St_dev uint32 // 0x10 + St_nlink uint32 // 0x14 + St_uid uint32 // 0x18 + St_gid uint32 // 0x1c + St_size uint64 // 0x20 + St_atime uint32 // 0x28 + St_mtime uint32 // 0x2c + St_ctime uint32 // 0x30 + St_rdev uint32 // 0x34 + St_auditoraudit uint32 // 0x38 + St_useraudit uint32 // 0x3c + St_blksize uint32 // 0x40 + St_createtime uint32 // 0x44 + St_auditid [4]uint32 // 0x48 + St_res01 uint32 // 0x58 + Ft_ccsid uint16 // 0x5c + Ft_flags uint16 // 0x5e + St_res01a [2]uint32 // 0x60 + St_res02 uint32 // 0x68 + St_blocks uint32 // 0x6c + St_opaque [3]uint8 // 0x70 + St_visible uint8 // 0x73 + St_reftime uint32 // 0x74 + St_fid uint64 // 0x78 + St_filefmt uint8 // 0x80 + St_fspflag2 uint8 // 0x81 + St_res03 [2]uint8 // 0x82 + St_ctimemsec uint32 // 0x84 + St_seclabel [8]uint8 // 0x88 + St_res04 [4]uint8 // 0x90 + // end of version 1 + _ uint32 // 0x94 + St_atime64 uint64 // 0x98 + St_mtime64 uint64 // 0xa0 + St_ctime64 uint64 // 0xa8 + St_createtime64 uint64 // 0xb0 + St_reftime64 uint64 // 0xb8 + _ uint64 // 0xc0 + St_res05 [16]uint8 // 0xc8 + // end of version 2 +} + +type BpxFilestatus struct { + Oflag1 byte + Oflag2 byte + Oflag3 byte + Oflag4 byte +} + +type BpxMode struct { + Ftype byte + Mode1 byte + Mode2 byte + Mode3 byte +} + +// Thr attribute structure for extended attributes +type Bpxyatt_t struct { // DSECT BPXYATT + Att_id [4]uint8 + Att_version uint16 + Att_res01 [2]uint8 + Att_setflags1 uint8 + Att_setflags2 uint8 + Att_setflags3 uint8 + Att_setflags4 uint8 + Att_mode uint32 + Att_uid uint32 + Att_gid uint32 + Att_opaquemask [3]uint8 + Att_visblmaskres uint8 + Att_opaque [3]uint8 + Att_visibleres uint8 + Att_size_h uint32 + Att_size_l uint32 + Att_atime uint32 + Att_mtime uint32 + Att_auditoraudit uint32 + Att_useraudit uint32 + Att_ctime uint32 + Att_reftime uint32 + // end of version 1 + Att_filefmt uint8 + Att_res02 [3]uint8 + Att_filetag uint32 + Att_res03 [8]uint8 + // end of version 2 + Att_atime64 uint64 + Att_mtime64 uint64 + Att_ctime64 uint64 + Att_reftime64 uint64 + Att_seclabel [8]uint8 + Att_ver3res02 [8]uint8 + // end of version 3 +} + +func BpxOpen(name string, options *BpxFilestatus, mode *BpxMode) (rv int32, rc int32, rn int32) { + if len(name) < 1024 { + var namebuf [1024]byte + sz := int32(copy(namebuf[:], name)) + A2e(namebuf[:sz]) + var parms [7]unsafe.Pointer + parms[0] = unsafe.Pointer(&sz) + parms[1] = unsafe.Pointer(&namebuf[0]) + parms[2] = unsafe.Pointer(options) + parms[3] = unsafe.Pointer(mode) + parms[4] = unsafe.Pointer(&rv) + parms[5] = unsafe.Pointer(&rc) + parms[6] = unsafe.Pointer(&rn) + bpxcall(parms[:], BPX4OPN) + return rv, rc, rn + } + return -1, -1, -1 +} + +func BpxClose(fd int32) (rv int32, rc int32, rn int32) { + var parms [4]unsafe.Pointer + parms[0] = unsafe.Pointer(&fd) + parms[1] = unsafe.Pointer(&rv) + parms[2] = unsafe.Pointer(&rc) + parms[3] = unsafe.Pointer(&rn) + bpxcall(parms[:], BPX4CLO) + return rv, rc, rn +} + 
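A brief usage sketch of the BpxOpen/BpxClose wrappers added above (illustrative only, not part of the vendored diff; the path, flag, and mode values are assumptions, and the snippet only builds on z/OS):

//go:build zos && s390x

package main

import (
	"fmt"

	"golang.org/x/sys/unix"
)

func main() {
	// Request read-only access; per the constant grouping above,
	// BPX_O_RDONLY belongs in the fourth option byte.
	opts := &unix.BpxFilestatus{Oflag4: unix.BPX_O_RDONLY}
	mode := &unix.BpxMode{Ftype: unix.BPX_FT_REGFILE}

	rv, rc, rn := unix.BpxOpen("/etc/profile", opts, mode)
	if rv < 0 {
		// rc carries the errno value, rn the reason code.
		fmt.Printf("open failed: errno=%d reason=0x%x\n", rc, rn)
		return
	}
	defer unix.BpxClose(rv)
	fmt.Println("opened fd", rv)
}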
+func BpxFileFStat(fd int32, st *Bpxystat_t) (rv int32, rc int32, rn int32) { + st.St_id = [4]uint8{0xe2, 0xe3, 0xc1, 0xe3} + st.St_version = 2 + stat_sz := uint32(unsafe.Sizeof(*st)) + var parms [6]unsafe.Pointer + parms[0] = unsafe.Pointer(&fd) + parms[1] = unsafe.Pointer(&stat_sz) + parms[2] = unsafe.Pointer(st) + parms[3] = unsafe.Pointer(&rv) + parms[4] = unsafe.Pointer(&rc) + parms[5] = unsafe.Pointer(&rn) + bpxcall(parms[:], BPX4FST) + return rv, rc, rn +} + +func BpxFileStat(name string, st *Bpxystat_t) (rv int32, rc int32, rn int32) { + if len(name) < 1024 { + var namebuf [1024]byte + sz := int32(copy(namebuf[:], name)) + A2e(namebuf[:sz]) + st.St_id = [4]uint8{0xe2, 0xe3, 0xc1, 0xe3} + st.St_version = 2 + stat_sz := uint32(unsafe.Sizeof(*st)) + var parms [7]unsafe.Pointer + parms[0] = unsafe.Pointer(&sz) + parms[1] = unsafe.Pointer(&namebuf[0]) + parms[2] = unsafe.Pointer(&stat_sz) + parms[3] = unsafe.Pointer(st) + parms[4] = unsafe.Pointer(&rv) + parms[5] = unsafe.Pointer(&rc) + parms[6] = unsafe.Pointer(&rn) + bpxcall(parms[:], BPX4STA) + return rv, rc, rn + } + return -1, -1, -1 +} + +func BpxFileLStat(name string, st *Bpxystat_t) (rv int32, rc int32, rn int32) { + if len(name) < 1024 { + var namebuf [1024]byte + sz := int32(copy(namebuf[:], name)) + A2e(namebuf[:sz]) + st.St_id = [4]uint8{0xe2, 0xe3, 0xc1, 0xe3} + st.St_version = 2 + stat_sz := uint32(unsafe.Sizeof(*st)) + var parms [7]unsafe.Pointer + parms[0] = unsafe.Pointer(&sz) + parms[1] = unsafe.Pointer(&namebuf[0]) + parms[2] = unsafe.Pointer(&stat_sz) + parms[3] = unsafe.Pointer(st) + parms[4] = unsafe.Pointer(&rv) + parms[5] = unsafe.Pointer(&rc) + parms[6] = unsafe.Pointer(&rn) + bpxcall(parms[:], BPX4LST) + return rv, rc, rn + } + return -1, -1, -1 +} + +func BpxChattr(path string, attr *Bpxyatt_t) (rv int32, rc int32, rn int32) { + if len(path) >= 1024 { + return -1, -1, -1 + } + var namebuf [1024]byte + sz := int32(copy(namebuf[:], path)) + A2e(namebuf[:sz]) + attr_sz := uint32(unsafe.Sizeof(*attr)) + var parms [7]unsafe.Pointer + parms[0] = unsafe.Pointer(&sz) + parms[1] = unsafe.Pointer(&namebuf[0]) + parms[2] = unsafe.Pointer(&attr_sz) + parms[3] = unsafe.Pointer(attr) + parms[4] = unsafe.Pointer(&rv) + parms[5] = unsafe.Pointer(&rc) + parms[6] = unsafe.Pointer(&rn) + bpxcall(parms[:], BPX4CHR) + return rv, rc, rn +} + +func BpxLchattr(path string, attr *Bpxyatt_t) (rv int32, rc int32, rn int32) { + if len(path) >= 1024 { + return -1, -1, -1 + } + var namebuf [1024]byte + sz := int32(copy(namebuf[:], path)) + A2e(namebuf[:sz]) + attr_sz := uint32(unsafe.Sizeof(*attr)) + var parms [7]unsafe.Pointer + parms[0] = unsafe.Pointer(&sz) + parms[1] = unsafe.Pointer(&namebuf[0]) + parms[2] = unsafe.Pointer(&attr_sz) + parms[3] = unsafe.Pointer(attr) + parms[4] = unsafe.Pointer(&rv) + parms[5] = unsafe.Pointer(&rc) + parms[6] = unsafe.Pointer(&rn) + bpxcall(parms[:], BPX4LCR) + return rv, rc, rn +} + +func BpxFchattr(fd int32, attr *Bpxyatt_t) (rv int32, rc int32, rn int32) { + attr_sz := uint32(unsafe.Sizeof(*attr)) + var parms [6]unsafe.Pointer + parms[0] = unsafe.Pointer(&fd) + parms[1] = unsafe.Pointer(&attr_sz) + parms[2] = unsafe.Pointer(attr) + parms[3] = unsafe.Pointer(&rv) + parms[4] = unsafe.Pointer(&rc) + parms[5] = unsafe.Pointer(&rn) + bpxcall(parms[:], BPX4FCR) + return rv, rc, rn +} + +func BpxCondTimedWait(sec uint32, nsec uint32, events uint32, secrem *uint32, nsecrem *uint32) (rv int32, rc int32, rn int32) { + var parms [8]unsafe.Pointer + parms[0] = unsafe.Pointer(&sec) + parms[1] = 
unsafe.Pointer(&nsec) + parms[2] = unsafe.Pointer(&events) + parms[3] = unsafe.Pointer(secrem) + parms[4] = unsafe.Pointer(nsecrem) + parms[5] = unsafe.Pointer(&rv) + parms[6] = unsafe.Pointer(&rc) + parms[7] = unsafe.Pointer(&rn) + bpxcall(parms[:], BPX4CTW) + return rv, rc, rn +} +func BpxGetthent(in *Pgtha, outlen *uint32, out unsafe.Pointer) (rv int32, rc int32, rn int32) { + var parms [7]unsafe.Pointer + inlen := uint32(26) // nothing else will work. Go says Pgtha is 28-byte because of alignment, but Pgtha is "packed" and must be 26-byte + parms[0] = unsafe.Pointer(&inlen) + parms[1] = unsafe.Pointer(&in) + parms[2] = unsafe.Pointer(outlen) + parms[3] = unsafe.Pointer(&out) + parms[4] = unsafe.Pointer(&rv) + parms[5] = unsafe.Pointer(&rc) + parms[6] = unsafe.Pointer(&rn) + bpxcall(parms[:], BPX4GTH) + return rv, rc, rn +} +func ZosJobname() (jobname string, err error) { + var pgtha Pgtha + pgtha.Pid = uint32(Getpid()) + pgtha.Accesspid = PGTHA_CURRENT + pgtha.Flag1 = PGTHA_PROCESS + var out [256]byte + var outlen uint32 + outlen = 256 + rv, rc, rn := BpxGetthent(&pgtha, &outlen, unsafe.Pointer(&out[0])) + if rv == 0 { + gthc := []byte{0x87, 0xa3, 0x88, 0x83} // 'gthc' in ebcdic + ix := bytes.Index(out[:], gthc) + if ix == -1 { + err = fmt.Errorf("BPX4GTH: gthc return data not found") + return + } + jn := out[ix+80 : ix+88] // we didn't declare Pgthc, but jobname is 8-byte at offset 80 + E2a(jn) + jobname = string(bytes.TrimRight(jn, " ")) + + } else { + err = fmt.Errorf("BPX4GTH: rc=%d errno=%d reason=code=0x%x", rv, rc, rn) + } + return +} +func Bpx4ptq(code int32, data string) (rv int32, rc int32, rn int32) { + var userdata [8]byte + var parms [5]unsafe.Pointer + copy(userdata[:], data+" ") + A2e(userdata[:]) + parms[0] = unsafe.Pointer(&code) + parms[1] = unsafe.Pointer(&userdata[0]) + parms[2] = unsafe.Pointer(&rv) + parms[3] = unsafe.Pointer(&rc) + parms[4] = unsafe.Pointer(&rn) + bpxcall(parms[:], BPX4PTQ) + return rv, rc, rn +} + +const ( + PT_TRACE_ME = 0 // Debug this process + PT_READ_I = 1 // Read a full word + PT_READ_D = 2 // Read a full word + PT_READ_U = 3 // Read control info + PT_WRITE_I = 4 //Write a full word + PT_WRITE_D = 5 //Write a full word + PT_CONTINUE = 7 //Continue the process + PT_KILL = 8 //Terminate the process + PT_READ_GPR = 11 // Read GPR, CR, PSW + PT_READ_FPR = 12 // Read FPR + PT_READ_VR = 13 // Read VR + PT_WRITE_GPR = 14 // Write GPR, CR, PSW + PT_WRITE_FPR = 15 // Write FPR + PT_WRITE_VR = 16 // Write VR + PT_READ_BLOCK = 17 // Read storage + PT_WRITE_BLOCK = 19 // Write storage + PT_READ_GPRH = 20 // Read GPRH + PT_WRITE_GPRH = 21 // Write GPRH + PT_REGHSET = 22 // Read all GPRHs + PT_ATTACH = 30 // Attach to a process + PT_DETACH = 31 // Detach from a process + PT_REGSET = 32 // Read all GPRs + PT_REATTACH = 33 // Reattach to a process + PT_LDINFO = 34 // Read loader info + PT_MULTI = 35 // Multi process mode + PT_LD64INFO = 36 // RMODE64 Info Area + PT_BLOCKREQ = 40 // Block request + PT_THREAD_INFO = 60 // Read thread info + PT_THREAD_MODIFY = 61 + PT_THREAD_READ_FOCUS = 62 + PT_THREAD_WRITE_FOCUS = 63 + PT_THREAD_HOLD = 64 + PT_THREAD_SIGNAL = 65 + PT_EXPLAIN = 66 + PT_EVENTS = 67 + PT_THREAD_INFO_EXTENDED = 68 + PT_REATTACH2 = 71 + PT_CAPTURE = 72 + PT_UNCAPTURE = 73 + PT_GET_THREAD_TCB = 74 + PT_GET_ALET = 75 + PT_SWAPIN = 76 + PT_EXTENDED_EVENT = 98 + PT_RECOVER = 99 // Debug a program check + PT_GPR0 = 0 // General purpose register 0 + PT_GPR1 = 1 // General purpose register 1 + PT_GPR2 = 2 // General purpose register 2 + PT_GPR3 = 3 // 
General purpose register 3 + PT_GPR4 = 4 // General purpose register 4 + PT_GPR5 = 5 // General purpose register 5 + PT_GPR6 = 6 // General purpose register 6 + PT_GPR7 = 7 // General purpose register 7 + PT_GPR8 = 8 // General purpose register 8 + PT_GPR9 = 9 // General purpose register 9 + PT_GPR10 = 10 // General purpose register 10 + PT_GPR11 = 11 // General purpose register 11 + PT_GPR12 = 12 // General purpose register 12 + PT_GPR13 = 13 // General purpose register 13 + PT_GPR14 = 14 // General purpose register 14 + PT_GPR15 = 15 // General purpose register 15 + PT_FPR0 = 16 // Floating point register 0 + PT_FPR1 = 17 // Floating point register 1 + PT_FPR2 = 18 // Floating point register 2 + PT_FPR3 = 19 // Floating point register 3 + PT_FPR4 = 20 // Floating point register 4 + PT_FPR5 = 21 // Floating point register 5 + PT_FPR6 = 22 // Floating point register 6 + PT_FPR7 = 23 // Floating point register 7 + PT_FPR8 = 24 // Floating point register 8 + PT_FPR9 = 25 // Floating point register 9 + PT_FPR10 = 26 // Floating point register 10 + PT_FPR11 = 27 // Floating point register 11 + PT_FPR12 = 28 // Floating point register 12 + PT_FPR13 = 29 // Floating point register 13 + PT_FPR14 = 30 // Floating point register 14 + PT_FPR15 = 31 // Floating point register 15 + PT_FPC = 32 // Floating point control register + PT_PSW = 40 // PSW + PT_PSW0 = 40 // Left half of the PSW + PT_PSW1 = 41 // Right half of the PSW + PT_CR0 = 42 // Control register 0 + PT_CR1 = 43 // Control register 1 + PT_CR2 = 44 // Control register 2 + PT_CR3 = 45 // Control register 3 + PT_CR4 = 46 // Control register 4 + PT_CR5 = 47 // Control register 5 + PT_CR6 = 48 // Control register 6 + PT_CR7 = 49 // Control register 7 + PT_CR8 = 50 // Control register 8 + PT_CR9 = 51 // Control register 9 + PT_CR10 = 52 // Control register 10 + PT_CR11 = 53 // Control register 11 + PT_CR12 = 54 // Control register 12 + PT_CR13 = 55 // Control register 13 + PT_CR14 = 56 // Control register 14 + PT_CR15 = 57 // Control register 15 + PT_GPRH0 = 58 // GP High register 0 + PT_GPRH1 = 59 // GP High register 1 + PT_GPRH2 = 60 // GP High register 2 + PT_GPRH3 = 61 // GP High register 3 + PT_GPRH4 = 62 // GP High register 4 + PT_GPRH5 = 63 // GP High register 5 + PT_GPRH6 = 64 // GP High register 6 + PT_GPRH7 = 65 // GP High register 7 + PT_GPRH8 = 66 // GP High register 8 + PT_GPRH9 = 67 // GP High register 9 + PT_GPRH10 = 68 // GP High register 10 + PT_GPRH11 = 69 // GP High register 11 + PT_GPRH12 = 70 // GP High register 12 + PT_GPRH13 = 71 // GP High register 13 + PT_GPRH14 = 72 // GP High register 14 + PT_GPRH15 = 73 // GP High register 15 + PT_VR0 = 74 // Vector register 0 + PT_VR1 = 75 // Vector register 1 + PT_VR2 = 76 // Vector register 2 + PT_VR3 = 77 // Vector register 3 + PT_VR4 = 78 // Vector register 4 + PT_VR5 = 79 // Vector register 5 + PT_VR6 = 80 // Vector register 6 + PT_VR7 = 81 // Vector register 7 + PT_VR8 = 82 // Vector register 8 + PT_VR9 = 83 // Vector register 9 + PT_VR10 = 84 // Vector register 10 + PT_VR11 = 85 // Vector register 11 + PT_VR12 = 86 // Vector register 12 + PT_VR13 = 87 // Vector register 13 + PT_VR14 = 88 // Vector register 14 + PT_VR15 = 89 // Vector register 15 + PT_VR16 = 90 // Vector register 16 + PT_VR17 = 91 // Vector register 17 + PT_VR18 = 92 // Vector register 18 + PT_VR19 = 93 // Vector register 19 + PT_VR20 = 94 // Vector register 20 + PT_VR21 = 95 // Vector register 21 + PT_VR22 = 96 // Vector register 22 + PT_VR23 = 97 // Vector register 23 + PT_VR24 = 98 // Vector register 24 + 
PT_VR25 = 99 // Vector register 25 + PT_VR26 = 100 // Vector register 26 + PT_VR27 = 101 // Vector register 27 + PT_VR28 = 102 // Vector register 28 + PT_VR29 = 103 // Vector register 29 + PT_VR30 = 104 // Vector register 30 + PT_VR31 = 105 // Vector register 31 + PT_PSWG = 106 // PSWG + PT_PSWG0 = 106 // Bytes 0-3 + PT_PSWG1 = 107 // Bytes 4-7 + PT_PSWG2 = 108 // Bytes 8-11 (IA high word) + PT_PSWG3 = 109 // Bytes 12-15 (IA low word) +) + +func Bpx4ptr(request int32, pid int32, addr unsafe.Pointer, data unsafe.Pointer, buffer unsafe.Pointer) (rv int32, rc int32, rn int32) { + var parms [8]unsafe.Pointer + parms[0] = unsafe.Pointer(&request) + parms[1] = unsafe.Pointer(&pid) + parms[2] = unsafe.Pointer(&addr) + parms[3] = unsafe.Pointer(&data) + parms[4] = unsafe.Pointer(&buffer) + parms[5] = unsafe.Pointer(&rv) + parms[6] = unsafe.Pointer(&rc) + parms[7] = unsafe.Pointer(&rn) + bpxcall(parms[:], BPX4PTR) + return rv, rc, rn +} + +func copyU8(val uint8, dest []uint8) int { + if len(dest) < 1 { + return 0 + } + dest[0] = val + return 1 +} + +func copyU8Arr(src, dest []uint8) int { + if len(dest) < len(src) { + return 0 + } + for i, v := range src { + dest[i] = v + } + return len(src) +} + +func copyU16(val uint16, dest []uint16) int { + if len(dest) < 1 { + return 0 + } + dest[0] = val + return 1 +} + +func copyU32(val uint32, dest []uint32) int { + if len(dest) < 1 { + return 0 + } + dest[0] = val + return 1 +} + +func copyU32Arr(src, dest []uint32) int { + if len(dest) < len(src) { + return 0 + } + for i, v := range src { + dest[i] = v + } + return len(src) +} + +func copyU64(val uint64, dest []uint64) int { + if len(dest) < 1 { + return 0 + } + dest[0] = val + return 1 +} diff --git a/vendor/golang.org/x/sys/unix/bpxsvc_zos.s b/vendor/golang.org/x/sys/unix/bpxsvc_zos.s new file mode 100644 index 0000000..4bd4a17 --- /dev/null +++ b/vendor/golang.org/x/sys/unix/bpxsvc_zos.s @@ -0,0 +1,192 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +#include "go_asm.h" +#include "textflag.h" + +// function to call USS assembly language services +// +// doc: https://www.ibm.com/support/knowledgecenter/en/SSLTBW_3.1.0/com.ibm.zos.v3r1.bpxb100/bit64env.htm +// +// arg1 unsafe.Pointer array that ressembles an OS PLIST +// +// arg2 function offset as in +// doc: https://www.ibm.com/support/knowledgecenter/en/SSLTBW_3.1.0/com.ibm.zos.v3r1.bpxb100/bpx2cr_List_of_offsets.htm +// +// func bpxcall(plist []unsafe.Pointer, bpx_offset int64) + +TEXT ·bpxcall(SB), NOSPLIT|NOFRAME, $0 + MOVD plist_base+0(FP), R1 // r1 points to plist + MOVD bpx_offset+24(FP), R2 // r2 offset to BPX vector table + MOVD R14, R7 // save r14 + MOVD R15, R8 // save r15 + MOVWZ 16(R0), R9 + MOVWZ 544(R9), R9 + MOVWZ 24(R9), R9 // call vector in r9 + ADD R2, R9 // add offset to vector table + MOVWZ (R9), R9 // r9 points to entry point + BYTE $0x0D // BL R14,R9 --> basr r14,r9 + BYTE $0xE9 // clobbers 0,1,14,15 + MOVD R8, R15 // restore 15 + JMP R7 // return via saved return address + +// func A2e(arr [] byte) +// code page conversion from 819 to 1047 +TEXT ·A2e(SB), NOSPLIT|NOFRAME, $0 + MOVD arg_base+0(FP), R2 // pointer to arry of characters + MOVD arg_len+8(FP), R3 // count + XOR R0, R0 + XOR R1, R1 + BYTE $0xA7; BYTE $0x15; BYTE $0x00; BYTE $0x82 // BRAS 1,(2+(256/2)) + + // ASCII -> EBCDIC conversion table: + BYTE $0x00; BYTE $0x01; BYTE $0x02; BYTE $0x03 + BYTE $0x37; BYTE $0x2d; BYTE $0x2e; BYTE $0x2f + BYTE $0x16; BYTE $0x05; BYTE $0x15; BYTE $0x0b + BYTE $0x0c; BYTE $0x0d; BYTE $0x0e; BYTE $0x0f + BYTE $0x10; BYTE $0x11; BYTE $0x12; BYTE $0x13 + BYTE $0x3c; BYTE $0x3d; BYTE $0x32; BYTE $0x26 + BYTE $0x18; BYTE $0x19; BYTE $0x3f; BYTE $0x27 + BYTE $0x1c; BYTE $0x1d; BYTE $0x1e; BYTE $0x1f + BYTE $0x40; BYTE $0x5a; BYTE $0x7f; BYTE $0x7b + BYTE $0x5b; BYTE $0x6c; BYTE $0x50; BYTE $0x7d + BYTE $0x4d; BYTE $0x5d; BYTE $0x5c; BYTE $0x4e + BYTE $0x6b; BYTE $0x60; BYTE $0x4b; BYTE $0x61 + BYTE $0xf0; BYTE $0xf1; BYTE $0xf2; BYTE $0xf3 + BYTE $0xf4; BYTE $0xf5; BYTE $0xf6; BYTE $0xf7 + BYTE $0xf8; BYTE $0xf9; BYTE $0x7a; BYTE $0x5e + BYTE $0x4c; BYTE $0x7e; BYTE $0x6e; BYTE $0x6f + BYTE $0x7c; BYTE $0xc1; BYTE $0xc2; BYTE $0xc3 + BYTE $0xc4; BYTE $0xc5; BYTE $0xc6; BYTE $0xc7 + BYTE $0xc8; BYTE $0xc9; BYTE $0xd1; BYTE $0xd2 + BYTE $0xd3; BYTE $0xd4; BYTE $0xd5; BYTE $0xd6 + BYTE $0xd7; BYTE $0xd8; BYTE $0xd9; BYTE $0xe2 + BYTE $0xe3; BYTE $0xe4; BYTE $0xe5; BYTE $0xe6 + BYTE $0xe7; BYTE $0xe8; BYTE $0xe9; BYTE $0xad + BYTE $0xe0; BYTE $0xbd; BYTE $0x5f; BYTE $0x6d + BYTE $0x79; BYTE $0x81; BYTE $0x82; BYTE $0x83 + BYTE $0x84; BYTE $0x85; BYTE $0x86; BYTE $0x87 + BYTE $0x88; BYTE $0x89; BYTE $0x91; BYTE $0x92 + BYTE $0x93; BYTE $0x94; BYTE $0x95; BYTE $0x96 + BYTE $0x97; BYTE $0x98; BYTE $0x99; BYTE $0xa2 + BYTE $0xa3; BYTE $0xa4; BYTE $0xa5; BYTE $0xa6 + BYTE $0xa7; BYTE $0xa8; BYTE $0xa9; BYTE $0xc0 + BYTE $0x4f; BYTE $0xd0; BYTE $0xa1; BYTE $0x07 + BYTE $0x20; BYTE $0x21; BYTE $0x22; BYTE $0x23 + BYTE $0x24; BYTE $0x25; BYTE $0x06; BYTE $0x17 + BYTE $0x28; BYTE $0x29; BYTE $0x2a; BYTE $0x2b + BYTE $0x2c; BYTE $0x09; BYTE $0x0a; BYTE $0x1b + BYTE $0x30; BYTE $0x31; BYTE $0x1a; BYTE $0x33 + BYTE $0x34; BYTE $0x35; BYTE $0x36; BYTE $0x08 + BYTE $0x38; BYTE $0x39; BYTE $0x3a; BYTE $0x3b + BYTE $0x04; BYTE $0x14; BYTE $0x3e; BYTE $0xff + BYTE $0x41; BYTE $0xaa; BYTE $0x4a; BYTE $0xb1 + BYTE $0x9f; BYTE $0xb2; BYTE $0x6a; BYTE $0xb5 + BYTE $0xbb; BYTE $0xb4; BYTE $0x9a; BYTE $0x8a + BYTE $0xb0; BYTE $0xca; BYTE $0xaf; BYTE $0xbc + BYTE $0x90; BYTE $0x8f; BYTE $0xea; BYTE 
$0xfa + BYTE $0xbe; BYTE $0xa0; BYTE $0xb6; BYTE $0xb3 + BYTE $0x9d; BYTE $0xda; BYTE $0x9b; BYTE $0x8b + BYTE $0xb7; BYTE $0xb8; BYTE $0xb9; BYTE $0xab + BYTE $0x64; BYTE $0x65; BYTE $0x62; BYTE $0x66 + BYTE $0x63; BYTE $0x67; BYTE $0x9e; BYTE $0x68 + BYTE $0x74; BYTE $0x71; BYTE $0x72; BYTE $0x73 + BYTE $0x78; BYTE $0x75; BYTE $0x76; BYTE $0x77 + BYTE $0xac; BYTE $0x69; BYTE $0xed; BYTE $0xee + BYTE $0xeb; BYTE $0xef; BYTE $0xec; BYTE $0xbf + BYTE $0x80; BYTE $0xfd; BYTE $0xfe; BYTE $0xfb + BYTE $0xfc; BYTE $0xba; BYTE $0xae; BYTE $0x59 + BYTE $0x44; BYTE $0x45; BYTE $0x42; BYTE $0x46 + BYTE $0x43; BYTE $0x47; BYTE $0x9c; BYTE $0x48 + BYTE $0x54; BYTE $0x51; BYTE $0x52; BYTE $0x53 + BYTE $0x58; BYTE $0x55; BYTE $0x56; BYTE $0x57 + BYTE $0x8c; BYTE $0x49; BYTE $0xcd; BYTE $0xce + BYTE $0xcb; BYTE $0xcf; BYTE $0xcc; BYTE $0xe1 + BYTE $0x70; BYTE $0xdd; BYTE $0xde; BYTE $0xdb + BYTE $0xdc; BYTE $0x8d; BYTE $0x8e; BYTE $0xdf + +retry: + WORD $0xB9931022 // TROO 2,2,b'0001' + BVS retry + RET + +// func e2a(arr [] byte) +// code page conversion from 1047 to 819 +TEXT ·E2a(SB), NOSPLIT|NOFRAME, $0 + MOVD arg_base+0(FP), R2 // pointer to arry of characters + MOVD arg_len+8(FP), R3 // count + XOR R0, R0 + XOR R1, R1 + BYTE $0xA7; BYTE $0x15; BYTE $0x00; BYTE $0x82 // BRAS 1,(2+(256/2)) + + // EBCDIC -> ASCII conversion table: + BYTE $0x00; BYTE $0x01; BYTE $0x02; BYTE $0x03 + BYTE $0x9c; BYTE $0x09; BYTE $0x86; BYTE $0x7f + BYTE $0x97; BYTE $0x8d; BYTE $0x8e; BYTE $0x0b + BYTE $0x0c; BYTE $0x0d; BYTE $0x0e; BYTE $0x0f + BYTE $0x10; BYTE $0x11; BYTE $0x12; BYTE $0x13 + BYTE $0x9d; BYTE $0x0a; BYTE $0x08; BYTE $0x87 + BYTE $0x18; BYTE $0x19; BYTE $0x92; BYTE $0x8f + BYTE $0x1c; BYTE $0x1d; BYTE $0x1e; BYTE $0x1f + BYTE $0x80; BYTE $0x81; BYTE $0x82; BYTE $0x83 + BYTE $0x84; BYTE $0x85; BYTE $0x17; BYTE $0x1b + BYTE $0x88; BYTE $0x89; BYTE $0x8a; BYTE $0x8b + BYTE $0x8c; BYTE $0x05; BYTE $0x06; BYTE $0x07 + BYTE $0x90; BYTE $0x91; BYTE $0x16; BYTE $0x93 + BYTE $0x94; BYTE $0x95; BYTE $0x96; BYTE $0x04 + BYTE $0x98; BYTE $0x99; BYTE $0x9a; BYTE $0x9b + BYTE $0x14; BYTE $0x15; BYTE $0x9e; BYTE $0x1a + BYTE $0x20; BYTE $0xa0; BYTE $0xe2; BYTE $0xe4 + BYTE $0xe0; BYTE $0xe1; BYTE $0xe3; BYTE $0xe5 + BYTE $0xe7; BYTE $0xf1; BYTE $0xa2; BYTE $0x2e + BYTE $0x3c; BYTE $0x28; BYTE $0x2b; BYTE $0x7c + BYTE $0x26; BYTE $0xe9; BYTE $0xea; BYTE $0xeb + BYTE $0xe8; BYTE $0xed; BYTE $0xee; BYTE $0xef + BYTE $0xec; BYTE $0xdf; BYTE $0x21; BYTE $0x24 + BYTE $0x2a; BYTE $0x29; BYTE $0x3b; BYTE $0x5e + BYTE $0x2d; BYTE $0x2f; BYTE $0xc2; BYTE $0xc4 + BYTE $0xc0; BYTE $0xc1; BYTE $0xc3; BYTE $0xc5 + BYTE $0xc7; BYTE $0xd1; BYTE $0xa6; BYTE $0x2c + BYTE $0x25; BYTE $0x5f; BYTE $0x3e; BYTE $0x3f + BYTE $0xf8; BYTE $0xc9; BYTE $0xca; BYTE $0xcb + BYTE $0xc8; BYTE $0xcd; BYTE $0xce; BYTE $0xcf + BYTE $0xcc; BYTE $0x60; BYTE $0x3a; BYTE $0x23 + BYTE $0x40; BYTE $0x27; BYTE $0x3d; BYTE $0x22 + BYTE $0xd8; BYTE $0x61; BYTE $0x62; BYTE $0x63 + BYTE $0x64; BYTE $0x65; BYTE $0x66; BYTE $0x67 + BYTE $0x68; BYTE $0x69; BYTE $0xab; BYTE $0xbb + BYTE $0xf0; BYTE $0xfd; BYTE $0xfe; BYTE $0xb1 + BYTE $0xb0; BYTE $0x6a; BYTE $0x6b; BYTE $0x6c + BYTE $0x6d; BYTE $0x6e; BYTE $0x6f; BYTE $0x70 + BYTE $0x71; BYTE $0x72; BYTE $0xaa; BYTE $0xba + BYTE $0xe6; BYTE $0xb8; BYTE $0xc6; BYTE $0xa4 + BYTE $0xb5; BYTE $0x7e; BYTE $0x73; BYTE $0x74 + BYTE $0x75; BYTE $0x76; BYTE $0x77; BYTE $0x78 + BYTE $0x79; BYTE $0x7a; BYTE $0xa1; BYTE $0xbf + BYTE $0xd0; BYTE $0x5b; BYTE $0xde; BYTE $0xae + BYTE $0xac; BYTE $0xa3; BYTE $0xa5; BYTE $0xb7 + BYTE 
$0xa9; BYTE $0xa7; BYTE $0xb6; BYTE $0xbc + BYTE $0xbd; BYTE $0xbe; BYTE $0xdd; BYTE $0xa8 + BYTE $0xaf; BYTE $0x5d; BYTE $0xb4; BYTE $0xd7 + BYTE $0x7b; BYTE $0x41; BYTE $0x42; BYTE $0x43 + BYTE $0x44; BYTE $0x45; BYTE $0x46; BYTE $0x47 + BYTE $0x48; BYTE $0x49; BYTE $0xad; BYTE $0xf4 + BYTE $0xf6; BYTE $0xf2; BYTE $0xf3; BYTE $0xf5 + BYTE $0x7d; BYTE $0x4a; BYTE $0x4b; BYTE $0x4c + BYTE $0x4d; BYTE $0x4e; BYTE $0x4f; BYTE $0x50 + BYTE $0x51; BYTE $0x52; BYTE $0xb9; BYTE $0xfb + BYTE $0xfc; BYTE $0xf9; BYTE $0xfa; BYTE $0xff + BYTE $0x5c; BYTE $0xf7; BYTE $0x53; BYTE $0x54 + BYTE $0x55; BYTE $0x56; BYTE $0x57; BYTE $0x58 + BYTE $0x59; BYTE $0x5a; BYTE $0xb2; BYTE $0xd4 + BYTE $0xd6; BYTE $0xd2; BYTE $0xd3; BYTE $0xd5 + BYTE $0x30; BYTE $0x31; BYTE $0x32; BYTE $0x33 + BYTE $0x34; BYTE $0x35; BYTE $0x36; BYTE $0x37 + BYTE $0x38; BYTE $0x39; BYTE $0xb3; BYTE $0xdb + BYTE $0xdc; BYTE $0xd9; BYTE $0xda; BYTE $0x9f + +retry: + WORD $0xB9931022 // TROO 2,2,b'0001' + BVS retry + RET diff --git a/vendor/golang.org/x/sys/unix/epoll_zos.go b/vendor/golang.org/x/sys/unix/epoll_zos.go deleted file mode 100644 index 7753fdd..0000000 --- a/vendor/golang.org/x/sys/unix/epoll_zos.go +++ /dev/null @@ -1,220 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build zos && s390x - -package unix - -import ( - "sync" -) - -// This file simulates epoll on z/OS using poll. - -// Analogous to epoll_event on Linux. -// TODO(neeilan): Pad is because the Linux kernel expects a 96-bit struct. We never pass this to the kernel; remove? -type EpollEvent struct { - Events uint32 - Fd int32 - Pad int32 -} - -const ( - EPOLLERR = 0x8 - EPOLLHUP = 0x10 - EPOLLIN = 0x1 - EPOLLMSG = 0x400 - EPOLLOUT = 0x4 - EPOLLPRI = 0x2 - EPOLLRDBAND = 0x80 - EPOLLRDNORM = 0x40 - EPOLLWRBAND = 0x200 - EPOLLWRNORM = 0x100 - EPOLL_CTL_ADD = 0x1 - EPOLL_CTL_DEL = 0x2 - EPOLL_CTL_MOD = 0x3 - // The following constants are part of the epoll API, but represent - // currently unsupported functionality on z/OS. - // EPOLL_CLOEXEC = 0x80000 - // EPOLLET = 0x80000000 - // EPOLLONESHOT = 0x40000000 - // EPOLLRDHUP = 0x2000 // Typically used with edge-triggered notis - // EPOLLEXCLUSIVE = 0x10000000 // Exclusive wake-up mode - // EPOLLWAKEUP = 0x20000000 // Relies on Linux's BLOCK_SUSPEND capability -) - -// TODO(neeilan): We can eliminate these epToPoll / pToEpoll calls by using identical mask values for POLL/EPOLL -// constants where possible The lower 16 bits of epoll events (uint32) can fit any system poll event (int16). - -// epToPollEvt converts epoll event field to poll equivalent. -// In epoll, Events is a 32-bit field, while poll uses 16 bits. -func epToPollEvt(events uint32) int16 { - var ep2p = map[uint32]int16{ - EPOLLIN: POLLIN, - EPOLLOUT: POLLOUT, - EPOLLHUP: POLLHUP, - EPOLLPRI: POLLPRI, - EPOLLERR: POLLERR, - } - - var pollEvts int16 = 0 - for epEvt, pEvt := range ep2p { - if (events & epEvt) != 0 { - pollEvts |= pEvt - } - } - - return pollEvts -} - -// pToEpollEvt converts 16 bit poll event bitfields to 32-bit epoll event fields. -func pToEpollEvt(revents int16) uint32 { - var p2ep = map[int16]uint32{ - POLLIN: EPOLLIN, - POLLOUT: EPOLLOUT, - POLLHUP: EPOLLHUP, - POLLPRI: EPOLLPRI, - POLLERR: EPOLLERR, - } - - var epollEvts uint32 = 0 - for pEvt, epEvt := range p2ep { - if (revents & pEvt) != 0 { - epollEvts |= epEvt - } - } - - return epollEvts -} - -// Per-process epoll implementation. 
-type epollImpl struct { - mu sync.Mutex - epfd2ep map[int]*eventPoll - nextEpfd int -} - -// eventPoll holds a set of file descriptors being watched by the process. A process can have multiple epoll instances. -// On Linux, this is an in-kernel data structure accessed through a fd. -type eventPoll struct { - mu sync.Mutex - fds map[int]*EpollEvent -} - -// epoll impl for this process. -var impl epollImpl = epollImpl{ - epfd2ep: make(map[int]*eventPoll), - nextEpfd: 0, -} - -func (e *epollImpl) epollcreate(size int) (epfd int, err error) { - e.mu.Lock() - defer e.mu.Unlock() - epfd = e.nextEpfd - e.nextEpfd++ - - e.epfd2ep[epfd] = &eventPoll{ - fds: make(map[int]*EpollEvent), - } - return epfd, nil -} - -func (e *epollImpl) epollcreate1(flag int) (fd int, err error) { - return e.epollcreate(4) -} - -func (e *epollImpl) epollctl(epfd int, op int, fd int, event *EpollEvent) (err error) { - e.mu.Lock() - defer e.mu.Unlock() - - ep, ok := e.epfd2ep[epfd] - if !ok { - - return EBADF - } - - switch op { - case EPOLL_CTL_ADD: - // TODO(neeilan): When we make epfds and fds disjoint, detect epoll - // loops here (instances watching each other) and return ELOOP. - if _, ok := ep.fds[fd]; ok { - return EEXIST - } - ep.fds[fd] = event - case EPOLL_CTL_MOD: - if _, ok := ep.fds[fd]; !ok { - return ENOENT - } - ep.fds[fd] = event - case EPOLL_CTL_DEL: - if _, ok := ep.fds[fd]; !ok { - return ENOENT - } - delete(ep.fds, fd) - - } - return nil -} - -// Must be called while holding ep.mu -func (ep *eventPoll) getFds() []int { - fds := make([]int, len(ep.fds)) - for fd := range ep.fds { - fds = append(fds, fd) - } - return fds -} - -func (e *epollImpl) epollwait(epfd int, events []EpollEvent, msec int) (n int, err error) { - e.mu.Lock() // in [rare] case of concurrent epollcreate + epollwait - ep, ok := e.epfd2ep[epfd] - - if !ok { - e.mu.Unlock() - return 0, EBADF - } - - pollfds := make([]PollFd, 4) - for fd, epollevt := range ep.fds { - pollfds = append(pollfds, PollFd{Fd: int32(fd), Events: epToPollEvt(epollevt.Events)}) - } - e.mu.Unlock() - - n, err = Poll(pollfds, msec) - if err != nil { - return n, err - } - - i := 0 - for _, pFd := range pollfds { - if pFd.Revents != 0 { - events[i] = EpollEvent{Fd: pFd.Fd, Events: pToEpollEvt(pFd.Revents)} - i++ - } - - if i == n { - break - } - } - - return n, nil -} - -func EpollCreate(size int) (fd int, err error) { - return impl.epollcreate(size) -} - -func EpollCreate1(flag int) (fd int, err error) { - return impl.epollcreate1(flag) -} - -func EpollCtl(epfd int, op int, fd int, event *EpollEvent) (err error) { - return impl.epollctl(epfd, op, fd, event) -} - -// Because EpollWait mutates events, the caller is expected to coordinate -// concurrent access if calling with the same epfd from multiple goroutines. -func EpollWait(epfd int, events []EpollEvent, msec int) (n int, err error) { - return impl.epollwait(epfd, events, msec) -} diff --git a/vendor/golang.org/x/sys/unix/fstatfs_zos.go b/vendor/golang.org/x/sys/unix/fstatfs_zos.go deleted file mode 100644 index c8bde60..0000000 --- a/vendor/golang.org/x/sys/unix/fstatfs_zos.go +++ /dev/null @@ -1,163 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build zos && s390x - -package unix - -import ( - "unsafe" -) - -// This file simulates fstatfs on z/OS using fstatvfs and w_getmntent. 
- -func Fstatfs(fd int, stat *Statfs_t) (err error) { - var stat_v Statvfs_t - err = Fstatvfs(fd, &stat_v) - if err == nil { - // populate stat - stat.Type = 0 - stat.Bsize = stat_v.Bsize - stat.Blocks = stat_v.Blocks - stat.Bfree = stat_v.Bfree - stat.Bavail = stat_v.Bavail - stat.Files = stat_v.Files - stat.Ffree = stat_v.Ffree - stat.Fsid = stat_v.Fsid - stat.Namelen = stat_v.Namemax - stat.Frsize = stat_v.Frsize - stat.Flags = stat_v.Flag - for passn := 0; passn < 5; passn++ { - switch passn { - case 0: - err = tryGetmntent64(stat) - break - case 1: - err = tryGetmntent128(stat) - break - case 2: - err = tryGetmntent256(stat) - break - case 3: - err = tryGetmntent512(stat) - break - case 4: - err = tryGetmntent1024(stat) - break - default: - break - } - //proceed to return if: err is nil (found), err is nonnil but not ERANGE (another error occurred) - if err == nil || err != nil && err != ERANGE { - break - } - } - } - return err -} - -func tryGetmntent64(stat *Statfs_t) (err error) { - var mnt_ent_buffer struct { - header W_Mnth - filesys_info [64]W_Mntent - } - var buffer_size int = int(unsafe.Sizeof(mnt_ent_buffer)) - fs_count, err := W_Getmntent((*byte)(unsafe.Pointer(&mnt_ent_buffer)), buffer_size) - if err != nil { - return err - } - err = ERANGE //return ERANGE if no match is found in this batch - for i := 0; i < fs_count; i++ { - if stat.Fsid == uint64(mnt_ent_buffer.filesys_info[i].Dev) { - stat.Type = uint32(mnt_ent_buffer.filesys_info[i].Fstname[0]) - err = nil - break - } - } - return err -} - -func tryGetmntent128(stat *Statfs_t) (err error) { - var mnt_ent_buffer struct { - header W_Mnth - filesys_info [128]W_Mntent - } - var buffer_size int = int(unsafe.Sizeof(mnt_ent_buffer)) - fs_count, err := W_Getmntent((*byte)(unsafe.Pointer(&mnt_ent_buffer)), buffer_size) - if err != nil { - return err - } - err = ERANGE //return ERANGE if no match is found in this batch - for i := 0; i < fs_count; i++ { - if stat.Fsid == uint64(mnt_ent_buffer.filesys_info[i].Dev) { - stat.Type = uint32(mnt_ent_buffer.filesys_info[i].Fstname[0]) - err = nil - break - } - } - return err -} - -func tryGetmntent256(stat *Statfs_t) (err error) { - var mnt_ent_buffer struct { - header W_Mnth - filesys_info [256]W_Mntent - } - var buffer_size int = int(unsafe.Sizeof(mnt_ent_buffer)) - fs_count, err := W_Getmntent((*byte)(unsafe.Pointer(&mnt_ent_buffer)), buffer_size) - if err != nil { - return err - } - err = ERANGE //return ERANGE if no match is found in this batch - for i := 0; i < fs_count; i++ { - if stat.Fsid == uint64(mnt_ent_buffer.filesys_info[i].Dev) { - stat.Type = uint32(mnt_ent_buffer.filesys_info[i].Fstname[0]) - err = nil - break - } - } - return err -} - -func tryGetmntent512(stat *Statfs_t) (err error) { - var mnt_ent_buffer struct { - header W_Mnth - filesys_info [512]W_Mntent - } - var buffer_size int = int(unsafe.Sizeof(mnt_ent_buffer)) - fs_count, err := W_Getmntent((*byte)(unsafe.Pointer(&mnt_ent_buffer)), buffer_size) - if err != nil { - return err - } - err = ERANGE //return ERANGE if no match is found in this batch - for i := 0; i < fs_count; i++ { - if stat.Fsid == uint64(mnt_ent_buffer.filesys_info[i].Dev) { - stat.Type = uint32(mnt_ent_buffer.filesys_info[i].Fstname[0]) - err = nil - break - } - } - return err -} - -func tryGetmntent1024(stat *Statfs_t) (err error) { - var mnt_ent_buffer struct { - header W_Mnth - filesys_info [1024]W_Mntent - } - var buffer_size int = int(unsafe.Sizeof(mnt_ent_buffer)) - fs_count, err := W_Getmntent((*byte)(unsafe.Pointer(&mnt_ent_buffer)), 
buffer_size) - if err != nil { - return err - } - err = ERANGE //return ERANGE if no match is found in this batch - for i := 0; i < fs_count; i++ { - if stat.Fsid == uint64(mnt_ent_buffer.filesys_info[i].Dev) { - stat.Type = uint32(mnt_ent_buffer.filesys_info[i].Fstname[0]) - err = nil - break - } - } - return err -} diff --git a/vendor/golang.org/x/sys/unix/mkerrors.sh b/vendor/golang.org/x/sys/unix/mkerrors.sh index 6202638..fdcaa97 100644 --- a/vendor/golang.org/x/sys/unix/mkerrors.sh +++ b/vendor/golang.org/x/sys/unix/mkerrors.sh @@ -248,6 +248,7 @@ struct ltchars { #include #include #include +#include #include #include #include @@ -283,10 +284,6 @@ struct ltchars { #include #endif -#ifndef MSG_FASTOPEN -#define MSG_FASTOPEN 0x20000000 -#endif - #ifndef PTRACE_GETREGS #define PTRACE_GETREGS 0xc #endif @@ -295,14 +292,6 @@ struct ltchars { #define PTRACE_SETREGS 0xd #endif -#ifndef SOL_NETLINK -#define SOL_NETLINK 270 -#endif - -#ifndef SOL_SMC -#define SOL_SMC 286 -#endif - #ifdef SOL_BLUETOOTH // SPARC includes this in /usr/include/sparc64-linux-gnu/bits/socket.h // but it is already in bluetooth_linux.go @@ -319,10 +308,23 @@ struct ltchars { #undef TIPC_WAIT_FOREVER #define TIPC_WAIT_FOREVER 0xffffffff -// Copied from linux/l2tp.h -// Including linux/l2tp.h here causes conflicts between linux/in.h -// and netinet/in.h included via net/route.h above. -#define IPPROTO_L2TP 115 +// Copied from linux/netfilter/nf_nat.h +// Including linux/netfilter/nf_nat.h here causes conflicts between linux/in.h +// and netinet/in.h. +#define NF_NAT_RANGE_MAP_IPS (1 << 0) +#define NF_NAT_RANGE_PROTO_SPECIFIED (1 << 1) +#define NF_NAT_RANGE_PROTO_RANDOM (1 << 2) +#define NF_NAT_RANGE_PERSISTENT (1 << 3) +#define NF_NAT_RANGE_PROTO_RANDOM_FULLY (1 << 4) +#define NF_NAT_RANGE_PROTO_OFFSET (1 << 5) +#define NF_NAT_RANGE_NETMAP (1 << 6) +#define NF_NAT_RANGE_PROTO_RANDOM_ALL \ + (NF_NAT_RANGE_PROTO_RANDOM | NF_NAT_RANGE_PROTO_RANDOM_FULLY) +#define NF_NAT_RANGE_MASK \ + (NF_NAT_RANGE_MAP_IPS | NF_NAT_RANGE_PROTO_SPECIFIED | \ + NF_NAT_RANGE_PROTO_RANDOM | NF_NAT_RANGE_PERSISTENT | \ + NF_NAT_RANGE_PROTO_RANDOM_FULLY | NF_NAT_RANGE_PROTO_OFFSET | \ + NF_NAT_RANGE_NETMAP) // Copied from linux/hid.h. // Keep in sync with the size of the referenced fields. @@ -582,7 +584,7 @@ ccflags="$@" $2 ~ /^KEY_(SPEC|REQKEY_DEFL)_/ || $2 ~ /^KEYCTL_/ || $2 ~ /^PERF_/ || - $2 ~ /^SECCOMP_MODE_/ || + $2 ~ /^SECCOMP_/ || $2 ~ /^SEEK_/ || $2 ~ /^SCHED_/ || $2 ~ /^SPLICE_/ || @@ -603,6 +605,9 @@ ccflags="$@" $2 ~ /^FSOPT_/ || $2 ~ /^WDIO[CFS]_/ || $2 ~ /^NFN/ || + $2 !~ /^NFT_META_IIFTYPE/ && + $2 ~ /^NFT_/ || + $2 ~ /^NF_NAT_/ || $2 ~ /^XDP_/ || $2 ~ /^RWF_/ || $2 ~ /^(HDIO|WIN|SMART)_/ || diff --git a/vendor/golang.org/x/sys/unix/mmap_nomremap.go b/vendor/golang.org/x/sys/unix/mmap_nomremap.go index 4b68e59..7f602ff 100644 --- a/vendor/golang.org/x/sys/unix/mmap_nomremap.go +++ b/vendor/golang.org/x/sys/unix/mmap_nomremap.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-//go:build aix || darwin || dragonfly || freebsd || openbsd || solaris +//go:build aix || darwin || dragonfly || freebsd || openbsd || solaris || zos package unix diff --git a/vendor/golang.org/x/sys/unix/pagesize_unix.go b/vendor/golang.org/x/sys/unix/pagesize_unix.go index 4d0a343..0482408 100644 --- a/vendor/golang.org/x/sys/unix/pagesize_unix.go +++ b/vendor/golang.org/x/sys/unix/pagesize_unix.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris +//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || zos // For Unix, get the pagesize from the runtime. diff --git a/vendor/golang.org/x/sys/unix/readdirent_getdirentries.go b/vendor/golang.org/x/sys/unix/readdirent_getdirentries.go index 130398b..b903c00 100644 --- a/vendor/golang.org/x/sys/unix/readdirent_getdirentries.go +++ b/vendor/golang.org/x/sys/unix/readdirent_getdirentries.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build darwin +//go:build darwin || zos package unix diff --git a/vendor/golang.org/x/sys/unix/sockcmsg_zos.go b/vendor/golang.org/x/sys/unix/sockcmsg_zos.go new file mode 100644 index 0000000..3e53dbc --- /dev/null +++ b/vendor/golang.org/x/sys/unix/sockcmsg_zos.go @@ -0,0 +1,58 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Socket control messages + +package unix + +import "unsafe" + +// UnixCredentials encodes credentials into a socket control message +// for sending to another process. This can be used for +// authentication. +func UnixCredentials(ucred *Ucred) []byte { + b := make([]byte, CmsgSpace(SizeofUcred)) + h := (*Cmsghdr)(unsafe.Pointer(&b[0])) + h.Level = SOL_SOCKET + h.Type = SCM_CREDENTIALS + h.SetLen(CmsgLen(SizeofUcred)) + *(*Ucred)(h.data(0)) = *ucred + return b +} + +// ParseUnixCredentials decodes a socket control message that contains +// credentials in a Ucred structure. To receive such a message, the +// SO_PASSCRED option must be enabled on the socket. +func ParseUnixCredentials(m *SocketControlMessage) (*Ucred, error) { + if m.Header.Level != SOL_SOCKET { + return nil, EINVAL + } + if m.Header.Type != SCM_CREDENTIALS { + return nil, EINVAL + } + ucred := *(*Ucred)(unsafe.Pointer(&m.Data[0])) + return &ucred, nil +} + +// PktInfo4 encodes Inet4Pktinfo into a socket control message of type IP_PKTINFO. +func PktInfo4(info *Inet4Pktinfo) []byte { + b := make([]byte, CmsgSpace(SizeofInet4Pktinfo)) + h := (*Cmsghdr)(unsafe.Pointer(&b[0])) + h.Level = SOL_IP + h.Type = IP_PKTINFO + h.SetLen(CmsgLen(SizeofInet4Pktinfo)) + *(*Inet4Pktinfo)(h.data(0)) = *info + return b +} + +// PktInfo6 encodes Inet6Pktinfo into a socket control message of type IPV6_PKTINFO. +func PktInfo6(info *Inet6Pktinfo) []byte { + b := make([]byte, CmsgSpace(SizeofInet6Pktinfo)) + h := (*Cmsghdr)(unsafe.Pointer(&b[0])) + h.Level = SOL_IPV6 + h.Type = IPV6_PKTINFO + h.SetLen(CmsgLen(SizeofInet6Pktinfo)) + *(*Inet6Pktinfo)(h.data(0)) = *info + return b +} diff --git a/vendor/golang.org/x/sys/unix/symaddr_zos_s390x.s b/vendor/golang.org/x/sys/unix/symaddr_zos_s390x.s new file mode 100644 index 0000000..3c4f33c --- /dev/null +++ b/vendor/golang.org/x/sys/unix/symaddr_zos_s390x.s @@ -0,0 +1,75 @@ +// Copyright 2024 The Go Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build zos && s390x && gc + +#include "textflag.h" + +// provide the address of function variable to be fixed up. + +TEXT ·getPipe2Addr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Pipe2(SB), R8 + MOVD R8, ret+0(FP) + RET + +TEXT ·get_FlockAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Flock(SB), R8 + MOVD R8, ret+0(FP) + RET + +TEXT ·get_GetxattrAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Getxattr(SB), R8 + MOVD R8, ret+0(FP) + RET + +TEXT ·get_NanosleepAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Nanosleep(SB), R8 + MOVD R8, ret+0(FP) + RET + +TEXT ·get_SetxattrAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Setxattr(SB), R8 + MOVD R8, ret+0(FP) + RET + +TEXT ·get_Wait4Addr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Wait4(SB), R8 + MOVD R8, ret+0(FP) + RET + +TEXT ·get_MountAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Mount(SB), R8 + MOVD R8, ret+0(FP) + RET + +TEXT ·get_UnmountAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Unmount(SB), R8 + MOVD R8, ret+0(FP) + RET + +TEXT ·get_UtimesNanoAtAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·UtimesNanoAt(SB), R8 + MOVD R8, ret+0(FP) + RET + +TEXT ·get_UtimesNanoAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·UtimesNano(SB), R8 + MOVD R8, ret+0(FP) + RET + +TEXT ·get_MkfifoatAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Mkfifoat(SB), R8 + MOVD R8, ret+0(FP) + RET + +TEXT ·get_ChtagAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Chtag(SB), R8 + MOVD R8, ret+0(FP) + RET + +TEXT ·get_ReadlinkatAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Readlinkat(SB), R8 + MOVD R8, ret+0(FP) + RET + diff --git a/vendor/golang.org/x/sys/unix/syscall_darwin_libSystem.go b/vendor/golang.org/x/sys/unix/syscall_darwin_libSystem.go index 16dc699..2f0fa76 100644 --- a/vendor/golang.org/x/sys/unix/syscall_darwin_libSystem.go +++ b/vendor/golang.org/x/sys/unix/syscall_darwin_libSystem.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build darwin && go1.12 +//go:build darwin package unix diff --git a/vendor/golang.org/x/sys/unix/syscall_freebsd.go b/vendor/golang.org/x/sys/unix/syscall_freebsd.go index 64d1bb4..2b57e0f 100644 --- a/vendor/golang.org/x/sys/unix/syscall_freebsd.go +++ b/vendor/golang.org/x/sys/unix/syscall_freebsd.go @@ -13,6 +13,7 @@ package unix import ( + "errors" "sync" "unsafe" ) @@ -169,25 +170,26 @@ func Getfsstat(buf []Statfs_t, flags int) (n int, err error) { func Uname(uname *Utsname) error { mib := []_C_int{CTL_KERN, KERN_OSTYPE} n := unsafe.Sizeof(uname.Sysname) - if err := sysctl(mib, &uname.Sysname[0], &n, nil, 0); err != nil { + // Suppress ENOMEM errors to be compatible with the C library __xuname() implementation. 
+ if err := sysctl(mib, &uname.Sysname[0], &n, nil, 0); err != nil && !errors.Is(err, ENOMEM) { return err } mib = []_C_int{CTL_KERN, KERN_HOSTNAME} n = unsafe.Sizeof(uname.Nodename) - if err := sysctl(mib, &uname.Nodename[0], &n, nil, 0); err != nil { + if err := sysctl(mib, &uname.Nodename[0], &n, nil, 0); err != nil && !errors.Is(err, ENOMEM) { return err } mib = []_C_int{CTL_KERN, KERN_OSRELEASE} n = unsafe.Sizeof(uname.Release) - if err := sysctl(mib, &uname.Release[0], &n, nil, 0); err != nil { + if err := sysctl(mib, &uname.Release[0], &n, nil, 0); err != nil && !errors.Is(err, ENOMEM) { return err } mib = []_C_int{CTL_KERN, KERN_VERSION} n = unsafe.Sizeof(uname.Version) - if err := sysctl(mib, &uname.Version[0], &n, nil, 0); err != nil { + if err := sysctl(mib, &uname.Version[0], &n, nil, 0); err != nil && !errors.Is(err, ENOMEM) { return err } @@ -205,7 +207,7 @@ func Uname(uname *Utsname) error { mib = []_C_int{CTL_HW, HW_MACHINE} n = unsafe.Sizeof(uname.Machine) - if err := sysctl(mib, &uname.Machine[0], &n, nil, 0); err != nil { + if err := sysctl(mib, &uname.Machine[0], &n, nil, 0); err != nil && !errors.Is(err, ENOMEM) { return err } diff --git a/vendor/golang.org/x/sys/unix/syscall_linux.go b/vendor/golang.org/x/sys/unix/syscall_linux.go index 0f85e29..5682e26 100644 --- a/vendor/golang.org/x/sys/unix/syscall_linux.go +++ b/vendor/golang.org/x/sys/unix/syscall_linux.go @@ -1849,6 +1849,105 @@ func Dup2(oldfd, newfd int) error { //sys Fsmount(fd int, flags int, mountAttrs int) (fsfd int, err error) //sys Fsopen(fsName string, flags int) (fd int, err error) //sys Fspick(dirfd int, pathName string, flags int) (fd int, err error) + +//sys fsconfig(fd int, cmd uint, key *byte, value *byte, aux int) (err error) + +func fsconfigCommon(fd int, cmd uint, key string, value *byte, aux int) (err error) { + var keyp *byte + if keyp, err = BytePtrFromString(key); err != nil { + return + } + return fsconfig(fd, cmd, keyp, value, aux) +} + +// FsconfigSetFlag is equivalent to fsconfig(2) called +// with cmd == FSCONFIG_SET_FLAG. +// +// fd is the filesystem context to act upon. +// key the parameter key to set. +func FsconfigSetFlag(fd int, key string) (err error) { + return fsconfigCommon(fd, FSCONFIG_SET_FLAG, key, nil, 0) +} + +// FsconfigSetString is equivalent to fsconfig(2) called +// with cmd == FSCONFIG_SET_STRING. +// +// fd is the filesystem context to act upon. +// key the parameter key to set. +// value is the parameter value to set. +func FsconfigSetString(fd int, key string, value string) (err error) { + var valuep *byte + if valuep, err = BytePtrFromString(value); err != nil { + return + } + return fsconfigCommon(fd, FSCONFIG_SET_STRING, key, valuep, 0) +} + +// FsconfigSetBinary is equivalent to fsconfig(2) called +// with cmd == FSCONFIG_SET_BINARY. +// +// fd is the filesystem context to act upon. +// key the parameter key to set. +// value is the parameter value to set. +func FsconfigSetBinary(fd int, key string, value []byte) (err error) { + if len(value) == 0 { + return EINVAL + } + return fsconfigCommon(fd, FSCONFIG_SET_BINARY, key, &value[0], len(value)) +} + +// FsconfigSetPath is equivalent to fsconfig(2) called +// with cmd == FSCONFIG_SET_PATH. +// +// fd is the filesystem context to act upon. +// key the parameter key to set. +// path is a non-empty path for specified key. +// atfd is a file descriptor at which to start lookup from or AT_FDCWD. 
+func FsconfigSetPath(fd int, key string, path string, atfd int) (err error) { + var valuep *byte + if valuep, err = BytePtrFromString(path); err != nil { + return + } + return fsconfigCommon(fd, FSCONFIG_SET_PATH, key, valuep, atfd) +} + +// FsconfigSetPathEmpty is equivalent to fsconfig(2) called +// with cmd == FSCONFIG_SET_PATH_EMPTY. The same as +// FsconfigSetPath but with AT_PATH_EMPTY implied. +func FsconfigSetPathEmpty(fd int, key string, path string, atfd int) (err error) { + var valuep *byte + if valuep, err = BytePtrFromString(path); err != nil { + return + } + return fsconfigCommon(fd, FSCONFIG_SET_PATH_EMPTY, key, valuep, atfd) +} + +// FsconfigSetFd is equivalent to fsconfig(2) called +// with cmd == FSCONFIG_SET_FD. +// +// fd is the filesystem context to act upon. +// key the parameter key to set. +// value is a file descriptor to be assigned to specified key. +func FsconfigSetFd(fd int, key string, value int) (err error) { + return fsconfigCommon(fd, FSCONFIG_SET_FD, key, nil, value) +} + +// FsconfigCreate is equivalent to fsconfig(2) called +// with cmd == FSCONFIG_CMD_CREATE. +// +// fd is the filesystem context to act upon. +func FsconfigCreate(fd int) (err error) { + return fsconfig(fd, FSCONFIG_CMD_CREATE, nil, nil, 0) +} + +// FsconfigReconfigure is equivalent to fsconfig(2) called +// with cmd == FSCONFIG_CMD_RECONFIGURE. +// +// fd is the filesystem context to act upon. +func FsconfigReconfigure(fd int) (err error) { + return fsconfig(fd, FSCONFIG_CMD_RECONFIGURE, nil, nil, 0) +} + //sys Getdents(fd int, buf []byte) (n int, err error) = SYS_GETDENTS64 //sysnb Getpgid(pid int) (pgid int, err error) diff --git a/vendor/golang.org/x/sys/unix/syscall_zos_s390x.go b/vendor/golang.org/x/sys/unix/syscall_zos_s390x.go index b473038..312ae6a 100644 --- a/vendor/golang.org/x/sys/unix/syscall_zos_s390x.go +++ b/vendor/golang.org/x/sys/unix/syscall_zos_s390x.go @@ -4,11 +4,21 @@ //go:build zos && s390x +// Many of the following syscalls are not available on all versions of z/OS. +// Some missing calls have legacy implementations/simulations but others +// will be missing completely. To achieve consistent failing behaviour on +// legacy systems, we first test the function pointer via a safeloading +// mechanism to see if the function exists on a given system. Then execution +// is branched to either continue the function call, or return an error. 
+ package unix import ( "bytes" "fmt" + "os" + "reflect" + "regexp" "runtime" "sort" "strings" @@ -17,17 +27,205 @@ import ( "unsafe" ) +//go:noescape +func initZosLibVec() + +//go:noescape +func GetZosLibVec() uintptr + +func init() { + initZosLibVec() + r0, _, _ := CallLeFuncWithPtrReturn(GetZosLibVec()+SYS_____GETENV_A<<4, uintptr(unsafe.Pointer(&([]byte("__ZOS_XSYSTRACE\x00"))[0]))) + if r0 != 0 { + n, _, _ := CallLeFuncWithPtrReturn(GetZosLibVec()+SYS___ATOI_A<<4, r0) + ZosTraceLevel = int(n) + r0, _, _ := CallLeFuncWithPtrReturn(GetZosLibVec()+SYS_____GETENV_A<<4, uintptr(unsafe.Pointer(&([]byte("__ZOS_XSYSTRACEFD\x00"))[0]))) + if r0 != 0 { + fd, _, _ := CallLeFuncWithPtrReturn(GetZosLibVec()+SYS___ATOI_A<<4, r0) + f := os.NewFile(fd, "zostracefile") + if f != nil { + ZosTracefile = f + } + } + + } +} + +//go:noescape +func CallLeFuncWithErr(funcdesc uintptr, parms ...uintptr) (ret, errno2 uintptr, err Errno) + +//go:noescape +func CallLeFuncWithPtrReturn(funcdesc uintptr, parms ...uintptr) (ret, errno2 uintptr, err Errno) + +// ------------------------------- +// pointer validity test +// good pointer returns 0 +// bad pointer returns 1 +// +//go:nosplit +func ptrtest(uintptr) uint64 + +// Load memory at ptr location with error handling if the location is invalid +// +//go:noescape +func safeload(ptr uintptr) (value uintptr, error uintptr) + const ( - O_CLOEXEC = 0 // Dummy value (not supported). - AF_LOCAL = AF_UNIX // AF_LOCAL is an alias for AF_UNIX + entrypointLocationOffset = 8 // From function descriptor + + xplinkEyecatcher = 0x00c300c500c500f1 // ".C.E.E.1" + eyecatcherOffset = 16 // From function entrypoint (negative) + ppa1LocationOffset = 8 // From function entrypoint (negative) + + nameLenOffset = 0x14 // From PPA1 start + nameOffset = 0x16 // From PPA1 start ) -func syscall_syscall(trap, a1, a2, a3 uintptr) (r1, r2 uintptr, err Errno) -func syscall_rawsyscall(trap, a1, a2, a3 uintptr) (r1, r2 uintptr, err Errno) -func syscall_syscall6(trap, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, err Errno) -func syscall_rawsyscall6(trap, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, err Errno) -func syscall_syscall9(trap, a1, a2, a3, a4, a5, a6, a7, a8, a9 uintptr) (r1, r2 uintptr, err Errno) -func syscall_rawsyscall9(trap, a1, a2, a3, a4, a5, a6, a7, a8, a9 uintptr) (r1, r2 uintptr, err Errno) +func getPpaOffset(funcptr uintptr) int64 { + entrypoint, err := safeload(funcptr + entrypointLocationOffset) + if err != 0 { + return -1 + } + + // XPLink functions have ".C.E.E.1" as the first 8 bytes (EBCDIC) + val, err := safeload(entrypoint - eyecatcherOffset) + if err != 0 { + return -1 + } + if val != xplinkEyecatcher { + return -1 + } + + ppaoff, err := safeload(entrypoint - ppa1LocationOffset) + if err != 0 { + return -1 + } + + ppaoff >>= 32 + return int64(ppaoff) +} + +//------------------------------- +// function descriptor pointer validity test +// good pointer returns 0 +// bad pointer returns 1 + +// TODO: currently mksyscall_zos_s390x.go generate empty string for funcName +// have correct funcName pass to the funcptrtest function +func funcptrtest(funcptr uintptr, funcName string) uint64 { + entrypoint, err := safeload(funcptr + entrypointLocationOffset) + if err != 0 { + return 1 + } + + ppaoff := getPpaOffset(funcptr) + if ppaoff == -1 { + return 1 + } + + // PPA1 offset value is from the start of the entire function block, not the entrypoint + ppa1 := (entrypoint - eyecatcherOffset) + uintptr(ppaoff) + + nameLen, err := safeload(ppa1 + nameLenOffset) + if err != 
0 { + return 1 + } + + nameLen >>= 48 + if nameLen > 128 { + return 1 + } + + // no function name input to argument end here + if funcName == "" { + return 0 + } + + var funcname [128]byte + for i := 0; i < int(nameLen); i += 8 { + v, err := safeload(ppa1 + nameOffset + uintptr(i)) + if err != 0 { + return 1 + } + funcname[i] = byte(v >> 56) + funcname[i+1] = byte(v >> 48) + funcname[i+2] = byte(v >> 40) + funcname[i+3] = byte(v >> 32) + funcname[i+4] = byte(v >> 24) + funcname[i+5] = byte(v >> 16) + funcname[i+6] = byte(v >> 8) + funcname[i+7] = byte(v) + } + + runtime.CallLeFuncByPtr(runtime.XplinkLibvec+SYS___E2A_L<<4, // __e2a_l + []uintptr{uintptr(unsafe.Pointer(&funcname[0])), nameLen}) + + name := string(funcname[:nameLen]) + if name != funcName { + return 1 + } + + return 0 +} + +// For detection of capabilities on a system. +// Is function descriptor f a valid function? +func isValidLeFunc(f uintptr) error { + ret := funcptrtest(f, "") + if ret != 0 { + return fmt.Errorf("Bad pointer, not an LE function ") + } + return nil +} + +// Retrieve function name from descriptor +func getLeFuncName(f uintptr) (string, error) { + // assume it has been checked, only check ppa1 validity here + entry := ((*[2]uintptr)(unsafe.Pointer(f)))[1] + preamp := ((*[4]uint32)(unsafe.Pointer(entry - eyecatcherOffset))) + + offsetPpa1 := preamp[2] + if offsetPpa1 > 0x0ffff { + return "", fmt.Errorf("PPA1 offset seems too big 0x%x\n", offsetPpa1) + } + + ppa1 := uintptr(unsafe.Pointer(preamp)) + uintptr(offsetPpa1) + res := ptrtest(ppa1) + if res != 0 { + return "", fmt.Errorf("PPA1 address not valid") + } + + size := *(*uint16)(unsafe.Pointer(ppa1 + nameLenOffset)) + if size > 128 { + return "", fmt.Errorf("Function name seems too long, length=%d\n", size) + } + + var name [128]byte + funcname := (*[128]byte)(unsafe.Pointer(ppa1 + nameOffset)) + copy(name[0:size], funcname[0:size]) + + runtime.CallLeFuncByPtr(runtime.XplinkLibvec+SYS___E2A_L<<4, // __e2a_l + []uintptr{uintptr(unsafe.Pointer(&name[0])), uintptr(size)}) + + return string(name[:size]), nil +} + +// Check z/OS version +func zosLeVersion() (version, release uint32) { + p1 := (*(*uintptr)(unsafe.Pointer(uintptr(1208)))) >> 32 + p1 = *(*uintptr)(unsafe.Pointer(uintptr(p1 + 88))) + p1 = *(*uintptr)(unsafe.Pointer(uintptr(p1 + 8))) + p1 = *(*uintptr)(unsafe.Pointer(uintptr(p1 + 984))) + vrm := *(*uint32)(unsafe.Pointer(p1 + 80)) + version = (vrm & 0x00ff0000) >> 16 + release = (vrm & 0x0000ff00) >> 8 + return +} + +// returns a zos C FILE * for stdio fd 0, 1, 2 +func ZosStdioFilep(fd int32) uintptr { + return uintptr(*(*uint64)(unsafe.Pointer(uintptr(*(*uint64)(unsafe.Pointer(uintptr(*(*uint64)(unsafe.Pointer(uintptr(uint64(*(*uint32)(unsafe.Pointer(uintptr(1208)))) + 80))) + uint64((fd+2)<<3)))))))) +} func copyStat(stat *Stat_t, statLE *Stat_LE_t) { stat.Dev = uint64(statLE.Dev) @@ -65,6 +263,21 @@ func (d *Dirent) NameString() string { } } +func DecodeData(dest []byte, sz int, val uint64) { + for i := 0; i < sz; i++ { + dest[sz-1-i] = byte((val >> (uint64(i * 8))) & 0xff) + } +} + +func EncodeData(data []byte) uint64 { + var value uint64 + sz := len(data) + for i := 0; i < sz; i++ { + value |= uint64(data[i]) << uint64(((sz - i - 1) * 8)) + } + return value +} + func (sa *SockaddrInet4) sockaddr() (unsafe.Pointer, _Socklen, error) { if sa.Port < 0 || sa.Port > 0xFFFF { return nil, 0, EINVAL @@ -74,7 +287,9 @@ func (sa *SockaddrInet4) sockaddr() (unsafe.Pointer, _Socklen, error) { p := (*[2]byte)(unsafe.Pointer(&sa.raw.Port)) p[0] = byte(sa.Port 
>> 8) p[1] = byte(sa.Port) - sa.raw.Addr = sa.Addr + for i := 0; i < len(sa.Addr); i++ { + sa.raw.Addr[i] = sa.Addr[i] + } return unsafe.Pointer(&sa.raw), _Socklen(sa.raw.Len), nil } @@ -88,7 +303,9 @@ func (sa *SockaddrInet6) sockaddr() (unsafe.Pointer, _Socklen, error) { p[0] = byte(sa.Port >> 8) p[1] = byte(sa.Port) sa.raw.Scope_id = sa.ZoneId - sa.raw.Addr = sa.Addr + for i := 0; i < len(sa.Addr); i++ { + sa.raw.Addr[i] = sa.Addr[i] + } return unsafe.Pointer(&sa.raw), _Socklen(sa.raw.Len), nil } @@ -146,7 +363,9 @@ func anyToSockaddr(_ int, rsa *RawSockaddrAny) (Sockaddr, error) { sa := new(SockaddrInet4) p := (*[2]byte)(unsafe.Pointer(&pp.Port)) sa.Port = int(p[0])<<8 + int(p[1]) - sa.Addr = pp.Addr + for i := 0; i < len(sa.Addr); i++ { + sa.Addr[i] = pp.Addr[i] + } return sa, nil case AF_INET6: @@ -155,7 +374,9 @@ func anyToSockaddr(_ int, rsa *RawSockaddrAny) (Sockaddr, error) { p := (*[2]byte)(unsafe.Pointer(&pp.Port)) sa.Port = int(p[0])<<8 + int(p[1]) sa.ZoneId = pp.Scope_id - sa.Addr = pp.Addr + for i := 0; i < len(sa.Addr); i++ { + sa.Addr[i] = pp.Addr[i] + } return sa, nil } return nil, EAFNOSUPPORT @@ -177,6 +398,43 @@ func Accept(fd int) (nfd int, sa Sockaddr, err error) { return } +func Accept4(fd int, flags int) (nfd int, sa Sockaddr, err error) { + var rsa RawSockaddrAny + var len _Socklen = SizeofSockaddrAny + nfd, err = accept4(fd, &rsa, &len, flags) + if err != nil { + return + } + if len > SizeofSockaddrAny { + panic("RawSockaddrAny too small") + } + // TODO(neeilan): Remove 0 in call + sa, err = anyToSockaddr(0, &rsa) + if err != nil { + Close(nfd) + nfd = 0 + } + return +} + +func Ctermid() (tty string, err error) { + var termdev [1025]byte + runtime.EnterSyscall() + r0, err2, err1 := CallLeFuncWithPtrReturn(GetZosLibVec()+SYS___CTERMID_A<<4, uintptr(unsafe.Pointer(&termdev[0]))) + runtime.ExitSyscall() + if r0 == 0 { + return "", fmt.Errorf("%s (errno2=0x%x)\n", err1.Error(), err2) + } + s := string(termdev[:]) + idx := strings.Index(s, string(rune(0))) + if idx == -1 { + tty = s + } else { + tty = s[:idx] + } + return +} + func (iov *Iovec) SetLen(length int) { iov.Len = uint64(length) } @@ -190,10 +448,16 @@ func (cmsg *Cmsghdr) SetLen(length int) { } //sys fcntl(fd int, cmd int, arg int) (val int, err error) +//sys Flistxattr(fd int, dest []byte) (sz int, err error) = SYS___FLISTXATTR_A +//sys Fremovexattr(fd int, attr string) (err error) = SYS___FREMOVEXATTR_A //sys read(fd int, p []byte) (n int, err error) //sys write(fd int, p []byte) (n int, err error) +//sys Fgetxattr(fd int, attr string, dest []byte) (sz int, err error) = SYS___FGETXATTR_A +//sys Fsetxattr(fd int, attr string, data []byte, flag int) (err error) = SYS___FSETXATTR_A + //sys accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) = SYS___ACCEPT_A +//sys accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) = SYS___ACCEPT4_A //sys bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) = SYS___BIND_A //sys connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) = SYS___CONNECT_A //sysnb getgroups(n int, list *_Gid_t) (nn int, err error) @@ -204,6 +468,7 @@ func (cmsg *Cmsghdr) SetLen(length int) { //sysnb socketpair(domain int, typ int, proto int, fd *[2]int32) (err error) //sysnb getpeername(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error) = SYS___GETPEERNAME_A //sysnb getsockname(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error) = SYS___GETSOCKNAME_A +//sys Removexattr(path string, attr string) (err 
error) = SYS___REMOVEXATTR_A //sys recvfrom(fd int, p []byte, flags int, from *RawSockaddrAny, fromlen *_Socklen) (n int, err error) = SYS___RECVFROM_A //sys sendto(s int, buf []byte, flags int, to unsafe.Pointer, addrlen _Socklen) (err error) = SYS___SENDTO_A //sys recvmsg(s int, msg *Msghdr, flags int) (n int, err error) = SYS___RECVMSG_A @@ -212,6 +477,10 @@ func (cmsg *Cmsghdr) SetLen(length int) { //sys munmap(addr uintptr, length uintptr) (err error) = SYS_MUNMAP //sys ioctl(fd int, req int, arg uintptr) (err error) = SYS_IOCTL //sys ioctlPtr(fd int, req int, arg unsafe.Pointer) (err error) = SYS_IOCTL +//sys shmat(id int, addr uintptr, flag int) (ret uintptr, err error) = SYS_SHMAT +//sys shmctl(id int, cmd int, buf *SysvShmDesc) (result int, err error) = SYS_SHMCTL64 +//sys shmdt(addr uintptr) (err error) = SYS_SHMDT +//sys shmget(key int, size int, flag int) (id int, err error) = SYS_SHMGET //sys Access(path string, mode uint32) (err error) = SYS___ACCESS_A //sys Chdir(path string) (err error) = SYS___CHDIR_A @@ -220,14 +489,31 @@ func (cmsg *Cmsghdr) SetLen(length int) { //sys Creat(path string, mode uint32) (fd int, err error) = SYS___CREAT_A //sys Dup(oldfd int) (fd int, err error) //sys Dup2(oldfd int, newfd int) (err error) +//sys Dup3(oldfd int, newfd int, flags int) (err error) = SYS_DUP3 +//sys Dirfd(dirp uintptr) (fd int, err error) = SYS_DIRFD +//sys EpollCreate(size int) (fd int, err error) = SYS_EPOLL_CREATE +//sys EpollCreate1(flags int) (fd int, err error) = SYS_EPOLL_CREATE1 +//sys EpollCtl(epfd int, op int, fd int, event *EpollEvent) (err error) = SYS_EPOLL_CTL +//sys EpollPwait(epfd int, events []EpollEvent, msec int, sigmask *int) (n int, err error) = SYS_EPOLL_PWAIT +//sys EpollWait(epfd int, events []EpollEvent, msec int) (n int, err error) = SYS_EPOLL_WAIT //sys Errno2() (er2 int) = SYS___ERRNO2 -//sys Err2ad() (eadd *int) = SYS___ERR2AD +//sys Eventfd(initval uint, flags int) (fd int, err error) = SYS_EVENTFD //sys Exit(code int) +//sys Faccessat(dirfd int, path string, mode uint32, flags int) (err error) = SYS___FACCESSAT_A + +func Faccessat2(dirfd int, path string, mode uint32, flags int) (err error) { + return Faccessat(dirfd, path, mode, flags) +} + //sys Fchdir(fd int) (err error) //sys Fchmod(fd int, mode uint32) (err error) +//sys Fchmodat(dirfd int, path string, mode uint32, flags int) (err error) = SYS___FCHMODAT_A //sys Fchown(fd int, uid int, gid int) (err error) +//sys Fchownat(fd int, path string, uid int, gid int, flags int) (err error) = SYS___FCHOWNAT_A //sys FcntlInt(fd uintptr, cmd int, arg int) (retval int, err error) = SYS_FCNTL +//sys Fdatasync(fd int) (err error) = SYS_FDATASYNC //sys fstat(fd int, stat *Stat_LE_t) (err error) +//sys fstatat(dirfd int, path string, stat *Stat_LE_t, flags int) (err error) = SYS___FSTATAT_A func Fstat(fd int, stat *Stat_t) (err error) { var statLE Stat_LE_t @@ -236,28 +522,208 @@ func Fstat(fd int, stat *Stat_t) (err error) { return } +func Fstatat(dirfd int, path string, stat *Stat_t, flags int) (err error) { + var statLE Stat_LE_t + err = fstatat(dirfd, path, &statLE, flags) + copyStat(stat, &statLE) + return +} + +func impl_Getxattr(path string, attr string, dest []byte) (sz int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(attr) + if err != nil { + return + } + var _p2 unsafe.Pointer + if len(dest) > 0 { + _p2 = unsafe.Pointer(&dest[0]) + } else { + _p2 = unsafe.Pointer(&_zero) + } + r0, e2, e1 := 
CallLeFuncWithErr(GetZosLibVec()+SYS___GETXATTR_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(_p2), uintptr(len(dest))) + sz = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_GetxattrAddr() *(func(path string, attr string, dest []byte) (sz int, err error)) + +var Getxattr = enter_Getxattr + +func enter_Getxattr(path string, attr string, dest []byte) (sz int, err error) { + funcref := get_GetxattrAddr() + if validGetxattr() { + *funcref = impl_Getxattr + } else { + *funcref = error_Getxattr + } + return (*funcref)(path, attr, dest) +} + +func error_Getxattr(path string, attr string, dest []byte) (sz int, err error) { + return -1, ENOSYS +} + +func validGetxattr() bool { + if funcptrtest(GetZosLibVec()+SYS___GETXATTR_A<<4, "") == 0 { + if name, err := getLeFuncName(GetZosLibVec() + SYS___GETXATTR_A<<4); err == nil { + return name == "__getxattr_a" + } + } + return false +} + +//sys Lgetxattr(link string, attr string, dest []byte) (sz int, err error) = SYS___LGETXATTR_A +//sys Lsetxattr(path string, attr string, data []byte, flags int) (err error) = SYS___LSETXATTR_A + +func impl_Setxattr(path string, attr string, data []byte, flags int) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(attr) + if err != nil { + return + } + var _p2 unsafe.Pointer + if len(data) > 0 { + _p2 = unsafe.Pointer(&data[0]) + } else { + _p2 = unsafe.Pointer(&_zero) + } + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___SETXATTR_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(_p2), uintptr(len(data)), uintptr(flags)) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_SetxattrAddr() *(func(path string, attr string, data []byte, flags int) (err error)) + +var Setxattr = enter_Setxattr + +func enter_Setxattr(path string, attr string, data []byte, flags int) (err error) { + funcref := get_SetxattrAddr() + if validSetxattr() { + *funcref = impl_Setxattr + } else { + *funcref = error_Setxattr + } + return (*funcref)(path, attr, data, flags) +} + +func error_Setxattr(path string, attr string, data []byte, flags int) (err error) { + return ENOSYS +} + +func validSetxattr() bool { + if funcptrtest(GetZosLibVec()+SYS___SETXATTR_A<<4, "") == 0 { + if name, err := getLeFuncName(GetZosLibVec() + SYS___SETXATTR_A<<4); err == nil { + return name == "__setxattr_a" + } + } + return false +} + +//sys Fstatfs(fd int, buf *Statfs_t) (err error) = SYS_FSTATFS //sys Fstatvfs(fd int, stat *Statvfs_t) (err error) = SYS_FSTATVFS //sys Fsync(fd int) (err error) +//sys Futimes(fd int, tv []Timeval) (err error) = SYS_FUTIMES +//sys Futimesat(dirfd int, path string, tv []Timeval) (err error) = SYS___FUTIMESAT_A //sys Ftruncate(fd int, length int64) (err error) -//sys Getpagesize() (pgsize int) = SYS_GETPAGESIZE +//sys Getrandom(buf []byte, flags int) (n int, err error) = SYS_GETRANDOM +//sys InotifyInit() (fd int, err error) = SYS_INOTIFY_INIT +//sys InotifyInit1(flags int) (fd int, err error) = SYS_INOTIFY_INIT1 +//sys InotifyAddWatch(fd int, pathname string, mask uint32) (watchdesc int, err error) = SYS___INOTIFY_ADD_WATCH_A +//sys InotifyRmWatch(fd int, watchdesc uint32) (success int, err error) = SYS_INOTIFY_RM_WATCH +//sys Listxattr(path string, dest []byte) (sz int, err error) = SYS___LISTXATTR_A +//sys Llistxattr(path string, dest []byte) (sz int, err error) = SYS___LLISTXATTR_A +//sys 
Lremovexattr(path string, attr string) (err error) = SYS___LREMOVEXATTR_A +//sys Lutimes(path string, tv []Timeval) (err error) = SYS___LUTIMES_A //sys Mprotect(b []byte, prot int) (err error) = SYS_MPROTECT //sys Msync(b []byte, flags int) (err error) = SYS_MSYNC +//sys Console2(cmsg *ConsMsg2, modstr *byte, concmd *uint32) (err error) = SYS___CONSOLE2 + +// Pipe2 begin + +//go:nosplit +func getPipe2Addr() *(func([]int, int) error) + +var Pipe2 = pipe2Enter + +func pipe2Enter(p []int, flags int) (err error) { + if funcptrtest(GetZosLibVec()+SYS_PIPE2<<4, "") == 0 { + *getPipe2Addr() = pipe2Impl + } else { + *getPipe2Addr() = pipe2Error + } + return (*getPipe2Addr())(p, flags) +} + +func pipe2Impl(p []int, flags int) (err error) { + var pp [2]_C_int + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_PIPE2<<4, uintptr(unsafe.Pointer(&pp[0])), uintptr(flags)) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } else { + p[0] = int(pp[0]) + p[1] = int(pp[1]) + } + return +} +func pipe2Error(p []int, flags int) (err error) { + return fmt.Errorf("Pipe2 is not available on this system") +} + +// Pipe2 end + //sys Poll(fds []PollFd, timeout int) (n int, err error) = SYS_POLL + +func Readdir(dir uintptr) (dirent *Dirent, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___READDIR_A<<4, uintptr(dir)) + runtime.ExitSyscall() + dirent = (*Dirent)(unsafe.Pointer(r0)) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//sys Readdir_r(dirp uintptr, entry *direntLE, result **direntLE) (err error) = SYS___READDIR_R_A +//sys Statfs(path string, buf *Statfs_t) (err error) = SYS___STATFS_A +//sys Syncfs(fd int) (err error) = SYS_SYNCFS //sys Times(tms *Tms) (ticks uintptr, err error) = SYS_TIMES //sys W_Getmntent(buff *byte, size int) (lastsys int, err error) = SYS_W_GETMNTENT //sys W_Getmntent_A(buff *byte, size int) (lastsys int, err error) = SYS___W_GETMNTENT_A //sys mount_LE(path string, filesystem string, fstype string, mtm uint32, parmlen int32, parm string) (err error) = SYS___MOUNT_A -//sys unmount(filesystem string, mtm int) (err error) = SYS___UMOUNT_A +//sys unmount_LE(filesystem string, mtm int) (err error) = SYS___UMOUNT_A //sys Chroot(path string) (err error) = SYS___CHROOT_A //sys Select(nmsgsfds int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (ret int, err error) = SYS_SELECT -//sysnb Uname(buf *Utsname) (err error) = SYS___UNAME_A +//sysnb Uname(buf *Utsname) (err error) = SYS_____OSNAME_A +//sys Unshare(flags int) (err error) = SYS_UNSHARE func Ptsname(fd int) (name string, err error) { - r0, _, e1 := syscall_syscall(SYS___PTSNAME_A, uintptr(fd), 0, 0) - name = u2s(unsafe.Pointer(r0)) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithPtrReturn(GetZosLibVec()+SYS___PTSNAME_A<<4, uintptr(fd)) + runtime.ExitSyscall() + if r0 == 0 { + err = errnoErr2(e1, e2) + } else { + name = u2s(unsafe.Pointer(r0)) } return } @@ -272,13 +738,19 @@ func u2s(cstr unsafe.Pointer) string { } func Close(fd int) (err error) { - _, _, e1 := syscall_syscall(SYS_CLOSE, uintptr(fd), 0, 0) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_CLOSE<<4, uintptr(fd)) + runtime.ExitSyscall() for i := 0; e1 == EAGAIN && i < 10; i++ { - _, _, _ = syscall_syscall(SYS_USLEEP, uintptr(10), 0, 0) - _, _, e1 = syscall_syscall(SYS_CLOSE, uintptr(fd), 0, 0) + runtime.EnterSyscall() + CallLeFuncWithErr(GetZosLibVec()+SYS_USLEEP<<4, uintptr(10)) + runtime.ExitSyscall() + runtime.EnterSyscall() + r0, e2, 
e1 = CallLeFuncWithErr(GetZosLibVec()+SYS_CLOSE<<4, uintptr(fd)) + runtime.ExitSyscall() } - if e1 != 0 { - err = errnoErr(e1) + if r0 != 0 { + err = errnoErr2(e1, e2) } return } @@ -288,9 +760,15 @@ func Madvise(b []byte, advice int) (err error) { return } +func Mmap(fd int, offset int64, length int, prot int, flags int) (data []byte, err error) { + return mapper.Mmap(fd, offset, length, prot, flags) +} + +func Munmap(b []byte) (err error) { + return mapper.Munmap(b) +} + //sys Gethostname(buf []byte) (err error) = SYS___GETHOSTNAME_A -//sysnb Getegid() (egid int) -//sysnb Geteuid() (uid int) //sysnb Getgid() (gid int) //sysnb Getpid() (pid int) //sysnb Getpgid(pid int) (pgid int, err error) = SYS_GETPGID @@ -317,11 +795,14 @@ func Getrusage(who int, rusage *Rusage) (err error) { return } +//sys Getegid() (egid int) = SYS_GETEGID +//sys Geteuid() (euid int) = SYS_GETEUID //sysnb Getsid(pid int) (sid int, err error) = SYS_GETSID //sysnb Getuid() (uid int) //sysnb Kill(pid int, sig Signal) (err error) //sys Lchown(path string, uid int, gid int) (err error) = SYS___LCHOWN_A //sys Link(path string, link string) (err error) = SYS___LINK_A +//sys Linkat(oldDirFd int, oldPath string, newDirFd int, newPath string, flags int) (err error) = SYS___LINKAT_A //sys Listen(s int, n int) (err error) //sys lstat(path string, stat *Stat_LE_t) (err error) = SYS___LSTAT_A @@ -332,15 +813,150 @@ func Lstat(path string, stat *Stat_t) (err error) { return } +// for checking symlinks begins with $VERSION/ $SYSNAME/ $SYSSYMR/ $SYSSYMA/ +func isSpecialPath(path []byte) (v bool) { + var special = [4][8]byte{ + [8]byte{'V', 'E', 'R', 'S', 'I', 'O', 'N', '/'}, + [8]byte{'S', 'Y', 'S', 'N', 'A', 'M', 'E', '/'}, + [8]byte{'S', 'Y', 'S', 'S', 'Y', 'M', 'R', '/'}, + [8]byte{'S', 'Y', 'S', 'S', 'Y', 'M', 'A', '/'}} + + var i, j int + for i = 0; i < len(special); i++ { + for j = 0; j < len(special[i]); j++ { + if path[j] != special[i][j] { + break + } + } + if j == len(special[i]) { + return true + } + } + return false +} + +func realpath(srcpath string, abspath []byte) (pathlen int, errno int) { + var source [1024]byte + copy(source[:], srcpath) + source[len(srcpath)] = 0 + ret := runtime.CallLeFuncByPtr(runtime.XplinkLibvec+SYS___REALPATH_A<<4, //__realpath_a() + []uintptr{uintptr(unsafe.Pointer(&source[0])), + uintptr(unsafe.Pointer(&abspath[0]))}) + if ret != 0 { + index := bytes.IndexByte(abspath[:], byte(0)) + if index != -1 { + return index, 0 + } + } else { + errptr := (*int)(unsafe.Pointer(runtime.CallLeFuncByPtr(runtime.XplinkLibvec+SYS___ERRNO<<4, []uintptr{}))) //__errno() + return 0, *errptr + } + return 0, 245 // EBADDATA 245 +} + +func Readlink(path string, buf []byte) (n int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + var _p1 unsafe.Pointer + if len(buf) > 0 { + _p1 = unsafe.Pointer(&buf[0]) + } else { + _p1 = unsafe.Pointer(&_zero) + } + n = int(runtime.CallLeFuncByPtr(runtime.XplinkLibvec+SYS___READLINK_A<<4, + []uintptr{uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(buf))})) + runtime.KeepAlive(unsafe.Pointer(_p0)) + if n == -1 { + value := *(*int32)(unsafe.Pointer(runtime.CallLeFuncByPtr(runtime.XplinkLibvec+SYS___ERRNO<<4, []uintptr{}))) + err = errnoErr(Errno(value)) + } else { + if buf[0] == '$' { + if isSpecialPath(buf[1:9]) { + cnt, err1 := realpath(path, buf) + if err1 == 0 { + n = cnt + } + } + } + } + return +} + +func impl_Readlinkat(dirfd int, path string, buf []byte) (n int, err error) { + var _p0 *byte + _p0, err = 
BytePtrFromString(path) + if err != nil { + return + } + var _p1 unsafe.Pointer + if len(buf) > 0 { + _p1 = unsafe.Pointer(&buf[0]) + } else { + _p1 = unsafe.Pointer(&_zero) + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___READLINKAT_A<<4, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(buf))) + runtime.ExitSyscall() + n = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + return n, err + } else { + if buf[0] == '$' { + if isSpecialPath(buf[1:9]) { + cnt, err1 := realpath(path, buf) + if err1 == 0 { + n = cnt + } + } + } + } + return +} + +//go:nosplit +func get_ReadlinkatAddr() *(func(dirfd int, path string, buf []byte) (n int, err error)) + +var Readlinkat = enter_Readlinkat + +func enter_Readlinkat(dirfd int, path string, buf []byte) (n int, err error) { + funcref := get_ReadlinkatAddr() + if funcptrtest(GetZosLibVec()+SYS___READLINKAT_A<<4, "") == 0 { + *funcref = impl_Readlinkat + } else { + *funcref = error_Readlinkat + } + return (*funcref)(dirfd, path, buf) +} + +func error_Readlinkat(dirfd int, path string, buf []byte) (n int, err error) { + n = -1 + err = ENOSYS + return +} + //sys Mkdir(path string, mode uint32) (err error) = SYS___MKDIR_A +//sys Mkdirat(dirfd int, path string, mode uint32) (err error) = SYS___MKDIRAT_A //sys Mkfifo(path string, mode uint32) (err error) = SYS___MKFIFO_A //sys Mknod(path string, mode uint32, dev int) (err error) = SYS___MKNOD_A +//sys Mknodat(dirfd int, path string, mode uint32, dev int) (err error) = SYS___MKNODAT_A +//sys PivotRoot(newroot string, oldroot string) (err error) = SYS___PIVOT_ROOT_A //sys Pread(fd int, p []byte, offset int64) (n int, err error) //sys Pwrite(fd int, p []byte, offset int64) (n int, err error) -//sys Readlink(path string, buf []byte) (n int, err error) = SYS___READLINK_A +//sys Prctl(option int, arg2 uintptr, arg3 uintptr, arg4 uintptr, arg5 uintptr) (err error) = SYS___PRCTL_A +//sysnb Prlimit(pid int, resource int, newlimit *Rlimit, old *Rlimit) (err error) = SYS_PRLIMIT //sys Rename(from string, to string) (err error) = SYS___RENAME_A +//sys Renameat(olddirfd int, oldpath string, newdirfd int, newpath string) (err error) = SYS___RENAMEAT_A +//sys Renameat2(olddirfd int, oldpath string, newdirfd int, newpath string, flags uint) (err error) = SYS___RENAMEAT2_A //sys Rmdir(path string) (err error) = SYS___RMDIR_A //sys Seek(fd int, offset int64, whence int) (off int64, err error) = SYS_LSEEK +//sys Setegid(egid int) (err error) = SYS_SETEGID +//sys Seteuid(euid int) (err error) = SYS_SETEUID +//sys Sethostname(p []byte) (err error) = SYS___SETHOSTNAME_A +//sys Setns(fd int, nstype int) (err error) = SYS_SETNS //sys Setpriority(which int, who int, prio int) (err error) //sysnb Setpgid(pid int, pgid int) (err error) = SYS_SETPGID //sysnb Setrlimit(resource int, lim *Rlimit) (err error) @@ -360,32 +976,57 @@ func Stat(path string, sta *Stat_t) (err error) { } //sys Symlink(path string, link string) (err error) = SYS___SYMLINK_A +//sys Symlinkat(oldPath string, dirfd int, newPath string) (err error) = SYS___SYMLINKAT_A //sys Sync() = SYS_SYNC //sys Truncate(path string, length int64) (err error) = SYS___TRUNCATE_A //sys Tcgetattr(fildes int, termptr *Termios) (err error) = SYS_TCGETATTR //sys Tcsetattr(fildes int, when int, termptr *Termios) (err error) = SYS_TCSETATTR //sys Umask(mask int) (oldmask int) //sys Unlink(path string) (err error) = SYS___UNLINK_A +//sys Unlinkat(dirfd int, path string, flags int) (err error) = SYS___UNLINKAT_A //sys Utime(path 
string, utim *Utimbuf) (err error) = SYS___UTIME_A //sys open(path string, mode int, perm uint32) (fd int, err error) = SYS___OPEN_A func Open(path string, mode int, perm uint32) (fd int, err error) { + if mode&O_ACCMODE == 0 { + mode |= O_RDONLY + } return open(path, mode, perm) } -func Mkfifoat(dirfd int, path string, mode uint32) (err error) { - wd, err := Getwd() - if err != nil { - return err - } +//sys openat(dirfd int, path string, flags int, mode uint32) (fd int, err error) = SYS___OPENAT_A - if err := Fchdir(dirfd); err != nil { - return err +func Openat(dirfd int, path string, flags int, mode uint32) (fd int, err error) { + if flags&O_ACCMODE == 0 { + flags |= O_RDONLY } - defer Chdir(wd) + return openat(dirfd, path, flags, mode) +} - return Mkfifo(path, mode) +//sys openat2(dirfd int, path string, open_how *OpenHow, size int) (fd int, err error) = SYS___OPENAT2_A + +func Openat2(dirfd int, path string, how *OpenHow) (fd int, err error) { + if how.Flags&O_ACCMODE == 0 { + how.Flags |= O_RDONLY + } + return openat2(dirfd, path, how, SizeofOpenHow) +} + +func ZosFdToPath(dirfd int) (path string, err error) { + var buffer [1024]byte + runtime.EnterSyscall() + ret, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_W_IOCTL<<4, uintptr(dirfd), 17, 1024, uintptr(unsafe.Pointer(&buffer[0]))) + runtime.ExitSyscall() + if ret == 0 { + zb := bytes.IndexByte(buffer[:], 0) + if zb == -1 { + zb = len(buffer) + } + CallLeFuncWithErr(GetZosLibVec()+SYS___E2A_L<<4, uintptr(unsafe.Pointer(&buffer[0])), uintptr(zb)) + return string(buffer[:zb]), nil + } + return "", errnoErr2(e1, e2) } //sys remove(path string) (err error) @@ -403,10 +1044,12 @@ func Getcwd(buf []byte) (n int, err error) { } else { p = unsafe.Pointer(&_zero) } - _, _, e := syscall_syscall(SYS___GETCWD_A, uintptr(p), uintptr(len(buf)), 0) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithPtrReturn(GetZosLibVec()+SYS___GETCWD_A<<4, uintptr(p), uintptr(len(buf))) + runtime.ExitSyscall() n = clen(buf) + 1 - if e != 0 { - err = errnoErr(e) + if r0 == 0 { + err = errnoErr2(e1, e2) } return } @@ -520,9 +1163,41 @@ func (w WaitStatus) StopSignal() Signal { func (w WaitStatus) TrapCause() int { return -1 } +//sys waitid(idType int, id int, info *Siginfo, options int) (err error) + +func Waitid(idType int, id int, info *Siginfo, options int, rusage *Rusage) (err error) { + return waitid(idType, id, info, options) +} + //sys waitpid(pid int, wstatus *_C_int, options int) (wpid int, err error) -func Wait4(pid int, wstatus *WaitStatus, options int, rusage *Rusage) (wpid int, err error) { +func impl_Wait4(pid int, wstatus *WaitStatus, options int, rusage *Rusage) (wpid int, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_WAIT4<<4, uintptr(pid), uintptr(unsafe.Pointer(wstatus)), uintptr(options), uintptr(unsafe.Pointer(rusage))) + runtime.ExitSyscall() + wpid = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_Wait4Addr() *(func(pid int, wstatus *WaitStatus, options int, rusage *Rusage) (wpid int, err error)) + +var Wait4 = enter_Wait4 + +func enter_Wait4(pid int, wstatus *WaitStatus, options int, rusage *Rusage) (wpid int, err error) { + funcref := get_Wait4Addr() + if funcptrtest(GetZosLibVec()+SYS_WAIT4<<4, "") == 0 { + *funcref = impl_Wait4 + } else { + *funcref = legacyWait4 + } + return (*funcref)(pid, wstatus, options, rusage) +} + +func legacyWait4(pid int, wstatus *WaitStatus, options int, rusage *Rusage) (wpid int, err error) { // 
TODO(mundaym): z/OS doesn't have wait4. I don't think getrusage does what we want. // At the moment rusage will not be touched. var status _C_int @@ -571,23 +1246,62 @@ func Pipe(p []int) (err error) { } var pp [2]_C_int err = pipe(&pp) - if err == nil { - p[0] = int(pp[0]) - p[1] = int(pp[1]) - } + p[0] = int(pp[0]) + p[1] = int(pp[1]) return } //sys utimes(path string, timeval *[2]Timeval) (err error) = SYS___UTIMES_A func Utimes(path string, tv []Timeval) (err error) { + if tv == nil { + return utimes(path, nil) + } if len(tv) != 2 { return EINVAL } return utimes(path, (*[2]Timeval)(unsafe.Pointer(&tv[0]))) } -func UtimesNano(path string, ts []Timespec) error { +//sys utimensat(dirfd int, path string, ts *[2]Timespec, flags int) (err error) = SYS___UTIMENSAT_A + +func validUtimensat() bool { + if funcptrtest(GetZosLibVec()+SYS___UTIMENSAT_A<<4, "") == 0 { + if name, err := getLeFuncName(GetZosLibVec() + SYS___UTIMENSAT_A<<4); err == nil { + return name == "__utimensat_a" + } + } + return false +} + +// Begin UtimesNano + +//go:nosplit +func get_UtimesNanoAddr() *(func(path string, ts []Timespec) (err error)) + +var UtimesNano = enter_UtimesNano + +func enter_UtimesNano(path string, ts []Timespec) (err error) { + funcref := get_UtimesNanoAddr() + if validUtimensat() { + *funcref = utimesNanoImpl + } else { + *funcref = legacyUtimesNano + } + return (*funcref)(path, ts) +} + +func utimesNanoImpl(path string, ts []Timespec) (err error) { + if ts == nil { + return utimensat(AT_FDCWD, path, nil, 0) + } + if len(ts) != 2 { + return EINVAL + } + return utimensat(AT_FDCWD, path, (*[2]Timespec)(unsafe.Pointer(&ts[0])), 0) +} + +func legacyUtimesNano(path string, ts []Timespec) (err error) { if len(ts) != 2 { return EINVAL } @@ -600,6 +1314,70 @@ func UtimesNano(path string, ts []Timespec) error { return utimes(path, (*[2]Timeval)(unsafe.Pointer(&tv[0]))) } +// End UtimesNano + +// Begin UtimesNanoAt + +//go:nosplit +func get_UtimesNanoAtAddr() *(func(dirfd int, path string, ts []Timespec, flags int) (err error)) + +var UtimesNanoAt = enter_UtimesNanoAt + +func enter_UtimesNanoAt(dirfd int, path string, ts []Timespec, flags int) (err error) { + funcref := get_UtimesNanoAtAddr() + if validUtimensat() { + *funcref = utimesNanoAtImpl + } else { + *funcref = legacyUtimesNanoAt + } + return (*funcref)(dirfd, path, ts, flags) +} + +func utimesNanoAtImpl(dirfd int, path string, ts []Timespec, flags int) (err error) { + if ts == nil { + return utimensat(dirfd, path, nil, flags) + } + if len(ts) != 2 { + return EINVAL + } + return utimensat(dirfd, path, (*[2]Timespec)(unsafe.Pointer(&ts[0])), flags) +} + +func legacyUtimesNanoAt(dirfd int, path string, ts []Timespec, flags int) (err error) { + if path[0] != '/' { + dirPath, err := ZosFdToPath(dirfd) + if err != nil { + return err + } + path = dirPath + "/" + path + } + if flags == AT_SYMLINK_NOFOLLOW { + if len(ts) != 2 { + return EINVAL + } + + if ts[0].Nsec >= 5e8 { + ts[0].Sec++ + } + ts[0].Nsec = 0 + if ts[1].Nsec >= 5e8 { + ts[1].Sec++ + } + ts[1].Nsec = 0 + + // Not as efficient as it could be because Timespec and + // Timeval have different types in the different OSes + tv := []Timeval{ + NsecToTimeval(TimespecToNsec(ts[0])), + NsecToTimeval(TimespecToNsec(ts[1])), + } + return Lutimes(path, tv) + } + return UtimesNano(path, ts) +} + +// End UtimesNanoAt + func Getsockname(fd int) (sa Sockaddr, err error) { var rsa RawSockaddrAny var len _Socklen = SizeofSockaddrAny @@ -1191,10 +1969,13 @@ func Opendir(name string) (uintptr, error) { if err != nil 
{ return 0, err } - dir, _, e := syscall_syscall(SYS___OPENDIR_A, uintptr(unsafe.Pointer(p)), 0, 0) + err = nil + runtime.EnterSyscall() + dir, e2, e1 := CallLeFuncWithPtrReturn(GetZosLibVec()+SYS___OPENDIR_A<<4, uintptr(unsafe.Pointer(p))) + runtime.ExitSyscall() runtime.KeepAlive(unsafe.Pointer(p)) - if e != 0 { - err = errnoErr(e) + if dir == 0 { + err = errnoErr2(e1, e2) } return dir, err } @@ -1202,51 +1983,27 @@ func Opendir(name string) (uintptr, error) { // clearsyscall.Errno resets the errno value to 0. func clearErrno() -func Readdir(dir uintptr) (*Dirent, error) { - var ent Dirent - var res uintptr - // __readdir_r_a returns errno at the end of the directory stream, rather than 0. - // Therefore to avoid false positives we clear errno before calling it. - - // TODO(neeilan): Commented this out to get sys/unix compiling on z/OS. Uncomment and fix. Error: "undefined: clearsyscall" - //clearsyscall.Errno() // TODO(mundaym): check pre-emption rules. - - e, _, _ := syscall_syscall(SYS___READDIR_R_A, dir, uintptr(unsafe.Pointer(&ent)), uintptr(unsafe.Pointer(&res))) - var err error - if e != 0 { - err = errnoErr(Errno(e)) - } - if res == 0 { - return nil, err - } - return &ent, err -} - -func readdir_r(dirp uintptr, entry *direntLE, result **direntLE) (err error) { - r0, _, e1 := syscall_syscall(SYS___READDIR_R_A, dirp, uintptr(unsafe.Pointer(entry)), uintptr(unsafe.Pointer(result))) - if int64(r0) == -1 { - err = errnoErr(Errno(e1)) - } - return -} - func Closedir(dir uintptr) error { - _, _, e := syscall_syscall(SYS_CLOSEDIR, dir, 0, 0) - if e != 0 { - return errnoErr(e) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_CLOSEDIR<<4, dir) + runtime.ExitSyscall() + if r0 != 0 { + return errnoErr2(e1, e2) } return nil } func Seekdir(dir uintptr, pos int) { - _, _, _ = syscall_syscall(SYS_SEEKDIR, dir, uintptr(pos), 0) + runtime.EnterSyscall() + CallLeFuncWithErr(GetZosLibVec()+SYS_SEEKDIR<<4, dir, uintptr(pos)) + runtime.ExitSyscall() } func Telldir(dir uintptr) (int, error) { - p, _, e := syscall_syscall(SYS_TELLDIR, dir, 0, 0) + p, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_TELLDIR<<4, dir) pos := int(p) - if pos == -1 { - return pos, errnoErr(e) + if int64(p) == -1 { + return pos, errnoErr2(e1, e2) } return pos, nil } @@ -1261,19 +2018,55 @@ func FcntlFlock(fd uintptr, cmd int, lk *Flock_t) error { *(*int64)(unsafe.Pointer(&flock[4])) = lk.Start *(*int64)(unsafe.Pointer(&flock[12])) = lk.Len *(*int32)(unsafe.Pointer(&flock[20])) = lk.Pid - _, _, errno := syscall_syscall(SYS_FCNTL, fd, uintptr(cmd), uintptr(unsafe.Pointer(&flock))) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_FCNTL<<4, fd, uintptr(cmd), uintptr(unsafe.Pointer(&flock))) + runtime.ExitSyscall() lk.Type = *(*int16)(unsafe.Pointer(&flock[0])) lk.Whence = *(*int16)(unsafe.Pointer(&flock[2])) lk.Start = *(*int64)(unsafe.Pointer(&flock[4])) lk.Len = *(*int64)(unsafe.Pointer(&flock[12])) lk.Pid = *(*int32)(unsafe.Pointer(&flock[20])) - if errno == 0 { + if r0 == 0 { return nil } - return errno + return errnoErr2(e1, e2) } -func Flock(fd int, how int) error { +func impl_Flock(fd int, how int) (err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_FLOCK<<4, uintptr(fd), uintptr(how)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_FlockAddr() *(func(fd int, how int) (err error)) + +var Flock = enter_Flock + +func validFlock(fp uintptr) bool { + if 
funcptrtest(GetZosLibVec()+SYS_FLOCK<<4, "") == 0 { + if name, err := getLeFuncName(GetZosLibVec() + SYS_FLOCK<<4); err == nil { + return name == "flock" + } + } + return false +} + +func enter_Flock(fd int, how int) (err error) { + funcref := get_FlockAddr() + if validFlock(GetZosLibVec() + SYS_FLOCK<<4) { + *funcref = impl_Flock + } else { + *funcref = legacyFlock + } + return (*funcref)(fd, how) +} + +func legacyFlock(fd int, how int) error { var flock_type int16 var fcntl_cmd int @@ -1307,41 +2100,51 @@ func Flock(fd int, how int) error { } func Mlock(b []byte) (err error) { - _, _, e1 := syscall_syscall(SYS___MLOCKALL, _BPX_NONSWAP, 0, 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___MLOCKALL<<4, _BPX_NONSWAP) + runtime.ExitSyscall() + if r0 != 0 { + err = errnoErr2(e1, e2) } return } func Mlock2(b []byte, flags int) (err error) { - _, _, e1 := syscall_syscall(SYS___MLOCKALL, _BPX_NONSWAP, 0, 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___MLOCKALL<<4, _BPX_NONSWAP) + runtime.ExitSyscall() + if r0 != 0 { + err = errnoErr2(e1, e2) } return } func Mlockall(flags int) (err error) { - _, _, e1 := syscall_syscall(SYS___MLOCKALL, _BPX_NONSWAP, 0, 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___MLOCKALL<<4, _BPX_NONSWAP) + runtime.ExitSyscall() + if r0 != 0 { + err = errnoErr2(e1, e2) } return } func Munlock(b []byte) (err error) { - _, _, e1 := syscall_syscall(SYS___MLOCKALL, _BPX_SWAP, 0, 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___MLOCKALL<<4, _BPX_SWAP) + runtime.ExitSyscall() + if r0 != 0 { + err = errnoErr2(e1, e2) } return } func Munlockall() (err error) { - _, _, e1 := syscall_syscall(SYS___MLOCKALL, _BPX_SWAP, 0, 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___MLOCKALL<<4, _BPX_SWAP) + runtime.ExitSyscall() + if r0 != 0 { + err = errnoErr2(e1, e2) } return } @@ -1372,15 +2175,104 @@ func ClockGettime(clockid int32, ts *Timespec) error { return nil } -func Statfs(path string, stat *Statfs_t) (err error) { - fd, err := open(path, O_RDONLY, 0) - defer Close(fd) - if err != nil { - return err +// Chtag + +//go:nosplit +func get_ChtagAddr() *(func(path string, ccsid uint64, textbit uint64) error) + +var Chtag = enter_Chtag + +func enter_Chtag(path string, ccsid uint64, textbit uint64) error { + funcref := get_ChtagAddr() + if validSetxattr() { + *funcref = impl_Chtag + } else { + *funcref = legacy_Chtag } - return Fstatfs(fd, stat) + return (*funcref)(path, ccsid, textbit) } +func legacy_Chtag(path string, ccsid uint64, textbit uint64) error { + tag := ccsid<<16 | textbit<<15 + var tag_buff [8]byte + DecodeData(tag_buff[:], 8, tag) + return Setxattr(path, "filetag", tag_buff[:], XATTR_REPLACE) +} + +func impl_Chtag(path string, ccsid uint64, textbit uint64) error { + tag := ccsid<<16 | textbit<<15 + var tag_buff [4]byte + DecodeData(tag_buff[:], 4, tag) + return Setxattr(path, "system.filetag", tag_buff[:], XATTR_REPLACE) +} + +// End of Chtag + +// Nanosleep + +//go:nosplit +func get_NanosleepAddr() *(func(time *Timespec, leftover *Timespec) error) + +var Nanosleep = enter_Nanosleep + +func enter_Nanosleep(time *Timespec, leftover *Timespec) error { + funcref := get_NanosleepAddr() + if funcptrtest(GetZosLibVec()+SYS_NANOSLEEP<<4, "") 
== 0 { + *funcref = impl_Nanosleep + } else { + *funcref = legacyNanosleep + } + return (*funcref)(time, leftover) +} + +func impl_Nanosleep(time *Timespec, leftover *Timespec) error { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_NANOSLEEP<<4, uintptr(unsafe.Pointer(time)), uintptr(unsafe.Pointer(leftover))) + runtime.ExitSyscall() + if int64(r0) == -1 { + return errnoErr2(e1, e2) + } + return nil +} + +func legacyNanosleep(time *Timespec, leftover *Timespec) error { + t0 := runtime.Nanotime1() + var secrem uint32 + var nsecrem uint32 + total := time.Sec*1000000000 + time.Nsec + elapsed := runtime.Nanotime1() - t0 + var rv int32 + var rc int32 + var err error + // repeatedly sleep for 1 second until less than 1 second left + for total-elapsed > 1000000000 { + rv, rc, _ = BpxCondTimedWait(uint32(1), uint32(0), uint32(CW_CONDVAR), &secrem, &nsecrem) + if rv != 0 && rc != 112 { // 112 is EAGAIN + if leftover != nil && rc == 120 { // 120 is EINTR + leftover.Sec = int64(secrem) + leftover.Nsec = int64(nsecrem) + } + err = Errno(rc) + return err + } + elapsed = runtime.Nanotime1() - t0 + } + // sleep the remainder + if total > elapsed { + rv, rc, _ = BpxCondTimedWait(uint32(0), uint32(total-elapsed), uint32(CW_CONDVAR), &secrem, &nsecrem) + } + if leftover != nil && rc == 120 { + leftover.Sec = int64(secrem) + leftover.Nsec = int64(nsecrem) + } + if rv != 0 && rc != 112 { + err = Errno(rc) + } + return err +} + +// End of Nanosleep + var ( Stdin = 0 Stdout = 1 @@ -1395,6 +2287,9 @@ var ( errENOENT error = syscall.ENOENT ) +var ZosTraceLevel int +var ZosTracefile *os.File + var ( signalNameMapOnce sync.Once signalNameMap map[string]syscall.Signal @@ -1416,6 +2311,56 @@ func errnoErr(e Errno) error { return e } +var reg *regexp.Regexp + +// enhanced with zos specific errno2 +func errnoErr2(e Errno, e2 uintptr) error { + switch e { + case 0: + return nil + case EAGAIN: + return errEAGAIN + /* + Allow the retrieval of errno2 for EINVAL and ENOENT on zos + case EINVAL: + return errEINVAL + case ENOENT: + return errENOENT + */ + } + if ZosTraceLevel > 0 { + var name string + if reg == nil { + reg = regexp.MustCompile("(^unix\\.[^/]+$|.*\\/unix\\.[^/]+$)") + } + i := 1 + pc, file, line, ok := runtime.Caller(i) + if ok { + name = runtime.FuncForPC(pc).Name() + } + for ok && reg.MatchString(runtime.FuncForPC(pc).Name()) { + i += 1 + pc, file, line, ok = runtime.Caller(i) + } + if ok { + if ZosTracefile == nil { + ZosConsolePrintf("From %s:%d\n", file, line) + ZosConsolePrintf("%s: %s (errno2=0x%x)\n", name, e.Error(), e2) + } else { + fmt.Fprintf(ZosTracefile, "From %s:%d\n", file, line) + fmt.Fprintf(ZosTracefile, "%s: %s (errno2=0x%x)\n", name, e.Error(), e2) + } + } else { + if ZosTracefile == nil { + ZosConsolePrintf("%s (errno2=0x%x)\n", e.Error(), e2) + } else { + fmt.Fprintf(ZosTracefile, "%s (errno2=0x%x)\n", e.Error(), e2) + } + } + } + return e +} + // ErrnoName returns the error name for error number e. func ErrnoName(e Errno) string { i := sort.Search(len(errorList), func(i int) bool { @@ -1474,6 +2419,9 @@ func (m *mmapper) Mmap(fd int, offset int64, length int, prot int, flags int) (d return nil, EINVAL } + // Set __MAP_64 by default + flags |= __MAP_64 + // Map the requested memory. 
addr, errno := m.mmap(0, uintptr(length), prot, flags, fd, offset) if errno != nil { @@ -1778,83 +2726,170 @@ func Exec(argv0 string, argv []string, envv []string) error { return syscall.Exec(argv0, argv, envv) } -func Mount(source string, target string, fstype string, flags uintptr, data string) (err error) { - if needspace := 8 - len(fstype); needspace <= 0 { - fstype = fstype[:8] +func Getag(path string) (ccsid uint16, flag uint16, err error) { + var val [8]byte + sz, err := Getxattr(path, "ccsid", val[:]) + if err != nil { + return + } + ccsid = uint16(EncodeData(val[0:sz])) + sz, err = Getxattr(path, "flags", val[:]) + if err != nil { + return + } + flag = uint16(EncodeData(val[0:sz]) >> 15) + return +} + +// Mount begin +func impl_Mount(source string, target string, fstype string, flags uintptr, data string) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(source) + if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(target) + if err != nil { + return + } + var _p2 *byte + _p2, err = BytePtrFromString(fstype) + if err != nil { + return + } + var _p3 *byte + _p3, err = BytePtrFromString(data) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___MOUNT1_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(unsafe.Pointer(_p2)), uintptr(flags), uintptr(unsafe.Pointer(_p3))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_MountAddr() *(func(source string, target string, fstype string, flags uintptr, data string) (err error)) + +var Mount = enter_Mount + +func enter_Mount(source string, target string, fstype string, flags uintptr, data string) (err error) { + funcref := get_MountAddr() + if validMount() { + *funcref = impl_Mount } else { - fstype += " "[:needspace] + *funcref = legacyMount + } + return (*funcref)(source, target, fstype, flags, data) +} + +func legacyMount(source string, target string, fstype string, flags uintptr, data string) (err error) { + if needspace := 8 - len(fstype); needspace <= 0 { + fstype = fstype[0:8] + } else { + fstype += " "[0:needspace] } return mount_LE(target, source, fstype, uint32(flags), int32(len(data)), data) } -func Unmount(name string, mtm int) (err error) { +func validMount() bool { + if funcptrtest(GetZosLibVec()+SYS___MOUNT1_A<<4, "") == 0 { + if name, err := getLeFuncName(GetZosLibVec() + SYS___MOUNT1_A<<4); err == nil { + return name == "__mount1_a" + } + } + return false +} + +// Mount end + +// Unmount begin +func impl_Unmount(target string, flags int) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(target) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___UMOUNT2_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(flags)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_UnmountAddr() *(func(target string, flags int) (err error)) + +var Unmount = enter_Unmount + +func enter_Unmount(target string, flags int) (err error) { + funcref := get_UnmountAddr() + if funcptrtest(GetZosLibVec()+SYS___UMOUNT2_A<<4, "") == 0 { + *funcref = impl_Unmount + } else { + *funcref = legacyUnmount + } + return (*funcref)(target, flags) +} + +func legacyUnmount(name string, mtm int) (err error) { // mountpoint is always a full path and starts with a '/' // check if input string is not a mountpoint but a filesystem name if name[0] != '/' { - return 
unmount(name, mtm) + return unmount_LE(name, mtm) } // treat name as mountpoint b2s := func(arr []byte) string { - nulli := bytes.IndexByte(arr, 0) - if nulli == -1 { - return string(arr) - } else { - return string(arr[:nulli]) + var str string + for i := 0; i < len(arr); i++ { + if arr[i] == 0 { + str = string(arr[:i]) + break + } } + return str } var buffer struct { header W_Mnth fsinfo [64]W_Mntent } - fsCount, err := W_Getmntent_A((*byte)(unsafe.Pointer(&buffer)), int(unsafe.Sizeof(buffer))) - if err != nil { - return err - } - if fsCount == 0 { - return EINVAL - } - for i := 0; i < fsCount; i++ { - if b2s(buffer.fsinfo[i].Mountpoint[:]) == name { - err = unmount(b2s(buffer.fsinfo[i].Fsname[:]), mtm) - break + fs_count, err := W_Getmntent_A((*byte)(unsafe.Pointer(&buffer)), int(unsafe.Sizeof(buffer))) + if err == nil { + err = EINVAL + for i := 0; i < fs_count; i++ { + if b2s(buffer.fsinfo[i].Mountpoint[:]) == name { + err = unmount_LE(b2s(buffer.fsinfo[i].Fsname[:]), mtm) + break + } } + } else if fs_count == 0 { + err = EINVAL } return err } -func fdToPath(dirfd int) (path string, err error) { - var buffer [1024]byte - // w_ctrl() - ret := runtime.CallLeFuncByPtr(runtime.XplinkLibvec+SYS_W_IOCTL<<4, - []uintptr{uintptr(dirfd), 17, 1024, uintptr(unsafe.Pointer(&buffer[0]))}) - if ret == 0 { - zb := bytes.IndexByte(buffer[:], 0) - if zb == -1 { - zb = len(buffer) - } - // __e2a_l() - runtime.CallLeFuncByPtr(runtime.XplinkLibvec+SYS___E2A_L<<4, - []uintptr{uintptr(unsafe.Pointer(&buffer[0])), uintptr(zb)}) - return string(buffer[:zb]), nil - } - // __errno() - errno := int(*(*int32)(unsafe.Pointer(runtime.CallLeFuncByPtr(runtime.XplinkLibvec+SYS___ERRNO<<4, - []uintptr{})))) - // __errno2() - errno2 := int(runtime.CallLeFuncByPtr(runtime.XplinkLibvec+SYS___ERRNO2<<4, - []uintptr{})) - // strerror_r() - ret = runtime.CallLeFuncByPtr(runtime.XplinkLibvec+SYS_STRERROR_R<<4, - []uintptr{uintptr(errno), uintptr(unsafe.Pointer(&buffer[0])), 1024}) - if ret == 0 { - zb := bytes.IndexByte(buffer[:], 0) - if zb == -1 { - zb = len(buffer) - } - return "", fmt.Errorf("%s (errno2=0x%x)", buffer[:zb], errno2) - } else { - return "", fmt.Errorf("fdToPath errno %d (errno2=0x%x)", errno, errno2) +// Unmount end + +func direntIno(buf []byte) (uint64, bool) { + return readInt(buf, unsafe.Offsetof(Dirent{}.Ino), unsafe.Sizeof(Dirent{}.Ino)) +} + +func direntReclen(buf []byte) (uint64, bool) { + return readInt(buf, unsafe.Offsetof(Dirent{}.Reclen), unsafe.Sizeof(Dirent{}.Reclen)) +} + +func direntNamlen(buf []byte) (uint64, bool) { + reclen, ok := direntReclen(buf) + if !ok { + return 0, false } + return reclen - uint64(unsafe.Offsetof(Dirent{}.Name)), true } func direntLeToDirentUnix(dirent *direntLE, dir uintptr, path string) (Dirent, error) { @@ -1896,7 +2931,7 @@ func Getdirentries(fd int, buf []byte, basep *uintptr) (n int, err error) { } // Get path from fd to avoid unavailable call (fdopendir) - path, err := fdToPath(fd) + path, err := ZosFdToPath(fd) if err != nil { return 0, err } @@ -1910,7 +2945,7 @@ func Getdirentries(fd int, buf []byte, basep *uintptr) (n int, err error) { for { var entryLE direntLE var entrypLE *direntLE - e := readdir_r(d, &entryLE, &entrypLE) + e := Readdir_r(d, &entryLE, &entrypLE) if e != nil { return n, e } @@ -1956,23 +2991,127 @@ func Getdirentries(fd int, buf []byte, basep *uintptr) (n int, err error) { return n, nil } -func ReadDirent(fd int, buf []byte) (n int, err error) { - var base = (*uintptr)(unsafe.Pointer(new(uint64))) - return Getdirentries(fd, buf, base) 
+func Err2ad() (eadd *int) { + r0, _, _ := CallLeFuncWithErr(GetZosLibVec() + SYS___ERR2AD<<4) + eadd = (*int)(unsafe.Pointer(r0)) + return } -func direntIno(buf []byte) (uint64, bool) { - return readInt(buf, unsafe.Offsetof(Dirent{}.Ino), unsafe.Sizeof(Dirent{}.Ino)) -} - -func direntReclen(buf []byte) (uint64, bool) { - return readInt(buf, unsafe.Offsetof(Dirent{}.Reclen), unsafe.Sizeof(Dirent{}.Reclen)) -} - -func direntNamlen(buf []byte) (uint64, bool) { - reclen, ok := direntReclen(buf) - if !ok { - return 0, false +func ZosConsolePrintf(format string, v ...interface{}) (int, error) { + type __cmsg struct { + _ uint16 + _ [2]uint8 + __msg_length uint32 + __msg uintptr + _ [4]uint8 } - return reclen - uint64(unsafe.Offsetof(Dirent{}.Name)), true + msg := fmt.Sprintf(format, v...) + strptr := unsafe.Pointer((*reflect.StringHeader)(unsafe.Pointer(&msg)).Data) + len := (*reflect.StringHeader)(unsafe.Pointer(&msg)).Len + cmsg := __cmsg{__msg_length: uint32(len), __msg: uintptr(strptr)} + cmd := uint32(0) + runtime.EnterSyscall() + rc, err2, err1 := CallLeFuncWithErr(GetZosLibVec()+SYS_____CONSOLE_A<<4, uintptr(unsafe.Pointer(&cmsg)), 0, uintptr(unsafe.Pointer(&cmd))) + runtime.ExitSyscall() + if rc != 0 { + return 0, fmt.Errorf("%s (errno2=0x%x)\n", err1.Error(), err2) + } + return 0, nil } +func ZosStringToEbcdicBytes(str string, nullterm bool) (ebcdicBytes []byte) { + if nullterm { + ebcdicBytes = []byte(str + "\x00") + } else { + ebcdicBytes = []byte(str) + } + A2e(ebcdicBytes) + return +} +func ZosEbcdicBytesToString(b []byte, trimRight bool) (str string) { + res := make([]byte, len(b)) + copy(res, b) + E2a(res) + if trimRight { + str = string(bytes.TrimRight(res, " \x00")) + } else { + str = string(res) + } + return +} + +func fdToPath(dirfd int) (path string, err error) { + var buffer [1024]byte + // w_ctrl() + ret := runtime.CallLeFuncByPtr(runtime.XplinkLibvec+SYS_W_IOCTL<<4, + []uintptr{uintptr(dirfd), 17, 1024, uintptr(unsafe.Pointer(&buffer[0]))}) + if ret == 0 { + zb := bytes.IndexByte(buffer[:], 0) + if zb == -1 { + zb = len(buffer) + } + // __e2a_l() + runtime.CallLeFuncByPtr(runtime.XplinkLibvec+SYS___E2A_L<<4, + []uintptr{uintptr(unsafe.Pointer(&buffer[0])), uintptr(zb)}) + return string(buffer[:zb]), nil + } + // __errno() + errno := int(*(*int32)(unsafe.Pointer(runtime.CallLeFuncByPtr(runtime.XplinkLibvec+SYS___ERRNO<<4, + []uintptr{})))) + // __errno2() + errno2 := int(runtime.CallLeFuncByPtr(runtime.XplinkLibvec+SYS___ERRNO2<<4, + []uintptr{})) + // strerror_r() + ret = runtime.CallLeFuncByPtr(runtime.XplinkLibvec+SYS_STRERROR_R<<4, + []uintptr{uintptr(errno), uintptr(unsafe.Pointer(&buffer[0])), 1024}) + if ret == 0 { + zb := bytes.IndexByte(buffer[:], 0) + if zb == -1 { + zb = len(buffer) + } + return "", fmt.Errorf("%s (errno2=0x%x)", buffer[:zb], errno2) + } else { + return "", fmt.Errorf("fdToPath errno %d (errno2=0x%x)", errno, errno2) + } +} + +func impl_Mkfifoat(dirfd int, path string, mode uint32) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___MKFIFOAT_A<<4, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_MkfifoatAddr() *(func(dirfd int, path string, mode uint32) (err error)) + +var Mkfifoat = enter_Mkfifoat + +func enter_Mkfifoat(dirfd int, path string, mode uint32) (err error) { + funcref := 
get_MkfifoatAddr() + if funcptrtest(GetZosLibVec()+SYS___MKFIFOAT_A<<4, "") == 0 { + *funcref = impl_Mkfifoat + } else { + *funcref = legacy_Mkfifoat + } + return (*funcref)(dirfd, path, mode) +} + +func legacy_Mkfifoat(dirfd int, path string, mode uint32) (err error) { + dirname, err := ZosFdToPath(dirfd) + if err != nil { + return err + } + return Mkfifo(dirname+"/"+path, mode) +} + +//sys Posix_openpt(oflag int) (fd int, err error) = SYS_POSIX_OPENPT +//sys Grantpt(fildes int) (rc int, err error) = SYS_GRANTPT +//sys Unlockpt(fildes int) (rc int, err error) = SYS_UNLOCKPT diff --git a/vendor/golang.org/x/sys/unix/sysvshm_unix.go b/vendor/golang.org/x/sys/unix/sysvshm_unix.go index 79a84f1..672d6b0 100644 --- a/vendor/golang.org/x/sys/unix/sysvshm_unix.go +++ b/vendor/golang.org/x/sys/unix/sysvshm_unix.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build (darwin && !ios) || linux +//go:build (darwin && !ios) || linux || zos package unix diff --git a/vendor/golang.org/x/sys/unix/sysvshm_unix_other.go b/vendor/golang.org/x/sys/unix/sysvshm_unix_other.go index 9eb0db6..8b7977a 100644 --- a/vendor/golang.org/x/sys/unix/sysvshm_unix_other.go +++ b/vendor/golang.org/x/sys/unix/sysvshm_unix_other.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build darwin && !ios +//go:build (darwin && !ios) || zos package unix diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux.go b/vendor/golang.org/x/sys/unix/zerrors_linux.go index c73cfe2..93a38a9 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux.go @@ -491,6 +491,7 @@ const ( BPF_F_REPLACE = 0x4 BPF_F_SLEEPABLE = 0x10 BPF_F_STRICT_ALIGNMENT = 0x1 + BPF_F_TEST_REG_INVARIANTS = 0x80 BPF_F_TEST_RND_HI32 = 0x4 BPF_F_TEST_RUN_ON_CPU = 0x1 BPF_F_TEST_STATE_FREQ = 0x8 @@ -1697,6 +1698,7 @@ const ( KEXEC_ARCH_S390 = 0x160000 KEXEC_ARCH_SH = 0x2a0000 KEXEC_ARCH_X86_64 = 0x3e0000 + KEXEC_FILE_DEBUG = 0x8 KEXEC_FILE_NO_INITRAMFS = 0x4 KEXEC_FILE_ON_CRASH = 0x2 KEXEC_FILE_UNLOAD = 0x1 @@ -1785,6 +1787,8 @@ const ( LANDLOCK_ACCESS_FS_REMOVE_FILE = 0x20 LANDLOCK_ACCESS_FS_TRUNCATE = 0x4000 LANDLOCK_ACCESS_FS_WRITE_FILE = 0x2 + LANDLOCK_ACCESS_NET_BIND_TCP = 0x1 + LANDLOCK_ACCESS_NET_CONNECT_TCP = 0x2 LANDLOCK_CREATE_RULESET_VERSION = 0x1 LINUX_REBOOT_CMD_CAD_OFF = 0x0 LINUX_REBOOT_CMD_CAD_ON = 0x89abcdef @@ -1896,6 +1900,7 @@ const ( MNT_DETACH = 0x2 MNT_EXPIRE = 0x4 MNT_FORCE = 0x1 + MNT_ID_REQ_SIZE_VER0 = 0x18 MODULE_INIT_COMPRESSED_FILE = 0x4 MODULE_INIT_IGNORE_MODVERSIONS = 0x1 MODULE_INIT_IGNORE_VERMAGIC = 0x2 @@ -2127,6 +2132,60 @@ const ( NFNL_SUBSYS_QUEUE = 0x3 NFNL_SUBSYS_ULOG = 0x4 NFS_SUPER_MAGIC = 0x6969 + NFT_CHAIN_FLAGS = 0x7 + NFT_CHAIN_MAXNAMELEN = 0x100 + NFT_CT_MAX = 0x17 + NFT_DATA_RESERVED_MASK = 0xffffff00 + NFT_DATA_VALUE_MAXLEN = 0x40 + NFT_EXTHDR_OP_MAX = 0x4 + NFT_FIB_RESULT_MAX = 0x3 + NFT_INNER_MASK = 0xf + NFT_LOGLEVEL_MAX = 0x8 + NFT_NAME_MAXLEN = 0x100 + NFT_NG_MAX = 0x1 + NFT_OBJECT_CONNLIMIT = 0x5 + NFT_OBJECT_COUNTER = 0x1 + NFT_OBJECT_CT_EXPECT = 0x9 + NFT_OBJECT_CT_HELPER = 0x3 + NFT_OBJECT_CT_TIMEOUT = 0x7 + NFT_OBJECT_LIMIT = 0x4 + NFT_OBJECT_MAX = 0xa + NFT_OBJECT_QUOTA = 0x2 + NFT_OBJECT_SECMARK = 0x8 + NFT_OBJECT_SYNPROXY = 0xa + NFT_OBJECT_TUNNEL = 0x6 + NFT_OBJECT_UNSPEC = 0x0 + NFT_OBJ_MAXNAMELEN = 0x100 + NFT_OSF_MAXGENRELEN = 0x10 + NFT_QUEUE_FLAG_BYPASS = 0x1 + NFT_QUEUE_FLAG_CPU_FANOUT = 0x2 + 
NFT_QUEUE_FLAG_MASK = 0x3 + NFT_REG32_COUNT = 0x10 + NFT_REG32_SIZE = 0x4 + NFT_REG_MAX = 0x4 + NFT_REG_SIZE = 0x10 + NFT_REJECT_ICMPX_MAX = 0x3 + NFT_RT_MAX = 0x4 + NFT_SECMARK_CTX_MAXLEN = 0x100 + NFT_SET_MAXNAMELEN = 0x100 + NFT_SOCKET_MAX = 0x3 + NFT_TABLE_F_MASK = 0x3 + NFT_TABLE_MAXNAMELEN = 0x100 + NFT_TRACETYPE_MAX = 0x3 + NFT_TUNNEL_F_MASK = 0x7 + NFT_TUNNEL_MAX = 0x1 + NFT_TUNNEL_MODE_MAX = 0x2 + NFT_USERDATA_MAXLEN = 0x100 + NFT_XFRM_KEY_MAX = 0x6 + NF_NAT_RANGE_MAP_IPS = 0x1 + NF_NAT_RANGE_MASK = 0x7f + NF_NAT_RANGE_NETMAP = 0x40 + NF_NAT_RANGE_PERSISTENT = 0x8 + NF_NAT_RANGE_PROTO_OFFSET = 0x20 + NF_NAT_RANGE_PROTO_RANDOM = 0x4 + NF_NAT_RANGE_PROTO_RANDOM_ALL = 0x14 + NF_NAT_RANGE_PROTO_RANDOM_FULLY = 0x10 + NF_NAT_RANGE_PROTO_SPECIFIED = 0x2 NILFS_SUPER_MAGIC = 0x3434 NL0 = 0x0 NL1 = 0x100 @@ -2246,6 +2305,7 @@ const ( PERF_AUX_FLAG_PARTIAL = 0x4 PERF_AUX_FLAG_PMU_FORMAT_TYPE_MASK = 0xff00 PERF_AUX_FLAG_TRUNCATED = 0x1 + PERF_BRANCH_ENTRY_INFO_BITS_MAX = 0x21 PERF_BR_ARM64_DEBUG_DATA = 0x7 PERF_BR_ARM64_DEBUG_EXIT = 0x5 PERF_BR_ARM64_DEBUG_HALT = 0x4 @@ -2411,6 +2471,7 @@ const ( PR_MCE_KILL_GET = 0x22 PR_MCE_KILL_LATE = 0x0 PR_MCE_KILL_SET = 0x1 + PR_MDWE_NO_INHERIT = 0x2 PR_MDWE_REFUSE_EXEC_GAIN = 0x1 PR_MPX_DISABLE_MANAGEMENT = 0x2c PR_MPX_ENABLE_MANAGEMENT = 0x2b @@ -2615,8 +2676,9 @@ const ( RTAX_FEATURES = 0xc RTAX_FEATURE_ALLFRAG = 0x8 RTAX_FEATURE_ECN = 0x1 - RTAX_FEATURE_MASK = 0xf + RTAX_FEATURE_MASK = 0x1f RTAX_FEATURE_SACK = 0x2 + RTAX_FEATURE_TCP_USEC_TS = 0x10 RTAX_FEATURE_TIMESTAMP = 0x4 RTAX_HOPLIMIT = 0xa RTAX_INITCWND = 0xb @@ -2859,9 +2921,38 @@ const ( SCM_RIGHTS = 0x1 SCM_TIMESTAMP = 0x1d SC_LOG_FLUSH = 0x100000 + SECCOMP_ADDFD_FLAG_SEND = 0x2 + SECCOMP_ADDFD_FLAG_SETFD = 0x1 + SECCOMP_FILTER_FLAG_LOG = 0x2 + SECCOMP_FILTER_FLAG_NEW_LISTENER = 0x8 + SECCOMP_FILTER_FLAG_SPEC_ALLOW = 0x4 + SECCOMP_FILTER_FLAG_TSYNC = 0x1 + SECCOMP_FILTER_FLAG_TSYNC_ESRCH = 0x10 + SECCOMP_FILTER_FLAG_WAIT_KILLABLE_RECV = 0x20 + SECCOMP_GET_ACTION_AVAIL = 0x2 + SECCOMP_GET_NOTIF_SIZES = 0x3 + SECCOMP_IOCTL_NOTIF_RECV = 0xc0502100 + SECCOMP_IOCTL_NOTIF_SEND = 0xc0182101 + SECCOMP_IOC_MAGIC = '!' 
SECCOMP_MODE_DISABLED = 0x0 SECCOMP_MODE_FILTER = 0x2 SECCOMP_MODE_STRICT = 0x1 + SECCOMP_RET_ACTION = 0x7fff0000 + SECCOMP_RET_ACTION_FULL = 0xffff0000 + SECCOMP_RET_ALLOW = 0x7fff0000 + SECCOMP_RET_DATA = 0xffff + SECCOMP_RET_ERRNO = 0x50000 + SECCOMP_RET_KILL = 0x0 + SECCOMP_RET_KILL_PROCESS = 0x80000000 + SECCOMP_RET_KILL_THREAD = 0x0 + SECCOMP_RET_LOG = 0x7ffc0000 + SECCOMP_RET_TRACE = 0x7ff00000 + SECCOMP_RET_TRAP = 0x30000 + SECCOMP_RET_USER_NOTIF = 0x7fc00000 + SECCOMP_SET_MODE_FILTER = 0x1 + SECCOMP_SET_MODE_STRICT = 0x0 + SECCOMP_USER_NOTIF_FD_SYNC_WAKE_UP = 0x1 + SECCOMP_USER_NOTIF_FLAG_CONTINUE = 0x1 SECRETMEM_MAGIC = 0x5345434d SECURITYFS_MAGIC = 0x73636673 SEEK_CUR = 0x1 @@ -3021,6 +3112,7 @@ const ( SOL_TIPC = 0x10f SOL_TLS = 0x11a SOL_UDP = 0x11 + SOL_VSOCK = 0x11f SOL_X25 = 0x106 SOL_XDP = 0x11b SOMAXCONN = 0x1000 @@ -3080,6 +3172,7 @@ const ( STATX_GID = 0x10 STATX_INO = 0x100 STATX_MNT_ID = 0x1000 + STATX_MNT_ID_UNIQUE = 0x4000 STATX_MODE = 0x2 STATX_MTIME = 0x40 STATX_NLINK = 0x4 @@ -3474,12 +3567,16 @@ const ( XDP_RX_RING = 0x2 XDP_SHARED_UMEM = 0x1 XDP_STATISTICS = 0x7 + XDP_TXMD_FLAGS_CHECKSUM = 0x2 + XDP_TXMD_FLAGS_TIMESTAMP = 0x1 + XDP_TX_METADATA = 0x2 XDP_TX_RING = 0x3 XDP_UMEM_COMPLETION_RING = 0x6 XDP_UMEM_FILL_RING = 0x5 XDP_UMEM_PGOFF_COMPLETION_RING = 0x180000000 XDP_UMEM_PGOFF_FILL_RING = 0x100000000 XDP_UMEM_REG = 0x4 + XDP_UMEM_TX_SW_CSUM = 0x2 XDP_UMEM_UNALIGNED_CHUNK_FLAG = 0x1 XDP_USE_NEED_WAKEUP = 0x8 XDP_USE_SG = 0x10 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_386.go b/vendor/golang.org/x/sys/unix/zerrors_linux_386.go index 4920821..42ff8c3 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_386.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_386.go @@ -281,6 +281,9 @@ const ( SCM_TIMESTAMPNS = 0x23 SCM_TXTIME = 0x3d SCM_WIFI_STATUS = 0x29 + SECCOMP_IOCTL_NOTIF_ADDFD = 0x40182103 + SECCOMP_IOCTL_NOTIF_ID_VALID = 0x40082102 + SECCOMP_IOCTL_NOTIF_SET_FLAGS = 0x40082104 SFD_CLOEXEC = 0x80000 SFD_NONBLOCK = 0x800 SIOCATMARK = 0x8905 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go index a0c1e41..dca4360 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go @@ -282,6 +282,9 @@ const ( SCM_TIMESTAMPNS = 0x23 SCM_TXTIME = 0x3d SCM_WIFI_STATUS = 0x29 + SECCOMP_IOCTL_NOTIF_ADDFD = 0x40182103 + SECCOMP_IOCTL_NOTIF_ID_VALID = 0x40082102 + SECCOMP_IOCTL_NOTIF_SET_FLAGS = 0x40082104 SFD_CLOEXEC = 0x80000 SFD_NONBLOCK = 0x800 SIOCATMARK = 0x8905 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go b/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go index c639855..5cca668 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go @@ -288,6 +288,9 @@ const ( SCM_TIMESTAMPNS = 0x23 SCM_TXTIME = 0x3d SCM_WIFI_STATUS = 0x29 + SECCOMP_IOCTL_NOTIF_ADDFD = 0x40182103 + SECCOMP_IOCTL_NOTIF_ID_VALID = 0x40082102 + SECCOMP_IOCTL_NOTIF_SET_FLAGS = 0x40082104 SFD_CLOEXEC = 0x80000 SFD_NONBLOCK = 0x800 SIOCATMARK = 0x8905 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go index 47cc62e..d8cae6d 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go @@ -278,6 +278,9 @@ const ( SCM_TIMESTAMPNS = 0x23 SCM_TXTIME = 0x3d SCM_WIFI_STATUS = 0x29 + SECCOMP_IOCTL_NOTIF_ADDFD = 0x40182103 + SECCOMP_IOCTL_NOTIF_ID_VALID = 
0x40082102 + SECCOMP_IOCTL_NOTIF_SET_FLAGS = 0x40082104 SFD_CLOEXEC = 0x80000 SFD_NONBLOCK = 0x800 SIOCATMARK = 0x8905 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go index 27ac4a0..28e39af 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go @@ -275,6 +275,9 @@ const ( SCM_TIMESTAMPNS = 0x23 SCM_TXTIME = 0x3d SCM_WIFI_STATUS = 0x29 + SECCOMP_IOCTL_NOTIF_ADDFD = 0x40182103 + SECCOMP_IOCTL_NOTIF_ID_VALID = 0x40082102 + SECCOMP_IOCTL_NOTIF_SET_FLAGS = 0x40082104 SFD_CLOEXEC = 0x80000 SFD_NONBLOCK = 0x800 SIOCATMARK = 0x8905 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go index 5469464..cd66e92 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go @@ -281,6 +281,9 @@ const ( SCM_TIMESTAMPNS = 0x23 SCM_TXTIME = 0x3d SCM_WIFI_STATUS = 0x29 + SECCOMP_IOCTL_NOTIF_ADDFD = 0x80182103 + SECCOMP_IOCTL_NOTIF_ID_VALID = 0x80082102 + SECCOMP_IOCTL_NOTIF_SET_FLAGS = 0x80082104 SFD_CLOEXEC = 0x80000 SFD_NONBLOCK = 0x80 SIOCATMARK = 0x40047307 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go index 3adb81d..c1595eb 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go @@ -281,6 +281,9 @@ const ( SCM_TIMESTAMPNS = 0x23 SCM_TXTIME = 0x3d SCM_WIFI_STATUS = 0x29 + SECCOMP_IOCTL_NOTIF_ADDFD = 0x80182103 + SECCOMP_IOCTL_NOTIF_ID_VALID = 0x80082102 + SECCOMP_IOCTL_NOTIF_SET_FLAGS = 0x80082104 SFD_CLOEXEC = 0x80000 SFD_NONBLOCK = 0x80 SIOCATMARK = 0x40047307 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go index 2dfe98f..ee9456b 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go @@ -281,6 +281,9 @@ const ( SCM_TIMESTAMPNS = 0x23 SCM_TXTIME = 0x3d SCM_WIFI_STATUS = 0x29 + SECCOMP_IOCTL_NOTIF_ADDFD = 0x80182103 + SECCOMP_IOCTL_NOTIF_ID_VALID = 0x80082102 + SECCOMP_IOCTL_NOTIF_SET_FLAGS = 0x80082104 SFD_CLOEXEC = 0x80000 SFD_NONBLOCK = 0x80 SIOCATMARK = 0x40047307 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go index f5398f8..8cfca81 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go @@ -281,6 +281,9 @@ const ( SCM_TIMESTAMPNS = 0x23 SCM_TXTIME = 0x3d SCM_WIFI_STATUS = 0x29 + SECCOMP_IOCTL_NOTIF_ADDFD = 0x80182103 + SECCOMP_IOCTL_NOTIF_ID_VALID = 0x80082102 + SECCOMP_IOCTL_NOTIF_SET_FLAGS = 0x80082104 SFD_CLOEXEC = 0x80000 SFD_NONBLOCK = 0x80 SIOCATMARK = 0x40047307 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go index c54f152..60b0deb 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go @@ -336,6 +336,9 @@ const ( SCM_TIMESTAMPNS = 0x23 SCM_TXTIME = 0x3d SCM_WIFI_STATUS = 0x29 + SECCOMP_IOCTL_NOTIF_ADDFD = 0x80182103 + SECCOMP_IOCTL_NOTIF_ID_VALID = 0x80082102 + SECCOMP_IOCTL_NOTIF_SET_FLAGS = 0x80082104 SFD_CLOEXEC = 0x80000 SFD_NONBLOCK = 0x800 SIOCATMARK = 0x8905 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go 
b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go index 76057dc..f90aa72 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go @@ -340,6 +340,9 @@ const ( SCM_TIMESTAMPNS = 0x23 SCM_TXTIME = 0x3d SCM_WIFI_STATUS = 0x29 + SECCOMP_IOCTL_NOTIF_ADDFD = 0x80182103 + SECCOMP_IOCTL_NOTIF_ID_VALID = 0x80082102 + SECCOMP_IOCTL_NOTIF_SET_FLAGS = 0x80082104 SFD_CLOEXEC = 0x80000 SFD_NONBLOCK = 0x800 SIOCATMARK = 0x8905 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go index e0c3725..ba9e015 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go @@ -340,6 +340,9 @@ const ( SCM_TIMESTAMPNS = 0x23 SCM_TXTIME = 0x3d SCM_WIFI_STATUS = 0x29 + SECCOMP_IOCTL_NOTIF_ADDFD = 0x80182103 + SECCOMP_IOCTL_NOTIF_ID_VALID = 0x80082102 + SECCOMP_IOCTL_NOTIF_SET_FLAGS = 0x80082104 SFD_CLOEXEC = 0x80000 SFD_NONBLOCK = 0x800 SIOCATMARK = 0x8905 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go index 18f2813..07cdfd6 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go @@ -272,6 +272,9 @@ const ( SCM_TIMESTAMPNS = 0x23 SCM_TXTIME = 0x3d SCM_WIFI_STATUS = 0x29 + SECCOMP_IOCTL_NOTIF_ADDFD = 0x40182103 + SECCOMP_IOCTL_NOTIF_ID_VALID = 0x40082102 + SECCOMP_IOCTL_NOTIF_SET_FLAGS = 0x40082104 SFD_CLOEXEC = 0x80000 SFD_NONBLOCK = 0x800 SIOCATMARK = 0x8905 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go b/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go index 11619d4..2f1dd21 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go @@ -344,6 +344,9 @@ const ( SCM_TIMESTAMPNS = 0x23 SCM_TXTIME = 0x3d SCM_WIFI_STATUS = 0x29 + SECCOMP_IOCTL_NOTIF_ADDFD = 0x40182103 + SECCOMP_IOCTL_NOTIF_ID_VALID = 0x40082102 + SECCOMP_IOCTL_NOTIF_SET_FLAGS = 0x40082104 SFD_CLOEXEC = 0x80000 SFD_NONBLOCK = 0x800 SIOCATMARK = 0x8905 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go index 396d994..f40519d 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go @@ -335,6 +335,9 @@ const ( SCM_TIMESTAMPNS = 0x21 SCM_TXTIME = 0x3f SCM_WIFI_STATUS = 0x25 + SECCOMP_IOCTL_NOTIF_ADDFD = 0x80182103 + SECCOMP_IOCTL_NOTIF_ID_VALID = 0x80082102 + SECCOMP_IOCTL_NOTIF_SET_FLAGS = 0x80082104 SFD_CLOEXEC = 0x400000 SFD_NONBLOCK = 0x4000 SF_FP = 0x38 diff --git a/vendor/golang.org/x/sys/unix/zerrors_zos_s390x.go b/vendor/golang.org/x/sys/unix/zerrors_zos_s390x.go index 4dfd2e0..da08b2a 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_zos_s390x.go +++ b/vendor/golang.org/x/sys/unix/zerrors_zos_s390x.go @@ -10,41 +10,99 @@ package unix const ( - BRKINT = 0x0001 - CLOCK_MONOTONIC = 0x1 - CLOCK_PROCESS_CPUTIME_ID = 0x2 - CLOCK_REALTIME = 0x0 - CLOCK_THREAD_CPUTIME_ID = 0x3 - CS8 = 0x0030 - CSIZE = 0x0030 - ECHO = 0x00000008 - ECHONL = 0x00000001 - FD_CLOEXEC = 0x01 - FD_CLOFORK = 0x02 - FNDELAY = 0x04 - F_CLOSFD = 9 - F_CONTROL_CVT = 13 - F_DUPFD = 0 - F_DUPFD2 = 8 - F_GETFD = 1 - F_GETFL = 259 - F_GETLK = 5 - F_GETOWN = 10 - F_OK = 0x0 - F_RDLCK = 1 - F_SETFD = 2 - F_SETFL = 4 - F_SETLK = 6 - F_SETLKW = 7 - F_SETOWN = 11 - F_SETTAG = 12 - F_UNLCK = 3 - F_WRLCK = 2 - FSTYPE_ZFS = 0xe9 
//"Z" - FSTYPE_HFS = 0xc8 //"H" - FSTYPE_NFS = 0xd5 //"N" - FSTYPE_TFS = 0xe3 //"T" - FSTYPE_AUTOMOUNT = 0xc1 //"A" + BRKINT = 0x0001 + CLOCAL = 0x1 + CLOCK_MONOTONIC = 0x1 + CLOCK_PROCESS_CPUTIME_ID = 0x2 + CLOCK_REALTIME = 0x0 + CLOCK_THREAD_CPUTIME_ID = 0x3 + CLONE_NEWIPC = 0x08000000 + CLONE_NEWNET = 0x40000000 + CLONE_NEWNS = 0x00020000 + CLONE_NEWPID = 0x20000000 + CLONE_NEWUTS = 0x04000000 + CLONE_PARENT = 0x00008000 + CS8 = 0x0030 + CSIZE = 0x0030 + ECHO = 0x00000008 + ECHONL = 0x00000001 + EFD_SEMAPHORE = 0x00002000 + EFD_CLOEXEC = 0x00001000 + EFD_NONBLOCK = 0x00000004 + EPOLL_CLOEXEC = 0x00001000 + EPOLL_CTL_ADD = 0 + EPOLL_CTL_MOD = 1 + EPOLL_CTL_DEL = 2 + EPOLLRDNORM = 0x0001 + EPOLLRDBAND = 0x0002 + EPOLLIN = 0x0003 + EPOLLOUT = 0x0004 + EPOLLWRBAND = 0x0008 + EPOLLPRI = 0x0010 + EPOLLERR = 0x0020 + EPOLLHUP = 0x0040 + EPOLLEXCLUSIVE = 0x20000000 + EPOLLONESHOT = 0x40000000 + FD_CLOEXEC = 0x01 + FD_CLOFORK = 0x02 + FD_SETSIZE = 0x800 + FNDELAY = 0x04 + F_CLOSFD = 9 + F_CONTROL_CVT = 13 + F_DUPFD = 0 + F_DUPFD2 = 8 + F_GETFD = 1 + F_GETFL = 259 + F_GETLK = 5 + F_GETOWN = 10 + F_OK = 0x0 + F_RDLCK = 1 + F_SETFD = 2 + F_SETFL = 4 + F_SETLK = 6 + F_SETLKW = 7 + F_SETOWN = 11 + F_SETTAG = 12 + F_UNLCK = 3 + F_WRLCK = 2 + FSTYPE_ZFS = 0xe9 //"Z" + FSTYPE_HFS = 0xc8 //"H" + FSTYPE_NFS = 0xd5 //"N" + FSTYPE_TFS = 0xe3 //"T" + FSTYPE_AUTOMOUNT = 0xc1 //"A" + GRND_NONBLOCK = 1 + GRND_RANDOM = 2 + HUPCL = 0x0100 // Hang up on last close + IN_CLOEXEC = 0x00001000 + IN_NONBLOCK = 0x00000004 + IN_ACCESS = 0x00000001 + IN_MODIFY = 0x00000002 + IN_ATTRIB = 0x00000004 + IN_CLOSE_WRITE = 0x00000008 + IN_CLOSE_NOWRITE = 0x00000010 + IN_OPEN = 0x00000020 + IN_MOVED_FROM = 0x00000040 + IN_MOVED_TO = 0x00000080 + IN_CREATE = 0x00000100 + IN_DELETE = 0x00000200 + IN_DELETE_SELF = 0x00000400 + IN_MOVE_SELF = 0x00000800 + IN_UNMOUNT = 0x00002000 + IN_Q_OVERFLOW = 0x00004000 + IN_IGNORED = 0x00008000 + IN_CLOSE = (IN_CLOSE_WRITE | IN_CLOSE_NOWRITE) + IN_MOVE = (IN_MOVED_FROM | IN_MOVED_TO) + IN_ALL_EVENTS = (IN_ACCESS | IN_MODIFY | IN_ATTRIB | + IN_CLOSE | IN_OPEN | IN_MOVE | + IN_CREATE | IN_DELETE | IN_DELETE_SELF | + IN_MOVE_SELF) + IN_ONLYDIR = 0x01000000 + IN_DONT_FOLLOW = 0x02000000 + IN_EXCL_UNLINK = 0x04000000 + IN_MASK_CREATE = 0x10000000 + IN_MASK_ADD = 0x20000000 + IN_ISDIR = 0x40000000 + IN_ONESHOT = 0x80000000 IP6F_MORE_FRAG = 0x0001 IP6F_OFF_MASK = 0xfff8 IP6F_RESERVED_MASK = 0x0006 @@ -152,10 +210,18 @@ const ( IP_PKTINFO = 101 IP_RECVPKTINFO = 102 IP_TOS = 2 - IP_TTL = 3 + IP_TTL = 14 IP_UNBLOCK_SOURCE = 11 + ICMP6_FILTER = 1 + MCAST_INCLUDE = 0 + MCAST_EXCLUDE = 1 + MCAST_JOIN_GROUP = 40 + MCAST_LEAVE_GROUP = 41 + MCAST_JOIN_SOURCE_GROUP = 42 + MCAST_LEAVE_SOURCE_GROUP = 43 + MCAST_BLOCK_SOURCE = 44 + MCAST_UNBLOCK_SOURCE = 46 ICANON = 0x0010 - ICMP6_FILTER = 0x26 ICRNL = 0x0002 IEXTEN = 0x0020 IGNBRK = 0x0004 @@ -165,10 +231,10 @@ const ( ISTRIP = 0x0080 IXON = 0x0200 IXOFF = 0x0100 - LOCK_SH = 0x1 // Not exist on zOS - LOCK_EX = 0x2 // Not exist on zOS - LOCK_NB = 0x4 // Not exist on zOS - LOCK_UN = 0x8 // Not exist on zOS + LOCK_SH = 0x1 + LOCK_EX = 0x2 + LOCK_NB = 0x4 + LOCK_UN = 0x8 POLLIN = 0x0003 POLLOUT = 0x0004 POLLPRI = 0x0010 @@ -182,15 +248,29 @@ const ( MAP_PRIVATE = 0x1 // changes are private MAP_SHARED = 0x2 // changes are shared MAP_FIXED = 0x4 // place exactly - MCAST_JOIN_GROUP = 40 - MCAST_LEAVE_GROUP = 41 - MCAST_JOIN_SOURCE_GROUP = 42 - MCAST_LEAVE_SOURCE_GROUP = 43 - MCAST_BLOCK_SOURCE = 44 - MCAST_UNBLOCK_SOURCE = 45 + __MAP_MEGA = 0x8 + __MAP_64 = 0x10 + 
MAP_ANON = 0x20 + MAP_ANONYMOUS = 0x20 MS_SYNC = 0x1 // msync - synchronous writes MS_ASYNC = 0x2 // asynchronous writes MS_INVALIDATE = 0x4 // invalidate mappings + MS_BIND = 0x00001000 + MS_MOVE = 0x00002000 + MS_NOSUID = 0x00000002 + MS_PRIVATE = 0x00040000 + MS_REC = 0x00004000 + MS_REMOUNT = 0x00008000 + MS_RDONLY = 0x00000001 + MS_UNBINDABLE = 0x00020000 + MNT_DETACH = 0x00000004 + ZOSDSFS_SUPER_MAGIC = 0x44534653 // zOS DSFS + NFS_SUPER_MAGIC = 0x6969 // NFS + NSFS_MAGIC = 0x6e736673 // PROCNS + PROC_SUPER_MAGIC = 0x9fa0 // proc FS + ZOSTFS_SUPER_MAGIC = 0x544653 // zOS TFS + ZOSUFS_SUPER_MAGIC = 0x554653 // zOS UFS + ZOSZFS_SUPER_MAGIC = 0x5A4653 // zOS ZFS MTM_RDONLY = 0x80000000 MTM_RDWR = 0x40000000 MTM_UMOUNT = 0x10000000 @@ -205,13 +285,20 @@ const ( MTM_REMOUNT = 0x00000100 MTM_NOSECURITY = 0x00000080 NFDBITS = 0x20 + ONLRET = 0x0020 // NL performs CR function O_ACCMODE = 0x03 O_APPEND = 0x08 O_ASYNCSIG = 0x0200 O_CREAT = 0x80 + O_DIRECT = 0x00002000 + O_NOFOLLOW = 0x00004000 + O_DIRECTORY = 0x00008000 + O_PATH = 0x00080000 + O_CLOEXEC = 0x00001000 O_EXCL = 0x40 O_GETFL = 0x0F O_LARGEFILE = 0x0400 + O_NDELAY = 0x4 O_NONBLOCK = 0x04 O_RDONLY = 0x02 O_RDWR = 0x03 @@ -248,6 +335,7 @@ const ( AF_IUCV = 17 AF_LAT = 14 AF_LINK = 18 + AF_LOCAL = AF_UNIX // AF_LOCAL is an alias for AF_UNIX AF_MAX = 30 AF_NBS = 7 AF_NDD = 23 @@ -285,15 +373,33 @@ const ( RLIMIT_AS = 5 RLIMIT_NOFILE = 6 RLIMIT_MEMLIMIT = 7 + RLIMIT_MEMLOCK = 0x8 RLIM_INFINITY = 2147483647 + SCHED_FIFO = 0x2 + SCM_CREDENTIALS = 0x2 SCM_RIGHTS = 0x01 SF_CLOSE = 0x00000002 SF_REUSE = 0x00000001 + SHM_RND = 0x2 + SHM_RDONLY = 0x1 + SHMLBA = 0x1000 + IPC_STAT = 0x3 + IPC_SET = 0x2 + IPC_RMID = 0x1 + IPC_PRIVATE = 0x0 + IPC_CREAT = 0x1000000 + __IPC_MEGA = 0x4000000 + __IPC_SHAREAS = 0x20000000 + __IPC_BELOWBAR = 0x10000000 + IPC_EXCL = 0x2000000 + __IPC_GIGA = 0x8000000 SHUT_RD = 0 SHUT_RDWR = 2 SHUT_WR = 1 + SOCK_CLOEXEC = 0x00001000 SOCK_CONN_DGRAM = 6 SOCK_DGRAM = 2 + SOCK_NONBLOCK = 0x800 SOCK_RAW = 3 SOCK_RDM = 4 SOCK_SEQPACKET = 5 @@ -378,8 +484,6 @@ const ( S_IFMST = 0x00FF0000 TCP_KEEPALIVE = 0x8 TCP_NODELAY = 0x1 - TCP_INFO = 0xb - TCP_USER_TIMEOUT = 0x1 TIOCGWINSZ = 0x4008a368 TIOCSWINSZ = 0x8008a367 TIOCSBRK = 0x2000a77b @@ -427,7 +531,10 @@ const ( VSUSP = 9 VTIME = 10 WCONTINUED = 0x4 + WEXITED = 0x8 WNOHANG = 0x1 + WNOWAIT = 0x20 + WSTOPPED = 0x10 WUNTRACED = 0x2 _BPX_SWAP = 1 _BPX_NONSWAP = 2 @@ -452,8 +559,28 @@ const ( MADV_FREE = 15 // for Linux compatibility -- no zos semantics MADV_WIPEONFORK = 16 // for Linux compatibility -- no zos semantics MADV_KEEPONFORK = 17 // for Linux compatibility -- no zos semantics - AT_SYMLINK_NOFOLLOW = 1 // for Unix compatibility -- no zos semantics - AT_FDCWD = 2 // for Unix compatibility -- no zos semantics + AT_SYMLINK_FOLLOW = 0x400 + AT_SYMLINK_NOFOLLOW = 0x100 + XATTR_CREATE = 0x1 + XATTR_REPLACE = 0x2 + P_PID = 0 + P_PGID = 1 + P_ALL = 2 + PR_SET_NAME = 15 + PR_GET_NAME = 16 + PR_SET_NO_NEW_PRIVS = 38 + PR_GET_NO_NEW_PRIVS = 39 + PR_SET_DUMPABLE = 4 + PR_GET_DUMPABLE = 3 + PR_SET_PDEATHSIG = 1 + PR_GET_PDEATHSIG = 2 + PR_SET_CHILD_SUBREAPER = 36 + PR_GET_CHILD_SUBREAPER = 37 + AT_FDCWD = -100 + AT_EACCESS = 0x200 + AT_EMPTY_PATH = 0x1000 + AT_REMOVEDIR = 0x200 + RENAME_NOREPLACE = 1 << 0 ) const ( @@ -476,6 +603,7 @@ const ( EMLINK = Errno(125) ENAMETOOLONG = Errno(126) ENFILE = Errno(127) + ENOATTR = Errno(265) ENODEV = Errno(128) ENOENT = Errno(129) ENOEXEC = Errno(130) @@ -700,7 +828,7 @@ var errorList = [...]struct { {145, "EDC5145I", "The parameter list is too 
long, or the message to receive was too large for the buffer."}, {146, "EDC5146I", "Too many levels of symbolic links."}, {147, "EDC5147I", "Illegal byte sequence."}, - {148, "", ""}, + {148, "EDC5148I", "The named attribute or data not available."}, {149, "EDC5149I", "Value Overflow Error."}, {150, "EDC5150I", "UNIX System Services is not active."}, {151, "EDC5151I", "Dynamic allocation error."}, @@ -743,6 +871,7 @@ var errorList = [...]struct { {259, "EDC5259I", "A CUN_RS_NO_CONVERSION error was issued by Unicode Services."}, {260, "EDC5260I", "A CUN_RS_TABLE_NOT_ALIGNED error was issued by Unicode Services."}, {262, "EDC5262I", "An iconv() function encountered an unexpected error while using Unicode Services."}, + {265, "EDC5265I", "The named attribute not available."}, {1000, "EDC8000I", "A bad socket-call constant was found in the IUCV header."}, {1001, "EDC8001I", "An error was found in the IUCV header."}, {1002, "EDC8002I", "A socket descriptor is out of range."}, diff --git a/vendor/golang.org/x/sys/unix/zsymaddr_zos_s390x.s b/vendor/golang.org/x/sys/unix/zsymaddr_zos_s390x.s new file mode 100644 index 0000000..b77ff5d --- /dev/null +++ b/vendor/golang.org/x/sys/unix/zsymaddr_zos_s390x.s @@ -0,0 +1,364 @@ +// go run mksyscall_zos_s390x.go -o_sysnum zsysnum_zos_s390x.go -o_syscall zsyscall_zos_s390x.go -i_syscall syscall_zos_s390x.go -o_asm zsymaddr_zos_s390x.s +// Code generated by the command above; see README.md. DO NOT EDIT. + +//go:build zos && s390x +#include "textflag.h" + +// provide the address of function variable to be fixed up. + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_FlistxattrAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Flistxattr(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_FremovexattrAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Fremovexattr(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_FgetxattrAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Fgetxattr(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_FsetxattrAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Fsetxattr(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_accept4Addr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·accept4(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_RemovexattrAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Removexattr(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_Dup3Addr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Dup3(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_DirfdAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Dirfd(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_EpollCreateAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·EpollCreate(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_EpollCreate1Addr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·EpollCreate1(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_EpollCtlAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·EpollCtl(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT 
THE TOP; DO NOT EDIT + +TEXT ·get_EpollPwaitAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·EpollPwait(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_EpollWaitAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·EpollWait(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_EventfdAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Eventfd(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_FaccessatAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Faccessat(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_FchmodatAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Fchmodat(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_FchownatAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Fchownat(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_FdatasyncAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Fdatasync(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_fstatatAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·fstatat(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_LgetxattrAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Lgetxattr(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_LsetxattrAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Lsetxattr(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_FstatfsAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Fstatfs(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_FutimesAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Futimes(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_FutimesatAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Futimesat(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_GetrandomAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Getrandom(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_InotifyInitAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·InotifyInit(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_InotifyInit1Addr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·InotifyInit1(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_InotifyAddWatchAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·InotifyAddWatch(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_InotifyRmWatchAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·InotifyRmWatch(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_ListxattrAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Listxattr(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_LlistxattrAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Llistxattr(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT 
EDIT + +TEXT ·get_LremovexattrAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Lremovexattr(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_LutimesAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Lutimes(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_StatfsAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Statfs(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_SyncfsAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Syncfs(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_UnshareAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Unshare(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_LinkatAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Linkat(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_MkdiratAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Mkdirat(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_MknodatAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Mknodat(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_PivotRootAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·PivotRoot(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_PrctlAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Prctl(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_PrlimitAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Prlimit(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_RenameatAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Renameat(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_Renameat2Addr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Renameat2(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_SethostnameAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Sethostname(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_SetnsAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Setns(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_SymlinkatAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Symlinkat(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_UnlinkatAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·Unlinkat(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_openatAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·openat(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_openat2Addr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·openat2(SB), R8 + MOVD R8, ret+0(FP) + RET + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +TEXT ·get_utimensatAddr(SB), NOSPLIT|NOFRAME, $0-8 + MOVD $·utimensat(SB), R8 + MOVD R8, ret+0(FP) + RET diff --git a/vendor/golang.org/x/sys/unix/zsyscall_aix_ppc64_gccgo.go b/vendor/golang.org/x/sys/unix/zsyscall_aix_ppc64_gccgo.go index d46b706..0a87450 
100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_aix_ppc64_gccgo.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_aix_ppc64_gccgo.go @@ -127,7 +127,6 @@ uintptr_t mmap64(uintptr_t, uintptr_t, int, int, int, long long); */ import "C" - import ( "syscall" "unsafe" diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux.go b/vendor/golang.org/x/sys/unix/zsyscall_linux.go index 1488d27..87d8612 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux.go @@ -906,6 +906,16 @@ func Fspick(dirfd int, pathName string, flags int) (fd int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func fsconfig(fd int, cmd uint, key *byte, value *byte, aux int) (err error) { + _, _, e1 := Syscall6(SYS_FSCONFIG, uintptr(fd), uintptr(cmd), uintptr(unsafe.Pointer(key)), uintptr(unsafe.Pointer(value)), uintptr(aux), 0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Getdents(fd int, buf []byte) (n int, err error) { var _p0 unsafe.Pointer if len(buf) > 0 { diff --git a/vendor/golang.org/x/sys/unix/zsyscall_zos_s390x.go b/vendor/golang.org/x/sys/unix/zsyscall_zos_s390x.go index 94f0112..7ccf66b 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_zos_s390x.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_zos_s390x.go @@ -1,4 +1,4 @@ -// go run mksyscall.go -tags zos,s390x syscall_zos_s390x.go +// go run mksyscall_zos_s390x.go -o_sysnum zsysnum_zos_s390x.go -o_syscall zsyscall_zos_s390x.go -i_syscall syscall_zos_s390x.go -o_asm zsymaddr_zos_s390x.s // Code generated by the command above; see README.md. DO NOT EDIT. //go:build zos && s390x @@ -6,22 +6,105 @@ package unix import ( + "runtime" + "syscall" "unsafe" ) +var _ syscall.Errno + // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func fcntl(fd int, cmd int, arg int) (val int, err error) { - r0, _, e1 := syscall_syscall(SYS_FCNTL, uintptr(fd), uintptr(cmd), uintptr(arg)) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_FCNTL<<4, uintptr(fd), uintptr(cmd), uintptr(arg)) + runtime.ExitSyscall() val = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func impl_Flistxattr(fd int, dest []byte) (sz int, err error) { + var _p0 unsafe.Pointer + if len(dest) > 0 { + _p0 = unsafe.Pointer(&dest[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___FLISTXATTR_A<<4, uintptr(fd), uintptr(_p0), uintptr(len(dest))) + runtime.ExitSyscall() + sz = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_FlistxattrAddr() *(func(fd int, dest []byte) (sz int, err error)) + +var Flistxattr = enter_Flistxattr + +func enter_Flistxattr(fd int, dest []byte) (sz int, err error) { + funcref := get_FlistxattrAddr() + if funcptrtest(GetZosLibVec()+SYS___FLISTXATTR_A<<4, "") == 0 { + *funcref = impl_Flistxattr + } else { + *funcref = error_Flistxattr + } + return (*funcref)(fd, dest) +} + +func error_Flistxattr(fd int, dest []byte) (sz int, err error) { + sz = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_Fremovexattr(fd int, attr string) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(attr) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := 
CallLeFuncWithErr(GetZosLibVec()+SYS___FREMOVEXATTR_A<<4, uintptr(fd), uintptr(unsafe.Pointer(_p0))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_FremovexattrAddr() *(func(fd int, attr string) (err error)) + +var Fremovexattr = enter_Fremovexattr + +func enter_Fremovexattr(fd int, attr string) (err error) { + funcref := get_FremovexattrAddr() + if funcptrtest(GetZosLibVec()+SYS___FREMOVEXATTR_A<<4, "") == 0 { + *funcref = impl_Fremovexattr + } else { + *funcref = error_Fremovexattr + } + return (*funcref)(fd, attr) +} + +func error_Fremovexattr(fd int, attr string) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func read(fd int, p []byte) (n int, err error) { var _p0 unsafe.Pointer if len(p) > 0 { @@ -29,10 +112,12 @@ func read(fd int, p []byte) (n int, err error) { } else { _p0 = unsafe.Pointer(&_zero) } - r0, _, e1 := syscall_syscall(SYS_READ, uintptr(fd), uintptr(_p0), uintptr(len(p))) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_READ<<4, uintptr(fd), uintptr(_p0), uintptr(len(p))) + runtime.ExitSyscall() n = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -46,31 +131,159 @@ func write(fd int, p []byte) (n int, err error) { } else { _p0 = unsafe.Pointer(&_zero) } - r0, _, e1 := syscall_syscall(SYS_WRITE, uintptr(fd), uintptr(_p0), uintptr(len(p))) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_WRITE<<4, uintptr(fd), uintptr(_p0), uintptr(len(p))) + runtime.ExitSyscall() n = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func impl_Fgetxattr(fd int, attr string, dest []byte) (sz int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(attr) + if err != nil { + return + } + var _p1 unsafe.Pointer + if len(dest) > 0 { + _p1 = unsafe.Pointer(&dest[0]) + } else { + _p1 = unsafe.Pointer(&_zero) + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___FGETXATTR_A<<4, uintptr(fd), uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(dest))) + runtime.ExitSyscall() + sz = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_FgetxattrAddr() *(func(fd int, attr string, dest []byte) (sz int, err error)) + +var Fgetxattr = enter_Fgetxattr + +func enter_Fgetxattr(fd int, attr string, dest []byte) (sz int, err error) { + funcref := get_FgetxattrAddr() + if funcptrtest(GetZosLibVec()+SYS___FGETXATTR_A<<4, "") == 0 { + *funcref = impl_Fgetxattr + } else { + *funcref = error_Fgetxattr + } + return (*funcref)(fd, attr, dest) +} + +func error_Fgetxattr(fd int, attr string, dest []byte) (sz int, err error) { + sz = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_Fsetxattr(fd int, attr string, data []byte, flag int) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(attr) + if err != nil { + return + } + var _p1 unsafe.Pointer + if len(data) > 0 { + _p1 = unsafe.Pointer(&data[0]) + } else { + _p1 = unsafe.Pointer(&_zero) + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___FSETXATTR_A<<4, uintptr(fd), uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(data)), uintptr(flag)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + 
} + return +} + +//go:nosplit +func get_FsetxattrAddr() *(func(fd int, attr string, data []byte, flag int) (err error)) + +var Fsetxattr = enter_Fsetxattr + +func enter_Fsetxattr(fd int, attr string, data []byte, flag int) (err error) { + funcref := get_FsetxattrAddr() + if funcptrtest(GetZosLibVec()+SYS___FSETXATTR_A<<4, "") == 0 { + *funcref = impl_Fsetxattr + } else { + *funcref = error_Fsetxattr + } + return (*funcref)(fd, attr, data, flag) +} + +func error_Fsetxattr(fd int, attr string, data []byte, flag int) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) { - r0, _, e1 := syscall_syscall(SYS___ACCEPT_A, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___ACCEPT_A<<4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) + runtime.ExitSyscall() fd = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func impl_accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___ACCEPT4_A<<4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags)) + runtime.ExitSyscall() + fd = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_accept4Addr() *(func(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error)) + +var accept4 = enter_accept4 + +func enter_accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) { + funcref := get_accept4Addr() + if funcptrtest(GetZosLibVec()+SYS___ACCEPT4_A<<4, "") == 0 { + *funcref = impl_accept4 + } else { + *funcref = error_accept4 + } + return (*funcref)(s, rsa, addrlen, flags) +} + +func error_accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) { + fd = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) { - _, _, e1 := syscall_syscall(SYS___BIND_A, uintptr(s), uintptr(addr), uintptr(addrlen)) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___BIND_A<<4, uintptr(s), uintptr(addr), uintptr(addrlen)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -78,9 +291,11 @@ func bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) { - _, _, e1 := syscall_syscall(SYS___CONNECT_A, uintptr(s), uintptr(addr), uintptr(addrlen)) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___CONNECT_A<<4, uintptr(s), uintptr(addr), uintptr(addrlen)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -88,10 +303,10 @@ func connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func getgroups(n int, list *_Gid_t) (nn int, err error) { - r0, _, e1 := syscall_rawsyscall(SYS_GETGROUPS, uintptr(n), 
uintptr(unsafe.Pointer(list)), 0) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_GETGROUPS<<4, uintptr(n), uintptr(unsafe.Pointer(list))) nn = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -99,9 +314,9 @@ func getgroups(n int, list *_Gid_t) (nn int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func setgroups(n int, list *_Gid_t) (err error) { - _, _, e1 := syscall_rawsyscall(SYS_SETGROUPS, uintptr(n), uintptr(unsafe.Pointer(list)), 0) - if e1 != 0 { - err = errnoErr(e1) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SETGROUPS<<4, uintptr(n), uintptr(unsafe.Pointer(list))) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -109,9 +324,11 @@ func setgroups(n int, list *_Gid_t) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func getsockopt(s int, level int, name int, val unsafe.Pointer, vallen *_Socklen) (err error) { - _, _, e1 := syscall_syscall6(SYS_GETSOCKOPT, uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(unsafe.Pointer(vallen)), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_GETSOCKOPT<<4, uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(unsafe.Pointer(vallen))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -119,9 +336,11 @@ func getsockopt(s int, level int, name int, val unsafe.Pointer, vallen *_Socklen // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func setsockopt(s int, level int, name int, val unsafe.Pointer, vallen uintptr) (err error) { - _, _, e1 := syscall_syscall6(SYS_SETSOCKOPT, uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(vallen), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SETSOCKOPT<<4, uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(vallen)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -129,10 +348,10 @@ func setsockopt(s int, level int, name int, val unsafe.Pointer, vallen uintptr) // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func socket(domain int, typ int, proto int) (fd int, err error) { - r0, _, e1 := syscall_rawsyscall(SYS_SOCKET, uintptr(domain), uintptr(typ), uintptr(proto)) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SOCKET<<4, uintptr(domain), uintptr(typ), uintptr(proto)) fd = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -140,9 +359,9 @@ func socket(domain int, typ int, proto int) (fd int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func socketpair(domain int, typ int, proto int, fd *[2]int32) (err error) { - _, _, e1 := syscall_rawsyscall6(SYS_SOCKETPAIR, uintptr(domain), uintptr(typ), uintptr(proto), uintptr(unsafe.Pointer(fd)), 0, 0) - if e1 != 0 { - err = errnoErr(e1) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SOCKETPAIR<<4, uintptr(domain), uintptr(typ), uintptr(proto), uintptr(unsafe.Pointer(fd))) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -150,9 +369,9 @@ func socketpair(domain int, typ int, proto int, fd *[2]int32) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func getpeername(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error) { - _, _, e1 := syscall_rawsyscall(SYS___GETPEERNAME_A, uintptr(fd), 
uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) - if e1 != 0 { - err = errnoErr(e1) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___GETPEERNAME_A<<4, uintptr(fd), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -160,15 +379,57 @@ func getpeername(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func getsockname(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error) { - _, _, e1 := syscall_rawsyscall(SYS___GETSOCKNAME_A, uintptr(fd), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) - if e1 != 0 { - err = errnoErr(e1) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___GETSOCKNAME_A<<4, uintptr(fd), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func impl_Removexattr(path string, attr string) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(attr) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___REMOVEXATTR_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_RemovexattrAddr() *(func(path string, attr string) (err error)) + +var Removexattr = enter_Removexattr + +func enter_Removexattr(path string, attr string) (err error) { + funcref := get_RemovexattrAddr() + if funcptrtest(GetZosLibVec()+SYS___REMOVEXATTR_A<<4, "") == 0 { + *funcref = impl_Removexattr + } else { + *funcref = error_Removexattr + } + return (*funcref)(path, attr) +} + +func error_Removexattr(path string, attr string) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func recvfrom(fd int, p []byte, flags int, from *RawSockaddrAny, fromlen *_Socklen) (n int, err error) { var _p0 unsafe.Pointer if len(p) > 0 { @@ -176,10 +437,12 @@ func recvfrom(fd int, p []byte, flags int, from *RawSockaddrAny, fromlen *_Sockl } else { _p0 = unsafe.Pointer(&_zero) } - r0, _, e1 := syscall_syscall6(SYS___RECVFROM_A, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(flags), uintptr(unsafe.Pointer(from)), uintptr(unsafe.Pointer(fromlen))) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___RECVFROM_A<<4, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(flags), uintptr(unsafe.Pointer(from)), uintptr(unsafe.Pointer(fromlen))) + runtime.ExitSyscall() n = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -193,9 +456,11 @@ func sendto(s int, buf []byte, flags int, to unsafe.Pointer, addrlen _Socklen) ( } else { _p0 = unsafe.Pointer(&_zero) } - _, _, e1 := syscall_syscall6(SYS___SENDTO_A, uintptr(s), uintptr(_p0), uintptr(len(buf)), uintptr(flags), uintptr(to), uintptr(addrlen)) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___SENDTO_A<<4, uintptr(s), uintptr(_p0), uintptr(len(buf)), uintptr(flags), uintptr(to), uintptr(addrlen)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -203,10 +468,12 @@ func sendto(s int, buf []byte, flags int, to unsafe.Pointer, addrlen _Socklen) ( // 
THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func recvmsg(s int, msg *Msghdr, flags int) (n int, err error) { - r0, _, e1 := syscall_syscall(SYS___RECVMSG_A, uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags)) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___RECVMSG_A<<4, uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags)) + runtime.ExitSyscall() n = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -214,10 +481,12 @@ func recvmsg(s int, msg *Msghdr, flags int) (n int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func sendmsg(s int, msg *Msghdr, flags int) (n int, err error) { - r0, _, e1 := syscall_syscall(SYS___SENDMSG_A, uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags)) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___SENDMSG_A<<4, uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags)) + runtime.ExitSyscall() n = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -225,10 +494,12 @@ func sendmsg(s int, msg *Msghdr, flags int) (n int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func mmap(addr uintptr, length uintptr, prot int, flag int, fd int, pos int64) (ret uintptr, err error) { - r0, _, e1 := syscall_syscall6(SYS_MMAP, uintptr(addr), uintptr(length), uintptr(prot), uintptr(flag), uintptr(fd), uintptr(pos)) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_MMAP<<4, uintptr(addr), uintptr(length), uintptr(prot), uintptr(flag), uintptr(fd), uintptr(pos)) + runtime.ExitSyscall() ret = uintptr(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -236,9 +507,11 @@ func mmap(addr uintptr, length uintptr, prot int, flag int, fd int, pos int64) ( // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func munmap(addr uintptr, length uintptr) (err error) { - _, _, e1 := syscall_syscall(SYS_MUNMAP, uintptr(addr), uintptr(length), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_MUNMAP<<4, uintptr(addr), uintptr(length)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -246,9 +519,11 @@ func munmap(addr uintptr, length uintptr) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func ioctl(fd int, req int, arg uintptr) (err error) { - _, _, e1 := syscall_syscall(SYS_IOCTL, uintptr(fd), uintptr(req), uintptr(arg)) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_IOCTL<<4, uintptr(fd), uintptr(req), uintptr(arg)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -256,9 +531,62 @@ func ioctl(fd int, req int, arg uintptr) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func ioctlPtr(fd int, req int, arg unsafe.Pointer) (err error) { - _, _, e1 := syscall_syscall(SYS_IOCTL, uintptr(fd), uintptr(req), uintptr(arg)) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_IOCTL<<4, uintptr(fd), uintptr(req), uintptr(arg)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func shmat(id int, addr uintptr, flag int) 
(ret uintptr, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SHMAT<<4, uintptr(id), uintptr(addr), uintptr(flag)) + runtime.ExitSyscall() + ret = uintptr(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func shmctl(id int, cmd int, buf *SysvShmDesc) (result int, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SHMCTL64<<4, uintptr(id), uintptr(cmd), uintptr(unsafe.Pointer(buf))) + runtime.ExitSyscall() + result = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func shmdt(addr uintptr) (err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SHMDT<<4, uintptr(addr)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func shmget(key int, size int, flag int) (id int, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SHMGET<<4, uintptr(key), uintptr(size), uintptr(flag)) + runtime.ExitSyscall() + id = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -271,9 +599,11 @@ func Access(path string, mode uint32) (err error) { if err != nil { return } - _, _, e1 := syscall_syscall(SYS___ACCESS_A, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___ACCESS_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(mode)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -286,9 +616,11 @@ func Chdir(path string) (err error) { if err != nil { return } - _, _, e1 := syscall_syscall(SYS___CHDIR_A, uintptr(unsafe.Pointer(_p0)), 0, 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___CHDIR_A<<4, uintptr(unsafe.Pointer(_p0))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -301,9 +633,11 @@ func Chown(path string, uid int, gid int) (err error) { if err != nil { return } - _, _, e1 := syscall_syscall(SYS___CHOWN_A, uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid)) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___CHOWN_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -316,9 +650,11 @@ func Chmod(path string, mode uint32) (err error) { if err != nil { return } - _, _, e1 := syscall_syscall(SYS___CHMOD_A, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___CHMOD_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(mode)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -331,10 +667,12 @@ func Creat(path string, mode uint32) (fd int, err error) { if err != nil { return } - r0, _, e1 := syscall_syscall(SYS___CREAT_A, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___CREAT_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(mode)) + runtime.ExitSyscall() fd = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = 
errnoErr2(e1, e2) } return } @@ -342,10 +680,12 @@ func Creat(path string, mode uint32) (fd int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Dup(oldfd int) (fd int, err error) { - r0, _, e1 := syscall_syscall(SYS_DUP, uintptr(oldfd), 0, 0) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_DUP<<4, uintptr(oldfd)) + runtime.ExitSyscall() fd = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -353,42 +693,359 @@ func Dup(oldfd int) (fd int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Dup2(oldfd int, newfd int) (err error) { - _, _, e1 := syscall_syscall(SYS_DUP2, uintptr(oldfd), uintptr(newfd), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_DUP2<<4, uintptr(oldfd), uintptr(newfd)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Errno2() (er2 int) { - uer2, _, _ := syscall_syscall(SYS___ERRNO2, 0, 0, 0) - er2 = int(uer2) +func impl_Dup3(oldfd int, newfd int, flags int) (err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_DUP3<<4, uintptr(oldfd), uintptr(newfd), uintptr(flags)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_Dup3Addr() *(func(oldfd int, newfd int, flags int) (err error)) + +var Dup3 = enter_Dup3 + +func enter_Dup3(oldfd int, newfd int, flags int) (err error) { + funcref := get_Dup3Addr() + if funcptrtest(GetZosLibVec()+SYS_DUP3<<4, "") == 0 { + *funcref = impl_Dup3 + } else { + *funcref = error_Dup3 + } + return (*funcref)(oldfd, newfd, flags) +} + +func error_Dup3(oldfd int, newfd int, flags int) (err error) { + err = ENOSYS return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Err2ad() (eadd *int) { - ueadd, _, _ := syscall_syscall(SYS___ERR2AD, 0, 0, 0) - eadd = (*int)(unsafe.Pointer(ueadd)) +func impl_Dirfd(dirp uintptr) (fd int, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_DIRFD<<4, uintptr(dirp)) + runtime.ExitSyscall() + fd = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_DirfdAddr() *(func(dirp uintptr) (fd int, err error)) + +var Dirfd = enter_Dirfd + +func enter_Dirfd(dirp uintptr) (fd int, err error) { + funcref := get_DirfdAddr() + if funcptrtest(GetZosLibVec()+SYS_DIRFD<<4, "") == 0 { + *funcref = impl_Dirfd + } else { + *funcref = error_Dirfd + } + return (*funcref)(dirp) +} + +func error_Dirfd(dirp uintptr) (fd int, err error) { + fd = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_EpollCreate(size int) (fd int, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_EPOLL_CREATE<<4, uintptr(size)) + runtime.ExitSyscall() + fd = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_EpollCreateAddr() *(func(size int) (fd int, err error)) + +var EpollCreate = enter_EpollCreate + +func enter_EpollCreate(size int) (fd int, err error) { + funcref := get_EpollCreateAddr() + if funcptrtest(GetZosLibVec()+SYS_EPOLL_CREATE<<4, "") == 0 { + *funcref = impl_EpollCreate + } else { + *funcref = error_EpollCreate + } + return (*funcref)(size) +} + +func error_EpollCreate(size 
int) (fd int, err error) { + fd = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_EpollCreate1(flags int) (fd int, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_EPOLL_CREATE1<<4, uintptr(flags)) + runtime.ExitSyscall() + fd = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_EpollCreate1Addr() *(func(flags int) (fd int, err error)) + +var EpollCreate1 = enter_EpollCreate1 + +func enter_EpollCreate1(flags int) (fd int, err error) { + funcref := get_EpollCreate1Addr() + if funcptrtest(GetZosLibVec()+SYS_EPOLL_CREATE1<<4, "") == 0 { + *funcref = impl_EpollCreate1 + } else { + *funcref = error_EpollCreate1 + } + return (*funcref)(flags) +} + +func error_EpollCreate1(flags int) (fd int, err error) { + fd = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_EpollCtl(epfd int, op int, fd int, event *EpollEvent) (err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_EPOLL_CTL<<4, uintptr(epfd), uintptr(op), uintptr(fd), uintptr(unsafe.Pointer(event))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_EpollCtlAddr() *(func(epfd int, op int, fd int, event *EpollEvent) (err error)) + +var EpollCtl = enter_EpollCtl + +func enter_EpollCtl(epfd int, op int, fd int, event *EpollEvent) (err error) { + funcref := get_EpollCtlAddr() + if funcptrtest(GetZosLibVec()+SYS_EPOLL_CTL<<4, "") == 0 { + *funcref = impl_EpollCtl + } else { + *funcref = error_EpollCtl + } + return (*funcref)(epfd, op, fd, event) +} + +func error_EpollCtl(epfd int, op int, fd int, event *EpollEvent) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_EpollPwait(epfd int, events []EpollEvent, msec int, sigmask *int) (n int, err error) { + var _p0 unsafe.Pointer + if len(events) > 0 { + _p0 = unsafe.Pointer(&events[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_EPOLL_PWAIT<<4, uintptr(epfd), uintptr(_p0), uintptr(len(events)), uintptr(msec), uintptr(unsafe.Pointer(sigmask))) + runtime.ExitSyscall() + n = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_EpollPwaitAddr() *(func(epfd int, events []EpollEvent, msec int, sigmask *int) (n int, err error)) + +var EpollPwait = enter_EpollPwait + +func enter_EpollPwait(epfd int, events []EpollEvent, msec int, sigmask *int) (n int, err error) { + funcref := get_EpollPwaitAddr() + if funcptrtest(GetZosLibVec()+SYS_EPOLL_PWAIT<<4, "") == 0 { + *funcref = impl_EpollPwait + } else { + *funcref = error_EpollPwait + } + return (*funcref)(epfd, events, msec, sigmask) +} + +func error_EpollPwait(epfd int, events []EpollEvent, msec int, sigmask *int) (n int, err error) { + n = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_EpollWait(epfd int, events []EpollEvent, msec int) (n int, err error) { + var _p0 unsafe.Pointer + if len(events) > 0 { + _p0 = unsafe.Pointer(&events[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_EPOLL_WAIT<<4, uintptr(epfd), uintptr(_p0), uintptr(len(events)), uintptr(msec)) + runtime.ExitSyscall() + n = int(r0) + if int64(r0) == -1 
{ + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_EpollWaitAddr() *(func(epfd int, events []EpollEvent, msec int) (n int, err error)) + +var EpollWait = enter_EpollWait + +func enter_EpollWait(epfd int, events []EpollEvent, msec int) (n int, err error) { + funcref := get_EpollWaitAddr() + if funcptrtest(GetZosLibVec()+SYS_EPOLL_WAIT<<4, "") == 0 { + *funcref = impl_EpollWait + } else { + *funcref = error_EpollWait + } + return (*funcref)(epfd, events, msec) +} + +func error_EpollWait(epfd int, events []EpollEvent, msec int) (n int, err error) { + n = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Errno2() (er2 int) { + runtime.EnterSyscall() + r0, _, _ := CallLeFuncWithErr(GetZosLibVec() + SYS___ERRNO2<<4) + runtime.ExitSyscall() + er2 = int(r0) + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_Eventfd(initval uint, flags int) (fd int, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_EVENTFD<<4, uintptr(initval), uintptr(flags)) + runtime.ExitSyscall() + fd = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_EventfdAddr() *(func(initval uint, flags int) (fd int, err error)) + +var Eventfd = enter_Eventfd + +func enter_Eventfd(initval uint, flags int) (fd int, err error) { + funcref := get_EventfdAddr() + if funcptrtest(GetZosLibVec()+SYS_EVENTFD<<4, "") == 0 { + *funcref = impl_Eventfd + } else { + *funcref = error_Eventfd + } + return (*funcref)(initval, flags) +} + +func error_Eventfd(initval uint, flags int) (fd int, err error) { + fd = -1 + err = ENOSYS return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Exit(code int) { - syscall_syscall(SYS_EXIT, uintptr(code), 0, 0) + runtime.EnterSyscall() + CallLeFuncWithErr(GetZosLibVec()+SYS_EXIT<<4, uintptr(code)) + runtime.ExitSyscall() + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_Faccessat(dirfd int, path string, mode uint32, flags int) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___FACCESSAT_A<<4, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(flags)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_FaccessatAddr() *(func(dirfd int, path string, mode uint32, flags int) (err error)) + +var Faccessat = enter_Faccessat + +func enter_Faccessat(dirfd int, path string, mode uint32, flags int) (err error) { + funcref := get_FaccessatAddr() + if funcptrtest(GetZosLibVec()+SYS___FACCESSAT_A<<4, "") == 0 { + *funcref = impl_Faccessat + } else { + *funcref = error_Faccessat + } + return (*funcref)(dirfd, path, mode, flags) +} + +func error_Faccessat(dirfd int, path string, mode uint32, flags int) (err error) { + err = ENOSYS return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fchdir(fd int) (err error) { - _, _, e1 := syscall_syscall(SYS_FCHDIR, uintptr(fd), 0, 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_FCHDIR<<4, uintptr(fd)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -396,50 +1053,333 @@ func Fchdir(fd int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fchmod(fd int, mode 
uint32) (err error) { - _, _, e1 := syscall_syscall(SYS_FCHMOD, uintptr(fd), uintptr(mode), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_FCHMOD<<4, uintptr(fd), uintptr(mode)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func impl_Fchmodat(dirfd int, path string, mode uint32, flags int) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___FCHMODAT_A<<4, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(flags)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_FchmodatAddr() *(func(dirfd int, path string, mode uint32, flags int) (err error)) + +var Fchmodat = enter_Fchmodat + +func enter_Fchmodat(dirfd int, path string, mode uint32, flags int) (err error) { + funcref := get_FchmodatAddr() + if funcptrtest(GetZosLibVec()+SYS___FCHMODAT_A<<4, "") == 0 { + *funcref = impl_Fchmodat + } else { + *funcref = error_Fchmodat + } + return (*funcref)(dirfd, path, mode, flags) +} + +func error_Fchmodat(dirfd int, path string, mode uint32, flags int) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Fchown(fd int, uid int, gid int) (err error) { - _, _, e1 := syscall_syscall(SYS_FCHOWN, uintptr(fd), uintptr(uid), uintptr(gid)) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_FCHOWN<<4, uintptr(fd), uintptr(uid), uintptr(gid)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func impl_Fchownat(fd int, path string, uid int, gid int, flags int) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___FCHOWNAT_A<<4, uintptr(fd), uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid), uintptr(flags)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_FchownatAddr() *(func(fd int, path string, uid int, gid int, flags int) (err error)) + +var Fchownat = enter_Fchownat + +func enter_Fchownat(fd int, path string, uid int, gid int, flags int) (err error) { + funcref := get_FchownatAddr() + if funcptrtest(GetZosLibVec()+SYS___FCHOWNAT_A<<4, "") == 0 { + *funcref = impl_Fchownat + } else { + *funcref = error_Fchownat + } + return (*funcref)(fd, path, uid, gid, flags) +} + +func error_Fchownat(fd int, path string, uid int, gid int, flags int) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func FcntlInt(fd uintptr, cmd int, arg int) (retval int, err error) { - r0, _, e1 := syscall_syscall(SYS_FCNTL, uintptr(fd), uintptr(cmd), uintptr(arg)) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_FCNTL<<4, uintptr(fd), uintptr(cmd), uintptr(arg)) + runtime.ExitSyscall() retval = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func impl_Fdatasync(fd int) (err error) { + runtime.EnterSyscall() + r0, e2, e1 := 
CallLeFuncWithErr(GetZosLibVec()+SYS_FDATASYNC<<4, uintptr(fd)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_FdatasyncAddr() *(func(fd int) (err error)) + +var Fdatasync = enter_Fdatasync + +func enter_Fdatasync(fd int) (err error) { + funcref := get_FdatasyncAddr() + if funcptrtest(GetZosLibVec()+SYS_FDATASYNC<<4, "") == 0 { + *funcref = impl_Fdatasync + } else { + *funcref = error_Fdatasync + } + return (*funcref)(fd) +} + +func error_Fdatasync(fd int) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func fstat(fd int, stat *Stat_LE_t) (err error) { - _, _, e1 := syscall_syscall(SYS_FSTAT, uintptr(fd), uintptr(unsafe.Pointer(stat)), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_FSTAT<<4, uintptr(fd), uintptr(unsafe.Pointer(stat))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func impl_fstatat(dirfd int, path string, stat *Stat_LE_t, flags int) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___FSTATAT_A<<4, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), uintptr(flags)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_fstatatAddr() *(func(dirfd int, path string, stat *Stat_LE_t, flags int) (err error)) + +var fstatat = enter_fstatat + +func enter_fstatat(dirfd int, path string, stat *Stat_LE_t, flags int) (err error) { + funcref := get_fstatatAddr() + if funcptrtest(GetZosLibVec()+SYS___FSTATAT_A<<4, "") == 0 { + *funcref = impl_fstatat + } else { + *funcref = error_fstatat + } + return (*funcref)(dirfd, path, stat, flags) +} + +func error_fstatat(dirfd int, path string, stat *Stat_LE_t, flags int) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_Lgetxattr(link string, attr string, dest []byte) (sz int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(link) + if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(attr) + if err != nil { + return + } + var _p2 unsafe.Pointer + if len(dest) > 0 { + _p2 = unsafe.Pointer(&dest[0]) + } else { + _p2 = unsafe.Pointer(&_zero) + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___LGETXATTR_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(_p2), uintptr(len(dest))) + runtime.ExitSyscall() + sz = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_LgetxattrAddr() *(func(link string, attr string, dest []byte) (sz int, err error)) + +var Lgetxattr = enter_Lgetxattr + +func enter_Lgetxattr(link string, attr string, dest []byte) (sz int, err error) { + funcref := get_LgetxattrAddr() + if funcptrtest(GetZosLibVec()+SYS___LGETXATTR_A<<4, "") == 0 { + *funcref = impl_Lgetxattr + } else { + *funcref = error_Lgetxattr + } + return (*funcref)(link, attr, dest) +} + +func error_Lgetxattr(link string, attr string, dest []byte) (sz int, err error) { + sz = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_Lsetxattr(path string, attr string, data []byte, flags int) (err error) { + 
var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(attr) + if err != nil { + return + } + var _p2 unsafe.Pointer + if len(data) > 0 { + _p2 = unsafe.Pointer(&data[0]) + } else { + _p2 = unsafe.Pointer(&_zero) + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___LSETXATTR_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(_p2), uintptr(len(data)), uintptr(flags)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_LsetxattrAddr() *(func(path string, attr string, data []byte, flags int) (err error)) + +var Lsetxattr = enter_Lsetxattr + +func enter_Lsetxattr(path string, attr string, data []byte, flags int) (err error) { + funcref := get_LsetxattrAddr() + if funcptrtest(GetZosLibVec()+SYS___LSETXATTR_A<<4, "") == 0 { + *funcref = impl_Lsetxattr + } else { + *funcref = error_Lsetxattr + } + return (*funcref)(path, attr, data, flags) +} + +func error_Lsetxattr(path string, attr string, data []byte, flags int) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_Fstatfs(fd int, buf *Statfs_t) (err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_FSTATFS<<4, uintptr(fd), uintptr(unsafe.Pointer(buf))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_FstatfsAddr() *(func(fd int, buf *Statfs_t) (err error)) + +var Fstatfs = enter_Fstatfs + +func enter_Fstatfs(fd int, buf *Statfs_t) (err error) { + funcref := get_FstatfsAddr() + if funcptrtest(GetZosLibVec()+SYS_FSTATFS<<4, "") == 0 { + *funcref = impl_Fstatfs + } else { + *funcref = error_Fstatfs + } + return (*funcref)(fd, buf) +} + +func error_Fstatfs(fd int, buf *Statfs_t) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Fstatvfs(fd int, stat *Statvfs_t) (err error) { - _, _, e1 := syscall_syscall(SYS_FSTATVFS, uintptr(fd), uintptr(unsafe.Pointer(stat)), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_FSTATVFS<<4, uintptr(fd), uintptr(unsafe.Pointer(stat))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -447,28 +1387,461 @@ func Fstatvfs(fd int, stat *Statvfs_t) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fsync(fd int) (err error) { - _, _, e1 := syscall_syscall(SYS_FSYNC, uintptr(fd), 0, 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_FSYNC<<4, uintptr(fd)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func impl_Futimes(fd int, tv []Timeval) (err error) { + var _p0 unsafe.Pointer + if len(tv) > 0 { + _p0 = unsafe.Pointer(&tv[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_FUTIMES<<4, uintptr(fd), uintptr(_p0), uintptr(len(tv))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_FutimesAddr() *(func(fd int, tv []Timeval) (err error)) + +var Futimes = enter_Futimes + +func enter_Futimes(fd int, tv []Timeval) (err error) { + funcref := get_FutimesAddr() + if 
funcptrtest(GetZosLibVec()+SYS_FUTIMES<<4, "") == 0 { + *funcref = impl_Futimes + } else { + *funcref = error_Futimes + } + return (*funcref)(fd, tv) +} + +func error_Futimes(fd int, tv []Timeval) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_Futimesat(dirfd int, path string, tv []Timeval) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + var _p1 unsafe.Pointer + if len(tv) > 0 { + _p1 = unsafe.Pointer(&tv[0]) + } else { + _p1 = unsafe.Pointer(&_zero) + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___FUTIMESAT_A<<4, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(tv))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_FutimesatAddr() *(func(dirfd int, path string, tv []Timeval) (err error)) + +var Futimesat = enter_Futimesat + +func enter_Futimesat(dirfd int, path string, tv []Timeval) (err error) { + funcref := get_FutimesatAddr() + if funcptrtest(GetZosLibVec()+SYS___FUTIMESAT_A<<4, "") == 0 { + *funcref = impl_Futimesat + } else { + *funcref = error_Futimesat + } + return (*funcref)(dirfd, path, tv) +} + +func error_Futimesat(dirfd int, path string, tv []Timeval) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Ftruncate(fd int, length int64) (err error) { - _, _, e1 := syscall_syscall(SYS_FTRUNCATE, uintptr(fd), uintptr(length), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_FTRUNCATE<<4, uintptr(fd), uintptr(length)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Getpagesize() (pgsize int) { - r0, _, _ := syscall_syscall(SYS_GETPAGESIZE, 0, 0, 0) - pgsize = int(r0) +func impl_Getrandom(buf []byte, flags int) (n int, err error) { + var _p0 unsafe.Pointer + if len(buf) > 0 { + _p0 = unsafe.Pointer(&buf[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_GETRANDOM<<4, uintptr(_p0), uintptr(len(buf)), uintptr(flags)) + runtime.ExitSyscall() + n = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_GetrandomAddr() *(func(buf []byte, flags int) (n int, err error)) + +var Getrandom = enter_Getrandom + +func enter_Getrandom(buf []byte, flags int) (n int, err error) { + funcref := get_GetrandomAddr() + if funcptrtest(GetZosLibVec()+SYS_GETRANDOM<<4, "") == 0 { + *funcref = impl_Getrandom + } else { + *funcref = error_Getrandom + } + return (*funcref)(buf, flags) +} + +func error_Getrandom(buf []byte, flags int) (n int, err error) { + n = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_InotifyInit() (fd int, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec() + SYS_INOTIFY_INIT<<4) + runtime.ExitSyscall() + fd = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_InotifyInitAddr() *(func() (fd int, err error)) + +var InotifyInit = enter_InotifyInit + +func enter_InotifyInit() (fd int, err error) { + funcref := get_InotifyInitAddr() + if funcptrtest(GetZosLibVec()+SYS_INOTIFY_INIT<<4, "") == 0 { + *funcref = impl_InotifyInit + } else { + *funcref = 
error_InotifyInit + } + return (*funcref)() +} + +func error_InotifyInit() (fd int, err error) { + fd = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_InotifyInit1(flags int) (fd int, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_INOTIFY_INIT1<<4, uintptr(flags)) + runtime.ExitSyscall() + fd = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_InotifyInit1Addr() *(func(flags int) (fd int, err error)) + +var InotifyInit1 = enter_InotifyInit1 + +func enter_InotifyInit1(flags int) (fd int, err error) { + funcref := get_InotifyInit1Addr() + if funcptrtest(GetZosLibVec()+SYS_INOTIFY_INIT1<<4, "") == 0 { + *funcref = impl_InotifyInit1 + } else { + *funcref = error_InotifyInit1 + } + return (*funcref)(flags) +} + +func error_InotifyInit1(flags int) (fd int, err error) { + fd = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_InotifyAddWatch(fd int, pathname string, mask uint32) (watchdesc int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(pathname) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___INOTIFY_ADD_WATCH_A<<4, uintptr(fd), uintptr(unsafe.Pointer(_p0)), uintptr(mask)) + runtime.ExitSyscall() + watchdesc = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_InotifyAddWatchAddr() *(func(fd int, pathname string, mask uint32) (watchdesc int, err error)) + +var InotifyAddWatch = enter_InotifyAddWatch + +func enter_InotifyAddWatch(fd int, pathname string, mask uint32) (watchdesc int, err error) { + funcref := get_InotifyAddWatchAddr() + if funcptrtest(GetZosLibVec()+SYS___INOTIFY_ADD_WATCH_A<<4, "") == 0 { + *funcref = impl_InotifyAddWatch + } else { + *funcref = error_InotifyAddWatch + } + return (*funcref)(fd, pathname, mask) +} + +func error_InotifyAddWatch(fd int, pathname string, mask uint32) (watchdesc int, err error) { + watchdesc = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_InotifyRmWatch(fd int, watchdesc uint32) (success int, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_INOTIFY_RM_WATCH<<4, uintptr(fd), uintptr(watchdesc)) + runtime.ExitSyscall() + success = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_InotifyRmWatchAddr() *(func(fd int, watchdesc uint32) (success int, err error)) + +var InotifyRmWatch = enter_InotifyRmWatch + +func enter_InotifyRmWatch(fd int, watchdesc uint32) (success int, err error) { + funcref := get_InotifyRmWatchAddr() + if funcptrtest(GetZosLibVec()+SYS_INOTIFY_RM_WATCH<<4, "") == 0 { + *funcref = impl_InotifyRmWatch + } else { + *funcref = error_InotifyRmWatch + } + return (*funcref)(fd, watchdesc) +} + +func error_InotifyRmWatch(fd int, watchdesc uint32) (success int, err error) { + success = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_Listxattr(path string, dest []byte) (sz int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + var _p1 unsafe.Pointer + if len(dest) > 0 { + _p1 = unsafe.Pointer(&dest[0]) + } else { + _p1 = unsafe.Pointer(&_zero) + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___LISTXATTR_A<<4, 
uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(dest))) + runtime.ExitSyscall() + sz = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_ListxattrAddr() *(func(path string, dest []byte) (sz int, err error)) + +var Listxattr = enter_Listxattr + +func enter_Listxattr(path string, dest []byte) (sz int, err error) { + funcref := get_ListxattrAddr() + if funcptrtest(GetZosLibVec()+SYS___LISTXATTR_A<<4, "") == 0 { + *funcref = impl_Listxattr + } else { + *funcref = error_Listxattr + } + return (*funcref)(path, dest) +} + +func error_Listxattr(path string, dest []byte) (sz int, err error) { + sz = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_Llistxattr(path string, dest []byte) (sz int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + var _p1 unsafe.Pointer + if len(dest) > 0 { + _p1 = unsafe.Pointer(&dest[0]) + } else { + _p1 = unsafe.Pointer(&_zero) + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___LLISTXATTR_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(dest))) + runtime.ExitSyscall() + sz = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_LlistxattrAddr() *(func(path string, dest []byte) (sz int, err error)) + +var Llistxattr = enter_Llistxattr + +func enter_Llistxattr(path string, dest []byte) (sz int, err error) { + funcref := get_LlistxattrAddr() + if funcptrtest(GetZosLibVec()+SYS___LLISTXATTR_A<<4, "") == 0 { + *funcref = impl_Llistxattr + } else { + *funcref = error_Llistxattr + } + return (*funcref)(path, dest) +} + +func error_Llistxattr(path string, dest []byte) (sz int, err error) { + sz = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_Lremovexattr(path string, attr string) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(attr) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___LREMOVEXATTR_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_LremovexattrAddr() *(func(path string, attr string) (err error)) + +var Lremovexattr = enter_Lremovexattr + +func enter_Lremovexattr(path string, attr string) (err error) { + funcref := get_LremovexattrAddr() + if funcptrtest(GetZosLibVec()+SYS___LREMOVEXATTR_A<<4, "") == 0 { + *funcref = impl_Lremovexattr + } else { + *funcref = error_Lremovexattr + } + return (*funcref)(path, attr) +} + +func error_Lremovexattr(path string, attr string) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_Lutimes(path string, tv []Timeval) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + var _p1 unsafe.Pointer + if len(tv) > 0 { + _p1 = unsafe.Pointer(&tv[0]) + } else { + _p1 = unsafe.Pointer(&_zero) + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___LUTIMES_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(tv))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_LutimesAddr() *(func(path string, tv []Timeval) (err error)) + 
+var Lutimes = enter_Lutimes + +func enter_Lutimes(path string, tv []Timeval) (err error) { + funcref := get_LutimesAddr() + if funcptrtest(GetZosLibVec()+SYS___LUTIMES_A<<4, "") == 0 { + *funcref = impl_Lutimes + } else { + *funcref = error_Lutimes + } + return (*funcref)(path, tv) +} + +func error_Lutimes(path string, tv []Timeval) (err error) { + err = ENOSYS return } @@ -481,9 +1854,11 @@ func Mprotect(b []byte, prot int) (err error) { } else { _p0 = unsafe.Pointer(&_zero) } - _, _, e1 := syscall_syscall(SYS_MPROTECT, uintptr(_p0), uintptr(len(b)), uintptr(prot)) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_MPROTECT<<4, uintptr(_p0), uintptr(len(b)), uintptr(prot)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -497,9 +1872,23 @@ func Msync(b []byte, flags int) (err error) { } else { _p0 = unsafe.Pointer(&_zero) } - _, _, e1 := syscall_syscall(SYS_MSYNC, uintptr(_p0), uintptr(len(b)), uintptr(flags)) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_MSYNC<<4, uintptr(_p0), uintptr(len(b)), uintptr(flags)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Console2(cmsg *ConsMsg2, modstr *byte, concmd *uint32) (err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___CONSOLE2<<4, uintptr(unsafe.Pointer(cmsg)), uintptr(unsafe.Pointer(modstr)), uintptr(unsafe.Pointer(concmd))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -513,21 +1902,106 @@ func Poll(fds []PollFd, timeout int) (n int, err error) { } else { _p0 = unsafe.Pointer(&_zero) } - r0, _, e1 := syscall_syscall(SYS_POLL, uintptr(_p0), uintptr(len(fds)), uintptr(timeout)) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_POLL<<4, uintptr(_p0), uintptr(len(fds)), uintptr(timeout)) + runtime.ExitSyscall() n = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func Readdir_r(dirp uintptr, entry *direntLE, result **direntLE) (err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___READDIR_R_A<<4, uintptr(dirp), uintptr(unsafe.Pointer(entry)), uintptr(unsafe.Pointer(result))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_Statfs(path string, buf *Statfs_t) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___STATFS_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(buf))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_StatfsAddr() *(func(path string, buf *Statfs_t) (err error)) + +var Statfs = enter_Statfs + +func enter_Statfs(path string, buf *Statfs_t) (err error) { + funcref := get_StatfsAddr() + if funcptrtest(GetZosLibVec()+SYS___STATFS_A<<4, "") == 0 { + *funcref = impl_Statfs + } else { + *funcref = error_Statfs + } + return (*funcref)(path, buf) +} + +func error_Statfs(path string, buf *Statfs_t) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE 
COMMAND AT THE TOP; DO NOT EDIT + +func impl_Syncfs(fd int) (err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SYNCFS<<4, uintptr(fd)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_SyncfsAddr() *(func(fd int) (err error)) + +var Syncfs = enter_Syncfs + +func enter_Syncfs(fd int) (err error) { + funcref := get_SyncfsAddr() + if funcptrtest(GetZosLibVec()+SYS_SYNCFS<<4, "") == 0 { + *funcref = impl_Syncfs + } else { + *funcref = error_Syncfs + } + return (*funcref)(fd) +} + +func error_Syncfs(fd int) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Times(tms *Tms) (ticks uintptr, err error) { - r0, _, e1 := syscall_syscall(SYS_TIMES, uintptr(unsafe.Pointer(tms)), 0, 0) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_TIMES<<4, uintptr(unsafe.Pointer(tms))) + runtime.ExitSyscall() ticks = uintptr(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -535,10 +2009,12 @@ func Times(tms *Tms) (ticks uintptr, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func W_Getmntent(buff *byte, size int) (lastsys int, err error) { - r0, _, e1 := syscall_syscall(SYS_W_GETMNTENT, uintptr(unsafe.Pointer(buff)), uintptr(size), 0) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_W_GETMNTENT<<4, uintptr(unsafe.Pointer(buff)), uintptr(size)) + runtime.ExitSyscall() lastsys = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -546,10 +2022,12 @@ func W_Getmntent(buff *byte, size int) (lastsys int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func W_Getmntent_A(buff *byte, size int) (lastsys int, err error) { - r0, _, e1 := syscall_syscall(SYS___W_GETMNTENT_A, uintptr(unsafe.Pointer(buff)), uintptr(size), 0) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___W_GETMNTENT_A<<4, uintptr(unsafe.Pointer(buff)), uintptr(size)) + runtime.ExitSyscall() lastsys = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -577,24 +2055,28 @@ func mount_LE(path string, filesystem string, fstype string, mtm uint32, parmlen if err != nil { return } - _, _, e1 := syscall_syscall6(SYS___MOUNT_A, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(unsafe.Pointer(_p2)), uintptr(mtm), uintptr(parmlen), uintptr(unsafe.Pointer(_p3))) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___MOUNT_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(unsafe.Pointer(_p2)), uintptr(mtm), uintptr(parmlen), uintptr(unsafe.Pointer(_p3))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func unmount(filesystem string, mtm int) (err error) { +func unmount_LE(filesystem string, mtm int) (err error) { var _p0 *byte _p0, err = BytePtrFromString(filesystem) if err != nil { return } - _, _, e1 := syscall_syscall(SYS___UMOUNT_A, uintptr(unsafe.Pointer(_p0)), uintptr(mtm), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___UMOUNT_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(mtm)) + runtime.ExitSyscall() + if int64(r0) == 
-1 { + err = errnoErr2(e1, e2) } return } @@ -607,9 +2089,24 @@ func Chroot(path string) (err error) { if err != nil { return } - _, _, e1 := syscall_syscall(SYS___CHROOT_A, uintptr(unsafe.Pointer(_p0)), 0, 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___CHROOT_A<<4, uintptr(unsafe.Pointer(_p0))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Select(nmsgsfds int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (ret int, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SELECT<<4, uintptr(nmsgsfds), uintptr(unsafe.Pointer(r)), uintptr(unsafe.Pointer(w)), uintptr(unsafe.Pointer(e)), uintptr(unsafe.Pointer(timeout))) + runtime.ExitSyscall() + ret = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -617,15 +2114,47 @@ func Chroot(path string) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Uname(buf *Utsname) (err error) { - _, _, e1 := syscall_rawsyscall(SYS___UNAME_A, uintptr(unsafe.Pointer(buf)), 0, 0) - if e1 != 0 { - err = errnoErr(e1) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_____OSNAME_A<<4, uintptr(unsafe.Pointer(buf))) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func impl_Unshare(flags int) (err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_UNSHARE<<4, uintptr(flags)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_UnshareAddr() *(func(flags int) (err error)) + +var Unshare = enter_Unshare + +func enter_Unshare(flags int) (err error) { + funcref := get_UnshareAddr() + if funcptrtest(GetZosLibVec()+SYS_UNSHARE<<4, "") == 0 { + *funcref = impl_Unshare + } else { + *funcref = error_Unshare + } + return (*funcref)(flags) +} + +func error_Unshare(flags int) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Gethostname(buf []byte) (err error) { var _p0 unsafe.Pointer if len(buf) > 0 { @@ -633,33 +2162,19 @@ func Gethostname(buf []byte) (err error) { } else { _p0 = unsafe.Pointer(&_zero) } - _, _, e1 := syscall_syscall(SYS___GETHOSTNAME_A, uintptr(_p0), uintptr(len(buf)), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___GETHOSTNAME_A<<4, uintptr(_p0), uintptr(len(buf))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Getegid() (egid int) { - r0, _, _ := syscall_rawsyscall(SYS_GETEGID, 0, 0, 0) - egid = int(r0) - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - -func Geteuid() (uid int) { - r0, _, _ := syscall_rawsyscall(SYS_GETEUID, 0, 0, 0) - uid = int(r0) - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Getgid() (gid int) { - r0, _, _ := syscall_rawsyscall(SYS_GETGID, 0, 0, 0) + r0, _, _ := CallLeFuncWithErr(GetZosLibVec() + SYS_GETGID<<4) gid = int(r0) return } @@ -667,7 +2182,7 @@ func Getgid() (gid int) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getpid() (pid int) { - r0, _, _ := syscall_rawsyscall(SYS_GETPID, 0, 0, 0) + r0, _, _ := CallLeFuncWithErr(GetZosLibVec() + 
SYS_GETPID<<4) pid = int(r0) return } @@ -675,10 +2190,10 @@ func Getpid() (pid int) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getpgid(pid int) (pgid int, err error) { - r0, _, e1 := syscall_rawsyscall(SYS_GETPGID, uintptr(pid), 0, 0) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_GETPGID<<4, uintptr(pid)) pgid = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -686,7 +2201,7 @@ func Getpgid(pid int) (pgid int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getppid() (pid int) { - r0, _, _ := syscall_rawsyscall(SYS_GETPPID, 0, 0, 0) + r0, _, _ := CallLeFuncWithErr(GetZosLibVec() + SYS_GETPPID<<4) pid = int(r0) return } @@ -694,10 +2209,12 @@ func Getppid() (pid int) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getpriority(which int, who int) (prio int, err error) { - r0, _, e1 := syscall_syscall(SYS_GETPRIORITY, uintptr(which), uintptr(who), 0) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_GETPRIORITY<<4, uintptr(which), uintptr(who)) + runtime.ExitSyscall() prio = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -705,9 +2222,9 @@ func Getpriority(which int, who int) (prio int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getrlimit(resource int, rlim *Rlimit) (err error) { - _, _, e1 := syscall_rawsyscall(SYS_GETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0) - if e1 != 0 { - err = errnoErr(e1) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_GETRLIMIT<<4, uintptr(resource), uintptr(unsafe.Pointer(rlim))) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -715,20 +2232,40 @@ func Getrlimit(resource int, rlim *Rlimit) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func getrusage(who int, rusage *rusage_zos) (err error) { - _, _, e1 := syscall_rawsyscall(SYS_GETRUSAGE, uintptr(who), uintptr(unsafe.Pointer(rusage)), 0) - if e1 != 0 { - err = errnoErr(e1) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_GETRUSAGE<<4, uintptr(who), uintptr(unsafe.Pointer(rusage))) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func Getegid() (egid int) { + runtime.EnterSyscall() + r0, _, _ := CallLeFuncWithErr(GetZosLibVec() + SYS_GETEGID<<4) + runtime.ExitSyscall() + egid = int(r0) + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Geteuid() (euid int) { + runtime.EnterSyscall() + r0, _, _ := CallLeFuncWithErr(GetZosLibVec() + SYS_GETEUID<<4) + runtime.ExitSyscall() + euid = int(r0) + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Getsid(pid int) (sid int, err error) { - r0, _, e1 := syscall_rawsyscall(SYS_GETSID, uintptr(pid), 0, 0) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_GETSID<<4, uintptr(pid)) sid = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -736,7 +2273,7 @@ func Getsid(pid int) (sid int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getuid() (uid int) { - r0, _, _ := syscall_rawsyscall(SYS_GETUID, 0, 0, 0) + r0, _, _ := CallLeFuncWithErr(GetZosLibVec() + SYS_GETUID<<4) uid = int(r0) return } @@ -744,9 +2281,9 @@ func Getuid() (uid int) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT 
EDIT func Kill(pid int, sig Signal) (err error) { - _, _, e1 := syscall_rawsyscall(SYS_KILL, uintptr(pid), uintptr(sig), 0) - if e1 != 0 { - err = errnoErr(e1) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_KILL<<4, uintptr(pid), uintptr(sig)) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -759,9 +2296,11 @@ func Lchown(path string, uid int, gid int) (err error) { if err != nil { return } - _, _, e1 := syscall_syscall(SYS___LCHOWN_A, uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid)) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___LCHOWN_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -779,19 +2318,65 @@ func Link(path string, link string) (err error) { if err != nil { return } - _, _, e1 := syscall_syscall(SYS___LINK_A, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___LINK_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func impl_Linkat(oldDirFd int, oldPath string, newDirFd int, newPath string, flags int) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(oldPath) + if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(newPath) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___LINKAT_A<<4, uintptr(oldDirFd), uintptr(unsafe.Pointer(_p0)), uintptr(newDirFd), uintptr(unsafe.Pointer(_p1)), uintptr(flags)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_LinkatAddr() *(func(oldDirFd int, oldPath string, newDirFd int, newPath string, flags int) (err error)) + +var Linkat = enter_Linkat + +func enter_Linkat(oldDirFd int, oldPath string, newDirFd int, newPath string, flags int) (err error) { + funcref := get_LinkatAddr() + if funcptrtest(GetZosLibVec()+SYS___LINKAT_A<<4, "") == 0 { + *funcref = impl_Linkat + } else { + *funcref = error_Linkat + } + return (*funcref)(oldDirFd, oldPath, newDirFd, newPath, flags) +} + +func error_Linkat(oldDirFd int, oldPath string, newDirFd int, newPath string, flags int) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Listen(s int, n int) (err error) { - _, _, e1 := syscall_syscall(SYS_LISTEN, uintptr(s), uintptr(n), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_LISTEN<<4, uintptr(s), uintptr(n)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -804,9 +2389,11 @@ func lstat(path string, stat *Stat_LE_t) (err error) { if err != nil { return } - _, _, e1 := syscall_syscall(SYS___LSTAT_A, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___LSTAT_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -819,24 +2406,65 @@ func Mkdir(path string, mode uint32) (err error) { if err != nil { return } - _, _, e1 := 
syscall_syscall(SYS___MKDIR_A, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___MKDIR_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(mode)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func impl_Mkdirat(dirfd int, path string, mode uint32) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___MKDIRAT_A<<4, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_MkdiratAddr() *(func(dirfd int, path string, mode uint32) (err error)) + +var Mkdirat = enter_Mkdirat + +func enter_Mkdirat(dirfd int, path string, mode uint32) (err error) { + funcref := get_MkdiratAddr() + if funcptrtest(GetZosLibVec()+SYS___MKDIRAT_A<<4, "") == 0 { + *funcref = impl_Mkdirat + } else { + *funcref = error_Mkdirat + } + return (*funcref)(dirfd, path, mode) +} + +func error_Mkdirat(dirfd int, path string, mode uint32) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Mkfifo(path string, mode uint32) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } - _, _, e1 := syscall_syscall(SYS___MKFIFO_A, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___MKFIFO_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(mode)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -849,15 +2477,96 @@ func Mknod(path string, mode uint32, dev int) (err error) { if err != nil { return } - _, _, e1 := syscall_syscall(SYS___MKNOD_A, uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(dev)) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___MKNOD_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(dev)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func impl_Mknodat(dirfd int, path string, mode uint32, dev int) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___MKNODAT_A<<4, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(dev)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_MknodatAddr() *(func(dirfd int, path string, mode uint32, dev int) (err error)) + +var Mknodat = enter_Mknodat + +func enter_Mknodat(dirfd int, path string, mode uint32, dev int) (err error) { + funcref := get_MknodatAddr() + if funcptrtest(GetZosLibVec()+SYS___MKNODAT_A<<4, "") == 0 { + *funcref = impl_Mknodat + } else { + *funcref = error_Mknodat + } + return (*funcref)(dirfd, path, mode, dev) +} + +func error_Mknodat(dirfd int, path string, mode uint32, dev int) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_PivotRoot(newroot string, oldroot string) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(newroot) + 
if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(oldroot) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___PIVOT_ROOT_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_PivotRootAddr() *(func(newroot string, oldroot string) (err error)) + +var PivotRoot = enter_PivotRoot + +func enter_PivotRoot(newroot string, oldroot string) (err error) { + funcref := get_PivotRootAddr() + if funcptrtest(GetZosLibVec()+SYS___PIVOT_ROOT_A<<4, "") == 0 { + *funcref = impl_PivotRoot + } else { + *funcref = error_PivotRoot + } + return (*funcref)(newroot, oldroot) +} + +func error_PivotRoot(newroot string, oldroot string) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Pread(fd int, p []byte, offset int64) (n int, err error) { var _p0 unsafe.Pointer if len(p) > 0 { @@ -865,10 +2574,12 @@ func Pread(fd int, p []byte, offset int64) (n int, err error) { } else { _p0 = unsafe.Pointer(&_zero) } - r0, _, e1 := syscall_syscall6(SYS_PREAD, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), 0, 0) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_PREAD<<4, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset)) + runtime.ExitSyscall() n = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -882,36 +2593,78 @@ func Pwrite(fd int, p []byte, offset int64) (n int, err error) { } else { _p0 = unsafe.Pointer(&_zero) } - r0, _, e1 := syscall_syscall6(SYS_PWRITE, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), 0, 0) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_PWRITE<<4, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset)) + runtime.ExitSyscall() n = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Readlink(path string, buf []byte) (n int, err error) { - var _p0 *byte - _p0, err = BytePtrFromString(path) - if err != nil { - return +func impl_Prctl(option int, arg2 uintptr, arg3 uintptr, arg4 uintptr, arg5 uintptr) (err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___PRCTL_A<<4, uintptr(option), uintptr(arg2), uintptr(arg3), uintptr(arg4), uintptr(arg5)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } - var _p1 unsafe.Pointer - if len(buf) > 0 { - _p1 = unsafe.Pointer(&buf[0]) + return +} + +//go:nosplit +func get_PrctlAddr() *(func(option int, arg2 uintptr, arg3 uintptr, arg4 uintptr, arg5 uintptr) (err error)) + +var Prctl = enter_Prctl + +func enter_Prctl(option int, arg2 uintptr, arg3 uintptr, arg4 uintptr, arg5 uintptr) (err error) { + funcref := get_PrctlAddr() + if funcptrtest(GetZosLibVec()+SYS___PRCTL_A<<4, "") == 0 { + *funcref = impl_Prctl } else { - _p1 = unsafe.Pointer(&_zero) + *funcref = error_Prctl } - r0, _, e1 := syscall_syscall(SYS___READLINK_A, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(buf))) - n = int(r0) - if e1 != 0 { - err = errnoErr(e1) + return (*funcref)(option, arg2, arg3, arg4, arg5) +} + +func error_Prctl(option int, arg2 uintptr, arg3 uintptr, arg4 uintptr, arg5 uintptr) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE 
TOP; DO NOT EDIT + +func impl_Prlimit(pid int, resource int, newlimit *Rlimit, old *Rlimit) (err error) { + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_PRLIMIT<<4, uintptr(pid), uintptr(resource), uintptr(unsafe.Pointer(newlimit)), uintptr(unsafe.Pointer(old))) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } +//go:nosplit +func get_PrlimitAddr() *(func(pid int, resource int, newlimit *Rlimit, old *Rlimit) (err error)) + +var Prlimit = enter_Prlimit + +func enter_Prlimit(pid int, resource int, newlimit *Rlimit, old *Rlimit) (err error) { + funcref := get_PrlimitAddr() + if funcptrtest(GetZosLibVec()+SYS_PRLIMIT<<4, "") == 0 { + *funcref = impl_Prlimit + } else { + *funcref = error_Prlimit + } + return (*funcref)(pid, resource, newlimit, old) +} + +func error_Prlimit(pid int, resource int, newlimit *Rlimit, old *Rlimit) (err error) { + err = ENOSYS + return +} + // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Rename(from string, to string) (err error) { @@ -925,24 +2678,112 @@ func Rename(from string, to string) (err error) { if err != nil { return } - _, _, e1 := syscall_syscall(SYS___RENAME_A, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___RENAME_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func impl_Renameat(olddirfd int, oldpath string, newdirfd int, newpath string) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(oldpath) + if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(newpath) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___RENAMEAT_A<<4, uintptr(olddirfd), uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_RenameatAddr() *(func(olddirfd int, oldpath string, newdirfd int, newpath string) (err error)) + +var Renameat = enter_Renameat + +func enter_Renameat(olddirfd int, oldpath string, newdirfd int, newpath string) (err error) { + funcref := get_RenameatAddr() + if funcptrtest(GetZosLibVec()+SYS___RENAMEAT_A<<4, "") == 0 { + *funcref = impl_Renameat + } else { + *funcref = error_Renameat + } + return (*funcref)(olddirfd, oldpath, newdirfd, newpath) +} + +func error_Renameat(olddirfd int, oldpath string, newdirfd int, newpath string) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_Renameat2(olddirfd int, oldpath string, newdirfd int, newpath string, flags uint) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(oldpath) + if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(newpath) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___RENAMEAT2_A<<4, uintptr(olddirfd), uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1)), uintptr(flags)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_Renameat2Addr() *(func(olddirfd int, oldpath string, newdirfd int, newpath string, flags uint) (err error)) + +var Renameat2 = enter_Renameat2 + +func 
enter_Renameat2(olddirfd int, oldpath string, newdirfd int, newpath string, flags uint) (err error) { + funcref := get_Renameat2Addr() + if funcptrtest(GetZosLibVec()+SYS___RENAMEAT2_A<<4, "") == 0 { + *funcref = impl_Renameat2 + } else { + *funcref = error_Renameat2 + } + return (*funcref)(olddirfd, oldpath, newdirfd, newpath, flags) +} + +func error_Renameat2(olddirfd int, oldpath string, newdirfd int, newpath string, flags uint) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Rmdir(path string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } - _, _, e1 := syscall_syscall(SYS___RMDIR_A, uintptr(unsafe.Pointer(_p0)), 0, 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___RMDIR_A<<4, uintptr(unsafe.Pointer(_p0))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -950,20 +2791,118 @@ func Rmdir(path string) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Seek(fd int, offset int64, whence int) (off int64, err error) { - r0, _, e1 := syscall_syscall(SYS_LSEEK, uintptr(fd), uintptr(offset), uintptr(whence)) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_LSEEK<<4, uintptr(fd), uintptr(offset), uintptr(whence)) + runtime.ExitSyscall() off = int64(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func Setegid(egid int) (err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SETEGID<<4, uintptr(egid)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Seteuid(euid int) (err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SETEUID<<4, uintptr(euid)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_Sethostname(p []byte) (err error) { + var _p0 unsafe.Pointer + if len(p) > 0 { + _p0 = unsafe.Pointer(&p[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___SETHOSTNAME_A<<4, uintptr(_p0), uintptr(len(p))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_SethostnameAddr() *(func(p []byte) (err error)) + +var Sethostname = enter_Sethostname + +func enter_Sethostname(p []byte) (err error) { + funcref := get_SethostnameAddr() + if funcptrtest(GetZosLibVec()+SYS___SETHOSTNAME_A<<4, "") == 0 { + *funcref = impl_Sethostname + } else { + *funcref = error_Sethostname + } + return (*funcref)(p) +} + +func error_Sethostname(p []byte) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_Setns(fd int, nstype int) (err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SETNS<<4, uintptr(fd), uintptr(nstype)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_SetnsAddr() *(func(fd int, nstype int) (err error)) + +var Setns = enter_Setns + +func enter_Setns(fd int, nstype int) (err error) { + funcref := get_SetnsAddr() + 
if funcptrtest(GetZosLibVec()+SYS_SETNS<<4, "") == 0 { + *funcref = impl_Setns + } else { + *funcref = error_Setns + } + return (*funcref)(fd, nstype) +} + +func error_Setns(fd int, nstype int) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Setpriority(which int, who int, prio int) (err error) { - _, _, e1 := syscall_syscall(SYS_SETPRIORITY, uintptr(which), uintptr(who), uintptr(prio)) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SETPRIORITY<<4, uintptr(which), uintptr(who), uintptr(prio)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -971,9 +2910,9 @@ func Setpriority(which int, who int, prio int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setpgid(pid int, pgid int) (err error) { - _, _, e1 := syscall_rawsyscall(SYS_SETPGID, uintptr(pid), uintptr(pgid), 0) - if e1 != 0 { - err = errnoErr(e1) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SETPGID<<4, uintptr(pid), uintptr(pgid)) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -981,9 +2920,9 @@ func Setpgid(pid int, pgid int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setrlimit(resource int, lim *Rlimit) (err error) { - _, _, e1 := syscall_rawsyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SETRLIMIT<<4, uintptr(resource), uintptr(unsafe.Pointer(lim))) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -991,9 +2930,9 @@ func Setrlimit(resource int, lim *Rlimit) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setregid(rgid int, egid int) (err error) { - _, _, e1 := syscall_rawsyscall(SYS_SETREGID, uintptr(rgid), uintptr(egid), 0) - if e1 != 0 { - err = errnoErr(e1) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SETREGID<<4, uintptr(rgid), uintptr(egid)) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -1001,9 +2940,9 @@ func Setregid(rgid int, egid int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setreuid(ruid int, euid int) (err error) { - _, _, e1 := syscall_rawsyscall(SYS_SETREUID, uintptr(ruid), uintptr(euid), 0) - if e1 != 0 { - err = errnoErr(e1) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SETREUID<<4, uintptr(ruid), uintptr(euid)) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -1011,10 +2950,10 @@ func Setreuid(ruid int, euid int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setsid() (pid int, err error) { - r0, _, e1 := syscall_rawsyscall(SYS_SETSID, 0, 0, 0) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec() + SYS_SETSID<<4) pid = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -1022,9 +2961,11 @@ func Setsid() (pid int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setuid(uid int) (err error) { - _, _, e1 := syscall_syscall(SYS_SETUID, uintptr(uid), 0, 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SETUID<<4, uintptr(uid)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -1032,9 +2973,11 @@ func Setuid(uid int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE 
TOP; DO NOT EDIT func Setgid(uid int) (err error) { - _, _, e1 := syscall_syscall(SYS_SETGID, uintptr(uid), 0, 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SETGID<<4, uintptr(uid)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -1042,9 +2985,11 @@ func Setgid(uid int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Shutdown(fd int, how int) (err error) { - _, _, e1 := syscall_syscall(SYS_SHUTDOWN, uintptr(fd), uintptr(how), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_SHUTDOWN<<4, uintptr(fd), uintptr(how)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -1057,9 +3002,11 @@ func stat(path string, statLE *Stat_LE_t) (err error) { if err != nil { return } - _, _, e1 := syscall_syscall(SYS___STAT_A, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(statLE)), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___STAT_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(statLE))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -1077,17 +3024,63 @@ func Symlink(path string, link string) (err error) { if err != nil { return } - _, _, e1 := syscall_syscall(SYS___SYMLINK_A, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___SYMLINK_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func impl_Symlinkat(oldPath string, dirfd int, newPath string) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(oldPath) + if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(newPath) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___SYMLINKAT_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(dirfd), uintptr(unsafe.Pointer(_p1))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_SymlinkatAddr() *(func(oldPath string, dirfd int, newPath string) (err error)) + +var Symlinkat = enter_Symlinkat + +func enter_Symlinkat(oldPath string, dirfd int, newPath string) (err error) { + funcref := get_SymlinkatAddr() + if funcptrtest(GetZosLibVec()+SYS___SYMLINKAT_A<<4, "") == 0 { + *funcref = impl_Symlinkat + } else { + *funcref = error_Symlinkat + } + return (*funcref)(oldPath, dirfd, newPath) +} + +func error_Symlinkat(oldPath string, dirfd int, newPath string) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Sync() { - syscall_syscall(SYS_SYNC, 0, 0, 0) + runtime.EnterSyscall() + CallLeFuncWithErr(GetZosLibVec() + SYS_SYNC<<4) + runtime.ExitSyscall() return } @@ -1099,9 +3092,11 @@ func Truncate(path string, length int64) (err error) { if err != nil { return } - _, _, e1 := syscall_syscall(SYS___TRUNCATE_A, uintptr(unsafe.Pointer(_p0)), uintptr(length), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___TRUNCATE_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(length)) + runtime.ExitSyscall() + if 
int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -1109,9 +3104,11 @@ func Truncate(path string, length int64) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Tcgetattr(fildes int, termptr *Termios) (err error) { - _, _, e1 := syscall_syscall(SYS_TCGETATTR, uintptr(fildes), uintptr(unsafe.Pointer(termptr)), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_TCGETATTR<<4, uintptr(fildes), uintptr(unsafe.Pointer(termptr))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -1119,9 +3116,11 @@ func Tcgetattr(fildes int, termptr *Termios) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Tcsetattr(fildes int, when int, termptr *Termios) (err error) { - _, _, e1 := syscall_syscall(SYS_TCSETATTR, uintptr(fildes), uintptr(when), uintptr(unsafe.Pointer(termptr))) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_TCSETATTR<<4, uintptr(fildes), uintptr(when), uintptr(unsafe.Pointer(termptr))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -1129,7 +3128,9 @@ func Tcsetattr(fildes int, when int, termptr *Termios) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Umask(mask int) (oldmask int) { - r0, _, _ := syscall_syscall(SYS_UMASK, uintptr(mask), 0, 0) + runtime.EnterSyscall() + r0, _, _ := CallLeFuncWithErr(GetZosLibVec()+SYS_UMASK<<4, uintptr(mask)) + runtime.ExitSyscall() oldmask = int(r0) return } @@ -1142,24 +3143,65 @@ func Unlink(path string) (err error) { if err != nil { return } - _, _, e1 := syscall_syscall(SYS___UNLINK_A, uintptr(unsafe.Pointer(_p0)), 0, 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___UNLINK_A<<4, uintptr(unsafe.Pointer(_p0))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func impl_Unlinkat(dirfd int, path string, flags int) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___UNLINKAT_A<<4, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(flags)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_UnlinkatAddr() *(func(dirfd int, path string, flags int) (err error)) + +var Unlinkat = enter_Unlinkat + +func enter_Unlinkat(dirfd int, path string, flags int) (err error) { + funcref := get_UnlinkatAddr() + if funcptrtest(GetZosLibVec()+SYS___UNLINKAT_A<<4, "") == 0 { + *funcref = impl_Unlinkat + } else { + *funcref = error_Unlinkat + } + return (*funcref)(dirfd, path, flags) +} + +func error_Unlinkat(dirfd int, path string, flags int) (err error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Utime(path string, utim *Utimbuf) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } - _, _, e1 := syscall_syscall(SYS___UTIME_A, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(utim)), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___UTIME_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(utim))) + runtime.ExitSyscall() + if int64(r0) == 
-1 { + err = errnoErr2(e1, e2) } return } @@ -1172,25 +3214,119 @@ func open(path string, mode int, perm uint32) (fd int, err error) { if err != nil { return } - r0, _, e1 := syscall_syscall(SYS___OPEN_A, uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(perm)) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___OPEN_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(perm)) + runtime.ExitSyscall() fd = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func impl_openat(dirfd int, path string, flags int, mode uint32) (fd int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___OPENAT_A<<4, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(flags), uintptr(mode)) + runtime.ExitSyscall() + fd = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_openatAddr() *(func(dirfd int, path string, flags int, mode uint32) (fd int, err error)) + +var openat = enter_openat + +func enter_openat(dirfd int, path string, flags int, mode uint32) (fd int, err error) { + funcref := get_openatAddr() + if funcptrtest(GetZosLibVec()+SYS___OPENAT_A<<4, "") == 0 { + *funcref = impl_openat + } else { + *funcref = error_openat + } + return (*funcref)(dirfd, path, flags, mode) +} + +func error_openat(dirfd int, path string, flags int, mode uint32) (fd int, err error) { + fd = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func impl_openat2(dirfd int, path string, open_how *OpenHow, size int) (fd int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___OPENAT2_A<<4, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(open_how)), uintptr(size)) + runtime.ExitSyscall() + fd = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_openat2Addr() *(func(dirfd int, path string, open_how *OpenHow, size int) (fd int, err error)) + +var openat2 = enter_openat2 + +func enter_openat2(dirfd int, path string, open_how *OpenHow, size int) (fd int, err error) { + funcref := get_openat2Addr() + if funcptrtest(GetZosLibVec()+SYS___OPENAT2_A<<4, "") == 0 { + *funcref = impl_openat2 + } else { + *funcref = error_openat2 + } + return (*funcref)(dirfd, path, open_how, size) +} + +func error_openat2(dirfd int, path string, open_how *OpenHow, size int) (fd int, err error) { + fd = -1 + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func remove(path string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } - _, _, e1 := syscall_syscall(SYS_REMOVE, uintptr(unsafe.Pointer(_p0)), 0, 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_REMOVE<<4, uintptr(unsafe.Pointer(_p0))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func waitid(idType int, id int, info *Siginfo, options int) (err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_WAITID<<4, uintptr(idType), uintptr(id), 
uintptr(unsafe.Pointer(info)), uintptr(options)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -1198,10 +3334,12 @@ func remove(path string) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func waitpid(pid int, wstatus *_C_int, options int) (wpid int, err error) { - r0, _, e1 := syscall_syscall(SYS_WAITPID, uintptr(pid), uintptr(unsafe.Pointer(wstatus)), uintptr(options)) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_WAITPID<<4, uintptr(pid), uintptr(unsafe.Pointer(wstatus)), uintptr(options)) + runtime.ExitSyscall() wpid = int(r0) - if e1 != 0 { - err = errnoErr(e1) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -1209,9 +3347,9 @@ func waitpid(pid int, wstatus *_C_int, options int) (wpid int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func gettimeofday(tv *timeval_zos) (err error) { - _, _, e1 := syscall_rawsyscall(SYS_GETTIMEOFDAY, uintptr(unsafe.Pointer(tv)), 0, 0) - if e1 != 0 { - err = errnoErr(e1) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_GETTIMEOFDAY<<4, uintptr(unsafe.Pointer(tv))) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -1219,9 +3357,9 @@ func gettimeofday(tv *timeval_zos) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func pipe(p *[2]_C_int) (err error) { - _, _, e1 := syscall_rawsyscall(SYS_PIPE, uintptr(unsafe.Pointer(p)), 0, 0) - if e1 != 0 { - err = errnoErr(e1) + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_PIPE<<4, uintptr(unsafe.Pointer(p))) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } @@ -1234,20 +3372,87 @@ func utimes(path string, timeval *[2]Timeval) (err error) { if err != nil { return } - _, _, e1 := syscall_syscall(SYS___UTIMES_A, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(timeval)), 0) - if e1 != 0 { - err = errnoErr(e1) + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___UTIMES_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(timeval))) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Select(nmsgsfds int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (ret int, err error) { - r0, _, e1 := syscall_syscall6(SYS_SELECT, uintptr(nmsgsfds), uintptr(unsafe.Pointer(r)), uintptr(unsafe.Pointer(w)), uintptr(unsafe.Pointer(e)), uintptr(unsafe.Pointer(timeout)), 0) - ret = int(r0) - if e1 != 0 { - err = errnoErr(e1) +func impl_utimensat(dirfd int, path string, ts *[2]Timespec, flags int) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___UTIMENSAT_A<<4, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(ts)), uintptr(flags)) + runtime.ExitSyscall() + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +//go:nosplit +func get_utimensatAddr() *(func(dirfd int, path string, ts *[2]Timespec, flags int) (err error)) + +var utimensat = enter_utimensat + +func enter_utimensat(dirfd int, path string, ts *[2]Timespec, flags int) (err error) { + funcref := get_utimensatAddr() + if funcptrtest(GetZosLibVec()+SYS___UTIMENSAT_A<<4, "") == 0 { + *funcref = impl_utimensat + } else { + *funcref = error_utimensat + } + return (*funcref)(dirfd, path, ts, flags) +} + +func error_utimensat(dirfd int, path string, ts *[2]Timespec, flags int) (err 
error) { + err = ENOSYS + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Posix_openpt(oflag int) (fd int, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_POSIX_OPENPT<<4, uintptr(oflag)) + runtime.ExitSyscall() + fd = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Grantpt(fildes int) (rc int, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_GRANTPT<<4, uintptr(fildes)) + runtime.ExitSyscall() + rc = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Unlockpt(fildes int) (rc int, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_UNLOCKPT<<4, uintptr(fildes)) + runtime.ExitSyscall() + rc = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) } return } diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_386.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_386.go index fcf3ecb..53aef5d 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_386.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_386.go @@ -448,4 +448,13 @@ const ( SYS_SET_MEMPOLICY_HOME_NODE = 450 SYS_CACHESTAT = 451 SYS_FCHMODAT2 = 452 + SYS_MAP_SHADOW_STACK = 453 + SYS_FUTEX_WAKE = 454 + SYS_FUTEX_WAIT = 455 + SYS_FUTEX_REQUEUE = 456 + SYS_STATMOUNT = 457 + SYS_LISTMOUNT = 458 + SYS_LSM_GET_SELF_ATTR = 459 + SYS_LSM_SET_SELF_ATTR = 460 + SYS_LSM_LIST_MODULES = 461 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_amd64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_amd64.go index f56dc25..71d5247 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_amd64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_amd64.go @@ -371,4 +371,12 @@ const ( SYS_CACHESTAT = 451 SYS_FCHMODAT2 = 452 SYS_MAP_SHADOW_STACK = 453 + SYS_FUTEX_WAKE = 454 + SYS_FUTEX_WAIT = 455 + SYS_FUTEX_REQUEUE = 456 + SYS_STATMOUNT = 457 + SYS_LISTMOUNT = 458 + SYS_LSM_GET_SELF_ATTR = 459 + SYS_LSM_SET_SELF_ATTR = 460 + SYS_LSM_LIST_MODULES = 461 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_arm.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_arm.go index 974bf24..c747706 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_arm.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_arm.go @@ -412,4 +412,13 @@ const ( SYS_SET_MEMPOLICY_HOME_NODE = 450 SYS_CACHESTAT = 451 SYS_FCHMODAT2 = 452 + SYS_MAP_SHADOW_STACK = 453 + SYS_FUTEX_WAKE = 454 + SYS_FUTEX_WAIT = 455 + SYS_FUTEX_REQUEUE = 456 + SYS_STATMOUNT = 457 + SYS_LISTMOUNT = 458 + SYS_LSM_GET_SELF_ATTR = 459 + SYS_LSM_SET_SELF_ATTR = 460 + SYS_LSM_LIST_MODULES = 461 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_arm64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_arm64.go index 39a2739..f96e214 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_arm64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_arm64.go @@ -315,4 +315,13 @@ const ( SYS_SET_MEMPOLICY_HOME_NODE = 450 SYS_CACHESTAT = 451 SYS_FCHMODAT2 = 452 + SYS_MAP_SHADOW_STACK = 453 + SYS_FUTEX_WAKE = 454 + SYS_FUTEX_WAIT = 455 + SYS_FUTEX_REQUEUE = 456 + SYS_STATMOUNT = 457 + SYS_LISTMOUNT = 458 + SYS_LSM_GET_SELF_ATTR = 459 + SYS_LSM_SET_SELF_ATTR = 460 + SYS_LSM_LIST_MODULES = 461 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_loong64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_loong64.go index cf9c9d7..2842534 100644 --- 
a/vendor/golang.org/x/sys/unix/zsysnum_linux_loong64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_loong64.go @@ -309,4 +309,13 @@ const ( SYS_SET_MEMPOLICY_HOME_NODE = 450 SYS_CACHESTAT = 451 SYS_FCHMODAT2 = 452 + SYS_MAP_SHADOW_STACK = 453 + SYS_FUTEX_WAKE = 454 + SYS_FUTEX_WAIT = 455 + SYS_FUTEX_REQUEUE = 456 + SYS_STATMOUNT = 457 + SYS_LISTMOUNT = 458 + SYS_LSM_GET_SELF_ATTR = 459 + SYS_LSM_SET_SELF_ATTR = 460 + SYS_LSM_LIST_MODULES = 461 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips.go index 10b7362..d095301 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips.go @@ -432,4 +432,13 @@ const ( SYS_SET_MEMPOLICY_HOME_NODE = 4450 SYS_CACHESTAT = 4451 SYS_FCHMODAT2 = 4452 + SYS_MAP_SHADOW_STACK = 4453 + SYS_FUTEX_WAKE = 4454 + SYS_FUTEX_WAIT = 4455 + SYS_FUTEX_REQUEUE = 4456 + SYS_STATMOUNT = 4457 + SYS_LISTMOUNT = 4458 + SYS_LSM_GET_SELF_ATTR = 4459 + SYS_LSM_SET_SELF_ATTR = 4460 + SYS_LSM_LIST_MODULES = 4461 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64.go index cd4d8b4..295c7f4 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64.go @@ -362,4 +362,13 @@ const ( SYS_SET_MEMPOLICY_HOME_NODE = 5450 SYS_CACHESTAT = 5451 SYS_FCHMODAT2 = 5452 + SYS_MAP_SHADOW_STACK = 5453 + SYS_FUTEX_WAKE = 5454 + SYS_FUTEX_WAIT = 5455 + SYS_FUTEX_REQUEUE = 5456 + SYS_STATMOUNT = 5457 + SYS_LISTMOUNT = 5458 + SYS_LSM_GET_SELF_ATTR = 5459 + SYS_LSM_SET_SELF_ATTR = 5460 + SYS_LSM_LIST_MODULES = 5461 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64le.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64le.go index 2c0efca..d1a9eac 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64le.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64le.go @@ -362,4 +362,13 @@ const ( SYS_SET_MEMPOLICY_HOME_NODE = 5450 SYS_CACHESTAT = 5451 SYS_FCHMODAT2 = 5452 + SYS_MAP_SHADOW_STACK = 5453 + SYS_FUTEX_WAKE = 5454 + SYS_FUTEX_WAIT = 5455 + SYS_FUTEX_REQUEUE = 5456 + SYS_STATMOUNT = 5457 + SYS_LISTMOUNT = 5458 + SYS_LSM_GET_SELF_ATTR = 5459 + SYS_LSM_SET_SELF_ATTR = 5460 + SYS_LSM_LIST_MODULES = 5461 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_mipsle.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_mipsle.go index a72e31d..bec157c 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_mipsle.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_mipsle.go @@ -432,4 +432,13 @@ const ( SYS_SET_MEMPOLICY_HOME_NODE = 4450 SYS_CACHESTAT = 4451 SYS_FCHMODAT2 = 4452 + SYS_MAP_SHADOW_STACK = 4453 + SYS_FUTEX_WAKE = 4454 + SYS_FUTEX_WAIT = 4455 + SYS_FUTEX_REQUEUE = 4456 + SYS_STATMOUNT = 4457 + SYS_LISTMOUNT = 4458 + SYS_LSM_GET_SELF_ATTR = 4459 + SYS_LSM_SET_SELF_ATTR = 4460 + SYS_LSM_LIST_MODULES = 4461 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc.go index c7d1e37..7ee7bdc 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc.go @@ -439,4 +439,13 @@ const ( SYS_SET_MEMPOLICY_HOME_NODE = 450 SYS_CACHESTAT = 451 SYS_FCHMODAT2 = 452 + SYS_MAP_SHADOW_STACK = 453 + SYS_FUTEX_WAKE = 454 + SYS_FUTEX_WAIT = 455 + SYS_FUTEX_REQUEUE = 456 + SYS_STATMOUNT = 457 + SYS_LISTMOUNT = 458 + SYS_LSM_GET_SELF_ATTR = 459 + SYS_LSM_SET_SELF_ATTR = 460 + SYS_LSM_LIST_MODULES = 461 ) diff --git 
a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64.go index f4d4838..fad1f25 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64.go @@ -411,4 +411,13 @@ const ( SYS_SET_MEMPOLICY_HOME_NODE = 450 SYS_CACHESTAT = 451 SYS_FCHMODAT2 = 452 + SYS_MAP_SHADOW_STACK = 453 + SYS_FUTEX_WAKE = 454 + SYS_FUTEX_WAIT = 455 + SYS_FUTEX_REQUEUE = 456 + SYS_STATMOUNT = 457 + SYS_LISTMOUNT = 458 + SYS_LSM_GET_SELF_ATTR = 459 + SYS_LSM_SET_SELF_ATTR = 460 + SYS_LSM_LIST_MODULES = 461 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64le.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64le.go index b64f0e5..7d3e163 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64le.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64le.go @@ -411,4 +411,13 @@ const ( SYS_SET_MEMPOLICY_HOME_NODE = 450 SYS_CACHESTAT = 451 SYS_FCHMODAT2 = 452 + SYS_MAP_SHADOW_STACK = 453 + SYS_FUTEX_WAKE = 454 + SYS_FUTEX_WAIT = 455 + SYS_FUTEX_REQUEUE = 456 + SYS_STATMOUNT = 457 + SYS_LISTMOUNT = 458 + SYS_LSM_GET_SELF_ATTR = 459 + SYS_LSM_SET_SELF_ATTR = 460 + SYS_LSM_LIST_MODULES = 461 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_riscv64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_riscv64.go index 9571119..0ed53ad 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_riscv64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_riscv64.go @@ -316,4 +316,13 @@ const ( SYS_SET_MEMPOLICY_HOME_NODE = 450 SYS_CACHESTAT = 451 SYS_FCHMODAT2 = 452 + SYS_MAP_SHADOW_STACK = 453 + SYS_FUTEX_WAKE = 454 + SYS_FUTEX_WAIT = 455 + SYS_FUTEX_REQUEUE = 456 + SYS_STATMOUNT = 457 + SYS_LISTMOUNT = 458 + SYS_LSM_GET_SELF_ATTR = 459 + SYS_LSM_SET_SELF_ATTR = 460 + SYS_LSM_LIST_MODULES = 461 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go index f94e943..2fba04a 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go @@ -377,4 +377,13 @@ const ( SYS_SET_MEMPOLICY_HOME_NODE = 450 SYS_CACHESTAT = 451 SYS_FCHMODAT2 = 452 + SYS_MAP_SHADOW_STACK = 453 + SYS_FUTEX_WAKE = 454 + SYS_FUTEX_WAIT = 455 + SYS_FUTEX_REQUEUE = 456 + SYS_STATMOUNT = 457 + SYS_LISTMOUNT = 458 + SYS_LSM_GET_SELF_ATTR = 459 + SYS_LSM_SET_SELF_ATTR = 460 + SYS_LSM_LIST_MODULES = 461 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_sparc64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_sparc64.go index ba0c2bc..621d00d 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_sparc64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_sparc64.go @@ -390,4 +390,13 @@ const ( SYS_SET_MEMPOLICY_HOME_NODE = 450 SYS_CACHESTAT = 451 SYS_FCHMODAT2 = 452 + SYS_MAP_SHADOW_STACK = 453 + SYS_FUTEX_WAKE = 454 + SYS_FUTEX_WAIT = 455 + SYS_FUTEX_REQUEUE = 456 + SYS_STATMOUNT = 457 + SYS_LISTMOUNT = 458 + SYS_LSM_GET_SELF_ATTR = 459 + SYS_LSM_SET_SELF_ATTR = 460 + SYS_LSM_LIST_MODULES = 461 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_zos_s390x.go b/vendor/golang.org/x/sys/unix/zsysnum_zos_s390x.go index b2e3085..5e8c263 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_zos_s390x.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_zos_s390x.go @@ -1,2669 +1,2852 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
+// go run mksyscall_zos_s390x.go -o_sysnum zsysnum_zos_s390x.go -o_syscall zsyscall_zos_s390x.go -i_syscall syscall_zos_s390x.go -o_asm zsymaddr_zos_s390x.s +// Code generated by the command above; see README.md. DO NOT EDIT. //go:build zos && s390x package unix -// TODO: auto-generate. - const ( - SYS_ACOSD128 = 0xB80 - SYS_ACOSD32 = 0xB7E - SYS_ACOSD64 = 0xB7F - SYS_ACOSHD128 = 0xB83 - SYS_ACOSHD32 = 0xB81 - SYS_ACOSHD64 = 0xB82 - SYS_AIO_FSYNC = 0xC69 - SYS_ASCTIME = 0x0AE - SYS_ASCTIME64 = 0xCD7 - SYS_ASCTIME64_R = 0xCD8 - SYS_ASIND128 = 0xB86 - SYS_ASIND32 = 0xB84 - SYS_ASIND64 = 0xB85 - SYS_ASINHD128 = 0xB89 - SYS_ASINHD32 = 0xB87 - SYS_ASINHD64 = 0xB88 - SYS_ATAN2D128 = 0xB8F - SYS_ATAN2D32 = 0xB8D - SYS_ATAN2D64 = 0xB8E - SYS_ATAND128 = 0xB8C - SYS_ATAND32 = 0xB8A - SYS_ATAND64 = 0xB8B - SYS_ATANHD128 = 0xB92 - SYS_ATANHD32 = 0xB90 - SYS_ATANHD64 = 0xB91 - SYS_BIND2ADDRSEL = 0xD59 - SYS_C16RTOMB = 0xD40 - SYS_C32RTOMB = 0xD41 - SYS_CBRTD128 = 0xB95 - SYS_CBRTD32 = 0xB93 - SYS_CBRTD64 = 0xB94 - SYS_CEILD128 = 0xB98 - SYS_CEILD32 = 0xB96 - SYS_CEILD64 = 0xB97 - SYS_CLEARENV = 0x0C9 - SYS_CLEARERR_UNLOCKED = 0xCA1 - SYS_CLOCK = 0x0AA - SYS_CLOGL = 0xA00 - SYS_CLRMEMF = 0x0BD - SYS_CONJ = 0xA03 - SYS_CONJF = 0xA06 - SYS_CONJL = 0xA09 - SYS_COPYSIGND128 = 0xB9E - SYS_COPYSIGND32 = 0xB9C - SYS_COPYSIGND64 = 0xB9D - SYS_COSD128 = 0xBA1 - SYS_COSD32 = 0xB9F - SYS_COSD64 = 0xBA0 - SYS_COSHD128 = 0xBA4 - SYS_COSHD32 = 0xBA2 - SYS_COSHD64 = 0xBA3 - SYS_CPOW = 0xA0C - SYS_CPOWF = 0xA0F - SYS_CPOWL = 0xA12 - SYS_CPROJ = 0xA15 - SYS_CPROJF = 0xA18 - SYS_CPROJL = 0xA1B - SYS_CREAL = 0xA1E - SYS_CREALF = 0xA21 - SYS_CREALL = 0xA24 - SYS_CSIN = 0xA27 - SYS_CSINF = 0xA2A - SYS_CSINH = 0xA30 - SYS_CSINHF = 0xA33 - SYS_CSINHL = 0xA36 - SYS_CSINL = 0xA2D - SYS_CSNAP = 0x0C5 - SYS_CSQRT = 0xA39 - SYS_CSQRTF = 0xA3C - SYS_CSQRTL = 0xA3F - SYS_CTAN = 0xA42 - SYS_CTANF = 0xA45 - SYS_CTANH = 0xA4B - SYS_CTANHF = 0xA4E - SYS_CTANHL = 0xA51 - SYS_CTANL = 0xA48 - SYS_CTIME = 0x0AB - SYS_CTIME64 = 0xCD9 - SYS_CTIME64_R = 0xCDA - SYS_CTRACE = 0x0C6 - SYS_DIFFTIME = 0x0A7 - SYS_DIFFTIME64 = 0xCDB - SYS_DLADDR = 0xC82 - SYS_DYNALLOC = 0x0C3 - SYS_DYNFREE = 0x0C2 - SYS_ERFCD128 = 0xBAA - SYS_ERFCD32 = 0xBA8 - SYS_ERFCD64 = 0xBA9 - SYS_ERFD128 = 0xBA7 - SYS_ERFD32 = 0xBA5 - SYS_ERFD64 = 0xBA6 - SYS_EXP2D128 = 0xBB0 - SYS_EXP2D32 = 0xBAE - SYS_EXP2D64 = 0xBAF - SYS_EXPD128 = 0xBAD - SYS_EXPD32 = 0xBAB - SYS_EXPD64 = 0xBAC - SYS_EXPM1D128 = 0xBB3 - SYS_EXPM1D32 = 0xBB1 - SYS_EXPM1D64 = 0xBB2 - SYS_FABSD128 = 0xBB6 - SYS_FABSD32 = 0xBB4 - SYS_FABSD64 = 0xBB5 - SYS_FDELREC_UNLOCKED = 0xCA2 - SYS_FDIMD128 = 0xBB9 - SYS_FDIMD32 = 0xBB7 - SYS_FDIMD64 = 0xBB8 - SYS_FDOPEN_UNLOCKED = 0xCFC - SYS_FECLEAREXCEPT = 0xAEA - SYS_FEGETENV = 0xAEB - SYS_FEGETEXCEPTFLAG = 0xAEC - SYS_FEGETROUND = 0xAED - SYS_FEHOLDEXCEPT = 0xAEE - SYS_FEOF_UNLOCKED = 0xCA3 - SYS_FERAISEEXCEPT = 0xAEF - SYS_FERROR_UNLOCKED = 0xCA4 - SYS_FESETENV = 0xAF0 - SYS_FESETEXCEPTFLAG = 0xAF1 - SYS_FESETROUND = 0xAF2 - SYS_FETCHEP = 0x0BF - SYS_FETESTEXCEPT = 0xAF3 - SYS_FEUPDATEENV = 0xAF4 - SYS_FE_DEC_GETROUND = 0xBBA - SYS_FE_DEC_SETROUND = 0xBBB - SYS_FFLUSH_UNLOCKED = 0xCA5 - SYS_FGETC_UNLOCKED = 0xC80 - SYS_FGETPOS64 = 0xCEE - SYS_FGETPOS64_UNLOCKED = 0xCF4 - SYS_FGETPOS_UNLOCKED = 0xCA6 - SYS_FGETS_UNLOCKED = 0xC7C - SYS_FGETWC_UNLOCKED = 0xCA7 - SYS_FGETWS_UNLOCKED = 0xCA8 - SYS_FILENO_UNLOCKED = 0xCA9 - SYS_FLDATA = 0x0C1 - SYS_FLDATA_UNLOCKED = 0xCAA - SYS_FLOCATE_UNLOCKED = 0xCAB - SYS_FLOORD128 = 0xBBE - SYS_FLOORD32 = 0xBBC - SYS_FLOORD64 = 0xBBD 
- SYS_FMA = 0xA63 - SYS_FMAD128 = 0xBC1 - SYS_FMAD32 = 0xBBF - SYS_FMAD64 = 0xBC0 - SYS_FMAF = 0xA66 - SYS_FMAL = 0xA69 - SYS_FMAX = 0xA6C - SYS_FMAXD128 = 0xBC4 - SYS_FMAXD32 = 0xBC2 - SYS_FMAXD64 = 0xBC3 - SYS_FMAXF = 0xA6F - SYS_FMAXL = 0xA72 - SYS_FMIN = 0xA75 - SYS_FMIND128 = 0xBC7 - SYS_FMIND32 = 0xBC5 - SYS_FMIND64 = 0xBC6 - SYS_FMINF = 0xA78 - SYS_FMINL = 0xA7B - SYS_FMODD128 = 0xBCA - SYS_FMODD32 = 0xBC8 - SYS_FMODD64 = 0xBC9 - SYS_FOPEN64 = 0xD49 - SYS_FOPEN64_UNLOCKED = 0xD4A - SYS_FOPEN_UNLOCKED = 0xCFA - SYS_FPRINTF_UNLOCKED = 0xCAC - SYS_FPUTC_UNLOCKED = 0xC81 - SYS_FPUTS_UNLOCKED = 0xC7E - SYS_FPUTWC_UNLOCKED = 0xCAD - SYS_FPUTWS_UNLOCKED = 0xCAE - SYS_FREAD_NOUPDATE = 0xCEC - SYS_FREAD_NOUPDATE_UNLOCKED = 0xCED - SYS_FREAD_UNLOCKED = 0xC7B - SYS_FREEIFADDRS = 0xCE6 - SYS_FREOPEN64 = 0xD4B - SYS_FREOPEN64_UNLOCKED = 0xD4C - SYS_FREOPEN_UNLOCKED = 0xCFB - SYS_FREXPD128 = 0xBCE - SYS_FREXPD32 = 0xBCC - SYS_FREXPD64 = 0xBCD - SYS_FSCANF_UNLOCKED = 0xCAF - SYS_FSEEK64 = 0xCEF - SYS_FSEEK64_UNLOCKED = 0xCF5 - SYS_FSEEKO64 = 0xCF0 - SYS_FSEEKO64_UNLOCKED = 0xCF6 - SYS_FSEEKO_UNLOCKED = 0xCB1 - SYS_FSEEK_UNLOCKED = 0xCB0 - SYS_FSETPOS64 = 0xCF1 - SYS_FSETPOS64_UNLOCKED = 0xCF7 - SYS_FSETPOS_UNLOCKED = 0xCB3 - SYS_FTELL64 = 0xCF2 - SYS_FTELL64_UNLOCKED = 0xCF8 - SYS_FTELLO64 = 0xCF3 - SYS_FTELLO64_UNLOCKED = 0xCF9 - SYS_FTELLO_UNLOCKED = 0xCB5 - SYS_FTELL_UNLOCKED = 0xCB4 - SYS_FUPDATE = 0x0B5 - SYS_FUPDATE_UNLOCKED = 0xCB7 - SYS_FWIDE_UNLOCKED = 0xCB8 - SYS_FWPRINTF_UNLOCKED = 0xCB9 - SYS_FWRITE_UNLOCKED = 0xC7A - SYS_FWSCANF_UNLOCKED = 0xCBA - SYS_GETDATE64 = 0xD4F - SYS_GETIFADDRS = 0xCE7 - SYS_GETIPV4SOURCEFILTER = 0xC77 - SYS_GETSOURCEFILTER = 0xC79 - SYS_GETSYNTX = 0x0FD - SYS_GETS_UNLOCKED = 0xC7D - SYS_GETTIMEOFDAY64 = 0xD50 - SYS_GETWCHAR_UNLOCKED = 0xCBC - SYS_GETWC_UNLOCKED = 0xCBB - SYS_GMTIME = 0x0B0 - SYS_GMTIME64 = 0xCDC - SYS_GMTIME64_R = 0xCDD - SYS_HYPOTD128 = 0xBD1 - SYS_HYPOTD32 = 0xBCF - SYS_HYPOTD64 = 0xBD0 - SYS_ILOGBD128 = 0xBD4 - SYS_ILOGBD32 = 0xBD2 - SYS_ILOGBD64 = 0xBD3 - SYS_ILOGBF = 0xA7E - SYS_ILOGBL = 0xA81 - SYS_INET6_IS_SRCADDR = 0xD5A - SYS_ISBLANK = 0x0FE - SYS_ISWALNUM = 0x0FF - SYS_LDEXPD128 = 0xBD7 - SYS_LDEXPD32 = 0xBD5 - SYS_LDEXPD64 = 0xBD6 - SYS_LGAMMAD128 = 0xBDA - SYS_LGAMMAD32 = 0xBD8 - SYS_LGAMMAD64 = 0xBD9 - SYS_LIO_LISTIO = 0xC6A - SYS_LLRINT = 0xA84 - SYS_LLRINTD128 = 0xBDD - SYS_LLRINTD32 = 0xBDB - SYS_LLRINTD64 = 0xBDC - SYS_LLRINTF = 0xA87 - SYS_LLRINTL = 0xA8A - SYS_LLROUND = 0xA8D - SYS_LLROUNDD128 = 0xBE0 - SYS_LLROUNDD32 = 0xBDE - SYS_LLROUNDD64 = 0xBDF - SYS_LLROUNDF = 0xA90 - SYS_LLROUNDL = 0xA93 - SYS_LOCALTIM = 0x0B1 - SYS_LOCALTIME = 0x0B1 - SYS_LOCALTIME64 = 0xCDE - SYS_LOCALTIME64_R = 0xCDF - SYS_LOG10D128 = 0xBE6 - SYS_LOG10D32 = 0xBE4 - SYS_LOG10D64 = 0xBE5 - SYS_LOG1PD128 = 0xBE9 - SYS_LOG1PD32 = 0xBE7 - SYS_LOG1PD64 = 0xBE8 - SYS_LOG2D128 = 0xBEC - SYS_LOG2D32 = 0xBEA - SYS_LOG2D64 = 0xBEB - SYS_LOGBD128 = 0xBEF - SYS_LOGBD32 = 0xBED - SYS_LOGBD64 = 0xBEE - SYS_LOGBF = 0xA96 - SYS_LOGBL = 0xA99 - SYS_LOGD128 = 0xBE3 - SYS_LOGD32 = 0xBE1 - SYS_LOGD64 = 0xBE2 - SYS_LRINT = 0xA9C - SYS_LRINTD128 = 0xBF2 - SYS_LRINTD32 = 0xBF0 - SYS_LRINTD64 = 0xBF1 - SYS_LRINTF = 0xA9F - SYS_LRINTL = 0xAA2 - SYS_LROUNDD128 = 0xBF5 - SYS_LROUNDD32 = 0xBF3 - SYS_LROUNDD64 = 0xBF4 - SYS_LROUNDL = 0xAA5 - SYS_MBLEN = 0x0AF - SYS_MBRTOC16 = 0xD42 - SYS_MBRTOC32 = 0xD43 - SYS_MEMSET = 0x0A3 - SYS_MKTIME = 0x0AC - SYS_MKTIME64 = 0xCE0 - SYS_MODFD128 = 0xBF8 - SYS_MODFD32 = 0xBF6 - SYS_MODFD64 = 0xBF7 - SYS_NAN = 0xAA8 - SYS_NAND128 = 0xBFB - 
SYS_NAND32 = 0xBF9 - SYS_NAND64 = 0xBFA - SYS_NANF = 0xAAA - SYS_NANL = 0xAAC - SYS_NEARBYINT = 0xAAE - SYS_NEARBYINTD128 = 0xBFE - SYS_NEARBYINTD32 = 0xBFC - SYS_NEARBYINTD64 = 0xBFD - SYS_NEARBYINTF = 0xAB1 - SYS_NEARBYINTL = 0xAB4 - SYS_NEXTAFTERD128 = 0xC01 - SYS_NEXTAFTERD32 = 0xBFF - SYS_NEXTAFTERD64 = 0xC00 - SYS_NEXTAFTERF = 0xAB7 - SYS_NEXTAFTERL = 0xABA - SYS_NEXTTOWARD = 0xABD - SYS_NEXTTOWARDD128 = 0xC04 - SYS_NEXTTOWARDD32 = 0xC02 - SYS_NEXTTOWARDD64 = 0xC03 - SYS_NEXTTOWARDF = 0xAC0 - SYS_NEXTTOWARDL = 0xAC3 - SYS_NL_LANGINFO = 0x0FC - SYS_PERROR_UNLOCKED = 0xCBD - SYS_POSIX_FALLOCATE = 0xCE8 - SYS_POSIX_MEMALIGN = 0xCE9 - SYS_POSIX_OPENPT = 0xC66 - SYS_POWD128 = 0xC07 - SYS_POWD32 = 0xC05 - SYS_POWD64 = 0xC06 - SYS_PRINTF_UNLOCKED = 0xCBE - SYS_PSELECT = 0xC67 - SYS_PTHREAD_ATTR_GETSTACK = 0xB3E - SYS_PTHREAD_ATTR_SETSTACK = 0xB3F - SYS_PTHREAD_SECURITY_APPLID_NP = 0xCE4 - SYS_PUTS_UNLOCKED = 0xC7F - SYS_PUTWCHAR_UNLOCKED = 0xCC0 - SYS_PUTWC_UNLOCKED = 0xCBF - SYS_QUANTEXPD128 = 0xD46 - SYS_QUANTEXPD32 = 0xD44 - SYS_QUANTEXPD64 = 0xD45 - SYS_QUANTIZED128 = 0xC0A - SYS_QUANTIZED32 = 0xC08 - SYS_QUANTIZED64 = 0xC09 - SYS_REMAINDERD128 = 0xC0D - SYS_REMAINDERD32 = 0xC0B - SYS_REMAINDERD64 = 0xC0C - SYS_RESIZE_ALLOC = 0xCEB - SYS_REWIND_UNLOCKED = 0xCC1 - SYS_RINTD128 = 0xC13 - SYS_RINTD32 = 0xC11 - SYS_RINTD64 = 0xC12 - SYS_RINTF = 0xACB - SYS_RINTL = 0xACD - SYS_ROUND = 0xACF - SYS_ROUNDD128 = 0xC16 - SYS_ROUNDD32 = 0xC14 - SYS_ROUNDD64 = 0xC15 - SYS_ROUNDF = 0xAD2 - SYS_ROUNDL = 0xAD5 - SYS_SAMEQUANTUMD128 = 0xC19 - SYS_SAMEQUANTUMD32 = 0xC17 - SYS_SAMEQUANTUMD64 = 0xC18 - SYS_SCALBLN = 0xAD8 - SYS_SCALBLND128 = 0xC1C - SYS_SCALBLND32 = 0xC1A - SYS_SCALBLND64 = 0xC1B - SYS_SCALBLNF = 0xADB - SYS_SCALBLNL = 0xADE - SYS_SCALBND128 = 0xC1F - SYS_SCALBND32 = 0xC1D - SYS_SCALBND64 = 0xC1E - SYS_SCALBNF = 0xAE3 - SYS_SCALBNL = 0xAE6 - SYS_SCANF_UNLOCKED = 0xCC2 - SYS_SCHED_YIELD = 0xB32 - SYS_SETENV = 0x0C8 - SYS_SETIPV4SOURCEFILTER = 0xC76 - SYS_SETSOURCEFILTER = 0xC78 - SYS_SHM_OPEN = 0xC8C - SYS_SHM_UNLINK = 0xC8D - SYS_SIND128 = 0xC22 - SYS_SIND32 = 0xC20 - SYS_SIND64 = 0xC21 - SYS_SINHD128 = 0xC25 - SYS_SINHD32 = 0xC23 - SYS_SINHD64 = 0xC24 - SYS_SIZEOF_ALLOC = 0xCEA - SYS_SOCKATMARK = 0xC68 - SYS_SQRTD128 = 0xC28 - SYS_SQRTD32 = 0xC26 - SYS_SQRTD64 = 0xC27 - SYS_STRCHR = 0x0A0 - SYS_STRCSPN = 0x0A1 - SYS_STRERROR = 0x0A8 - SYS_STRERROR_R = 0xB33 - SYS_STRFTIME = 0x0B2 - SYS_STRLEN = 0x0A9 - SYS_STRPBRK = 0x0A2 - SYS_STRSPN = 0x0A4 - SYS_STRSTR = 0x0A5 - SYS_STRTOD128 = 0xC2B - SYS_STRTOD32 = 0xC29 - SYS_STRTOD64 = 0xC2A - SYS_STRTOK = 0x0A6 - SYS_TAND128 = 0xC2E - SYS_TAND32 = 0xC2C - SYS_TAND64 = 0xC2D - SYS_TANHD128 = 0xC31 - SYS_TANHD32 = 0xC2F - SYS_TANHD64 = 0xC30 - SYS_TGAMMAD128 = 0xC34 - SYS_TGAMMAD32 = 0xC32 - SYS_TGAMMAD64 = 0xC33 - SYS_TIME = 0x0AD - SYS_TIME64 = 0xCE1 - SYS_TMPFILE64 = 0xD4D - SYS_TMPFILE64_UNLOCKED = 0xD4E - SYS_TMPFILE_UNLOCKED = 0xCFD - SYS_TRUNCD128 = 0xC40 - SYS_TRUNCD32 = 0xC3E - SYS_TRUNCD64 = 0xC3F - SYS_UNGETC_UNLOCKED = 0xCC3 - SYS_UNGETWC_UNLOCKED = 0xCC4 - SYS_UNSETENV = 0xB34 - SYS_VFPRINTF_UNLOCKED = 0xCC5 - SYS_VFSCANF_UNLOCKED = 0xCC7 - SYS_VFWPRINTF_UNLOCKED = 0xCC9 - SYS_VFWSCANF_UNLOCKED = 0xCCB - SYS_VPRINTF_UNLOCKED = 0xCCD - SYS_VSCANF_UNLOCKED = 0xCCF - SYS_VWPRINTF_UNLOCKED = 0xCD1 - SYS_VWSCANF_UNLOCKED = 0xCD3 - SYS_WCSTOD128 = 0xC43 - SYS_WCSTOD32 = 0xC41 - SYS_WCSTOD64 = 0xC42 - SYS_WPRINTF_UNLOCKED = 0xCD5 - SYS_WSCANF_UNLOCKED = 0xCD6 - SYS__FLUSHLBF = 0xD68 - SYS__FLUSHLBF_UNLOCKED = 0xD6F - SYS___ACOSHF_H = 0xA54 - 
SYS___ACOSHL_H = 0xA55 - SYS___ASINHF_H = 0xA56 - SYS___ASINHL_H = 0xA57 - SYS___ATANPID128 = 0xC6D - SYS___ATANPID32 = 0xC6B - SYS___ATANPID64 = 0xC6C - SYS___CBRTF_H = 0xA58 - SYS___CBRTL_H = 0xA59 - SYS___CDUMP = 0x0C4 - SYS___CLASS = 0xAFA - SYS___CLASS2 = 0xB99 - SYS___CLASS2D128 = 0xC99 - SYS___CLASS2D32 = 0xC97 - SYS___CLASS2D64 = 0xC98 - SYS___CLASS2F = 0xC91 - SYS___CLASS2F_B = 0xC93 - SYS___CLASS2F_H = 0xC94 - SYS___CLASS2L = 0xC92 - SYS___CLASS2L_B = 0xC95 - SYS___CLASS2L_H = 0xC96 - SYS___CLASS2_B = 0xB9A - SYS___CLASS2_H = 0xB9B - SYS___CLASS_B = 0xAFB - SYS___CLASS_H = 0xAFC - SYS___CLOGL_B = 0xA01 - SYS___CLOGL_H = 0xA02 - SYS___CLRENV = 0x0C9 - SYS___CLRMF = 0x0BD - SYS___CODEPAGE_INFO = 0xC64 - SYS___CONJF_B = 0xA07 - SYS___CONJF_H = 0xA08 - SYS___CONJL_B = 0xA0A - SYS___CONJL_H = 0xA0B - SYS___CONJ_B = 0xA04 - SYS___CONJ_H = 0xA05 - SYS___COPYSIGN_B = 0xA5A - SYS___COPYSIGN_H = 0xAF5 - SYS___COSPID128 = 0xC70 - SYS___COSPID32 = 0xC6E - SYS___COSPID64 = 0xC6F - SYS___CPOWF_B = 0xA10 - SYS___CPOWF_H = 0xA11 - SYS___CPOWL_B = 0xA13 - SYS___CPOWL_H = 0xA14 - SYS___CPOW_B = 0xA0D - SYS___CPOW_H = 0xA0E - SYS___CPROJF_B = 0xA19 - SYS___CPROJF_H = 0xA1A - SYS___CPROJL_B = 0xA1C - SYS___CPROJL_H = 0xA1D - SYS___CPROJ_B = 0xA16 - SYS___CPROJ_H = 0xA17 - SYS___CREALF_B = 0xA22 - SYS___CREALF_H = 0xA23 - SYS___CREALL_B = 0xA25 - SYS___CREALL_H = 0xA26 - SYS___CREAL_B = 0xA1F - SYS___CREAL_H = 0xA20 - SYS___CSINF_B = 0xA2B - SYS___CSINF_H = 0xA2C - SYS___CSINHF_B = 0xA34 - SYS___CSINHF_H = 0xA35 - SYS___CSINHL_B = 0xA37 - SYS___CSINHL_H = 0xA38 - SYS___CSINH_B = 0xA31 - SYS___CSINH_H = 0xA32 - SYS___CSINL_B = 0xA2E - SYS___CSINL_H = 0xA2F - SYS___CSIN_B = 0xA28 - SYS___CSIN_H = 0xA29 - SYS___CSNAP = 0x0C5 - SYS___CSQRTF_B = 0xA3D - SYS___CSQRTF_H = 0xA3E - SYS___CSQRTL_B = 0xA40 - SYS___CSQRTL_H = 0xA41 - SYS___CSQRT_B = 0xA3A - SYS___CSQRT_H = 0xA3B - SYS___CTANF_B = 0xA46 - SYS___CTANF_H = 0xA47 - SYS___CTANHF_B = 0xA4F - SYS___CTANHF_H = 0xA50 - SYS___CTANHL_B = 0xA52 - SYS___CTANHL_H = 0xA53 - SYS___CTANH_B = 0xA4C - SYS___CTANH_H = 0xA4D - SYS___CTANL_B = 0xA49 - SYS___CTANL_H = 0xA4A - SYS___CTAN_B = 0xA43 - SYS___CTAN_H = 0xA44 - SYS___CTEST = 0x0C7 - SYS___CTRACE = 0x0C6 - SYS___D1TOP = 0xC9B - SYS___D2TOP = 0xC9C - SYS___D4TOP = 0xC9D - SYS___DYNALL = 0x0C3 - SYS___DYNFRE = 0x0C2 - SYS___EXP2F_H = 0xA5E - SYS___EXP2L_H = 0xA5F - SYS___EXP2_H = 0xA5D - SYS___EXPM1F_H = 0xA5B - SYS___EXPM1L_H = 0xA5C - SYS___FBUFSIZE = 0xD60 - SYS___FLBF = 0xD62 - SYS___FLDATA = 0x0C1 - SYS___FMAF_B = 0xA67 - SYS___FMAF_H = 0xA68 - SYS___FMAL_B = 0xA6A - SYS___FMAL_H = 0xA6B - SYS___FMAXF_B = 0xA70 - SYS___FMAXF_H = 0xA71 - SYS___FMAXL_B = 0xA73 - SYS___FMAXL_H = 0xA74 - SYS___FMAX_B = 0xA6D - SYS___FMAX_H = 0xA6E - SYS___FMA_B = 0xA64 - SYS___FMA_H = 0xA65 - SYS___FMINF_B = 0xA79 - SYS___FMINF_H = 0xA7A - SYS___FMINL_B = 0xA7C - SYS___FMINL_H = 0xA7D - SYS___FMIN_B = 0xA76 - SYS___FMIN_H = 0xA77 - SYS___FPENDING = 0xD61 - SYS___FPENDING_UNLOCKED = 0xD6C - SYS___FPURGE = 0xD69 - SYS___FPURGE_UNLOCKED = 0xD70 - SYS___FP_CAST_D = 0xBCB - SYS___FREADABLE = 0xD63 - SYS___FREADAHEAD = 0xD6A - SYS___FREADAHEAD_UNLOCKED = 0xD71 - SYS___FREADING = 0xD65 - SYS___FREADING_UNLOCKED = 0xD6D - SYS___FSEEK2 = 0xB3C - SYS___FSETERR = 0xD6B - SYS___FSETLOCKING = 0xD67 - SYS___FTCHEP = 0x0BF - SYS___FTELL2 = 0xB3B - SYS___FUPDT = 0x0B5 - SYS___FWRITABLE = 0xD64 - SYS___FWRITING = 0xD66 - SYS___FWRITING_UNLOCKED = 0xD6E - SYS___GETCB = 0x0B4 - SYS___GETGRGID1 = 0xD5B - SYS___GETGRNAM1 = 0xD5C - SYS___GETTHENT = 
0xCE5 - SYS___GETTOD = 0xD3E - SYS___HYPOTF_H = 0xAF6 - SYS___HYPOTL_H = 0xAF7 - SYS___ILOGBF_B = 0xA7F - SYS___ILOGBF_H = 0xA80 - SYS___ILOGBL_B = 0xA82 - SYS___ILOGBL_H = 0xA83 - SYS___ISBLANK_A = 0xB2E - SYS___ISBLNK = 0x0FE - SYS___ISWBLANK_A = 0xB2F - SYS___LE_CEEGTJS = 0xD72 - SYS___LE_TRACEBACK = 0xB7A - SYS___LGAMMAL_H = 0xA62 - SYS___LGAMMA_B_C99 = 0xB39 - SYS___LGAMMA_H_C99 = 0xB38 - SYS___LGAMMA_R_C99 = 0xB3A - SYS___LLRINTF_B = 0xA88 - SYS___LLRINTF_H = 0xA89 - SYS___LLRINTL_B = 0xA8B - SYS___LLRINTL_H = 0xA8C - SYS___LLRINT_B = 0xA85 - SYS___LLRINT_H = 0xA86 - SYS___LLROUNDF_B = 0xA91 - SYS___LLROUNDF_H = 0xA92 - SYS___LLROUNDL_B = 0xA94 - SYS___LLROUNDL_H = 0xA95 - SYS___LLROUND_B = 0xA8E - SYS___LLROUND_H = 0xA8F - SYS___LOCALE_CTL = 0xD47 - SYS___LOG1PF_H = 0xA60 - SYS___LOG1PL_H = 0xA61 - SYS___LOGBF_B = 0xA97 - SYS___LOGBF_H = 0xA98 - SYS___LOGBL_B = 0xA9A - SYS___LOGBL_H = 0xA9B - SYS___LOGIN_APPLID = 0xCE2 - SYS___LRINTF_B = 0xAA0 - SYS___LRINTF_H = 0xAA1 - SYS___LRINTL_B = 0xAA3 - SYS___LRINTL_H = 0xAA4 - SYS___LRINT_B = 0xA9D - SYS___LRINT_H = 0xA9E - SYS___LROUNDF_FIXUP = 0xB31 - SYS___LROUNDL_B = 0xAA6 - SYS___LROUNDL_H = 0xAA7 - SYS___LROUND_FIXUP = 0xB30 - SYS___MOSERVICES = 0xD3D - SYS___MUST_STAY_CLEAN = 0xB7C - SYS___NANF_B = 0xAAB - SYS___NANL_B = 0xAAD - SYS___NAN_B = 0xAA9 - SYS___NEARBYINTF_B = 0xAB2 - SYS___NEARBYINTF_H = 0xAB3 - SYS___NEARBYINTL_B = 0xAB5 - SYS___NEARBYINTL_H = 0xAB6 - SYS___NEARBYINT_B = 0xAAF - SYS___NEARBYINT_H = 0xAB0 - SYS___NEXTAFTERF_B = 0xAB8 - SYS___NEXTAFTERF_H = 0xAB9 - SYS___NEXTAFTERL_B = 0xABB - SYS___NEXTAFTERL_H = 0xABC - SYS___NEXTTOWARDF_B = 0xAC1 - SYS___NEXTTOWARDF_H = 0xAC2 - SYS___NEXTTOWARDL_B = 0xAC4 - SYS___NEXTTOWARDL_H = 0xAC5 - SYS___NEXTTOWARD_B = 0xABE - SYS___NEXTTOWARD_H = 0xABF - SYS___O_ENV = 0xB7D - SYS___PASSWD_APPLID = 0xCE3 - SYS___PTOD1 = 0xC9E - SYS___PTOD2 = 0xC9F - SYS___PTOD4 = 0xCA0 - SYS___REGCOMP_STD = 0x0EA - SYS___REMAINDERF_H = 0xAC6 - SYS___REMAINDERL_H = 0xAC7 - SYS___REMQUOD128 = 0xC10 - SYS___REMQUOD32 = 0xC0E - SYS___REMQUOD64 = 0xC0F - SYS___REMQUOF_H = 0xAC9 - SYS___REMQUOL_H = 0xACA - SYS___REMQUO_H = 0xAC8 - SYS___RINTF_B = 0xACC - SYS___RINTL_B = 0xACE - SYS___ROUNDF_B = 0xAD3 - SYS___ROUNDF_H = 0xAD4 - SYS___ROUNDL_B = 0xAD6 - SYS___ROUNDL_H = 0xAD7 - SYS___ROUND_B = 0xAD0 - SYS___ROUND_H = 0xAD1 - SYS___SCALBLNF_B = 0xADC - SYS___SCALBLNF_H = 0xADD - SYS___SCALBLNL_B = 0xADF - SYS___SCALBLNL_H = 0xAE0 - SYS___SCALBLN_B = 0xAD9 - SYS___SCALBLN_H = 0xADA - SYS___SCALBNF_B = 0xAE4 - SYS___SCALBNF_H = 0xAE5 - SYS___SCALBNL_B = 0xAE7 - SYS___SCALBNL_H = 0xAE8 - SYS___SCALBN_B = 0xAE1 - SYS___SCALBN_H = 0xAE2 - SYS___SETENV = 0x0C8 - SYS___SINPID128 = 0xC73 - SYS___SINPID32 = 0xC71 - SYS___SINPID64 = 0xC72 - SYS___SMF_RECORD2 = 0xD48 - SYS___STATIC_REINIT = 0xB3D - SYS___TGAMMAF_H_C99 = 0xB79 - SYS___TGAMMAL_H = 0xAE9 - SYS___TGAMMA_H_C99 = 0xB78 - SYS___TOCSNAME2 = 0xC9A - SYS_CEIL = 0x01F - SYS_CHAUDIT = 0x1E0 - SYS_EXP = 0x01A - SYS_FCHAUDIT = 0x1E1 - SYS_FREXP = 0x01D - SYS_GETGROUPSBYNAME = 0x1E2 - SYS_GETPWUID = 0x1A0 - SYS_GETUID = 0x1A1 - SYS_ISATTY = 0x1A3 - SYS_KILL = 0x1A4 - SYS_LDEXP = 0x01E - SYS_LINK = 0x1A5 - SYS_LOG10 = 0x01C - SYS_LSEEK = 0x1A6 - SYS_LSTAT = 0x1A7 - SYS_MKDIR = 0x1A8 - SYS_MKFIFO = 0x1A9 - SYS_MKNOD = 0x1AA - SYS_MODF = 0x01B - SYS_MOUNT = 0x1AB - SYS_OPEN = 0x1AC - SYS_OPENDIR = 0x1AD - SYS_PATHCONF = 0x1AE - SYS_PAUSE = 0x1AF - SYS_PIPE = 0x1B0 - SYS_PTHREAD_ATTR_DESTROY = 0x1E7 - SYS_PTHREAD_ATTR_GETDETACHSTATE = 0x1EB - 
SYS_PTHREAD_ATTR_GETSTACKSIZE = 0x1E9 - SYS_PTHREAD_ATTR_GETWEIGHT_NP = 0x1ED - SYS_PTHREAD_ATTR_INIT = 0x1E6 - SYS_PTHREAD_ATTR_SETDETACHSTATE = 0x1EA - SYS_PTHREAD_ATTR_SETSTACKSIZE = 0x1E8 - SYS_PTHREAD_ATTR_SETWEIGHT_NP = 0x1EC - SYS_PTHREAD_CANCEL = 0x1EE - SYS_PTHREAD_CLEANUP_POP = 0x1F0 - SYS_PTHREAD_CLEANUP_PUSH = 0x1EF - SYS_PTHREAD_CONDATTR_DESTROY = 0x1F2 - SYS_PTHREAD_CONDATTR_INIT = 0x1F1 - SYS_PTHREAD_COND_BROADCAST = 0x1F6 - SYS_PTHREAD_COND_DESTROY = 0x1F4 - SYS_PTHREAD_COND_INIT = 0x1F3 - SYS_PTHREAD_COND_SIGNAL = 0x1F5 - SYS_PTHREAD_COND_TIMEDWAIT = 0x1F8 - SYS_PTHREAD_COND_WAIT = 0x1F7 - SYS_PTHREAD_CREATE = 0x1F9 - SYS_PTHREAD_DETACH = 0x1FA - SYS_PTHREAD_EQUAL = 0x1FB - SYS_PTHREAD_EXIT = 0x1E4 - SYS_PTHREAD_GETSPECIFIC = 0x1FC - SYS_PTHREAD_JOIN = 0x1FD - SYS_PTHREAD_KEY_CREATE = 0x1FE - SYS_PTHREAD_KILL = 0x1E5 - SYS_PTHREAD_MUTEXATTR_INIT = 0x1FF - SYS_READ = 0x1B2 - SYS_READDIR = 0x1B3 - SYS_READLINK = 0x1B4 - SYS_REWINDDIR = 0x1B5 - SYS_RMDIR = 0x1B6 - SYS_SETEGID = 0x1B7 - SYS_SETEUID = 0x1B8 - SYS_SETGID = 0x1B9 - SYS_SETPGID = 0x1BA - SYS_SETSID = 0x1BB - SYS_SETUID = 0x1BC - SYS_SIGACTION = 0x1BD - SYS_SIGADDSET = 0x1BE - SYS_SIGDELSET = 0x1BF - SYS_SIGEMPTYSET = 0x1C0 - SYS_SIGFILLSET = 0x1C1 - SYS_SIGISMEMBER = 0x1C2 - SYS_SIGLONGJMP = 0x1C3 - SYS_SIGPENDING = 0x1C4 - SYS_SIGPROCMASK = 0x1C5 - SYS_SIGSETJMP = 0x1C6 - SYS_SIGSUSPEND = 0x1C7 - SYS_SIGWAIT = 0x1E3 - SYS_SLEEP = 0x1C8 - SYS_STAT = 0x1C9 - SYS_SYMLINK = 0x1CB - SYS_SYSCONF = 0x1CC - SYS_TCDRAIN = 0x1CD - SYS_TCFLOW = 0x1CE - SYS_TCFLUSH = 0x1CF - SYS_TCGETATTR = 0x1D0 - SYS_TCGETPGRP = 0x1D1 - SYS_TCSENDBREAK = 0x1D2 - SYS_TCSETATTR = 0x1D3 - SYS_TCSETPGRP = 0x1D4 - SYS_TIMES = 0x1D5 - SYS_TTYNAME = 0x1D6 - SYS_TZSET = 0x1D7 - SYS_UMASK = 0x1D8 - SYS_UMOUNT = 0x1D9 - SYS_UNAME = 0x1DA - SYS_UNLINK = 0x1DB - SYS_UTIME = 0x1DC - SYS_WAIT = 0x1DD - SYS_WAITPID = 0x1DE - SYS_WRITE = 0x1DF - SYS_W_GETPSENT = 0x1B1 - SYS_W_IOCTL = 0x1A2 - SYS_W_STATFS = 0x1CA - SYS_A64L = 0x2EF - SYS_BCMP = 0x2B9 - SYS_BCOPY = 0x2BA - SYS_BZERO = 0x2BB - SYS_CATCLOSE = 0x2B6 - SYS_CATGETS = 0x2B7 - SYS_CATOPEN = 0x2B8 - SYS_CRYPT = 0x2AC - SYS_DBM_CLEARERR = 0x2F7 - SYS_DBM_CLOSE = 0x2F8 - SYS_DBM_DELETE = 0x2F9 - SYS_DBM_ERROR = 0x2FA - SYS_DBM_FETCH = 0x2FB - SYS_DBM_FIRSTKEY = 0x2FC - SYS_DBM_NEXTKEY = 0x2FD - SYS_DBM_OPEN = 0x2FE - SYS_DBM_STORE = 0x2FF - SYS_DRAND48 = 0x2B2 - SYS_ENCRYPT = 0x2AD - SYS_ENDUTXENT = 0x2E1 - SYS_ERAND48 = 0x2B3 - SYS_ERF = 0x02C - SYS_ERFC = 0x02D - SYS_FCHDIR = 0x2D9 - SYS_FFS = 0x2BC - SYS_FMTMSG = 0x2E5 - SYS_FSTATVFS = 0x2B4 - SYS_FTIME = 0x2F5 - SYS_GAMMA = 0x02E - SYS_GETDATE = 0x2A6 - SYS_GETPAGESIZE = 0x2D8 - SYS_GETTIMEOFDAY = 0x2F6 - SYS_GETUTXENT = 0x2E0 - SYS_GETUTXID = 0x2E2 - SYS_GETUTXLINE = 0x2E3 - SYS_HCREATE = 0x2C6 - SYS_HDESTROY = 0x2C7 - SYS_HSEARCH = 0x2C8 - SYS_HYPOT = 0x02B - SYS_INDEX = 0x2BD - SYS_INITSTATE = 0x2C2 - SYS_INSQUE = 0x2CF - SYS_ISASCII = 0x2ED - SYS_JRAND48 = 0x2E6 - SYS_L64A = 0x2F0 - SYS_LCONG48 = 0x2EA - SYS_LFIND = 0x2C9 - SYS_LRAND48 = 0x2E7 - SYS_LSEARCH = 0x2CA - SYS_MEMCCPY = 0x2D4 - SYS_MRAND48 = 0x2E8 - SYS_NRAND48 = 0x2E9 - SYS_PCLOSE = 0x2D2 - SYS_POPEN = 0x2D1 - SYS_PUTUTXLINE = 0x2E4 - SYS_RANDOM = 0x2C4 - SYS_REMQUE = 0x2D0 - SYS_RINDEX = 0x2BE - SYS_SEED48 = 0x2EC - SYS_SETKEY = 0x2AE - SYS_SETSTATE = 0x2C3 - SYS_SETUTXENT = 0x2DF - SYS_SRAND48 = 0x2EB - SYS_SRANDOM = 0x2C5 - SYS_STATVFS = 0x2B5 - SYS_STRCASECMP = 0x2BF - SYS_STRDUP = 0x2C0 - SYS_STRNCASECMP = 0x2C1 - SYS_SWAB = 0x2D3 - SYS_TDELETE = 0x2CB - SYS_TFIND = 0x2CC - 
SYS_TOASCII = 0x2EE - SYS_TSEARCH = 0x2CD - SYS_TWALK = 0x2CE - SYS_UALARM = 0x2F1 - SYS_USLEEP = 0x2F2 - SYS_WAIT3 = 0x2A7 - SYS_WAITID = 0x2A8 - SYS_Y1 = 0x02A - SYS___ATOE = 0x2DB - SYS___ATOE_L = 0x2DC - SYS___CATTRM = 0x2A9 - SYS___CNVBLK = 0x2AF - SYS___CRYTRM = 0x2B0 - SYS___DLGHT = 0x2A1 - SYS___ECRTRM = 0x2B1 - SYS___ETOA = 0x2DD - SYS___ETOA_L = 0x2DE - SYS___GDTRM = 0x2AA - SYS___OCLCK = 0x2DA - SYS___OPARGF = 0x2A2 - SYS___OPERRF = 0x2A5 - SYS___OPINDF = 0x2A4 - SYS___OPOPTF = 0x2A3 - SYS___RNDTRM = 0x2AB - SYS___SRCTRM = 0x2F4 - SYS___TZONE = 0x2A0 - SYS___UTXTRM = 0x2F3 - SYS_ASIN = 0x03E - SYS_ISXDIGIT = 0x03B - SYS_SETLOCAL = 0x03A - SYS_SETLOCALE = 0x03A - SYS_SIN = 0x03F - SYS_TOLOWER = 0x03C - SYS_TOUPPER = 0x03D - SYS_ACCEPT_AND_RECV = 0x4F7 - SYS_ATOL = 0x04E - SYS_CHECKSCH = 0x4BC - SYS_CHECKSCHENV = 0x4BC - SYS_CLEARERR = 0x04C - SYS_CONNECTS = 0x4B5 - SYS_CONNECTSERVER = 0x4B5 - SYS_CONNECTW = 0x4B4 - SYS_CONNECTWORKMGR = 0x4B4 - SYS_CONTINUE = 0x4B3 - SYS_CONTINUEWORKUNIT = 0x4B3 - SYS_COPYSIGN = 0x4C2 - SYS_CREATEWO = 0x4B2 - SYS_CREATEWORKUNIT = 0x4B2 - SYS_DELETEWO = 0x4B9 - SYS_DELETEWORKUNIT = 0x4B9 - SYS_DISCONNE = 0x4B6 - SYS_DISCONNECTSERVER = 0x4B6 - SYS_FEOF = 0x04D - SYS_FERROR = 0x04A - SYS_FINITE = 0x4C8 - SYS_GAMMA_R = 0x4E2 - SYS_JOINWORK = 0x4B7 - SYS_JOINWORKUNIT = 0x4B7 - SYS_LEAVEWOR = 0x4B8 - SYS_LEAVEWORKUNIT = 0x4B8 - SYS_LGAMMA_R = 0x4EB - SYS_MATHERR = 0x4D0 - SYS_PERROR = 0x04F - SYS_QUERYMET = 0x4BA - SYS_QUERYMETRICS = 0x4BA - SYS_QUERYSCH = 0x4BB - SYS_QUERYSCHENV = 0x4BB - SYS_REWIND = 0x04B - SYS_SCALBN = 0x4D4 - SYS_SIGNIFIC = 0x4D5 - SYS_SIGNIFICAND = 0x4D5 - SYS___ACOSH_B = 0x4DA - SYS___ACOS_B = 0x4D9 - SYS___ASINH_B = 0x4BE - SYS___ASIN_B = 0x4DB - SYS___ATAN2_B = 0x4DC - SYS___ATANH_B = 0x4DD - SYS___ATAN_B = 0x4BF - SYS___CBRT_B = 0x4C0 - SYS___CEIL_B = 0x4C1 - SYS___COSH_B = 0x4DE - SYS___COS_B = 0x4C3 - SYS___DGHT = 0x4A8 - SYS___ENVN = 0x4B0 - SYS___ERFC_B = 0x4C5 - SYS___ERF_B = 0x4C4 - SYS___EXPM1_B = 0x4C6 - SYS___EXP_B = 0x4DF - SYS___FABS_B = 0x4C7 - SYS___FLOOR_B = 0x4C9 - SYS___FMOD_B = 0x4E0 - SYS___FP_SETMODE = 0x4F8 - SYS___FREXP_B = 0x4CA - SYS___GAMMA_B = 0x4E1 - SYS___GDRR = 0x4A1 - SYS___HRRNO = 0x4A2 - SYS___HYPOT_B = 0x4E3 - SYS___ILOGB_B = 0x4CB - SYS___ISNAN_B = 0x4CC - SYS___J0_B = 0x4E4 - SYS___J1_B = 0x4E6 - SYS___JN_B = 0x4E8 - SYS___LDEXP_B = 0x4CD - SYS___LGAMMA_B = 0x4EA - SYS___LOG10_B = 0x4ED - SYS___LOG1P_B = 0x4CE - SYS___LOGB_B = 0x4CF - SYS___LOGIN = 0x4F5 - SYS___LOG_B = 0x4EC - SYS___MLOCKALL = 0x4B1 - SYS___MODF_B = 0x4D1 - SYS___NEXTAFTER_B = 0x4D2 - SYS___OPENDIR2 = 0x4F3 - SYS___OPEN_STAT = 0x4F6 - SYS___OPND = 0x4A5 - SYS___OPPT = 0x4A6 - SYS___OPRG = 0x4A3 - SYS___OPRR = 0x4A4 - SYS___PID_AFFINITY = 0x4BD - SYS___POW_B = 0x4EE - SYS___READDIR2 = 0x4F4 - SYS___REMAINDER_B = 0x4EF - SYS___RINT_B = 0x4D3 - SYS___SCALB_B = 0x4F0 - SYS___SIGACTIONSET = 0x4FB - SYS___SIGGM = 0x4A7 - SYS___SINH_B = 0x4F1 - SYS___SIN_B = 0x4D6 - SYS___SQRT_B = 0x4F2 - SYS___TANH_B = 0x4D8 - SYS___TAN_B = 0x4D7 - SYS___TRRNO = 0x4AF - SYS___TZNE = 0x4A9 - SYS___TZZN = 0x4AA - SYS___UCREATE = 0x4FC - SYS___UFREE = 0x4FE - SYS___UHEAPREPORT = 0x4FF - SYS___UMALLOC = 0x4FD - SYS___Y0_B = 0x4E5 - SYS___Y1_B = 0x4E7 - SYS___YN_B = 0x4E9 - SYS_ABORT = 0x05C - SYS_ASCTIME_R = 0x5E0 - SYS_ATEXIT = 0x05D - SYS_CONNECTE = 0x5AE - SYS_CONNECTEXPORTIMPORT = 0x5AE - SYS_CTIME_R = 0x5E1 - SYS_DN_COMP = 0x5DF - SYS_DN_EXPAND = 0x5DD - SYS_DN_SKIPNAME = 0x5DE - SYS_EXIT = 0x05A - SYS_EXPORTWO = 0x5A1 - SYS_EXPORTWORKUNIT = 0x5A1 
- SYS_EXTRACTW = 0x5A5 - SYS_EXTRACTWORKUNIT = 0x5A5 - SYS_FSEEKO = 0x5C9 - SYS_FTELLO = 0x5C8 - SYS_GETGRGID_R = 0x5E7 - SYS_GETGRNAM_R = 0x5E8 - SYS_GETLOGIN_R = 0x5E9 - SYS_GETPWNAM_R = 0x5EA - SYS_GETPWUID_R = 0x5EB - SYS_GMTIME_R = 0x5E2 - SYS_IMPORTWO = 0x5A3 - SYS_IMPORTWORKUNIT = 0x5A3 - SYS_INET_NTOP = 0x5D3 - SYS_INET_PTON = 0x5D4 - SYS_LLABS = 0x5CE - SYS_LLDIV = 0x5CB - SYS_LOCALTIME_R = 0x5E3 - SYS_PTHREAD_ATFORK = 0x5ED - SYS_PTHREAD_ATTR_GETDETACHSTATE_U98 = 0x5FB - SYS_PTHREAD_ATTR_GETGUARDSIZE = 0x5EE - SYS_PTHREAD_ATTR_GETSCHEDPARAM = 0x5F9 - SYS_PTHREAD_ATTR_GETSTACKADDR = 0x5EF - SYS_PTHREAD_ATTR_SETDETACHSTATE_U98 = 0x5FC - SYS_PTHREAD_ATTR_SETGUARDSIZE = 0x5F0 - SYS_PTHREAD_ATTR_SETSCHEDPARAM = 0x5FA - SYS_PTHREAD_ATTR_SETSTACKADDR = 0x5F1 - SYS_PTHREAD_CONDATTR_GETPSHARED = 0x5F2 - SYS_PTHREAD_CONDATTR_SETPSHARED = 0x5F3 - SYS_PTHREAD_DETACH_U98 = 0x5FD - SYS_PTHREAD_GETCONCURRENCY = 0x5F4 - SYS_PTHREAD_GETSPECIFIC_U98 = 0x5FE - SYS_PTHREAD_KEY_DELETE = 0x5F5 - SYS_PTHREAD_SETCANCELSTATE = 0x5FF - SYS_PTHREAD_SETCONCURRENCY = 0x5F6 - SYS_PTHREAD_SIGMASK = 0x5F7 - SYS_QUERYENC = 0x5AD - SYS_QUERYWORKUNITCLASSIFICATION = 0x5AD - SYS_RAISE = 0x05E - SYS_RAND_R = 0x5E4 - SYS_READDIR_R = 0x5E6 - SYS_REALLOC = 0x05B - SYS_RES_INIT = 0x5D8 - SYS_RES_MKQUERY = 0x5D7 - SYS_RES_QUERY = 0x5D9 - SYS_RES_QUERYDOMAIN = 0x5DC - SYS_RES_SEARCH = 0x5DA - SYS_RES_SEND = 0x5DB - SYS_SETJMP = 0x05F - SYS_SIGQUEUE = 0x5A9 - SYS_STRTOK_R = 0x5E5 - SYS_STRTOLL = 0x5B0 - SYS_STRTOULL = 0x5B1 - SYS_TTYNAME_R = 0x5EC - SYS_UNDOEXPO = 0x5A2 - SYS_UNDOEXPORTWORKUNIT = 0x5A2 - SYS_UNDOIMPO = 0x5A4 - SYS_UNDOIMPORTWORKUNIT = 0x5A4 - SYS_WCSTOLL = 0x5CC - SYS_WCSTOULL = 0x5CD - SYS___ABORT = 0x05C - SYS___CONSOLE2 = 0x5D2 - SYS___CPL = 0x5A6 - SYS___DISCARDDATA = 0x5F8 - SYS___DSA_PREV = 0x5B2 - SYS___EP_FIND = 0x5B3 - SYS___FP_SWAPMODE = 0x5AF - SYS___GETUSERID = 0x5AB - SYS___GET_CPUID = 0x5B9 - SYS___GET_SYSTEM_SETTINGS = 0x5BA - SYS___IPDOMAINNAME = 0x5AC - SYS___MAP_INIT = 0x5A7 - SYS___MAP_SERVICE = 0x5A8 - SYS___MOUNT = 0x5AA - SYS___MSGRCV_TIMED = 0x5B7 - SYS___RES = 0x5D6 - SYS___SEMOP_TIMED = 0x5B8 - SYS___SERVER_THREADS_QUERY = 0x5B4 - SYS_FPRINTF = 0x06D - SYS_FSCANF = 0x06A - SYS_PRINTF = 0x06F - SYS_SETBUF = 0x06B - SYS_SETVBUF = 0x06C - SYS_SSCANF = 0x06E - SYS___CATGETS_A = 0x6C0 - SYS___CHAUDIT_A = 0x6F4 - SYS___CHMOD_A = 0x6E8 - SYS___COLLATE_INIT_A = 0x6AC - SYS___CREAT_A = 0x6F6 - SYS___CTYPE_INIT_A = 0x6AF - SYS___DLLLOAD_A = 0x6DF - SYS___DLLQUERYFN_A = 0x6E0 - SYS___DLLQUERYVAR_A = 0x6E1 - SYS___E2A_L = 0x6E3 - SYS___EXECLE_A = 0x6A0 - SYS___EXECLP_A = 0x6A4 - SYS___EXECVE_A = 0x6C1 - SYS___EXECVP_A = 0x6C2 - SYS___EXECV_A = 0x6B1 - SYS___FPRINTF_A = 0x6FA - SYS___GETADDRINFO_A = 0x6BF - SYS___GETNAMEINFO_A = 0x6C4 - SYS___GET_WCTYPE_STD_A = 0x6AE - SYS___ICONV_OPEN_A = 0x6DE - SYS___IF_INDEXTONAME_A = 0x6DC - SYS___IF_NAMETOINDEX_A = 0x6DB - SYS___ISWCTYPE_A = 0x6B0 - SYS___IS_WCTYPE_STD_A = 0x6B2 - SYS___LOCALECONV_A = 0x6B8 - SYS___LOCALECONV_STD_A = 0x6B9 - SYS___LOCALE_INIT_A = 0x6B7 - SYS___LSTAT_A = 0x6EE - SYS___LSTAT_O_A = 0x6EF - SYS___MKDIR_A = 0x6E9 - SYS___MKFIFO_A = 0x6EC - SYS___MKNOD_A = 0x6F0 - SYS___MONETARY_INIT_A = 0x6BC - SYS___MOUNT_A = 0x6F1 - SYS___NL_CSINFO_A = 0x6D6 - SYS___NL_LANGINFO_A = 0x6BA - SYS___NL_LNAGINFO_STD_A = 0x6BB - SYS___NL_MONINFO_A = 0x6D7 - SYS___NL_NUMINFO_A = 0x6D8 - SYS___NL_RESPINFO_A = 0x6D9 - SYS___NL_TIMINFO_A = 0x6DA - SYS___NUMERIC_INIT_A = 0x6C6 - SYS___OPEN_A = 0x6F7 - SYS___PRINTF_A = 0x6DD - SYS___RESP_INIT_A = 0x6C7 
- SYS___RPMATCH_A = 0x6C8 - SYS___RPMATCH_C_A = 0x6C9 - SYS___RPMATCH_STD_A = 0x6CA - SYS___SETLOCALE_A = 0x6F9 - SYS___SPAWNP_A = 0x6C5 - SYS___SPAWN_A = 0x6C3 - SYS___SPRINTF_A = 0x6FB - SYS___STAT_A = 0x6EA - SYS___STAT_O_A = 0x6EB - SYS___STRCOLL_STD_A = 0x6A1 - SYS___STRFMON_A = 0x6BD - SYS___STRFMON_STD_A = 0x6BE - SYS___STRFTIME_A = 0x6CC - SYS___STRFTIME_STD_A = 0x6CD - SYS___STRPTIME_A = 0x6CE - SYS___STRPTIME_STD_A = 0x6CF - SYS___STRXFRM_A = 0x6A2 - SYS___STRXFRM_C_A = 0x6A3 - SYS___STRXFRM_STD_A = 0x6A5 - SYS___SYNTAX_INIT_A = 0x6D4 - SYS___TIME_INIT_A = 0x6CB - SYS___TOD_INIT_A = 0x6D5 - SYS___TOWLOWER_A = 0x6B3 - SYS___TOWLOWER_STD_A = 0x6B4 - SYS___TOWUPPER_A = 0x6B5 - SYS___TOWUPPER_STD_A = 0x6B6 - SYS___UMOUNT_A = 0x6F2 - SYS___VFPRINTF_A = 0x6FC - SYS___VPRINTF_A = 0x6FD - SYS___VSPRINTF_A = 0x6FE - SYS___VSWPRINTF_A = 0x6FF - SYS___WCSCOLL_A = 0x6A6 - SYS___WCSCOLL_C_A = 0x6A7 - SYS___WCSCOLL_STD_A = 0x6A8 - SYS___WCSFTIME_A = 0x6D0 - SYS___WCSFTIME_STD_A = 0x6D1 - SYS___WCSXFRM_A = 0x6A9 - SYS___WCSXFRM_C_A = 0x6AA - SYS___WCSXFRM_STD_A = 0x6AB - SYS___WCTYPE_A = 0x6AD - SYS___W_GETMNTENT_A = 0x6F5 - SYS_____CCSIDTYPE_A = 0x6E6 - SYS_____CHATTR_A = 0x6E2 - SYS_____CSNAMETYPE_A = 0x6E7 - SYS_____OPEN_STAT_A = 0x6ED - SYS_____SPAWN2_A = 0x6D2 - SYS_____SPAWNP2_A = 0x6D3 - SYS_____TOCCSID_A = 0x6E4 - SYS_____TOCSNAME_A = 0x6E5 - SYS_ACL_FREE = 0x7FF - SYS_ACL_INIT = 0x7FE - SYS_FWIDE = 0x7DF - SYS_FWPRINTF = 0x7D1 - SYS_FWRITE = 0x07E - SYS_FWSCANF = 0x7D5 - SYS_GETCHAR = 0x07B - SYS_GETS = 0x07C - SYS_M_CREATE_LAYOUT = 0x7C9 - SYS_M_DESTROY_LAYOUT = 0x7CA - SYS_M_GETVALUES_LAYOUT = 0x7CB - SYS_M_SETVALUES_LAYOUT = 0x7CC - SYS_M_TRANSFORM_LAYOUT = 0x7CD - SYS_M_WTRANSFORM_LAYOUT = 0x7CE - SYS_PREAD = 0x7C7 - SYS_PUTC = 0x07D - SYS_PUTCHAR = 0x07A - SYS_PUTS = 0x07F - SYS_PWRITE = 0x7C8 - SYS_TOWCTRAN = 0x7D8 - SYS_TOWCTRANS = 0x7D8 - SYS_UNATEXIT = 0x7B5 - SYS_VFWPRINT = 0x7D3 - SYS_VFWPRINTF = 0x7D3 - SYS_VWPRINTF = 0x7D4 - SYS_WCTRANS = 0x7D7 - SYS_WPRINTF = 0x7D2 - SYS_WSCANF = 0x7D6 - SYS___ASCTIME_R_A = 0x7A1 - SYS___BASENAME_A = 0x7DC - SYS___BTOWC_A = 0x7E4 - SYS___CDUMP_A = 0x7B7 - SYS___CEE3DMP_A = 0x7B6 - SYS___CEILF_H = 0x7F4 - SYS___CEILL_H = 0x7F5 - SYS___CEIL_H = 0x7EA - SYS___CRYPT_A = 0x7BE - SYS___CSNAP_A = 0x7B8 - SYS___CTEST_A = 0x7B9 - SYS___CTIME_R_A = 0x7A2 - SYS___CTRACE_A = 0x7BA - SYS___DBM_OPEN_A = 0x7E6 - SYS___DIRNAME_A = 0x7DD - SYS___FABSF_H = 0x7FA - SYS___FABSL_H = 0x7FB - SYS___FABS_H = 0x7ED - SYS___FGETWC_A = 0x7AA - SYS___FGETWS_A = 0x7AD - SYS___FLOORF_H = 0x7F6 - SYS___FLOORL_H = 0x7F7 - SYS___FLOOR_H = 0x7EB - SYS___FPUTWC_A = 0x7A5 - SYS___FPUTWS_A = 0x7A8 - SYS___GETTIMEOFDAY_A = 0x7AE - SYS___GETWCHAR_A = 0x7AC - SYS___GETWC_A = 0x7AB - SYS___GLOB_A = 0x7DE - SYS___GMTIME_A = 0x7AF - SYS___GMTIME_R_A = 0x7B0 - SYS___INET_PTON_A = 0x7BC - SYS___J0_H = 0x7EE - SYS___J1_H = 0x7EF - SYS___JN_H = 0x7F0 - SYS___LOCALTIME_A = 0x7B1 - SYS___LOCALTIME_R_A = 0x7B2 - SYS___MALLOC24 = 0x7FC - SYS___MALLOC31 = 0x7FD - SYS___MKTIME_A = 0x7B3 - SYS___MODFF_H = 0x7F8 - SYS___MODFL_H = 0x7F9 - SYS___MODF_H = 0x7EC - SYS___OPENDIR_A = 0x7C2 - SYS___OSNAME = 0x7E0 - SYS___PUTWCHAR_A = 0x7A7 - SYS___PUTWC_A = 0x7A6 - SYS___READDIR_A = 0x7C3 - SYS___STRTOLL_A = 0x7A3 - SYS___STRTOULL_A = 0x7A4 - SYS___SYSLOG_A = 0x7BD - SYS___TZZNA = 0x7B4 - SYS___UNGETWC_A = 0x7A9 - SYS___UTIME_A = 0x7A0 - SYS___VFPRINTF2_A = 0x7E7 - SYS___VPRINTF2_A = 0x7E8 - SYS___VSPRINTF2_A = 0x7E9 - SYS___VSWPRNTF2_A = 0x7BB - SYS___WCSTOD_A = 0x7D9 - SYS___WCSTOL_A = 0x7DA - 
SYS___WCSTOUL_A = 0x7DB - SYS___WCTOB_A = 0x7E5 - SYS___Y0_H = 0x7F1 - SYS___Y1_H = 0x7F2 - SYS___YN_H = 0x7F3 - SYS_____OPENDIR2_A = 0x7BF - SYS_____OSNAME_A = 0x7E1 - SYS_____READDIR2_A = 0x7C0 - SYS_DLCLOSE = 0x8DF - SYS_DLERROR = 0x8E0 - SYS_DLOPEN = 0x8DD - SYS_DLSYM = 0x8DE - SYS_FLOCKFILE = 0x8D3 - SYS_FTRYLOCKFILE = 0x8D4 - SYS_FUNLOCKFILE = 0x8D5 - SYS_GETCHAR_UNLOCKED = 0x8D7 - SYS_GETC_UNLOCKED = 0x8D6 - SYS_PUTCHAR_UNLOCKED = 0x8D9 - SYS_PUTC_UNLOCKED = 0x8D8 - SYS_SNPRINTF = 0x8DA - SYS_VSNPRINTF = 0x8DB - SYS_WCSCSPN = 0x08B - SYS_WCSLEN = 0x08C - SYS_WCSNCAT = 0x08D - SYS_WCSNCMP = 0x08A - SYS_WCSNCPY = 0x08F - SYS_WCSSPN = 0x08E - SYS___ABSF_H = 0x8E7 - SYS___ABSL_H = 0x8E8 - SYS___ABS_H = 0x8E6 - SYS___ACOSF_H = 0x8EA - SYS___ACOSH_H = 0x8EC - SYS___ACOSL_H = 0x8EB - SYS___ACOS_H = 0x8E9 - SYS___ASINF_H = 0x8EE - SYS___ASINH_H = 0x8F0 - SYS___ASINL_H = 0x8EF - SYS___ASIN_H = 0x8ED - SYS___ATAN2F_H = 0x8F8 - SYS___ATAN2L_H = 0x8F9 - SYS___ATAN2_H = 0x8F7 - SYS___ATANF_H = 0x8F2 - SYS___ATANHF_H = 0x8F5 - SYS___ATANHL_H = 0x8F6 - SYS___ATANH_H = 0x8F4 - SYS___ATANL_H = 0x8F3 - SYS___ATAN_H = 0x8F1 - SYS___CBRT_H = 0x8FA - SYS___COPYSIGNF_H = 0x8FB - SYS___COPYSIGNL_H = 0x8FC - SYS___COSF_H = 0x8FE - SYS___COSL_H = 0x8FF - SYS___COS_H = 0x8FD - SYS___DLERROR_A = 0x8D2 - SYS___DLOPEN_A = 0x8D0 - SYS___DLSYM_A = 0x8D1 - SYS___GETUTXENT_A = 0x8C6 - SYS___GETUTXID_A = 0x8C7 - SYS___GETUTXLINE_A = 0x8C8 - SYS___ITOA = 0x8AA - SYS___ITOA_A = 0x8B0 - SYS___LE_CONDITION_TOKEN_BUILD = 0x8A5 - SYS___LE_MSG_ADD_INSERT = 0x8A6 - SYS___LE_MSG_GET = 0x8A7 - SYS___LE_MSG_GET_AND_WRITE = 0x8A8 - SYS___LE_MSG_WRITE = 0x8A9 - SYS___LLTOA = 0x8AE - SYS___LLTOA_A = 0x8B4 - SYS___LTOA = 0x8AC - SYS___LTOA_A = 0x8B2 - SYS___PUTCHAR_UNLOCKED_A = 0x8CC - SYS___PUTC_UNLOCKED_A = 0x8CB - SYS___PUTUTXLINE_A = 0x8C9 - SYS___RESET_EXCEPTION_HANDLER = 0x8E3 - SYS___REXEC_A = 0x8C4 - SYS___REXEC_AF_A = 0x8C5 - SYS___SET_EXCEPTION_HANDLER = 0x8E2 - SYS___SNPRINTF_A = 0x8CD - SYS___SUPERKILL = 0x8A4 - SYS___TCGETATTR_A = 0x8A1 - SYS___TCSETATTR_A = 0x8A2 - SYS___ULLTOA = 0x8AF - SYS___ULLTOA_A = 0x8B5 - SYS___ULTOA = 0x8AD - SYS___ULTOA_A = 0x8B3 - SYS___UTOA = 0x8AB - SYS___UTOA_A = 0x8B1 - SYS___VHM_EVENT = 0x8E4 - SYS___VSNPRINTF_A = 0x8CE - SYS_____GETENV_A = 0x8C3 - SYS_____UTMPXNAME_A = 0x8CA - SYS_CACOSH = 0x9A0 - SYS_CACOSHF = 0x9A3 - SYS_CACOSHL = 0x9A6 - SYS_CARG = 0x9A9 - SYS_CARGF = 0x9AC - SYS_CARGL = 0x9AF - SYS_CASIN = 0x9B2 - SYS_CASINF = 0x9B5 - SYS_CASINH = 0x9BB - SYS_CASINHF = 0x9BE - SYS_CASINHL = 0x9C1 - SYS_CASINL = 0x9B8 - SYS_CATAN = 0x9C4 - SYS_CATANF = 0x9C7 - SYS_CATANH = 0x9CD - SYS_CATANHF = 0x9D0 - SYS_CATANHL = 0x9D3 - SYS_CATANL = 0x9CA - SYS_CCOS = 0x9D6 - SYS_CCOSF = 0x9D9 - SYS_CCOSH = 0x9DF - SYS_CCOSHF = 0x9E2 - SYS_CCOSHL = 0x9E5 - SYS_CCOSL = 0x9DC - SYS_CEXP = 0x9E8 - SYS_CEXPF = 0x9EB - SYS_CEXPL = 0x9EE - SYS_CIMAG = 0x9F1 - SYS_CIMAGF = 0x9F4 - SYS_CIMAGL = 0x9F7 - SYS_CLOGF = 0x9FD - SYS_MEMCHR = 0x09B - SYS_MEMCMP = 0x09A - SYS_STRCOLL = 0x09C - SYS_STRNCMP = 0x09D - SYS_STRRCHR = 0x09F - SYS_STRXFRM = 0x09E - SYS___CACOSHF_B = 0x9A4 - SYS___CACOSHF_H = 0x9A5 - SYS___CACOSHL_B = 0x9A7 - SYS___CACOSHL_H = 0x9A8 - SYS___CACOSH_B = 0x9A1 - SYS___CACOSH_H = 0x9A2 - SYS___CARGF_B = 0x9AD - SYS___CARGF_H = 0x9AE - SYS___CARGL_B = 0x9B0 - SYS___CARGL_H = 0x9B1 - SYS___CARG_B = 0x9AA - SYS___CARG_H = 0x9AB - SYS___CASINF_B = 0x9B6 - SYS___CASINF_H = 0x9B7 - SYS___CASINHF_B = 0x9BF - SYS___CASINHF_H = 0x9C0 - SYS___CASINHL_B = 0x9C2 - SYS___CASINHL_H = 0x9C3 - 
SYS___CASINH_B = 0x9BC - SYS___CASINH_H = 0x9BD - SYS___CASINL_B = 0x9B9 - SYS___CASINL_H = 0x9BA - SYS___CASIN_B = 0x9B3 - SYS___CASIN_H = 0x9B4 - SYS___CATANF_B = 0x9C8 - SYS___CATANF_H = 0x9C9 - SYS___CATANHF_B = 0x9D1 - SYS___CATANHF_H = 0x9D2 - SYS___CATANHL_B = 0x9D4 - SYS___CATANHL_H = 0x9D5 - SYS___CATANH_B = 0x9CE - SYS___CATANH_H = 0x9CF - SYS___CATANL_B = 0x9CB - SYS___CATANL_H = 0x9CC - SYS___CATAN_B = 0x9C5 - SYS___CATAN_H = 0x9C6 - SYS___CCOSF_B = 0x9DA - SYS___CCOSF_H = 0x9DB - SYS___CCOSHF_B = 0x9E3 - SYS___CCOSHF_H = 0x9E4 - SYS___CCOSHL_B = 0x9E6 - SYS___CCOSHL_H = 0x9E7 - SYS___CCOSH_B = 0x9E0 - SYS___CCOSH_H = 0x9E1 - SYS___CCOSL_B = 0x9DD - SYS___CCOSL_H = 0x9DE - SYS___CCOS_B = 0x9D7 - SYS___CCOS_H = 0x9D8 - SYS___CEXPF_B = 0x9EC - SYS___CEXPF_H = 0x9ED - SYS___CEXPL_B = 0x9EF - SYS___CEXPL_H = 0x9F0 - SYS___CEXP_B = 0x9E9 - SYS___CEXP_H = 0x9EA - SYS___CIMAGF_B = 0x9F5 - SYS___CIMAGF_H = 0x9F6 - SYS___CIMAGL_B = 0x9F8 - SYS___CIMAGL_H = 0x9F9 - SYS___CIMAG_B = 0x9F2 - SYS___CIMAG_H = 0x9F3 - SYS___CLOG = 0x9FA - SYS___CLOGF_B = 0x9FE - SYS___CLOGF_H = 0x9FF - SYS___CLOG_B = 0x9FB - SYS___CLOG_H = 0x9FC - SYS_ISWCTYPE = 0x10C - SYS_ISWXDIGI = 0x10A - SYS_ISWXDIGIT = 0x10A - SYS_MBSINIT = 0x10F - SYS_TOWLOWER = 0x10D - SYS_TOWUPPER = 0x10E - SYS_WCTYPE = 0x10B - SYS_WCSSTR = 0x11B - SYS___RPMTCH = 0x11A - SYS_WCSTOD = 0x12E - SYS_WCSTOK = 0x12C - SYS_WCSTOL = 0x12D - SYS_WCSTOUL = 0x12F - SYS_FGETWC = 0x13C - SYS_FGETWS = 0x13D - SYS_FPUTWC = 0x13E - SYS_FPUTWS = 0x13F - SYS_REGERROR = 0x13B - SYS_REGFREE = 0x13A - SYS_COLLEQUIV = 0x14F - SYS_COLLTOSTR = 0x14E - SYS_ISMCCOLLEL = 0x14C - SYS_STRTOCOLL = 0x14D - SYS_DLLFREE = 0x16F - SYS_DLLQUERYFN = 0x16D - SYS_DLLQUERYVAR = 0x16E - SYS_GETMCCOLL = 0x16A - SYS_GETWMCCOLL = 0x16B - SYS___ERR2AD = 0x16C - SYS_CFSETOSPEED = 0x17A - SYS_CHDIR = 0x17B - SYS_CHMOD = 0x17C - SYS_CHOWN = 0x17D - SYS_CLOSE = 0x17E - SYS_CLOSEDIR = 0x17F - SYS_LOG = 0x017 - SYS_COSH = 0x018 - SYS_FCHMOD = 0x18A - SYS_FCHOWN = 0x18B - SYS_FCNTL = 0x18C - SYS_FILENO = 0x18D - SYS_FORK = 0x18E - SYS_FPATHCONF = 0x18F - SYS_GETLOGIN = 0x19A - SYS_GETPGRP = 0x19C - SYS_GETPID = 0x19D - SYS_GETPPID = 0x19E - SYS_GETPWNAM = 0x19F - SYS_TANH = 0x019 - SYS_W_GETMNTENT = 0x19B - SYS_POW = 0x020 - SYS_PTHREAD_SELF = 0x20A - SYS_PTHREAD_SETINTR = 0x20B - SYS_PTHREAD_SETINTRTYPE = 0x20C - SYS_PTHREAD_SETSPECIFIC = 0x20D - SYS_PTHREAD_TESTINTR = 0x20E - SYS_PTHREAD_YIELD = 0x20F - SYS_SQRT = 0x021 - SYS_FLOOR = 0x022 - SYS_J1 = 0x023 - SYS_WCSPBRK = 0x23F - SYS_BSEARCH = 0x24C - SYS_FABS = 0x024 - SYS_GETENV = 0x24A - SYS_LDIV = 0x24D - SYS_SYSTEM = 0x24B - SYS_FMOD = 0x025 - SYS___RETHROW = 0x25F - SYS___THROW = 0x25E - SYS_J0 = 0x026 - SYS_PUTENV = 0x26A - SYS___GETENV = 0x26F - SYS_SEMCTL = 0x27A - SYS_SEMGET = 0x27B - SYS_SEMOP = 0x27C - SYS_SHMAT = 0x27D - SYS_SHMCTL = 0x27E - SYS_SHMDT = 0x27F - SYS_YN = 0x027 - SYS_JN = 0x028 - SYS_SIGALTSTACK = 0x28A - SYS_SIGHOLD = 0x28B - SYS_SIGIGNORE = 0x28C - SYS_SIGINTERRUPT = 0x28D - SYS_SIGPAUSE = 0x28E - SYS_SIGRELSE = 0x28F - SYS_GETOPT = 0x29A - SYS_GETSUBOPT = 0x29D - SYS_LCHOWN = 0x29B - SYS_SETPGRP = 0x29E - SYS_TRUNCATE = 0x29C - SYS_Y0 = 0x029 - SYS___GDERR = 0x29F - SYS_ISALPHA = 0x030 - SYS_VFORK = 0x30F - SYS__LONGJMP = 0x30D - SYS__SETJMP = 0x30E - SYS_GLOB = 0x31A - SYS_GLOBFREE = 0x31B - SYS_ISALNUM = 0x031 - SYS_PUTW = 0x31C - SYS_SEEKDIR = 0x31D - SYS_TELLDIR = 0x31E - SYS_TEMPNAM = 0x31F - SYS_GETTIMEOFDAY_R = 0x32E - SYS_ISLOWER = 0x032 - SYS_LGAMMA = 0x32C - SYS_REMAINDER = 0x32A - SYS_SCALB 
= 0x32B - SYS_SYNC = 0x32F - SYS_TTYSLOT = 0x32D - SYS_ENDPROTOENT = 0x33A - SYS_ENDSERVENT = 0x33B - SYS_GETHOSTBYADDR = 0x33D - SYS_GETHOSTBYADDR_R = 0x33C - SYS_GETHOSTBYNAME = 0x33F - SYS_GETHOSTBYNAME_R = 0x33E - SYS_ISCNTRL = 0x033 - SYS_GETSERVBYNAME = 0x34A - SYS_GETSERVBYPORT = 0x34B - SYS_GETSERVENT = 0x34C - SYS_GETSOCKNAME = 0x34D - SYS_GETSOCKOPT = 0x34E - SYS_INET_ADDR = 0x34F - SYS_ISDIGIT = 0x034 - SYS_ISGRAPH = 0x035 - SYS_SELECT = 0x35B - SYS_SELECTEX = 0x35C - SYS_SEND = 0x35D - SYS_SENDTO = 0x35F - SYS_CHROOT = 0x36A - SYS_ISNAN = 0x36D - SYS_ISUPPER = 0x036 - SYS_ULIMIT = 0x36C - SYS_UTIMES = 0x36E - SYS_W_STATVFS = 0x36B - SYS___H_ERRNO = 0x36F - SYS_GRANTPT = 0x37A - SYS_ISPRINT = 0x037 - SYS_TCGETSID = 0x37C - SYS_UNLOCKPT = 0x37B - SYS___TCGETCP = 0x37D - SYS___TCSETCP = 0x37E - SYS___TCSETTABLES = 0x37F - SYS_ISPUNCT = 0x038 - SYS_NLIST = 0x38C - SYS___IPDBCS = 0x38D - SYS___IPDSPX = 0x38E - SYS___IPMSGC = 0x38F - SYS___STHOSTENT = 0x38B - SYS___STSERVENT = 0x38A - SYS_ISSPACE = 0x039 - SYS_COS = 0x040 - SYS_T_ALLOC = 0x40A - SYS_T_BIND = 0x40B - SYS_T_CLOSE = 0x40C - SYS_T_CONNECT = 0x40D - SYS_T_ERROR = 0x40E - SYS_T_FREE = 0x40F - SYS_TAN = 0x041 - SYS_T_RCVREL = 0x41A - SYS_T_RCVUDATA = 0x41B - SYS_T_RCVUDERR = 0x41C - SYS_T_SND = 0x41D - SYS_T_SNDDIS = 0x41E - SYS_T_SNDREL = 0x41F - SYS_GETPMSG = 0x42A - SYS_ISASTREAM = 0x42B - SYS_PUTMSG = 0x42C - SYS_PUTPMSG = 0x42D - SYS_SINH = 0x042 - SYS___ISPOSIXON = 0x42E - SYS___OPENMVSREL = 0x42F - SYS_ACOS = 0x043 - SYS_ATAN = 0x044 - SYS_ATAN2 = 0x045 - SYS_FTELL = 0x046 - SYS_FGETPOS = 0x047 - SYS_SOCK_DEBUG = 0x47A - SYS_SOCK_DO_TESTSTOR = 0x47D - SYS_TAKESOCKET = 0x47E - SYS___SERVER_INIT = 0x47F - SYS_FSEEK = 0x048 - SYS___IPHOST = 0x48B - SYS___IPNODE = 0x48C - SYS___SERVER_CLASSIFY_CREATE = 0x48D - SYS___SERVER_CLASSIFY_DESTROY = 0x48E - SYS___SERVER_CLASSIFY_RESET = 0x48F - SYS___SMF_RECORD = 0x48A - SYS_FSETPOS = 0x049 - SYS___FNWSA = 0x49B - SYS___SPAWN2 = 0x49D - SYS___SPAWNP2 = 0x49E - SYS_ATOF = 0x050 - SYS_PTHREAD_MUTEXATTR_GETPSHARED = 0x50A - SYS_PTHREAD_MUTEXATTR_SETPSHARED = 0x50B - SYS_PTHREAD_RWLOCK_DESTROY = 0x50C - SYS_PTHREAD_RWLOCK_INIT = 0x50D - SYS_PTHREAD_RWLOCK_RDLOCK = 0x50E - SYS_PTHREAD_RWLOCK_TRYRDLOCK = 0x50F - SYS_ATOI = 0x051 - SYS___FP_CLASS = 0x51D - SYS___FP_CLR_FLAG = 0x51A - SYS___FP_FINITE = 0x51E - SYS___FP_ISNAN = 0x51F - SYS___FP_RAISE_XCP = 0x51C - SYS___FP_READ_FLAG = 0x51B - SYS_RAND = 0x052 - SYS_SIGTIMEDWAIT = 0x52D - SYS_SIGWAITINFO = 0x52E - SYS___CHKBFP = 0x52F - SYS___FPC_RS = 0x52C - SYS___FPC_RW = 0x52A - SYS___FPC_SM = 0x52B - SYS_STRTOD = 0x053 - SYS_STRTOL = 0x054 - SYS_STRTOUL = 0x055 - SYS_MALLOC = 0x056 - SYS_SRAND = 0x057 - SYS_CALLOC = 0x058 - SYS_FREE = 0x059 - SYS___OSENV = 0x59F - SYS___W_PIOCTL = 0x59E - SYS_LONGJMP = 0x060 - SYS___FLOORF_B = 0x60A - SYS___FLOORL_B = 0x60B - SYS___FREXPF_B = 0x60C - SYS___FREXPL_B = 0x60D - SYS___LDEXPF_B = 0x60E - SYS___LDEXPL_B = 0x60F - SYS_SIGNAL = 0x061 - SYS___ATAN2F_B = 0x61A - SYS___ATAN2L_B = 0x61B - SYS___COSHF_B = 0x61C - SYS___COSHL_B = 0x61D - SYS___EXPF_B = 0x61E - SYS___EXPL_B = 0x61F - SYS_TMPNAM = 0x062 - SYS___ABSF_B = 0x62A - SYS___ABSL_B = 0x62C - SYS___ABS_B = 0x62B - SYS___FMODF_B = 0x62D - SYS___FMODL_B = 0x62E - SYS___MODFF_B = 0x62F - SYS_ATANL = 0x63A - SYS_CEILF = 0x63B - SYS_CEILL = 0x63C - SYS_COSF = 0x63D - SYS_COSHF = 0x63F - SYS_COSL = 0x63E - SYS_REMOVE = 0x063 - SYS_POWL = 0x64A - SYS_RENAME = 0x064 - SYS_SINF = 0x64B - SYS_SINHF = 0x64F - SYS_SINL = 0x64C - SYS_SQRTF = 0x64D - 
SYS_SQRTL = 0x64E - SYS_BTOWC = 0x65F - SYS_FREXPL = 0x65A - SYS_LDEXPF = 0x65B - SYS_LDEXPL = 0x65C - SYS_MODFF = 0x65D - SYS_MODFL = 0x65E - SYS_TMPFILE = 0x065 - SYS_FREOPEN = 0x066 - SYS___CHARMAP_INIT_A = 0x66E - SYS___GETHOSTBYADDR_R_A = 0x66C - SYS___GETHOSTBYNAME_A = 0x66A - SYS___GETHOSTBYNAME_R_A = 0x66D - SYS___MBLEN_A = 0x66F - SYS___RES_INIT_A = 0x66B - SYS_FCLOSE = 0x067 - SYS___GETGRGID_R_A = 0x67D - SYS___WCSTOMBS_A = 0x67A - SYS___WCSTOMBS_STD_A = 0x67B - SYS___WCSWIDTH_A = 0x67C - SYS___WCSWIDTH_ASIA = 0x67F - SYS___WCSWIDTH_STD_A = 0x67E - SYS_FFLUSH = 0x068 - SYS___GETLOGIN_R_A = 0x68E - SYS___GETPWNAM_R_A = 0x68C - SYS___GETPWUID_R_A = 0x68D - SYS___TTYNAME_R_A = 0x68F - SYS___WCWIDTH_ASIA = 0x68B - SYS___WCWIDTH_STD_A = 0x68A - SYS_FOPEN = 0x069 - SYS___REGEXEC_A = 0x69A - SYS___REGEXEC_STD_A = 0x69B - SYS___REGFREE_A = 0x69C - SYS___REGFREE_STD_A = 0x69D - SYS___STRCOLL_A = 0x69E - SYS___STRCOLL_C_A = 0x69F - SYS_SCANF = 0x070 - SYS___A64L_A = 0x70C - SYS___ECVT_A = 0x70D - SYS___FCVT_A = 0x70E - SYS___GCVT_A = 0x70F - SYS___STRTOUL_A = 0x70A - SYS_____AE_CORRESTBL_QUERY_A = 0x70B - SYS_SPRINTF = 0x071 - SYS___ACCESS_A = 0x71F - SYS___CATOPEN_A = 0x71E - SYS___GETOPT_A = 0x71D - SYS___REALPATH_A = 0x71A - SYS___SETENV_A = 0x71B - SYS___SYSTEM_A = 0x71C - SYS_FGETC = 0x072 - SYS___GAI_STRERROR_A = 0x72F - SYS___RMDIR_A = 0x72A - SYS___STATVFS_A = 0x72B - SYS___SYMLINK_A = 0x72C - SYS___TRUNCATE_A = 0x72D - SYS___UNLINK_A = 0x72E - SYS_VFPRINTF = 0x073 - SYS___ISSPACE_A = 0x73A - SYS___ISUPPER_A = 0x73B - SYS___ISWALNUM_A = 0x73F - SYS___ISXDIGIT_A = 0x73C - SYS___TOLOWER_A = 0x73D - SYS___TOUPPER_A = 0x73E - SYS_VPRINTF = 0x074 - SYS___CONFSTR_A = 0x74B - SYS___FDOPEN_A = 0x74E - SYS___FLDATA_A = 0x74F - SYS___FTOK_A = 0x74C - SYS___ISWXDIGIT_A = 0x74A - SYS___MKTEMP_A = 0x74D - SYS_VSPRINTF = 0x075 - SYS___GETGRGID_A = 0x75A - SYS___GETGRNAM_A = 0x75B - SYS___GETGROUPSBYNAME_A = 0x75C - SYS___GETHOSTENT_A = 0x75D - SYS___GETHOSTNAME_A = 0x75E - SYS___GETLOGIN_A = 0x75F - SYS_GETC = 0x076 - SYS___CREATEWORKUNIT_A = 0x76A - SYS___CTERMID_A = 0x76B - SYS___FMTMSG_A = 0x76C - SYS___INITGROUPS_A = 0x76D - SYS___MSGRCV_A = 0x76F - SYS_____LOGIN_A = 0x76E - SYS_FGETS = 0x077 - SYS___STRCASECMP_A = 0x77B - SYS___STRNCASECMP_A = 0x77C - SYS___TTYNAME_A = 0x77D - SYS___UNAME_A = 0x77E - SYS___UTIMES_A = 0x77F - SYS_____SERVER_PWU_A = 0x77A - SYS_FPUTC = 0x078 - SYS___CREAT_O_A = 0x78E - SYS___ENVNA = 0x78F - SYS___FREAD_A = 0x78A - SYS___FWRITE_A = 0x78B - SYS___ISASCII = 0x78D - SYS___OPEN_O_A = 0x78C - SYS_FPUTS = 0x079 - SYS___ASCTIME_A = 0x79C - SYS___CTIME_A = 0x79D - SYS___GETDATE_A = 0x79E - SYS___GETSERVBYPORT_A = 0x79A - SYS___GETSERVENT_A = 0x79B - SYS___TZSET_A = 0x79F - SYS_ACL_FROM_TEXT = 0x80C - SYS_ACL_SET_FD = 0x80A - SYS_ACL_SET_FILE = 0x80B - SYS_ACL_SORT = 0x80E - SYS_ACL_TO_TEXT = 0x80D - SYS_UNGETC = 0x080 - SYS___SHUTDOWN_REGISTRATION = 0x80F - SYS_FREAD = 0x081 - SYS_FREEADDRINFO = 0x81A - SYS_GAI_STRERROR = 0x81B - SYS_REXEC_AF = 0x81C - SYS___DYNALLOC_A = 0x81F - SYS___POE = 0x81D - SYS_WCSTOMBS = 0x082 - SYS___INET_ADDR_A = 0x82F - SYS___NLIST_A = 0x82A - SYS_____TCGETCP_A = 0x82B - SYS_____TCSETCP_A = 0x82C - SYS_____W_PIOCTL_A = 0x82E - SYS_MBTOWC = 0x083 - SYS___CABEND = 0x83D - SYS___LE_CIB_GET = 0x83E - SYS___RECVMSG_A = 0x83B - SYS___SENDMSG_A = 0x83A - SYS___SET_LAA_FOR_JIT = 0x83F - SYS_____LCHATTR_A = 0x83C - SYS_WCTOMB = 0x084 - SYS___CBRTL_B = 0x84A - SYS___COPYSIGNF_B = 0x84B - SYS___COPYSIGNL_B = 0x84C - SYS___COTANF_B = 0x84D - 
SYS___COTANL_B = 0x84F - SYS___COTAN_B = 0x84E - SYS_MBSTOWCS = 0x085 - SYS___LOG1PL_B = 0x85A - SYS___LOG2F_B = 0x85B - SYS___LOG2L_B = 0x85D - SYS___LOG2_B = 0x85C - SYS___REMAINDERF_B = 0x85E - SYS___REMAINDERL_B = 0x85F - SYS_ACOSHF = 0x86E - SYS_ACOSHL = 0x86F - SYS_WCSCPY = 0x086 - SYS___ERFCF_B = 0x86D - SYS___ERFF_B = 0x86C - SYS___LROUNDF_B = 0x86A - SYS___LROUND_B = 0x86B - SYS_COTANL = 0x87A - SYS_EXP2F = 0x87B - SYS_EXP2L = 0x87C - SYS_EXPM1F = 0x87D - SYS_EXPM1L = 0x87E - SYS_FDIMF = 0x87F - SYS_WCSCAT = 0x087 - SYS___COTANL = 0x87A - SYS_REMAINDERF = 0x88A - SYS_REMAINDERL = 0x88B - SYS_REMAINDF = 0x88A - SYS_REMAINDL = 0x88B - SYS_REMQUO = 0x88D - SYS_REMQUOF = 0x88C - SYS_REMQUOL = 0x88E - SYS_TGAMMAF = 0x88F - SYS_WCSCHR = 0x088 - SYS_ERFCF = 0x89B - SYS_ERFCL = 0x89C - SYS_ERFL = 0x89A - SYS_EXP2 = 0x89E - SYS_WCSCMP = 0x089 - SYS___EXP2_B = 0x89D - SYS___FAR_JUMP = 0x89F - SYS_ABS = 0x090 - SYS___ERFCL_H = 0x90A - SYS___EXPF_H = 0x90C - SYS___EXPL_H = 0x90D - SYS___EXPM1_H = 0x90E - SYS___EXP_H = 0x90B - SYS___FDIM_H = 0x90F - SYS_DIV = 0x091 - SYS___LOG2F_H = 0x91F - SYS___LOG2_H = 0x91E - SYS___LOGB_H = 0x91D - SYS___LOGF_H = 0x91B - SYS___LOGL_H = 0x91C - SYS___LOG_H = 0x91A - SYS_LABS = 0x092 - SYS___POWL_H = 0x92A - SYS___REMAINDER_H = 0x92B - SYS___RINT_H = 0x92C - SYS___SCALB_H = 0x92D - SYS___SINF_H = 0x92F - SYS___SIN_H = 0x92E - SYS_STRNCPY = 0x093 - SYS___TANHF_H = 0x93B - SYS___TANHL_H = 0x93C - SYS___TANH_H = 0x93A - SYS___TGAMMAF_H = 0x93E - SYS___TGAMMA_H = 0x93D - SYS___TRUNC_H = 0x93F - SYS_MEMCPY = 0x094 - SYS_VFWSCANF = 0x94A - SYS_VSWSCANF = 0x94E - SYS_VWSCANF = 0x94C - SYS_INET6_RTH_ADD = 0x95D - SYS_INET6_RTH_INIT = 0x95C - SYS_INET6_RTH_REVERSE = 0x95E - SYS_INET6_RTH_SEGMENTS = 0x95F - SYS_INET6_RTH_SPACE = 0x95B - SYS_MEMMOVE = 0x095 - SYS_WCSTOLD = 0x95A - SYS_STRCPY = 0x096 - SYS_STRCMP = 0x097 - SYS_CABS = 0x98E - SYS_STRCAT = 0x098 - SYS___CABS_B = 0x98F - SYS___POW_II = 0x98A - SYS___POW_II_B = 0x98B - SYS___POW_II_H = 0x98C - SYS_CACOSF = 0x99A - SYS_CACOSL = 0x99D - SYS_STRNCAT = 0x099 - SYS___CACOSF_B = 0x99B - SYS___CACOSF_H = 0x99C - SYS___CACOSL_B = 0x99E - SYS___CACOSL_H = 0x99F - SYS_ISWALPHA = 0x100 - SYS_ISWBLANK = 0x101 - SYS___ISWBLK = 0x101 - SYS_ISWCNTRL = 0x102 - SYS_ISWDIGIT = 0x103 - SYS_ISWGRAPH = 0x104 - SYS_ISWLOWER = 0x105 - SYS_ISWPRINT = 0x106 - SYS_ISWPUNCT = 0x107 - SYS_ISWSPACE = 0x108 - SYS_ISWUPPER = 0x109 - SYS_WCTOB = 0x110 - SYS_MBRLEN = 0x111 - SYS_MBRTOWC = 0x112 - SYS_MBSRTOWC = 0x113 - SYS_MBSRTOWCS = 0x113 - SYS_WCRTOMB = 0x114 - SYS_WCSRTOMB = 0x115 - SYS_WCSRTOMBS = 0x115 - SYS___CSID = 0x116 - SYS___WCSID = 0x117 - SYS_STRPTIME = 0x118 - SYS___STRPTM = 0x118 - SYS_STRFMON = 0x119 - SYS_WCSCOLL = 0x130 - SYS_WCSXFRM = 0x131 - SYS_WCSWIDTH = 0x132 - SYS_WCWIDTH = 0x133 - SYS_WCSFTIME = 0x134 - SYS_SWPRINTF = 0x135 - SYS_VSWPRINT = 0x136 - SYS_VSWPRINTF = 0x136 - SYS_SWSCANF = 0x137 - SYS_REGCOMP = 0x138 - SYS_REGEXEC = 0x139 - SYS_GETWC = 0x140 - SYS_GETWCHAR = 0x141 - SYS_PUTWC = 0x142 - SYS_PUTWCHAR = 0x143 - SYS_UNGETWC = 0x144 - SYS_ICONV_OPEN = 0x145 - SYS_ICONV = 0x146 - SYS_ICONV_CLOSE = 0x147 - SYS_COLLRANGE = 0x150 - SYS_CCLASS = 0x151 - SYS_COLLORDER = 0x152 - SYS___DEMANGLE = 0x154 - SYS_FDOPEN = 0x155 - SYS___ERRNO = 0x156 - SYS___ERRNO2 = 0x157 - SYS___TERROR = 0x158 - SYS_MAXCOLL = 0x169 - SYS_DLLLOAD = 0x170 - SYS__EXIT = 0x174 - SYS_ACCESS = 0x175 - SYS_ALARM = 0x176 - SYS_CFGETISPEED = 0x177 - SYS_CFGETOSPEED = 0x178 - SYS_CFSETISPEED = 0x179 - SYS_CREAT = 0x180 - SYS_CTERMID = 0x181 - 
SYS_DUP = 0x182 - SYS_DUP2 = 0x183 - SYS_EXECL = 0x184 - SYS_EXECLE = 0x185 - SYS_EXECLP = 0x186 - SYS_EXECV = 0x187 - SYS_EXECVE = 0x188 - SYS_EXECVP = 0x189 - SYS_FSTAT = 0x190 - SYS_FSYNC = 0x191 - SYS_FTRUNCATE = 0x192 - SYS_GETCWD = 0x193 - SYS_GETEGID = 0x194 - SYS_GETEUID = 0x195 - SYS_GETGID = 0x196 - SYS_GETGRGID = 0x197 - SYS_GETGRNAM = 0x198 - SYS_GETGROUPS = 0x199 - SYS_PTHREAD_MUTEXATTR_DESTROY = 0x200 - SYS_PTHREAD_MUTEXATTR_SETKIND_NP = 0x201 - SYS_PTHREAD_MUTEXATTR_GETKIND_NP = 0x202 - SYS_PTHREAD_MUTEX_INIT = 0x203 - SYS_PTHREAD_MUTEX_DESTROY = 0x204 - SYS_PTHREAD_MUTEX_LOCK = 0x205 - SYS_PTHREAD_MUTEX_TRYLOCK = 0x206 - SYS_PTHREAD_MUTEX_UNLOCK = 0x207 - SYS_PTHREAD_ONCE = 0x209 - SYS_TW_OPEN = 0x210 - SYS_TW_FCNTL = 0x211 - SYS_PTHREAD_JOIN_D4_NP = 0x212 - SYS_PTHREAD_CONDATTR_SETKIND_NP = 0x213 - SYS_PTHREAD_CONDATTR_GETKIND_NP = 0x214 - SYS_EXTLINK_NP = 0x215 - SYS___PASSWD = 0x216 - SYS_SETGROUPS = 0x217 - SYS_INITGROUPS = 0x218 - SYS_WCSRCHR = 0x240 - SYS_SVC99 = 0x241 - SYS___SVC99 = 0x241 - SYS_WCSWCS = 0x242 - SYS_LOCALECO = 0x243 - SYS_LOCALECONV = 0x243 - SYS___LIBREL = 0x244 - SYS_RELEASE = 0x245 - SYS___RLSE = 0x245 - SYS_FLOCATE = 0x246 - SYS___FLOCT = 0x246 - SYS_FDELREC = 0x247 - SYS___FDLREC = 0x247 - SYS_FETCH = 0x248 - SYS___FETCH = 0x248 - SYS_QSORT = 0x249 - SYS___CLEANUPCATCH = 0x260 - SYS___CATCHMATCH = 0x261 - SYS___CLEAN2UPCATCH = 0x262 - SYS_GETPRIORITY = 0x270 - SYS_NICE = 0x271 - SYS_SETPRIORITY = 0x272 - SYS_GETITIMER = 0x273 - SYS_SETITIMER = 0x274 - SYS_MSGCTL = 0x275 - SYS_MSGGET = 0x276 - SYS_MSGRCV = 0x277 - SYS_MSGSND = 0x278 - SYS_MSGXRCV = 0x279 - SYS___MSGXR = 0x279 - SYS_SHMGET = 0x280 - SYS___GETIPC = 0x281 - SYS_SETGRENT = 0x282 - SYS_GETGRENT = 0x283 - SYS_ENDGRENT = 0x284 - SYS_SETPWENT = 0x285 - SYS_GETPWENT = 0x286 - SYS_ENDPWENT = 0x287 - SYS_BSD_SIGNAL = 0x288 - SYS_KILLPG = 0x289 - SYS_SIGSET = 0x290 - SYS_SIGSTACK = 0x291 - SYS_GETRLIMIT = 0x292 - SYS_SETRLIMIT = 0x293 - SYS_GETRUSAGE = 0x294 - SYS_MMAP = 0x295 - SYS_MPROTECT = 0x296 - SYS_MSYNC = 0x297 - SYS_MUNMAP = 0x298 - SYS_CONFSTR = 0x299 - SYS___NDMTRM = 0x300 - SYS_FTOK = 0x301 - SYS_BASENAME = 0x302 - SYS_DIRNAME = 0x303 - SYS_GETDTABLESIZE = 0x304 - SYS_MKSTEMP = 0x305 - SYS_MKTEMP = 0x306 - SYS_NFTW = 0x307 - SYS_GETWD = 0x308 - SYS_LOCKF = 0x309 - SYS_WORDEXP = 0x310 - SYS_WORDFREE = 0x311 - SYS_GETPGID = 0x312 - SYS_GETSID = 0x313 - SYS___UTMPXNAME = 0x314 - SYS_CUSERID = 0x315 - SYS_GETPASS = 0x316 - SYS_FNMATCH = 0x317 - SYS_FTW = 0x318 - SYS_GETW = 0x319 - SYS_ACOSH = 0x320 - SYS_ASINH = 0x321 - SYS_ATANH = 0x322 - SYS_CBRT = 0x323 - SYS_EXPM1 = 0x324 - SYS_ILOGB = 0x325 - SYS_LOGB = 0x326 - SYS_LOG1P = 0x327 - SYS_NEXTAFTER = 0x328 - SYS_RINT = 0x329 - SYS_SPAWN = 0x330 - SYS_SPAWNP = 0x331 - SYS_GETLOGIN_UU = 0x332 - SYS_ECVT = 0x333 - SYS_FCVT = 0x334 - SYS_GCVT = 0x335 - SYS_ACCEPT = 0x336 - SYS_BIND = 0x337 - SYS_CONNECT = 0x338 - SYS_ENDHOSTENT = 0x339 - SYS_GETHOSTENT = 0x340 - SYS_GETHOSTID = 0x341 - SYS_GETHOSTNAME = 0x342 - SYS_GETNETBYADDR = 0x343 - SYS_GETNETBYNAME = 0x344 - SYS_GETNETENT = 0x345 - SYS_GETPEERNAME = 0x346 - SYS_GETPROTOBYNAME = 0x347 - SYS_GETPROTOBYNUMBER = 0x348 - SYS_GETPROTOENT = 0x349 - SYS_INET_LNAOF = 0x350 - SYS_INET_MAKEADDR = 0x351 - SYS_INET_NETOF = 0x352 - SYS_INET_NETWORK = 0x353 - SYS_INET_NTOA = 0x354 - SYS_IOCTL = 0x355 - SYS_LISTEN = 0x356 - SYS_READV = 0x357 - SYS_RECV = 0x358 - SYS_RECVFROM = 0x359 - SYS_SETHOSTENT = 0x360 - SYS_SETNETENT = 0x361 - SYS_SETPEER = 0x362 - SYS_SETPROTOENT = 0x363 - SYS_SETSERVENT = 
0x364 - SYS_SETSOCKOPT = 0x365 - SYS_SHUTDOWN = 0x366 - SYS_SOCKET = 0x367 - SYS_SOCKETPAIR = 0x368 - SYS_WRITEV = 0x369 - SYS_ENDNETENT = 0x370 - SYS_CLOSELOG = 0x371 - SYS_OPENLOG = 0x372 - SYS_SETLOGMASK = 0x373 - SYS_SYSLOG = 0x374 - SYS_PTSNAME = 0x375 - SYS_SETREUID = 0x376 - SYS_SETREGID = 0x377 - SYS_REALPATH = 0x378 - SYS___SIGNGAM = 0x379 - SYS_POLL = 0x380 - SYS_REXEC = 0x381 - SYS___ISASCII2 = 0x382 - SYS___TOASCII2 = 0x383 - SYS_CHPRIORITY = 0x384 - SYS_PTHREAD_ATTR_SETSYNCTYPE_NP = 0x385 - SYS_PTHREAD_ATTR_GETSYNCTYPE_NP = 0x386 - SYS_PTHREAD_SET_LIMIT_NP = 0x387 - SYS___STNETENT = 0x388 - SYS___STPROTOENT = 0x389 - SYS___SELECT1 = 0x390 - SYS_PTHREAD_SECURITY_NP = 0x391 - SYS___CHECK_RESOURCE_AUTH_NP = 0x392 - SYS___CONVERT_ID_NP = 0x393 - SYS___OPENVMREL = 0x394 - SYS_WMEMCHR = 0x395 - SYS_WMEMCMP = 0x396 - SYS_WMEMCPY = 0x397 - SYS_WMEMMOVE = 0x398 - SYS_WMEMSET = 0x399 - SYS___FPUTWC = 0x400 - SYS___PUTWC = 0x401 - SYS___PWCHAR = 0x402 - SYS___WCSFTM = 0x403 - SYS___WCSTOK = 0x404 - SYS___WCWDTH = 0x405 - SYS_T_ACCEPT = 0x409 - SYS_T_GETINFO = 0x410 - SYS_T_GETPROTADDR = 0x411 - SYS_T_GETSTATE = 0x412 - SYS_T_LISTEN = 0x413 - SYS_T_LOOK = 0x414 - SYS_T_OPEN = 0x415 - SYS_T_OPTMGMT = 0x416 - SYS_T_RCV = 0x417 - SYS_T_RCVCONNECT = 0x418 - SYS_T_RCVDIS = 0x419 - SYS_T_SNDUDATA = 0x420 - SYS_T_STRERROR = 0x421 - SYS_T_SYNC = 0x422 - SYS_T_UNBIND = 0x423 - SYS___T_ERRNO = 0x424 - SYS___RECVMSG2 = 0x425 - SYS___SENDMSG2 = 0x426 - SYS_FATTACH = 0x427 - SYS_FDETACH = 0x428 - SYS_GETMSG = 0x429 - SYS_GETCONTEXT = 0x430 - SYS_SETCONTEXT = 0x431 - SYS_MAKECONTEXT = 0x432 - SYS_SWAPCONTEXT = 0x433 - SYS_PTHREAD_GETSPECIFIC_D8_NP = 0x434 - SYS_GETCLIENTID = 0x470 - SYS___GETCLIENTID = 0x471 - SYS_GETSTABLESIZE = 0x472 - SYS_GETIBMOPT = 0x473 - SYS_GETIBMSOCKOPT = 0x474 - SYS_GIVESOCKET = 0x475 - SYS_IBMSFLUSH = 0x476 - SYS_MAXDESC = 0x477 - SYS_SETIBMOPT = 0x478 - SYS_SETIBMSOCKOPT = 0x479 - SYS___SERVER_PWU = 0x480 - SYS_PTHREAD_TAG_NP = 0x481 - SYS___CONSOLE = 0x482 - SYS___WSINIT = 0x483 - SYS___IPTCPN = 0x489 - SYS___SERVER_CLASSIFY = 0x490 - SYS___HEAPRPT = 0x496 - SYS___ISBFP = 0x500 - SYS___FP_CAST = 0x501 - SYS___CERTIFICATE = 0x502 - SYS_SEND_FILE = 0x503 - SYS_AIO_CANCEL = 0x504 - SYS_AIO_ERROR = 0x505 - SYS_AIO_READ = 0x506 - SYS_AIO_RETURN = 0x507 - SYS_AIO_SUSPEND = 0x508 - SYS_AIO_WRITE = 0x509 - SYS_PTHREAD_RWLOCK_TRYWRLOCK = 0x510 - SYS_PTHREAD_RWLOCK_UNLOCK = 0x511 - SYS_PTHREAD_RWLOCK_WRLOCK = 0x512 - SYS_PTHREAD_RWLOCKATTR_GETPSHARED = 0x513 - SYS_PTHREAD_RWLOCKATTR_SETPSHARED = 0x514 - SYS_PTHREAD_RWLOCKATTR_INIT = 0x515 - SYS_PTHREAD_RWLOCKATTR_DESTROY = 0x516 - SYS___CTTBL = 0x517 - SYS_PTHREAD_MUTEXATTR_SETTYPE = 0x518 - SYS_PTHREAD_MUTEXATTR_GETTYPE = 0x519 - SYS___FP_UNORDERED = 0x520 - SYS___FP_READ_RND = 0x521 - SYS___FP_READ_RND_B = 0x522 - SYS___FP_SWAP_RND = 0x523 - SYS___FP_SWAP_RND_B = 0x524 - SYS___FP_LEVEL = 0x525 - SYS___FP_BTOH = 0x526 - SYS___FP_HTOB = 0x527 - SYS___FPC_RD = 0x528 - SYS___FPC_WR = 0x529 - SYS_PTHREAD_SETCANCELTYPE = 0x600 - SYS_PTHREAD_TESTCANCEL = 0x601 - SYS___ATANF_B = 0x602 - SYS___ATANL_B = 0x603 - SYS___CEILF_B = 0x604 - SYS___CEILL_B = 0x605 - SYS___COSF_B = 0x606 - SYS___COSL_B = 0x607 - SYS___FABSF_B = 0x608 - SYS___FABSL_B = 0x609 - SYS___SINF_B = 0x610 - SYS___SINL_B = 0x611 - SYS___TANF_B = 0x612 - SYS___TANL_B = 0x613 - SYS___TANHF_B = 0x614 - SYS___TANHL_B = 0x615 - SYS___ACOSF_B = 0x616 - SYS___ACOSL_B = 0x617 - SYS___ASINF_B = 0x618 - SYS___ASINL_B = 0x619 - SYS___LOGF_B = 0x620 - SYS___LOGL_B = 0x621 - 
SYS___LOG10F_B = 0x622 - SYS___LOG10L_B = 0x623 - SYS___POWF_B = 0x624 - SYS___POWL_B = 0x625 - SYS___SINHF_B = 0x626 - SYS___SINHL_B = 0x627 - SYS___SQRTF_B = 0x628 - SYS___SQRTL_B = 0x629 - SYS___MODFL_B = 0x630 - SYS_ABSF = 0x631 - SYS_ABSL = 0x632 - SYS_ACOSF = 0x633 - SYS_ACOSL = 0x634 - SYS_ASINF = 0x635 - SYS_ASINL = 0x636 - SYS_ATAN2F = 0x637 - SYS_ATAN2L = 0x638 - SYS_ATANF = 0x639 - SYS_COSHL = 0x640 - SYS_EXPF = 0x641 - SYS_EXPL = 0x642 - SYS_TANHF = 0x643 - SYS_TANHL = 0x644 - SYS_LOG10F = 0x645 - SYS_LOG10L = 0x646 - SYS_LOGF = 0x647 - SYS_LOGL = 0x648 - SYS_POWF = 0x649 - SYS_SINHL = 0x650 - SYS_TANF = 0x651 - SYS_TANL = 0x652 - SYS_FABSF = 0x653 - SYS_FABSL = 0x654 - SYS_FLOORF = 0x655 - SYS_FLOORL = 0x656 - SYS_FMODF = 0x657 - SYS_FMODL = 0x658 - SYS_FREXPF = 0x659 - SYS___CHATTR = 0x660 - SYS___FCHATTR = 0x661 - SYS___TOCCSID = 0x662 - SYS___CSNAMETYPE = 0x663 - SYS___TOCSNAME = 0x664 - SYS___CCSIDTYPE = 0x665 - SYS___AE_CORRESTBL_QUERY = 0x666 - SYS___AE_AUTOCONVERT_STATE = 0x667 - SYS_DN_FIND = 0x668 - SYS___GETHOSTBYADDR_A = 0x669 - SYS___MBLEN_SB_A = 0x670 - SYS___MBLEN_STD_A = 0x671 - SYS___MBLEN_UTF = 0x672 - SYS___MBSTOWCS_A = 0x673 - SYS___MBSTOWCS_STD_A = 0x674 - SYS___MBTOWC_A = 0x675 - SYS___MBTOWC_ISO1 = 0x676 - SYS___MBTOWC_SBCS = 0x677 - SYS___MBTOWC_MBCS = 0x678 - SYS___MBTOWC_UTF = 0x679 - SYS___CSID_A = 0x680 - SYS___CSID_STD_A = 0x681 - SYS___WCSID_A = 0x682 - SYS___WCSID_STD_A = 0x683 - SYS___WCTOMB_A = 0x684 - SYS___WCTOMB_ISO1 = 0x685 - SYS___WCTOMB_STD_A = 0x686 - SYS___WCTOMB_UTF = 0x687 - SYS___WCWIDTH_A = 0x688 - SYS___GETGRNAM_R_A = 0x689 - SYS___READDIR_R_A = 0x690 - SYS___E2A_S = 0x691 - SYS___FNMATCH_A = 0x692 - SYS___FNMATCH_C_A = 0x693 - SYS___EXECL_A = 0x694 - SYS___FNMATCH_STD_A = 0x695 - SYS___REGCOMP_A = 0x696 - SYS___REGCOMP_STD_A = 0x697 - SYS___REGERROR_A = 0x698 - SYS___REGERROR_STD_A = 0x699 - SYS___SWPRINTF_A = 0x700 - SYS___FSCANF_A = 0x701 - SYS___SCANF_A = 0x702 - SYS___SSCANF_A = 0x703 - SYS___SWSCANF_A = 0x704 - SYS___ATOF_A = 0x705 - SYS___ATOI_A = 0x706 - SYS___ATOL_A = 0x707 - SYS___STRTOD_A = 0x708 - SYS___STRTOL_A = 0x709 - SYS___L64A_A = 0x710 - SYS___STRERROR_A = 0x711 - SYS___PERROR_A = 0x712 - SYS___FETCH_A = 0x713 - SYS___GETENV_A = 0x714 - SYS___MKSTEMP_A = 0x717 - SYS___PTSNAME_A = 0x718 - SYS___PUTENV_A = 0x719 - SYS___CHDIR_A = 0x720 - SYS___CHOWN_A = 0x721 - SYS___CHROOT_A = 0x722 - SYS___GETCWD_A = 0x723 - SYS___GETWD_A = 0x724 - SYS___LCHOWN_A = 0x725 - SYS___LINK_A = 0x726 - SYS___PATHCONF_A = 0x727 - SYS___IF_NAMEINDEX_A = 0x728 - SYS___READLINK_A = 0x729 - SYS___EXTLINK_NP_A = 0x730 - SYS___ISALNUM_A = 0x731 - SYS___ISALPHA_A = 0x732 - SYS___A2E_S = 0x733 - SYS___ISCNTRL_A = 0x734 - SYS___ISDIGIT_A = 0x735 - SYS___ISGRAPH_A = 0x736 - SYS___ISLOWER_A = 0x737 - SYS___ISPRINT_A = 0x738 - SYS___ISPUNCT_A = 0x739 - SYS___ISWALPHA_A = 0x740 - SYS___A2E_L = 0x741 - SYS___ISWCNTRL_A = 0x742 - SYS___ISWDIGIT_A = 0x743 - SYS___ISWGRAPH_A = 0x744 - SYS___ISWLOWER_A = 0x745 - SYS___ISWPRINT_A = 0x746 - SYS___ISWPUNCT_A = 0x747 - SYS___ISWSPACE_A = 0x748 - SYS___ISWUPPER_A = 0x749 - SYS___REMOVE_A = 0x750 - SYS___RENAME_A = 0x751 - SYS___TMPNAM_A = 0x752 - SYS___FOPEN_A = 0x753 - SYS___FREOPEN_A = 0x754 - SYS___CUSERID_A = 0x755 - SYS___POPEN_A = 0x756 - SYS___TEMPNAM_A = 0x757 - SYS___FTW_A = 0x758 - SYS___GETGRENT_A = 0x759 - SYS___INET_NTOP_A = 0x760 - SYS___GETPASS_A = 0x761 - SYS___GETPWENT_A = 0x762 - SYS___GETPWNAM_A = 0x763 - SYS___GETPWUID_A = 0x764 - SYS_____CHECK_RESOURCE_AUTH_NP_A = 0x765 - 
SYS___CHECKSCHENV_A = 0x766 - SYS___CONNECTSERVER_A = 0x767 - SYS___CONNECTWORKMGR_A = 0x768 - SYS_____CONSOLE_A = 0x769 - SYS___MSGSND_A = 0x770 - SYS___MSGXRCV_A = 0x771 - SYS___NFTW_A = 0x772 - SYS_____PASSWD_A = 0x773 - SYS___PTHREAD_SECURITY_NP_A = 0x774 - SYS___QUERYMETRICS_A = 0x775 - SYS___QUERYSCHENV = 0x776 - SYS___READV_A = 0x777 - SYS_____SERVER_CLASSIFY_A = 0x778 - SYS_____SERVER_INIT_A = 0x779 - SYS___W_GETPSENT_A = 0x780 - SYS___WRITEV_A = 0x781 - SYS___W_STATFS_A = 0x782 - SYS___W_STATVFS_A = 0x783 - SYS___FPUTC_A = 0x784 - SYS___PUTCHAR_A = 0x785 - SYS___PUTS_A = 0x786 - SYS___FGETS_A = 0x787 - SYS___GETS_A = 0x788 - SYS___FPUTS_A = 0x789 - SYS___PUTC_A = 0x790 - SYS___AE_THREAD_SETMODE = 0x791 - SYS___AE_THREAD_SWAPMODE = 0x792 - SYS___GETNETBYADDR_A = 0x793 - SYS___GETNETBYNAME_A = 0x794 - SYS___GETNETENT_A = 0x795 - SYS___GETPROTOBYNAME_A = 0x796 - SYS___GETPROTOBYNUMBER_A = 0x797 - SYS___GETPROTOENT_A = 0x798 - SYS___GETSERVBYNAME_A = 0x799 - SYS_ACL_FIRST_ENTRY = 0x800 - SYS_ACL_GET_ENTRY = 0x801 - SYS_ACL_VALID = 0x802 - SYS_ACL_CREATE_ENTRY = 0x803 - SYS_ACL_DELETE_ENTRY = 0x804 - SYS_ACL_UPDATE_ENTRY = 0x805 - SYS_ACL_DELETE_FD = 0x806 - SYS_ACL_DELETE_FILE = 0x807 - SYS_ACL_GET_FD = 0x808 - SYS_ACL_GET_FILE = 0x809 - SYS___ERFL_B = 0x810 - SYS___ERFCL_B = 0x811 - SYS___LGAMMAL_B = 0x812 - SYS___SETHOOKEVENTS = 0x813 - SYS_IF_NAMETOINDEX = 0x814 - SYS_IF_INDEXTONAME = 0x815 - SYS_IF_NAMEINDEX = 0x816 - SYS_IF_FREENAMEINDEX = 0x817 - SYS_GETADDRINFO = 0x818 - SYS_GETNAMEINFO = 0x819 - SYS___DYNFREE_A = 0x820 - SYS___RES_QUERY_A = 0x821 - SYS___RES_SEARCH_A = 0x822 - SYS___RES_QUERYDOMAIN_A = 0x823 - SYS___RES_MKQUERY_A = 0x824 - SYS___RES_SEND_A = 0x825 - SYS___DN_EXPAND_A = 0x826 - SYS___DN_SKIPNAME_A = 0x827 - SYS___DN_COMP_A = 0x828 - SYS___DN_FIND_A = 0x829 - SYS___INET_NTOA_A = 0x830 - SYS___INET_NETWORK_A = 0x831 - SYS___ACCEPT_A = 0x832 - SYS___ACCEPT_AND_RECV_A = 0x833 - SYS___BIND_A = 0x834 - SYS___CONNECT_A = 0x835 - SYS___GETPEERNAME_A = 0x836 - SYS___GETSOCKNAME_A = 0x837 - SYS___RECVFROM_A = 0x838 - SYS___SENDTO_A = 0x839 - SYS___LCHATTR = 0x840 - SYS___WRITEDOWN = 0x841 - SYS_PTHREAD_MUTEX_INIT2 = 0x842 - SYS___ACOSHF_B = 0x843 - SYS___ACOSHL_B = 0x844 - SYS___ASINHF_B = 0x845 - SYS___ASINHL_B = 0x846 - SYS___ATANHF_B = 0x847 - SYS___ATANHL_B = 0x848 - SYS___CBRTF_B = 0x849 - SYS___EXP2F_B = 0x850 - SYS___EXP2L_B = 0x851 - SYS___EXPM1F_B = 0x852 - SYS___EXPM1L_B = 0x853 - SYS___FDIMF_B = 0x854 - SYS___FDIM_B = 0x855 - SYS___FDIML_B = 0x856 - SYS___HYPOTF_B = 0x857 - SYS___HYPOTL_B = 0x858 - SYS___LOG1PF_B = 0x859 - SYS___REMQUOF_B = 0x860 - SYS___REMQUO_B = 0x861 - SYS___REMQUOL_B = 0x862 - SYS___TGAMMAF_B = 0x863 - SYS___TGAMMA_B = 0x864 - SYS___TGAMMAL_B = 0x865 - SYS___TRUNCF_B = 0x866 - SYS___TRUNC_B = 0x867 - SYS___TRUNCL_B = 0x868 - SYS___LGAMMAF_B = 0x869 - SYS_ASINHF = 0x870 - SYS_ASINHL = 0x871 - SYS_ATANHF = 0x872 - SYS_ATANHL = 0x873 - SYS_CBRTF = 0x874 - SYS_CBRTL = 0x875 - SYS_COPYSIGNF = 0x876 - SYS_CPYSIGNF = 0x876 - SYS_COPYSIGNL = 0x877 - SYS_CPYSIGNL = 0x877 - SYS_COTANF = 0x878 - SYS___COTANF = 0x878 - SYS_COTAN = 0x879 - SYS___COTAN = 0x879 - SYS_FDIM = 0x881 - SYS_FDIML = 0x882 - SYS_HYPOTF = 0x883 - SYS_HYPOTL = 0x884 - SYS_LOG1PF = 0x885 - SYS_LOG1PL = 0x886 - SYS_LOG2F = 0x887 - SYS_LOG2 = 0x888 - SYS_LOG2L = 0x889 - SYS_TGAMMA = 0x890 - SYS_TGAMMAL = 0x891 - SYS_TRUNCF = 0x892 - SYS_TRUNC = 0x893 - SYS_TRUNCL = 0x894 - SYS_LGAMMAF = 0x895 - SYS_LGAMMAL = 0x896 - SYS_LROUNDF = 0x897 - SYS_LROUND = 0x898 - SYS_ERFF = 0x899 - 
SYS___COSHF_H = 0x900 - SYS___COSHL_H = 0x901 - SYS___COTAN_H = 0x902 - SYS___COTANF_H = 0x903 - SYS___COTANL_H = 0x904 - SYS___ERF_H = 0x905 - SYS___ERFF_H = 0x906 - SYS___ERFL_H = 0x907 - SYS___ERFC_H = 0x908 - SYS___ERFCF_H = 0x909 - SYS___FDIMF_H = 0x910 - SYS___FDIML_H = 0x911 - SYS___FMOD_H = 0x912 - SYS___FMODF_H = 0x913 - SYS___FMODL_H = 0x914 - SYS___GAMMA_H = 0x915 - SYS___HYPOT_H = 0x916 - SYS___ILOGB_H = 0x917 - SYS___LGAMMA_H = 0x918 - SYS___LGAMMAF_H = 0x919 - SYS___LOG2L_H = 0x920 - SYS___LOG1P_H = 0x921 - SYS___LOG10_H = 0x922 - SYS___LOG10F_H = 0x923 - SYS___LOG10L_H = 0x924 - SYS___LROUND_H = 0x925 - SYS___LROUNDF_H = 0x926 - SYS___NEXTAFTER_H = 0x927 - SYS___POW_H = 0x928 - SYS___POWF_H = 0x929 - SYS___SINL_H = 0x930 - SYS___SINH_H = 0x931 - SYS___SINHF_H = 0x932 - SYS___SINHL_H = 0x933 - SYS___SQRT_H = 0x934 - SYS___SQRTF_H = 0x935 - SYS___SQRTL_H = 0x936 - SYS___TAN_H = 0x937 - SYS___TANF_H = 0x938 - SYS___TANL_H = 0x939 - SYS___TRUNCF_H = 0x940 - SYS___TRUNCL_H = 0x941 - SYS___COSH_H = 0x942 - SYS___LE_DEBUG_SET_RESUME_MCH = 0x943 - SYS_VFSCANF = 0x944 - SYS_VSCANF = 0x946 - SYS_VSSCANF = 0x948 - SYS_IMAXABS = 0x950 - SYS_IMAXDIV = 0x951 - SYS_STRTOIMAX = 0x952 - SYS_STRTOUMAX = 0x953 - SYS_WCSTOIMAX = 0x954 - SYS_WCSTOUMAX = 0x955 - SYS_ATOLL = 0x956 - SYS_STRTOF = 0x957 - SYS_STRTOLD = 0x958 - SYS_WCSTOF = 0x959 - SYS_INET6_RTH_GETADDR = 0x960 - SYS_INET6_OPT_INIT = 0x961 - SYS_INET6_OPT_APPEND = 0x962 - SYS_INET6_OPT_FINISH = 0x963 - SYS_INET6_OPT_SET_VAL = 0x964 - SYS_INET6_OPT_NEXT = 0x965 - SYS_INET6_OPT_FIND = 0x966 - SYS_INET6_OPT_GET_VAL = 0x967 - SYS___POW_I = 0x987 - SYS___POW_I_B = 0x988 - SYS___POW_I_H = 0x989 - SYS___CABS_H = 0x990 - SYS_CABSF = 0x991 - SYS___CABSF_B = 0x992 - SYS___CABSF_H = 0x993 - SYS_CABSL = 0x994 - SYS___CABSL_B = 0x995 - SYS___CABSL_H = 0x996 - SYS_CACOS = 0x997 - SYS___CACOS_B = 0x998 - SYS___CACOS_H = 0x999 + SYS_LOG = 0x17 // 23 + SYS_COSH = 0x18 // 24 + SYS_TANH = 0x19 // 25 + SYS_EXP = 0x1A // 26 + SYS_MODF = 0x1B // 27 + SYS_LOG10 = 0x1C // 28 + SYS_FREXP = 0x1D // 29 + SYS_LDEXP = 0x1E // 30 + SYS_CEIL = 0x1F // 31 + SYS_POW = 0x20 // 32 + SYS_SQRT = 0x21 // 33 + SYS_FLOOR = 0x22 // 34 + SYS_J1 = 0x23 // 35 + SYS_FABS = 0x24 // 36 + SYS_FMOD = 0x25 // 37 + SYS_J0 = 0x26 // 38 + SYS_YN = 0x27 // 39 + SYS_JN = 0x28 // 40 + SYS_Y0 = 0x29 // 41 + SYS_Y1 = 0x2A // 42 + SYS_HYPOT = 0x2B // 43 + SYS_ERF = 0x2C // 44 + SYS_ERFC = 0x2D // 45 + SYS_GAMMA = 0x2E // 46 + SYS_ISALPHA = 0x30 // 48 + SYS_ISALNUM = 0x31 // 49 + SYS_ISLOWER = 0x32 // 50 + SYS_ISCNTRL = 0x33 // 51 + SYS_ISDIGIT = 0x34 // 52 + SYS_ISGRAPH = 0x35 // 53 + SYS_ISUPPER = 0x36 // 54 + SYS_ISPRINT = 0x37 // 55 + SYS_ISPUNCT = 0x38 // 56 + SYS_ISSPACE = 0x39 // 57 + SYS_SETLOCAL = 0x3A // 58 + SYS_SETLOCALE = 0x3A // 58 + SYS_ISXDIGIT = 0x3B // 59 + SYS_TOLOWER = 0x3C // 60 + SYS_TOUPPER = 0x3D // 61 + SYS_ASIN = 0x3E // 62 + SYS_SIN = 0x3F // 63 + SYS_COS = 0x40 // 64 + SYS_TAN = 0x41 // 65 + SYS_SINH = 0x42 // 66 + SYS_ACOS = 0x43 // 67 + SYS_ATAN = 0x44 // 68 + SYS_ATAN2 = 0x45 // 69 + SYS_FTELL = 0x46 // 70 + SYS_FGETPOS = 0x47 // 71 + SYS_FSEEK = 0x48 // 72 + SYS_FSETPOS = 0x49 // 73 + SYS_FERROR = 0x4A // 74 + SYS_REWIND = 0x4B // 75 + SYS_CLEARERR = 0x4C // 76 + SYS_FEOF = 0x4D // 77 + SYS_ATOL = 0x4E // 78 + SYS_PERROR = 0x4F // 79 + SYS_ATOF = 0x50 // 80 + SYS_ATOI = 0x51 // 81 + SYS_RAND = 0x52 // 82 + SYS_STRTOD = 0x53 // 83 + SYS_STRTOL = 0x54 // 84 + SYS_STRTOUL = 0x55 // 85 + SYS_MALLOC = 0x56 // 86 + SYS_SRAND = 0x57 // 87 + SYS_CALLOC = 0x58 // 88 + 
SYS_FREE = 0x59 // 89 + SYS_EXIT = 0x5A // 90 + SYS_REALLOC = 0x5B // 91 + SYS_ABORT = 0x5C // 92 + SYS___ABORT = 0x5C // 92 + SYS_ATEXIT = 0x5D // 93 + SYS_RAISE = 0x5E // 94 + SYS_SETJMP = 0x5F // 95 + SYS_LONGJMP = 0x60 // 96 + SYS_SIGNAL = 0x61 // 97 + SYS_TMPNAM = 0x62 // 98 + SYS_REMOVE = 0x63 // 99 + SYS_RENAME = 0x64 // 100 + SYS_TMPFILE = 0x65 // 101 + SYS_FREOPEN = 0x66 // 102 + SYS_FCLOSE = 0x67 // 103 + SYS_FFLUSH = 0x68 // 104 + SYS_FOPEN = 0x69 // 105 + SYS_FSCANF = 0x6A // 106 + SYS_SETBUF = 0x6B // 107 + SYS_SETVBUF = 0x6C // 108 + SYS_FPRINTF = 0x6D // 109 + SYS_SSCANF = 0x6E // 110 + SYS_PRINTF = 0x6F // 111 + SYS_SCANF = 0x70 // 112 + SYS_SPRINTF = 0x71 // 113 + SYS_FGETC = 0x72 // 114 + SYS_VFPRINTF = 0x73 // 115 + SYS_VPRINTF = 0x74 // 116 + SYS_VSPRINTF = 0x75 // 117 + SYS_GETC = 0x76 // 118 + SYS_FGETS = 0x77 // 119 + SYS_FPUTC = 0x78 // 120 + SYS_FPUTS = 0x79 // 121 + SYS_PUTCHAR = 0x7A // 122 + SYS_GETCHAR = 0x7B // 123 + SYS_GETS = 0x7C // 124 + SYS_PUTC = 0x7D // 125 + SYS_FWRITE = 0x7E // 126 + SYS_PUTS = 0x7F // 127 + SYS_UNGETC = 0x80 // 128 + SYS_FREAD = 0x81 // 129 + SYS_WCSTOMBS = 0x82 // 130 + SYS_MBTOWC = 0x83 // 131 + SYS_WCTOMB = 0x84 // 132 + SYS_MBSTOWCS = 0x85 // 133 + SYS_WCSCPY = 0x86 // 134 + SYS_WCSCAT = 0x87 // 135 + SYS_WCSCHR = 0x88 // 136 + SYS_WCSCMP = 0x89 // 137 + SYS_WCSNCMP = 0x8A // 138 + SYS_WCSCSPN = 0x8B // 139 + SYS_WCSLEN = 0x8C // 140 + SYS_WCSNCAT = 0x8D // 141 + SYS_WCSSPN = 0x8E // 142 + SYS_WCSNCPY = 0x8F // 143 + SYS_ABS = 0x90 // 144 + SYS_DIV = 0x91 // 145 + SYS_LABS = 0x92 // 146 + SYS_STRNCPY = 0x93 // 147 + SYS_MEMCPY = 0x94 // 148 + SYS_MEMMOVE = 0x95 // 149 + SYS_STRCPY = 0x96 // 150 + SYS_STRCMP = 0x97 // 151 + SYS_STRCAT = 0x98 // 152 + SYS_STRNCAT = 0x99 // 153 + SYS_MEMCMP = 0x9A // 154 + SYS_MEMCHR = 0x9B // 155 + SYS_STRCOLL = 0x9C // 156 + SYS_STRNCMP = 0x9D // 157 + SYS_STRXFRM = 0x9E // 158 + SYS_STRRCHR = 0x9F // 159 + SYS_STRCHR = 0xA0 // 160 + SYS_STRCSPN = 0xA1 // 161 + SYS_STRPBRK = 0xA2 // 162 + SYS_MEMSET = 0xA3 // 163 + SYS_STRSPN = 0xA4 // 164 + SYS_STRSTR = 0xA5 // 165 + SYS_STRTOK = 0xA6 // 166 + SYS_DIFFTIME = 0xA7 // 167 + SYS_STRERROR = 0xA8 // 168 + SYS_STRLEN = 0xA9 // 169 + SYS_CLOCK = 0xAA // 170 + SYS_CTIME = 0xAB // 171 + SYS_MKTIME = 0xAC // 172 + SYS_TIME = 0xAD // 173 + SYS_ASCTIME = 0xAE // 174 + SYS_MBLEN = 0xAF // 175 + SYS_GMTIME = 0xB0 // 176 + SYS_LOCALTIM = 0xB1 // 177 + SYS_LOCALTIME = 0xB1 // 177 + SYS_STRFTIME = 0xB2 // 178 + SYS___GETCB = 0xB4 // 180 + SYS_FUPDATE = 0xB5 // 181 + SYS___FUPDT = 0xB5 // 181 + SYS_CLRMEMF = 0xBD // 189 + SYS___CLRMF = 0xBD // 189 + SYS_FETCHEP = 0xBF // 191 + SYS___FTCHEP = 0xBF // 191 + SYS_FLDATA = 0xC1 // 193 + SYS___FLDATA = 0xC1 // 193 + SYS_DYNFREE = 0xC2 // 194 + SYS___DYNFRE = 0xC2 // 194 + SYS_DYNALLOC = 0xC3 // 195 + SYS___DYNALL = 0xC3 // 195 + SYS___CDUMP = 0xC4 // 196 + SYS_CSNAP = 0xC5 // 197 + SYS___CSNAP = 0xC5 // 197 + SYS_CTRACE = 0xC6 // 198 + SYS___CTRACE = 0xC6 // 198 + SYS___CTEST = 0xC7 // 199 + SYS_SETENV = 0xC8 // 200 + SYS___SETENV = 0xC8 // 200 + SYS_CLEARENV = 0xC9 // 201 + SYS___CLRENV = 0xC9 // 201 + SYS___REGCOMP_STD = 0xEA // 234 + SYS_NL_LANGINFO = 0xFC // 252 + SYS_GETSYNTX = 0xFD // 253 + SYS_ISBLANK = 0xFE // 254 + SYS___ISBLNK = 0xFE // 254 + SYS_ISWALNUM = 0xFF // 255 + SYS_ISWALPHA = 0x100 // 256 + SYS_ISWBLANK = 0x101 // 257 + SYS___ISWBLK = 0x101 // 257 + SYS_ISWCNTRL = 0x102 // 258 + SYS_ISWDIGIT = 0x103 // 259 + SYS_ISWGRAPH = 0x104 // 260 + SYS_ISWLOWER = 0x105 // 261 + SYS_ISWPRINT = 0x106 // 262 + 
SYS_ISWPUNCT = 0x107 // 263 + SYS_ISWSPACE = 0x108 // 264 + SYS_ISWUPPER = 0x109 // 265 + SYS_ISWXDIGI = 0x10A // 266 + SYS_ISWXDIGIT = 0x10A // 266 + SYS_WCTYPE = 0x10B // 267 + SYS_ISWCTYPE = 0x10C // 268 + SYS_TOWLOWER = 0x10D // 269 + SYS_TOWUPPER = 0x10E // 270 + SYS_MBSINIT = 0x10F // 271 + SYS_WCTOB = 0x110 // 272 + SYS_MBRLEN = 0x111 // 273 + SYS_MBRTOWC = 0x112 // 274 + SYS_MBSRTOWC = 0x113 // 275 + SYS_MBSRTOWCS = 0x113 // 275 + SYS_WCRTOMB = 0x114 // 276 + SYS_WCSRTOMB = 0x115 // 277 + SYS_WCSRTOMBS = 0x115 // 277 + SYS___CSID = 0x116 // 278 + SYS___WCSID = 0x117 // 279 + SYS_STRPTIME = 0x118 // 280 + SYS___STRPTM = 0x118 // 280 + SYS_STRFMON = 0x119 // 281 + SYS___RPMTCH = 0x11A // 282 + SYS_WCSSTR = 0x11B // 283 + SYS_WCSTOK = 0x12C // 300 + SYS_WCSTOL = 0x12D // 301 + SYS_WCSTOD = 0x12E // 302 + SYS_WCSTOUL = 0x12F // 303 + SYS_WCSCOLL = 0x130 // 304 + SYS_WCSXFRM = 0x131 // 305 + SYS_WCSWIDTH = 0x132 // 306 + SYS_WCWIDTH = 0x133 // 307 + SYS_WCSFTIME = 0x134 // 308 + SYS_SWPRINTF = 0x135 // 309 + SYS_VSWPRINT = 0x136 // 310 + SYS_VSWPRINTF = 0x136 // 310 + SYS_SWSCANF = 0x137 // 311 + SYS_REGCOMP = 0x138 // 312 + SYS_REGEXEC = 0x139 // 313 + SYS_REGFREE = 0x13A // 314 + SYS_REGERROR = 0x13B // 315 + SYS_FGETWC = 0x13C // 316 + SYS_FGETWS = 0x13D // 317 + SYS_FPUTWC = 0x13E // 318 + SYS_FPUTWS = 0x13F // 319 + SYS_GETWC = 0x140 // 320 + SYS_GETWCHAR = 0x141 // 321 + SYS_PUTWC = 0x142 // 322 + SYS_PUTWCHAR = 0x143 // 323 + SYS_UNGETWC = 0x144 // 324 + SYS_ICONV_OPEN = 0x145 // 325 + SYS_ICONV = 0x146 // 326 + SYS_ICONV_CLOSE = 0x147 // 327 + SYS_ISMCCOLLEL = 0x14C // 332 + SYS_STRTOCOLL = 0x14D // 333 + SYS_COLLTOSTR = 0x14E // 334 + SYS_COLLEQUIV = 0x14F // 335 + SYS_COLLRANGE = 0x150 // 336 + SYS_CCLASS = 0x151 // 337 + SYS_COLLORDER = 0x152 // 338 + SYS___DEMANGLE = 0x154 // 340 + SYS_FDOPEN = 0x155 // 341 + SYS___ERRNO = 0x156 // 342 + SYS___ERRNO2 = 0x157 // 343 + SYS___TERROR = 0x158 // 344 + SYS_MAXCOLL = 0x169 // 361 + SYS_GETMCCOLL = 0x16A // 362 + SYS_GETWMCCOLL = 0x16B // 363 + SYS___ERR2AD = 0x16C // 364 + SYS_DLLQUERYFN = 0x16D // 365 + SYS_DLLQUERYVAR = 0x16E // 366 + SYS_DLLFREE = 0x16F // 367 + SYS_DLLLOAD = 0x170 // 368 + SYS__EXIT = 0x174 // 372 + SYS_ACCESS = 0x175 // 373 + SYS_ALARM = 0x176 // 374 + SYS_CFGETISPEED = 0x177 // 375 + SYS_CFGETOSPEED = 0x178 // 376 + SYS_CFSETISPEED = 0x179 // 377 + SYS_CFSETOSPEED = 0x17A // 378 + SYS_CHDIR = 0x17B // 379 + SYS_CHMOD = 0x17C // 380 + SYS_CHOWN = 0x17D // 381 + SYS_CLOSE = 0x17E // 382 + SYS_CLOSEDIR = 0x17F // 383 + SYS_CREAT = 0x180 // 384 + SYS_CTERMID = 0x181 // 385 + SYS_DUP = 0x182 // 386 + SYS_DUP2 = 0x183 // 387 + SYS_EXECL = 0x184 // 388 + SYS_EXECLE = 0x185 // 389 + SYS_EXECLP = 0x186 // 390 + SYS_EXECV = 0x187 // 391 + SYS_EXECVE = 0x188 // 392 + SYS_EXECVP = 0x189 // 393 + SYS_FCHMOD = 0x18A // 394 + SYS_FCHOWN = 0x18B // 395 + SYS_FCNTL = 0x18C // 396 + SYS_FILENO = 0x18D // 397 + SYS_FORK = 0x18E // 398 + SYS_FPATHCONF = 0x18F // 399 + SYS_FSTAT = 0x190 // 400 + SYS_FSYNC = 0x191 // 401 + SYS_FTRUNCATE = 0x192 // 402 + SYS_GETCWD = 0x193 // 403 + SYS_GETEGID = 0x194 // 404 + SYS_GETEUID = 0x195 // 405 + SYS_GETGID = 0x196 // 406 + SYS_GETGRGID = 0x197 // 407 + SYS_GETGRNAM = 0x198 // 408 + SYS_GETGROUPS = 0x199 // 409 + SYS_GETLOGIN = 0x19A // 410 + SYS_W_GETMNTENT = 0x19B // 411 + SYS_GETPGRP = 0x19C // 412 + SYS_GETPID = 0x19D // 413 + SYS_GETPPID = 0x19E // 414 + SYS_GETPWNAM = 0x19F // 415 + SYS_GETPWUID = 0x1A0 // 416 + SYS_GETUID = 0x1A1 // 417 + SYS_W_IOCTL = 0x1A2 // 418 + SYS_ISATTY = 
0x1A3 // 419 + SYS_KILL = 0x1A4 // 420 + SYS_LINK = 0x1A5 // 421 + SYS_LSEEK = 0x1A6 // 422 + SYS_LSTAT = 0x1A7 // 423 + SYS_MKDIR = 0x1A8 // 424 + SYS_MKFIFO = 0x1A9 // 425 + SYS_MKNOD = 0x1AA // 426 + SYS_MOUNT = 0x1AB // 427 + SYS_OPEN = 0x1AC // 428 + SYS_OPENDIR = 0x1AD // 429 + SYS_PATHCONF = 0x1AE // 430 + SYS_PAUSE = 0x1AF // 431 + SYS_PIPE = 0x1B0 // 432 + SYS_W_GETPSENT = 0x1B1 // 433 + SYS_READ = 0x1B2 // 434 + SYS_READDIR = 0x1B3 // 435 + SYS_READLINK = 0x1B4 // 436 + SYS_REWINDDIR = 0x1B5 // 437 + SYS_RMDIR = 0x1B6 // 438 + SYS_SETEGID = 0x1B7 // 439 + SYS_SETEUID = 0x1B8 // 440 + SYS_SETGID = 0x1B9 // 441 + SYS_SETPGID = 0x1BA // 442 + SYS_SETSID = 0x1BB // 443 + SYS_SETUID = 0x1BC // 444 + SYS_SIGACTION = 0x1BD // 445 + SYS_SIGADDSET = 0x1BE // 446 + SYS_SIGDELSET = 0x1BF // 447 + SYS_SIGEMPTYSET = 0x1C0 // 448 + SYS_SIGFILLSET = 0x1C1 // 449 + SYS_SIGISMEMBER = 0x1C2 // 450 + SYS_SIGLONGJMP = 0x1C3 // 451 + SYS_SIGPENDING = 0x1C4 // 452 + SYS_SIGPROCMASK = 0x1C5 // 453 + SYS_SIGSETJMP = 0x1C6 // 454 + SYS_SIGSUSPEND = 0x1C7 // 455 + SYS_SLEEP = 0x1C8 // 456 + SYS_STAT = 0x1C9 // 457 + SYS_W_STATFS = 0x1CA // 458 + SYS_SYMLINK = 0x1CB // 459 + SYS_SYSCONF = 0x1CC // 460 + SYS_TCDRAIN = 0x1CD // 461 + SYS_TCFLOW = 0x1CE // 462 + SYS_TCFLUSH = 0x1CF // 463 + SYS_TCGETATTR = 0x1D0 // 464 + SYS_TCGETPGRP = 0x1D1 // 465 + SYS_TCSENDBREAK = 0x1D2 // 466 + SYS_TCSETATTR = 0x1D3 // 467 + SYS_TCSETPGRP = 0x1D4 // 468 + SYS_TIMES = 0x1D5 // 469 + SYS_TTYNAME = 0x1D6 // 470 + SYS_TZSET = 0x1D7 // 471 + SYS_UMASK = 0x1D8 // 472 + SYS_UMOUNT = 0x1D9 // 473 + SYS_UNAME = 0x1DA // 474 + SYS_UNLINK = 0x1DB // 475 + SYS_UTIME = 0x1DC // 476 + SYS_WAIT = 0x1DD // 477 + SYS_WAITPID = 0x1DE // 478 + SYS_WRITE = 0x1DF // 479 + SYS_CHAUDIT = 0x1E0 // 480 + SYS_FCHAUDIT = 0x1E1 // 481 + SYS_GETGROUPSBYNAME = 0x1E2 // 482 + SYS_SIGWAIT = 0x1E3 // 483 + SYS_PTHREAD_EXIT = 0x1E4 // 484 + SYS_PTHREAD_KILL = 0x1E5 // 485 + SYS_PTHREAD_ATTR_INIT = 0x1E6 // 486 + SYS_PTHREAD_ATTR_DESTROY = 0x1E7 // 487 + SYS_PTHREAD_ATTR_SETSTACKSIZE = 0x1E8 // 488 + SYS_PTHREAD_ATTR_GETSTACKSIZE = 0x1E9 // 489 + SYS_PTHREAD_ATTR_SETDETACHSTATE = 0x1EA // 490 + SYS_PTHREAD_ATTR_GETDETACHSTATE = 0x1EB // 491 + SYS_PTHREAD_ATTR_SETWEIGHT_NP = 0x1EC // 492 + SYS_PTHREAD_ATTR_GETWEIGHT_NP = 0x1ED // 493 + SYS_PTHREAD_CANCEL = 0x1EE // 494 + SYS_PTHREAD_CLEANUP_PUSH = 0x1EF // 495 + SYS_PTHREAD_CLEANUP_POP = 0x1F0 // 496 + SYS_PTHREAD_CONDATTR_INIT = 0x1F1 // 497 + SYS_PTHREAD_CONDATTR_DESTROY = 0x1F2 // 498 + SYS_PTHREAD_COND_INIT = 0x1F3 // 499 + SYS_PTHREAD_COND_DESTROY = 0x1F4 // 500 + SYS_PTHREAD_COND_SIGNAL = 0x1F5 // 501 + SYS_PTHREAD_COND_BROADCAST = 0x1F6 // 502 + SYS_PTHREAD_COND_WAIT = 0x1F7 // 503 + SYS_PTHREAD_COND_TIMEDWAIT = 0x1F8 // 504 + SYS_PTHREAD_CREATE = 0x1F9 // 505 + SYS_PTHREAD_DETACH = 0x1FA // 506 + SYS_PTHREAD_EQUAL = 0x1FB // 507 + SYS_PTHREAD_GETSPECIFIC = 0x1FC // 508 + SYS_PTHREAD_JOIN = 0x1FD // 509 + SYS_PTHREAD_KEY_CREATE = 0x1FE // 510 + SYS_PTHREAD_MUTEXATTR_INIT = 0x1FF // 511 + SYS_PTHREAD_MUTEXATTR_DESTROY = 0x200 // 512 + SYS_PTHREAD_MUTEXATTR_SETKIND_NP = 0x201 // 513 + SYS_PTHREAD_MUTEXATTR_GETKIND_NP = 0x202 // 514 + SYS_PTHREAD_MUTEX_INIT = 0x203 // 515 + SYS_PTHREAD_MUTEX_DESTROY = 0x204 // 516 + SYS_PTHREAD_MUTEX_LOCK = 0x205 // 517 + SYS_PTHREAD_MUTEX_TRYLOCK = 0x206 // 518 + SYS_PTHREAD_MUTEX_UNLOCK = 0x207 // 519 + SYS_PTHREAD_ONCE = 0x209 // 521 + SYS_PTHREAD_SELF = 0x20A // 522 + SYS_PTHREAD_SETINTR = 0x20B // 523 + SYS_PTHREAD_SETINTRTYPE = 0x20C // 524 + 
SYS_PTHREAD_SETSPECIFIC = 0x20D // 525 + SYS_PTHREAD_TESTINTR = 0x20E // 526 + SYS_PTHREAD_YIELD = 0x20F // 527 + SYS_TW_OPEN = 0x210 // 528 + SYS_TW_FCNTL = 0x211 // 529 + SYS_PTHREAD_JOIN_D4_NP = 0x212 // 530 + SYS_PTHREAD_CONDATTR_SETKIND_NP = 0x213 // 531 + SYS_PTHREAD_CONDATTR_GETKIND_NP = 0x214 // 532 + SYS_EXTLINK_NP = 0x215 // 533 + SYS___PASSWD = 0x216 // 534 + SYS_SETGROUPS = 0x217 // 535 + SYS_INITGROUPS = 0x218 // 536 + SYS_WCSPBRK = 0x23F // 575 + SYS_WCSRCHR = 0x240 // 576 + SYS_SVC99 = 0x241 // 577 + SYS___SVC99 = 0x241 // 577 + SYS_WCSWCS = 0x242 // 578 + SYS_LOCALECO = 0x243 // 579 + SYS_LOCALECONV = 0x243 // 579 + SYS___LIBREL = 0x244 // 580 + SYS_RELEASE = 0x245 // 581 + SYS___RLSE = 0x245 // 581 + SYS_FLOCATE = 0x246 // 582 + SYS___FLOCT = 0x246 // 582 + SYS_FDELREC = 0x247 // 583 + SYS___FDLREC = 0x247 // 583 + SYS_FETCH = 0x248 // 584 + SYS___FETCH = 0x248 // 584 + SYS_QSORT = 0x249 // 585 + SYS_GETENV = 0x24A // 586 + SYS_SYSTEM = 0x24B // 587 + SYS_BSEARCH = 0x24C // 588 + SYS_LDIV = 0x24D // 589 + SYS___THROW = 0x25E // 606 + SYS___RETHROW = 0x25F // 607 + SYS___CLEANUPCATCH = 0x260 // 608 + SYS___CATCHMATCH = 0x261 // 609 + SYS___CLEAN2UPCATCH = 0x262 // 610 + SYS_PUTENV = 0x26A // 618 + SYS___GETENV = 0x26F // 623 + SYS_GETPRIORITY = 0x270 // 624 + SYS_NICE = 0x271 // 625 + SYS_SETPRIORITY = 0x272 // 626 + SYS_GETITIMER = 0x273 // 627 + SYS_SETITIMER = 0x274 // 628 + SYS_MSGCTL = 0x275 // 629 + SYS_MSGGET = 0x276 // 630 + SYS_MSGRCV = 0x277 // 631 + SYS_MSGSND = 0x278 // 632 + SYS_MSGXRCV = 0x279 // 633 + SYS___MSGXR = 0x279 // 633 + SYS_SEMCTL = 0x27A // 634 + SYS_SEMGET = 0x27B // 635 + SYS_SEMOP = 0x27C // 636 + SYS_SHMAT = 0x27D // 637 + SYS_SHMCTL = 0x27E // 638 + SYS_SHMDT = 0x27F // 639 + SYS_SHMGET = 0x280 // 640 + SYS___GETIPC = 0x281 // 641 + SYS_SETGRENT = 0x282 // 642 + SYS_GETGRENT = 0x283 // 643 + SYS_ENDGRENT = 0x284 // 644 + SYS_SETPWENT = 0x285 // 645 + SYS_GETPWENT = 0x286 // 646 + SYS_ENDPWENT = 0x287 // 647 + SYS_BSD_SIGNAL = 0x288 // 648 + SYS_KILLPG = 0x289 // 649 + SYS_SIGALTSTACK = 0x28A // 650 + SYS_SIGHOLD = 0x28B // 651 + SYS_SIGIGNORE = 0x28C // 652 + SYS_SIGINTERRUPT = 0x28D // 653 + SYS_SIGPAUSE = 0x28E // 654 + SYS_SIGRELSE = 0x28F // 655 + SYS_SIGSET = 0x290 // 656 + SYS_SIGSTACK = 0x291 // 657 + SYS_GETRLIMIT = 0x292 // 658 + SYS_SETRLIMIT = 0x293 // 659 + SYS_GETRUSAGE = 0x294 // 660 + SYS_MMAP = 0x295 // 661 + SYS_MPROTECT = 0x296 // 662 + SYS_MSYNC = 0x297 // 663 + SYS_MUNMAP = 0x298 // 664 + SYS_CONFSTR = 0x299 // 665 + SYS_GETOPT = 0x29A // 666 + SYS_LCHOWN = 0x29B // 667 + SYS_TRUNCATE = 0x29C // 668 + SYS_GETSUBOPT = 0x29D // 669 + SYS_SETPGRP = 0x29E // 670 + SYS___GDERR = 0x29F // 671 + SYS___TZONE = 0x2A0 // 672 + SYS___DLGHT = 0x2A1 // 673 + SYS___OPARGF = 0x2A2 // 674 + SYS___OPOPTF = 0x2A3 // 675 + SYS___OPINDF = 0x2A4 // 676 + SYS___OPERRF = 0x2A5 // 677 + SYS_GETDATE = 0x2A6 // 678 + SYS_WAIT3 = 0x2A7 // 679 + SYS_WAITID = 0x2A8 // 680 + SYS___CATTRM = 0x2A9 // 681 + SYS___GDTRM = 0x2AA // 682 + SYS___RNDTRM = 0x2AB // 683 + SYS_CRYPT = 0x2AC // 684 + SYS_ENCRYPT = 0x2AD // 685 + SYS_SETKEY = 0x2AE // 686 + SYS___CNVBLK = 0x2AF // 687 + SYS___CRYTRM = 0x2B0 // 688 + SYS___ECRTRM = 0x2B1 // 689 + SYS_DRAND48 = 0x2B2 // 690 + SYS_ERAND48 = 0x2B3 // 691 + SYS_FSTATVFS = 0x2B4 // 692 + SYS_STATVFS = 0x2B5 // 693 + SYS_CATCLOSE = 0x2B6 // 694 + SYS_CATGETS = 0x2B7 // 695 + SYS_CATOPEN = 0x2B8 // 696 + SYS_BCMP = 0x2B9 // 697 + SYS_BCOPY = 0x2BA // 698 + SYS_BZERO = 0x2BB // 699 + SYS_FFS = 0x2BC // 700 + SYS_INDEX = 
0x2BD // 701 + SYS_RINDEX = 0x2BE // 702 + SYS_STRCASECMP = 0x2BF // 703 + SYS_STRDUP = 0x2C0 // 704 + SYS_STRNCASECMP = 0x2C1 // 705 + SYS_INITSTATE = 0x2C2 // 706 + SYS_SETSTATE = 0x2C3 // 707 + SYS_RANDOM = 0x2C4 // 708 + SYS_SRANDOM = 0x2C5 // 709 + SYS_HCREATE = 0x2C6 // 710 + SYS_HDESTROY = 0x2C7 // 711 + SYS_HSEARCH = 0x2C8 // 712 + SYS_LFIND = 0x2C9 // 713 + SYS_LSEARCH = 0x2CA // 714 + SYS_TDELETE = 0x2CB // 715 + SYS_TFIND = 0x2CC // 716 + SYS_TSEARCH = 0x2CD // 717 + SYS_TWALK = 0x2CE // 718 + SYS_INSQUE = 0x2CF // 719 + SYS_REMQUE = 0x2D0 // 720 + SYS_POPEN = 0x2D1 // 721 + SYS_PCLOSE = 0x2D2 // 722 + SYS_SWAB = 0x2D3 // 723 + SYS_MEMCCPY = 0x2D4 // 724 + SYS_GETPAGESIZE = 0x2D8 // 728 + SYS_FCHDIR = 0x2D9 // 729 + SYS___OCLCK = 0x2DA // 730 + SYS___ATOE = 0x2DB // 731 + SYS___ATOE_L = 0x2DC // 732 + SYS___ETOA = 0x2DD // 733 + SYS___ETOA_L = 0x2DE // 734 + SYS_SETUTXENT = 0x2DF // 735 + SYS_GETUTXENT = 0x2E0 // 736 + SYS_ENDUTXENT = 0x2E1 // 737 + SYS_GETUTXID = 0x2E2 // 738 + SYS_GETUTXLINE = 0x2E3 // 739 + SYS_PUTUTXLINE = 0x2E4 // 740 + SYS_FMTMSG = 0x2E5 // 741 + SYS_JRAND48 = 0x2E6 // 742 + SYS_LRAND48 = 0x2E7 // 743 + SYS_MRAND48 = 0x2E8 // 744 + SYS_NRAND48 = 0x2E9 // 745 + SYS_LCONG48 = 0x2EA // 746 + SYS_SRAND48 = 0x2EB // 747 + SYS_SEED48 = 0x2EC // 748 + SYS_ISASCII = 0x2ED // 749 + SYS_TOASCII = 0x2EE // 750 + SYS_A64L = 0x2EF // 751 + SYS_L64A = 0x2F0 // 752 + SYS_UALARM = 0x2F1 // 753 + SYS_USLEEP = 0x2F2 // 754 + SYS___UTXTRM = 0x2F3 // 755 + SYS___SRCTRM = 0x2F4 // 756 + SYS_FTIME = 0x2F5 // 757 + SYS_GETTIMEOFDAY = 0x2F6 // 758 + SYS_DBM_CLEARERR = 0x2F7 // 759 + SYS_DBM_CLOSE = 0x2F8 // 760 + SYS_DBM_DELETE = 0x2F9 // 761 + SYS_DBM_ERROR = 0x2FA // 762 + SYS_DBM_FETCH = 0x2FB // 763 + SYS_DBM_FIRSTKEY = 0x2FC // 764 + SYS_DBM_NEXTKEY = 0x2FD // 765 + SYS_DBM_OPEN = 0x2FE // 766 + SYS_DBM_STORE = 0x2FF // 767 + SYS___NDMTRM = 0x300 // 768 + SYS_FTOK = 0x301 // 769 + SYS_BASENAME = 0x302 // 770 + SYS_DIRNAME = 0x303 // 771 + SYS_GETDTABLESIZE = 0x304 // 772 + SYS_MKSTEMP = 0x305 // 773 + SYS_MKTEMP = 0x306 // 774 + SYS_NFTW = 0x307 // 775 + SYS_GETWD = 0x308 // 776 + SYS_LOCKF = 0x309 // 777 + SYS__LONGJMP = 0x30D // 781 + SYS__SETJMP = 0x30E // 782 + SYS_VFORK = 0x30F // 783 + SYS_WORDEXP = 0x310 // 784 + SYS_WORDFREE = 0x311 // 785 + SYS_GETPGID = 0x312 // 786 + SYS_GETSID = 0x313 // 787 + SYS___UTMPXNAME = 0x314 // 788 + SYS_CUSERID = 0x315 // 789 + SYS_GETPASS = 0x316 // 790 + SYS_FNMATCH = 0x317 // 791 + SYS_FTW = 0x318 // 792 + SYS_GETW = 0x319 // 793 + SYS_GLOB = 0x31A // 794 + SYS_GLOBFREE = 0x31B // 795 + SYS_PUTW = 0x31C // 796 + SYS_SEEKDIR = 0x31D // 797 + SYS_TELLDIR = 0x31E // 798 + SYS_TEMPNAM = 0x31F // 799 + SYS_ACOSH = 0x320 // 800 + SYS_ASINH = 0x321 // 801 + SYS_ATANH = 0x322 // 802 + SYS_CBRT = 0x323 // 803 + SYS_EXPM1 = 0x324 // 804 + SYS_ILOGB = 0x325 // 805 + SYS_LOGB = 0x326 // 806 + SYS_LOG1P = 0x327 // 807 + SYS_NEXTAFTER = 0x328 // 808 + SYS_RINT = 0x329 // 809 + SYS_REMAINDER = 0x32A // 810 + SYS_SCALB = 0x32B // 811 + SYS_LGAMMA = 0x32C // 812 + SYS_TTYSLOT = 0x32D // 813 + SYS_GETTIMEOFDAY_R = 0x32E // 814 + SYS_SYNC = 0x32F // 815 + SYS_SPAWN = 0x330 // 816 + SYS_SPAWNP = 0x331 // 817 + SYS_GETLOGIN_UU = 0x332 // 818 + SYS_ECVT = 0x333 // 819 + SYS_FCVT = 0x334 // 820 + SYS_GCVT = 0x335 // 821 + SYS_ACCEPT = 0x336 // 822 + SYS_BIND = 0x337 // 823 + SYS_CONNECT = 0x338 // 824 + SYS_ENDHOSTENT = 0x339 // 825 + SYS_ENDPROTOENT = 0x33A // 826 + SYS_ENDSERVENT = 0x33B // 827 + SYS_GETHOSTBYADDR_R = 0x33C // 828 + SYS_GETHOSTBYADDR = 
0x33D // 829 + SYS_GETHOSTBYNAME_R = 0x33E // 830 + SYS_GETHOSTBYNAME = 0x33F // 831 + SYS_GETHOSTENT = 0x340 // 832 + SYS_GETHOSTID = 0x341 // 833 + SYS_GETHOSTNAME = 0x342 // 834 + SYS_GETNETBYADDR = 0x343 // 835 + SYS_GETNETBYNAME = 0x344 // 836 + SYS_GETNETENT = 0x345 // 837 + SYS_GETPEERNAME = 0x346 // 838 + SYS_GETPROTOBYNAME = 0x347 // 839 + SYS_GETPROTOBYNUMBER = 0x348 // 840 + SYS_GETPROTOENT = 0x349 // 841 + SYS_GETSERVBYNAME = 0x34A // 842 + SYS_GETSERVBYPORT = 0x34B // 843 + SYS_GETSERVENT = 0x34C // 844 + SYS_GETSOCKNAME = 0x34D // 845 + SYS_GETSOCKOPT = 0x34E // 846 + SYS_INET_ADDR = 0x34F // 847 + SYS_INET_LNAOF = 0x350 // 848 + SYS_INET_MAKEADDR = 0x351 // 849 + SYS_INET_NETOF = 0x352 // 850 + SYS_INET_NETWORK = 0x353 // 851 + SYS_INET_NTOA = 0x354 // 852 + SYS_IOCTL = 0x355 // 853 + SYS_LISTEN = 0x356 // 854 + SYS_READV = 0x357 // 855 + SYS_RECV = 0x358 // 856 + SYS_RECVFROM = 0x359 // 857 + SYS_SELECT = 0x35B // 859 + SYS_SELECTEX = 0x35C // 860 + SYS_SEND = 0x35D // 861 + SYS_SENDTO = 0x35F // 863 + SYS_SETHOSTENT = 0x360 // 864 + SYS_SETNETENT = 0x361 // 865 + SYS_SETPEER = 0x362 // 866 + SYS_SETPROTOENT = 0x363 // 867 + SYS_SETSERVENT = 0x364 // 868 + SYS_SETSOCKOPT = 0x365 // 869 + SYS_SHUTDOWN = 0x366 // 870 + SYS_SOCKET = 0x367 // 871 + SYS_SOCKETPAIR = 0x368 // 872 + SYS_WRITEV = 0x369 // 873 + SYS_CHROOT = 0x36A // 874 + SYS_W_STATVFS = 0x36B // 875 + SYS_ULIMIT = 0x36C // 876 + SYS_ISNAN = 0x36D // 877 + SYS_UTIMES = 0x36E // 878 + SYS___H_ERRNO = 0x36F // 879 + SYS_ENDNETENT = 0x370 // 880 + SYS_CLOSELOG = 0x371 // 881 + SYS_OPENLOG = 0x372 // 882 + SYS_SETLOGMASK = 0x373 // 883 + SYS_SYSLOG = 0x374 // 884 + SYS_PTSNAME = 0x375 // 885 + SYS_SETREUID = 0x376 // 886 + SYS_SETREGID = 0x377 // 887 + SYS_REALPATH = 0x378 // 888 + SYS___SIGNGAM = 0x379 // 889 + SYS_GRANTPT = 0x37A // 890 + SYS_UNLOCKPT = 0x37B // 891 + SYS_TCGETSID = 0x37C // 892 + SYS___TCGETCP = 0x37D // 893 + SYS___TCSETCP = 0x37E // 894 + SYS___TCSETTABLES = 0x37F // 895 + SYS_POLL = 0x380 // 896 + SYS_REXEC = 0x381 // 897 + SYS___ISASCII2 = 0x382 // 898 + SYS___TOASCII2 = 0x383 // 899 + SYS_CHPRIORITY = 0x384 // 900 + SYS_PTHREAD_ATTR_SETSYNCTYPE_NP = 0x385 // 901 + SYS_PTHREAD_ATTR_GETSYNCTYPE_NP = 0x386 // 902 + SYS_PTHREAD_SET_LIMIT_NP = 0x387 // 903 + SYS___STNETENT = 0x388 // 904 + SYS___STPROTOENT = 0x389 // 905 + SYS___STSERVENT = 0x38A // 906 + SYS___STHOSTENT = 0x38B // 907 + SYS_NLIST = 0x38C // 908 + SYS___IPDBCS = 0x38D // 909 + SYS___IPDSPX = 0x38E // 910 + SYS___IPMSGC = 0x38F // 911 + SYS___SELECT1 = 0x390 // 912 + SYS_PTHREAD_SECURITY_NP = 0x391 // 913 + SYS___CHECK_RESOURCE_AUTH_NP = 0x392 // 914 + SYS___CONVERT_ID_NP = 0x393 // 915 + SYS___OPENVMREL = 0x394 // 916 + SYS_WMEMCHR = 0x395 // 917 + SYS_WMEMCMP = 0x396 // 918 + SYS_WMEMCPY = 0x397 // 919 + SYS_WMEMMOVE = 0x398 // 920 + SYS_WMEMSET = 0x399 // 921 + SYS___FPUTWC = 0x400 // 1024 + SYS___PUTWC = 0x401 // 1025 + SYS___PWCHAR = 0x402 // 1026 + SYS___WCSFTM = 0x403 // 1027 + SYS___WCSTOK = 0x404 // 1028 + SYS___WCWDTH = 0x405 // 1029 + SYS_T_ACCEPT = 0x409 // 1033 + SYS_T_ALLOC = 0x40A // 1034 + SYS_T_BIND = 0x40B // 1035 + SYS_T_CLOSE = 0x40C // 1036 + SYS_T_CONNECT = 0x40D // 1037 + SYS_T_ERROR = 0x40E // 1038 + SYS_T_FREE = 0x40F // 1039 + SYS_T_GETINFO = 0x410 // 1040 + SYS_T_GETPROTADDR = 0x411 // 1041 + SYS_T_GETSTATE = 0x412 // 1042 + SYS_T_LISTEN = 0x413 // 1043 + SYS_T_LOOK = 0x414 // 1044 + SYS_T_OPEN = 0x415 // 1045 + SYS_T_OPTMGMT = 0x416 // 1046 + SYS_T_RCV = 0x417 // 1047 + SYS_T_RCVCONNECT = 0x418 // 1048 + 
SYS_T_RCVDIS = 0x419 // 1049 + SYS_T_RCVREL = 0x41A // 1050 + SYS_T_RCVUDATA = 0x41B // 1051 + SYS_T_RCVUDERR = 0x41C // 1052 + SYS_T_SND = 0x41D // 1053 + SYS_T_SNDDIS = 0x41E // 1054 + SYS_T_SNDREL = 0x41F // 1055 + SYS_T_SNDUDATA = 0x420 // 1056 + SYS_T_STRERROR = 0x421 // 1057 + SYS_T_SYNC = 0x422 // 1058 + SYS_T_UNBIND = 0x423 // 1059 + SYS___T_ERRNO = 0x424 // 1060 + SYS___RECVMSG2 = 0x425 // 1061 + SYS___SENDMSG2 = 0x426 // 1062 + SYS_FATTACH = 0x427 // 1063 + SYS_FDETACH = 0x428 // 1064 + SYS_GETMSG = 0x429 // 1065 + SYS_GETPMSG = 0x42A // 1066 + SYS_ISASTREAM = 0x42B // 1067 + SYS_PUTMSG = 0x42C // 1068 + SYS_PUTPMSG = 0x42D // 1069 + SYS___ISPOSIXON = 0x42E // 1070 + SYS___OPENMVSREL = 0x42F // 1071 + SYS_GETCONTEXT = 0x430 // 1072 + SYS_SETCONTEXT = 0x431 // 1073 + SYS_MAKECONTEXT = 0x432 // 1074 + SYS_SWAPCONTEXT = 0x433 // 1075 + SYS_PTHREAD_GETSPECIFIC_D8_NP = 0x434 // 1076 + SYS_GETCLIENTID = 0x470 // 1136 + SYS___GETCLIENTID = 0x471 // 1137 + SYS_GETSTABLESIZE = 0x472 // 1138 + SYS_GETIBMOPT = 0x473 // 1139 + SYS_GETIBMSOCKOPT = 0x474 // 1140 + SYS_GIVESOCKET = 0x475 // 1141 + SYS_IBMSFLUSH = 0x476 // 1142 + SYS_MAXDESC = 0x477 // 1143 + SYS_SETIBMOPT = 0x478 // 1144 + SYS_SETIBMSOCKOPT = 0x479 // 1145 + SYS_SOCK_DEBUG = 0x47A // 1146 + SYS_SOCK_DO_TESTSTOR = 0x47D // 1149 + SYS_TAKESOCKET = 0x47E // 1150 + SYS___SERVER_INIT = 0x47F // 1151 + SYS___SERVER_PWU = 0x480 // 1152 + SYS_PTHREAD_TAG_NP = 0x481 // 1153 + SYS___CONSOLE = 0x482 // 1154 + SYS___WSINIT = 0x483 // 1155 + SYS___IPTCPN = 0x489 // 1161 + SYS___SMF_RECORD = 0x48A // 1162 + SYS___IPHOST = 0x48B // 1163 + SYS___IPNODE = 0x48C // 1164 + SYS___SERVER_CLASSIFY_CREATE = 0x48D // 1165 + SYS___SERVER_CLASSIFY_DESTROY = 0x48E // 1166 + SYS___SERVER_CLASSIFY_RESET = 0x48F // 1167 + SYS___SERVER_CLASSIFY = 0x490 // 1168 + SYS___HEAPRPT = 0x496 // 1174 + SYS___FNWSA = 0x49B // 1179 + SYS___SPAWN2 = 0x49D // 1181 + SYS___SPAWNP2 = 0x49E // 1182 + SYS___GDRR = 0x4A1 // 1185 + SYS___HRRNO = 0x4A2 // 1186 + SYS___OPRG = 0x4A3 // 1187 + SYS___OPRR = 0x4A4 // 1188 + SYS___OPND = 0x4A5 // 1189 + SYS___OPPT = 0x4A6 // 1190 + SYS___SIGGM = 0x4A7 // 1191 + SYS___DGHT = 0x4A8 // 1192 + SYS___TZNE = 0x4A9 // 1193 + SYS___TZZN = 0x4AA // 1194 + SYS___TRRNO = 0x4AF // 1199 + SYS___ENVN = 0x4B0 // 1200 + SYS___MLOCKALL = 0x4B1 // 1201 + SYS_CREATEWO = 0x4B2 // 1202 + SYS_CREATEWORKUNIT = 0x4B2 // 1202 + SYS_CONTINUE = 0x4B3 // 1203 + SYS_CONTINUEWORKUNIT = 0x4B3 // 1203 + SYS_CONNECTW = 0x4B4 // 1204 + SYS_CONNECTWORKMGR = 0x4B4 // 1204 + SYS_CONNECTS = 0x4B5 // 1205 + SYS_CONNECTSERVER = 0x4B5 // 1205 + SYS_DISCONNE = 0x4B6 // 1206 + SYS_DISCONNECTSERVER = 0x4B6 // 1206 + SYS_JOINWORK = 0x4B7 // 1207 + SYS_JOINWORKUNIT = 0x4B7 // 1207 + SYS_LEAVEWOR = 0x4B8 // 1208 + SYS_LEAVEWORKUNIT = 0x4B8 // 1208 + SYS_DELETEWO = 0x4B9 // 1209 + SYS_DELETEWORKUNIT = 0x4B9 // 1209 + SYS_QUERYMET = 0x4BA // 1210 + SYS_QUERYMETRICS = 0x4BA // 1210 + SYS_QUERYSCH = 0x4BB // 1211 + SYS_QUERYSCHENV = 0x4BB // 1211 + SYS_CHECKSCH = 0x4BC // 1212 + SYS_CHECKSCHENV = 0x4BC // 1212 + SYS___PID_AFFINITY = 0x4BD // 1213 + SYS___ASINH_B = 0x4BE // 1214 + SYS___ATAN_B = 0x4BF // 1215 + SYS___CBRT_B = 0x4C0 // 1216 + SYS___CEIL_B = 0x4C1 // 1217 + SYS_COPYSIGN = 0x4C2 // 1218 + SYS___COS_B = 0x4C3 // 1219 + SYS___ERF_B = 0x4C4 // 1220 + SYS___ERFC_B = 0x4C5 // 1221 + SYS___EXPM1_B = 0x4C6 // 1222 + SYS___FABS_B = 0x4C7 // 1223 + SYS_FINITE = 0x4C8 // 1224 + SYS___FLOOR_B = 0x4C9 // 1225 + SYS___FREXP_B = 0x4CA // 1226 + SYS___ILOGB_B = 0x4CB // 1227 + 
SYS___ISNAN_B = 0x4CC // 1228 + SYS___LDEXP_B = 0x4CD // 1229 + SYS___LOG1P_B = 0x4CE // 1230 + SYS___LOGB_B = 0x4CF // 1231 + SYS_MATHERR = 0x4D0 // 1232 + SYS___MODF_B = 0x4D1 // 1233 + SYS___NEXTAFTER_B = 0x4D2 // 1234 + SYS___RINT_B = 0x4D3 // 1235 + SYS_SCALBN = 0x4D4 // 1236 + SYS_SIGNIFIC = 0x4D5 // 1237 + SYS_SIGNIFICAND = 0x4D5 // 1237 + SYS___SIN_B = 0x4D6 // 1238 + SYS___TAN_B = 0x4D7 // 1239 + SYS___TANH_B = 0x4D8 // 1240 + SYS___ACOS_B = 0x4D9 // 1241 + SYS___ACOSH_B = 0x4DA // 1242 + SYS___ASIN_B = 0x4DB // 1243 + SYS___ATAN2_B = 0x4DC // 1244 + SYS___ATANH_B = 0x4DD // 1245 + SYS___COSH_B = 0x4DE // 1246 + SYS___EXP_B = 0x4DF // 1247 + SYS___FMOD_B = 0x4E0 // 1248 + SYS___GAMMA_B = 0x4E1 // 1249 + SYS_GAMMA_R = 0x4E2 // 1250 + SYS___HYPOT_B = 0x4E3 // 1251 + SYS___J0_B = 0x4E4 // 1252 + SYS___Y0_B = 0x4E5 // 1253 + SYS___J1_B = 0x4E6 // 1254 + SYS___Y1_B = 0x4E7 // 1255 + SYS___JN_B = 0x4E8 // 1256 + SYS___YN_B = 0x4E9 // 1257 + SYS___LGAMMA_B = 0x4EA // 1258 + SYS_LGAMMA_R = 0x4EB // 1259 + SYS___LOG_B = 0x4EC // 1260 + SYS___LOG10_B = 0x4ED // 1261 + SYS___POW_B = 0x4EE // 1262 + SYS___REMAINDER_B = 0x4EF // 1263 + SYS___SCALB_B = 0x4F0 // 1264 + SYS___SINH_B = 0x4F1 // 1265 + SYS___SQRT_B = 0x4F2 // 1266 + SYS___OPENDIR2 = 0x4F3 // 1267 + SYS___READDIR2 = 0x4F4 // 1268 + SYS___LOGIN = 0x4F5 // 1269 + SYS___OPEN_STAT = 0x4F6 // 1270 + SYS_ACCEPT_AND_RECV = 0x4F7 // 1271 + SYS___FP_SETMODE = 0x4F8 // 1272 + SYS___SIGACTIONSET = 0x4FB // 1275 + SYS___UCREATE = 0x4FC // 1276 + SYS___UMALLOC = 0x4FD // 1277 + SYS___UFREE = 0x4FE // 1278 + SYS___UHEAPREPORT = 0x4FF // 1279 + SYS___ISBFP = 0x500 // 1280 + SYS___FP_CAST = 0x501 // 1281 + SYS___CERTIFICATE = 0x502 // 1282 + SYS_SEND_FILE = 0x503 // 1283 + SYS_AIO_CANCEL = 0x504 // 1284 + SYS_AIO_ERROR = 0x505 // 1285 + SYS_AIO_READ = 0x506 // 1286 + SYS_AIO_RETURN = 0x507 // 1287 + SYS_AIO_SUSPEND = 0x508 // 1288 + SYS_AIO_WRITE = 0x509 // 1289 + SYS_PTHREAD_MUTEXATTR_GETPSHARED = 0x50A // 1290 + SYS_PTHREAD_MUTEXATTR_SETPSHARED = 0x50B // 1291 + SYS_PTHREAD_RWLOCK_DESTROY = 0x50C // 1292 + SYS_PTHREAD_RWLOCK_INIT = 0x50D // 1293 + SYS_PTHREAD_RWLOCK_RDLOCK = 0x50E // 1294 + SYS_PTHREAD_RWLOCK_TRYRDLOCK = 0x50F // 1295 + SYS_PTHREAD_RWLOCK_TRYWRLOCK = 0x510 // 1296 + SYS_PTHREAD_RWLOCK_UNLOCK = 0x511 // 1297 + SYS_PTHREAD_RWLOCK_WRLOCK = 0x512 // 1298 + SYS_PTHREAD_RWLOCKATTR_GETPSHARED = 0x513 // 1299 + SYS_PTHREAD_RWLOCKATTR_SETPSHARED = 0x514 // 1300 + SYS_PTHREAD_RWLOCKATTR_INIT = 0x515 // 1301 + SYS_PTHREAD_RWLOCKATTR_DESTROY = 0x516 // 1302 + SYS___CTTBL = 0x517 // 1303 + SYS_PTHREAD_MUTEXATTR_SETTYPE = 0x518 // 1304 + SYS_PTHREAD_MUTEXATTR_GETTYPE = 0x519 // 1305 + SYS___FP_CLR_FLAG = 0x51A // 1306 + SYS___FP_READ_FLAG = 0x51B // 1307 + SYS___FP_RAISE_XCP = 0x51C // 1308 + SYS___FP_CLASS = 0x51D // 1309 + SYS___FP_FINITE = 0x51E // 1310 + SYS___FP_ISNAN = 0x51F // 1311 + SYS___FP_UNORDERED = 0x520 // 1312 + SYS___FP_READ_RND = 0x521 // 1313 + SYS___FP_READ_RND_B = 0x522 // 1314 + SYS___FP_SWAP_RND = 0x523 // 1315 + SYS___FP_SWAP_RND_B = 0x524 // 1316 + SYS___FP_LEVEL = 0x525 // 1317 + SYS___FP_BTOH = 0x526 // 1318 + SYS___FP_HTOB = 0x527 // 1319 + SYS___FPC_RD = 0x528 // 1320 + SYS___FPC_WR = 0x529 // 1321 + SYS___FPC_RW = 0x52A // 1322 + SYS___FPC_SM = 0x52B // 1323 + SYS___FPC_RS = 0x52C // 1324 + SYS_SIGTIMEDWAIT = 0x52D // 1325 + SYS_SIGWAITINFO = 0x52E // 1326 + SYS___CHKBFP = 0x52F // 1327 + SYS___W_PIOCTL = 0x59E // 1438 + SYS___OSENV = 0x59F // 1439 + SYS_EXPORTWO = 0x5A1 // 1441 + SYS_EXPORTWORKUNIT = 0x5A1 // 1441 
+ SYS_UNDOEXPO = 0x5A2 // 1442 + SYS_UNDOEXPORTWORKUNIT = 0x5A2 // 1442 + SYS_IMPORTWO = 0x5A3 // 1443 + SYS_IMPORTWORKUNIT = 0x5A3 // 1443 + SYS_UNDOIMPO = 0x5A4 // 1444 + SYS_UNDOIMPORTWORKUNIT = 0x5A4 // 1444 + SYS_EXTRACTW = 0x5A5 // 1445 + SYS_EXTRACTWORKUNIT = 0x5A5 // 1445 + SYS___CPL = 0x5A6 // 1446 + SYS___MAP_INIT = 0x5A7 // 1447 + SYS___MAP_SERVICE = 0x5A8 // 1448 + SYS_SIGQUEUE = 0x5A9 // 1449 + SYS___MOUNT = 0x5AA // 1450 + SYS___GETUSERID = 0x5AB // 1451 + SYS___IPDOMAINNAME = 0x5AC // 1452 + SYS_QUERYENC = 0x5AD // 1453 + SYS_QUERYWORKUNITCLASSIFICATION = 0x5AD // 1453 + SYS_CONNECTE = 0x5AE // 1454 + SYS_CONNECTEXPORTIMPORT = 0x5AE // 1454 + SYS___FP_SWAPMODE = 0x5AF // 1455 + SYS_STRTOLL = 0x5B0 // 1456 + SYS_STRTOULL = 0x5B1 // 1457 + SYS___DSA_PREV = 0x5B2 // 1458 + SYS___EP_FIND = 0x5B3 // 1459 + SYS___SERVER_THREADS_QUERY = 0x5B4 // 1460 + SYS___MSGRCV_TIMED = 0x5B7 // 1463 + SYS___SEMOP_TIMED = 0x5B8 // 1464 + SYS___GET_CPUID = 0x5B9 // 1465 + SYS___GET_SYSTEM_SETTINGS = 0x5BA // 1466 + SYS_FTELLO = 0x5C8 // 1480 + SYS_FSEEKO = 0x5C9 // 1481 + SYS_LLDIV = 0x5CB // 1483 + SYS_WCSTOLL = 0x5CC // 1484 + SYS_WCSTOULL = 0x5CD // 1485 + SYS_LLABS = 0x5CE // 1486 + SYS___CONSOLE2 = 0x5D2 // 1490 + SYS_INET_NTOP = 0x5D3 // 1491 + SYS_INET_PTON = 0x5D4 // 1492 + SYS___RES = 0x5D6 // 1494 + SYS_RES_MKQUERY = 0x5D7 // 1495 + SYS_RES_INIT = 0x5D8 // 1496 + SYS_RES_QUERY = 0x5D9 // 1497 + SYS_RES_SEARCH = 0x5DA // 1498 + SYS_RES_SEND = 0x5DB // 1499 + SYS_RES_QUERYDOMAIN = 0x5DC // 1500 + SYS_DN_EXPAND = 0x5DD // 1501 + SYS_DN_SKIPNAME = 0x5DE // 1502 + SYS_DN_COMP = 0x5DF // 1503 + SYS_ASCTIME_R = 0x5E0 // 1504 + SYS_CTIME_R = 0x5E1 // 1505 + SYS_GMTIME_R = 0x5E2 // 1506 + SYS_LOCALTIME_R = 0x5E3 // 1507 + SYS_RAND_R = 0x5E4 // 1508 + SYS_STRTOK_R = 0x5E5 // 1509 + SYS_READDIR_R = 0x5E6 // 1510 + SYS_GETGRGID_R = 0x5E7 // 1511 + SYS_GETGRNAM_R = 0x5E8 // 1512 + SYS_GETLOGIN_R = 0x5E9 // 1513 + SYS_GETPWNAM_R = 0x5EA // 1514 + SYS_GETPWUID_R = 0x5EB // 1515 + SYS_TTYNAME_R = 0x5EC // 1516 + SYS_PTHREAD_ATFORK = 0x5ED // 1517 + SYS_PTHREAD_ATTR_GETGUARDSIZE = 0x5EE // 1518 + SYS_PTHREAD_ATTR_GETSTACKADDR = 0x5EF // 1519 + SYS_PTHREAD_ATTR_SETGUARDSIZE = 0x5F0 // 1520 + SYS_PTHREAD_ATTR_SETSTACKADDR = 0x5F1 // 1521 + SYS_PTHREAD_CONDATTR_GETPSHARED = 0x5F2 // 1522 + SYS_PTHREAD_CONDATTR_SETPSHARED = 0x5F3 // 1523 + SYS_PTHREAD_GETCONCURRENCY = 0x5F4 // 1524 + SYS_PTHREAD_KEY_DELETE = 0x5F5 // 1525 + SYS_PTHREAD_SETCONCURRENCY = 0x5F6 // 1526 + SYS_PTHREAD_SIGMASK = 0x5F7 // 1527 + SYS___DISCARDDATA = 0x5F8 // 1528 + SYS_PTHREAD_ATTR_GETSCHEDPARAM = 0x5F9 // 1529 + SYS_PTHREAD_ATTR_SETSCHEDPARAM = 0x5FA // 1530 + SYS_PTHREAD_ATTR_GETDETACHSTATE_U98 = 0x5FB // 1531 + SYS_PTHREAD_ATTR_SETDETACHSTATE_U98 = 0x5FC // 1532 + SYS_PTHREAD_DETACH_U98 = 0x5FD // 1533 + SYS_PTHREAD_GETSPECIFIC_U98 = 0x5FE // 1534 + SYS_PTHREAD_SETCANCELSTATE = 0x5FF // 1535 + SYS_PTHREAD_SETCANCELTYPE = 0x600 // 1536 + SYS_PTHREAD_TESTCANCEL = 0x601 // 1537 + SYS___ATANF_B = 0x602 // 1538 + SYS___ATANL_B = 0x603 // 1539 + SYS___CEILF_B = 0x604 // 1540 + SYS___CEILL_B = 0x605 // 1541 + SYS___COSF_B = 0x606 // 1542 + SYS___COSL_B = 0x607 // 1543 + SYS___FABSF_B = 0x608 // 1544 + SYS___FABSL_B = 0x609 // 1545 + SYS___FLOORF_B = 0x60A // 1546 + SYS___FLOORL_B = 0x60B // 1547 + SYS___FREXPF_B = 0x60C // 1548 + SYS___FREXPL_B = 0x60D // 1549 + SYS___LDEXPF_B = 0x60E // 1550 + SYS___LDEXPL_B = 0x60F // 1551 + SYS___SINF_B = 0x610 // 1552 + SYS___SINL_B = 0x611 // 1553 + SYS___TANF_B = 0x612 // 1554 + SYS___TANL_B = 
0x613 // 1555 + SYS___TANHF_B = 0x614 // 1556 + SYS___TANHL_B = 0x615 // 1557 + SYS___ACOSF_B = 0x616 // 1558 + SYS___ACOSL_B = 0x617 // 1559 + SYS___ASINF_B = 0x618 // 1560 + SYS___ASINL_B = 0x619 // 1561 + SYS___ATAN2F_B = 0x61A // 1562 + SYS___ATAN2L_B = 0x61B // 1563 + SYS___COSHF_B = 0x61C // 1564 + SYS___COSHL_B = 0x61D // 1565 + SYS___EXPF_B = 0x61E // 1566 + SYS___EXPL_B = 0x61F // 1567 + SYS___LOGF_B = 0x620 // 1568 + SYS___LOGL_B = 0x621 // 1569 + SYS___LOG10F_B = 0x622 // 1570 + SYS___LOG10L_B = 0x623 // 1571 + SYS___POWF_B = 0x624 // 1572 + SYS___POWL_B = 0x625 // 1573 + SYS___SINHF_B = 0x626 // 1574 + SYS___SINHL_B = 0x627 // 1575 + SYS___SQRTF_B = 0x628 // 1576 + SYS___SQRTL_B = 0x629 // 1577 + SYS___ABSF_B = 0x62A // 1578 + SYS___ABS_B = 0x62B // 1579 + SYS___ABSL_B = 0x62C // 1580 + SYS___FMODF_B = 0x62D // 1581 + SYS___FMODL_B = 0x62E // 1582 + SYS___MODFF_B = 0x62F // 1583 + SYS___MODFL_B = 0x630 // 1584 + SYS_ABSF = 0x631 // 1585 + SYS_ABSL = 0x632 // 1586 + SYS_ACOSF = 0x633 // 1587 + SYS_ACOSL = 0x634 // 1588 + SYS_ASINF = 0x635 // 1589 + SYS_ASINL = 0x636 // 1590 + SYS_ATAN2F = 0x637 // 1591 + SYS_ATAN2L = 0x638 // 1592 + SYS_ATANF = 0x639 // 1593 + SYS_ATANL = 0x63A // 1594 + SYS_CEILF = 0x63B // 1595 + SYS_CEILL = 0x63C // 1596 + SYS_COSF = 0x63D // 1597 + SYS_COSL = 0x63E // 1598 + SYS_COSHF = 0x63F // 1599 + SYS_COSHL = 0x640 // 1600 + SYS_EXPF = 0x641 // 1601 + SYS_EXPL = 0x642 // 1602 + SYS_TANHF = 0x643 // 1603 + SYS_TANHL = 0x644 // 1604 + SYS_LOG10F = 0x645 // 1605 + SYS_LOG10L = 0x646 // 1606 + SYS_LOGF = 0x647 // 1607 + SYS_LOGL = 0x648 // 1608 + SYS_POWF = 0x649 // 1609 + SYS_POWL = 0x64A // 1610 + SYS_SINF = 0x64B // 1611 + SYS_SINL = 0x64C // 1612 + SYS_SQRTF = 0x64D // 1613 + SYS_SQRTL = 0x64E // 1614 + SYS_SINHF = 0x64F // 1615 + SYS_SINHL = 0x650 // 1616 + SYS_TANF = 0x651 // 1617 + SYS_TANL = 0x652 // 1618 + SYS_FABSF = 0x653 // 1619 + SYS_FABSL = 0x654 // 1620 + SYS_FLOORF = 0x655 // 1621 + SYS_FLOORL = 0x656 // 1622 + SYS_FMODF = 0x657 // 1623 + SYS_FMODL = 0x658 // 1624 + SYS_FREXPF = 0x659 // 1625 + SYS_FREXPL = 0x65A // 1626 + SYS_LDEXPF = 0x65B // 1627 + SYS_LDEXPL = 0x65C // 1628 + SYS_MODFF = 0x65D // 1629 + SYS_MODFL = 0x65E // 1630 + SYS_BTOWC = 0x65F // 1631 + SYS___CHATTR = 0x660 // 1632 + SYS___FCHATTR = 0x661 // 1633 + SYS___TOCCSID = 0x662 // 1634 + SYS___CSNAMETYPE = 0x663 // 1635 + SYS___TOCSNAME = 0x664 // 1636 + SYS___CCSIDTYPE = 0x665 // 1637 + SYS___AE_CORRESTBL_QUERY = 0x666 // 1638 + SYS___AE_AUTOCONVERT_STATE = 0x667 // 1639 + SYS_DN_FIND = 0x668 // 1640 + SYS___GETHOSTBYADDR_A = 0x669 // 1641 + SYS___GETHOSTBYNAME_A = 0x66A // 1642 + SYS___RES_INIT_A = 0x66B // 1643 + SYS___GETHOSTBYADDR_R_A = 0x66C // 1644 + SYS___GETHOSTBYNAME_R_A = 0x66D // 1645 + SYS___CHARMAP_INIT_A = 0x66E // 1646 + SYS___MBLEN_A = 0x66F // 1647 + SYS___MBLEN_SB_A = 0x670 // 1648 + SYS___MBLEN_STD_A = 0x671 // 1649 + SYS___MBLEN_UTF = 0x672 // 1650 + SYS___MBSTOWCS_A = 0x673 // 1651 + SYS___MBSTOWCS_STD_A = 0x674 // 1652 + SYS___MBTOWC_A = 0x675 // 1653 + SYS___MBTOWC_ISO1 = 0x676 // 1654 + SYS___MBTOWC_SBCS = 0x677 // 1655 + SYS___MBTOWC_MBCS = 0x678 // 1656 + SYS___MBTOWC_UTF = 0x679 // 1657 + SYS___WCSTOMBS_A = 0x67A // 1658 + SYS___WCSTOMBS_STD_A = 0x67B // 1659 + SYS___WCSWIDTH_A = 0x67C // 1660 + SYS___GETGRGID_R_A = 0x67D // 1661 + SYS___WCSWIDTH_STD_A = 0x67E // 1662 + SYS___WCSWIDTH_ASIA = 0x67F // 1663 + SYS___CSID_A = 0x680 // 1664 + SYS___CSID_STD_A = 0x681 // 1665 + SYS___WCSID_A = 0x682 // 1666 + SYS___WCSID_STD_A = 0x683 // 1667 + 
SYS___WCTOMB_A = 0x684 // 1668 + SYS___WCTOMB_ISO1 = 0x685 // 1669 + SYS___WCTOMB_STD_A = 0x686 // 1670 + SYS___WCTOMB_UTF = 0x687 // 1671 + SYS___WCWIDTH_A = 0x688 // 1672 + SYS___GETGRNAM_R_A = 0x689 // 1673 + SYS___WCWIDTH_STD_A = 0x68A // 1674 + SYS___WCWIDTH_ASIA = 0x68B // 1675 + SYS___GETPWNAM_R_A = 0x68C // 1676 + SYS___GETPWUID_R_A = 0x68D // 1677 + SYS___GETLOGIN_R_A = 0x68E // 1678 + SYS___TTYNAME_R_A = 0x68F // 1679 + SYS___READDIR_R_A = 0x690 // 1680 + SYS___E2A_S = 0x691 // 1681 + SYS___FNMATCH_A = 0x692 // 1682 + SYS___FNMATCH_C_A = 0x693 // 1683 + SYS___EXECL_A = 0x694 // 1684 + SYS___FNMATCH_STD_A = 0x695 // 1685 + SYS___REGCOMP_A = 0x696 // 1686 + SYS___REGCOMP_STD_A = 0x697 // 1687 + SYS___REGERROR_A = 0x698 // 1688 + SYS___REGERROR_STD_A = 0x699 // 1689 + SYS___REGEXEC_A = 0x69A // 1690 + SYS___REGEXEC_STD_A = 0x69B // 1691 + SYS___REGFREE_A = 0x69C // 1692 + SYS___REGFREE_STD_A = 0x69D // 1693 + SYS___STRCOLL_A = 0x69E // 1694 + SYS___STRCOLL_C_A = 0x69F // 1695 + SYS___EXECLE_A = 0x6A0 // 1696 + SYS___STRCOLL_STD_A = 0x6A1 // 1697 + SYS___STRXFRM_A = 0x6A2 // 1698 + SYS___STRXFRM_C_A = 0x6A3 // 1699 + SYS___EXECLP_A = 0x6A4 // 1700 + SYS___STRXFRM_STD_A = 0x6A5 // 1701 + SYS___WCSCOLL_A = 0x6A6 // 1702 + SYS___WCSCOLL_C_A = 0x6A7 // 1703 + SYS___WCSCOLL_STD_A = 0x6A8 // 1704 + SYS___WCSXFRM_A = 0x6A9 // 1705 + SYS___WCSXFRM_C_A = 0x6AA // 1706 + SYS___WCSXFRM_STD_A = 0x6AB // 1707 + SYS___COLLATE_INIT_A = 0x6AC // 1708 + SYS___WCTYPE_A = 0x6AD // 1709 + SYS___GET_WCTYPE_STD_A = 0x6AE // 1710 + SYS___CTYPE_INIT_A = 0x6AF // 1711 + SYS___ISWCTYPE_A = 0x6B0 // 1712 + SYS___EXECV_A = 0x6B1 // 1713 + SYS___IS_WCTYPE_STD_A = 0x6B2 // 1714 + SYS___TOWLOWER_A = 0x6B3 // 1715 + SYS___TOWLOWER_STD_A = 0x6B4 // 1716 + SYS___TOWUPPER_A = 0x6B5 // 1717 + SYS___TOWUPPER_STD_A = 0x6B6 // 1718 + SYS___LOCALE_INIT_A = 0x6B7 // 1719 + SYS___LOCALECONV_A = 0x6B8 // 1720 + SYS___LOCALECONV_STD_A = 0x6B9 // 1721 + SYS___NL_LANGINFO_A = 0x6BA // 1722 + SYS___NL_LNAGINFO_STD_A = 0x6BB // 1723 + SYS___MONETARY_INIT_A = 0x6BC // 1724 + SYS___STRFMON_A = 0x6BD // 1725 + SYS___STRFMON_STD_A = 0x6BE // 1726 + SYS___GETADDRINFO_A = 0x6BF // 1727 + SYS___CATGETS_A = 0x6C0 // 1728 + SYS___EXECVE_A = 0x6C1 // 1729 + SYS___EXECVP_A = 0x6C2 // 1730 + SYS___SPAWN_A = 0x6C3 // 1731 + SYS___GETNAMEINFO_A = 0x6C4 // 1732 + SYS___SPAWNP_A = 0x6C5 // 1733 + SYS___NUMERIC_INIT_A = 0x6C6 // 1734 + SYS___RESP_INIT_A = 0x6C7 // 1735 + SYS___RPMATCH_A = 0x6C8 // 1736 + SYS___RPMATCH_C_A = 0x6C9 // 1737 + SYS___RPMATCH_STD_A = 0x6CA // 1738 + SYS___TIME_INIT_A = 0x6CB // 1739 + SYS___STRFTIME_A = 0x6CC // 1740 + SYS___STRFTIME_STD_A = 0x6CD // 1741 + SYS___STRPTIME_A = 0x6CE // 1742 + SYS___STRPTIME_STD_A = 0x6CF // 1743 + SYS___WCSFTIME_A = 0x6D0 // 1744 + SYS___WCSFTIME_STD_A = 0x6D1 // 1745 + SYS_____SPAWN2_A = 0x6D2 // 1746 + SYS_____SPAWNP2_A = 0x6D3 // 1747 + SYS___SYNTAX_INIT_A = 0x6D4 // 1748 + SYS___TOD_INIT_A = 0x6D5 // 1749 + SYS___NL_CSINFO_A = 0x6D6 // 1750 + SYS___NL_MONINFO_A = 0x6D7 // 1751 + SYS___NL_NUMINFO_A = 0x6D8 // 1752 + SYS___NL_RESPINFO_A = 0x6D9 // 1753 + SYS___NL_TIMINFO_A = 0x6DA // 1754 + SYS___IF_NAMETOINDEX_A = 0x6DB // 1755 + SYS___IF_INDEXTONAME_A = 0x6DC // 1756 + SYS___PRINTF_A = 0x6DD // 1757 + SYS___ICONV_OPEN_A = 0x6DE // 1758 + SYS___DLLLOAD_A = 0x6DF // 1759 + SYS___DLLQUERYFN_A = 0x6E0 // 1760 + SYS___DLLQUERYVAR_A = 0x6E1 // 1761 + SYS_____CHATTR_A = 0x6E2 // 1762 + SYS___E2A_L = 0x6E3 // 1763 + SYS_____TOCCSID_A = 0x6E4 // 1764 + SYS_____TOCSNAME_A = 0x6E5 // 1765 + 
SYS_____CCSIDTYPE_A = 0x6E6 // 1766 + SYS_____CSNAMETYPE_A = 0x6E7 // 1767 + SYS___CHMOD_A = 0x6E8 // 1768 + SYS___MKDIR_A = 0x6E9 // 1769 + SYS___STAT_A = 0x6EA // 1770 + SYS___STAT_O_A = 0x6EB // 1771 + SYS___MKFIFO_A = 0x6EC // 1772 + SYS_____OPEN_STAT_A = 0x6ED // 1773 + SYS___LSTAT_A = 0x6EE // 1774 + SYS___LSTAT_O_A = 0x6EF // 1775 + SYS___MKNOD_A = 0x6F0 // 1776 + SYS___MOUNT_A = 0x6F1 // 1777 + SYS___UMOUNT_A = 0x6F2 // 1778 + SYS___CHAUDIT_A = 0x6F4 // 1780 + SYS___W_GETMNTENT_A = 0x6F5 // 1781 + SYS___CREAT_A = 0x6F6 // 1782 + SYS___OPEN_A = 0x6F7 // 1783 + SYS___SETLOCALE_A = 0x6F9 // 1785 + SYS___FPRINTF_A = 0x6FA // 1786 + SYS___SPRINTF_A = 0x6FB // 1787 + SYS___VFPRINTF_A = 0x6FC // 1788 + SYS___VPRINTF_A = 0x6FD // 1789 + SYS___VSPRINTF_A = 0x6FE // 1790 + SYS___VSWPRINTF_A = 0x6FF // 1791 + SYS___SWPRINTF_A = 0x700 // 1792 + SYS___FSCANF_A = 0x701 // 1793 + SYS___SCANF_A = 0x702 // 1794 + SYS___SSCANF_A = 0x703 // 1795 + SYS___SWSCANF_A = 0x704 // 1796 + SYS___ATOF_A = 0x705 // 1797 + SYS___ATOI_A = 0x706 // 1798 + SYS___ATOL_A = 0x707 // 1799 + SYS___STRTOD_A = 0x708 // 1800 + SYS___STRTOL_A = 0x709 // 1801 + SYS___STRTOUL_A = 0x70A // 1802 + SYS_____AE_CORRESTBL_QUERY_A = 0x70B // 1803 + SYS___A64L_A = 0x70C // 1804 + SYS___ECVT_A = 0x70D // 1805 + SYS___FCVT_A = 0x70E // 1806 + SYS___GCVT_A = 0x70F // 1807 + SYS___L64A_A = 0x710 // 1808 + SYS___STRERROR_A = 0x711 // 1809 + SYS___PERROR_A = 0x712 // 1810 + SYS___FETCH_A = 0x713 // 1811 + SYS___GETENV_A = 0x714 // 1812 + SYS___MKSTEMP_A = 0x717 // 1815 + SYS___PTSNAME_A = 0x718 // 1816 + SYS___PUTENV_A = 0x719 // 1817 + SYS___REALPATH_A = 0x71A // 1818 + SYS___SETENV_A = 0x71B // 1819 + SYS___SYSTEM_A = 0x71C // 1820 + SYS___GETOPT_A = 0x71D // 1821 + SYS___CATOPEN_A = 0x71E // 1822 + SYS___ACCESS_A = 0x71F // 1823 + SYS___CHDIR_A = 0x720 // 1824 + SYS___CHOWN_A = 0x721 // 1825 + SYS___CHROOT_A = 0x722 // 1826 + SYS___GETCWD_A = 0x723 // 1827 + SYS___GETWD_A = 0x724 // 1828 + SYS___LCHOWN_A = 0x725 // 1829 + SYS___LINK_A = 0x726 // 1830 + SYS___PATHCONF_A = 0x727 // 1831 + SYS___IF_NAMEINDEX_A = 0x728 // 1832 + SYS___READLINK_A = 0x729 // 1833 + SYS___RMDIR_A = 0x72A // 1834 + SYS___STATVFS_A = 0x72B // 1835 + SYS___SYMLINK_A = 0x72C // 1836 + SYS___TRUNCATE_A = 0x72D // 1837 + SYS___UNLINK_A = 0x72E // 1838 + SYS___GAI_STRERROR_A = 0x72F // 1839 + SYS___EXTLINK_NP_A = 0x730 // 1840 + SYS___ISALNUM_A = 0x731 // 1841 + SYS___ISALPHA_A = 0x732 // 1842 + SYS___A2E_S = 0x733 // 1843 + SYS___ISCNTRL_A = 0x734 // 1844 + SYS___ISDIGIT_A = 0x735 // 1845 + SYS___ISGRAPH_A = 0x736 // 1846 + SYS___ISLOWER_A = 0x737 // 1847 + SYS___ISPRINT_A = 0x738 // 1848 + SYS___ISPUNCT_A = 0x739 // 1849 + SYS___ISSPACE_A = 0x73A // 1850 + SYS___ISUPPER_A = 0x73B // 1851 + SYS___ISXDIGIT_A = 0x73C // 1852 + SYS___TOLOWER_A = 0x73D // 1853 + SYS___TOUPPER_A = 0x73E // 1854 + SYS___ISWALNUM_A = 0x73F // 1855 + SYS___ISWALPHA_A = 0x740 // 1856 + SYS___A2E_L = 0x741 // 1857 + SYS___ISWCNTRL_A = 0x742 // 1858 + SYS___ISWDIGIT_A = 0x743 // 1859 + SYS___ISWGRAPH_A = 0x744 // 1860 + SYS___ISWLOWER_A = 0x745 // 1861 + SYS___ISWPRINT_A = 0x746 // 1862 + SYS___ISWPUNCT_A = 0x747 // 1863 + SYS___ISWSPACE_A = 0x748 // 1864 + SYS___ISWUPPER_A = 0x749 // 1865 + SYS___ISWXDIGIT_A = 0x74A // 1866 + SYS___CONFSTR_A = 0x74B // 1867 + SYS___FTOK_A = 0x74C // 1868 + SYS___MKTEMP_A = 0x74D // 1869 + SYS___FDOPEN_A = 0x74E // 1870 + SYS___FLDATA_A = 0x74F // 1871 + SYS___REMOVE_A = 0x750 // 1872 + SYS___RENAME_A = 0x751 // 1873 + SYS___TMPNAM_A = 0x752 // 1874 + 
SYS___FOPEN_A = 0x753 // 1875 + SYS___FREOPEN_A = 0x754 // 1876 + SYS___CUSERID_A = 0x755 // 1877 + SYS___POPEN_A = 0x756 // 1878 + SYS___TEMPNAM_A = 0x757 // 1879 + SYS___FTW_A = 0x758 // 1880 + SYS___GETGRENT_A = 0x759 // 1881 + SYS___GETGRGID_A = 0x75A // 1882 + SYS___GETGRNAM_A = 0x75B // 1883 + SYS___GETGROUPSBYNAME_A = 0x75C // 1884 + SYS___GETHOSTENT_A = 0x75D // 1885 + SYS___GETHOSTNAME_A = 0x75E // 1886 + SYS___GETLOGIN_A = 0x75F // 1887 + SYS___INET_NTOP_A = 0x760 // 1888 + SYS___GETPASS_A = 0x761 // 1889 + SYS___GETPWENT_A = 0x762 // 1890 + SYS___GETPWNAM_A = 0x763 // 1891 + SYS___GETPWUID_A = 0x764 // 1892 + SYS_____CHECK_RESOURCE_AUTH_NP_A = 0x765 // 1893 + SYS___CHECKSCHENV_A = 0x766 // 1894 + SYS___CONNECTSERVER_A = 0x767 // 1895 + SYS___CONNECTWORKMGR_A = 0x768 // 1896 + SYS_____CONSOLE_A = 0x769 // 1897 + SYS___CREATEWORKUNIT_A = 0x76A // 1898 + SYS___CTERMID_A = 0x76B // 1899 + SYS___FMTMSG_A = 0x76C // 1900 + SYS___INITGROUPS_A = 0x76D // 1901 + SYS_____LOGIN_A = 0x76E // 1902 + SYS___MSGRCV_A = 0x76F // 1903 + SYS___MSGSND_A = 0x770 // 1904 + SYS___MSGXRCV_A = 0x771 // 1905 + SYS___NFTW_A = 0x772 // 1906 + SYS_____PASSWD_A = 0x773 // 1907 + SYS___PTHREAD_SECURITY_NP_A = 0x774 // 1908 + SYS___QUERYMETRICS_A = 0x775 // 1909 + SYS___QUERYSCHENV = 0x776 // 1910 + SYS___READV_A = 0x777 // 1911 + SYS_____SERVER_CLASSIFY_A = 0x778 // 1912 + SYS_____SERVER_INIT_A = 0x779 // 1913 + SYS_____SERVER_PWU_A = 0x77A // 1914 + SYS___STRCASECMP_A = 0x77B // 1915 + SYS___STRNCASECMP_A = 0x77C // 1916 + SYS___TTYNAME_A = 0x77D // 1917 + SYS___UNAME_A = 0x77E // 1918 + SYS___UTIMES_A = 0x77F // 1919 + SYS___W_GETPSENT_A = 0x780 // 1920 + SYS___WRITEV_A = 0x781 // 1921 + SYS___W_STATFS_A = 0x782 // 1922 + SYS___W_STATVFS_A = 0x783 // 1923 + SYS___FPUTC_A = 0x784 // 1924 + SYS___PUTCHAR_A = 0x785 // 1925 + SYS___PUTS_A = 0x786 // 1926 + SYS___FGETS_A = 0x787 // 1927 + SYS___GETS_A = 0x788 // 1928 + SYS___FPUTS_A = 0x789 // 1929 + SYS___FREAD_A = 0x78A // 1930 + SYS___FWRITE_A = 0x78B // 1931 + SYS___OPEN_O_A = 0x78C // 1932 + SYS___ISASCII = 0x78D // 1933 + SYS___CREAT_O_A = 0x78E // 1934 + SYS___ENVNA = 0x78F // 1935 + SYS___PUTC_A = 0x790 // 1936 + SYS___AE_THREAD_SETMODE = 0x791 // 1937 + SYS___AE_THREAD_SWAPMODE = 0x792 // 1938 + SYS___GETNETBYADDR_A = 0x793 // 1939 + SYS___GETNETBYNAME_A = 0x794 // 1940 + SYS___GETNETENT_A = 0x795 // 1941 + SYS___GETPROTOBYNAME_A = 0x796 // 1942 + SYS___GETPROTOBYNUMBER_A = 0x797 // 1943 + SYS___GETPROTOENT_A = 0x798 // 1944 + SYS___GETSERVBYNAME_A = 0x799 // 1945 + SYS___GETSERVBYPORT_A = 0x79A // 1946 + SYS___GETSERVENT_A = 0x79B // 1947 + SYS___ASCTIME_A = 0x79C // 1948 + SYS___CTIME_A = 0x79D // 1949 + SYS___GETDATE_A = 0x79E // 1950 + SYS___TZSET_A = 0x79F // 1951 + SYS___UTIME_A = 0x7A0 // 1952 + SYS___ASCTIME_R_A = 0x7A1 // 1953 + SYS___CTIME_R_A = 0x7A2 // 1954 + SYS___STRTOLL_A = 0x7A3 // 1955 + SYS___STRTOULL_A = 0x7A4 // 1956 + SYS___FPUTWC_A = 0x7A5 // 1957 + SYS___PUTWC_A = 0x7A6 // 1958 + SYS___PUTWCHAR_A = 0x7A7 // 1959 + SYS___FPUTWS_A = 0x7A8 // 1960 + SYS___UNGETWC_A = 0x7A9 // 1961 + SYS___FGETWC_A = 0x7AA // 1962 + SYS___GETWC_A = 0x7AB // 1963 + SYS___GETWCHAR_A = 0x7AC // 1964 + SYS___FGETWS_A = 0x7AD // 1965 + SYS___GETTIMEOFDAY_A = 0x7AE // 1966 + SYS___GMTIME_A = 0x7AF // 1967 + SYS___GMTIME_R_A = 0x7B0 // 1968 + SYS___LOCALTIME_A = 0x7B1 // 1969 + SYS___LOCALTIME_R_A = 0x7B2 // 1970 + SYS___MKTIME_A = 0x7B3 // 1971 + SYS___TZZNA = 0x7B4 // 1972 + SYS_UNATEXIT = 0x7B5 // 1973 + SYS___CEE3DMP_A = 0x7B6 // 1974 + SYS___CDUMP_A = 
0x7B7 // 1975 + SYS___CSNAP_A = 0x7B8 // 1976 + SYS___CTEST_A = 0x7B9 // 1977 + SYS___CTRACE_A = 0x7BA // 1978 + SYS___VSWPRNTF2_A = 0x7BB // 1979 + SYS___INET_PTON_A = 0x7BC // 1980 + SYS___SYSLOG_A = 0x7BD // 1981 + SYS___CRYPT_A = 0x7BE // 1982 + SYS_____OPENDIR2_A = 0x7BF // 1983 + SYS_____READDIR2_A = 0x7C0 // 1984 + SYS___OPENDIR_A = 0x7C2 // 1986 + SYS___READDIR_A = 0x7C3 // 1987 + SYS_PREAD = 0x7C7 // 1991 + SYS_PWRITE = 0x7C8 // 1992 + SYS_M_CREATE_LAYOUT = 0x7C9 // 1993 + SYS_M_DESTROY_LAYOUT = 0x7CA // 1994 + SYS_M_GETVALUES_LAYOUT = 0x7CB // 1995 + SYS_M_SETVALUES_LAYOUT = 0x7CC // 1996 + SYS_M_TRANSFORM_LAYOUT = 0x7CD // 1997 + SYS_M_WTRANSFORM_LAYOUT = 0x7CE // 1998 + SYS_FWPRINTF = 0x7D1 // 2001 + SYS_WPRINTF = 0x7D2 // 2002 + SYS_VFWPRINT = 0x7D3 // 2003 + SYS_VFWPRINTF = 0x7D3 // 2003 + SYS_VWPRINTF = 0x7D4 // 2004 + SYS_FWSCANF = 0x7D5 // 2005 + SYS_WSCANF = 0x7D6 // 2006 + SYS_WCTRANS = 0x7D7 // 2007 + SYS_TOWCTRAN = 0x7D8 // 2008 + SYS_TOWCTRANS = 0x7D8 // 2008 + SYS___WCSTOD_A = 0x7D9 // 2009 + SYS___WCSTOL_A = 0x7DA // 2010 + SYS___WCSTOUL_A = 0x7DB // 2011 + SYS___BASENAME_A = 0x7DC // 2012 + SYS___DIRNAME_A = 0x7DD // 2013 + SYS___GLOB_A = 0x7DE // 2014 + SYS_FWIDE = 0x7DF // 2015 + SYS___OSNAME = 0x7E0 // 2016 + SYS_____OSNAME_A = 0x7E1 // 2017 + SYS___BTOWC_A = 0x7E4 // 2020 + SYS___WCTOB_A = 0x7E5 // 2021 + SYS___DBM_OPEN_A = 0x7E6 // 2022 + SYS___VFPRINTF2_A = 0x7E7 // 2023 + SYS___VPRINTF2_A = 0x7E8 // 2024 + SYS___VSPRINTF2_A = 0x7E9 // 2025 + SYS___CEIL_H = 0x7EA // 2026 + SYS___FLOOR_H = 0x7EB // 2027 + SYS___MODF_H = 0x7EC // 2028 + SYS___FABS_H = 0x7ED // 2029 + SYS___J0_H = 0x7EE // 2030 + SYS___J1_H = 0x7EF // 2031 + SYS___JN_H = 0x7F0 // 2032 + SYS___Y0_H = 0x7F1 // 2033 + SYS___Y1_H = 0x7F2 // 2034 + SYS___YN_H = 0x7F3 // 2035 + SYS___CEILF_H = 0x7F4 // 2036 + SYS___CEILL_H = 0x7F5 // 2037 + SYS___FLOORF_H = 0x7F6 // 2038 + SYS___FLOORL_H = 0x7F7 // 2039 + SYS___MODFF_H = 0x7F8 // 2040 + SYS___MODFL_H = 0x7F9 // 2041 + SYS___FABSF_H = 0x7FA // 2042 + SYS___FABSL_H = 0x7FB // 2043 + SYS___MALLOC24 = 0x7FC // 2044 + SYS___MALLOC31 = 0x7FD // 2045 + SYS_ACL_INIT = 0x7FE // 2046 + SYS_ACL_FREE = 0x7FF // 2047 + SYS_ACL_FIRST_ENTRY = 0x800 // 2048 + SYS_ACL_GET_ENTRY = 0x801 // 2049 + SYS_ACL_VALID = 0x802 // 2050 + SYS_ACL_CREATE_ENTRY = 0x803 // 2051 + SYS_ACL_DELETE_ENTRY = 0x804 // 2052 + SYS_ACL_UPDATE_ENTRY = 0x805 // 2053 + SYS_ACL_DELETE_FD = 0x806 // 2054 + SYS_ACL_DELETE_FILE = 0x807 // 2055 + SYS_ACL_GET_FD = 0x808 // 2056 + SYS_ACL_GET_FILE = 0x809 // 2057 + SYS_ACL_SET_FD = 0x80A // 2058 + SYS_ACL_SET_FILE = 0x80B // 2059 + SYS_ACL_FROM_TEXT = 0x80C // 2060 + SYS_ACL_TO_TEXT = 0x80D // 2061 + SYS_ACL_SORT = 0x80E // 2062 + SYS___SHUTDOWN_REGISTRATION = 0x80F // 2063 + SYS___ERFL_B = 0x810 // 2064 + SYS___ERFCL_B = 0x811 // 2065 + SYS___LGAMMAL_B = 0x812 // 2066 + SYS___SETHOOKEVENTS = 0x813 // 2067 + SYS_IF_NAMETOINDEX = 0x814 // 2068 + SYS_IF_INDEXTONAME = 0x815 // 2069 + SYS_IF_NAMEINDEX = 0x816 // 2070 + SYS_IF_FREENAMEINDEX = 0x817 // 2071 + SYS_GETADDRINFO = 0x818 // 2072 + SYS_GETNAMEINFO = 0x819 // 2073 + SYS_FREEADDRINFO = 0x81A // 2074 + SYS_GAI_STRERROR = 0x81B // 2075 + SYS_REXEC_AF = 0x81C // 2076 + SYS___POE = 0x81D // 2077 + SYS___DYNALLOC_A = 0x81F // 2079 + SYS___DYNFREE_A = 0x820 // 2080 + SYS___RES_QUERY_A = 0x821 // 2081 + SYS___RES_SEARCH_A = 0x822 // 2082 + SYS___RES_QUERYDOMAIN_A = 0x823 // 2083 + SYS___RES_MKQUERY_A = 0x824 // 2084 + SYS___RES_SEND_A = 0x825 // 2085 + SYS___DN_EXPAND_A = 0x826 // 2086 + 
SYS___DN_SKIPNAME_A = 0x827 // 2087 + SYS___DN_COMP_A = 0x828 // 2088 + SYS___DN_FIND_A = 0x829 // 2089 + SYS___NLIST_A = 0x82A // 2090 + SYS_____TCGETCP_A = 0x82B // 2091 + SYS_____TCSETCP_A = 0x82C // 2092 + SYS_____W_PIOCTL_A = 0x82E // 2094 + SYS___INET_ADDR_A = 0x82F // 2095 + SYS___INET_NTOA_A = 0x830 // 2096 + SYS___INET_NETWORK_A = 0x831 // 2097 + SYS___ACCEPT_A = 0x832 // 2098 + SYS___ACCEPT_AND_RECV_A = 0x833 // 2099 + SYS___BIND_A = 0x834 // 2100 + SYS___CONNECT_A = 0x835 // 2101 + SYS___GETPEERNAME_A = 0x836 // 2102 + SYS___GETSOCKNAME_A = 0x837 // 2103 + SYS___RECVFROM_A = 0x838 // 2104 + SYS___SENDTO_A = 0x839 // 2105 + SYS___SENDMSG_A = 0x83A // 2106 + SYS___RECVMSG_A = 0x83B // 2107 + SYS_____LCHATTR_A = 0x83C // 2108 + SYS___CABEND = 0x83D // 2109 + SYS___LE_CIB_GET = 0x83E // 2110 + SYS___SET_LAA_FOR_JIT = 0x83F // 2111 + SYS___LCHATTR = 0x840 // 2112 + SYS___WRITEDOWN = 0x841 // 2113 + SYS_PTHREAD_MUTEX_INIT2 = 0x842 // 2114 + SYS___ACOSHF_B = 0x843 // 2115 + SYS___ACOSHL_B = 0x844 // 2116 + SYS___ASINHF_B = 0x845 // 2117 + SYS___ASINHL_B = 0x846 // 2118 + SYS___ATANHF_B = 0x847 // 2119 + SYS___ATANHL_B = 0x848 // 2120 + SYS___CBRTF_B = 0x849 // 2121 + SYS___CBRTL_B = 0x84A // 2122 + SYS___COPYSIGNF_B = 0x84B // 2123 + SYS___COPYSIGNL_B = 0x84C // 2124 + SYS___COTANF_B = 0x84D // 2125 + SYS___COTAN_B = 0x84E // 2126 + SYS___COTANL_B = 0x84F // 2127 + SYS___EXP2F_B = 0x850 // 2128 + SYS___EXP2L_B = 0x851 // 2129 + SYS___EXPM1F_B = 0x852 // 2130 + SYS___EXPM1L_B = 0x853 // 2131 + SYS___FDIMF_B = 0x854 // 2132 + SYS___FDIM_B = 0x855 // 2133 + SYS___FDIML_B = 0x856 // 2134 + SYS___HYPOTF_B = 0x857 // 2135 + SYS___HYPOTL_B = 0x858 // 2136 + SYS___LOG1PF_B = 0x859 // 2137 + SYS___LOG1PL_B = 0x85A // 2138 + SYS___LOG2F_B = 0x85B // 2139 + SYS___LOG2_B = 0x85C // 2140 + SYS___LOG2L_B = 0x85D // 2141 + SYS___REMAINDERF_B = 0x85E // 2142 + SYS___REMAINDERL_B = 0x85F // 2143 + SYS___REMQUOF_B = 0x860 // 2144 + SYS___REMQUO_B = 0x861 // 2145 + SYS___REMQUOL_B = 0x862 // 2146 + SYS___TGAMMAF_B = 0x863 // 2147 + SYS___TGAMMA_B = 0x864 // 2148 + SYS___TGAMMAL_B = 0x865 // 2149 + SYS___TRUNCF_B = 0x866 // 2150 + SYS___TRUNC_B = 0x867 // 2151 + SYS___TRUNCL_B = 0x868 // 2152 + SYS___LGAMMAF_B = 0x869 // 2153 + SYS___LROUNDF_B = 0x86A // 2154 + SYS___LROUND_B = 0x86B // 2155 + SYS___ERFF_B = 0x86C // 2156 + SYS___ERFCF_B = 0x86D // 2157 + SYS_ACOSHF = 0x86E // 2158 + SYS_ACOSHL = 0x86F // 2159 + SYS_ASINHF = 0x870 // 2160 + SYS_ASINHL = 0x871 // 2161 + SYS_ATANHF = 0x872 // 2162 + SYS_ATANHL = 0x873 // 2163 + SYS_CBRTF = 0x874 // 2164 + SYS_CBRTL = 0x875 // 2165 + SYS_COPYSIGNF = 0x876 // 2166 + SYS_CPYSIGNF = 0x876 // 2166 + SYS_COPYSIGNL = 0x877 // 2167 + SYS_CPYSIGNL = 0x877 // 2167 + SYS_COTANF = 0x878 // 2168 + SYS___COTANF = 0x878 // 2168 + SYS_COTAN = 0x879 // 2169 + SYS___COTAN = 0x879 // 2169 + SYS_COTANL = 0x87A // 2170 + SYS___COTANL = 0x87A // 2170 + SYS_EXP2F = 0x87B // 2171 + SYS_EXP2L = 0x87C // 2172 + SYS_EXPM1F = 0x87D // 2173 + SYS_EXPM1L = 0x87E // 2174 + SYS_FDIMF = 0x87F // 2175 + SYS_FDIM = 0x881 // 2177 + SYS_FDIML = 0x882 // 2178 + SYS_HYPOTF = 0x883 // 2179 + SYS_HYPOTL = 0x884 // 2180 + SYS_LOG1PF = 0x885 // 2181 + SYS_LOG1PL = 0x886 // 2182 + SYS_LOG2F = 0x887 // 2183 + SYS_LOG2 = 0x888 // 2184 + SYS_LOG2L = 0x889 // 2185 + SYS_REMAINDERF = 0x88A // 2186 + SYS_REMAINDF = 0x88A // 2186 + SYS_REMAINDERL = 0x88B // 2187 + SYS_REMAINDL = 0x88B // 2187 + SYS_REMQUOF = 0x88C // 2188 + SYS_REMQUO = 0x88D // 2189 + SYS_REMQUOL = 0x88E // 2190 + SYS_TGAMMAF = 0x88F // 
2191 + SYS_TGAMMA = 0x890 // 2192 + SYS_TGAMMAL = 0x891 // 2193 + SYS_TRUNCF = 0x892 // 2194 + SYS_TRUNC = 0x893 // 2195 + SYS_TRUNCL = 0x894 // 2196 + SYS_LGAMMAF = 0x895 // 2197 + SYS_LGAMMAL = 0x896 // 2198 + SYS_LROUNDF = 0x897 // 2199 + SYS_LROUND = 0x898 // 2200 + SYS_ERFF = 0x899 // 2201 + SYS_ERFL = 0x89A // 2202 + SYS_ERFCF = 0x89B // 2203 + SYS_ERFCL = 0x89C // 2204 + SYS___EXP2_B = 0x89D // 2205 + SYS_EXP2 = 0x89E // 2206 + SYS___FAR_JUMP = 0x89F // 2207 + SYS___TCGETATTR_A = 0x8A1 // 2209 + SYS___TCSETATTR_A = 0x8A2 // 2210 + SYS___SUPERKILL = 0x8A4 // 2212 + SYS___LE_CONDITION_TOKEN_BUILD = 0x8A5 // 2213 + SYS___LE_MSG_ADD_INSERT = 0x8A6 // 2214 + SYS___LE_MSG_GET = 0x8A7 // 2215 + SYS___LE_MSG_GET_AND_WRITE = 0x8A8 // 2216 + SYS___LE_MSG_WRITE = 0x8A9 // 2217 + SYS___ITOA = 0x8AA // 2218 + SYS___UTOA = 0x8AB // 2219 + SYS___LTOA = 0x8AC // 2220 + SYS___ULTOA = 0x8AD // 2221 + SYS___LLTOA = 0x8AE // 2222 + SYS___ULLTOA = 0x8AF // 2223 + SYS___ITOA_A = 0x8B0 // 2224 + SYS___UTOA_A = 0x8B1 // 2225 + SYS___LTOA_A = 0x8B2 // 2226 + SYS___ULTOA_A = 0x8B3 // 2227 + SYS___LLTOA_A = 0x8B4 // 2228 + SYS___ULLTOA_A = 0x8B5 // 2229 + SYS_____GETENV_A = 0x8C3 // 2243 + SYS___REXEC_A = 0x8C4 // 2244 + SYS___REXEC_AF_A = 0x8C5 // 2245 + SYS___GETUTXENT_A = 0x8C6 // 2246 + SYS___GETUTXID_A = 0x8C7 // 2247 + SYS___GETUTXLINE_A = 0x8C8 // 2248 + SYS___PUTUTXLINE_A = 0x8C9 // 2249 + SYS_____UTMPXNAME_A = 0x8CA // 2250 + SYS___PUTC_UNLOCKED_A = 0x8CB // 2251 + SYS___PUTCHAR_UNLOCKED_A = 0x8CC // 2252 + SYS___SNPRINTF_A = 0x8CD // 2253 + SYS___VSNPRINTF_A = 0x8CE // 2254 + SYS___DLOPEN_A = 0x8D0 // 2256 + SYS___DLSYM_A = 0x8D1 // 2257 + SYS___DLERROR_A = 0x8D2 // 2258 + SYS_FLOCKFILE = 0x8D3 // 2259 + SYS_FTRYLOCKFILE = 0x8D4 // 2260 + SYS_FUNLOCKFILE = 0x8D5 // 2261 + SYS_GETC_UNLOCKED = 0x8D6 // 2262 + SYS_GETCHAR_UNLOCKED = 0x8D7 // 2263 + SYS_PUTC_UNLOCKED = 0x8D8 // 2264 + SYS_PUTCHAR_UNLOCKED = 0x8D9 // 2265 + SYS_SNPRINTF = 0x8DA // 2266 + SYS_VSNPRINTF = 0x8DB // 2267 + SYS_DLOPEN = 0x8DD // 2269 + SYS_DLSYM = 0x8DE // 2270 + SYS_DLCLOSE = 0x8DF // 2271 + SYS_DLERROR = 0x8E0 // 2272 + SYS___SET_EXCEPTION_HANDLER = 0x8E2 // 2274 + SYS___RESET_EXCEPTION_HANDLER = 0x8E3 // 2275 + SYS___VHM_EVENT = 0x8E4 // 2276 + SYS___ABS_H = 0x8E6 // 2278 + SYS___ABSF_H = 0x8E7 // 2279 + SYS___ABSL_H = 0x8E8 // 2280 + SYS___ACOS_H = 0x8E9 // 2281 + SYS___ACOSF_H = 0x8EA // 2282 + SYS___ACOSL_H = 0x8EB // 2283 + SYS___ACOSH_H = 0x8EC // 2284 + SYS___ASIN_H = 0x8ED // 2285 + SYS___ASINF_H = 0x8EE // 2286 + SYS___ASINL_H = 0x8EF // 2287 + SYS___ASINH_H = 0x8F0 // 2288 + SYS___ATAN_H = 0x8F1 // 2289 + SYS___ATANF_H = 0x8F2 // 2290 + SYS___ATANL_H = 0x8F3 // 2291 + SYS___ATANH_H = 0x8F4 // 2292 + SYS___ATANHF_H = 0x8F5 // 2293 + SYS___ATANHL_H = 0x8F6 // 2294 + SYS___ATAN2_H = 0x8F7 // 2295 + SYS___ATAN2F_H = 0x8F8 // 2296 + SYS___ATAN2L_H = 0x8F9 // 2297 + SYS___CBRT_H = 0x8FA // 2298 + SYS___COPYSIGNF_H = 0x8FB // 2299 + SYS___COPYSIGNL_H = 0x8FC // 2300 + SYS___COS_H = 0x8FD // 2301 + SYS___COSF_H = 0x8FE // 2302 + SYS___COSL_H = 0x8FF // 2303 + SYS___COSHF_H = 0x900 // 2304 + SYS___COSHL_H = 0x901 // 2305 + SYS___COTAN_H = 0x902 // 2306 + SYS___COTANF_H = 0x903 // 2307 + SYS___COTANL_H = 0x904 // 2308 + SYS___ERF_H = 0x905 // 2309 + SYS___ERFF_H = 0x906 // 2310 + SYS___ERFL_H = 0x907 // 2311 + SYS___ERFC_H = 0x908 // 2312 + SYS___ERFCF_H = 0x909 // 2313 + SYS___ERFCL_H = 0x90A // 2314 + SYS___EXP_H = 0x90B // 2315 + SYS___EXPF_H = 0x90C // 2316 + SYS___EXPL_H = 0x90D // 2317 + SYS___EXPM1_H = 0x90E // 2318 
+ SYS___FDIM_H = 0x90F // 2319 + SYS___FDIMF_H = 0x910 // 2320 + SYS___FDIML_H = 0x911 // 2321 + SYS___FMOD_H = 0x912 // 2322 + SYS___FMODF_H = 0x913 // 2323 + SYS___FMODL_H = 0x914 // 2324 + SYS___GAMMA_H = 0x915 // 2325 + SYS___HYPOT_H = 0x916 // 2326 + SYS___ILOGB_H = 0x917 // 2327 + SYS___LGAMMA_H = 0x918 // 2328 + SYS___LGAMMAF_H = 0x919 // 2329 + SYS___LOG_H = 0x91A // 2330 + SYS___LOGF_H = 0x91B // 2331 + SYS___LOGL_H = 0x91C // 2332 + SYS___LOGB_H = 0x91D // 2333 + SYS___LOG2_H = 0x91E // 2334 + SYS___LOG2F_H = 0x91F // 2335 + SYS___LOG2L_H = 0x920 // 2336 + SYS___LOG1P_H = 0x921 // 2337 + SYS___LOG10_H = 0x922 // 2338 + SYS___LOG10F_H = 0x923 // 2339 + SYS___LOG10L_H = 0x924 // 2340 + SYS___LROUND_H = 0x925 // 2341 + SYS___LROUNDF_H = 0x926 // 2342 + SYS___NEXTAFTER_H = 0x927 // 2343 + SYS___POW_H = 0x928 // 2344 + SYS___POWF_H = 0x929 // 2345 + SYS___POWL_H = 0x92A // 2346 + SYS___REMAINDER_H = 0x92B // 2347 + SYS___RINT_H = 0x92C // 2348 + SYS___SCALB_H = 0x92D // 2349 + SYS___SIN_H = 0x92E // 2350 + SYS___SINF_H = 0x92F // 2351 + SYS___SINL_H = 0x930 // 2352 + SYS___SINH_H = 0x931 // 2353 + SYS___SINHF_H = 0x932 // 2354 + SYS___SINHL_H = 0x933 // 2355 + SYS___SQRT_H = 0x934 // 2356 + SYS___SQRTF_H = 0x935 // 2357 + SYS___SQRTL_H = 0x936 // 2358 + SYS___TAN_H = 0x937 // 2359 + SYS___TANF_H = 0x938 // 2360 + SYS___TANL_H = 0x939 // 2361 + SYS___TANH_H = 0x93A // 2362 + SYS___TANHF_H = 0x93B // 2363 + SYS___TANHL_H = 0x93C // 2364 + SYS___TGAMMA_H = 0x93D // 2365 + SYS___TGAMMAF_H = 0x93E // 2366 + SYS___TRUNC_H = 0x93F // 2367 + SYS___TRUNCF_H = 0x940 // 2368 + SYS___TRUNCL_H = 0x941 // 2369 + SYS___COSH_H = 0x942 // 2370 + SYS___LE_DEBUG_SET_RESUME_MCH = 0x943 // 2371 + SYS_VFSCANF = 0x944 // 2372 + SYS_VSCANF = 0x946 // 2374 + SYS_VSSCANF = 0x948 // 2376 + SYS_VFWSCANF = 0x94A // 2378 + SYS_VWSCANF = 0x94C // 2380 + SYS_VSWSCANF = 0x94E // 2382 + SYS_IMAXABS = 0x950 // 2384 + SYS_IMAXDIV = 0x951 // 2385 + SYS_STRTOIMAX = 0x952 // 2386 + SYS_STRTOUMAX = 0x953 // 2387 + SYS_WCSTOIMAX = 0x954 // 2388 + SYS_WCSTOUMAX = 0x955 // 2389 + SYS_ATOLL = 0x956 // 2390 + SYS_STRTOF = 0x957 // 2391 + SYS_STRTOLD = 0x958 // 2392 + SYS_WCSTOF = 0x959 // 2393 + SYS_WCSTOLD = 0x95A // 2394 + SYS_INET6_RTH_SPACE = 0x95B // 2395 + SYS_INET6_RTH_INIT = 0x95C // 2396 + SYS_INET6_RTH_ADD = 0x95D // 2397 + SYS_INET6_RTH_REVERSE = 0x95E // 2398 + SYS_INET6_RTH_SEGMENTS = 0x95F // 2399 + SYS_INET6_RTH_GETADDR = 0x960 // 2400 + SYS_INET6_OPT_INIT = 0x961 // 2401 + SYS_INET6_OPT_APPEND = 0x962 // 2402 + SYS_INET6_OPT_FINISH = 0x963 // 2403 + SYS_INET6_OPT_SET_VAL = 0x964 // 2404 + SYS_INET6_OPT_NEXT = 0x965 // 2405 + SYS_INET6_OPT_FIND = 0x966 // 2406 + SYS_INET6_OPT_GET_VAL = 0x967 // 2407 + SYS___POW_I = 0x987 // 2439 + SYS___POW_I_B = 0x988 // 2440 + SYS___POW_I_H = 0x989 // 2441 + SYS___POW_II = 0x98A // 2442 + SYS___POW_II_B = 0x98B // 2443 + SYS___POW_II_H = 0x98C // 2444 + SYS_CABS = 0x98E // 2446 + SYS___CABS_B = 0x98F // 2447 + SYS___CABS_H = 0x990 // 2448 + SYS_CABSF = 0x991 // 2449 + SYS___CABSF_B = 0x992 // 2450 + SYS___CABSF_H = 0x993 // 2451 + SYS_CABSL = 0x994 // 2452 + SYS___CABSL_B = 0x995 // 2453 + SYS___CABSL_H = 0x996 // 2454 + SYS_CACOS = 0x997 // 2455 + SYS___CACOS_B = 0x998 // 2456 + SYS___CACOS_H = 0x999 // 2457 + SYS_CACOSF = 0x99A // 2458 + SYS___CACOSF_B = 0x99B // 2459 + SYS___CACOSF_H = 0x99C // 2460 + SYS_CACOSL = 0x99D // 2461 + SYS___CACOSL_B = 0x99E // 2462 + SYS___CACOSL_H = 0x99F // 2463 + SYS_CACOSH = 0x9A0 // 2464 + SYS___CACOSH_B = 0x9A1 // 2465 + SYS___CACOSH_H = 
0x9A2 // 2466 + SYS_CACOSHF = 0x9A3 // 2467 + SYS___CACOSHF_B = 0x9A4 // 2468 + SYS___CACOSHF_H = 0x9A5 // 2469 + SYS_CACOSHL = 0x9A6 // 2470 + SYS___CACOSHL_B = 0x9A7 // 2471 + SYS___CACOSHL_H = 0x9A8 // 2472 + SYS_CARG = 0x9A9 // 2473 + SYS___CARG_B = 0x9AA // 2474 + SYS___CARG_H = 0x9AB // 2475 + SYS_CARGF = 0x9AC // 2476 + SYS___CARGF_B = 0x9AD // 2477 + SYS___CARGF_H = 0x9AE // 2478 + SYS_CARGL = 0x9AF // 2479 + SYS___CARGL_B = 0x9B0 // 2480 + SYS___CARGL_H = 0x9B1 // 2481 + SYS_CASIN = 0x9B2 // 2482 + SYS___CASIN_B = 0x9B3 // 2483 + SYS___CASIN_H = 0x9B4 // 2484 + SYS_CASINF = 0x9B5 // 2485 + SYS___CASINF_B = 0x9B6 // 2486 + SYS___CASINF_H = 0x9B7 // 2487 + SYS_CASINL = 0x9B8 // 2488 + SYS___CASINL_B = 0x9B9 // 2489 + SYS___CASINL_H = 0x9BA // 2490 + SYS_CASINH = 0x9BB // 2491 + SYS___CASINH_B = 0x9BC // 2492 + SYS___CASINH_H = 0x9BD // 2493 + SYS_CASINHF = 0x9BE // 2494 + SYS___CASINHF_B = 0x9BF // 2495 + SYS___CASINHF_H = 0x9C0 // 2496 + SYS_CASINHL = 0x9C1 // 2497 + SYS___CASINHL_B = 0x9C2 // 2498 + SYS___CASINHL_H = 0x9C3 // 2499 + SYS_CATAN = 0x9C4 // 2500 + SYS___CATAN_B = 0x9C5 // 2501 + SYS___CATAN_H = 0x9C6 // 2502 + SYS_CATANF = 0x9C7 // 2503 + SYS___CATANF_B = 0x9C8 // 2504 + SYS___CATANF_H = 0x9C9 // 2505 + SYS_CATANL = 0x9CA // 2506 + SYS___CATANL_B = 0x9CB // 2507 + SYS___CATANL_H = 0x9CC // 2508 + SYS_CATANH = 0x9CD // 2509 + SYS___CATANH_B = 0x9CE // 2510 + SYS___CATANH_H = 0x9CF // 2511 + SYS_CATANHF = 0x9D0 // 2512 + SYS___CATANHF_B = 0x9D1 // 2513 + SYS___CATANHF_H = 0x9D2 // 2514 + SYS_CATANHL = 0x9D3 // 2515 + SYS___CATANHL_B = 0x9D4 // 2516 + SYS___CATANHL_H = 0x9D5 // 2517 + SYS_CCOS = 0x9D6 // 2518 + SYS___CCOS_B = 0x9D7 // 2519 + SYS___CCOS_H = 0x9D8 // 2520 + SYS_CCOSF = 0x9D9 // 2521 + SYS___CCOSF_B = 0x9DA // 2522 + SYS___CCOSF_H = 0x9DB // 2523 + SYS_CCOSL = 0x9DC // 2524 + SYS___CCOSL_B = 0x9DD // 2525 + SYS___CCOSL_H = 0x9DE // 2526 + SYS_CCOSH = 0x9DF // 2527 + SYS___CCOSH_B = 0x9E0 // 2528 + SYS___CCOSH_H = 0x9E1 // 2529 + SYS_CCOSHF = 0x9E2 // 2530 + SYS___CCOSHF_B = 0x9E3 // 2531 + SYS___CCOSHF_H = 0x9E4 // 2532 + SYS_CCOSHL = 0x9E5 // 2533 + SYS___CCOSHL_B = 0x9E6 // 2534 + SYS___CCOSHL_H = 0x9E7 // 2535 + SYS_CEXP = 0x9E8 // 2536 + SYS___CEXP_B = 0x9E9 // 2537 + SYS___CEXP_H = 0x9EA // 2538 + SYS_CEXPF = 0x9EB // 2539 + SYS___CEXPF_B = 0x9EC // 2540 + SYS___CEXPF_H = 0x9ED // 2541 + SYS_CEXPL = 0x9EE // 2542 + SYS___CEXPL_B = 0x9EF // 2543 + SYS___CEXPL_H = 0x9F0 // 2544 + SYS_CIMAG = 0x9F1 // 2545 + SYS___CIMAG_B = 0x9F2 // 2546 + SYS___CIMAG_H = 0x9F3 // 2547 + SYS_CIMAGF = 0x9F4 // 2548 + SYS___CIMAGF_B = 0x9F5 // 2549 + SYS___CIMAGF_H = 0x9F6 // 2550 + SYS_CIMAGL = 0x9F7 // 2551 + SYS___CIMAGL_B = 0x9F8 // 2552 + SYS___CIMAGL_H = 0x9F9 // 2553 + SYS___CLOG = 0x9FA // 2554 + SYS___CLOG_B = 0x9FB // 2555 + SYS___CLOG_H = 0x9FC // 2556 + SYS_CLOGF = 0x9FD // 2557 + SYS___CLOGF_B = 0x9FE // 2558 + SYS___CLOGF_H = 0x9FF // 2559 + SYS_CLOGL = 0xA00 // 2560 + SYS___CLOGL_B = 0xA01 // 2561 + SYS___CLOGL_H = 0xA02 // 2562 + SYS_CONJ = 0xA03 // 2563 + SYS___CONJ_B = 0xA04 // 2564 + SYS___CONJ_H = 0xA05 // 2565 + SYS_CONJF = 0xA06 // 2566 + SYS___CONJF_B = 0xA07 // 2567 + SYS___CONJF_H = 0xA08 // 2568 + SYS_CONJL = 0xA09 // 2569 + SYS___CONJL_B = 0xA0A // 2570 + SYS___CONJL_H = 0xA0B // 2571 + SYS_CPOW = 0xA0C // 2572 + SYS___CPOW_B = 0xA0D // 2573 + SYS___CPOW_H = 0xA0E // 2574 + SYS_CPOWF = 0xA0F // 2575 + SYS___CPOWF_B = 0xA10 // 2576 + SYS___CPOWF_H = 0xA11 // 2577 + SYS_CPOWL = 0xA12 // 2578 + SYS___CPOWL_B = 0xA13 // 2579 + SYS___CPOWL_H = 0xA14 // 
2580 + SYS_CPROJ = 0xA15 // 2581 + SYS___CPROJ_B = 0xA16 // 2582 + SYS___CPROJ_H = 0xA17 // 2583 + SYS_CPROJF = 0xA18 // 2584 + SYS___CPROJF_B = 0xA19 // 2585 + SYS___CPROJF_H = 0xA1A // 2586 + SYS_CPROJL = 0xA1B // 2587 + SYS___CPROJL_B = 0xA1C // 2588 + SYS___CPROJL_H = 0xA1D // 2589 + SYS_CREAL = 0xA1E // 2590 + SYS___CREAL_B = 0xA1F // 2591 + SYS___CREAL_H = 0xA20 // 2592 + SYS_CREALF = 0xA21 // 2593 + SYS___CREALF_B = 0xA22 // 2594 + SYS___CREALF_H = 0xA23 // 2595 + SYS_CREALL = 0xA24 // 2596 + SYS___CREALL_B = 0xA25 // 2597 + SYS___CREALL_H = 0xA26 // 2598 + SYS_CSIN = 0xA27 // 2599 + SYS___CSIN_B = 0xA28 // 2600 + SYS___CSIN_H = 0xA29 // 2601 + SYS_CSINF = 0xA2A // 2602 + SYS___CSINF_B = 0xA2B // 2603 + SYS___CSINF_H = 0xA2C // 2604 + SYS_CSINL = 0xA2D // 2605 + SYS___CSINL_B = 0xA2E // 2606 + SYS___CSINL_H = 0xA2F // 2607 + SYS_CSINH = 0xA30 // 2608 + SYS___CSINH_B = 0xA31 // 2609 + SYS___CSINH_H = 0xA32 // 2610 + SYS_CSINHF = 0xA33 // 2611 + SYS___CSINHF_B = 0xA34 // 2612 + SYS___CSINHF_H = 0xA35 // 2613 + SYS_CSINHL = 0xA36 // 2614 + SYS___CSINHL_B = 0xA37 // 2615 + SYS___CSINHL_H = 0xA38 // 2616 + SYS_CSQRT = 0xA39 // 2617 + SYS___CSQRT_B = 0xA3A // 2618 + SYS___CSQRT_H = 0xA3B // 2619 + SYS_CSQRTF = 0xA3C // 2620 + SYS___CSQRTF_B = 0xA3D // 2621 + SYS___CSQRTF_H = 0xA3E // 2622 + SYS_CSQRTL = 0xA3F // 2623 + SYS___CSQRTL_B = 0xA40 // 2624 + SYS___CSQRTL_H = 0xA41 // 2625 + SYS_CTAN = 0xA42 // 2626 + SYS___CTAN_B = 0xA43 // 2627 + SYS___CTAN_H = 0xA44 // 2628 + SYS_CTANF = 0xA45 // 2629 + SYS___CTANF_B = 0xA46 // 2630 + SYS___CTANF_H = 0xA47 // 2631 + SYS_CTANL = 0xA48 // 2632 + SYS___CTANL_B = 0xA49 // 2633 + SYS___CTANL_H = 0xA4A // 2634 + SYS_CTANH = 0xA4B // 2635 + SYS___CTANH_B = 0xA4C // 2636 + SYS___CTANH_H = 0xA4D // 2637 + SYS_CTANHF = 0xA4E // 2638 + SYS___CTANHF_B = 0xA4F // 2639 + SYS___CTANHF_H = 0xA50 // 2640 + SYS_CTANHL = 0xA51 // 2641 + SYS___CTANHL_B = 0xA52 // 2642 + SYS___CTANHL_H = 0xA53 // 2643 + SYS___ACOSHF_H = 0xA54 // 2644 + SYS___ACOSHL_H = 0xA55 // 2645 + SYS___ASINHF_H = 0xA56 // 2646 + SYS___ASINHL_H = 0xA57 // 2647 + SYS___CBRTF_H = 0xA58 // 2648 + SYS___CBRTL_H = 0xA59 // 2649 + SYS___COPYSIGN_B = 0xA5A // 2650 + SYS___EXPM1F_H = 0xA5B // 2651 + SYS___EXPM1L_H = 0xA5C // 2652 + SYS___EXP2_H = 0xA5D // 2653 + SYS___EXP2F_H = 0xA5E // 2654 + SYS___EXP2L_H = 0xA5F // 2655 + SYS___LOG1PF_H = 0xA60 // 2656 + SYS___LOG1PL_H = 0xA61 // 2657 + SYS___LGAMMAL_H = 0xA62 // 2658 + SYS_FMA = 0xA63 // 2659 + SYS___FMA_B = 0xA64 // 2660 + SYS___FMA_H = 0xA65 // 2661 + SYS_FMAF = 0xA66 // 2662 + SYS___FMAF_B = 0xA67 // 2663 + SYS___FMAF_H = 0xA68 // 2664 + SYS_FMAL = 0xA69 // 2665 + SYS___FMAL_B = 0xA6A // 2666 + SYS___FMAL_H = 0xA6B // 2667 + SYS_FMAX = 0xA6C // 2668 + SYS___FMAX_B = 0xA6D // 2669 + SYS___FMAX_H = 0xA6E // 2670 + SYS_FMAXF = 0xA6F // 2671 + SYS___FMAXF_B = 0xA70 // 2672 + SYS___FMAXF_H = 0xA71 // 2673 + SYS_FMAXL = 0xA72 // 2674 + SYS___FMAXL_B = 0xA73 // 2675 + SYS___FMAXL_H = 0xA74 // 2676 + SYS_FMIN = 0xA75 // 2677 + SYS___FMIN_B = 0xA76 // 2678 + SYS___FMIN_H = 0xA77 // 2679 + SYS_FMINF = 0xA78 // 2680 + SYS___FMINF_B = 0xA79 // 2681 + SYS___FMINF_H = 0xA7A // 2682 + SYS_FMINL = 0xA7B // 2683 + SYS___FMINL_B = 0xA7C // 2684 + SYS___FMINL_H = 0xA7D // 2685 + SYS_ILOGBF = 0xA7E // 2686 + SYS___ILOGBF_B = 0xA7F // 2687 + SYS___ILOGBF_H = 0xA80 // 2688 + SYS_ILOGBL = 0xA81 // 2689 + SYS___ILOGBL_B = 0xA82 // 2690 + SYS___ILOGBL_H = 0xA83 // 2691 + SYS_LLRINT = 0xA84 // 2692 + SYS___LLRINT_B = 0xA85 // 2693 + SYS___LLRINT_H = 0xA86 // 2694 + 
SYS_LLRINTF = 0xA87 // 2695 + SYS___LLRINTF_B = 0xA88 // 2696 + SYS___LLRINTF_H = 0xA89 // 2697 + SYS_LLRINTL = 0xA8A // 2698 + SYS___LLRINTL_B = 0xA8B // 2699 + SYS___LLRINTL_H = 0xA8C // 2700 + SYS_LLROUND = 0xA8D // 2701 + SYS___LLROUND_B = 0xA8E // 2702 + SYS___LLROUND_H = 0xA8F // 2703 + SYS_LLROUNDF = 0xA90 // 2704 + SYS___LLROUNDF_B = 0xA91 // 2705 + SYS___LLROUNDF_H = 0xA92 // 2706 + SYS_LLROUNDL = 0xA93 // 2707 + SYS___LLROUNDL_B = 0xA94 // 2708 + SYS___LLROUNDL_H = 0xA95 // 2709 + SYS_LOGBF = 0xA96 // 2710 + SYS___LOGBF_B = 0xA97 // 2711 + SYS___LOGBF_H = 0xA98 // 2712 + SYS_LOGBL = 0xA99 // 2713 + SYS___LOGBL_B = 0xA9A // 2714 + SYS___LOGBL_H = 0xA9B // 2715 + SYS_LRINT = 0xA9C // 2716 + SYS___LRINT_B = 0xA9D // 2717 + SYS___LRINT_H = 0xA9E // 2718 + SYS_LRINTF = 0xA9F // 2719 + SYS___LRINTF_B = 0xAA0 // 2720 + SYS___LRINTF_H = 0xAA1 // 2721 + SYS_LRINTL = 0xAA2 // 2722 + SYS___LRINTL_B = 0xAA3 // 2723 + SYS___LRINTL_H = 0xAA4 // 2724 + SYS_LROUNDL = 0xAA5 // 2725 + SYS___LROUNDL_B = 0xAA6 // 2726 + SYS___LROUNDL_H = 0xAA7 // 2727 + SYS_NAN = 0xAA8 // 2728 + SYS___NAN_B = 0xAA9 // 2729 + SYS_NANF = 0xAAA // 2730 + SYS___NANF_B = 0xAAB // 2731 + SYS_NANL = 0xAAC // 2732 + SYS___NANL_B = 0xAAD // 2733 + SYS_NEARBYINT = 0xAAE // 2734 + SYS___NEARBYINT_B = 0xAAF // 2735 + SYS___NEARBYINT_H = 0xAB0 // 2736 + SYS_NEARBYINTF = 0xAB1 // 2737 + SYS___NEARBYINTF_B = 0xAB2 // 2738 + SYS___NEARBYINTF_H = 0xAB3 // 2739 + SYS_NEARBYINTL = 0xAB4 // 2740 + SYS___NEARBYINTL_B = 0xAB5 // 2741 + SYS___NEARBYINTL_H = 0xAB6 // 2742 + SYS_NEXTAFTERF = 0xAB7 // 2743 + SYS___NEXTAFTERF_B = 0xAB8 // 2744 + SYS___NEXTAFTERF_H = 0xAB9 // 2745 + SYS_NEXTAFTERL = 0xABA // 2746 + SYS___NEXTAFTERL_B = 0xABB // 2747 + SYS___NEXTAFTERL_H = 0xABC // 2748 + SYS_NEXTTOWARD = 0xABD // 2749 + SYS___NEXTTOWARD_B = 0xABE // 2750 + SYS___NEXTTOWARD_H = 0xABF // 2751 + SYS_NEXTTOWARDF = 0xAC0 // 2752 + SYS___NEXTTOWARDF_B = 0xAC1 // 2753 + SYS___NEXTTOWARDF_H = 0xAC2 // 2754 + SYS_NEXTTOWARDL = 0xAC3 // 2755 + SYS___NEXTTOWARDL_B = 0xAC4 // 2756 + SYS___NEXTTOWARDL_H = 0xAC5 // 2757 + SYS___REMAINDERF_H = 0xAC6 // 2758 + SYS___REMAINDERL_H = 0xAC7 // 2759 + SYS___REMQUO_H = 0xAC8 // 2760 + SYS___REMQUOF_H = 0xAC9 // 2761 + SYS___REMQUOL_H = 0xACA // 2762 + SYS_RINTF = 0xACB // 2763 + SYS___RINTF_B = 0xACC // 2764 + SYS_RINTL = 0xACD // 2765 + SYS___RINTL_B = 0xACE // 2766 + SYS_ROUND = 0xACF // 2767 + SYS___ROUND_B = 0xAD0 // 2768 + SYS___ROUND_H = 0xAD1 // 2769 + SYS_ROUNDF = 0xAD2 // 2770 + SYS___ROUNDF_B = 0xAD3 // 2771 + SYS___ROUNDF_H = 0xAD4 // 2772 + SYS_ROUNDL = 0xAD5 // 2773 + SYS___ROUNDL_B = 0xAD6 // 2774 + SYS___ROUNDL_H = 0xAD7 // 2775 + SYS_SCALBLN = 0xAD8 // 2776 + SYS___SCALBLN_B = 0xAD9 // 2777 + SYS___SCALBLN_H = 0xADA // 2778 + SYS_SCALBLNF = 0xADB // 2779 + SYS___SCALBLNF_B = 0xADC // 2780 + SYS___SCALBLNF_H = 0xADD // 2781 + SYS_SCALBLNL = 0xADE // 2782 + SYS___SCALBLNL_B = 0xADF // 2783 + SYS___SCALBLNL_H = 0xAE0 // 2784 + SYS___SCALBN_B = 0xAE1 // 2785 + SYS___SCALBN_H = 0xAE2 // 2786 + SYS_SCALBNF = 0xAE3 // 2787 + SYS___SCALBNF_B = 0xAE4 // 2788 + SYS___SCALBNF_H = 0xAE5 // 2789 + SYS_SCALBNL = 0xAE6 // 2790 + SYS___SCALBNL_B = 0xAE7 // 2791 + SYS___SCALBNL_H = 0xAE8 // 2792 + SYS___TGAMMAL_H = 0xAE9 // 2793 + SYS_FECLEAREXCEPT = 0xAEA // 2794 + SYS_FEGETENV = 0xAEB // 2795 + SYS_FEGETEXCEPTFLAG = 0xAEC // 2796 + SYS_FEGETROUND = 0xAED // 2797 + SYS_FEHOLDEXCEPT = 0xAEE // 2798 + SYS_FERAISEEXCEPT = 0xAEF // 2799 + SYS_FESETENV = 0xAF0 // 2800 + SYS_FESETEXCEPTFLAG = 0xAF1 // 2801 + 
SYS_FESETROUND = 0xAF2 // 2802 + SYS_FETESTEXCEPT = 0xAF3 // 2803 + SYS_FEUPDATEENV = 0xAF4 // 2804 + SYS___COPYSIGN_H = 0xAF5 // 2805 + SYS___HYPOTF_H = 0xAF6 // 2806 + SYS___HYPOTL_H = 0xAF7 // 2807 + SYS___CLASS = 0xAFA // 2810 + SYS___CLASS_B = 0xAFB // 2811 + SYS___CLASS_H = 0xAFC // 2812 + SYS___ISBLANK_A = 0xB2E // 2862 + SYS___ISWBLANK_A = 0xB2F // 2863 + SYS___LROUND_FIXUP = 0xB30 // 2864 + SYS___LROUNDF_FIXUP = 0xB31 // 2865 + SYS_SCHED_YIELD = 0xB32 // 2866 + SYS_STRERROR_R = 0xB33 // 2867 + SYS_UNSETENV = 0xB34 // 2868 + SYS___LGAMMA_H_C99 = 0xB38 // 2872 + SYS___LGAMMA_B_C99 = 0xB39 // 2873 + SYS___LGAMMA_R_C99 = 0xB3A // 2874 + SYS___FTELL2 = 0xB3B // 2875 + SYS___FSEEK2 = 0xB3C // 2876 + SYS___STATIC_REINIT = 0xB3D // 2877 + SYS_PTHREAD_ATTR_GETSTACK = 0xB3E // 2878 + SYS_PTHREAD_ATTR_SETSTACK = 0xB3F // 2879 + SYS___TGAMMA_H_C99 = 0xB78 // 2936 + SYS___TGAMMAF_H_C99 = 0xB79 // 2937 + SYS___LE_TRACEBACK = 0xB7A // 2938 + SYS___MUST_STAY_CLEAN = 0xB7C // 2940 + SYS___O_ENV = 0xB7D // 2941 + SYS_ACOSD32 = 0xB7E // 2942 + SYS_ACOSD64 = 0xB7F // 2943 + SYS_ACOSD128 = 0xB80 // 2944 + SYS_ACOSHD32 = 0xB81 // 2945 + SYS_ACOSHD64 = 0xB82 // 2946 + SYS_ACOSHD128 = 0xB83 // 2947 + SYS_ASIND32 = 0xB84 // 2948 + SYS_ASIND64 = 0xB85 // 2949 + SYS_ASIND128 = 0xB86 // 2950 + SYS_ASINHD32 = 0xB87 // 2951 + SYS_ASINHD64 = 0xB88 // 2952 + SYS_ASINHD128 = 0xB89 // 2953 + SYS_ATAND32 = 0xB8A // 2954 + SYS_ATAND64 = 0xB8B // 2955 + SYS_ATAND128 = 0xB8C // 2956 + SYS_ATAN2D32 = 0xB8D // 2957 + SYS_ATAN2D64 = 0xB8E // 2958 + SYS_ATAN2D128 = 0xB8F // 2959 + SYS_ATANHD32 = 0xB90 // 2960 + SYS_ATANHD64 = 0xB91 // 2961 + SYS_ATANHD128 = 0xB92 // 2962 + SYS_CBRTD32 = 0xB93 // 2963 + SYS_CBRTD64 = 0xB94 // 2964 + SYS_CBRTD128 = 0xB95 // 2965 + SYS_CEILD32 = 0xB96 // 2966 + SYS_CEILD64 = 0xB97 // 2967 + SYS_CEILD128 = 0xB98 // 2968 + SYS___CLASS2 = 0xB99 // 2969 + SYS___CLASS2_B = 0xB9A // 2970 + SYS___CLASS2_H = 0xB9B // 2971 + SYS_COPYSIGND32 = 0xB9C // 2972 + SYS_COPYSIGND64 = 0xB9D // 2973 + SYS_COPYSIGND128 = 0xB9E // 2974 + SYS_COSD32 = 0xB9F // 2975 + SYS_COSD64 = 0xBA0 // 2976 + SYS_COSD128 = 0xBA1 // 2977 + SYS_COSHD32 = 0xBA2 // 2978 + SYS_COSHD64 = 0xBA3 // 2979 + SYS_COSHD128 = 0xBA4 // 2980 + SYS_ERFD32 = 0xBA5 // 2981 + SYS_ERFD64 = 0xBA6 // 2982 + SYS_ERFD128 = 0xBA7 // 2983 + SYS_ERFCD32 = 0xBA8 // 2984 + SYS_ERFCD64 = 0xBA9 // 2985 + SYS_ERFCD128 = 0xBAA // 2986 + SYS_EXPD32 = 0xBAB // 2987 + SYS_EXPD64 = 0xBAC // 2988 + SYS_EXPD128 = 0xBAD // 2989 + SYS_EXP2D32 = 0xBAE // 2990 + SYS_EXP2D64 = 0xBAF // 2991 + SYS_EXP2D128 = 0xBB0 // 2992 + SYS_EXPM1D32 = 0xBB1 // 2993 + SYS_EXPM1D64 = 0xBB2 // 2994 + SYS_EXPM1D128 = 0xBB3 // 2995 + SYS_FABSD32 = 0xBB4 // 2996 + SYS_FABSD64 = 0xBB5 // 2997 + SYS_FABSD128 = 0xBB6 // 2998 + SYS_FDIMD32 = 0xBB7 // 2999 + SYS_FDIMD64 = 0xBB8 // 3000 + SYS_FDIMD128 = 0xBB9 // 3001 + SYS_FE_DEC_GETROUND = 0xBBA // 3002 + SYS_FE_DEC_SETROUND = 0xBBB // 3003 + SYS_FLOORD32 = 0xBBC // 3004 + SYS_FLOORD64 = 0xBBD // 3005 + SYS_FLOORD128 = 0xBBE // 3006 + SYS_FMAD32 = 0xBBF // 3007 + SYS_FMAD64 = 0xBC0 // 3008 + SYS_FMAD128 = 0xBC1 // 3009 + SYS_FMAXD32 = 0xBC2 // 3010 + SYS_FMAXD64 = 0xBC3 // 3011 + SYS_FMAXD128 = 0xBC4 // 3012 + SYS_FMIND32 = 0xBC5 // 3013 + SYS_FMIND64 = 0xBC6 // 3014 + SYS_FMIND128 = 0xBC7 // 3015 + SYS_FMODD32 = 0xBC8 // 3016 + SYS_FMODD64 = 0xBC9 // 3017 + SYS_FMODD128 = 0xBCA // 3018 + SYS___FP_CAST_D = 0xBCB // 3019 + SYS_FREXPD32 = 0xBCC // 3020 + SYS_FREXPD64 = 0xBCD // 3021 + SYS_FREXPD128 = 0xBCE // 3022 + SYS_HYPOTD32 = 0xBCF // 3023 + 
SYS_HYPOTD64 = 0xBD0 // 3024 + SYS_HYPOTD128 = 0xBD1 // 3025 + SYS_ILOGBD32 = 0xBD2 // 3026 + SYS_ILOGBD64 = 0xBD3 // 3027 + SYS_ILOGBD128 = 0xBD4 // 3028 + SYS_LDEXPD32 = 0xBD5 // 3029 + SYS_LDEXPD64 = 0xBD6 // 3030 + SYS_LDEXPD128 = 0xBD7 // 3031 + SYS_LGAMMAD32 = 0xBD8 // 3032 + SYS_LGAMMAD64 = 0xBD9 // 3033 + SYS_LGAMMAD128 = 0xBDA // 3034 + SYS_LLRINTD32 = 0xBDB // 3035 + SYS_LLRINTD64 = 0xBDC // 3036 + SYS_LLRINTD128 = 0xBDD // 3037 + SYS_LLROUNDD32 = 0xBDE // 3038 + SYS_LLROUNDD64 = 0xBDF // 3039 + SYS_LLROUNDD128 = 0xBE0 // 3040 + SYS_LOGD32 = 0xBE1 // 3041 + SYS_LOGD64 = 0xBE2 // 3042 + SYS_LOGD128 = 0xBE3 // 3043 + SYS_LOG10D32 = 0xBE4 // 3044 + SYS_LOG10D64 = 0xBE5 // 3045 + SYS_LOG10D128 = 0xBE6 // 3046 + SYS_LOG1PD32 = 0xBE7 // 3047 + SYS_LOG1PD64 = 0xBE8 // 3048 + SYS_LOG1PD128 = 0xBE9 // 3049 + SYS_LOG2D32 = 0xBEA // 3050 + SYS_LOG2D64 = 0xBEB // 3051 + SYS_LOG2D128 = 0xBEC // 3052 + SYS_LOGBD32 = 0xBED // 3053 + SYS_LOGBD64 = 0xBEE // 3054 + SYS_LOGBD128 = 0xBEF // 3055 + SYS_LRINTD32 = 0xBF0 // 3056 + SYS_LRINTD64 = 0xBF1 // 3057 + SYS_LRINTD128 = 0xBF2 // 3058 + SYS_LROUNDD32 = 0xBF3 // 3059 + SYS_LROUNDD64 = 0xBF4 // 3060 + SYS_LROUNDD128 = 0xBF5 // 3061 + SYS_MODFD32 = 0xBF6 // 3062 + SYS_MODFD64 = 0xBF7 // 3063 + SYS_MODFD128 = 0xBF8 // 3064 + SYS_NAND32 = 0xBF9 // 3065 + SYS_NAND64 = 0xBFA // 3066 + SYS_NAND128 = 0xBFB // 3067 + SYS_NEARBYINTD32 = 0xBFC // 3068 + SYS_NEARBYINTD64 = 0xBFD // 3069 + SYS_NEARBYINTD128 = 0xBFE // 3070 + SYS_NEXTAFTERD32 = 0xBFF // 3071 + SYS_NEXTAFTERD64 = 0xC00 // 3072 + SYS_NEXTAFTERD128 = 0xC01 // 3073 + SYS_NEXTTOWARDD32 = 0xC02 // 3074 + SYS_NEXTTOWARDD64 = 0xC03 // 3075 + SYS_NEXTTOWARDD128 = 0xC04 // 3076 + SYS_POWD32 = 0xC05 // 3077 + SYS_POWD64 = 0xC06 // 3078 + SYS_POWD128 = 0xC07 // 3079 + SYS_QUANTIZED32 = 0xC08 // 3080 + SYS_QUANTIZED64 = 0xC09 // 3081 + SYS_QUANTIZED128 = 0xC0A // 3082 + SYS_REMAINDERD32 = 0xC0B // 3083 + SYS_REMAINDERD64 = 0xC0C // 3084 + SYS_REMAINDERD128 = 0xC0D // 3085 + SYS___REMQUOD32 = 0xC0E // 3086 + SYS___REMQUOD64 = 0xC0F // 3087 + SYS___REMQUOD128 = 0xC10 // 3088 + SYS_RINTD32 = 0xC11 // 3089 + SYS_RINTD64 = 0xC12 // 3090 + SYS_RINTD128 = 0xC13 // 3091 + SYS_ROUNDD32 = 0xC14 // 3092 + SYS_ROUNDD64 = 0xC15 // 3093 + SYS_ROUNDD128 = 0xC16 // 3094 + SYS_SAMEQUANTUMD32 = 0xC17 // 3095 + SYS_SAMEQUANTUMD64 = 0xC18 // 3096 + SYS_SAMEQUANTUMD128 = 0xC19 // 3097 + SYS_SCALBLND32 = 0xC1A // 3098 + SYS_SCALBLND64 = 0xC1B // 3099 + SYS_SCALBLND128 = 0xC1C // 3100 + SYS_SCALBND32 = 0xC1D // 3101 + SYS_SCALBND64 = 0xC1E // 3102 + SYS_SCALBND128 = 0xC1F // 3103 + SYS_SIND32 = 0xC20 // 3104 + SYS_SIND64 = 0xC21 // 3105 + SYS_SIND128 = 0xC22 // 3106 + SYS_SINHD32 = 0xC23 // 3107 + SYS_SINHD64 = 0xC24 // 3108 + SYS_SINHD128 = 0xC25 // 3109 + SYS_SQRTD32 = 0xC26 // 3110 + SYS_SQRTD64 = 0xC27 // 3111 + SYS_SQRTD128 = 0xC28 // 3112 + SYS_STRTOD32 = 0xC29 // 3113 + SYS_STRTOD64 = 0xC2A // 3114 + SYS_STRTOD128 = 0xC2B // 3115 + SYS_TAND32 = 0xC2C // 3116 + SYS_TAND64 = 0xC2D // 3117 + SYS_TAND128 = 0xC2E // 3118 + SYS_TANHD32 = 0xC2F // 3119 + SYS_TANHD64 = 0xC30 // 3120 + SYS_TANHD128 = 0xC31 // 3121 + SYS_TGAMMAD32 = 0xC32 // 3122 + SYS_TGAMMAD64 = 0xC33 // 3123 + SYS_TGAMMAD128 = 0xC34 // 3124 + SYS_TRUNCD32 = 0xC3E // 3134 + SYS_TRUNCD64 = 0xC3F // 3135 + SYS_TRUNCD128 = 0xC40 // 3136 + SYS_WCSTOD32 = 0xC41 // 3137 + SYS_WCSTOD64 = 0xC42 // 3138 + SYS_WCSTOD128 = 0xC43 // 3139 + SYS___CODEPAGE_INFO = 0xC64 // 3172 + SYS_POSIX_OPENPT = 0xC66 // 3174 + SYS_PSELECT = 0xC67 // 3175 + SYS_SOCKATMARK = 0xC68 // 3176 + 
SYS_AIO_FSYNC = 0xC69 // 3177 + SYS_LIO_LISTIO = 0xC6A // 3178 + SYS___ATANPID32 = 0xC6B // 3179 + SYS___ATANPID64 = 0xC6C // 3180 + SYS___ATANPID128 = 0xC6D // 3181 + SYS___COSPID32 = 0xC6E // 3182 + SYS___COSPID64 = 0xC6F // 3183 + SYS___COSPID128 = 0xC70 // 3184 + SYS___SINPID32 = 0xC71 // 3185 + SYS___SINPID64 = 0xC72 // 3186 + SYS___SINPID128 = 0xC73 // 3187 + SYS_SETIPV4SOURCEFILTER = 0xC76 // 3190 + SYS_GETIPV4SOURCEFILTER = 0xC77 // 3191 + SYS_SETSOURCEFILTER = 0xC78 // 3192 + SYS_GETSOURCEFILTER = 0xC79 // 3193 + SYS_FWRITE_UNLOCKED = 0xC7A // 3194 + SYS_FREAD_UNLOCKED = 0xC7B // 3195 + SYS_FGETS_UNLOCKED = 0xC7C // 3196 + SYS_GETS_UNLOCKED = 0xC7D // 3197 + SYS_FPUTS_UNLOCKED = 0xC7E // 3198 + SYS_PUTS_UNLOCKED = 0xC7F // 3199 + SYS_FGETC_UNLOCKED = 0xC80 // 3200 + SYS_FPUTC_UNLOCKED = 0xC81 // 3201 + SYS_DLADDR = 0xC82 // 3202 + SYS_SHM_OPEN = 0xC8C // 3212 + SYS_SHM_UNLINK = 0xC8D // 3213 + SYS___CLASS2F = 0xC91 // 3217 + SYS___CLASS2L = 0xC92 // 3218 + SYS___CLASS2F_B = 0xC93 // 3219 + SYS___CLASS2F_H = 0xC94 // 3220 + SYS___CLASS2L_B = 0xC95 // 3221 + SYS___CLASS2L_H = 0xC96 // 3222 + SYS___CLASS2D32 = 0xC97 // 3223 + SYS___CLASS2D64 = 0xC98 // 3224 + SYS___CLASS2D128 = 0xC99 // 3225 + SYS___TOCSNAME2 = 0xC9A // 3226 + SYS___D1TOP = 0xC9B // 3227 + SYS___D2TOP = 0xC9C // 3228 + SYS___D4TOP = 0xC9D // 3229 + SYS___PTOD1 = 0xC9E // 3230 + SYS___PTOD2 = 0xC9F // 3231 + SYS___PTOD4 = 0xCA0 // 3232 + SYS_CLEARERR_UNLOCKED = 0xCA1 // 3233 + SYS_FDELREC_UNLOCKED = 0xCA2 // 3234 + SYS_FEOF_UNLOCKED = 0xCA3 // 3235 + SYS_FERROR_UNLOCKED = 0xCA4 // 3236 + SYS_FFLUSH_UNLOCKED = 0xCA5 // 3237 + SYS_FGETPOS_UNLOCKED = 0xCA6 // 3238 + SYS_FGETWC_UNLOCKED = 0xCA7 // 3239 + SYS_FGETWS_UNLOCKED = 0xCA8 // 3240 + SYS_FILENO_UNLOCKED = 0xCA9 // 3241 + SYS_FLDATA_UNLOCKED = 0xCAA // 3242 + SYS_FLOCATE_UNLOCKED = 0xCAB // 3243 + SYS_FPRINTF_UNLOCKED = 0xCAC // 3244 + SYS_FPUTWC_UNLOCKED = 0xCAD // 3245 + SYS_FPUTWS_UNLOCKED = 0xCAE // 3246 + SYS_FSCANF_UNLOCKED = 0xCAF // 3247 + SYS_FSEEK_UNLOCKED = 0xCB0 // 3248 + SYS_FSEEKO_UNLOCKED = 0xCB1 // 3249 + SYS_FSETPOS_UNLOCKED = 0xCB3 // 3251 + SYS_FTELL_UNLOCKED = 0xCB4 // 3252 + SYS_FTELLO_UNLOCKED = 0xCB5 // 3253 + SYS_FUPDATE_UNLOCKED = 0xCB7 // 3255 + SYS_FWIDE_UNLOCKED = 0xCB8 // 3256 + SYS_FWPRINTF_UNLOCKED = 0xCB9 // 3257 + SYS_FWSCANF_UNLOCKED = 0xCBA // 3258 + SYS_GETWC_UNLOCKED = 0xCBB // 3259 + SYS_GETWCHAR_UNLOCKED = 0xCBC // 3260 + SYS_PERROR_UNLOCKED = 0xCBD // 3261 + SYS_PRINTF_UNLOCKED = 0xCBE // 3262 + SYS_PUTWC_UNLOCKED = 0xCBF // 3263 + SYS_PUTWCHAR_UNLOCKED = 0xCC0 // 3264 + SYS_REWIND_UNLOCKED = 0xCC1 // 3265 + SYS_SCANF_UNLOCKED = 0xCC2 // 3266 + SYS_UNGETC_UNLOCKED = 0xCC3 // 3267 + SYS_UNGETWC_UNLOCKED = 0xCC4 // 3268 + SYS_VFPRINTF_UNLOCKED = 0xCC5 // 3269 + SYS_VFSCANF_UNLOCKED = 0xCC7 // 3271 + SYS_VFWPRINTF_UNLOCKED = 0xCC9 // 3273 + SYS_VFWSCANF_UNLOCKED = 0xCCB // 3275 + SYS_VPRINTF_UNLOCKED = 0xCCD // 3277 + SYS_VSCANF_UNLOCKED = 0xCCF // 3279 + SYS_VWPRINTF_UNLOCKED = 0xCD1 // 3281 + SYS_VWSCANF_UNLOCKED = 0xCD3 // 3283 + SYS_WPRINTF_UNLOCKED = 0xCD5 // 3285 + SYS_WSCANF_UNLOCKED = 0xCD6 // 3286 + SYS_ASCTIME64 = 0xCD7 // 3287 + SYS_ASCTIME64_R = 0xCD8 // 3288 + SYS_CTIME64 = 0xCD9 // 3289 + SYS_CTIME64_R = 0xCDA // 3290 + SYS_DIFFTIME64 = 0xCDB // 3291 + SYS_GMTIME64 = 0xCDC // 3292 + SYS_GMTIME64_R = 0xCDD // 3293 + SYS_LOCALTIME64 = 0xCDE // 3294 + SYS_LOCALTIME64_R = 0xCDF // 3295 + SYS_MKTIME64 = 0xCE0 // 3296 + SYS_TIME64 = 0xCE1 // 3297 + SYS___LOGIN_APPLID = 0xCE2 // 3298 + SYS___PASSWD_APPLID = 0xCE3 // 3299 
+ SYS_PTHREAD_SECURITY_APPLID_NP = 0xCE4 // 3300 + SYS___GETTHENT = 0xCE5 // 3301 + SYS_FREEIFADDRS = 0xCE6 // 3302 + SYS_GETIFADDRS = 0xCE7 // 3303 + SYS_POSIX_FALLOCATE = 0xCE8 // 3304 + SYS_POSIX_MEMALIGN = 0xCE9 // 3305 + SYS_SIZEOF_ALLOC = 0xCEA // 3306 + SYS_RESIZE_ALLOC = 0xCEB // 3307 + SYS_FREAD_NOUPDATE = 0xCEC // 3308 + SYS_FREAD_NOUPDATE_UNLOCKED = 0xCED // 3309 + SYS_FGETPOS64 = 0xCEE // 3310 + SYS_FSEEK64 = 0xCEF // 3311 + SYS_FSEEKO64 = 0xCF0 // 3312 + SYS_FSETPOS64 = 0xCF1 // 3313 + SYS_FTELL64 = 0xCF2 // 3314 + SYS_FTELLO64 = 0xCF3 // 3315 + SYS_FGETPOS64_UNLOCKED = 0xCF4 // 3316 + SYS_FSEEK64_UNLOCKED = 0xCF5 // 3317 + SYS_FSEEKO64_UNLOCKED = 0xCF6 // 3318 + SYS_FSETPOS64_UNLOCKED = 0xCF7 // 3319 + SYS_FTELL64_UNLOCKED = 0xCF8 // 3320 + SYS_FTELLO64_UNLOCKED = 0xCF9 // 3321 + SYS_FOPEN_UNLOCKED = 0xCFA // 3322 + SYS_FREOPEN_UNLOCKED = 0xCFB // 3323 + SYS_FDOPEN_UNLOCKED = 0xCFC // 3324 + SYS_TMPFILE_UNLOCKED = 0xCFD // 3325 + SYS___MOSERVICES = 0xD3D // 3389 + SYS___GETTOD = 0xD3E // 3390 + SYS_C16RTOMB = 0xD40 // 3392 + SYS_C32RTOMB = 0xD41 // 3393 + SYS_MBRTOC16 = 0xD42 // 3394 + SYS_MBRTOC32 = 0xD43 // 3395 + SYS_QUANTEXPD32 = 0xD44 // 3396 + SYS_QUANTEXPD64 = 0xD45 // 3397 + SYS_QUANTEXPD128 = 0xD46 // 3398 + SYS___LOCALE_CTL = 0xD47 // 3399 + SYS___SMF_RECORD2 = 0xD48 // 3400 + SYS_FOPEN64 = 0xD49 // 3401 + SYS_FOPEN64_UNLOCKED = 0xD4A // 3402 + SYS_FREOPEN64 = 0xD4B // 3403 + SYS_FREOPEN64_UNLOCKED = 0xD4C // 3404 + SYS_TMPFILE64 = 0xD4D // 3405 + SYS_TMPFILE64_UNLOCKED = 0xD4E // 3406 + SYS_GETDATE64 = 0xD4F // 3407 + SYS_GETTIMEOFDAY64 = 0xD50 // 3408 + SYS_BIND2ADDRSEL = 0xD59 // 3417 + SYS_INET6_IS_SRCADDR = 0xD5A // 3418 + SYS___GETGRGID1 = 0xD5B // 3419 + SYS___GETGRNAM1 = 0xD5C // 3420 + SYS___FBUFSIZE = 0xD60 // 3424 + SYS___FPENDING = 0xD61 // 3425 + SYS___FLBF = 0xD62 // 3426 + SYS___FREADABLE = 0xD63 // 3427 + SYS___FWRITABLE = 0xD64 // 3428 + SYS___FREADING = 0xD65 // 3429 + SYS___FWRITING = 0xD66 // 3430 + SYS___FSETLOCKING = 0xD67 // 3431 + SYS__FLUSHLBF = 0xD68 // 3432 + SYS___FPURGE = 0xD69 // 3433 + SYS___FREADAHEAD = 0xD6A // 3434 + SYS___FSETERR = 0xD6B // 3435 + SYS___FPENDING_UNLOCKED = 0xD6C // 3436 + SYS___FREADING_UNLOCKED = 0xD6D // 3437 + SYS___FWRITING_UNLOCKED = 0xD6E // 3438 + SYS__FLUSHLBF_UNLOCKED = 0xD6F // 3439 + SYS___FPURGE_UNLOCKED = 0xD70 // 3440 + SYS___FREADAHEAD_UNLOCKED = 0xD71 // 3441 + SYS___LE_CEEGTJS = 0xD72 // 3442 + SYS___LE_RECORD_DUMP = 0xD73 // 3443 + SYS_FSTAT64 = 0xD74 // 3444 + SYS_LSTAT64 = 0xD75 // 3445 + SYS_STAT64 = 0xD76 // 3446 + SYS___READDIR2_64 = 0xD77 // 3447 + SYS___OPEN_STAT64 = 0xD78 // 3448 + SYS_FTW64 = 0xD79 // 3449 + SYS_NFTW64 = 0xD7A // 3450 + SYS_UTIME64 = 0xD7B // 3451 + SYS_UTIMES64 = 0xD7C // 3452 + SYS___GETIPC64 = 0xD7D // 3453 + SYS_MSGCTL64 = 0xD7E // 3454 + SYS_SEMCTL64 = 0xD7F // 3455 + SYS_SHMCTL64 = 0xD80 // 3456 + SYS_MSGXRCV64 = 0xD81 // 3457 + SYS___MGXR64 = 0xD81 // 3457 + SYS_W_GETPSENT64 = 0xD82 // 3458 + SYS_PTHREAD_COND_TIMEDWAIT64 = 0xD83 // 3459 + SYS_FTIME64 = 0xD85 // 3461 + SYS_GETUTXENT64 = 0xD86 // 3462 + SYS_GETUTXID64 = 0xD87 // 3463 + SYS_GETUTXLINE64 = 0xD88 // 3464 + SYS_PUTUTXLINE64 = 0xD89 // 3465 + SYS_NEWLOCALE = 0xD8A // 3466 + SYS_FREELOCALE = 0xD8B // 3467 + SYS_USELOCALE = 0xD8C // 3468 + SYS_DUPLOCALE = 0xD8D // 3469 + SYS___CHATTR64 = 0xD9C // 3484 + SYS___LCHATTR64 = 0xD9D // 3485 + SYS___FCHATTR64 = 0xD9E // 3486 + SYS_____CHATTR64_A = 0xD9F // 3487 + SYS_____LCHATTR64_A = 0xDA0 // 3488 + SYS___LE_CEEUSGD = 0xDA1 // 3489 + SYS___LE_IFAM_CON = 0xDA2 // 
3490 + SYS___LE_IFAM_DSC = 0xDA3 // 3491 + SYS___LE_IFAM_GET = 0xDA4 // 3492 + SYS___LE_IFAM_QRY = 0xDA5 // 3493 + SYS_ALIGNED_ALLOC = 0xDA6 // 3494 + SYS_ACCEPT4 = 0xDA7 // 3495 + SYS___ACCEPT4_A = 0xDA8 // 3496 + SYS_COPYFILERANGE = 0xDA9 // 3497 + SYS_GETLINE = 0xDAA // 3498 + SYS___GETLINE_A = 0xDAB // 3499 + SYS_DIRFD = 0xDAC // 3500 + SYS_CLOCK_GETTIME = 0xDAD // 3501 + SYS_DUP3 = 0xDAE // 3502 + SYS_EPOLL_CREATE = 0xDAF // 3503 + SYS_EPOLL_CREATE1 = 0xDB0 // 3504 + SYS_EPOLL_CTL = 0xDB1 // 3505 + SYS_EPOLL_WAIT = 0xDB2 // 3506 + SYS_EPOLL_PWAIT = 0xDB3 // 3507 + SYS_EVENTFD = 0xDB4 // 3508 + SYS_STATFS = 0xDB5 // 3509 + SYS___STATFS_A = 0xDB6 // 3510 + SYS_FSTATFS = 0xDB7 // 3511 + SYS_INOTIFY_INIT = 0xDB8 // 3512 + SYS_INOTIFY_INIT1 = 0xDB9 // 3513 + SYS_INOTIFY_ADD_WATCH = 0xDBA // 3514 + SYS___INOTIFY_ADD_WATCH_A = 0xDBB // 3515 + SYS_INOTIFY_RM_WATCH = 0xDBC // 3516 + SYS_PIPE2 = 0xDBD // 3517 + SYS_PIVOT_ROOT = 0xDBE // 3518 + SYS___PIVOT_ROOT_A = 0xDBF // 3519 + SYS_PRCTL = 0xDC0 // 3520 + SYS_PRLIMIT = 0xDC1 // 3521 + SYS_SETHOSTNAME = 0xDC2 // 3522 + SYS___SETHOSTNAME_A = 0xDC3 // 3523 + SYS_SETRESUID = 0xDC4 // 3524 + SYS_SETRESGID = 0xDC5 // 3525 + SYS_PTHREAD_CONDATTR_GETCLOCK = 0xDC6 // 3526 + SYS_FLOCK = 0xDC7 // 3527 + SYS_FGETXATTR = 0xDC8 // 3528 + SYS___FGETXATTR_A = 0xDC9 // 3529 + SYS_FLISTXATTR = 0xDCA // 3530 + SYS___FLISTXATTR_A = 0xDCB // 3531 + SYS_FREMOVEXATTR = 0xDCC // 3532 + SYS___FREMOVEXATTR_A = 0xDCD // 3533 + SYS_FSETXATTR = 0xDCE // 3534 + SYS___FSETXATTR_A = 0xDCF // 3535 + SYS_GETXATTR = 0xDD0 // 3536 + SYS___GETXATTR_A = 0xDD1 // 3537 + SYS_LGETXATTR = 0xDD2 // 3538 + SYS___LGETXATTR_A = 0xDD3 // 3539 + SYS_LISTXATTR = 0xDD4 // 3540 + SYS___LISTXATTR_A = 0xDD5 // 3541 + SYS_LLISTXATTR = 0xDD6 // 3542 + SYS___LLISTXATTR_A = 0xDD7 // 3543 + SYS_LREMOVEXATTR = 0xDD8 // 3544 + SYS___LREMOVEXATTR_A = 0xDD9 // 3545 + SYS_LSETXATTR = 0xDDA // 3546 + SYS___LSETXATTR_A = 0xDDB // 3547 + SYS_REMOVEXATTR = 0xDDC // 3548 + SYS___REMOVEXATTR_A = 0xDDD // 3549 + SYS_SETXATTR = 0xDDE // 3550 + SYS___SETXATTR_A = 0xDDF // 3551 + SYS_FDATASYNC = 0xDE0 // 3552 + SYS_SYNCFS = 0xDE1 // 3553 + SYS_FUTIMES = 0xDE2 // 3554 + SYS_FUTIMESAT = 0xDE3 // 3555 + SYS___FUTIMESAT_A = 0xDE4 // 3556 + SYS_LUTIMES = 0xDE5 // 3557 + SYS___LUTIMES_A = 0xDE6 // 3558 + SYS_INET_ATON = 0xDE7 // 3559 + SYS_GETRANDOM = 0xDE8 // 3560 + SYS_GETTID = 0xDE9 // 3561 + SYS_MEMFD_CREATE = 0xDEA // 3562 + SYS___MEMFD_CREATE_A = 0xDEB // 3563 + SYS_FACCESSAT = 0xDEC // 3564 + SYS___FACCESSAT_A = 0xDED // 3565 + SYS_FCHMODAT = 0xDEE // 3566 + SYS___FCHMODAT_A = 0xDEF // 3567 + SYS_FCHOWNAT = 0xDF0 // 3568 + SYS___FCHOWNAT_A = 0xDF1 // 3569 + SYS_FSTATAT = 0xDF2 // 3570 + SYS___FSTATAT_A = 0xDF3 // 3571 + SYS_LINKAT = 0xDF4 // 3572 + SYS___LINKAT_A = 0xDF5 // 3573 + SYS_MKDIRAT = 0xDF6 // 3574 + SYS___MKDIRAT_A = 0xDF7 // 3575 + SYS_MKFIFOAT = 0xDF8 // 3576 + SYS___MKFIFOAT_A = 0xDF9 // 3577 + SYS_MKNODAT = 0xDFA // 3578 + SYS___MKNODAT_A = 0xDFB // 3579 + SYS_OPENAT = 0xDFC // 3580 + SYS___OPENAT_A = 0xDFD // 3581 + SYS_READLINKAT = 0xDFE // 3582 + SYS___READLINKAT_A = 0xDFF // 3583 + SYS_RENAMEAT = 0xE00 // 3584 + SYS___RENAMEAT_A = 0xE01 // 3585 + SYS_RENAMEAT2 = 0xE02 // 3586 + SYS___RENAMEAT2_A = 0xE03 // 3587 + SYS_SYMLINKAT = 0xE04 // 3588 + SYS___SYMLINKAT_A = 0xE05 // 3589 + SYS_UNLINKAT = 0xE06 // 3590 + SYS___UNLINKAT_A = 0xE07 // 3591 + SYS_SYSINFO = 0xE08 // 3592 + SYS_WAIT4 = 0xE0A // 3594 + SYS_CLONE = 0xE0B // 3595 + SYS_UNSHARE = 0xE0C // 3596 + SYS_SETNS = 0xE0D // 3597 + 
SYS_CAPGET = 0xE0E // 3598 + SYS_CAPSET = 0xE0F // 3599 + SYS_STRCHRNUL = 0xE10 // 3600 + SYS_PTHREAD_CONDATTR_SETCLOCK = 0xE12 // 3602 + SYS_OPEN_BY_HANDLE_AT = 0xE13 // 3603 + SYS___OPEN_BY_HANDLE_AT_A = 0xE14 // 3604 + SYS___INET_ATON_A = 0xE15 // 3605 + SYS_MOUNT1 = 0xE16 // 3606 + SYS___MOUNT1_A = 0xE17 // 3607 + SYS_UMOUNT1 = 0xE18 // 3608 + SYS___UMOUNT1_A = 0xE19 // 3609 + SYS_UMOUNT2 = 0xE1A // 3610 + SYS___UMOUNT2_A = 0xE1B // 3611 + SYS___PRCTL_A = 0xE1C // 3612 + SYS_LOCALTIME_R2 = 0xE1D // 3613 + SYS___LOCALTIME_R2_A = 0xE1E // 3614 + SYS_OPENAT2 = 0xE1F // 3615 + SYS___OPENAT2_A = 0xE20 // 3616 + SYS___LE_CEEMICT = 0xE21 // 3617 + SYS_GETENTROPY = 0xE22 // 3618 + SYS_NANOSLEEP = 0xE23 // 3619 + SYS_UTIMENSAT = 0xE24 // 3620 + SYS___UTIMENSAT_A = 0xE25 // 3621 + SYS_ASPRINTF = 0xE26 // 3622 + SYS___ASPRINTF_A = 0xE27 // 3623 + SYS_VASPRINTF = 0xE28 // 3624 + SYS___VASPRINTF_A = 0xE29 // 3625 + SYS_DPRINTF = 0xE2A // 3626 + SYS___DPRINTF_A = 0xE2B // 3627 + SYS_GETOPT_LONG = 0xE2C // 3628 + SYS___GETOPT_LONG_A = 0xE2D // 3629 + SYS_PSIGNAL = 0xE2E // 3630 + SYS___PSIGNAL_A = 0xE2F // 3631 + SYS_PSIGNAL_UNLOCKED = 0xE30 // 3632 + SYS___PSIGNAL_UNLOCKED_A = 0xE31 // 3633 + SYS_FSTATAT_O = 0xE32 // 3634 + SYS___FSTATAT_O_A = 0xE33 // 3635 + SYS_FSTATAT64 = 0xE34 // 3636 + SYS___FSTATAT64_A = 0xE35 // 3637 + SYS___CHATTRAT = 0xE36 // 3638 + SYS_____CHATTRAT_A = 0xE37 // 3639 + SYS___CHATTRAT64 = 0xE38 // 3640 + SYS_____CHATTRAT64_A = 0xE39 // 3641 + SYS_MADVISE = 0xE3A // 3642 + SYS___AUTHENTICATE = 0xE3B // 3643 + ) diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux.go b/vendor/golang.org/x/sys/unix/ztypes_linux.go index bbf8399..0036746 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux.go @@ -174,7 +174,8 @@ type FscryptPolicyV2 struct { Contents_encryption_mode uint8 Filenames_encryption_mode uint8 Flags uint8 - _ [4]uint8 + Log2_data_unit_size uint8 + _ [3]uint8 Master_key_identifier [16]uint8 } @@ -455,60 +456,63 @@ type Ucred struct { } type TCPInfo struct { - State uint8 - Ca_state uint8 - Retransmits uint8 - Probes uint8 - Backoff uint8 - Options uint8 - Rto uint32 - Ato uint32 - Snd_mss uint32 - Rcv_mss uint32 - Unacked uint32 - Sacked uint32 - Lost uint32 - Retrans uint32 - Fackets uint32 - Last_data_sent uint32 - Last_ack_sent uint32 - Last_data_recv uint32 - Last_ack_recv uint32 - Pmtu uint32 - Rcv_ssthresh uint32 - Rtt uint32 - Rttvar uint32 - Snd_ssthresh uint32 - Snd_cwnd uint32 - Advmss uint32 - Reordering uint32 - Rcv_rtt uint32 - Rcv_space uint32 - Total_retrans uint32 - Pacing_rate uint64 - Max_pacing_rate uint64 - Bytes_acked uint64 - Bytes_received uint64 - Segs_out uint32 - Segs_in uint32 - Notsent_bytes uint32 - Min_rtt uint32 - Data_segs_in uint32 - Data_segs_out uint32 - Delivery_rate uint64 - Busy_time uint64 - Rwnd_limited uint64 - Sndbuf_limited uint64 - Delivered uint32 - Delivered_ce uint32 - Bytes_sent uint64 - Bytes_retrans uint64 - Dsack_dups uint32 - Reord_seen uint32 - Rcv_ooopack uint32 - Snd_wnd uint32 - Rcv_wnd uint32 - Rehash uint32 + State uint8 + Ca_state uint8 + Retransmits uint8 + Probes uint8 + Backoff uint8 + Options uint8 + Rto uint32 + Ato uint32 + Snd_mss uint32 + Rcv_mss uint32 + Unacked uint32 + Sacked uint32 + Lost uint32 + Retrans uint32 + Fackets uint32 + Last_data_sent uint32 + Last_ack_sent uint32 + Last_data_recv uint32 + Last_ack_recv uint32 + Pmtu uint32 + Rcv_ssthresh uint32 + Rtt uint32 + Rttvar uint32 + Snd_ssthresh uint32 + Snd_cwnd uint32 + Advmss uint32 + 
Reordering uint32 + Rcv_rtt uint32 + Rcv_space uint32 + Total_retrans uint32 + Pacing_rate uint64 + Max_pacing_rate uint64 + Bytes_acked uint64 + Bytes_received uint64 + Segs_out uint32 + Segs_in uint32 + Notsent_bytes uint32 + Min_rtt uint32 + Data_segs_in uint32 + Data_segs_out uint32 + Delivery_rate uint64 + Busy_time uint64 + Rwnd_limited uint64 + Sndbuf_limited uint64 + Delivered uint32 + Delivered_ce uint32 + Bytes_sent uint64 + Bytes_retrans uint64 + Dsack_dups uint32 + Reord_seen uint32 + Rcv_ooopack uint32 + Snd_wnd uint32 + Rcv_wnd uint32 + Rehash uint32 + Total_rto uint16 + Total_rto_recoveries uint16 + Total_rto_time uint32 } type CanFilter struct { @@ -551,7 +555,7 @@ const ( SizeofIPv6MTUInfo = 0x20 SizeofICMPv6Filter = 0x20 SizeofUcred = 0xc - SizeofTCPInfo = 0xf0 + SizeofTCPInfo = 0xf8 SizeofCanFilter = 0x8 SizeofTCPRepairOpt = 0x8 ) @@ -832,6 +836,15 @@ const ( FSPICK_EMPTY_PATH = 0x8 FSMOUNT_CLOEXEC = 0x1 + + FSCONFIG_SET_FLAG = 0x0 + FSCONFIG_SET_STRING = 0x1 + FSCONFIG_SET_BINARY = 0x2 + FSCONFIG_SET_PATH = 0x3 + FSCONFIG_SET_PATH_EMPTY = 0x4 + FSCONFIG_SET_FD = 0x5 + FSCONFIG_CMD_CREATE = 0x6 + FSCONFIG_CMD_RECONFIGURE = 0x7 ) type OpenHow struct { @@ -1165,7 +1178,8 @@ const ( PERF_SAMPLE_BRANCH_TYPE_SAVE_SHIFT = 0x10 PERF_SAMPLE_BRANCH_HW_INDEX_SHIFT = 0x11 PERF_SAMPLE_BRANCH_PRIV_SAVE_SHIFT = 0x12 - PERF_SAMPLE_BRANCH_MAX_SHIFT = 0x13 + PERF_SAMPLE_BRANCH_COUNTERS = 0x80000 + PERF_SAMPLE_BRANCH_MAX_SHIFT = 0x14 PERF_SAMPLE_BRANCH_USER = 0x1 PERF_SAMPLE_BRANCH_KERNEL = 0x2 PERF_SAMPLE_BRANCH_HV = 0x4 @@ -1185,7 +1199,7 @@ const ( PERF_SAMPLE_BRANCH_TYPE_SAVE = 0x10000 PERF_SAMPLE_BRANCH_HW_INDEX = 0x20000 PERF_SAMPLE_BRANCH_PRIV_SAVE = 0x40000 - PERF_SAMPLE_BRANCH_MAX = 0x80000 + PERF_SAMPLE_BRANCH_MAX = 0x100000 PERF_BR_UNKNOWN = 0x0 PERF_BR_COND = 0x1 PERF_BR_UNCOND = 0x2 @@ -1546,6 +1560,7 @@ const ( IFLA_DEVLINK_PORT = 0x3e IFLA_GSO_IPV4_MAX_SIZE = 0x3f IFLA_GRO_IPV4_MAX_SIZE = 0x40 + IFLA_DPLL_PIN = 0x41 IFLA_PROTO_DOWN_REASON_UNSPEC = 0x0 IFLA_PROTO_DOWN_REASON_MASK = 0x1 IFLA_PROTO_DOWN_REASON_VALUE = 0x2 @@ -1561,6 +1576,7 @@ const ( IFLA_INET6_ICMP6STATS = 0x6 IFLA_INET6_TOKEN = 0x7 IFLA_INET6_ADDR_GEN_MODE = 0x8 + IFLA_INET6_RA_MTU = 0x9 IFLA_BR_UNSPEC = 0x0 IFLA_BR_FORWARD_DELAY = 0x1 IFLA_BR_HELLO_TIME = 0x2 @@ -1608,6 +1624,9 @@ const ( IFLA_BR_MCAST_MLD_VERSION = 0x2c IFLA_BR_VLAN_STATS_PER_PORT = 0x2d IFLA_BR_MULTI_BOOLOPT = 0x2e + IFLA_BR_MCAST_QUERIER_STATE = 0x2f + IFLA_BR_FDB_N_LEARNED = 0x30 + IFLA_BR_FDB_MAX_LEARNED = 0x31 IFLA_BRPORT_UNSPEC = 0x0 IFLA_BRPORT_STATE = 0x1 IFLA_BRPORT_PRIORITY = 0x2 @@ -1645,6 +1664,14 @@ const ( IFLA_BRPORT_BACKUP_PORT = 0x22 IFLA_BRPORT_MRP_RING_OPEN = 0x23 IFLA_BRPORT_MRP_IN_OPEN = 0x24 + IFLA_BRPORT_MCAST_EHT_HOSTS_LIMIT = 0x25 + IFLA_BRPORT_MCAST_EHT_HOSTS_CNT = 0x26 + IFLA_BRPORT_LOCKED = 0x27 + IFLA_BRPORT_MAB = 0x28 + IFLA_BRPORT_MCAST_N_GROUPS = 0x29 + IFLA_BRPORT_MCAST_MAX_GROUPS = 0x2a + IFLA_BRPORT_NEIGH_VLAN_SUPPRESS = 0x2b + IFLA_BRPORT_BACKUP_NHID = 0x2c IFLA_INFO_UNSPEC = 0x0 IFLA_INFO_KIND = 0x1 IFLA_INFO_DATA = 0x2 @@ -1666,6 +1693,9 @@ const ( IFLA_MACVLAN_MACADDR = 0x4 IFLA_MACVLAN_MACADDR_DATA = 0x5 IFLA_MACVLAN_MACADDR_COUNT = 0x6 + IFLA_MACVLAN_BC_QUEUE_LEN = 0x7 + IFLA_MACVLAN_BC_QUEUE_LEN_USED = 0x8 + IFLA_MACVLAN_BC_CUTOFF = 0x9 IFLA_VRF_UNSPEC = 0x0 IFLA_VRF_TABLE = 0x1 IFLA_VRF_PORT_UNSPEC = 0x0 @@ -1689,9 +1719,22 @@ const ( IFLA_XFRM_UNSPEC = 0x0 IFLA_XFRM_LINK = 0x1 IFLA_XFRM_IF_ID = 0x2 + IFLA_XFRM_COLLECT_METADATA = 0x3 IFLA_IPVLAN_UNSPEC = 0x0 IFLA_IPVLAN_MODE = 0x1 
IFLA_IPVLAN_FLAGS = 0x2 + NETKIT_NEXT = -0x1 + NETKIT_PASS = 0x0 + NETKIT_DROP = 0x2 + NETKIT_REDIRECT = 0x7 + NETKIT_L2 = 0x0 + NETKIT_L3 = 0x1 + IFLA_NETKIT_UNSPEC = 0x0 + IFLA_NETKIT_PEER_INFO = 0x1 + IFLA_NETKIT_PRIMARY = 0x2 + IFLA_NETKIT_POLICY = 0x3 + IFLA_NETKIT_PEER_POLICY = 0x4 + IFLA_NETKIT_MODE = 0x5 IFLA_VXLAN_UNSPEC = 0x0 IFLA_VXLAN_ID = 0x1 IFLA_VXLAN_GROUP = 0x2 @@ -1722,6 +1765,8 @@ const ( IFLA_VXLAN_GPE = 0x1b IFLA_VXLAN_TTL_INHERIT = 0x1c IFLA_VXLAN_DF = 0x1d + IFLA_VXLAN_VNIFILTER = 0x1e + IFLA_VXLAN_LOCALBYPASS = 0x1f IFLA_GENEVE_UNSPEC = 0x0 IFLA_GENEVE_ID = 0x1 IFLA_GENEVE_REMOTE = 0x2 @@ -1736,6 +1781,7 @@ const ( IFLA_GENEVE_LABEL = 0xb IFLA_GENEVE_TTL_INHERIT = 0xc IFLA_GENEVE_DF = 0xd + IFLA_GENEVE_INNER_PROTO_INHERIT = 0xe IFLA_BAREUDP_UNSPEC = 0x0 IFLA_BAREUDP_PORT = 0x1 IFLA_BAREUDP_ETHERTYPE = 0x2 @@ -1748,6 +1794,8 @@ const ( IFLA_GTP_FD1 = 0x2 IFLA_GTP_PDP_HASHSIZE = 0x3 IFLA_GTP_ROLE = 0x4 + IFLA_GTP_CREATE_SOCKETS = 0x5 + IFLA_GTP_RESTART_COUNT = 0x6 IFLA_BOND_UNSPEC = 0x0 IFLA_BOND_MODE = 0x1 IFLA_BOND_ACTIVE_SLAVE = 0x2 @@ -1777,6 +1825,9 @@ const ( IFLA_BOND_AD_ACTOR_SYSTEM = 0x1a IFLA_BOND_TLB_DYNAMIC_LB = 0x1b IFLA_BOND_PEER_NOTIF_DELAY = 0x1c + IFLA_BOND_AD_LACP_ACTIVE = 0x1d + IFLA_BOND_MISSED_MAX = 0x1e + IFLA_BOND_NS_IP6_TARGET = 0x1f IFLA_BOND_AD_INFO_UNSPEC = 0x0 IFLA_BOND_AD_INFO_AGGREGATOR = 0x1 IFLA_BOND_AD_INFO_NUM_PORTS = 0x2 @@ -1792,6 +1843,7 @@ const ( IFLA_BOND_SLAVE_AD_AGGREGATOR_ID = 0x6 IFLA_BOND_SLAVE_AD_ACTOR_OPER_PORT_STATE = 0x7 IFLA_BOND_SLAVE_AD_PARTNER_OPER_PORT_STATE = 0x8 + IFLA_BOND_SLAVE_PRIO = 0x9 IFLA_VF_INFO_UNSPEC = 0x0 IFLA_VF_INFO = 0x1 IFLA_VF_UNSPEC = 0x0 @@ -1850,8 +1902,16 @@ const ( IFLA_STATS_LINK_XSTATS_SLAVE = 0x3 IFLA_STATS_LINK_OFFLOAD_XSTATS = 0x4 IFLA_STATS_AF_SPEC = 0x5 + IFLA_STATS_GETSET_UNSPEC = 0x0 + IFLA_STATS_GET_FILTERS = 0x1 + IFLA_STATS_SET_OFFLOAD_XSTATS_L3_STATS = 0x2 IFLA_OFFLOAD_XSTATS_UNSPEC = 0x0 IFLA_OFFLOAD_XSTATS_CPU_HIT = 0x1 + IFLA_OFFLOAD_XSTATS_HW_S_INFO = 0x2 + IFLA_OFFLOAD_XSTATS_L3_STATS = 0x3 + IFLA_OFFLOAD_XSTATS_HW_S_INFO_UNSPEC = 0x0 + IFLA_OFFLOAD_XSTATS_HW_S_INFO_REQUEST = 0x1 + IFLA_OFFLOAD_XSTATS_HW_S_INFO_USED = 0x2 IFLA_XDP_UNSPEC = 0x0 IFLA_XDP_FD = 0x1 IFLA_XDP_ATTACHED = 0x2 @@ -1881,6 +1941,11 @@ const ( IFLA_RMNET_UNSPEC = 0x0 IFLA_RMNET_MUX_ID = 0x1 IFLA_RMNET_FLAGS = 0x2 + IFLA_MCTP_UNSPEC = 0x0 + IFLA_MCTP_NET = 0x1 + IFLA_DSA_UNSPEC = 0x0 + IFLA_DSA_CONDUIT = 0x1 + IFLA_DSA_MASTER = 0x1 ) const ( @@ -2417,6 +2482,15 @@ type XDPMmapOffsets struct { Cr XDPRingOffset } +type XDPUmemReg struct { + Addr uint64 + Len uint64 + Chunk_size uint32 + Headroom uint32 + Flags uint32 + Tx_metadata_len uint32 +} + type XDPStatistics struct { Rx_dropped uint64 Rx_invalid_descs uint64 @@ -2871,7 +2945,7 @@ const ( BPF_TCP_LISTEN = 0xa BPF_TCP_CLOSING = 0xb BPF_TCP_NEW_SYN_RECV = 0xc - BPF_TCP_MAX_STATES = 0xd + BPF_TCP_MAX_STATES = 0xe TCP_BPF_IW = 0x3e9 TCP_BPF_SNDCWND_CLAMP = 0x3ea TCP_BPF_DELACK_MAX = 0x3eb @@ -3147,7 +3221,7 @@ const ( DEVLINK_CMD_LINECARD_NEW = 0x50 DEVLINK_CMD_LINECARD_DEL = 0x51 DEVLINK_CMD_SELFTESTS_GET = 0x52 - DEVLINK_CMD_MAX = 0x53 + DEVLINK_CMD_MAX = 0x54 DEVLINK_PORT_TYPE_NOTSET = 0x0 DEVLINK_PORT_TYPE_AUTO = 0x1 DEVLINK_PORT_TYPE_ETH = 0x2 @@ -3399,7 +3473,7 @@ const ( DEVLINK_PORT_FN_ATTR_STATE = 0x2 DEVLINK_PORT_FN_ATTR_OPSTATE = 0x3 DEVLINK_PORT_FN_ATTR_CAPS = 0x4 - DEVLINK_PORT_FUNCTION_ATTR_MAX = 0x4 + DEVLINK_PORT_FUNCTION_ATTR_MAX = 0x5 ) type FsverityDigest struct { @@ -4183,7 +4257,8 @@ const ( ) type LandlockRulesetAttr struct { - 
Access_fs uint64 + Access_fs uint64 + Access_net uint64 } type LandlockPathBeneathAttr struct { @@ -4530,7 +4605,7 @@ const ( NL80211_ATTR_MAC_HINT = 0xc8 NL80211_ATTR_MAC_MASK = 0xd7 NL80211_ATTR_MAX_AP_ASSOC_STA = 0xca - NL80211_ATTR_MAX = 0x146 + NL80211_ATTR_MAX = 0x149 NL80211_ATTR_MAX_CRIT_PROT_DURATION = 0xb4 NL80211_ATTR_MAX_CSA_COUNTERS = 0xce NL80211_ATTR_MAX_MATCH_SETS = 0x85 @@ -4796,7 +4871,7 @@ const ( NL80211_BSS_FREQUENCY_OFFSET = 0x14 NL80211_BSS_INFORMATION_ELEMENTS = 0x6 NL80211_BSS_LAST_SEEN_BOOTTIME = 0xf - NL80211_BSS_MAX = 0x16 + NL80211_BSS_MAX = 0x18 NL80211_BSS_MLD_ADDR = 0x16 NL80211_BSS_MLO_LINK_ID = 0x15 NL80211_BSS_PAD = 0x10 @@ -4900,7 +4975,7 @@ const ( NL80211_CMD_LEAVE_IBSS = 0x2c NL80211_CMD_LEAVE_MESH = 0x45 NL80211_CMD_LEAVE_OCB = 0x6d - NL80211_CMD_MAX = 0x9a + NL80211_CMD_MAX = 0x9b NL80211_CMD_MICHAEL_MIC_FAILURE = 0x29 NL80211_CMD_MODIFY_LINK_STA = 0x97 NL80211_CMD_NAN_MATCH = 0x78 @@ -5134,7 +5209,7 @@ const ( NL80211_FREQUENCY_ATTR_GO_CONCURRENT = 0xf NL80211_FREQUENCY_ATTR_INDOOR_ONLY = 0xe NL80211_FREQUENCY_ATTR_IR_CONCURRENT = 0xf - NL80211_FREQUENCY_ATTR_MAX = 0x1b + NL80211_FREQUENCY_ATTR_MAX = 0x1f NL80211_FREQUENCY_ATTR_MAX_TX_POWER = 0x6 NL80211_FREQUENCY_ATTR_NO_10MHZ = 0x11 NL80211_FREQUENCY_ATTR_NO_160MHZ = 0xc @@ -5547,7 +5622,7 @@ const ( NL80211_REGDOM_TYPE_CUSTOM_WORLD = 0x2 NL80211_REGDOM_TYPE_INTERSECTION = 0x3 NL80211_REGDOM_TYPE_WORLD = 0x1 - NL80211_REG_RULE_ATTR_MAX = 0x7 + NL80211_REG_RULE_ATTR_MAX = 0x8 NL80211_REKEY_DATA_AKM = 0x4 NL80211_REKEY_DATA_KCK = 0x2 NL80211_REKEY_DATA_KEK = 0x1 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_386.go b/vendor/golang.org/x/sys/unix/ztypes_linux_386.go index 438a30a..fd402da 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_386.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_386.go @@ -477,14 +477,6 @@ const ( BLKPG = 0x1269 ) -type XDPUmemReg struct { - Addr uint64 - Len uint64 - Size uint32 - Headroom uint32 - Flags uint32 -} - type CryptoUserAlg struct { Name [64]int8 Driver_name [64]int8 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go index adceca3..eb7a5e1 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go @@ -492,15 +492,6 @@ const ( BLKPG = 0x1269 ) -type XDPUmemReg struct { - Addr uint64 - Len uint64 - Size uint32 - Headroom uint32 - Flags uint32 - _ [4]byte -} - type CryptoUserAlg struct { Name [64]int8 Driver_name [64]int8 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go b/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go index eeaa00a..d78ac10 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go @@ -470,15 +470,6 @@ const ( BLKPG = 0x1269 ) -type XDPUmemReg struct { - Addr uint64 - Len uint64 - Size uint32 - Headroom uint32 - Flags uint32 - _ [4]byte -} - type CryptoUserAlg struct { Name [64]uint8 Driver_name [64]uint8 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go index 6739aa9..cd06d47 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go @@ -471,15 +471,6 @@ const ( BLKPG = 0x1269 ) -type XDPUmemReg struct { - Addr uint64 - Len uint64 - Size uint32 - Headroom uint32 - Flags uint32 - _ [4]byte -} - type CryptoUserAlg struct { Name [64]int8 Driver_name [64]int8 diff --git 
a/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go index 9920ef6..2f28fe2 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go @@ -472,15 +472,6 @@ const ( BLKPG = 0x1269 ) -type XDPUmemReg struct { - Addr uint64 - Len uint64 - Size uint32 - Headroom uint32 - Flags uint32 - _ [4]byte -} - type CryptoUserAlg struct { Name [64]int8 Driver_name [64]int8 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go index 2923b79..71d6cac 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go @@ -476,15 +476,6 @@ const ( BLKPG = 0x20001269 ) -type XDPUmemReg struct { - Addr uint64 - Len uint64 - Size uint32 - Headroom uint32 - Flags uint32 - _ [4]byte -} - type CryptoUserAlg struct { Name [64]int8 Driver_name [64]int8 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go index ce2750e..8596d45 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go @@ -474,15 +474,6 @@ const ( BLKPG = 0x20001269 ) -type XDPUmemReg struct { - Addr uint64 - Len uint64 - Size uint32 - Headroom uint32 - Flags uint32 - _ [4]byte -} - type CryptoUserAlg struct { Name [64]int8 Driver_name [64]int8 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go index 3038811..cd60ea1 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go @@ -474,15 +474,6 @@ const ( BLKPG = 0x20001269 ) -type XDPUmemReg struct { - Addr uint64 - Len uint64 - Size uint32 - Headroom uint32 - Flags uint32 - _ [4]byte -} - type CryptoUserAlg struct { Name [64]int8 Driver_name [64]int8 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go index efc6fed..b0ae420 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go @@ -476,15 +476,6 @@ const ( BLKPG = 0x20001269 ) -type XDPUmemReg struct { - Addr uint64 - Len uint64 - Size uint32 - Headroom uint32 - Flags uint32 - _ [4]byte -} - type CryptoUserAlg struct { Name [64]int8 Driver_name [64]int8 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go index 9a654b7..8359728 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go @@ -482,15 +482,6 @@ const ( BLKPG = 0x20001269 ) -type XDPUmemReg struct { - Addr uint64 - Len uint64 - Size uint32 - Headroom uint32 - Flags uint32 - _ [4]byte -} - type CryptoUserAlg struct { Name [64]uint8 Driver_name [64]uint8 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go index 40d358e..69eb6a5 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go @@ -481,15 +481,6 @@ const ( BLKPG = 0x20001269 ) -type XDPUmemReg struct { - Addr uint64 - Len uint64 - Size uint32 - Headroom uint32 - Flags uint32 - _ [4]byte -} - type CryptoUserAlg struct { Name [64]uint8 Driver_name [64]uint8 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go 
b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go index 148c6ce..5f583cb 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go @@ -481,15 +481,6 @@ const ( BLKPG = 0x20001269 ) -type XDPUmemReg struct { - Addr uint64 - Len uint64 - Size uint32 - Headroom uint32 - Flags uint32 - _ [4]byte -} - type CryptoUserAlg struct { Name [64]uint8 Driver_name [64]uint8 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go index 72ba815..15adc04 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go @@ -499,15 +499,6 @@ const ( BLKPG = 0x1269 ) -type XDPUmemReg struct { - Addr uint64 - Len uint64 - Size uint32 - Headroom uint32 - Flags uint32 - _ [4]byte -} - type CryptoUserAlg struct { Name [64]uint8 Driver_name [64]uint8 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go b/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go index 71e7655..cf3ce90 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go @@ -495,15 +495,6 @@ const ( BLKPG = 0x1269 ) -type XDPUmemReg struct { - Addr uint64 - Len uint64 - Size uint32 - Headroom uint32 - Flags uint32 - _ [4]byte -} - type CryptoUserAlg struct { Name [64]int8 Driver_name [64]int8 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go index 4abbdb9..590b567 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go @@ -476,15 +476,6 @@ const ( BLKPG = 0x20001269 ) -type XDPUmemReg struct { - Addr uint64 - Len uint64 - Size uint32 - Headroom uint32 - Flags uint32 - _ [4]byte -} - type CryptoUserAlg struct { Name [64]int8 Driver_name [64]int8 diff --git a/vendor/golang.org/x/sys/unix/ztypes_zos_s390x.go b/vendor/golang.org/x/sys/unix/ztypes_zos_s390x.go index 54f31be..d9a13af 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_zos_s390x.go +++ b/vendor/golang.org/x/sys/unix/ztypes_zos_s390x.go @@ -25,10 +25,13 @@ const ( SizeofIPv6Mreq = 20 SizeofICMPv6Filter = 32 SizeofIPv6MTUInfo = 32 + SizeofInet4Pktinfo = 8 + SizeofInet6Pktinfo = 20 SizeofLinger = 8 SizeofSockaddrInet4 = 16 SizeofSockaddrInet6 = 28 SizeofTCPInfo = 0x68 + SizeofUcred = 12 ) type ( @@ -69,12 +72,17 @@ type Utimbuf struct { } type Utsname struct { - Sysname [65]byte - Nodename [65]byte - Release [65]byte - Version [65]byte - Machine [65]byte - Domainname [65]byte + Sysname [16]byte + Nodename [32]byte + Release [8]byte + Version [8]byte + Machine [16]byte +} + +type Ucred struct { + Pid int32 + Uid uint32 + Gid uint32 } type RawSockaddrInet4 struct { @@ -325,7 +333,7 @@ type Statvfs_t struct { } type Statfs_t struct { - Type uint32 + Type uint64 Bsize uint64 Blocks uint64 Bfree uint64 @@ -336,6 +344,7 @@ type Statfs_t struct { Namelen uint64 Frsize uint64 Flags uint64 + _ [4]uint64 } type direntLE struct { @@ -412,3 +421,126 @@ type W_Mntent struct { Quiesceowner [8]byte _ [38]byte } + +type EpollEvent struct { + Events uint32 + _ int32 + Fd int32 + Pad int32 +} + +type InotifyEvent struct { + Wd int32 + Mask uint32 + Cookie uint32 + Len uint32 + Name string +} + +const ( + SizeofInotifyEvent = 0x10 +) + +type ConsMsg2 struct { + Cm2Format uint16 + Cm2R1 uint16 + Cm2Msglength uint32 + Cm2Msg *byte + Cm2R2 [4]byte + Cm2R3 [4]byte + Cm2Routcde *uint32 + Cm2Descr *uint32 + Cm2Msgflag 
uint32 + Cm2Token uint32 + Cm2Msgid *uint32 + Cm2R4 [4]byte + Cm2DomToken uint32 + Cm2DomMsgid *uint32 + Cm2ModCartptr *byte + Cm2ModConsidptr *byte + Cm2MsgCart [8]byte + Cm2MsgConsid [4]byte + Cm2R5 [12]byte +} + +const ( + CC_modify = 1 + CC_stop = 2 + CONSOLE_FORMAT_2 = 2 + CONSOLE_FORMAT_3 = 3 + CONSOLE_HRDCPY = 0x80000000 +) + +type OpenHow struct { + Flags uint64 + Mode uint64 + Resolve uint64 +} + +const SizeofOpenHow = 0x18 + +const ( + RESOLVE_CACHED = 0x20 + RESOLVE_BENEATH = 0x8 + RESOLVE_IN_ROOT = 0x10 + RESOLVE_NO_MAGICLINKS = 0x2 + RESOLVE_NO_SYMLINKS = 0x4 + RESOLVE_NO_XDEV = 0x1 +) + +type Siginfo struct { + Signo int32 + Errno int32 + Code int32 + Pid int32 + Uid uint32 + _ [44]byte +} + +type SysvIpcPerm struct { + Uid uint32 + Gid uint32 + Cuid uint32 + Cgid uint32 + Mode int32 +} + +type SysvShmDesc struct { + Perm SysvIpcPerm + _ [4]byte + Lpid int32 + Cpid int32 + Nattch uint32 + _ [4]byte + _ [4]byte + _ [4]byte + _ int32 + _ uint8 + _ uint8 + _ uint16 + _ *byte + Segsz uint64 + Atime Time_t + Dtime Time_t + Ctime Time_t +} + +type SysvShmDesc64 struct { + Perm SysvIpcPerm + _ [4]byte + Lpid int32 + Cpid int32 + Nattch uint32 + _ [4]byte + _ [4]byte + _ [4]byte + _ int32 + _ byte + _ uint8 + _ uint16 + _ *byte + Segsz uint64 + Atime int64 + Dtime int64 + Ctime int64 +} diff --git a/vendor/golang.org/x/sys/windows/aliases.go b/vendor/golang.org/x/sys/windows/aliases.go index ce2d713..16f9056 100644 --- a/vendor/golang.org/x/sys/windows/aliases.go +++ b/vendor/golang.org/x/sys/windows/aliases.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build windows && go1.9 +//go:build windows package windows diff --git a/vendor/golang.org/x/sys/windows/empty.s b/vendor/golang.org/x/sys/windows/empty.s deleted file mode 100644 index ba64cac..0000000 --- a/vendor/golang.org/x/sys/windows/empty.s +++ /dev/null @@ -1,8 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.12 - -// This file is here to allow bodyless functions with go:linkname for Go 1.11 -// and earlier (see https://golang.org/issue/23311). 
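The TCPInfo growth a few hunks above (the new Total_rto, Total_rto_recoveries and Total_rto_time fields, with SizeofTCPInfo moving from 0xf0 to 0xf8) only matters to callers that fetch TCP_INFO themselves. A minimal sketch of such a caller, assuming golang.org/x/sys/unix on Linux and a placeholder endpoint; older kernels simply leave the new counters at zero:

package main

import (
	"fmt"
	"log"
	"net"

	"golang.org/x/sys/unix"
)

func main() {
	// Placeholder endpoint; any established TCP connection would do.
	conn, err := net.Dial("tcp", "example.com:80")
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	raw, err := conn.(*net.TCPConn).SyscallConn()
	if err != nil {
		log.Fatal(err)
	}
	// Read TCP_INFO on the underlying socket descriptor.
	ctrlErr := raw.Control(func(fd uintptr) {
		info, err := unix.GetsockoptTCPInfo(int(fd), unix.IPPROTO_TCP, unix.TCP_INFO)
		if err != nil {
			log.Fatal(err)
		}
		// Total_rto is one of the fields added in this update.
		fmt.Println("rtt(us):", info.Rtt, "total_rto:", info.Total_rto)
	})
	if ctrlErr != nil {
		log.Fatal(ctrlErr)
	}
}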
diff --git a/vendor/golang.org/x/sys/windows/env_windows.go b/vendor/golang.org/x/sys/windows/env_windows.go index b8ad192..d4577a4 100644 --- a/vendor/golang.org/x/sys/windows/env_windows.go +++ b/vendor/golang.org/x/sys/windows/env_windows.go @@ -37,14 +37,17 @@ func (token Token) Environ(inheritExisting bool) (env []string, err error) { return nil, err } defer DestroyEnvironmentBlock(block) - blockp := unsafe.Pointer(block) - for { - entry := UTF16PtrToString((*uint16)(blockp)) - if len(entry) == 0 { - break + size := unsafe.Sizeof(*block) + for *block != 0 { + // find NUL terminator + end := unsafe.Pointer(block) + for *(*uint16)(end) != 0 { + end = unsafe.Add(end, size) } - env = append(env, entry) - blockp = unsafe.Add(blockp, 2*(len(entry)+1)) + + entry := unsafe.Slice(block, (uintptr(end)-uintptr(unsafe.Pointer(block)))/size) + env = append(env, UTF16ToString(entry)) + block = (*uint16)(unsafe.Add(end, size)) } return env, nil } diff --git a/vendor/golang.org/x/sys/windows/syscall_windows.go b/vendor/golang.org/x/sys/windows/syscall_windows.go index 47dc579..6525c62 100644 --- a/vendor/golang.org/x/sys/windows/syscall_windows.go +++ b/vendor/golang.org/x/sys/windows/syscall_windows.go @@ -125,8 +125,7 @@ func UTF16PtrToString(p *uint16) string { for ptr := unsafe.Pointer(p); *(*uint16)(ptr) != 0; n++ { ptr = unsafe.Pointer(uintptr(ptr) + unsafe.Sizeof(*p)) } - - return string(utf16.Decode(unsafe.Slice(p, n))) + return UTF16ToString(unsafe.Slice(p, n)) } func Getpagesize() int { return 4096 } @@ -166,6 +165,7 @@ func NewCallbackCDecl(fn interface{}) uintptr { //sys CreateFile(name *uint16, access uint32, mode uint32, sa *SecurityAttributes, createmode uint32, attrs uint32, templatefile Handle) (handle Handle, err error) [failretval==InvalidHandle] = CreateFileW //sys CreateNamedPipe(name *uint16, flags uint32, pipeMode uint32, maxInstances uint32, outSize uint32, inSize uint32, defaultTimeout uint32, sa *SecurityAttributes) (handle Handle, err error) [failretval==InvalidHandle] = CreateNamedPipeW //sys ConnectNamedPipe(pipe Handle, overlapped *Overlapped) (err error) +//sys DisconnectNamedPipe(pipe Handle) (err error) //sys GetNamedPipeInfo(pipe Handle, flags *uint32, outSize *uint32, inSize *uint32, maxInstances *uint32) (err error) //sys GetNamedPipeHandleState(pipe Handle, state *uint32, curInstances *uint32, maxCollectionCount *uint32, collectDataTimeout *uint32, userName *uint16, maxUserNameSize uint32) (err error) = GetNamedPipeHandleStateW //sys SetNamedPipeHandleState(pipe Handle, state *uint32, maxCollectionCount *uint32, collectDataTimeout *uint32) (err error) = SetNamedPipeHandleState @@ -194,6 +194,7 @@ func NewCallbackCDecl(fn interface{}) uintptr { //sys GetComputerName(buf *uint16, n *uint32) (err error) = GetComputerNameW //sys GetComputerNameEx(nametype uint32, buf *uint16, n *uint32) (err error) = GetComputerNameExW //sys SetEndOfFile(handle Handle) (err error) +//sys SetFileValidData(handle Handle, validDataLength int64) (err error) //sys GetSystemTimeAsFileTime(time *Filetime) //sys GetSystemTimePreciseAsFileTime(time *Filetime) //sys GetTimeZoneInformation(tzi *Timezoneinformation) (rc uint32, err error) [failretval==0xffffffff] @@ -348,8 +349,19 @@ func NewCallbackCDecl(fn interface{}) uintptr { //sys SetProcessPriorityBoost(process Handle, disable bool) (err error) = kernel32.SetProcessPriorityBoost //sys GetProcessWorkingSetSizeEx(hProcess Handle, lpMinimumWorkingSetSize *uintptr, lpMaximumWorkingSetSize *uintptr, flags *uint32) //sys 
SetProcessWorkingSetSizeEx(hProcess Handle, dwMinimumWorkingSetSize uintptr, dwMaximumWorkingSetSize uintptr, flags uint32) (err error) +//sys ClearCommBreak(handle Handle) (err error) +//sys ClearCommError(handle Handle, lpErrors *uint32, lpStat *ComStat) (err error) +//sys EscapeCommFunction(handle Handle, dwFunc uint32) (err error) +//sys GetCommState(handle Handle, lpDCB *DCB) (err error) +//sys GetCommModemStatus(handle Handle, lpModemStat *uint32) (err error) //sys GetCommTimeouts(handle Handle, timeouts *CommTimeouts) (err error) +//sys PurgeComm(handle Handle, dwFlags uint32) (err error) +//sys SetCommBreak(handle Handle) (err error) +//sys SetCommMask(handle Handle, dwEvtMask uint32) (err error) +//sys SetCommState(handle Handle, lpDCB *DCB) (err error) //sys SetCommTimeouts(handle Handle, timeouts *CommTimeouts) (err error) +//sys SetupComm(handle Handle, dwInQueue uint32, dwOutQueue uint32) (err error) +//sys WaitCommEvent(handle Handle, lpEvtMask *uint32, lpOverlapped *Overlapped) (err error) //sys GetActiveProcessorCount(groupNumber uint16) (ret uint32) //sys GetMaximumProcessorCount(groupNumber uint16) (ret uint32) //sys EnumWindows(enumFunc uintptr, param unsafe.Pointer) (err error) = user32.EnumWindows @@ -1834,3 +1846,73 @@ func ResizePseudoConsole(pconsole Handle, size Coord) error { // accept arguments that can be casted to uintptr, and Coord can't. return resizePseudoConsole(pconsole, *((*uint32)(unsafe.Pointer(&size)))) } + +// DCB constants. See https://learn.microsoft.com/en-us/windows/win32/api/winbase/ns-winbase-dcb. +const ( + CBR_110 = 110 + CBR_300 = 300 + CBR_600 = 600 + CBR_1200 = 1200 + CBR_2400 = 2400 + CBR_4800 = 4800 + CBR_9600 = 9600 + CBR_14400 = 14400 + CBR_19200 = 19200 + CBR_38400 = 38400 + CBR_57600 = 57600 + CBR_115200 = 115200 + CBR_128000 = 128000 + CBR_256000 = 256000 + + DTR_CONTROL_DISABLE = 0x00000000 + DTR_CONTROL_ENABLE = 0x00000010 + DTR_CONTROL_HANDSHAKE = 0x00000020 + + RTS_CONTROL_DISABLE = 0x00000000 + RTS_CONTROL_ENABLE = 0x00001000 + RTS_CONTROL_HANDSHAKE = 0x00002000 + RTS_CONTROL_TOGGLE = 0x00003000 + + NOPARITY = 0 + ODDPARITY = 1 + EVENPARITY = 2 + MARKPARITY = 3 + SPACEPARITY = 4 + + ONESTOPBIT = 0 + ONE5STOPBITS = 1 + TWOSTOPBITS = 2 +) + +// EscapeCommFunction constants. See https://learn.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-escapecommfunction. +const ( + SETXOFF = 1 + SETXON = 2 + SETRTS = 3 + CLRRTS = 4 + SETDTR = 5 + CLRDTR = 6 + SETBREAK = 8 + CLRBREAK = 9 +) + +// PurgeComm constants. See https://learn.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-purgecomm. +const ( + PURGE_TXABORT = 0x0001 + PURGE_RXABORT = 0x0002 + PURGE_TXCLEAR = 0x0004 + PURGE_RXCLEAR = 0x0008 +) + +// SetCommMask constants. See https://learn.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-setcommmask. 
+const ( + EV_RXCHAR = 0x0001 + EV_RXFLAG = 0x0002 + EV_TXEMPTY = 0x0004 + EV_CTS = 0x0008 + EV_DSR = 0x0010 + EV_RLSD = 0x0020 + EV_BREAK = 0x0040 + EV_ERR = 0x0080 + EV_RING = 0x0100 +) diff --git a/vendor/golang.org/x/sys/windows/types_windows.go b/vendor/golang.org/x/sys/windows/types_windows.go index 359780f..d8cb71d 100644 --- a/vendor/golang.org/x/sys/windows/types_windows.go +++ b/vendor/golang.org/x/sys/windows/types_windows.go @@ -3380,3 +3380,27 @@ type BLOB struct { Size uint32 BlobData *byte } + +type ComStat struct { + Flags uint32 + CBInQue uint32 + CBOutQue uint32 +} + +type DCB struct { + DCBlength uint32 + BaudRate uint32 + Flags uint32 + wReserved uint16 + XonLim uint16 + XoffLim uint16 + ByteSize uint8 + Parity uint8 + StopBits uint8 + XonChar byte + XoffChar byte + ErrorChar byte + EofChar byte + EvtChar byte + wReserved1 uint16 +} diff --git a/vendor/golang.org/x/sys/windows/zsyscall_windows.go b/vendor/golang.org/x/sys/windows/zsyscall_windows.go index 146a1f0..5c6035d 100644 --- a/vendor/golang.org/x/sys/windows/zsyscall_windows.go +++ b/vendor/golang.org/x/sys/windows/zsyscall_windows.go @@ -188,6 +188,8 @@ var ( procAssignProcessToJobObject = modkernel32.NewProc("AssignProcessToJobObject") procCancelIo = modkernel32.NewProc("CancelIo") procCancelIoEx = modkernel32.NewProc("CancelIoEx") + procClearCommBreak = modkernel32.NewProc("ClearCommBreak") + procClearCommError = modkernel32.NewProc("ClearCommError") procCloseHandle = modkernel32.NewProc("CloseHandle") procClosePseudoConsole = modkernel32.NewProc("ClosePseudoConsole") procConnectNamedPipe = modkernel32.NewProc("ConnectNamedPipe") @@ -212,7 +214,9 @@ var ( procDeleteProcThreadAttributeList = modkernel32.NewProc("DeleteProcThreadAttributeList") procDeleteVolumeMountPointW = modkernel32.NewProc("DeleteVolumeMountPointW") procDeviceIoControl = modkernel32.NewProc("DeviceIoControl") + procDisconnectNamedPipe = modkernel32.NewProc("DisconnectNamedPipe") procDuplicateHandle = modkernel32.NewProc("DuplicateHandle") + procEscapeCommFunction = modkernel32.NewProc("EscapeCommFunction") procExitProcess = modkernel32.NewProc("ExitProcess") procExpandEnvironmentStringsW = modkernel32.NewProc("ExpandEnvironmentStringsW") procFindClose = modkernel32.NewProc("FindClose") @@ -236,6 +240,8 @@ var ( procGenerateConsoleCtrlEvent = modkernel32.NewProc("GenerateConsoleCtrlEvent") procGetACP = modkernel32.NewProc("GetACP") procGetActiveProcessorCount = modkernel32.NewProc("GetActiveProcessorCount") + procGetCommModemStatus = modkernel32.NewProc("GetCommModemStatus") + procGetCommState = modkernel32.NewProc("GetCommState") procGetCommTimeouts = modkernel32.NewProc("GetCommTimeouts") procGetCommandLineW = modkernel32.NewProc("GetCommandLineW") procGetComputerNameExW = modkernel32.NewProc("GetComputerNameExW") @@ -322,6 +328,7 @@ var ( procProcess32NextW = modkernel32.NewProc("Process32NextW") procProcessIdToSessionId = modkernel32.NewProc("ProcessIdToSessionId") procPulseEvent = modkernel32.NewProc("PulseEvent") + procPurgeComm = modkernel32.NewProc("PurgeComm") procQueryDosDeviceW = modkernel32.NewProc("QueryDosDeviceW") procQueryFullProcessImageNameW = modkernel32.NewProc("QueryFullProcessImageNameW") procQueryInformationJobObject = modkernel32.NewProc("QueryInformationJobObject") @@ -335,6 +342,9 @@ var ( procResetEvent = modkernel32.NewProc("ResetEvent") procResizePseudoConsole = modkernel32.NewProc("ResizePseudoConsole") procResumeThread = modkernel32.NewProc("ResumeThread") + procSetCommBreak = 
modkernel32.NewProc("SetCommBreak") + procSetCommMask = modkernel32.NewProc("SetCommMask") + procSetCommState = modkernel32.NewProc("SetCommState") procSetCommTimeouts = modkernel32.NewProc("SetCommTimeouts") procSetConsoleCursorPosition = modkernel32.NewProc("SetConsoleCursorPosition") procSetConsoleMode = modkernel32.NewProc("SetConsoleMode") @@ -350,6 +360,7 @@ var ( procSetFileInformationByHandle = modkernel32.NewProc("SetFileInformationByHandle") procSetFilePointer = modkernel32.NewProc("SetFilePointer") procSetFileTime = modkernel32.NewProc("SetFileTime") + procSetFileValidData = modkernel32.NewProc("SetFileValidData") procSetHandleInformation = modkernel32.NewProc("SetHandleInformation") procSetInformationJobObject = modkernel32.NewProc("SetInformationJobObject") procSetNamedPipeHandleState = modkernel32.NewProc("SetNamedPipeHandleState") @@ -360,6 +371,7 @@ var ( procSetStdHandle = modkernel32.NewProc("SetStdHandle") procSetVolumeLabelW = modkernel32.NewProc("SetVolumeLabelW") procSetVolumeMountPointW = modkernel32.NewProc("SetVolumeMountPointW") + procSetupComm = modkernel32.NewProc("SetupComm") procSizeofResource = modkernel32.NewProc("SizeofResource") procSleepEx = modkernel32.NewProc("SleepEx") procTerminateJobObject = modkernel32.NewProc("TerminateJobObject") @@ -378,6 +390,7 @@ var ( procVirtualQueryEx = modkernel32.NewProc("VirtualQueryEx") procVirtualUnlock = modkernel32.NewProc("VirtualUnlock") procWTSGetActiveConsoleSessionId = modkernel32.NewProc("WTSGetActiveConsoleSessionId") + procWaitCommEvent = modkernel32.NewProc("WaitCommEvent") procWaitForMultipleObjects = modkernel32.NewProc("WaitForMultipleObjects") procWaitForSingleObject = modkernel32.NewProc("WaitForSingleObject") procWriteConsoleW = modkernel32.NewProc("WriteConsoleW") @@ -1640,6 +1653,22 @@ func CancelIoEx(s Handle, o *Overlapped) (err error) { return } +func ClearCommBreak(handle Handle) (err error) { + r1, _, e1 := syscall.Syscall(procClearCommBreak.Addr(), 1, uintptr(handle), 0, 0) + if r1 == 0 { + err = errnoErr(e1) + } + return +} + +func ClearCommError(handle Handle, lpErrors *uint32, lpStat *ComStat) (err error) { + r1, _, e1 := syscall.Syscall(procClearCommError.Addr(), 3, uintptr(handle), uintptr(unsafe.Pointer(lpErrors)), uintptr(unsafe.Pointer(lpStat))) + if r1 == 0 { + err = errnoErr(e1) + } + return +} + func CloseHandle(handle Handle) (err error) { r1, _, e1 := syscall.Syscall(procCloseHandle.Addr(), 1, uintptr(handle), 0, 0) if r1 == 0 { @@ -1844,6 +1873,14 @@ func DeviceIoControl(handle Handle, ioControlCode uint32, inBuffer *byte, inBuff return } +func DisconnectNamedPipe(pipe Handle) (err error) { + r1, _, e1 := syscall.Syscall(procDisconnectNamedPipe.Addr(), 1, uintptr(pipe), 0, 0) + if r1 == 0 { + err = errnoErr(e1) + } + return +} + func DuplicateHandle(hSourceProcessHandle Handle, hSourceHandle Handle, hTargetProcessHandle Handle, lpTargetHandle *Handle, dwDesiredAccess uint32, bInheritHandle bool, dwOptions uint32) (err error) { var _p0 uint32 if bInheritHandle { @@ -1856,6 +1893,14 @@ func DuplicateHandle(hSourceProcessHandle Handle, hSourceHandle Handle, hTargetP return } +func EscapeCommFunction(handle Handle, dwFunc uint32) (err error) { + r1, _, e1 := syscall.Syscall(procEscapeCommFunction.Addr(), 2, uintptr(handle), uintptr(dwFunc), 0) + if r1 == 0 { + err = errnoErr(e1) + } + return +} + func ExitProcess(exitcode uint32) { syscall.Syscall(procExitProcess.Addr(), 1, uintptr(exitcode), 0, 0) return @@ -2057,6 +2102,22 @@ func GetActiveProcessorCount(groupNumber uint16) (ret 
uint32) { return } +func GetCommModemStatus(handle Handle, lpModemStat *uint32) (err error) { + r1, _, e1 := syscall.Syscall(procGetCommModemStatus.Addr(), 2, uintptr(handle), uintptr(unsafe.Pointer(lpModemStat)), 0) + if r1 == 0 { + err = errnoErr(e1) + } + return +} + +func GetCommState(handle Handle, lpDCB *DCB) (err error) { + r1, _, e1 := syscall.Syscall(procGetCommState.Addr(), 2, uintptr(handle), uintptr(unsafe.Pointer(lpDCB)), 0) + if r1 == 0 { + err = errnoErr(e1) + } + return +} + func GetCommTimeouts(handle Handle, timeouts *CommTimeouts) (err error) { r1, _, e1 := syscall.Syscall(procGetCommTimeouts.Addr(), 2, uintptr(handle), uintptr(unsafe.Pointer(timeouts)), 0) if r1 == 0 { @@ -2809,6 +2870,14 @@ func PulseEvent(event Handle) (err error) { return } +func PurgeComm(handle Handle, dwFlags uint32) (err error) { + r1, _, e1 := syscall.Syscall(procPurgeComm.Addr(), 2, uintptr(handle), uintptr(dwFlags), 0) + if r1 == 0 { + err = errnoErr(e1) + } + return +} + func QueryDosDevice(deviceName *uint16, targetPath *uint16, max uint32) (n uint32, err error) { r0, _, e1 := syscall.Syscall(procQueryDosDeviceW.Addr(), 3, uintptr(unsafe.Pointer(deviceName)), uintptr(unsafe.Pointer(targetPath)), uintptr(max)) n = uint32(r0) @@ -2923,6 +2992,30 @@ func ResumeThread(thread Handle) (ret uint32, err error) { return } +func SetCommBreak(handle Handle) (err error) { + r1, _, e1 := syscall.Syscall(procSetCommBreak.Addr(), 1, uintptr(handle), 0, 0) + if r1 == 0 { + err = errnoErr(e1) + } + return +} + +func SetCommMask(handle Handle, dwEvtMask uint32) (err error) { + r1, _, e1 := syscall.Syscall(procSetCommMask.Addr(), 2, uintptr(handle), uintptr(dwEvtMask), 0) + if r1 == 0 { + err = errnoErr(e1) + } + return +} + +func SetCommState(handle Handle, lpDCB *DCB) (err error) { + r1, _, e1 := syscall.Syscall(procSetCommState.Addr(), 2, uintptr(handle), uintptr(unsafe.Pointer(lpDCB)), 0) + if r1 == 0 { + err = errnoErr(e1) + } + return +} + func SetCommTimeouts(handle Handle, timeouts *CommTimeouts) (err error) { r1, _, e1 := syscall.Syscall(procSetCommTimeouts.Addr(), 2, uintptr(handle), uintptr(unsafe.Pointer(timeouts)), 0) if r1 == 0 { @@ -3051,6 +3144,14 @@ func SetFileTime(handle Handle, ctime *Filetime, atime *Filetime, wtime *Filetim return } +func SetFileValidData(handle Handle, validDataLength int64) (err error) { + r1, _, e1 := syscall.Syscall(procSetFileValidData.Addr(), 2, uintptr(handle), uintptr(validDataLength), 0) + if r1 == 0 { + err = errnoErr(e1) + } + return +} + func SetHandleInformation(handle Handle, mask uint32, flags uint32) (err error) { r1, _, e1 := syscall.Syscall(procSetHandleInformation.Addr(), 3, uintptr(handle), uintptr(mask), uintptr(flags)) if r1 == 0 { @@ -3136,6 +3237,14 @@ func SetVolumeMountPoint(volumeMountPoint *uint16, volumeName *uint16) (err erro return } +func SetupComm(handle Handle, dwInQueue uint32, dwOutQueue uint32) (err error) { + r1, _, e1 := syscall.Syscall(procSetupComm.Addr(), 3, uintptr(handle), uintptr(dwInQueue), uintptr(dwOutQueue)) + if r1 == 0 { + err = errnoErr(e1) + } + return +} + func SizeofResource(module Handle, resInfo Handle) (size uint32, err error) { r0, _, e1 := syscall.Syscall(procSizeofResource.Addr(), 2, uintptr(module), uintptr(resInfo), 0) size = uint32(r0) @@ -3282,6 +3391,14 @@ func WTSGetActiveConsoleSessionId() (sessionID uint32) { return } +func WaitCommEvent(handle Handle, lpEvtMask *uint32, lpOverlapped *Overlapped) (err error) { + r1, _, e1 := syscall.Syscall(procWaitCommEvent.Addr(), 3, uintptr(handle), 
uintptr(unsafe.Pointer(lpEvtMask)), uintptr(unsafe.Pointer(lpOverlapped))) + if r1 == 0 { + err = errnoErr(e1) + } + return +} + func waitForMultipleObjects(count uint32, handles uintptr, waitAll bool, waitMilliseconds uint32) (event uint32, err error) { var _p0 uint32 if waitAll { diff --git a/vendor/golang.org/x/tools/LICENSE b/vendor/golang.org/x/tools/LICENSE new file mode 100644 index 0000000..6a66aea --- /dev/null +++ b/vendor/golang.org/x/tools/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/tools/PATENTS b/vendor/golang.org/x/tools/PATENTS new file mode 100644 index 0000000..7330990 --- /dev/null +++ b/vendor/golang.org/x/tools/PATENTS @@ -0,0 +1,22 @@ +Additional IP Rights Grant (Patents) + +"This implementation" means the copyrightable works distributed by +Google as part of the Go project. + +Google hereby grants to You a perpetual, worldwide, non-exclusive, +no-charge, royalty-free, irrevocable (except as stated in this section) +patent license to make, have made, use, offer to sell, sell, import, +transfer and otherwise run, modify and propagate the contents of this +implementation of Go, where such license applies only to those patent +claims, both currently owned or controlled by Google and acquired in +the future, licensable by Google that are necessarily infringed by this +implementation of Go. This grant does not include claims that would be +infringed only as a consequence of further modification of this +implementation. If you or your agent or exclusive licensee institute or +order or agree to the institution of patent litigation against any +entity (including a cross-claim or counterclaim in a lawsuit) alleging +that this implementation of Go or any code incorporated within this +implementation of Go constitutes direct or contributory patent +infringement, or inducement of patent infringement, then any patent +rights granted to you under this License for this implementation of Go +shall terminate as of the date such litigation is filed. 
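The Windows additions above (the DCB and ComStat types, SetupComm, GetCommState/SetCommState and the CBR_*/parity/stop-bit constants) form a small serial-port surface. A minimal sketch of how they fit together, assuming a placeholder device name \\.\COM3 and leaving the DCB flow-control Flags and the comm timeouts at whatever the driver already has:

//go:build windows

package main

import (
	"log"
	"unsafe"

	"golang.org/x/sys/windows"
)

func main() {
	// Placeholder port name; real code would take this from configuration.
	name, err := windows.UTF16PtrFromString(`\\.\COM3`)
	if err != nil {
		log.Fatal(err)
	}
	h, err := windows.CreateFile(name,
		windows.GENERIC_READ|windows.GENERIC_WRITE,
		0, nil, windows.OPEN_EXISTING, windows.FILE_ATTRIBUTE_NORMAL, 0)
	if err != nil {
		log.Fatal(err)
	}
	defer windows.CloseHandle(h)

	// Size the driver's I/O queues, then reconfigure the line settings.
	if err := windows.SetupComm(h, 4096, 4096); err != nil {
		log.Fatal(err)
	}
	var dcb windows.DCB
	dcb.DCBlength = uint32(unsafe.Sizeof(dcb))
	if err := windows.GetCommState(h, &dcb); err != nil {
		log.Fatal(err)
	}
	dcb.BaudRate = windows.CBR_115200
	dcb.ByteSize = 8
	dcb.Parity = windows.NOPARITY
	dcb.StopBits = windows.ONESTOPBIT
	if err := windows.SetCommState(h, &dcb); err != nil {
		log.Fatal(err)
	}
}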
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go new file mode 100644 index 0000000..2c4c4e2 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go @@ -0,0 +1,634 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package astutil + +// This file defines utilities for working with source positions. + +import ( + "fmt" + "go/ast" + "go/token" + "sort" +) + +// PathEnclosingInterval returns the node that encloses the source +// interval [start, end), and all its ancestors up to the AST root. +// +// The definition of "enclosing" used by this function considers +// additional whitespace abutting a node to be enclosed by it. +// In this example: +// +// z := x + y // add them +// <-A-> +// <----B-----> +// +// the ast.BinaryExpr(+) node is considered to enclose interval B +// even though its [Pos()..End()) is actually only interval A. +// This behaviour makes user interfaces more tolerant of imperfect +// input. +// +// This function treats tokens as nodes, though they are not included +// in the result. e.g. PathEnclosingInterval("+") returns the +// enclosing ast.BinaryExpr("x + y"). +// +// If start==end, the 1-char interval following start is used instead. +// +// The 'exact' result is true if the interval contains only path[0] +// and perhaps some adjacent whitespace. It is false if the interval +// overlaps multiple children of path[0], or if it contains only +// interior whitespace of path[0]. +// In this example: +// +// z := x + y // add them +// <--C--> <---E--> +// ^ +// D +// +// intervals C, D and E are inexact. C is contained by the +// z-assignment statement, because it spans three of its children (:=, +// x, +). So too is the 1-char interval D, because it contains only +// interior whitespace of the assignment. E is considered interior +// whitespace of the BlockStmt containing the assignment. +// +// The resulting path is never empty; it always contains at least the +// 'root' *ast.File. Ideally PathEnclosingInterval would reject +// intervals that lie wholly or partially outside the range of the +// file, but unfortunately ast.File records only the token.Pos of +// the 'package' keyword, but not of the start of the file itself. +func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) { + // fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging + + // Precondition: node.[Pos..End) and adjoining whitespace contain [start, end). + var visit func(node ast.Node) bool + visit = func(node ast.Node) bool { + path = append(path, node) + + nodePos := node.Pos() + nodeEnd := node.End() + + // fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging + + // Intersect [start, end) with interval of node. + if start < nodePos { + start = nodePos + } + if end > nodeEnd { + end = nodeEnd + } + + // Find sole child that contains [start, end). + children := childrenOf(node) + l := len(children) + for i, child := range children { + // [childPos, childEnd) is unaugmented interval of child. + childPos := child.Pos() + childEnd := child.End() + + // [augPos, augEnd) is whitespace-augmented interval of child. 
+ augPos := childPos + augEnd := childEnd + if i > 0 { + augPos = children[i-1].End() // start of preceding whitespace + } + if i < l-1 { + nextChildPos := children[i+1].Pos() + // Does [start, end) lie between child and next child? + if start >= augEnd && end <= nextChildPos { + return false // inexact match + } + augEnd = nextChildPos // end of following whitespace + } + + // fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n", + // i, augPos, augEnd, start, end) // debugging + + // Does augmented child strictly contain [start, end)? + if augPos <= start && end <= augEnd { + _, isToken := child.(tokenNode) + return isToken || visit(child) + } + + // Does [start, end) overlap multiple children? + // i.e. left-augmented child contains start + // but LR-augmented child does not contain end. + if start < childEnd && end > augEnd { + break + } + } + + // No single child contained [start, end), + // so node is the result. Is it exact? + + // (It's tempting to put this condition before the + // child loop, but it gives the wrong result in the + // case where a node (e.g. ExprStmt) and its sole + // child have equal intervals.) + if start == nodePos && end == nodeEnd { + return true // exact match + } + + return false // inexact: overlaps multiple children + } + + // Ensure [start,end) is nondecreasing. + if start > end { + start, end = end, start + } + + if start < root.End() && end > root.Pos() { + if start == end { + end = start + 1 // empty interval => interval of size 1 + } + exact = visit(root) + + // Reverse the path: + for i, l := 0, len(path); i < l/2; i++ { + path[i], path[l-1-i] = path[l-1-i], path[i] + } + } else { + // Selection lies within whitespace preceding the + // first (or following the last) declaration in the file. + // The result nonetheless always includes the ast.File. + path = append(path, root) + } + + return +} + +// tokenNode is a dummy implementation of ast.Node for a single token. +// They are used transiently by PathEnclosingInterval but never escape +// this package. +type tokenNode struct { + pos token.Pos + end token.Pos +} + +func (n tokenNode) Pos() token.Pos { + return n.pos +} + +func (n tokenNode) End() token.Pos { + return n.end +} + +func tok(pos token.Pos, len int) ast.Node { + return tokenNode{pos, pos + token.Pos(len)} +} + +// childrenOf returns the direct non-nil children of ast.Node n. +// It may include fake ast.Node implementations for bare tokens. +// it is not safe to call (e.g.) ast.Walk on such nodes. +func childrenOf(n ast.Node) []ast.Node { + var children []ast.Node + + // First add nodes for all true subtrees. + ast.Inspect(n, func(node ast.Node) bool { + if node == n { // push n + return true // recur + } + if node != nil { // push child + children = append(children, node) + } + return false // no recursion + }) + + // Then add fake Nodes for bare tokens. 
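+// A minimal usage sketch of PathEnclosingInterval (illustrative only; f, pos
+// and end are assumed to come from a parsed file and a caller's selection):
+//
+//	path, exact := astutil.PathEnclosingInterval(f, pos, end)
+//	fmt.Printf("innermost: %s (exact=%v)\n", astutil.NodeDescription(path[0]), exact)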
+ switch n := n.(type) { + case *ast.ArrayType: + children = append(children, + tok(n.Lbrack, len("[")), + tok(n.Elt.End(), len("]"))) + + case *ast.AssignStmt: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.BasicLit: + children = append(children, + tok(n.ValuePos, len(n.Value))) + + case *ast.BinaryExpr: + children = append(children, tok(n.OpPos, len(n.Op.String()))) + + case *ast.BlockStmt: + children = append(children, + tok(n.Lbrace, len("{")), + tok(n.Rbrace, len("}"))) + + case *ast.BranchStmt: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.CallExpr: + children = append(children, + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + if n.Ellipsis != 0 { + children = append(children, tok(n.Ellipsis, len("..."))) + } + + case *ast.CaseClause: + if n.List == nil { + children = append(children, + tok(n.Case, len("default"))) + } else { + children = append(children, + tok(n.Case, len("case"))) + } + children = append(children, tok(n.Colon, len(":"))) + + case *ast.ChanType: + switch n.Dir { + case ast.RECV: + children = append(children, tok(n.Begin, len("<-chan"))) + case ast.SEND: + children = append(children, tok(n.Begin, len("chan<-"))) + case ast.RECV | ast.SEND: + children = append(children, tok(n.Begin, len("chan"))) + } + + case *ast.CommClause: + if n.Comm == nil { + children = append(children, + tok(n.Case, len("default"))) + } else { + children = append(children, + tok(n.Case, len("case"))) + } + children = append(children, tok(n.Colon, len(":"))) + + case *ast.Comment: + // nop + + case *ast.CommentGroup: + // nop + + case *ast.CompositeLit: + children = append(children, + tok(n.Lbrace, len("{")), + tok(n.Rbrace, len("{"))) + + case *ast.DeclStmt: + // nop + + case *ast.DeferStmt: + children = append(children, + tok(n.Defer, len("defer"))) + + case *ast.Ellipsis: + children = append(children, + tok(n.Ellipsis, len("..."))) + + case *ast.EmptyStmt: + // nop + + case *ast.ExprStmt: + // nop + + case *ast.Field: + // TODO(adonovan): Field.{Doc,Comment,Tag}? + + case *ast.FieldList: + children = append(children, + tok(n.Opening, len("(")), // or len("[") + tok(n.Closing, len(")"))) // or len("]") + + case *ast.File: + // TODO test: Doc + children = append(children, + tok(n.Package, len("package"))) + + case *ast.ForStmt: + children = append(children, + tok(n.For, len("for"))) + + case *ast.FuncDecl: + // TODO(adonovan): FuncDecl.Comment? + + // Uniquely, FuncDecl breaks the invariant that + // preorder traversal yields tokens in lexical order: + // in fact, FuncDecl.Recv precedes FuncDecl.Type.Func. + // + // As a workaround, we inline the case for FuncType + // here and order things correctly. 
+ // + children = nil // discard ast.Walk(FuncDecl) info subtrees + children = append(children, tok(n.Type.Func, len("func"))) + if n.Recv != nil { + children = append(children, n.Recv) + } + children = append(children, n.Name) + if tparams := n.Type.TypeParams; tparams != nil { + children = append(children, tparams) + } + if n.Type.Params != nil { + children = append(children, n.Type.Params) + } + if n.Type.Results != nil { + children = append(children, n.Type.Results) + } + if n.Body != nil { + children = append(children, n.Body) + } + + case *ast.FuncLit: + // nop + + case *ast.FuncType: + if n.Func != 0 { + children = append(children, + tok(n.Func, len("func"))) + } + + case *ast.GenDecl: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + if n.Lparen != 0 { + children = append(children, + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + } + + case *ast.GoStmt: + children = append(children, + tok(n.Go, len("go"))) + + case *ast.Ident: + children = append(children, + tok(n.NamePos, len(n.Name))) + + case *ast.IfStmt: + children = append(children, + tok(n.If, len("if"))) + + case *ast.ImportSpec: + // TODO(adonovan): ImportSpec.{Doc,EndPos}? + + case *ast.IncDecStmt: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.IndexExpr: + children = append(children, + tok(n.Lbrack, len("[")), + tok(n.Rbrack, len("]"))) + + case *ast.IndexListExpr: + children = append(children, + tok(n.Lbrack, len("[")), + tok(n.Rbrack, len("]"))) + + case *ast.InterfaceType: + children = append(children, + tok(n.Interface, len("interface"))) + + case *ast.KeyValueExpr: + children = append(children, + tok(n.Colon, len(":"))) + + case *ast.LabeledStmt: + children = append(children, + tok(n.Colon, len(":"))) + + case *ast.MapType: + children = append(children, + tok(n.Map, len("map"))) + + case *ast.ParenExpr: + children = append(children, + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + + case *ast.RangeStmt: + children = append(children, + tok(n.For, len("for")), + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.ReturnStmt: + children = append(children, + tok(n.Return, len("return"))) + + case *ast.SelectStmt: + children = append(children, + tok(n.Select, len("select"))) + + case *ast.SelectorExpr: + // nop + + case *ast.SendStmt: + children = append(children, + tok(n.Arrow, len("<-"))) + + case *ast.SliceExpr: + children = append(children, + tok(n.Lbrack, len("[")), + tok(n.Rbrack, len("]"))) + + case *ast.StarExpr: + children = append(children, tok(n.Star, len("*"))) + + case *ast.StructType: + children = append(children, tok(n.Struct, len("struct"))) + + case *ast.SwitchStmt: + children = append(children, tok(n.Switch, len("switch"))) + + case *ast.TypeAssertExpr: + children = append(children, + tok(n.Lparen-1, len(".")), + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + + case *ast.TypeSpec: + // TODO(adonovan): TypeSpec.{Doc,Comment}? + + case *ast.TypeSwitchStmt: + children = append(children, tok(n.Switch, len("switch"))) + + case *ast.UnaryExpr: + children = append(children, tok(n.OpPos, len(n.Op.String()))) + + case *ast.ValueSpec: + // TODO(adonovan): ValueSpec.{Doc,Comment}? + + case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt: + // nop + } + + // TODO(adonovan): opt: merge the logic of ast.Inspect() into + // the switch above so we can make interleaved callbacks for + // both Nodes and Tokens in the right order and avoid the need + // to sort. 
+ sort.Sort(byPos(children)) + + return children +} + +type byPos []ast.Node + +func (sl byPos) Len() int { + return len(sl) +} +func (sl byPos) Less(i, j int) bool { + return sl[i].Pos() < sl[j].Pos() +} +func (sl byPos) Swap(i, j int) { + sl[i], sl[j] = sl[j], sl[i] +} + +// NodeDescription returns a description of the concrete type of n suitable +// for a user interface. +// +// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident, +// StarExpr) we could be much more specific given the path to the AST +// root. Perhaps we should do that. +func NodeDescription(n ast.Node) string { + switch n := n.(type) { + case *ast.ArrayType: + return "array type" + case *ast.AssignStmt: + return "assignment" + case *ast.BadDecl: + return "bad declaration" + case *ast.BadExpr: + return "bad expression" + case *ast.BadStmt: + return "bad statement" + case *ast.BasicLit: + return "basic literal" + case *ast.BinaryExpr: + return fmt.Sprintf("binary %s operation", n.Op) + case *ast.BlockStmt: + return "block" + case *ast.BranchStmt: + switch n.Tok { + case token.BREAK: + return "break statement" + case token.CONTINUE: + return "continue statement" + case token.GOTO: + return "goto statement" + case token.FALLTHROUGH: + return "fall-through statement" + } + case *ast.CallExpr: + if len(n.Args) == 1 && !n.Ellipsis.IsValid() { + return "function call (or conversion)" + } + return "function call" + case *ast.CaseClause: + return "case clause" + case *ast.ChanType: + return "channel type" + case *ast.CommClause: + return "communication clause" + case *ast.Comment: + return "comment" + case *ast.CommentGroup: + return "comment group" + case *ast.CompositeLit: + return "composite literal" + case *ast.DeclStmt: + return NodeDescription(n.Decl) + " statement" + case *ast.DeferStmt: + return "defer statement" + case *ast.Ellipsis: + return "ellipsis" + case *ast.EmptyStmt: + return "empty statement" + case *ast.ExprStmt: + return "expression statement" + case *ast.Field: + // Can be any of these: + // struct {x, y int} -- struct field(s) + // struct {T} -- anon struct field + // interface {I} -- interface embedding + // interface {f()} -- interface method + // func (A) func(B) C -- receiver, param(s), result(s) + return "field/method/parameter" + case *ast.FieldList: + return "field/method/parameter list" + case *ast.File: + return "source file" + case *ast.ForStmt: + return "for loop" + case *ast.FuncDecl: + return "function declaration" + case *ast.FuncLit: + return "function literal" + case *ast.FuncType: + return "function type" + case *ast.GenDecl: + switch n.Tok { + case token.IMPORT: + return "import declaration" + case token.CONST: + return "constant declaration" + case token.TYPE: + return "type declaration" + case token.VAR: + return "variable declaration" + } + case *ast.GoStmt: + return "go statement" + case *ast.Ident: + return "identifier" + case *ast.IfStmt: + return "if statement" + case *ast.ImportSpec: + return "import specification" + case *ast.IncDecStmt: + if n.Tok == token.INC { + return "increment statement" + } + return "decrement statement" + case *ast.IndexExpr: + return "index expression" + case *ast.IndexListExpr: + return "index list expression" + case *ast.InterfaceType: + return "interface type" + case *ast.KeyValueExpr: + return "key/value association" + case *ast.LabeledStmt: + return "statement label" + case *ast.MapType: + return "map type" + case *ast.Package: + return "package" + case *ast.ParenExpr: + return "parenthesized " + NodeDescription(n.X) + case *ast.RangeStmt: + 
return "range loop" + case *ast.ReturnStmt: + return "return statement" + case *ast.SelectStmt: + return "select statement" + case *ast.SelectorExpr: + return "selector" + case *ast.SendStmt: + return "channel send" + case *ast.SliceExpr: + return "slice expression" + case *ast.StarExpr: + return "*-operation" // load/store expr or pointer type + case *ast.StructType: + return "struct type" + case *ast.SwitchStmt: + return "switch statement" + case *ast.TypeAssertExpr: + return "type assertion" + case *ast.TypeSpec: + return "type specification" + case *ast.TypeSwitchStmt: + return "type switch" + case *ast.UnaryExpr: + return fmt.Sprintf("unary %s operation", n.Op) + case *ast.ValueSpec: + return "value specification" + + } + panic(fmt.Sprintf("unexpected node type: %T", n)) +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/vendor/golang.org/x/tools/go/ast/astutil/imports.go new file mode 100644 index 0000000..18d1adb --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/imports.go @@ -0,0 +1,485 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package astutil contains common utilities for working with the Go AST. +package astutil // import "golang.org/x/tools/go/ast/astutil" + +import ( + "fmt" + "go/ast" + "go/token" + "strconv" + "strings" +) + +// AddImport adds the import path to the file f, if absent. +func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) { + return AddNamedImport(fset, f, "", path) +} + +// AddNamedImport adds the import with the given name and path to the file f, if absent. +// If name is not empty, it is used to rename the import. +// +// For example, calling +// +// AddNamedImport(fset, f, "pathpkg", "path") +// +// adds +// +// import pathpkg "path" +func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) { + if imports(f, name, path) { + return false + } + + newImport := &ast.ImportSpec{ + Path: &ast.BasicLit{ + Kind: token.STRING, + Value: strconv.Quote(path), + }, + } + if name != "" { + newImport.Name = &ast.Ident{Name: name} + } + + // Find an import decl to add to. + // The goal is to find an existing import + // whose import path has the longest shared + // prefix with path. + var ( + bestMatch = -1 // length of longest shared prefix + lastImport = -1 // index in f.Decls of the file's final import decl + impDecl *ast.GenDecl // import decl containing the best match + impIndex = -1 // spec index in impDecl containing the best match + + isThirdPartyPath = isThirdParty(path) + ) + for i, decl := range f.Decls { + gen, ok := decl.(*ast.GenDecl) + if ok && gen.Tok == token.IMPORT { + lastImport = i + // Do not add to import "C", to avoid disrupting the + // association with its doc comment, breaking cgo. + if declImports(gen, "C") { + continue + } + + // Match an empty import decl if that's all that is available. + if len(gen.Specs) == 0 && bestMatch == -1 { + impDecl = gen + } + + // Compute longest shared prefix with imports in this group and find best + // matched import spec. + // 1. Always prefer import spec with longest shared prefix. + // 2. While match length is 0, + // - for stdlib package: prefer first import spec. + // - for third party package: prefer first third party import spec. + // We cannot use last import spec as best match for third party package + // because grouped imports are usually placed last by goimports -local + // flag. 
+ // See issue #19190. + seenAnyThirdParty := false + for j, spec := range gen.Specs { + impspec := spec.(*ast.ImportSpec) + p := importPath(impspec) + n := matchLen(p, path) + if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) { + bestMatch = n + impDecl = gen + impIndex = j + } + seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p) + } + } + } + + // If no import decl found, add one after the last import. + if impDecl == nil { + impDecl = &ast.GenDecl{ + Tok: token.IMPORT, + } + if lastImport >= 0 { + impDecl.TokPos = f.Decls[lastImport].End() + } else { + // There are no existing imports. + // Our new import, preceded by a blank line, goes after the package declaration + // and after the comment, if any, that starts on the same line as the + // package declaration. + impDecl.TokPos = f.Package + + file := fset.File(f.Package) + pkgLine := file.Line(f.Package) + for _, c := range f.Comments { + if file.Line(c.Pos()) > pkgLine { + break + } + // +2 for a blank line + impDecl.TokPos = c.End() + 2 + } + } + f.Decls = append(f.Decls, nil) + copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:]) + f.Decls[lastImport+1] = impDecl + } + + // Insert new import at insertAt. + insertAt := 0 + if impIndex >= 0 { + // insert after the found import + insertAt = impIndex + 1 + } + impDecl.Specs = append(impDecl.Specs, nil) + copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:]) + impDecl.Specs[insertAt] = newImport + pos := impDecl.Pos() + if insertAt > 0 { + // If there is a comment after an existing import, preserve the comment + // position by adding the new import after the comment. + if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil { + pos = spec.Comment.End() + } else { + // Assign same position as the previous import, + // so that the sorter sees it as being in the same block. + pos = impDecl.Specs[insertAt-1].Pos() + } + } + if newImport.Name != nil { + newImport.Name.NamePos = pos + } + newImport.Path.ValuePos = pos + newImport.EndPos = pos + + // Clean up parens. impDecl contains at least one spec. + if len(impDecl.Specs) == 1 { + // Remove unneeded parens. + impDecl.Lparen = token.NoPos + } else if !impDecl.Lparen.IsValid() { + // impDecl needs parens added. + impDecl.Lparen = impDecl.Specs[0].Pos() + } + + f.Imports = append(f.Imports, newImport) + + if len(f.Decls) <= 1 { + return true + } + + // Merge all the import declarations into the first one. + var first *ast.GenDecl + for i := 0; i < len(f.Decls); i++ { + decl := f.Decls[i] + gen, ok := decl.(*ast.GenDecl) + if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") { + continue + } + if first == nil { + first = gen + continue // Don't touch the first one. + } + // We now know there is more than one package in this import + // declaration. Ensure that it ends up parenthesized. + first.Lparen = first.Pos() + // Move the imports of the other import declaration to the first one. + for _, spec := range gen.Specs { + spec.(*ast.ImportSpec).Path.ValuePos = first.Pos() + first.Specs = append(first.Specs, spec) + } + f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) + i-- + } + + return true +} + +func isThirdParty(importPath string) bool { + // Third party package import path usually contains "." (".com", ".org", ...) + // This logic is taken from golang.org/x/tools/imports package. + return strings.Contains(importPath, ".") +} + +// DeleteImport deletes the import path from the file f, if present. 
+// If there are duplicate import declarations, all matching ones are deleted. +func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) { + return DeleteNamedImport(fset, f, "", path) +} + +// DeleteNamedImport deletes the import with the given name and path from the file f, if present. +// If there are duplicate import declarations, all matching ones are deleted. +func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) { + var delspecs []*ast.ImportSpec + var delcomments []*ast.CommentGroup + + // Find the import nodes that import path, if any. + for i := 0; i < len(f.Decls); i++ { + decl := f.Decls[i] + gen, ok := decl.(*ast.GenDecl) + if !ok || gen.Tok != token.IMPORT { + continue + } + for j := 0; j < len(gen.Specs); j++ { + spec := gen.Specs[j] + impspec := spec.(*ast.ImportSpec) + if importName(impspec) != name || importPath(impspec) != path { + continue + } + + // We found an import spec that imports path. + // Delete it. + delspecs = append(delspecs, impspec) + deleted = true + copy(gen.Specs[j:], gen.Specs[j+1:]) + gen.Specs = gen.Specs[:len(gen.Specs)-1] + + // If this was the last import spec in this decl, + // delete the decl, too. + if len(gen.Specs) == 0 { + copy(f.Decls[i:], f.Decls[i+1:]) + f.Decls = f.Decls[:len(f.Decls)-1] + i-- + break + } else if len(gen.Specs) == 1 { + if impspec.Doc != nil { + delcomments = append(delcomments, impspec.Doc) + } + if impspec.Comment != nil { + delcomments = append(delcomments, impspec.Comment) + } + for _, cg := range f.Comments { + // Found comment on the same line as the import spec. + if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line { + delcomments = append(delcomments, cg) + break + } + } + + spec := gen.Specs[0].(*ast.ImportSpec) + + // Move the documentation right after the import decl. + if spec.Doc != nil { + for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line { + fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line) + } + } + for _, cg := range f.Comments { + if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line { + for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line { + fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line) + } + break + } + } + } + if j > 0 { + lastImpspec := gen.Specs[j-1].(*ast.ImportSpec) + lastLine := fset.PositionFor(lastImpspec.Path.ValuePos, false).Line + line := fset.PositionFor(impspec.Path.ValuePos, false).Line + + // We deleted an entry but now there may be + // a blank line-sized hole where the import was. + if line-lastLine > 1 || !gen.Rparen.IsValid() { + // There was a blank line immediately preceding the deleted import, + // so there's no need to close the hole. The right parenthesis is + // invalid after AddImport to an import statement without parenthesis. + // Do nothing. + } else if line != fset.File(gen.Rparen).LineCount() { + // There was no blank line. Close the hole. + fset.File(gen.Rparen).MergeLine(line) + } + } + j-- + } + } + + // Delete imports from f.Imports. + for i := 0; i < len(f.Imports); i++ { + imp := f.Imports[i] + for j, del := range delspecs { + if imp == del { + copy(f.Imports[i:], f.Imports[i+1:]) + f.Imports = f.Imports[:len(f.Imports)-1] + copy(delspecs[j:], delspecs[j+1:]) + delspecs = delspecs[:len(delspecs)-1] + i-- + break + } + } + } + + // Delete comments from f.Comments. 
+ for i := 0; i < len(f.Comments); i++ { + cg := f.Comments[i] + for j, del := range delcomments { + if cg == del { + copy(f.Comments[i:], f.Comments[i+1:]) + f.Comments = f.Comments[:len(f.Comments)-1] + copy(delcomments[j:], delcomments[j+1:]) + delcomments = delcomments[:len(delcomments)-1] + i-- + break + } + } + } + + if len(delspecs) > 0 { + panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs)) + } + + return +} + +// RewriteImport rewrites any import of path oldPath to path newPath. +func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) { + for _, imp := range f.Imports { + if importPath(imp) == oldPath { + rewrote = true + // record old End, because the default is to compute + // it using the length of imp.Path.Value. + imp.EndPos = imp.End() + imp.Path.Value = strconv.Quote(newPath) + } + } + return +} + +// UsesImport reports whether a given import is used. +func UsesImport(f *ast.File, path string) (used bool) { + spec := importSpec(f, path) + if spec == nil { + return + } + + name := spec.Name.String() + switch name { + case "": + // If the package name is not explicitly specified, + // make an educated guess. This is not guaranteed to be correct. + lastSlash := strings.LastIndex(path, "/") + if lastSlash == -1 { + name = path + } else { + name = path[lastSlash+1:] + } + case "_", ".": + // Not sure if this import is used - err on the side of caution. + return true + } + + ast.Walk(visitFn(func(n ast.Node) { + sel, ok := n.(*ast.SelectorExpr) + if ok && isTopName(sel.X, name) { + used = true + } + }), f) + + return +} + +type visitFn func(node ast.Node) + +func (fn visitFn) Visit(node ast.Node) ast.Visitor { + fn(node) + return fn +} + +// imports reports whether f has an import with the specified name and path. +func imports(f *ast.File, name, path string) bool { + for _, s := range f.Imports { + if importName(s) == name && importPath(s) == path { + return true + } + } + return false +} + +// importSpec returns the import spec if f imports path, +// or nil otherwise. +func importSpec(f *ast.File, path string) *ast.ImportSpec { + for _, s := range f.Imports { + if importPath(s) == path { + return s + } + } + return nil +} + +// importName returns the name of s, +// or "" if the import is not named. +func importName(s *ast.ImportSpec) string { + if s.Name == nil { + return "" + } + return s.Name.Name +} + +// importPath returns the unquoted import path of s, +// or "" if the path is not properly quoted. +func importPath(s *ast.ImportSpec) string { + t, err := strconv.Unquote(s.Path.Value) + if err != nil { + return "" + } + return t +} + +// declImports reports whether gen contains an import of path. +func declImports(gen *ast.GenDecl, path string) bool { + if gen.Tok != token.IMPORT { + return false + } + for _, spec := range gen.Specs { + impspec := spec.(*ast.ImportSpec) + if importPath(impspec) == path { + return true + } + } + return false +} + +// matchLen returns the length of the longest path segment prefix shared by x and y. +func matchLen(x, y string) int { + n := 0 + for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ { + if x[i] == '/' { + n++ + } + } + return n +} + +// isTopName returns true if n is a top-level unresolved identifier with the given name. +func isTopName(n ast.Expr, name string) bool { + id, ok := n.(*ast.Ident) + return ok && id.Name == name && id.Obj == nil +} + +// Imports returns the file imports grouped by paragraph. 
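+// A short, illustrative sketch of how the helpers in this file are commonly
+// combined (f and fset are assumed to come from go/parser; names are hypothetical):
+//
+//	astutil.AddNamedImport(fset, f, "pathpkg", "path") // import pathpkg "path"
+//	if !astutil.UsesImport(f, "os") {
+//		astutil.DeleteImport(fset, f, "os")
+//	}
+//	for i, group := range astutil.Imports(fset, f) {
+//		fmt.Printf("import group %d has %d specs\n", i, len(group))
+//	}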
+func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec { + var groups [][]*ast.ImportSpec + + for _, decl := range f.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if !ok || genDecl.Tok != token.IMPORT { + break + } + + group := []*ast.ImportSpec{} + + var lastLine int + for _, spec := range genDecl.Specs { + importSpec := spec.(*ast.ImportSpec) + pos := importSpec.Path.ValuePos + line := fset.Position(pos).Line + if lastLine > 0 && pos > 0 && line-lastLine > 1 { + groups = append(groups, group) + group = []*ast.ImportSpec{} + } + group = append(group, importSpec) + lastLine = line + } + groups = append(groups, group) + } + + return groups +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go new file mode 100644 index 0000000..58934f7 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go @@ -0,0 +1,486 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package astutil + +import ( + "fmt" + "go/ast" + "reflect" + "sort" +) + +// An ApplyFunc is invoked by Apply for each node n, even if n is nil, +// before and/or after the node's children, using a Cursor describing +// the current node and providing operations on it. +// +// The return value of ApplyFunc controls the syntax tree traversal. +// See Apply for details. +type ApplyFunc func(*Cursor) bool + +// Apply traverses a syntax tree recursively, starting with root, +// and calling pre and post for each node as described below. +// Apply returns the syntax tree, possibly modified. +// +// If pre is not nil, it is called for each node before the node's +// children are traversed (pre-order). If pre returns false, no +// children are traversed, and post is not called for that node. +// +// If post is not nil, and a prior call of pre didn't return false, +// post is called for each node after its children are traversed +// (post-order). If post returns false, traversal is terminated and +// Apply returns immediately. +// +// Only fields that refer to AST nodes are considered children; +// i.e., token.Pos, Scopes, Objects, and fields of basic types +// (strings, etc.) are ignored. +// +// Children are traversed in the order in which they appear in the +// respective node's struct definition. A package's files are +// traversed in the filenames' alphabetical order. +func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) { + parent := &struct{ ast.Node }{root} + defer func() { + if r := recover(); r != nil && r != abort { + panic(r) + } + result = parent.Node + }() + a := &application{pre: pre, post: post} + a.apply(parent, "Node", nil, root) + return +} + +var abort = new(int) // singleton, to signal termination of Apply + +// A Cursor describes a node encountered during Apply. +// Information about the node and its parent is available +// from the Node, Parent, Name, and Index methods. +// +// If p is a variable of type and value of the current parent node +// c.Parent(), and f is the field identifier with name c.Name(), +// the following invariants hold: +// +// p.f == c.Node() if c.Index() < 0 +// p.f[c.Index()] == c.Node() if c.Index() >= 0 +// +// The methods Replace, Delete, InsertBefore, and InsertAfter +// can be used to change the AST without disrupting Apply. +type Cursor struct { + parent ast.Node + name string + iter *iterator // valid if non-nil + node ast.Node +} + +// Node returns the current Node. 
+func (c *Cursor) Node() ast.Node { return c.node } + +// Parent returns the parent of the current Node. +func (c *Cursor) Parent() ast.Node { return c.parent } + +// Name returns the name of the parent Node field that contains the current Node. +// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns +// the filename for the current Node. +func (c *Cursor) Name() string { return c.name } + +// Index reports the index >= 0 of the current Node in the slice of Nodes that +// contains it, or a value < 0 if the current Node is not part of a slice. +// The index of the current node changes if InsertBefore is called while +// processing the current node. +func (c *Cursor) Index() int { + if c.iter != nil { + return c.iter.index + } + return -1 +} + +// field returns the current node's parent field value. +func (c *Cursor) field() reflect.Value { + return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name) +} + +// Replace replaces the current Node with n. +// The replacement node is not walked by Apply. +func (c *Cursor) Replace(n ast.Node) { + if _, ok := c.node.(*ast.File); ok { + file, ok := n.(*ast.File) + if !ok { + panic("attempt to replace *ast.File with non-*ast.File") + } + c.parent.(*ast.Package).Files[c.name] = file + return + } + + v := c.field() + if i := c.Index(); i >= 0 { + v = v.Index(i) + } + v.Set(reflect.ValueOf(n)) +} + +// Delete deletes the current Node from its containing slice. +// If the current Node is not part of a slice, Delete panics. +// As a special case, if the current node is a package file, +// Delete removes it from the package's Files map. +func (c *Cursor) Delete() { + if _, ok := c.node.(*ast.File); ok { + delete(c.parent.(*ast.Package).Files, c.name) + return + } + + i := c.Index() + if i < 0 { + panic("Delete node not contained in slice") + } + v := c.field() + l := v.Len() + reflect.Copy(v.Slice(i, l), v.Slice(i+1, l)) + v.Index(l - 1).Set(reflect.Zero(v.Type().Elem())) + v.SetLen(l - 1) + c.iter.step-- +} + +// InsertAfter inserts n after the current Node in its containing slice. +// If the current Node is not part of a slice, InsertAfter panics. +// Apply does not walk n. +func (c *Cursor) InsertAfter(n ast.Node) { + i := c.Index() + if i < 0 { + panic("InsertAfter node not contained in slice") + } + v := c.field() + v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem()))) + l := v.Len() + reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l)) + v.Index(i + 1).Set(reflect.ValueOf(n)) + c.iter.step++ +} + +// InsertBefore inserts n before the current Node in its containing slice. +// If the current Node is not part of a slice, InsertBefore panics. +// Apply will not walk n. +func (c *Cursor) InsertBefore(n ast.Node) { + i := c.Index() + if i < 0 { + panic("InsertBefore node not contained in slice") + } + v := c.field() + v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem()))) + l := v.Len() + reflect.Copy(v.Slice(i+1, l), v.Slice(i, l)) + v.Index(i).Set(reflect.ValueOf(n)) + c.iter.index++ +} + +// application carries all the shared data so we can pass it around cheaply. 
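+// A minimal, illustrative sketch of Apply with a Cursor (f is an assumed
+// parsed *ast.File; the identifier names are hypothetical):
+//
+//	astutil.Apply(f, func(c *astutil.Cursor) bool {
+//		if id, ok := c.Node().(*ast.Ident); ok && id.Name == "oldName" {
+//			c.Replace(&ast.Ident{Name: "newName", NamePos: id.NamePos})
+//		}
+//		return true // keep traversing
+//	}, nil)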
+type application struct { + pre, post ApplyFunc + cursor Cursor + iter iterator +} + +func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) { + // convert typed nil into untyped nil + if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() { + n = nil + } + + // avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead + saved := a.cursor + a.cursor.parent = parent + a.cursor.name = name + a.cursor.iter = iter + a.cursor.node = n + + if a.pre != nil && !a.pre(&a.cursor) { + a.cursor = saved + return + } + + // walk children + // (the order of the cases matches the order of the corresponding node types in go/ast) + switch n := n.(type) { + case nil: + // nothing to do + + // Comments and fields + case *ast.Comment: + // nothing to do + + case *ast.CommentGroup: + if n != nil { + a.applyList(n, "List") + } + + case *ast.Field: + a.apply(n, "Doc", nil, n.Doc) + a.applyList(n, "Names") + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Tag", nil, n.Tag) + a.apply(n, "Comment", nil, n.Comment) + + case *ast.FieldList: + a.applyList(n, "List") + + // Expressions + case *ast.BadExpr, *ast.Ident, *ast.BasicLit: + // nothing to do + + case *ast.Ellipsis: + a.apply(n, "Elt", nil, n.Elt) + + case *ast.FuncLit: + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Body", nil, n.Body) + + case *ast.CompositeLit: + a.apply(n, "Type", nil, n.Type) + a.applyList(n, "Elts") + + case *ast.ParenExpr: + a.apply(n, "X", nil, n.X) + + case *ast.SelectorExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Sel", nil, n.Sel) + + case *ast.IndexExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Index", nil, n.Index) + + case *ast.IndexListExpr: + a.apply(n, "X", nil, n.X) + a.applyList(n, "Indices") + + case *ast.SliceExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Low", nil, n.Low) + a.apply(n, "High", nil, n.High) + a.apply(n, "Max", nil, n.Max) + + case *ast.TypeAssertExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Type", nil, n.Type) + + case *ast.CallExpr: + a.apply(n, "Fun", nil, n.Fun) + a.applyList(n, "Args") + + case *ast.StarExpr: + a.apply(n, "X", nil, n.X) + + case *ast.UnaryExpr: + a.apply(n, "X", nil, n.X) + + case *ast.BinaryExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Y", nil, n.Y) + + case *ast.KeyValueExpr: + a.apply(n, "Key", nil, n.Key) + a.apply(n, "Value", nil, n.Value) + + // Types + case *ast.ArrayType: + a.apply(n, "Len", nil, n.Len) + a.apply(n, "Elt", nil, n.Elt) + + case *ast.StructType: + a.apply(n, "Fields", nil, n.Fields) + + case *ast.FuncType: + if tparams := n.TypeParams; tparams != nil { + a.apply(n, "TypeParams", nil, tparams) + } + a.apply(n, "Params", nil, n.Params) + a.apply(n, "Results", nil, n.Results) + + case *ast.InterfaceType: + a.apply(n, "Methods", nil, n.Methods) + + case *ast.MapType: + a.apply(n, "Key", nil, n.Key) + a.apply(n, "Value", nil, n.Value) + + case *ast.ChanType: + a.apply(n, "Value", nil, n.Value) + + // Statements + case *ast.BadStmt: + // nothing to do + + case *ast.DeclStmt: + a.apply(n, "Decl", nil, n.Decl) + + case *ast.EmptyStmt: + // nothing to do + + case *ast.LabeledStmt: + a.apply(n, "Label", nil, n.Label) + a.apply(n, "Stmt", nil, n.Stmt) + + case *ast.ExprStmt: + a.apply(n, "X", nil, n.X) + + case *ast.SendStmt: + a.apply(n, "Chan", nil, n.Chan) + a.apply(n, "Value", nil, n.Value) + + case *ast.IncDecStmt: + a.apply(n, "X", nil, n.X) + + case *ast.AssignStmt: + a.applyList(n, "Lhs") + a.applyList(n, "Rhs") + + case *ast.GoStmt: + a.apply(n, "Call", nil, n.Call) + + case *ast.DeferStmt: + 
a.apply(n, "Call", nil, n.Call) + + case *ast.ReturnStmt: + a.applyList(n, "Results") + + case *ast.BranchStmt: + a.apply(n, "Label", nil, n.Label) + + case *ast.BlockStmt: + a.applyList(n, "List") + + case *ast.IfStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Cond", nil, n.Cond) + a.apply(n, "Body", nil, n.Body) + a.apply(n, "Else", nil, n.Else) + + case *ast.CaseClause: + a.applyList(n, "List") + a.applyList(n, "Body") + + case *ast.SwitchStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Tag", nil, n.Tag) + a.apply(n, "Body", nil, n.Body) + + case *ast.TypeSwitchStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Assign", nil, n.Assign) + a.apply(n, "Body", nil, n.Body) + + case *ast.CommClause: + a.apply(n, "Comm", nil, n.Comm) + a.applyList(n, "Body") + + case *ast.SelectStmt: + a.apply(n, "Body", nil, n.Body) + + case *ast.ForStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Cond", nil, n.Cond) + a.apply(n, "Post", nil, n.Post) + a.apply(n, "Body", nil, n.Body) + + case *ast.RangeStmt: + a.apply(n, "Key", nil, n.Key) + a.apply(n, "Value", nil, n.Value) + a.apply(n, "X", nil, n.X) + a.apply(n, "Body", nil, n.Body) + + // Declarations + case *ast.ImportSpec: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Name", nil, n.Name) + a.apply(n, "Path", nil, n.Path) + a.apply(n, "Comment", nil, n.Comment) + + case *ast.ValueSpec: + a.apply(n, "Doc", nil, n.Doc) + a.applyList(n, "Names") + a.apply(n, "Type", nil, n.Type) + a.applyList(n, "Values") + a.apply(n, "Comment", nil, n.Comment) + + case *ast.TypeSpec: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Name", nil, n.Name) + if tparams := n.TypeParams; tparams != nil { + a.apply(n, "TypeParams", nil, tparams) + } + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Comment", nil, n.Comment) + + case *ast.BadDecl: + // nothing to do + + case *ast.GenDecl: + a.apply(n, "Doc", nil, n.Doc) + a.applyList(n, "Specs") + + case *ast.FuncDecl: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Recv", nil, n.Recv) + a.apply(n, "Name", nil, n.Name) + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Body", nil, n.Body) + + // Files and packages + case *ast.File: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Name", nil, n.Name) + a.applyList(n, "Decls") + // Don't walk n.Comments; they have either been walked already if + // they are Doc comments, or they can be easily walked explicitly. + + case *ast.Package: + // collect and sort names for reproducible behavior + var names []string + for name := range n.Files { + names = append(names, name) + } + sort.Strings(names) + for _, name := range names { + a.apply(n, name, nil, n.Files[name]) + } + + default: + panic(fmt.Sprintf("Apply: unexpected node type %T", n)) + } + + if a.post != nil && !a.post(&a.cursor) { + panic(abort) + } + + a.cursor = saved +} + +// An iterator controls iteration over a slice of nodes. 
+type iterator struct { + index, step int +} + +func (a *application) applyList(parent ast.Node, name string) { + // avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead + saved := a.iter + a.iter.index = 0 + for { + // must reload parent.name each time, since cursor modifications might change it + v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name) + if a.iter.index >= v.Len() { + break + } + + // element x may be nil in a bad AST - be cautious + var x ast.Node + if e := v.Index(a.iter.index); e.IsValid() { + x = e.Interface().(ast.Node) + } + + a.iter.step = 1 + a.apply(parent, name, &a.iter, x) + a.iter.index += a.iter.step + } + a.iter = saved +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/util.go b/vendor/golang.org/x/tools/go/ast/astutil/util.go new file mode 100644 index 0000000..919d530 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/util.go @@ -0,0 +1,18 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package astutil + +import "go/ast" + +// Unparen returns e with any enclosing parentheses stripped. +func Unparen(e ast.Expr) ast.Expr { + for { + p, ok := e.(*ast.ParenExpr) + if !ok { + return e + } + e = p.X + } +} diff --git a/vendor/golang.org/x/tools/go/buildutil/allpackages.go b/vendor/golang.org/x/tools/go/buildutil/allpackages.go new file mode 100644 index 0000000..dfb8cd6 --- /dev/null +++ b/vendor/golang.org/x/tools/go/buildutil/allpackages.go @@ -0,0 +1,195 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package buildutil provides utilities related to the go/build +// package in the standard library. +// +// All I/O is done via the build.Context file system interface, which must +// be concurrency-safe. +package buildutil // import "golang.org/x/tools/go/buildutil" + +import ( + "go/build" + "os" + "path/filepath" + "sort" + "strings" + "sync" +) + +// AllPackages returns the package path of each Go package in any source +// directory of the specified build context (e.g. $GOROOT or an element +// of $GOPATH). Errors are ignored. The results are sorted. +// All package paths are canonical, and thus may contain "/vendor/". +// +// The result may include import paths for directories that contain no +// *.go files, such as "archive" (in $GOROOT/src). +// +// All I/O is done via the build.Context file system interface, +// which must be concurrency-safe. +func AllPackages(ctxt *build.Context) []string { + var list []string + ForEachPackage(ctxt, func(pkg string, _ error) { + list = append(list, pkg) + }) + sort.Strings(list) + return list +} + +// ForEachPackage calls the found function with the package path of +// each Go package it finds in any source directory of the specified +// build context (e.g. $GOROOT or an element of $GOPATH). +// All package paths are canonical, and thus may contain "/vendor/". +// +// If the package directory exists but could not be read, the second +// argument to the found function provides the error. +// +// All I/O is done via the build.Context file system interface, +// which must be concurrency-safe. 
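+// An illustrative sketch; build.Default is the usual context to pass here:
+//
+//	for _, pkg := range buildutil.AllPackages(&build.Default) {
+//		fmt.Println(pkg)
+//	}
+//	buildutil.ForEachPackage(&build.Default, func(pkg string, err error) {
+//		if err != nil {
+//			log.Printf("%s: %v", pkg, err)
+//		}
+//	})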
+func ForEachPackage(ctxt *build.Context, found func(importPath string, err error)) { + ch := make(chan item) + + var wg sync.WaitGroup + for _, root := range ctxt.SrcDirs() { + root := root + wg.Add(1) + go func() { + allPackages(ctxt, root, ch) + wg.Done() + }() + } + go func() { + wg.Wait() + close(ch) + }() + + // All calls to found occur in the caller's goroutine. + for i := range ch { + found(i.importPath, i.err) + } +} + +type item struct { + importPath string + err error // (optional) +} + +// We use a process-wide counting semaphore to limit +// the number of parallel calls to ReadDir. +var ioLimit = make(chan bool, 20) + +func allPackages(ctxt *build.Context, root string, ch chan<- item) { + root = filepath.Clean(root) + string(os.PathSeparator) + + var wg sync.WaitGroup + + var walkDir func(dir string) + walkDir = func(dir string) { + // Avoid .foo, _foo, and testdata directory trees. + base := filepath.Base(dir) + if base == "" || base[0] == '.' || base[0] == '_' || base == "testdata" { + return + } + + pkg := filepath.ToSlash(strings.TrimPrefix(dir, root)) + + // Prune search if we encounter any of these import paths. + switch pkg { + case "builtin": + return + } + + ioLimit <- true + files, err := ReadDir(ctxt, dir) + <-ioLimit + if pkg != "" || err != nil { + ch <- item{pkg, err} + } + for _, fi := range files { + fi := fi + if fi.IsDir() { + wg.Add(1) + go func() { + walkDir(filepath.Join(dir, fi.Name())) + wg.Done() + }() + } + } + } + + walkDir(root) + wg.Wait() +} + +// ExpandPatterns returns the set of packages matched by patterns, +// which may have the following forms: +// +// golang.org/x/tools/cmd/guru # a single package +// golang.org/x/tools/... # all packages beneath dir +// ... # the entire workspace. +// +// Order is significant: a pattern preceded by '-' removes matching +// packages from the set. For example, these patterns match all encoding +// packages except encoding/xml: +// +// encoding/... -encoding/xml +// +// A trailing slash in a pattern is ignored. (Path components of Go +// package names are separated by slash, not the platform's path separator.) +func ExpandPatterns(ctxt *build.Context, patterns []string) map[string]bool { + // TODO(adonovan): support other features of 'go list': + // - "std"/"cmd"/"all" meta-packages + // - "..." not at the end of a pattern + // - relative patterns using "./" or "../" prefix + + pkgs := make(map[string]bool) + doPkg := func(pkg string, neg bool) { + if neg { + delete(pkgs, pkg) + } else { + pkgs[pkg] = true + } + } + + // Scan entire workspace if wildcards are present. + // TODO(adonovan): opt: scan only the necessary subtrees of the workspace. + var all []string + for _, arg := range patterns { + if strings.HasSuffix(arg, "...") { + all = AllPackages(ctxt) + break + } + } + + for _, arg := range patterns { + if arg == "" { + continue + } + + neg := arg[0] == '-' + if neg { + arg = arg[1:] + } + + if arg == "..." { + // ... matches all packages + for _, pkg := range all { + doPkg(pkg, neg) + } + } else if dir := strings.TrimSuffix(arg, "/..."); dir != arg { + // dir/... 
matches all packages beneath dir + for _, pkg := range all { + if strings.HasPrefix(pkg, dir) && + (len(pkg) == len(dir) || pkg[len(dir)] == '/') { + doPkg(pkg, neg) + } + } + } else { + // single package + doPkg(strings.TrimSuffix(arg, "/"), neg) + } + } + + return pkgs +} diff --git a/vendor/golang.org/x/tools/go/buildutil/fakecontext.go b/vendor/golang.org/x/tools/go/buildutil/fakecontext.go new file mode 100644 index 0000000..763d188 --- /dev/null +++ b/vendor/golang.org/x/tools/go/buildutil/fakecontext.go @@ -0,0 +1,111 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package buildutil + +import ( + "fmt" + "go/build" + "io" + "os" + "path" + "path/filepath" + "sort" + "strings" + "time" +) + +// FakeContext returns a build.Context for the fake file tree specified +// by pkgs, which maps package import paths to a mapping from file base +// names to contents. +// +// The fake Context has a GOROOT of "/go" and no GOPATH, and overrides +// the necessary file access methods to read from memory instead of the +// real file system. +// +// Unlike a real file tree, the fake one has only two levels---packages +// and files---so ReadDir("/go/src/") returns all packages under +// /go/src/ including, for instance, "math" and "math/big". +// ReadDir("/go/src/math/big") would return all the files in the +// "math/big" package. +func FakeContext(pkgs map[string]map[string]string) *build.Context { + clean := func(filename string) string { + f := path.Clean(filepath.ToSlash(filename)) + // Removing "/go/src" while respecting segment + // boundaries has this unfortunate corner case: + if f == "/go/src" { + return "" + } + return strings.TrimPrefix(f, "/go/src/") + } + + ctxt := build.Default // copy + ctxt.GOROOT = "/go" + ctxt.GOPATH = "" + ctxt.Compiler = "gc" + ctxt.IsDir = func(dir string) bool { + dir = clean(dir) + if dir == "" { + return true // needed by (*build.Context).SrcDirs + } + return pkgs[dir] != nil + } + ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) { + dir = clean(dir) + var fis []os.FileInfo + if dir == "" { + // enumerate packages + for importPath := range pkgs { + fis = append(fis, fakeDirInfo(importPath)) + } + } else { + // enumerate files of package + for basename := range pkgs[dir] { + fis = append(fis, fakeFileInfo(basename)) + } + } + sort.Sort(byName(fis)) + return fis, nil + } + ctxt.OpenFile = func(filename string) (io.ReadCloser, error) { + filename = clean(filename) + dir, base := path.Split(filename) + content, ok := pkgs[path.Clean(dir)][base] + if !ok { + return nil, fmt.Errorf("file not found: %s", filename) + } + return io.NopCloser(strings.NewReader(content)), nil + } + ctxt.IsAbsPath = func(path string) bool { + path = filepath.ToSlash(path) + // Don't rely on the default (filepath.Path) since on + // Windows, it reports virtual paths as non-absolute. 
+ return strings.HasPrefix(path, "/") + } + return &ctxt +} + +type byName []os.FileInfo + +func (s byName) Len() int { return len(s) } +func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] } +func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() } + +type fakeFileInfo string + +func (fi fakeFileInfo) Name() string { return string(fi) } +func (fakeFileInfo) Sys() interface{} { return nil } +func (fakeFileInfo) ModTime() time.Time { return time.Time{} } +func (fakeFileInfo) IsDir() bool { return false } +func (fakeFileInfo) Size() int64 { return 0 } +func (fakeFileInfo) Mode() os.FileMode { return 0644 } + +type fakeDirInfo string + +func (fd fakeDirInfo) Name() string { return string(fd) } +func (fakeDirInfo) Sys() interface{} { return nil } +func (fakeDirInfo) ModTime() time.Time { return time.Time{} } +func (fakeDirInfo) IsDir() bool { return true } +func (fakeDirInfo) Size() int64 { return 0 } +func (fakeDirInfo) Mode() os.FileMode { return 0755 } diff --git a/vendor/golang.org/x/tools/go/buildutil/overlay.go b/vendor/golang.org/x/tools/go/buildutil/overlay.go new file mode 100644 index 0000000..7e37165 --- /dev/null +++ b/vendor/golang.org/x/tools/go/buildutil/overlay.go @@ -0,0 +1,101 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package buildutil + +import ( + "bufio" + "bytes" + "fmt" + "go/build" + "io" + "path/filepath" + "strconv" + "strings" +) + +// OverlayContext overlays a build.Context with additional files from +// a map. Files in the map take precedence over other files. +// +// In addition to plain string comparison, two file names are +// considered equal if their base names match and their directory +// components point at the same directory on the file system. That is, +// symbolic links are followed for directories, but not files. +// +// A common use case for OverlayContext is to allow editors to pass in +// a set of unsaved, modified files. +// +// Currently, only the Context.OpenFile function will respect the +// overlay. This may change in the future. +func OverlayContext(orig *build.Context, overlay map[string][]byte) *build.Context { + // TODO(dominikh): Implement IsDir, HasSubdir and ReadDir + + rc := func(data []byte) (io.ReadCloser, error) { + return io.NopCloser(bytes.NewBuffer(data)), nil + } + + copy := *orig // make a copy + ctxt := &copy + ctxt.OpenFile = func(path string) (io.ReadCloser, error) { + // Fast path: names match exactly. + if content, ok := overlay[path]; ok { + return rc(content) + } + + // Slow path: check for same file under a different + // alias, perhaps due to a symbolic link. + for filename, content := range overlay { + if sameFile(path, filename) { + return rc(content) + } + } + + return OpenFile(orig, path) + } + return ctxt +} + +// ParseOverlayArchive parses an archive containing Go files and their +// contents. The result is intended to be used with OverlayContext. +// +// # Archive format +// +// The archive consists of a series of files. Each file consists of a +// name, a decimal file size and the file contents, separated by +// newlines. No newline follows after the file contents. +func ParseOverlayArchive(archive io.Reader) (map[string][]byte, error) { + overlay := make(map[string][]byte) + r := bufio.NewReader(archive) + for { + // Read file name.
+ filename, err := r.ReadString('\n') + if err != nil { + if err == io.EOF { + break // OK + } + return nil, fmt.Errorf("reading archive file name: %v", err) + } + filename = filepath.Clean(strings.TrimSpace(filename)) + + // Read file size. + sz, err := r.ReadString('\n') + if err != nil { + return nil, fmt.Errorf("reading size of archive file %s: %v", filename, err) + } + sz = strings.TrimSpace(sz) + size, err := strconv.ParseUint(sz, 10, 32) + if err != nil { + return nil, fmt.Errorf("parsing size of archive file %s: %v", filename, err) + } + + // Read file content. + content := make([]byte, size) + if _, err := io.ReadFull(r, content); err != nil { + return nil, fmt.Errorf("reading archive file %s: %v", filename, err) + } + overlay[filename] = content + } + + return overlay, nil +} diff --git a/vendor/golang.org/x/tools/go/buildutil/tags.go b/vendor/golang.org/x/tools/go/buildutil/tags.go new file mode 100644 index 0000000..32c8d14 --- /dev/null +++ b/vendor/golang.org/x/tools/go/buildutil/tags.go @@ -0,0 +1,100 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package buildutil + +// This duplicated logic must be kept in sync with that from go build: +// $GOROOT/src/cmd/go/internal/work/build.go (tagsFlag.Set) +// $GOROOT/src/cmd/go/internal/base/flag.go (StringsFlag.Set) +// $GOROOT/src/cmd/internal/quoted/quoted.go (isSpaceByte, Split) + +import ( + "fmt" + "strings" +) + +const TagsFlagDoc = "a list of `build tags` to consider satisfied during the build. " + + "For more information about build tags, see the description of " + + "build constraints in the documentation for the go/build package" + +// TagsFlag is an implementation of the flag.Value and flag.Getter interfaces that parses +// a flag value the same as go build's -tags flag and populates a []string slice. +// +// See $GOROOT/src/go/build/doc.go for description of build tags. +// See $GOROOT/src/cmd/go/doc.go for description of 'go build -tags' flag. +// +// Example: +// +// flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc) +type TagsFlag []string + +func (v *TagsFlag) Set(s string) error { + // See $GOROOT/src/cmd/go/internal/work/build.go (tagsFlag.Set) + // For compatibility with Go 1.12 and earlier, allow "-tags='a b c'" or even just "-tags='a'". + if strings.Contains(s, " ") || strings.Contains(s, "'") { + var err error + *v, err = splitQuotedFields(s) + if *v == nil { + *v = []string{} + } + return err + } + + // Starting in Go 1.13, the -tags flag is a comma-separated list of build tags. + *v = []string{} + for _, s := range strings.Split(s, ",") { + if s != "" { + *v = append(*v, s) + } + } + return nil +} + +func (v *TagsFlag) Get() interface{} { return *v } + +func splitQuotedFields(s string) ([]string, error) { + // See $GOROOT/src/cmd/internal/quoted/quoted.go (Split) + // This must remain in sync with that logic. + var f []string + for len(s) > 0 { + for len(s) > 0 && isSpaceByte(s[0]) { + s = s[1:] + } + if len(s) == 0 { + break + } + // Accepted quoted string. No unescaping inside. 
+ if s[0] == '"' || s[0] == '\'' { + quote := s[0] + s = s[1:] + i := 0 + for i < len(s) && s[i] != quote { + i++ + } + if i >= len(s) { + return nil, fmt.Errorf("unterminated %c string", quote) + } + f = append(f, s[:i]) + s = s[i+1:] + continue + } + i := 0 + for i < len(s) && !isSpaceByte(s[i]) { + i++ + } + f = append(f, s[:i]) + s = s[i:] + } + return f, nil +} + +func (v *TagsFlag) String() string { + return "" +} + +func isSpaceByte(c byte) bool { + // See $GOROOT/src/cmd/internal/quoted/quoted.go (isSpaceByte, Split) + // This list must remain in sync with that. + return c == ' ' || c == '\t' || c == '\n' || c == '\r' +} diff --git a/vendor/golang.org/x/tools/go/buildutil/util.go b/vendor/golang.org/x/tools/go/buildutil/util.go new file mode 100644 index 0000000..bee6390 --- /dev/null +++ b/vendor/golang.org/x/tools/go/buildutil/util.go @@ -0,0 +1,209 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package buildutil + +import ( + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/token" + "io" + "io/ioutil" + "os" + "path" + "path/filepath" + "strings" +) + +// ParseFile behaves like parser.ParseFile, +// but uses the build context's file system interface, if any. +// +// If file is not absolute (as defined by IsAbsPath), the (dir, file) +// components are joined using JoinPath; dir must be absolute. +// +// The displayPath function, if provided, is used to transform the +// filename that will be attached to the ASTs. +// +// TODO(adonovan): call this from go/loader.parseFiles when the tree thaws. +func ParseFile(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, file string, mode parser.Mode) (*ast.File, error) { + if !IsAbsPath(ctxt, file) { + file = JoinPath(ctxt, dir, file) + } + rd, err := OpenFile(ctxt, file) + if err != nil { + return nil, err + } + defer rd.Close() // ignore error + if displayPath != nil { + file = displayPath(file) + } + return parser.ParseFile(fset, file, rd, mode) +} + +// ContainingPackage returns the package containing filename. +// +// If filename is not absolute, it is interpreted relative to working directory dir. +// All I/O is via the build context's file system interface, if any. +// +// The '...Files []string' fields of the resulting build.Package are not +// populated (build.FindOnly mode). +func ContainingPackage(ctxt *build.Context, dir, filename string) (*build.Package, error) { + if !IsAbsPath(ctxt, filename) { + filename = JoinPath(ctxt, dir, filename) + } + + // We must not assume the file tree uses + // "/" always, + // `\` always, + // or os.PathSeparator (which varies by platform), + // but to make any progress, we are forced to assume that + // paths will not use `\` unless the PathSeparator + // is also `\`, thus we can rely on filepath.ToSlash for some sanity. + + dirSlash := path.Dir(filepath.ToSlash(filename)) + "/" + + // We assume that no source root (GOPATH[i] or GOROOT) contains any other. + for _, srcdir := range ctxt.SrcDirs() { + srcdirSlash := filepath.ToSlash(srcdir) + "/" + if importPath, ok := HasSubdir(ctxt, srcdirSlash, dirSlash); ok { + return ctxt.Import(importPath, dir, build.FindOnly) + } + } + + return nil, fmt.Errorf("can't find package containing %s", filename) +} + +// -- Effective methods of file system interface ------------------------- + +// (go/build.Context defines these as methods, but does not export them.) 
+ +// HasSubdir calls ctxt.HasSubdir (if not nil) or else uses +// the local file system to answer the question. +func HasSubdir(ctxt *build.Context, root, dir string) (rel string, ok bool) { + if f := ctxt.HasSubdir; f != nil { + return f(root, dir) + } + + // Try using paths we received. + if rel, ok = hasSubdir(root, dir); ok { + return + } + + // Try expanding symlinks and comparing + // expanded against unexpanded and + // expanded against expanded. + rootSym, _ := filepath.EvalSymlinks(root) + dirSym, _ := filepath.EvalSymlinks(dir) + + if rel, ok = hasSubdir(rootSym, dir); ok { + return + } + if rel, ok = hasSubdir(root, dirSym); ok { + return + } + return hasSubdir(rootSym, dirSym) +} + +func hasSubdir(root, dir string) (rel string, ok bool) { + const sep = string(filepath.Separator) + root = filepath.Clean(root) + if !strings.HasSuffix(root, sep) { + root += sep + } + + dir = filepath.Clean(dir) + if !strings.HasPrefix(dir, root) { + return "", false + } + + return filepath.ToSlash(dir[len(root):]), true +} + +// FileExists returns true if the specified file exists, +// using the build context's file system interface. +func FileExists(ctxt *build.Context, path string) bool { + if ctxt.OpenFile != nil { + r, err := ctxt.OpenFile(path) + if err != nil { + return false + } + r.Close() // ignore error + return true + } + _, err := os.Stat(path) + return err == nil +} + +// OpenFile behaves like os.Open, +// but uses the build context's file system interface, if any. +func OpenFile(ctxt *build.Context, path string) (io.ReadCloser, error) { + if ctxt.OpenFile != nil { + return ctxt.OpenFile(path) + } + return os.Open(path) +} + +// IsAbsPath behaves like filepath.IsAbs, +// but uses the build context's file system interface, if any. +func IsAbsPath(ctxt *build.Context, path string) bool { + if ctxt.IsAbsPath != nil { + return ctxt.IsAbsPath(path) + } + return filepath.IsAbs(path) +} + +// JoinPath behaves like filepath.Join, +// but uses the build context's file system interface, if any. +func JoinPath(ctxt *build.Context, path ...string) string { + if ctxt.JoinPath != nil { + return ctxt.JoinPath(path...) + } + return filepath.Join(path...) +} + +// IsDir behaves like os.Stat plus IsDir, +// but uses the build context's file system interface, if any. +func IsDir(ctxt *build.Context, path string) bool { + if ctxt.IsDir != nil { + return ctxt.IsDir(path) + } + fi, err := os.Stat(path) + return err == nil && fi.IsDir() +} + +// ReadDir behaves like ioutil.ReadDir, +// but uses the build context's file system interface, if any. +func ReadDir(ctxt *build.Context, path string) ([]os.FileInfo, error) { + if ctxt.ReadDir != nil { + return ctxt.ReadDir(path) + } + return ioutil.ReadDir(path) +} + +// SplitPathList behaves like filepath.SplitList, +// but uses the build context's file system interface, if any. +func SplitPathList(ctxt *build.Context, s string) []string { + if ctxt.SplitPathList != nil { + return ctxt.SplitPathList(s) + } + return filepath.SplitList(s) +} + +// sameFile returns true if x and y have the same basename and denote +// the same file. 
+func sameFile(x, y string) bool { + if path.Clean(x) == path.Clean(y) { + return true + } + if filepath.Base(x) == filepath.Base(y) { // (optimisation) + if xi, err := os.Stat(x); err == nil { + if yi, err := os.Stat(y); err == nil { + return os.SameFile(xi, yi) + } + } + } + return false +} diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo.go new file mode 100644 index 0000000..697974b --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/cgo/cgo.go @@ -0,0 +1,219 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cgo handles cgo preprocessing of files containing `import "C"`. +// +// DESIGN +// +// The approach taken is to run the cgo processor on the package's +// CgoFiles and parse the output, faking the filenames of the +// resulting ASTs so that the synthetic file containing the C types is +// called "C" (e.g. "~/go/src/net/C") and the preprocessed files +// have their original names (e.g. "~/go/src/net/cgo_unix.go"), +// not the names of the actual temporary files. +// +// The advantage of this approach is its fidelity to 'go build'. The +// downside is that the token.Position.Offset for each AST node is +// incorrect, being an offset within the temporary file. Line numbers +// should still be correct because of the //line comments. +// +// The logic of this file is mostly plundered from the 'go build' +// tool, which also invokes the cgo preprocessor. +// +// +// REJECTED ALTERNATIVE +// +// An alternative approach that we explored is to extend go/types' +// Importer mechanism to provide the identity of the importing package +// so that each time `import "C"` appears it resolves to a different +// synthetic package containing just the objects needed in that case. +// The loader would invoke cgo but parse only the cgo_types.go file +// defining the package-level objects, discarding the other files +// resulting from preprocessing. +// +// The benefit of this approach would have been that source-level +// syntax information would correspond exactly to the original cgo +// file, with no preprocessing involved, making source tools like +// godoc, guru, and eg happy. However, the approach was rejected +// due to the additional complexity it would impose on go/types. (It +// made for a beautiful demo, though.) +// +// cgo files, despite their *.go extension, are not legal Go source +// files per the specification since they may refer to unexported +// members of package "C" such as C.int. Also, a function such as +// C.getpwent has in effect two types, one matching its C type and one +// which additionally returns (errno C.int). The cgo preprocessor +// uses name mangling to distinguish these two functions in the +// processed code, but go/types would need to duplicate this logic in +// its handling of function calls, analogous to the treatment of map +// lookups in which y=m[k] and y,ok=m[k] are both legal. + +package cgo + +import ( + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/token" + "log" + "os" + "os/exec" + "path/filepath" + "regexp" + "strings" +) + +// ProcessFiles invokes the cgo preprocessor on bp.CgoFiles, parses +// the output and returns the resulting ASTs. 
+func ProcessFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) { + tmpdir, err := os.MkdirTemp("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C") + if err != nil { + return nil, err + } + defer os.RemoveAll(tmpdir) + + pkgdir := bp.Dir + if DisplayPath != nil { + pkgdir = DisplayPath(pkgdir) + } + + cgoFiles, cgoDisplayFiles, err := Run(bp, pkgdir, tmpdir, false) + if err != nil { + return nil, err + } + var files []*ast.File + for i := range cgoFiles { + rd, err := os.Open(cgoFiles[i]) + if err != nil { + return nil, err + } + display := filepath.Join(bp.Dir, cgoDisplayFiles[i]) + f, err := parser.ParseFile(fset, display, rd, mode) + rd.Close() + if err != nil { + return nil, err + } + files = append(files, f) + } + return files, nil +} + +var cgoRe = regexp.MustCompile(`[/\\:]`) + +// Run invokes the cgo preprocessor on bp.CgoFiles and returns two +// lists of files: the resulting processed files (in temporary +// directory tmpdir) and the corresponding names of the unprocessed files. +// +// Run is adapted from (*builder).cgo in +// $GOROOT/src/cmd/go/build.go, but these features are unsupported: +// Objective C, CGOPKGPATH, CGO_FLAGS. +// +// If useabs is set to true, absolute paths of the bp.CgoFiles will be passed in +// to the cgo preprocessor. This in turn will set the // line comments +// referring to those files to use absolute paths. This is needed for +// go/packages using the legacy go list support so it is able to find +// the original files. +func Run(bp *build.Package, pkgdir, tmpdir string, useabs bool) (files, displayFiles []string, err error) { + cgoCPPFLAGS, _, _, _ := cflags(bp, true) + _, cgoexeCFLAGS, _, _ := cflags(bp, false) + + if len(bp.CgoPkgConfig) > 0 { + pcCFLAGS, err := pkgConfigFlags(bp) + if err != nil { + return nil, nil, err + } + cgoCPPFLAGS = append(cgoCPPFLAGS, pcCFLAGS...) + } + + // Allows including _cgo_export.h from .[ch] files in the package. + cgoCPPFLAGS = append(cgoCPPFLAGS, "-I", tmpdir) + + // _cgo_gotypes.go (displayed "C") contains the type definitions. + files = append(files, filepath.Join(tmpdir, "_cgo_gotypes.go")) + displayFiles = append(displayFiles, "C") + for _, fn := range bp.CgoFiles { + // "foo.cgo1.go" (displayed "foo.go") is the processed Go source. + f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_") + files = append(files, filepath.Join(tmpdir, f+"cgo1.go")) + displayFiles = append(displayFiles, fn) + } + + var cgoflags []string + if bp.Goroot && bp.ImportPath == "runtime/cgo" { + cgoflags = append(cgoflags, "-import_runtime_cgo=false") + } + if bp.Goroot && bp.ImportPath == "runtime/race" || bp.ImportPath == "runtime/cgo" { + cgoflags = append(cgoflags, "-import_syscall=false") + } + + var cgoFiles []string = bp.CgoFiles + if useabs { + cgoFiles = make([]string, len(bp.CgoFiles)) + for i := range cgoFiles { + cgoFiles[i] = filepath.Join(pkgdir, bp.CgoFiles[i]) + } + } + + args := stringList( + "go", "tool", "cgo", "-objdir", tmpdir, cgoflags, "--", + cgoCPPFLAGS, cgoexeCFLAGS, cgoFiles, + ) + if false { + log.Printf("Running cgo for package %q: %s (dir=%s)", bp.ImportPath, args, pkgdir) + } + cmd := exec.Command(args[0], args[1:]...) 
+ cmd.Dir = pkgdir + cmd.Env = append(os.Environ(), "PWD="+pkgdir) + cmd.Stdout = os.Stderr + cmd.Stderr = os.Stderr + if err := cmd.Run(); err != nil { + return nil, nil, fmt.Errorf("cgo failed: %s: %s", args, err) + } + + return files, displayFiles, nil +} + +// -- unmodified from 'go build' --------------------------------------- + +// Return the flags to use when invoking the C or C++ compilers, or cgo. +func cflags(p *build.Package, def bool) (cppflags, cflags, cxxflags, ldflags []string) { + var defaults string + if def { + defaults = "-g -O2" + } + + cppflags = stringList(envList("CGO_CPPFLAGS", ""), p.CgoCPPFLAGS) + cflags = stringList(envList("CGO_CFLAGS", defaults), p.CgoCFLAGS) + cxxflags = stringList(envList("CGO_CXXFLAGS", defaults), p.CgoCXXFLAGS) + ldflags = stringList(envList("CGO_LDFLAGS", defaults), p.CgoLDFLAGS) + return +} + +// envList returns the value of the given environment variable broken +// into fields, using the default value when the variable is empty. +func envList(key, def string) []string { + v := os.Getenv(key) + if v == "" { + v = def + } + return strings.Fields(v) +} + +// stringList's arguments should be a sequence of string or []string values. +// stringList flattens them into a single []string. +func stringList(args ...interface{}) []string { + var x []string + for _, arg := range args { + switch arg := arg.(type) { + case []string: + x = append(x, arg...) + case string: + x = append(x, arg) + default: + panic("stringList: invalid argument") + } + } + return x +} diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go new file mode 100644 index 0000000..2455be5 --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go @@ -0,0 +1,42 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cgo + +import ( + "errors" + "fmt" + "go/build" + "os/exec" + "strings" +) + +// pkgConfig runs pkg-config with the specified arguments and returns the flags it prints. +func pkgConfig(mode string, pkgs []string) (flags []string, err error) { + cmd := exec.Command("pkg-config", append([]string{mode}, pkgs...)...) + out, err := cmd.Output() + if err != nil { + s := fmt.Sprintf("%s failed: %v", strings.Join(cmd.Args, " "), err) + if len(out) > 0 { + s = fmt.Sprintf("%s: %s", s, out) + } + if err, ok := err.(*exec.ExitError); ok && len(err.Stderr) > 0 { + s = fmt.Sprintf("%s\nstderr:\n%s", s, err.Stderr) + } + return nil, errors.New(s) + } + if len(out) > 0 { + flags = strings.Fields(string(out)) + } + return +} + +// pkgConfigFlags calls pkg-config if needed and returns the cflags +// needed to build the package. +func pkgConfigFlags(p *build.Package) (cflags []string, err error) { + if len(p.CgoPkgConfig) == 0 { + return nil, nil + } + return pkgConfig("--cflags", p.CgoPkgConfig) +} diff --git a/vendor/golang.org/x/tools/go/loader/doc.go b/vendor/golang.org/x/tools/go/loader/doc.go new file mode 100644 index 0000000..e35b1fd --- /dev/null +++ b/vendor/golang.org/x/tools/go/loader/doc.go @@ -0,0 +1,202 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package loader loads a complete Go program from source code, parsing +// and type-checking the initial packages plus their transitive closure +// of dependencies. 
The ASTs and the derived facts are retained for +// later use. +// +// Deprecated: This is an older API and does not have support +// for modules. Use golang.org/x/tools/go/packages instead. +// +// The package defines two primary types: Config, which specifies a +// set of initial packages to load and various other options; and +// Program, which is the result of successfully loading the packages +// specified by a configuration. +// +// The configuration can be set directly, but *Config provides various +// convenience methods to simplify the common cases, each of which can +// be called any number of times. Finally, these are followed by a +// call to Load() to actually load and type-check the program. +// +// var conf loader.Config +// +// // Use the command-line arguments to specify +// // a set of initial packages to load from source. +// // See FromArgsUsage for help. +// rest, err := conf.FromArgs(os.Args[1:], wantTests) +// +// // Parse the specified files and create an ad hoc package with path "foo". +// // All files must have the same 'package' declaration. +// conf.CreateFromFilenames("foo", "foo.go", "bar.go") +// +// // Create an ad hoc package with path "foo" from +// // the specified already-parsed files. +// // All ASTs must have the same 'package' declaration. +// conf.CreateFromFiles("foo", parsedFiles) +// +// // Add "runtime" to the set of packages to be loaded. +// conf.Import("runtime") +// +// // Adds "fmt" and "fmt_test" to the set of packages +// // to be loaded. "fmt" will include *_test.go files. +// conf.ImportWithTests("fmt") +// +// // Finally, load all the packages specified by the configuration. +// prog, err := conf.Load() +// +// See examples_test.go for examples of API usage. +// +// # CONCEPTS AND TERMINOLOGY +// +// The WORKSPACE is the set of packages accessible to the loader. The +// workspace is defined by Config.Build, a *build.Context. The +// default context treats subdirectories of $GOROOT and $GOPATH as +// packages, but this behavior may be overridden. +// +// An AD HOC package is one specified as a set of source files on the +// command line. In the simplest case, it may consist of a single file +// such as $GOROOT/src/net/http/triv.go. +// +// EXTERNAL TEST packages are those comprised of a set of *_test.go +// files all with the same 'package foo_test' declaration, all in the +// same directory. (go/build.Package calls these files XTestFiles.) +// +// An IMPORTABLE package is one that can be referred to by some import +// spec. Every importable package is uniquely identified by its +// PACKAGE PATH or just PATH, a string such as "fmt", "encoding/json", +// or "cmd/vendor/golang.org/x/arch/x86/x86asm". A package path +// typically denotes a subdirectory of the workspace. +// +// An import declaration uses an IMPORT PATH to refer to a package. +// Most import declarations use the package path as the import path. +// +// Due to VENDORING (https://golang.org/s/go15vendor), the +// interpretation of an import path may depend on the directory in which +// it appears. To resolve an import path to a package path, go/build +// must search the enclosing directories for a subdirectory named +// "vendor". +// +// ad hoc packages and external test packages are NON-IMPORTABLE. The +// path of an ad hoc package is inferred from the package +// declarations of its files and is therefore not a unique package key. +// For example, Config.CreatePkgs may specify two initial ad hoc +// packages, both with path "main". 
+// +// An AUGMENTED package is an importable package P plus all the +// *_test.go files with same 'package foo' declaration as P. +// (go/build.Package calls these files TestFiles.) +// +// The INITIAL packages are those specified in the configuration. A +// DEPENDENCY is a package loaded to satisfy an import in an initial +// package or another dependency. +package loader + +// IMPLEMENTATION NOTES +// +// 'go test', in-package test files, and import cycles +// --------------------------------------------------- +// +// An external test package may depend upon members of the augmented +// package that are not in the unaugmented package, such as functions +// that expose internals. (See bufio/export_test.go for an example.) +// So, the loader must ensure that for each external test package +// it loads, it also augments the corresponding non-test package. +// +// The import graph over n unaugmented packages must be acyclic; the +// import graph over n-1 unaugmented packages plus one augmented +// package must also be acyclic. ('go test' relies on this.) But the +// import graph over n augmented packages may contain cycles. +// +// First, all the (unaugmented) non-test packages and their +// dependencies are imported in the usual way; the loader reports an +// error if it detects an import cycle. +// +// Then, each package P for which testing is desired is augmented by +// the list P' of its in-package test files, by calling +// (*types.Checker).Files. This arrangement ensures that P' may +// reference definitions within P, but P may not reference definitions +// within P'. Furthermore, P' may import any other package, including +// ones that depend upon P, without an import cycle error. +// +// Consider two packages A and B, both of which have lists of +// in-package test files we'll call A' and B', and which have the +// following import graph edges: +// B imports A +// B' imports A +// A' imports B +// This last edge would be expected to create an error were it not +// for the special type-checking discipline above. +// Cycles of size greater than two are possible. For example: +// compress/bzip2/bzip2_test.go (package bzip2) imports "io/ioutil" +// io/ioutil/tempfile_test.go (package ioutil) imports "regexp" +// regexp/exec_test.go (package regexp) imports "compress/bzip2" +// +// +// Concurrency +// ----------- +// +// Let us define the import dependency graph as follows. Each node is a +// list of files passed to (Checker).Files at once. Many of these lists +// are the production code of an importable Go package, so those nodes +// are labelled by the package's path. The remaining nodes are +// ad hoc packages and lists of in-package *_test.go files that augment +// an importable package; those nodes have no label. +// +// The edges of the graph represent import statements appearing within a +// file. An edge connects a node (a list of files) to the node it +// imports, which is importable and thus always labelled. +// +// Loading is controlled by this dependency graph. +// +// To reduce I/O latency, we start loading a package's dependencies +// asynchronously as soon as we've parsed its files and enumerated its +// imports (scanImports). This performs a preorder traversal of the +// import dependency graph. +// +// To exploit hardware parallelism, we type-check unrelated packages in +// parallel, where "unrelated" means not ordered by the partial order of +// the import dependency graph. 
+// +// We use a concurrency-safe non-blocking cache (importer.imported) to +// record the results of type-checking, whether success or failure. An +// entry is created in this cache by startLoad the first time the +// package is imported. The first goroutine to request an entry becomes +// responsible for completing the task and broadcasting completion to +// subsequent requestors, which block until then. +// +// Type checking occurs in (parallel) postorder: we cannot type-check a +// set of files until we have loaded and type-checked all of their +// immediate dependencies (and thus all of their transitive +// dependencies). If the input were guaranteed free of import cycles, +// this would be trivial: we could simply wait for completion of the +// dependencies and then invoke the typechecker. +// +// But as we saw in the 'go test' section above, some cycles in the +// import graph over packages are actually legal, so long as the +// cycle-forming edge originates in the in-package test files that +// augment the package. This explains why the nodes of the import +// dependency graph are not packages, but lists of files: the unlabelled +// nodes avoid the cycles. Consider packages A and B where B imports A +// and A's in-package tests AT import B. The naively constructed import +// graph over packages would contain a cycle (A+AT) --> B --> (A+AT) but +// the graph over lists of files is AT --> B --> A, where AT is an +// unlabelled node. +// +// Awaiting completion of the dependencies in a cyclic graph would +// deadlock, so we must materialize the import dependency graph (as +// importer.graph) and check whether each import edge forms a cycle. If +// x imports y, and the graph already contains a path from y to x, then +// there is an import cycle, in which case the processing of x must not +// wait for the completion of processing of y. +// +// When the type-checker makes a callback (doImport) to the loader for a +// given import edge, there are two possible cases. In the normal case, +// the dependency has already been completely type-checked; doImport +// does a cache lookup and returns it. In the cyclic case, the entry in +// the cache is still necessarily incomplete, indicating a cycle. We +// perform the cycle check again to obtain the error message, and return +// the error. +// +// The result of using concurrency is about a 2.5x speedup for stdlib_test. diff --git a/vendor/golang.org/x/tools/go/loader/loader.go b/vendor/golang.org/x/tools/go/loader/loader.go new file mode 100644 index 0000000..013c0f5 --- /dev/null +++ b/vendor/golang.org/x/tools/go/loader/loader.go @@ -0,0 +1,1066 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package loader + +// See doc.go for package documentation and implementation notes. + +import ( + "errors" + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/token" + "go/types" + "os" + "path/filepath" + "sort" + "strings" + "sync" + "time" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/internal/cgo" + "golang.org/x/tools/internal/versions" +) + +var ignoreVendor build.ImportMode + +const trace = false // show timing info for type-checking + +// Config specifies the configuration for loading a whole program from +// Go source code. +// The zero value for Config is a ready-to-use default configuration. +type Config struct { + // Fset is the file set for the parser to use when loading the + // program. 
If nil, it may be lazily initialized by any + // method of Config. + Fset *token.FileSet + + // ParserMode specifies the mode to be used by the parser when + // loading source packages. + ParserMode parser.Mode + + // TypeChecker contains options relating to the type checker. + // + // The supplied IgnoreFuncBodies is not used; the effective + // value comes from the TypeCheckFuncBodies func below. + // The supplied Import function is not used either. + TypeChecker types.Config + + // TypeCheckFuncBodies is a predicate over package paths. + // A package for which the predicate is false will + // have its package-level declarations type checked, but not + // its function bodies; this can be used to quickly load + // dependencies from source. If nil, all func bodies are type + // checked. + TypeCheckFuncBodies func(path string) bool + + // If Build is non-nil, it is used to locate source packages. + // Otherwise &build.Default is used. + // + // By default, cgo is invoked to preprocess Go files that + // import the fake package "C". This behaviour can be + // disabled by setting CGO_ENABLED=0 in the environment prior + // to startup, or by setting Build.CgoEnabled=false. + Build *build.Context + + // The current directory, used for resolving relative package + // references such as "./go/loader". If empty, os.Getwd will be + // used instead. + Cwd string + + // If DisplayPath is non-nil, it is used to transform each + // file name obtained from Build.Import(). This can be used + // to prevent a virtualized build.Config's file names from + // leaking into the user interface. + DisplayPath func(path string) string + + // If AllowErrors is true, Load will return a Program even + // if some of the its packages contained I/O, parser or type + // errors; such errors are accessible via PackageInfo.Errors. If + // false, Load will fail if any package had an error. + AllowErrors bool + + // CreatePkgs specifies a list of non-importable initial + // packages to create. The resulting packages will appear in + // the corresponding elements of the Program.Created slice. + CreatePkgs []PkgSpec + + // ImportPkgs specifies a set of initial packages to load. + // The map keys are package paths. + // + // The map value indicates whether to load tests. If true, Load + // will add and type-check two lists of files to the package: + // non-test files followed by in-package *_test.go files. In + // addition, it will append the external test package (if any) + // to Program.Created. + ImportPkgs map[string]bool + + // FindPackage is called during Load to create the build.Package + // for a given import path from a given directory. + // If FindPackage is nil, (*build.Context).Import is used. + // A client may use this hook to adapt to a proprietary build + // system that does not follow the "go build" layout + // conventions, for example. + // + // It must be safe to call concurrently from multiple goroutines. + FindPackage func(ctxt *build.Context, importPath, fromDir string, mode build.ImportMode) (*build.Package, error) + + // AfterTypeCheck is called immediately after a list of files + // has been type-checked and appended to info.Files. + // + // This optional hook function is the earliest opportunity for + // the client to observe the output of the type checker, + // which may be useful to reduce analysis latency when loading + // a large program. 
+ // + // The function is permitted to modify info.Info, for instance + // to clear data structures that are no longer needed, which can + // dramatically reduce peak memory consumption. + // + // The function may be called twice for the same PackageInfo: + // once for the files of the package and again for the + // in-package test files. + // + // It must be safe to call concurrently from multiple goroutines. + AfterTypeCheck func(info *PackageInfo, files []*ast.File) +} + +// A PkgSpec specifies a non-importable package to be created by Load. +// Files are processed first, but typically only one of Files and +// Filenames is provided. The path needn't be globally unique. +// +// For vendoring purposes, the package's directory is the one that +// contains the first file. +type PkgSpec struct { + Path string // package path ("" => use package declaration) + Files []*ast.File // ASTs of already-parsed files + Filenames []string // names of files to be parsed +} + +// A Program is a Go program loaded from source as specified by a Config. +type Program struct { + Fset *token.FileSet // the file set for this program + + // Created[i] contains the initial package whose ASTs or + // filenames were supplied by Config.CreatePkgs[i], followed by + // the external test package, if any, of each package in + // Config.ImportPkgs ordered by ImportPath. + // + // NOTE: these files must not import "C". Cgo preprocessing is + // only performed on imported packages, not ad hoc packages. + // + // TODO(adonovan): we need to copy and adapt the logic of + // goFilesPackage (from $GOROOT/src/cmd/go/build.go) and make + // Config.Import and Config.Create methods return the same kind + // of entity, essentially a build.Package. + // Perhaps we can even reuse that type directly. + Created []*PackageInfo + + // Imported contains the initially imported packages, + // as specified by Config.ImportPkgs. + Imported map[string]*PackageInfo + + // AllPackages contains the PackageInfo of every package + // encountered by Load: all initial packages and all + // dependencies, including incomplete ones. + AllPackages map[*types.Package]*PackageInfo + + // importMap is the canonical mapping of package paths to + // packages. It contains all Imported initial packages, but not + // Created ones, and all imported dependencies. + importMap map[string]*types.Package +} + +// PackageInfo holds the ASTs and facts derived by the type-checker +// for a single package. +// +// Not mutated once exposed via the API. +type PackageInfo struct { + Pkg *types.Package + Importable bool // true if 'import "Pkg.Path()"' would resolve to this + TransitivelyErrorFree bool // true if Pkg and all its dependencies are free of errors + Files []*ast.File // syntax trees for the package's files + Errors []error // non-nil if the package had errors + types.Info // type-checker deductions. + dir string // package directory + + checker *types.Checker // transient type-checker state + errorFunc func(error) +} + +func (info *PackageInfo) String() string { return info.Pkg.Path() } + +func (info *PackageInfo) appendError(err error) { + if info.errorFunc != nil { + info.errorFunc(err) + } else { + fmt.Fprintln(os.Stderr, err) + } + info.Errors = append(info.Errors, err) +} + +func (conf *Config) fset() *token.FileSet { + if conf.Fset == nil { + conf.Fset = token.NewFileSet() + } + return conf.Fset +} + +// ParseFile is a convenience function (intended for testing) that invokes +// the parser using the Config's FileSet, which is initialized if nil. 
+// +// src specifies the parser input as a string, []byte, or io.Reader, and +// filename is its apparent name. If src is nil, the contents of +// filename are read from the file system. +func (conf *Config) ParseFile(filename string, src interface{}) (*ast.File, error) { + // TODO(adonovan): use conf.build() etc like parseFiles does. + return parser.ParseFile(conf.fset(), filename, src, conf.ParserMode) +} + +// FromArgsUsage is a partial usage message that applications calling +// FromArgs may wish to include in their -help output. +const FromArgsUsage = ` + is a list of arguments denoting a set of initial packages. +It may take one of two forms: + +1. A list of *.go source files. + + All of the specified files are loaded, parsed and type-checked + as a single package. All the files must belong to the same directory. + +2. A list of import paths, each denoting a package. + + The package's directory is found relative to the $GOROOT and + $GOPATH using similar logic to 'go build', and the *.go files in + that directory are loaded, parsed and type-checked as a single + package. + + In addition, all *_test.go files in the directory are then loaded + and parsed. Those files whose package declaration equals that of + the non-*_test.go files are included in the primary package. Test + files whose package declaration ends with "_test" are type-checked + as another package, the 'external' test package, so that a single + import path may denote two packages. (Whether this behaviour is + enabled is tool-specific, and may depend on additional flags.) + +A '--' argument terminates the list of packages. +` + +// FromArgs interprets args as a set of initial packages to load from +// source and updates the configuration. It returns the list of +// unconsumed arguments. +// +// It is intended for use in command-line interfaces that require a +// set of initial packages to be specified; see FromArgsUsage message +// for details. +// +// Only superficial errors are reported at this stage; errors dependent +// on I/O are detected during Load. +func (conf *Config) FromArgs(args []string, xtest bool) ([]string, error) { + var rest []string + for i, arg := range args { + if arg == "--" { + rest = args[i+1:] + args = args[:i] + break // consume "--" and return the remaining args + } + } + + if len(args) > 0 && strings.HasSuffix(args[0], ".go") { + // Assume args is a list of a *.go files + // denoting a single ad hoc package. + for _, arg := range args { + if !strings.HasSuffix(arg, ".go") { + return nil, fmt.Errorf("named files must be .go files: %s", arg) + } + } + conf.CreateFromFilenames("", args...) + } else { + // Assume args are directories each denoting a + // package and (perhaps) an external test, iff xtest. + for _, arg := range args { + if xtest { + conf.ImportWithTests(arg) + } else { + conf.Import(arg) + } + } + } + + return rest, nil +} + +// CreateFromFilenames is a convenience function that adds +// a conf.CreatePkgs entry to create a package of the specified *.go +// files. +func (conf *Config) CreateFromFilenames(path string, filenames ...string) { + conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Filenames: filenames}) +} + +// CreateFromFiles is a convenience function that adds a conf.CreatePkgs +// entry to create package of the specified path and parsed files. 
+func (conf *Config) CreateFromFiles(path string, files ...*ast.File) { + conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Files: files}) +} + +// ImportWithTests is a convenience function that adds path to +// ImportPkgs, the set of initial source packages located relative to +// $GOPATH. The package will be augmented by any *_test.go files in +// its directory that contain a "package x" (not "package x_test") +// declaration. +// +// In addition, if any *_test.go files contain a "package x_test" +// declaration, an additional package comprising just those files will +// be added to CreatePkgs. +func (conf *Config) ImportWithTests(path string) { conf.addImport(path, true) } + +// Import is a convenience function that adds path to ImportPkgs, the +// set of initial packages that will be imported from source. +func (conf *Config) Import(path string) { conf.addImport(path, false) } + +func (conf *Config) addImport(path string, tests bool) { + if path == "C" { + return // ignore; not a real package + } + if conf.ImportPkgs == nil { + conf.ImportPkgs = make(map[string]bool) + } + conf.ImportPkgs[path] = conf.ImportPkgs[path] || tests +} + +// PathEnclosingInterval returns the PackageInfo and ast.Node that +// contain source interval [start, end), and all the node's ancestors +// up to the AST root. It searches all ast.Files of all packages in prog. +// exact is defined as for astutil.PathEnclosingInterval. +// +// The zero value is returned if not found. +func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageInfo, path []ast.Node, exact bool) { + for _, info := range prog.AllPackages { + for _, f := range info.Files { + if f.Pos() == token.NoPos { + // This can happen if the parser saw + // too many errors and bailed out. + // (Use parser.AllErrors to prevent that.) + continue + } + if !tokenFileContainsPos(prog.Fset.File(f.Pos()), start) { + continue + } + if path, exact := astutil.PathEnclosingInterval(f, start, end); path != nil { + return info, path, exact + } + } + } + return nil, nil, false +} + +// InitialPackages returns a new slice containing the set of initial +// packages (Created + Imported) in unspecified order. +func (prog *Program) InitialPackages() []*PackageInfo { + infos := make([]*PackageInfo, 0, len(prog.Created)+len(prog.Imported)) + infos = append(infos, prog.Created...) + for _, info := range prog.Imported { + infos = append(infos, info) + } + return infos +} + +// Package returns the ASTs and results of type checking for the +// specified package. +func (prog *Program) Package(path string) *PackageInfo { + if info, ok := prog.AllPackages[prog.importMap[path]]; ok { + return info + } + for _, info := range prog.Created { + if path == info.Pkg.Path() { + return info + } + } + return nil +} + +// ---------- Implementation ---------- + +// importer holds the working state of the algorithm. +type importer struct { + conf *Config // the client configuration + start time.Time // for logging + + progMu sync.Mutex // guards prog + prog *Program // the resulting program + + // findpkg is a memoization of FindPackage. + findpkgMu sync.Mutex // guards findpkg + findpkg map[findpkgKey]*findpkgValue + + importedMu sync.Mutex // guards imported + imported map[string]*importInfo // all imported packages (incl. failures) by import path + + // import dependency graph: graph[x][y] => x imports y + // + // Since non-importable packages cannot be cyclic, we ignore + // their imports, thus we only need the subgraph over importable + // packages. 
Nodes are identified by their import paths. + graphMu sync.Mutex + graph map[string]map[string]bool +} + +type findpkgKey struct { + importPath string + fromDir string + mode build.ImportMode +} + +type findpkgValue struct { + ready chan struct{} // closed to broadcast readiness + bp *build.Package + err error +} + +// importInfo tracks the success or failure of a single import. +// +// Upon completion, exactly one of info and err is non-nil: +// info on successful creation of a package, err otherwise. +// A successful package may still contain type errors. +type importInfo struct { + path string // import path + info *PackageInfo // results of typechecking (including errors) + complete chan struct{} // closed to broadcast that info is set. +} + +// awaitCompletion blocks until ii is complete, +// i.e. the info field is safe to inspect. +func (ii *importInfo) awaitCompletion() { + <-ii.complete // wait for close +} + +// Complete marks ii as complete. +// Its info and err fields will not be subsequently updated. +func (ii *importInfo) Complete(info *PackageInfo) { + if info == nil { + panic("info == nil") + } + ii.info = info + close(ii.complete) +} + +type importError struct { + path string // import path + err error // reason for failure to create a package +} + +// Load creates the initial packages specified by conf.{Create,Import}Pkgs, +// loading their dependencies packages as needed. +// +// On success, Load returns a Program containing a PackageInfo for +// each package. On failure, it returns an error. +// +// If AllowErrors is true, Load will return a Program even if some +// packages contained I/O, parser or type errors, or if dependencies +// were missing. (Such errors are accessible via PackageInfo.Errors. If +// false, Load will fail if any package had an error. +// +// It is an error if no packages were loaded. +func (conf *Config) Load() (*Program, error) { + // Create a simple default error handler for parse/type errors. + if conf.TypeChecker.Error == nil { + conf.TypeChecker.Error = func(e error) { fmt.Fprintln(os.Stderr, e) } + } + + // Set default working directory for relative package references. + if conf.Cwd == "" { + var err error + conf.Cwd, err = os.Getwd() + if err != nil { + return nil, err + } + } + + // Install default FindPackage hook using go/build logic. + if conf.FindPackage == nil { + conf.FindPackage = (*build.Context).Import + } + + prog := &Program{ + Fset: conf.fset(), + Imported: make(map[string]*PackageInfo), + importMap: make(map[string]*types.Package), + AllPackages: make(map[*types.Package]*PackageInfo), + } + + imp := importer{ + conf: conf, + prog: prog, + findpkg: make(map[findpkgKey]*findpkgValue), + imported: make(map[string]*importInfo), + start: time.Now(), + graph: make(map[string]map[string]bool), + } + + // -- loading proper (concurrent phase) -------------------------------- + + var errpkgs []string // packages that contained errors + + // Load the initially imported packages and their dependencies, + // in parallel. + // No vendor check on packages imported from the command line. + infos, importErrors := imp.importAll("", conf.Cwd, conf.ImportPkgs, ignoreVendor) + for _, ie := range importErrors { + conf.TypeChecker.Error(ie.err) // failed to create package + errpkgs = append(errpkgs, ie.path) + } + for _, info := range infos { + prog.Imported[info.Pkg.Path()] = info + } + + // Augment the designated initial packages by their tests. + // Dependencies are loaded in parallel. 
+ var xtestPkgs []*build.Package + for importPath, augment := range conf.ImportPkgs { + if !augment { + continue + } + + // No vendor check on packages imported from command line. + bp, err := imp.findPackage(importPath, conf.Cwd, ignoreVendor) + if err != nil { + // Package not found, or can't even parse package declaration. + // Already reported by previous loop; ignore it. + continue + } + + // Needs external test package? + if len(bp.XTestGoFiles) > 0 { + xtestPkgs = append(xtestPkgs, bp) + } + + // Consult the cache using the canonical package path. + path := bp.ImportPath + imp.importedMu.Lock() // (unnecessary, we're sequential here) + ii, ok := imp.imported[path] + // Paranoid checks added due to issue #11012. + if !ok { + // Unreachable. + // The previous loop called importAll and thus + // startLoad for each path in ImportPkgs, which + // populates imp.imported[path] with a non-zero value. + panic(fmt.Sprintf("imported[%q] not found", path)) + } + if ii == nil { + // Unreachable. + // The ii values in this loop are the same as in + // the previous loop, which enforced the invariant + // that at least one of ii.err and ii.info is non-nil. + panic(fmt.Sprintf("imported[%q] == nil", path)) + } + if ii.info == nil { + // Unreachable. + // awaitCompletion has the postcondition + // ii.info != nil. + panic(fmt.Sprintf("imported[%q].info = nil", path)) + } + info := ii.info + imp.importedMu.Unlock() + + // Parse the in-package test files. + files, errs := imp.conf.parsePackageFiles(bp, 't') + for _, err := range errs { + info.appendError(err) + } + + // The test files augmenting package P cannot be imported, + // but may import packages that import P, + // so we must disable the cycle check. + imp.addFiles(info, files, false) + } + + createPkg := func(path, dir string, files []*ast.File, errs []error) { + info := imp.newPackageInfo(path, dir) + for _, err := range errs { + info.appendError(err) + } + + // Ad hoc packages are non-importable, + // so no cycle check is needed. + // addFiles loads dependencies in parallel. + imp.addFiles(info, files, false) + prog.Created = append(prog.Created, info) + } + + // Create packages specified by conf.CreatePkgs. + for _, cp := range conf.CreatePkgs { + files, errs := parseFiles(conf.fset(), conf.build(), nil, conf.Cwd, cp.Filenames, conf.ParserMode) + files = append(files, cp.Files...) + + path := cp.Path + if path == "" { + if len(files) > 0 { + path = files[0].Name.Name + } else { + path = "(unnamed)" + } + } + + dir := conf.Cwd + if len(files) > 0 && files[0].Pos().IsValid() { + dir = filepath.Dir(conf.fset().File(files[0].Pos()).Name()) + } + createPkg(path, dir, files, errs) + } + + // Create external test packages. + sort.Sort(byImportPath(xtestPkgs)) + for _, bp := range xtestPkgs { + files, errs := imp.conf.parsePackageFiles(bp, 'x') + createPkg(bp.ImportPath+"_test", bp.Dir, files, errs) + } + + // -- finishing up (sequential) ---------------------------------------- + + if len(prog.Imported)+len(prog.Created) == 0 { + return nil, errors.New("no initial packages were loaded") + } + + // Create infos for indirectly imported packages. + // e.g. incomplete packages without syntax, loaded from export data. + for _, obj := range prog.importMap { + info := prog.AllPackages[obj] + if info == nil { + prog.AllPackages[obj] = &PackageInfo{Pkg: obj, Importable: true} + } else { + // finished + info.checker = nil + info.errorFunc = nil + } + } + + if !conf.AllowErrors { + // Report errors in indirectly imported packages. 
+ for _, info := range prog.AllPackages { + if len(info.Errors) > 0 { + errpkgs = append(errpkgs, info.Pkg.Path()) + } + } + if errpkgs != nil { + var more string + if len(errpkgs) > 3 { + more = fmt.Sprintf(" and %d more", len(errpkgs)-3) + errpkgs = errpkgs[:3] + } + return nil, fmt.Errorf("couldn't load packages due to errors: %s%s", + strings.Join(errpkgs, ", "), more) + } + } + + markErrorFreePackages(prog.AllPackages) + + return prog, nil +} + +type byImportPath []*build.Package + +func (b byImportPath) Len() int { return len(b) } +func (b byImportPath) Less(i, j int) bool { return b[i].ImportPath < b[j].ImportPath } +func (b byImportPath) Swap(i, j int) { b[i], b[j] = b[j], b[i] } + +// markErrorFreePackages sets the TransitivelyErrorFree flag on all +// applicable packages. +func markErrorFreePackages(allPackages map[*types.Package]*PackageInfo) { + // Build the transpose of the import graph. + importedBy := make(map[*types.Package]map[*types.Package]bool) + for P := range allPackages { + for _, Q := range P.Imports() { + clients, ok := importedBy[Q] + if !ok { + clients = make(map[*types.Package]bool) + importedBy[Q] = clients + } + clients[P] = true + } + } + + // Find all packages reachable from some error package. + reachable := make(map[*types.Package]bool) + var visit func(*types.Package) + visit = func(p *types.Package) { + if !reachable[p] { + reachable[p] = true + for q := range importedBy[p] { + visit(q) + } + } + } + for _, info := range allPackages { + if len(info.Errors) > 0 { + visit(info.Pkg) + } + } + + // Mark the others as "transitively error-free". + for _, info := range allPackages { + if !reachable[info.Pkg] { + info.TransitivelyErrorFree = true + } + } +} + +// build returns the effective build context. +func (conf *Config) build() *build.Context { + if conf.Build != nil { + return conf.Build + } + return &build.Default +} + +// parsePackageFiles enumerates the files belonging to package path, +// then loads, parses and returns them, plus a list of I/O or parse +// errors that were encountered. +// +// 'which' indicates which files to include: +// +// 'g': include non-test *.go source files (GoFiles + processed CgoFiles) +// 't': include in-package *_test.go source files (TestGoFiles) +// 'x': include external *_test.go source files. (XTestGoFiles) +func (conf *Config) parsePackageFiles(bp *build.Package, which rune) ([]*ast.File, []error) { + if bp.ImportPath == "unsafe" { + return nil, nil + } + var filenames []string + switch which { + case 'g': + filenames = bp.GoFiles + case 't': + filenames = bp.TestGoFiles + case 'x': + filenames = bp.XTestGoFiles + default: + panic(which) + } + + files, errs := parseFiles(conf.fset(), conf.build(), conf.DisplayPath, bp.Dir, filenames, conf.ParserMode) + + // Preprocess CgoFiles and parse the outputs (sequentially). + if which == 'g' && bp.CgoFiles != nil { + cgofiles, err := cgo.ProcessFiles(bp, conf.fset(), conf.DisplayPath, conf.ParserMode) + if err != nil { + errs = append(errs, err) + } else { + files = append(files, cgofiles...) + } + } + + return files, errs +} + +// doImport imports the package denoted by path. +// It implements the types.Importer signature. +// +// It returns an error if a package could not be created +// (e.g. go/build or parse error), but type errors are reported via +// the types.Config.Error callback (the first of which is also saved +// in the package's PackageInfo). +// +// Idempotent. 
+func (imp *importer) doImport(from *PackageInfo, to string) (*types.Package, error) { + if to == "C" { + // This should be unreachable, but ad hoc packages are + // not currently subject to cgo preprocessing. + // See https://golang.org/issue/11627. + return nil, fmt.Errorf(`the loader doesn't cgo-process ad hoc packages like %q; see Go issue 11627`, + from.Pkg.Path()) + } + + bp, err := imp.findPackage(to, from.dir, 0) + if err != nil { + return nil, err + } + + // The standard unsafe package is handled specially, + // and has no PackageInfo. + if bp.ImportPath == "unsafe" { + return types.Unsafe, nil + } + + // Look for the package in the cache using its canonical path. + path := bp.ImportPath + imp.importedMu.Lock() + ii := imp.imported[path] + imp.importedMu.Unlock() + if ii == nil { + panic("internal error: unexpected import: " + path) + } + if ii.info != nil { + return ii.info.Pkg, nil + } + + // Import of incomplete package: this indicates a cycle. + fromPath := from.Pkg.Path() + if cycle := imp.findPath(path, fromPath); cycle != nil { + // Normalize cycle: start from alphabetically largest node. + pos, start := -1, "" + for i, s := range cycle { + if pos < 0 || s > start { + pos, start = i, s + } + } + cycle = append(cycle, cycle[:pos]...)[pos:] // rotate cycle to start from largest + cycle = append(cycle, cycle[0]) // add start node to end to show cycliness + return nil, fmt.Errorf("import cycle: %s", strings.Join(cycle, " -> ")) + } + + panic("internal error: import of incomplete (yet acyclic) package: " + fromPath) +} + +// findPackage locates the package denoted by the importPath in the +// specified directory. +func (imp *importer) findPackage(importPath, fromDir string, mode build.ImportMode) (*build.Package, error) { + // We use a non-blocking duplicate-suppressing cache (gopl.io §9.7) + // to avoid holding the lock around FindPackage. + key := findpkgKey{importPath, fromDir, mode} + imp.findpkgMu.Lock() + v, ok := imp.findpkg[key] + if ok { + // cache hit + imp.findpkgMu.Unlock() + + <-v.ready // wait for entry to become ready + } else { + // Cache miss: this goroutine becomes responsible for + // populating the map entry and broadcasting its readiness. + v = &findpkgValue{ready: make(chan struct{})} + imp.findpkg[key] = v + imp.findpkgMu.Unlock() + + ioLimit <- true + v.bp, v.err = imp.conf.FindPackage(imp.conf.build(), importPath, fromDir, mode) + <-ioLimit + + if _, ok := v.err.(*build.NoGoError); ok { + v.err = nil // empty directory is not an error + } + + close(v.ready) // broadcast ready condition + } + return v.bp, v.err +} + +// importAll loads, parses, and type-checks the specified packages in +// parallel and returns their completed importInfos in unspecified order. +// +// fromPath is the package path of the importing package, if it is +// importable, "" otherwise. It is used for cycle detection. +// +// fromDir is the directory containing the import declaration that +// caused these imports. +func (imp *importer) importAll(fromPath, fromDir string, imports map[string]bool, mode build.ImportMode) (infos []*PackageInfo, errors []importError) { + if fromPath != "" { + // We're loading a set of imports. + // + // We must record graph edges from the importing package + // to its dependencies, and check for cycles. 
+ imp.graphMu.Lock() + deps, ok := imp.graph[fromPath] + if !ok { + deps = make(map[string]bool) + imp.graph[fromPath] = deps + } + for importPath := range imports { + deps[importPath] = true + } + imp.graphMu.Unlock() + } + + var pending []*importInfo + for importPath := range imports { + if fromPath != "" { + if cycle := imp.findPath(importPath, fromPath); cycle != nil { + // Cycle-forming import: we must not check it + // since it would deadlock. + if trace { + fmt.Fprintf(os.Stderr, "import cycle: %q\n", cycle) + } + continue + } + } + bp, err := imp.findPackage(importPath, fromDir, mode) + if err != nil { + errors = append(errors, importError{ + path: importPath, + err: err, + }) + continue + } + pending = append(pending, imp.startLoad(bp)) + } + + for _, ii := range pending { + ii.awaitCompletion() + infos = append(infos, ii.info) + } + + return infos, errors +} + +// findPath returns an arbitrary path from 'from' to 'to' in the import +// graph, or nil if there was none. +func (imp *importer) findPath(from, to string) []string { + imp.graphMu.Lock() + defer imp.graphMu.Unlock() + + seen := make(map[string]bool) + var search func(stack []string, importPath string) []string + search = func(stack []string, importPath string) []string { + if !seen[importPath] { + seen[importPath] = true + stack = append(stack, importPath) + if importPath == to { + return stack + } + for x := range imp.graph[importPath] { + if p := search(stack, x); p != nil { + return p + } + } + } + return nil + } + return search(make([]string, 0, 20), from) +} + +// startLoad initiates the loading, parsing and type-checking of the +// specified package and its dependencies, if it has not already begun. +// +// It returns an importInfo, not necessarily in a completed state. The +// caller must call awaitCompletion() before accessing its info field. +// +// startLoad is concurrency-safe and idempotent. +func (imp *importer) startLoad(bp *build.Package) *importInfo { + path := bp.ImportPath + imp.importedMu.Lock() + ii, ok := imp.imported[path] + if !ok { + ii = &importInfo{path: path, complete: make(chan struct{})} + imp.imported[path] = ii + go func() { + info := imp.load(bp) + ii.Complete(info) + }() + } + imp.importedMu.Unlock() + + return ii +} + +// load implements package loading by parsing Go source files +// located by go/build. +func (imp *importer) load(bp *build.Package) *PackageInfo { + info := imp.newPackageInfo(bp.ImportPath, bp.Dir) + info.Importable = true + files, errs := imp.conf.parsePackageFiles(bp, 'g') + for _, err := range errs { + info.appendError(err) + } + + imp.addFiles(info, files, true) + + imp.progMu.Lock() + imp.prog.importMap[bp.ImportPath] = info.Pkg + imp.progMu.Unlock() + + return info +} + +// addFiles adds and type-checks the specified files to info, loading +// their dependencies if needed. The order of files determines the +// package initialization order. It may be called multiple times on the +// same package. Errors are appended to the info.Errors field. +// +// cycleCheck determines whether the imports within files create +// dependency edges that should be checked for potential cycles. +func (imp *importer) addFiles(info *PackageInfo, files []*ast.File, cycleCheck bool) { + // Ensure the dependencies are loaded, in parallel. + var fromPath string + if cycleCheck { + fromPath = info.Pkg.Path() + } + // TODO(adonovan): opt: make the caller do scanImports. + // Callers with a build.Package can skip it. 
+ imp.importAll(fromPath, info.dir, scanImports(files), 0) + + if trace { + fmt.Fprintf(os.Stderr, "%s: start %q (%d)\n", + time.Since(imp.start), info.Pkg.Path(), len(files)) + } + + // Don't call checker.Files on Unsafe, even with zero files, + // because it would mutate the package, which is a global. + if info.Pkg == types.Unsafe { + if len(files) > 0 { + panic(`"unsafe" package contains unexpected files`) + } + } else { + // Ignore the returned (first) error since we + // already collect them all in the PackageInfo. + info.checker.Files(files) + info.Files = append(info.Files, files...) + } + + if imp.conf.AfterTypeCheck != nil { + imp.conf.AfterTypeCheck(info, files) + } + + if trace { + fmt.Fprintf(os.Stderr, "%s: stop %q\n", + time.Since(imp.start), info.Pkg.Path()) + } +} + +func (imp *importer) newPackageInfo(path, dir string) *PackageInfo { + var pkg *types.Package + if path == "unsafe" { + pkg = types.Unsafe + } else { + pkg = types.NewPackage(path, "") + } + info := &PackageInfo{ + Pkg: pkg, + Info: types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + }, + errorFunc: imp.conf.TypeChecker.Error, + dir: dir, + } + versions.InitFileVersions(&info.Info) + + // Copy the types.Config so we can vary it across PackageInfos. + tc := imp.conf.TypeChecker + tc.IgnoreFuncBodies = false + if f := imp.conf.TypeCheckFuncBodies; f != nil { + tc.IgnoreFuncBodies = !f(path) + } + tc.Importer = closure{imp, info} + tc.Error = info.appendError // appendError wraps the user's Error function + + info.checker = types.NewChecker(&tc, imp.conf.fset(), pkg, &info.Info) + imp.progMu.Lock() + imp.prog.AllPackages[pkg] = info + imp.progMu.Unlock() + return info +} + +type closure struct { + imp *importer + info *PackageInfo +} + +func (c closure) Import(to string) (*types.Package, error) { return c.imp.doImport(c.info, to) } diff --git a/vendor/golang.org/x/tools/go/loader/util.go b/vendor/golang.org/x/tools/go/loader/util.go new file mode 100644 index 0000000..3a80aca --- /dev/null +++ b/vendor/golang.org/x/tools/go/loader/util.go @@ -0,0 +1,123 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package loader + +import ( + "go/ast" + "go/build" + "go/parser" + "go/token" + "io" + "os" + "strconv" + "sync" + + "golang.org/x/tools/go/buildutil" +) + +// We use a counting semaphore to limit +// the number of parallel I/O calls per process. +var ioLimit = make(chan bool, 10) + +// parseFiles parses the Go source files within directory dir and +// returns the ASTs of the ones that could be at least partially parsed, +// along with a list of I/O and parse errors encountered. +// +// I/O is done via ctxt, which may specify a virtual file system. +// displayPath is used to transform the filenames attached to the ASTs. 
+func parseFiles(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, files []string, mode parser.Mode) ([]*ast.File, []error) { + if displayPath == nil { + displayPath = func(path string) string { return path } + } + var wg sync.WaitGroup + n := len(files) + parsed := make([]*ast.File, n) + errors := make([]error, n) + for i, file := range files { + if !buildutil.IsAbsPath(ctxt, file) { + file = buildutil.JoinPath(ctxt, dir, file) + } + wg.Add(1) + go func(i int, file string) { + ioLimit <- true // wait + defer func() { + wg.Done() + <-ioLimit // signal + }() + var rd io.ReadCloser + var err error + if ctxt.OpenFile != nil { + rd, err = ctxt.OpenFile(file) + } else { + rd, err = os.Open(file) + } + if err != nil { + errors[i] = err // open failed + return + } + + // ParseFile may return both an AST and an error. + parsed[i], errors[i] = parser.ParseFile(fset, displayPath(file), rd, mode) + rd.Close() + }(i, file) + } + wg.Wait() + + // Eliminate nils, preserving order. + var o int + for _, f := range parsed { + if f != nil { + parsed[o] = f + o++ + } + } + parsed = parsed[:o] + + o = 0 + for _, err := range errors { + if err != nil { + errors[o] = err + o++ + } + } + errors = errors[:o] + + return parsed, errors +} + +// scanImports returns the set of all import paths from all +// import specs in the specified files. +func scanImports(files []*ast.File) map[string]bool { + imports := make(map[string]bool) + for _, f := range files { + for _, decl := range f.Decls { + if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT { + for _, spec := range decl.Specs { + spec := spec.(*ast.ImportSpec) + + // NB: do not assume the program is well-formed! + path, err := strconv.Unquote(spec.Path.Value) + if err != nil { + continue // quietly ignore the error + } + if path == "C" { + continue // skip pseudopackage + } + imports[path] = true + } + } + } + } + return imports +} + +// ---------- Internal helpers ---------- + +// TODO(adonovan): make this a method: func (*token.File) Contains(token.Pos) +func tokenFileContainsPos(f *token.File, pos token.Pos) bool { + p := int(pos) + base := f.Base() + return base <= p && p < base+f.Size() +} diff --git a/vendor/golang.org/x/tools/internal/versions/features.go b/vendor/golang.org/x/tools/internal/versions/features.go new file mode 100644 index 0000000..b53f178 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/versions/features.go @@ -0,0 +1,43 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package versions + +// This file contains predicates for working with file versions to +// decide when a tool should consider a language feature enabled. + +// GoVersions that features in x/tools can be gated to. +const ( + Go1_18 = "go1.18" + Go1_19 = "go1.19" + Go1_20 = "go1.20" + Go1_21 = "go1.21" + Go1_22 = "go1.22" +) + +// Future is an invalid unknown Go version sometime in the future. +// Do not use directly with Compare. +const Future = "" + +// AtLeast reports whether the file version v comes after a Go release. +// +// Use this predicate to enable a behavior once a certain Go release +// has happened (and stays enabled in the future). +func AtLeast(v, release string) bool { + if v == Future { + return true // an unknown future version is always after y. + } + return Compare(Lang(v), Lang(release)) >= 0 +} + +// Before reports whether the file version v is strictly before a Go release. 
+// +// Use this predicate to disable a behavior once a certain Go release +// has happened (and stays enabled in the future). +func Before(v, release string) bool { + if v == Future { + return false // an unknown future version happens after y. + } + return Compare(Lang(v), Lang(release)) < 0 +} diff --git a/vendor/golang.org/x/tools/internal/versions/gover.go b/vendor/golang.org/x/tools/internal/versions/gover.go new file mode 100644 index 0000000..bbabcd2 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/versions/gover.go @@ -0,0 +1,172 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This is a fork of internal/gover for use by x/tools until +// go1.21 and earlier are no longer supported by x/tools. + +package versions + +import "strings" + +// A gover is a parsed Go gover: major[.Minor[.Patch]][kind[pre]] +// The numbers are the original decimal strings to avoid integer overflows +// and since there is very little actual math. (Probably overflow doesn't matter in practice, +// but at the time this code was written, there was an existing test that used +// go1.99999999999, which does not fit in an int on 32-bit platforms. +// The "big decimal" representation avoids the problem entirely.) +type gover struct { + major string // decimal + minor string // decimal or "" + patch string // decimal or "" + kind string // "", "alpha", "beta", "rc" + pre string // decimal or "" +} + +// compare returns -1, 0, or +1 depending on whether +// x < y, x == y, or x > y, interpreted as toolchain versions. +// The versions x and y must not begin with a "go" prefix: just "1.21" not "go1.21". +// Malformed versions compare less than well-formed versions and equal to each other. +// The language version "1.21" compares less than the release candidate and eventual releases "1.21rc1" and "1.21.0". +func compare(x, y string) int { + vx := parse(x) + vy := parse(y) + + if c := cmpInt(vx.major, vy.major); c != 0 { + return c + } + if c := cmpInt(vx.minor, vy.minor); c != 0 { + return c + } + if c := cmpInt(vx.patch, vy.patch); c != 0 { + return c + } + if c := strings.Compare(vx.kind, vy.kind); c != 0 { // "" < alpha < beta < rc + return c + } + if c := cmpInt(vx.pre, vy.pre); c != 0 { + return c + } + return 0 +} + +// lang returns the Go language version. For example, lang("1.2.3") == "1.2". +func lang(x string) string { + v := parse(x) + if v.minor == "" || v.major == "1" && v.minor == "0" { + return v.major + } + return v.major + "." + v.minor +} + +// isValid reports whether the version x is valid. +func isValid(x string) bool { + return parse(x) != gover{} +} + +// parse parses the Go version string x into a version. +// It returns the zero version if x is malformed. +func parse(x string) gover { + var v gover + + // Parse major version. + var ok bool + v.major, x, ok = cutInt(x) + if !ok { + return gover{} + } + if x == "" { + // Interpret "1" as "1.0.0". + v.minor = "0" + v.patch = "0" + return v + } + + // Parse . before minor version. + if x[0] != '.' { + return gover{} + } + + // Parse minor version. + v.minor, x, ok = cutInt(x[1:]) + if !ok { + return gover{} + } + if x == "" { + // Patch missing is same as "0" for older versions. + // Starting in Go 1.21, patch missing is different from explicit .0. + if cmpInt(v.minor, "21") < 0 { + v.patch = "0" + } + return v + } + + // Parse patch if present. + if x[0] == '.' 
{ + v.patch, x, ok = cutInt(x[1:]) + if !ok || x != "" { + // Note that we are disallowing prereleases (alpha, beta, rc) for patch releases here (x != ""). + // Allowing them would be a bit confusing because we already have: + // 1.21 < 1.21rc1 + // But a prerelease of a patch would have the opposite effect: + // 1.21.3rc1 < 1.21.3 + // We've never needed them before, so let's not start now. + return gover{} + } + return v + } + + // Parse prerelease. + i := 0 + for i < len(x) && (x[i] < '0' || '9' < x[i]) { + if x[i] < 'a' || 'z' < x[i] { + return gover{} + } + i++ + } + if i == 0 { + return gover{} + } + v.kind, x = x[:i], x[i:] + if x == "" { + return v + } + v.pre, x, ok = cutInt(x) + if !ok || x != "" { + return gover{} + } + + return v +} + +// cutInt scans the leading decimal number at the start of x to an integer +// and returns that value and the rest of the string. +func cutInt(x string) (n, rest string, ok bool) { + i := 0 + for i < len(x) && '0' <= x[i] && x[i] <= '9' { + i++ + } + if i == 0 || x[0] == '0' && i != 1 { // no digits or unnecessary leading zero + return "", "", false + } + return x[:i], x[i:], true +} + +// cmpInt returns cmp.Compare(x, y) interpreting x and y as decimal numbers. +// (Copied from golang.org/x/mod/semver's compareInt.) +func cmpInt(x, y string) int { + if x == y { + return 0 + } + if len(x) < len(y) { + return -1 + } + if len(x) > len(y) { + return +1 + } + if x < y { + return -1 + } else { + return +1 + } +} diff --git a/vendor/golang.org/x/tools/internal/versions/toolchain.go b/vendor/golang.org/x/tools/internal/versions/toolchain.go new file mode 100644 index 0000000..377bf7a --- /dev/null +++ b/vendor/golang.org/x/tools/internal/versions/toolchain.go @@ -0,0 +1,14 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package versions + +// toolchain is maximum version (<1.22) that the go toolchain used +// to build the current tool is known to support. +// +// When a tool is built with >=1.22, the value of toolchain is unused. +// +// x/tools does not support building with go <1.18. So we take this +// as the minimum possible maximum. +var toolchain string = Go1_18 diff --git a/vendor/golang.org/x/tools/internal/versions/toolchain_go119.go b/vendor/golang.org/x/tools/internal/versions/toolchain_go119.go new file mode 100644 index 0000000..f65beed --- /dev/null +++ b/vendor/golang.org/x/tools/internal/versions/toolchain_go119.go @@ -0,0 +1,14 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.19 +// +build go1.19 + +package versions + +func init() { + if Compare(toolchain, Go1_19) < 0 { + toolchain = Go1_19 + } +} diff --git a/vendor/golang.org/x/tools/internal/versions/toolchain_go120.go b/vendor/golang.org/x/tools/internal/versions/toolchain_go120.go new file mode 100644 index 0000000..1a9efa1 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/versions/toolchain_go120.go @@ -0,0 +1,14 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +//go:build go1.20 +// +build go1.20 + +package versions + +func init() { + if Compare(toolchain, Go1_20) < 0 { + toolchain = Go1_20 + } +} diff --git a/vendor/golang.org/x/tools/internal/versions/toolchain_go121.go b/vendor/golang.org/x/tools/internal/versions/toolchain_go121.go new file mode 100644 index 0000000..b7ef216 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/versions/toolchain_go121.go @@ -0,0 +1,14 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.21 +// +build go1.21 + +package versions + +func init() { + if Compare(toolchain, Go1_21) < 0 { + toolchain = Go1_21 + } +} diff --git a/vendor/golang.org/x/tools/internal/versions/types.go b/vendor/golang.org/x/tools/internal/versions/types.go new file mode 100644 index 0000000..562eef2 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/versions/types.go @@ -0,0 +1,19 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package versions + +import ( + "go/types" +) + +// GoVersion returns the Go version of the type package. +// It returns zero if no version can be determined. +func GoVersion(pkg *types.Package) string { + // TODO(taking): x/tools can call GoVersion() [from 1.21] after 1.25. + if pkg, ok := any(pkg).(interface{ GoVersion() string }); ok { + return pkg.GoVersion() + } + return "" +} diff --git a/vendor/golang.org/x/tools/internal/versions/types_go121.go b/vendor/golang.org/x/tools/internal/versions/types_go121.go new file mode 100644 index 0000000..b4345d3 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/versions/types_go121.go @@ -0,0 +1,30 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build !go1.22 +// +build !go1.22 + +package versions + +import ( + "go/ast" + "go/types" +) + +// FileVersion returns a language version (<=1.21) derived from runtime.Version() +// or an unknown future version. +func FileVersion(info *types.Info, file *ast.File) string { + // In x/tools built with Go <= 1.21, we do not have Info.FileVersions + // available. We use a go version derived from the toolchain used to + // compile the tool by default. + // This will be <= go1.21. We take this as the maximum version that + // this tool can support. + // + // There are no features currently in x/tools that need to tell fine grained + // differences for versions <1.22. + return toolchain +} + +// InitFileVersions is a noop when compiled with this Go version. +func InitFileVersions(*types.Info) {} diff --git a/vendor/golang.org/x/tools/internal/versions/types_go122.go b/vendor/golang.org/x/tools/internal/versions/types_go122.go new file mode 100644 index 0000000..e818063 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/versions/types_go122.go @@ -0,0 +1,41 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.22 +// +build go1.22 + +package versions + +import ( + "go/ast" + "go/types" +) + +// FileVersions returns a file's Go version. +// The reported version is an unknown Future version if a +// version cannot be determined. 
+func FileVersion(info *types.Info, file *ast.File) string { + // In tools built with Go >= 1.22, the Go version of a file + // follow a cascades of sources: + // 1) types.Info.FileVersion, which follows the cascade: + // 1.a) file version (ast.File.GoVersion), + // 1.b) the package version (types.Config.GoVersion), or + // 2) is some unknown Future version. + // + // File versions require a valid package version to be provided to types + // in Config.GoVersion. Config.GoVersion is either from the package's module + // or the toolchain (go run). This value should be provided by go/packages + // or unitchecker.Config.GoVersion. + if v := info.FileVersions[file]; IsValid(v) { + return v + } + // Note: we could instead return runtime.Version() [if valid]. + // This would act as a max version on what a tool can support. + return Future +} + +// InitFileVersions initializes info to record Go versions for Go files. +func InitFileVersions(info *types.Info) { + info.FileVersions = make(map[*ast.File]string) +} diff --git a/vendor/golang.org/x/tools/internal/versions/versions.go b/vendor/golang.org/x/tools/internal/versions/versions.go new file mode 100644 index 0000000..8d1f745 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/versions/versions.go @@ -0,0 +1,57 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package versions + +import ( + "strings" +) + +// Note: If we use build tags to use go/versions when go >=1.22, +// we run into go.dev/issue/53737. Under some operations users would see an +// import of "go/versions" even if they would not compile the file. +// For example, during `go get -u ./...` (go.dev/issue/64490) we do not try to include +// For this reason, this library just a clone of go/versions for the moment. + +// Lang returns the Go language version for version x. +// If x is not a valid version, Lang returns the empty string. +// For example: +// +// Lang("go1.21rc2") = "go1.21" +// Lang("go1.21.2") = "go1.21" +// Lang("go1.21") = "go1.21" +// Lang("go1") = "go1" +// Lang("bad") = "" +// Lang("1.21") = "" +func Lang(x string) string { + v := lang(stripGo(x)) + if v == "" { + return "" + } + return x[:2+len(v)] // "go"+v without allocation +} + +// Compare returns -1, 0, or +1 depending on whether +// x < y, x == y, or x > y, interpreted as Go versions. +// The versions x and y must begin with a "go" prefix: "go1.21" not "1.21". +// Invalid versions, including the empty string, compare less than +// valid versions and equal to each other. +// The language version "go1.21" compares less than the +// release candidate and eventual releases "go1.21rc1" and "go1.21.0". +// Custom toolchain suffixes are ignored during comparison: +// "go1.21.0" and "go1.21.0-bigcorp" are equal. +func Compare(x, y string) int { return compare(stripGo(x), stripGo(y)) } + +// IsValid reports whether the version x is valid. +func IsValid(x string) bool { return isValid(stripGo(x)) } + +// stripGo converts from a "go1.21" version to a "1.21" version. +// If v does not start with "go", stripGo returns the empty string (a known invalid version). +func stripGo(v string) string { + v, _, _ = strings.Cut(v, "-") // strip -bigcorp suffix. 
+ if len(v) < 2 || v[:2] != "go" { + return "" + } + return v[2:] +} diff --git a/vendor/gopkg.in/natefinch/lumberjack.v2/lumberjack.go b/vendor/gopkg.in/natefinch/lumberjack.v2/lumberjack.go index 8fe043e..3447cdc 100644 --- a/vendor/gopkg.in/natefinch/lumberjack.v2/lumberjack.go +++ b/vendor/gopkg.in/natefinch/lumberjack.v2/lumberjack.go @@ -3,7 +3,7 @@ // Note that this is v2.0 of lumberjack, and should be imported using gopkg.in // thusly: // -// import "gopkg.in/natefinch/lumberjack.v2" +// import "gopkg.in/natefinch/lumberjack.v2" // // The package name remains simply lumberjack, and the code resides at // https://github.com/natefinch/lumberjack under the v2.0 branch. @@ -66,7 +66,7 @@ var _ io.WriteCloser = (*Logger)(nil) // `/var/log/foo/server.log`, a backup created at 6:30pm on Nov 11 2016 would // use the filename `/var/log/foo/server-2016-11-04T18-30-00.000.log` // -// # Cleaning Up Old Log Files +// Cleaning Up Old Log Files // // Whenever a new logfile gets created, old log files may be deleted. The most // recent files according to the encoded timestamp will be retained, up to a diff --git a/vendor/gopkg.in/yaml.v2/.travis.yml b/vendor/gopkg.in/yaml.v2/.travis.yml new file mode 100644 index 0000000..7348c50 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/.travis.yml @@ -0,0 +1,17 @@ +language: go + +go: + - "1.4.x" + - "1.5.x" + - "1.6.x" + - "1.7.x" + - "1.8.x" + - "1.9.x" + - "1.10.x" + - "1.11.x" + - "1.12.x" + - "1.13.x" + - "1.14.x" + - "tip" + +go_import_path: gopkg.in/yaml.v2 diff --git a/vendor/gopkg.in/yaml.v2/LICENSE b/vendor/gopkg.in/yaml.v2/LICENSE new file mode 100644 index 0000000..8dada3e --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/vendor/gopkg.in/yaml.v2/LICENSE.libyaml b/vendor/gopkg.in/yaml.v2/LICENSE.libyaml new file mode 100644 index 0000000..8da58fb --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/LICENSE.libyaml @@ -0,0 +1,31 @@ +The following files were ported to Go from C files of libyaml, and thus +are still covered by their original copyright and license: + + apic.go + emitterc.go + parserc.go + readerc.go + scannerc.go + writerc.go + yamlh.go + yamlprivateh.go + +Copyright (c) 2006 Kirill Simonov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/gopkg.in/yaml.v2/NOTICE b/vendor/gopkg.in/yaml.v2/NOTICE new file mode 100644 index 0000000..866d74a --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/NOTICE @@ -0,0 +1,13 @@ +Copyright 2011-2016 Canonical Ltd. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/vendor/gopkg.in/yaml.v2/README.md b/vendor/gopkg.in/yaml.v2/README.md new file mode 100644 index 0000000..b50c6e8 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/README.md @@ -0,0 +1,133 @@ +# YAML support for the Go language + +Introduction +------------ + +The yaml package enables Go programs to comfortably encode and decode YAML +values. It was developed within [Canonical](https://www.canonical.com) as +part of the [juju](https://juju.ubuntu.com) project, and is based on a +pure Go port of the well-known [libyaml](http://pyyaml.org/wiki/LibYAML) +C library to parse and generate YAML data quickly and reliably. + +Compatibility +------------- + +The yaml package supports most of YAML 1.1 and 1.2, including support for +anchors, tags, map merging, etc. Multi-document unmarshalling is not yet +implemented, and base-60 floats from YAML 1.1 are purposefully not +supported since they're a poor design and are gone in YAML 1.2. + +Installation and usage +---------------------- + +The import path for the package is *gopkg.in/yaml.v2*. 
+ +To install it, run: + + go get gopkg.in/yaml.v2 + +API documentation +----------------- + +If opened in a browser, the import path itself leads to the API documentation: + + * [https://gopkg.in/yaml.v2](https://gopkg.in/yaml.v2) + +API stability +------------- + +The package API for yaml v2 will remain stable as described in [gopkg.in](https://gopkg.in). + + +License +------- + +The yaml package is licensed under the Apache License 2.0. Please see the LICENSE file for details. + + +Example +------- + +```Go +package main + +import ( + "fmt" + "log" + + "gopkg.in/yaml.v2" +) + +var data = ` +a: Easy! +b: + c: 2 + d: [3, 4] +` + +// Note: struct fields must be public in order for unmarshal to +// correctly populate the data. +type T struct { + A string + B struct { + RenamedC int `yaml:"c"` + D []int `yaml:",flow"` + } +} + +func main() { + t := T{} + + err := yaml.Unmarshal([]byte(data), &t) + if err != nil { + log.Fatalf("error: %v", err) + } + fmt.Printf("--- t:\n%v\n\n", t) + + d, err := yaml.Marshal(&t) + if err != nil { + log.Fatalf("error: %v", err) + } + fmt.Printf("--- t dump:\n%s\n\n", string(d)) + + m := make(map[interface{}]interface{}) + + err = yaml.Unmarshal([]byte(data), &m) + if err != nil { + log.Fatalf("error: %v", err) + } + fmt.Printf("--- m:\n%v\n\n", m) + + d, err = yaml.Marshal(&m) + if err != nil { + log.Fatalf("error: %v", err) + } + fmt.Printf("--- m dump:\n%s\n\n", string(d)) +} +``` + +This example will generate the following output: + +``` +--- t: +{Easy! {2 [3 4]}} + +--- t dump: +a: Easy! +b: + c: 2 + d: [3, 4] + + +--- m: +map[a:Easy! b:map[c:2 d:[3 4]]] + +--- m dump: +a: Easy! +b: + c: 2 + d: + - 3 + - 4 +``` + diff --git a/vendor/gopkg.in/yaml.v2/apic.go b/vendor/gopkg.in/yaml.v2/apic.go new file mode 100644 index 0000000..acf7140 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/apic.go @@ -0,0 +1,744 @@ +package yaml + +import ( + "io" +) + +func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) { + //fmt.Println("yaml_insert_token", "pos:", pos, "typ:", token.typ, "head:", parser.tokens_head, "len:", len(parser.tokens)) + + // Check if we can move the queue at the beginning of the buffer. + if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) { + if parser.tokens_head != len(parser.tokens) { + copy(parser.tokens, parser.tokens[parser.tokens_head:]) + } + parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head] + parser.tokens_head = 0 + } + parser.tokens = append(parser.tokens, *token) + if pos < 0 { + return + } + copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:]) + parser.tokens[parser.tokens_head+pos] = *token +} + +// Create a new parser object. +func yaml_parser_initialize(parser *yaml_parser_t) bool { + *parser = yaml_parser_t{ + raw_buffer: make([]byte, 0, input_raw_buffer_size), + buffer: make([]byte, 0, input_buffer_size), + } + return true +} + +// Destroy a parser object. +func yaml_parser_delete(parser *yaml_parser_t) { + *parser = yaml_parser_t{} +} + +// String read handler. +func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { + if parser.input_pos == len(parser.input) { + return 0, io.EOF + } + n = copy(buffer, parser.input[parser.input_pos:]) + parser.input_pos += n + return n, nil +} + +// Reader read handler. +func yaml_reader_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { + return parser.input_reader.Read(buffer) +} + +// Set a string input. 
+func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) { + if parser.read_handler != nil { + panic("must set the input source only once") + } + parser.read_handler = yaml_string_read_handler + parser.input = input + parser.input_pos = 0 +} + +// Set a file input. +func yaml_parser_set_input_reader(parser *yaml_parser_t, r io.Reader) { + if parser.read_handler != nil { + panic("must set the input source only once") + } + parser.read_handler = yaml_reader_read_handler + parser.input_reader = r +} + +// Set the source encoding. +func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) { + if parser.encoding != yaml_ANY_ENCODING { + panic("must set the encoding only once") + } + parser.encoding = encoding +} + +var disableLineWrapping = false + +// Create a new emitter object. +func yaml_emitter_initialize(emitter *yaml_emitter_t) { + *emitter = yaml_emitter_t{ + buffer: make([]byte, output_buffer_size), + raw_buffer: make([]byte, 0, output_raw_buffer_size), + states: make([]yaml_emitter_state_t, 0, initial_stack_size), + events: make([]yaml_event_t, 0, initial_queue_size), + } + if disableLineWrapping { + emitter.best_width = -1 + } +} + +// Destroy an emitter object. +func yaml_emitter_delete(emitter *yaml_emitter_t) { + *emitter = yaml_emitter_t{} +} + +// String write handler. +func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error { + *emitter.output_buffer = append(*emitter.output_buffer, buffer...) + return nil +} + +// yaml_writer_write_handler uses emitter.output_writer to write the +// emitted text. +func yaml_writer_write_handler(emitter *yaml_emitter_t, buffer []byte) error { + _, err := emitter.output_writer.Write(buffer) + return err +} + +// Set a string output. +func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]byte) { + if emitter.write_handler != nil { + panic("must set the output target only once") + } + emitter.write_handler = yaml_string_write_handler + emitter.output_buffer = output_buffer +} + +// Set a file output. +func yaml_emitter_set_output_writer(emitter *yaml_emitter_t, w io.Writer) { + if emitter.write_handler != nil { + panic("must set the output target only once") + } + emitter.write_handler = yaml_writer_write_handler + emitter.output_writer = w +} + +// Set the output encoding. +func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) { + if emitter.encoding != yaml_ANY_ENCODING { + panic("must set the output encoding only once") + } + emitter.encoding = encoding +} + +// Set the canonical output style. +func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) { + emitter.canonical = canonical +} + +//// Set the indentation increment. +func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) { + if indent < 2 || indent > 9 { + indent = 2 + } + emitter.best_indent = indent +} + +// Set the preferred line width. +func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) { + if width < 0 { + width = -1 + } + emitter.best_width = width +} + +// Set if unescaped non-ASCII characters are allowed. +func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) { + emitter.unicode = unicode +} + +// Set the preferred line break character. +func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) { + emitter.line_break = line_break +} + +///* +// * Destroy a token object. 
+// */ +// +//YAML_DECLARE(void) +//yaml_token_delete(yaml_token_t *token) +//{ +// assert(token); // Non-NULL token object expected. +// +// switch (token.type) +// { +// case YAML_TAG_DIRECTIVE_TOKEN: +// yaml_free(token.data.tag_directive.handle); +// yaml_free(token.data.tag_directive.prefix); +// break; +// +// case YAML_ALIAS_TOKEN: +// yaml_free(token.data.alias.value); +// break; +// +// case YAML_ANCHOR_TOKEN: +// yaml_free(token.data.anchor.value); +// break; +// +// case YAML_TAG_TOKEN: +// yaml_free(token.data.tag.handle); +// yaml_free(token.data.tag.suffix); +// break; +// +// case YAML_SCALAR_TOKEN: +// yaml_free(token.data.scalar.value); +// break; +// +// default: +// break; +// } +// +// memset(token, 0, sizeof(yaml_token_t)); +//} +// +///* +// * Check if a string is a valid UTF-8 sequence. +// * +// * Check 'reader.c' for more details on UTF-8 encoding. +// */ +// +//static int +//yaml_check_utf8(yaml_char_t *start, size_t length) +//{ +// yaml_char_t *end = start+length; +// yaml_char_t *pointer = start; +// +// while (pointer < end) { +// unsigned char octet; +// unsigned int width; +// unsigned int value; +// size_t k; +// +// octet = pointer[0]; +// width = (octet & 0x80) == 0x00 ? 1 : +// (octet & 0xE0) == 0xC0 ? 2 : +// (octet & 0xF0) == 0xE0 ? 3 : +// (octet & 0xF8) == 0xF0 ? 4 : 0; +// value = (octet & 0x80) == 0x00 ? octet & 0x7F : +// (octet & 0xE0) == 0xC0 ? octet & 0x1F : +// (octet & 0xF0) == 0xE0 ? octet & 0x0F : +// (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0; +// if (!width) return 0; +// if (pointer+width > end) return 0; +// for (k = 1; k < width; k ++) { +// octet = pointer[k]; +// if ((octet & 0xC0) != 0x80) return 0; +// value = (value << 6) + (octet & 0x3F); +// } +// if (!((width == 1) || +// (width == 2 && value >= 0x80) || +// (width == 3 && value >= 0x800) || +// (width == 4 && value >= 0x10000))) return 0; +// +// pointer += width; +// } +// +// return 1; +//} +// + +// Create STREAM-START. +func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) { + *event = yaml_event_t{ + typ: yaml_STREAM_START_EVENT, + encoding: encoding, + } +} + +// Create STREAM-END. +func yaml_stream_end_event_initialize(event *yaml_event_t) { + *event = yaml_event_t{ + typ: yaml_STREAM_END_EVENT, + } +} + +// Create DOCUMENT-START. +func yaml_document_start_event_initialize( + event *yaml_event_t, + version_directive *yaml_version_directive_t, + tag_directives []yaml_tag_directive_t, + implicit bool, +) { + *event = yaml_event_t{ + typ: yaml_DOCUMENT_START_EVENT, + version_directive: version_directive, + tag_directives: tag_directives, + implicit: implicit, + } +} + +// Create DOCUMENT-END. +func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) { + *event = yaml_event_t{ + typ: yaml_DOCUMENT_END_EVENT, + implicit: implicit, + } +} + +///* +// * Create ALIAS. +// */ +// +//YAML_DECLARE(int) +//yaml_alias_event_initialize(event *yaml_event_t, anchor *yaml_char_t) +//{ +// mark yaml_mark_t = { 0, 0, 0 } +// anchor_copy *yaml_char_t = NULL +// +// assert(event) // Non-NULL event object is expected. +// assert(anchor) // Non-NULL anchor is expected. +// +// if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0 +// +// anchor_copy = yaml_strdup(anchor) +// if (!anchor_copy) +// return 0 +// +// ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark) +// +// return 1 +//} + +// Create SCALAR. 
+func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool { + *event = yaml_event_t{ + typ: yaml_SCALAR_EVENT, + anchor: anchor, + tag: tag, + value: value, + implicit: plain_implicit, + quoted_implicit: quoted_implicit, + style: yaml_style_t(style), + } + return true +} + +// Create SEQUENCE-START. +func yaml_sequence_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_sequence_style_t) bool { + *event = yaml_event_t{ + typ: yaml_SEQUENCE_START_EVENT, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(style), + } + return true +} + +// Create SEQUENCE-END. +func yaml_sequence_end_event_initialize(event *yaml_event_t) bool { + *event = yaml_event_t{ + typ: yaml_SEQUENCE_END_EVENT, + } + return true +} + +// Create MAPPING-START. +func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) { + *event = yaml_event_t{ + typ: yaml_MAPPING_START_EVENT, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(style), + } +} + +// Create MAPPING-END. +func yaml_mapping_end_event_initialize(event *yaml_event_t) { + *event = yaml_event_t{ + typ: yaml_MAPPING_END_EVENT, + } +} + +// Destroy an event object. +func yaml_event_delete(event *yaml_event_t) { + *event = yaml_event_t{} +} + +///* +// * Create a document object. +// */ +// +//YAML_DECLARE(int) +//yaml_document_initialize(document *yaml_document_t, +// version_directive *yaml_version_directive_t, +// tag_directives_start *yaml_tag_directive_t, +// tag_directives_end *yaml_tag_directive_t, +// start_implicit int, end_implicit int) +//{ +// struct { +// error yaml_error_type_t +// } context +// struct { +// start *yaml_node_t +// end *yaml_node_t +// top *yaml_node_t +// } nodes = { NULL, NULL, NULL } +// version_directive_copy *yaml_version_directive_t = NULL +// struct { +// start *yaml_tag_directive_t +// end *yaml_tag_directive_t +// top *yaml_tag_directive_t +// } tag_directives_copy = { NULL, NULL, NULL } +// value yaml_tag_directive_t = { NULL, NULL } +// mark yaml_mark_t = { 0, 0, 0 } +// +// assert(document) // Non-NULL document object is expected. +// assert((tag_directives_start && tag_directives_end) || +// (tag_directives_start == tag_directives_end)) +// // Valid tag directives are expected. 
+// +// if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error +// +// if (version_directive) { +// version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t)) +// if (!version_directive_copy) goto error +// version_directive_copy.major = version_directive.major +// version_directive_copy.minor = version_directive.minor +// } +// +// if (tag_directives_start != tag_directives_end) { +// tag_directive *yaml_tag_directive_t +// if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE)) +// goto error +// for (tag_directive = tag_directives_start +// tag_directive != tag_directives_end; tag_directive ++) { +// assert(tag_directive.handle) +// assert(tag_directive.prefix) +// if (!yaml_check_utf8(tag_directive.handle, +// strlen((char *)tag_directive.handle))) +// goto error +// if (!yaml_check_utf8(tag_directive.prefix, +// strlen((char *)tag_directive.prefix))) +// goto error +// value.handle = yaml_strdup(tag_directive.handle) +// value.prefix = yaml_strdup(tag_directive.prefix) +// if (!value.handle || !value.prefix) goto error +// if (!PUSH(&context, tag_directives_copy, value)) +// goto error +// value.handle = NULL +// value.prefix = NULL +// } +// } +// +// DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy, +// tag_directives_copy.start, tag_directives_copy.top, +// start_implicit, end_implicit, mark, mark) +// +// return 1 +// +//error: +// STACK_DEL(&context, nodes) +// yaml_free(version_directive_copy) +// while (!STACK_EMPTY(&context, tag_directives_copy)) { +// value yaml_tag_directive_t = POP(&context, tag_directives_copy) +// yaml_free(value.handle) +// yaml_free(value.prefix) +// } +// STACK_DEL(&context, tag_directives_copy) +// yaml_free(value.handle) +// yaml_free(value.prefix) +// +// return 0 +//} +// +///* +// * Destroy a document object. +// */ +// +//YAML_DECLARE(void) +//yaml_document_delete(document *yaml_document_t) +//{ +// struct { +// error yaml_error_type_t +// } context +// tag_directive *yaml_tag_directive_t +// +// context.error = YAML_NO_ERROR // Eliminate a compiler warning. +// +// assert(document) // Non-NULL document object is expected. +// +// while (!STACK_EMPTY(&context, document.nodes)) { +// node yaml_node_t = POP(&context, document.nodes) +// yaml_free(node.tag) +// switch (node.type) { +// case YAML_SCALAR_NODE: +// yaml_free(node.data.scalar.value) +// break +// case YAML_SEQUENCE_NODE: +// STACK_DEL(&context, node.data.sequence.items) +// break +// case YAML_MAPPING_NODE: +// STACK_DEL(&context, node.data.mapping.pairs) +// break +// default: +// assert(0) // Should not happen. +// } +// } +// STACK_DEL(&context, document.nodes) +// +// yaml_free(document.version_directive) +// for (tag_directive = document.tag_directives.start +// tag_directive != document.tag_directives.end +// tag_directive++) { +// yaml_free(tag_directive.handle) +// yaml_free(tag_directive.prefix) +// } +// yaml_free(document.tag_directives.start) +// +// memset(document, 0, sizeof(yaml_document_t)) +//} +// +///** +// * Get a document node. +// */ +// +//YAML_DECLARE(yaml_node_t *) +//yaml_document_get_node(document *yaml_document_t, index int) +//{ +// assert(document) // Non-NULL document object is expected. +// +// if (index > 0 && document.nodes.start + index <= document.nodes.top) { +// return document.nodes.start + index - 1 +// } +// return NULL +//} +// +///** +// * Get the root object. 
+// */ +// +//YAML_DECLARE(yaml_node_t *) +//yaml_document_get_root_node(document *yaml_document_t) +//{ +// assert(document) // Non-NULL document object is expected. +// +// if (document.nodes.top != document.nodes.start) { +// return document.nodes.start +// } +// return NULL +//} +// +///* +// * Add a scalar node to a document. +// */ +// +//YAML_DECLARE(int) +//yaml_document_add_scalar(document *yaml_document_t, +// tag *yaml_char_t, value *yaml_char_t, length int, +// style yaml_scalar_style_t) +//{ +// struct { +// error yaml_error_type_t +// } context +// mark yaml_mark_t = { 0, 0, 0 } +// tag_copy *yaml_char_t = NULL +// value_copy *yaml_char_t = NULL +// node yaml_node_t +// +// assert(document) // Non-NULL document object is expected. +// assert(value) // Non-NULL value is expected. +// +// if (!tag) { +// tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG +// } +// +// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error +// tag_copy = yaml_strdup(tag) +// if (!tag_copy) goto error +// +// if (length < 0) { +// length = strlen((char *)value) +// } +// +// if (!yaml_check_utf8(value, length)) goto error +// value_copy = yaml_malloc(length+1) +// if (!value_copy) goto error +// memcpy(value_copy, value, length) +// value_copy[length] = '\0' +// +// SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark) +// if (!PUSH(&context, document.nodes, node)) goto error +// +// return document.nodes.top - document.nodes.start +// +//error: +// yaml_free(tag_copy) +// yaml_free(value_copy) +// +// return 0 +//} +// +///* +// * Add a sequence node to a document. +// */ +// +//YAML_DECLARE(int) +//yaml_document_add_sequence(document *yaml_document_t, +// tag *yaml_char_t, style yaml_sequence_style_t) +//{ +// struct { +// error yaml_error_type_t +// } context +// mark yaml_mark_t = { 0, 0, 0 } +// tag_copy *yaml_char_t = NULL +// struct { +// start *yaml_node_item_t +// end *yaml_node_item_t +// top *yaml_node_item_t +// } items = { NULL, NULL, NULL } +// node yaml_node_t +// +// assert(document) // Non-NULL document object is expected. +// +// if (!tag) { +// tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG +// } +// +// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error +// tag_copy = yaml_strdup(tag) +// if (!tag_copy) goto error +// +// if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error +// +// SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end, +// style, mark, mark) +// if (!PUSH(&context, document.nodes, node)) goto error +// +// return document.nodes.top - document.nodes.start +// +//error: +// STACK_DEL(&context, items) +// yaml_free(tag_copy) +// +// return 0 +//} +// +///* +// * Add a mapping node to a document. +// */ +// +//YAML_DECLARE(int) +//yaml_document_add_mapping(document *yaml_document_t, +// tag *yaml_char_t, style yaml_mapping_style_t) +//{ +// struct { +// error yaml_error_type_t +// } context +// mark yaml_mark_t = { 0, 0, 0 } +// tag_copy *yaml_char_t = NULL +// struct { +// start *yaml_node_pair_t +// end *yaml_node_pair_t +// top *yaml_node_pair_t +// } pairs = { NULL, NULL, NULL } +// node yaml_node_t +// +// assert(document) // Non-NULL document object is expected. 
+// +// if (!tag) { +// tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG +// } +// +// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error +// tag_copy = yaml_strdup(tag) +// if (!tag_copy) goto error +// +// if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error +// +// MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end, +// style, mark, mark) +// if (!PUSH(&context, document.nodes, node)) goto error +// +// return document.nodes.top - document.nodes.start +// +//error: +// STACK_DEL(&context, pairs) +// yaml_free(tag_copy) +// +// return 0 +//} +// +///* +// * Append an item to a sequence node. +// */ +// +//YAML_DECLARE(int) +//yaml_document_append_sequence_item(document *yaml_document_t, +// sequence int, item int) +//{ +// struct { +// error yaml_error_type_t +// } context +// +// assert(document) // Non-NULL document is required. +// assert(sequence > 0 +// && document.nodes.start + sequence <= document.nodes.top) +// // Valid sequence id is required. +// assert(document.nodes.start[sequence-1].type == YAML_SEQUENCE_NODE) +// // A sequence node is required. +// assert(item > 0 && document.nodes.start + item <= document.nodes.top) +// // Valid item id is required. +// +// if (!PUSH(&context, +// document.nodes.start[sequence-1].data.sequence.items, item)) +// return 0 +// +// return 1 +//} +// +///* +// * Append a pair of a key and a value to a mapping node. +// */ +// +//YAML_DECLARE(int) +//yaml_document_append_mapping_pair(document *yaml_document_t, +// mapping int, key int, value int) +//{ +// struct { +// error yaml_error_type_t +// } context +// +// pair yaml_node_pair_t +// +// assert(document) // Non-NULL document is required. +// assert(mapping > 0 +// && document.nodes.start + mapping <= document.nodes.top) +// // Valid mapping id is required. +// assert(document.nodes.start[mapping-1].type == YAML_MAPPING_NODE) +// // A mapping node is required. +// assert(key > 0 && document.nodes.start + key <= document.nodes.top) +// // Valid key id is required. +// assert(value > 0 && document.nodes.start + value <= document.nodes.top) +// // Valid value id is required. +// +// pair.key = key +// pair.value = value +// +// if (!PUSH(&context, +// document.nodes.start[mapping-1].data.mapping.pairs, pair)) +// return 0 +// +// return 1 +//} +// +// diff --git a/vendor/gopkg.in/yaml.v2/decode.go b/vendor/gopkg.in/yaml.v2/decode.go new file mode 100644 index 0000000..129bc2a --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/decode.go @@ -0,0 +1,815 @@ +package yaml + +import ( + "encoding" + "encoding/base64" + "fmt" + "io" + "math" + "reflect" + "strconv" + "time" +) + +const ( + documentNode = 1 << iota + mappingNode + sequenceNode + scalarNode + aliasNode +) + +type node struct { + kind int + line, column int + tag string + // For an alias node, alias holds the resolved alias. + alias *node + value string + implicit bool + children []*node + anchors map[string]*node +} + +// ---------------------------------------------------------------------------- +// Parser, produces a node tree out of a libyaml event stream. 
+ +type parser struct { + parser yaml_parser_t + event yaml_event_t + doc *node + doneInit bool +} + +func newParser(b []byte) *parser { + p := parser{} + if !yaml_parser_initialize(&p.parser) { + panic("failed to initialize YAML emitter") + } + if len(b) == 0 { + b = []byte{'\n'} + } + yaml_parser_set_input_string(&p.parser, b) + return &p +} + +func newParserFromReader(r io.Reader) *parser { + p := parser{} + if !yaml_parser_initialize(&p.parser) { + panic("failed to initialize YAML emitter") + } + yaml_parser_set_input_reader(&p.parser, r) + return &p +} + +func (p *parser) init() { + if p.doneInit { + return + } + p.expect(yaml_STREAM_START_EVENT) + p.doneInit = true +} + +func (p *parser) destroy() { + if p.event.typ != yaml_NO_EVENT { + yaml_event_delete(&p.event) + } + yaml_parser_delete(&p.parser) +} + +// expect consumes an event from the event stream and +// checks that it's of the expected type. +func (p *parser) expect(e yaml_event_type_t) { + if p.event.typ == yaml_NO_EVENT { + if !yaml_parser_parse(&p.parser, &p.event) { + p.fail() + } + } + if p.event.typ == yaml_STREAM_END_EVENT { + failf("attempted to go past the end of stream; corrupted value?") + } + if p.event.typ != e { + p.parser.problem = fmt.Sprintf("expected %s event but got %s", e, p.event.typ) + p.fail() + } + yaml_event_delete(&p.event) + p.event.typ = yaml_NO_EVENT +} + +// peek peeks at the next event in the event stream, +// puts the results into p.event and returns the event type. +func (p *parser) peek() yaml_event_type_t { + if p.event.typ != yaml_NO_EVENT { + return p.event.typ + } + if !yaml_parser_parse(&p.parser, &p.event) { + p.fail() + } + return p.event.typ +} + +func (p *parser) fail() { + var where string + var line int + if p.parser.problem_mark.line != 0 { + line = p.parser.problem_mark.line + // Scanner errors don't iterate line before returning error + if p.parser.error == yaml_SCANNER_ERROR { + line++ + } + } else if p.parser.context_mark.line != 0 { + line = p.parser.context_mark.line + } + if line != 0 { + where = "line " + strconv.Itoa(line) + ": " + } + var msg string + if len(p.parser.problem) > 0 { + msg = p.parser.problem + } else { + msg = "unknown problem parsing YAML content" + } + failf("%s%s", where, msg) +} + +func (p *parser) anchor(n *node, anchor []byte) { + if anchor != nil { + p.doc.anchors[string(anchor)] = n + } +} + +func (p *parser) parse() *node { + p.init() + switch p.peek() { + case yaml_SCALAR_EVENT: + return p.scalar() + case yaml_ALIAS_EVENT: + return p.alias() + case yaml_MAPPING_START_EVENT: + return p.mapping() + case yaml_SEQUENCE_START_EVENT: + return p.sequence() + case yaml_DOCUMENT_START_EVENT: + return p.document() + case yaml_STREAM_END_EVENT: + // Happens when attempting to decode an empty buffer. 
+ return nil + default: + panic("attempted to parse unknown event: " + p.event.typ.String()) + } +} + +func (p *parser) node(kind int) *node { + return &node{ + kind: kind, + line: p.event.start_mark.line, + column: p.event.start_mark.column, + } +} + +func (p *parser) document() *node { + n := p.node(documentNode) + n.anchors = make(map[string]*node) + p.doc = n + p.expect(yaml_DOCUMENT_START_EVENT) + n.children = append(n.children, p.parse()) + p.expect(yaml_DOCUMENT_END_EVENT) + return n +} + +func (p *parser) alias() *node { + n := p.node(aliasNode) + n.value = string(p.event.anchor) + n.alias = p.doc.anchors[n.value] + if n.alias == nil { + failf("unknown anchor '%s' referenced", n.value) + } + p.expect(yaml_ALIAS_EVENT) + return n +} + +func (p *parser) scalar() *node { + n := p.node(scalarNode) + n.value = string(p.event.value) + n.tag = string(p.event.tag) + n.implicit = p.event.implicit + p.anchor(n, p.event.anchor) + p.expect(yaml_SCALAR_EVENT) + return n +} + +func (p *parser) sequence() *node { + n := p.node(sequenceNode) + p.anchor(n, p.event.anchor) + p.expect(yaml_SEQUENCE_START_EVENT) + for p.peek() != yaml_SEQUENCE_END_EVENT { + n.children = append(n.children, p.parse()) + } + p.expect(yaml_SEQUENCE_END_EVENT) + return n +} + +func (p *parser) mapping() *node { + n := p.node(mappingNode) + p.anchor(n, p.event.anchor) + p.expect(yaml_MAPPING_START_EVENT) + for p.peek() != yaml_MAPPING_END_EVENT { + n.children = append(n.children, p.parse(), p.parse()) + } + p.expect(yaml_MAPPING_END_EVENT) + return n +} + +// ---------------------------------------------------------------------------- +// Decoder, unmarshals a node into a provided value. + +type decoder struct { + doc *node + aliases map[*node]bool + mapType reflect.Type + terrors []string + strict bool + + decodeCount int + aliasCount int + aliasDepth int +} + +var ( + mapItemType = reflect.TypeOf(MapItem{}) + durationType = reflect.TypeOf(time.Duration(0)) + defaultMapType = reflect.TypeOf(map[interface{}]interface{}{}) + ifaceType = defaultMapType.Elem() + timeType = reflect.TypeOf(time.Time{}) + ptrTimeType = reflect.TypeOf(&time.Time{}) +) + +func newDecoder(strict bool) *decoder { + d := &decoder{mapType: defaultMapType, strict: strict} + d.aliases = make(map[*node]bool) + return d +} + +func (d *decoder) terror(n *node, tag string, out reflect.Value) { + if n.tag != "" { + tag = n.tag + } + value := n.value + if tag != yaml_SEQ_TAG && tag != yaml_MAP_TAG { + if len(value) > 10 { + value = " `" + value[:7] + "...`" + } else { + value = " `" + value + "`" + } + } + d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.line+1, shortTag(tag), value, out.Type())) +} + +func (d *decoder) callUnmarshaler(n *node, u Unmarshaler) (good bool) { + terrlen := len(d.terrors) + err := u.UnmarshalYAML(func(v interface{}) (err error) { + defer handleErr(&err) + d.unmarshal(n, reflect.ValueOf(v)) + if len(d.terrors) > terrlen { + issues := d.terrors[terrlen:] + d.terrors = d.terrors[:terrlen] + return &TypeError{issues} + } + return nil + }) + if e, ok := err.(*TypeError); ok { + d.terrors = append(d.terrors, e.Errors...) + return false + } + if err != nil { + fail(err) + } + return true +} + +// d.prepare initializes and dereferences pointers and calls UnmarshalYAML +// if a value is found to implement it. +// It returns the initialized and dereferenced out value, whether +// unmarshalling was already done by UnmarshalYAML, and if so whether +// its types unmarshalled appropriately. 
+// +// If n holds a null value, prepare returns before doing anything. +func (d *decoder) prepare(n *node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) { + if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "~" || n.value == "" && n.implicit) { + return out, false, false + } + again := true + for again { + again = false + if out.Kind() == reflect.Ptr { + if out.IsNil() { + out.Set(reflect.New(out.Type().Elem())) + } + out = out.Elem() + again = true + } + if out.CanAddr() { + if u, ok := out.Addr().Interface().(Unmarshaler); ok { + good = d.callUnmarshaler(n, u) + return out, true, good + } + } + } + return out, false, false +} + +const ( + // 400,000 decode operations is ~500kb of dense object declarations, or + // ~5kb of dense object declarations with 10000% alias expansion + alias_ratio_range_low = 400000 + + // 4,000,000 decode operations is ~5MB of dense object declarations, or + // ~4.5MB of dense object declarations with 10% alias expansion + alias_ratio_range_high = 4000000 + + // alias_ratio_range is the range over which we scale allowed alias ratios + alias_ratio_range = float64(alias_ratio_range_high - alias_ratio_range_low) +) + +func allowedAliasRatio(decodeCount int) float64 { + switch { + case decodeCount <= alias_ratio_range_low: + // allow 99% to come from alias expansion for small-to-medium documents + return 0.99 + case decodeCount >= alias_ratio_range_high: + // allow 10% to come from alias expansion for very large documents + return 0.10 + default: + // scale smoothly from 99% down to 10% over the range. + // this maps to 396,000 - 400,000 allowed alias-driven decodes over the range. + // 400,000 decode operations is ~100MB of allocations in worst-case scenarios (single-item maps). + return 0.99 - 0.89*(float64(decodeCount-alias_ratio_range_low)/alias_ratio_range) + } +} + +func (d *decoder) unmarshal(n *node, out reflect.Value) (good bool) { + d.decodeCount++ + if d.aliasDepth > 0 { + d.aliasCount++ + } + if d.aliasCount > 100 && d.decodeCount > 1000 && float64(d.aliasCount)/float64(d.decodeCount) > allowedAliasRatio(d.decodeCount) { + failf("document contains excessive aliasing") + } + switch n.kind { + case documentNode: + return d.document(n, out) + case aliasNode: + return d.alias(n, out) + } + out, unmarshaled, good := d.prepare(n, out) + if unmarshaled { + return good + } + switch n.kind { + case scalarNode: + good = d.scalar(n, out) + case mappingNode: + good = d.mapping(n, out) + case sequenceNode: + good = d.sequence(n, out) + default: + panic("internal error: unknown node kind: " + strconv.Itoa(n.kind)) + } + return good +} + +func (d *decoder) document(n *node, out reflect.Value) (good bool) { + if len(n.children) == 1 { + d.doc = n + d.unmarshal(n.children[0], out) + return true + } + return false +} + +func (d *decoder) alias(n *node, out reflect.Value) (good bool) { + if d.aliases[n] { + // TODO this could actually be allowed in some circumstances. 
+ failf("anchor '%s' value contains itself", n.value) + } + d.aliases[n] = true + d.aliasDepth++ + good = d.unmarshal(n.alias, out) + d.aliasDepth-- + delete(d.aliases, n) + return good +} + +var zeroValue reflect.Value + +func resetMap(out reflect.Value) { + for _, k := range out.MapKeys() { + out.SetMapIndex(k, zeroValue) + } +} + +func (d *decoder) scalar(n *node, out reflect.Value) bool { + var tag string + var resolved interface{} + if n.tag == "" && !n.implicit { + tag = yaml_STR_TAG + resolved = n.value + } else { + tag, resolved = resolve(n.tag, n.value) + if tag == yaml_BINARY_TAG { + data, err := base64.StdEncoding.DecodeString(resolved.(string)) + if err != nil { + failf("!!binary value contains invalid base64 data") + } + resolved = string(data) + } + } + if resolved == nil { + if out.Kind() == reflect.Map && !out.CanAddr() { + resetMap(out) + } else { + out.Set(reflect.Zero(out.Type())) + } + return true + } + if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() { + // We've resolved to exactly the type we want, so use that. + out.Set(resolvedv) + return true + } + // Perhaps we can use the value as a TextUnmarshaler to + // set its value. + if out.CanAddr() { + u, ok := out.Addr().Interface().(encoding.TextUnmarshaler) + if ok { + var text []byte + if tag == yaml_BINARY_TAG { + text = []byte(resolved.(string)) + } else { + // We let any value be unmarshaled into TextUnmarshaler. + // That might be more lax than we'd like, but the + // TextUnmarshaler itself should bowl out any dubious values. + text = []byte(n.value) + } + err := u.UnmarshalText(text) + if err != nil { + fail(err) + } + return true + } + } + switch out.Kind() { + case reflect.String: + if tag == yaml_BINARY_TAG { + out.SetString(resolved.(string)) + return true + } + if resolved != nil { + out.SetString(n.value) + return true + } + case reflect.Interface: + if resolved == nil { + out.Set(reflect.Zero(out.Type())) + } else if tag == yaml_TIMESTAMP_TAG { + // It looks like a timestamp but for backward compatibility + // reasons we set it as a string, so that code that unmarshals + // timestamp-like values into interface{} will continue to + // see a string and not a time.Time. + // TODO(v3) Drop this. 
+ out.Set(reflect.ValueOf(n.value)) + } else { + out.Set(reflect.ValueOf(resolved)) + } + return true + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + switch resolved := resolved.(type) { + case int: + if !out.OverflowInt(int64(resolved)) { + out.SetInt(int64(resolved)) + return true + } + case int64: + if !out.OverflowInt(resolved) { + out.SetInt(resolved) + return true + } + case uint64: + if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) { + out.SetInt(int64(resolved)) + return true + } + case float64: + if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) { + out.SetInt(int64(resolved)) + return true + } + case string: + if out.Type() == durationType { + d, err := time.ParseDuration(resolved) + if err == nil { + out.SetInt(int64(d)) + return true + } + } + } + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + switch resolved := resolved.(type) { + case int: + if resolved >= 0 && !out.OverflowUint(uint64(resolved)) { + out.SetUint(uint64(resolved)) + return true + } + case int64: + if resolved >= 0 && !out.OverflowUint(uint64(resolved)) { + out.SetUint(uint64(resolved)) + return true + } + case uint64: + if !out.OverflowUint(uint64(resolved)) { + out.SetUint(uint64(resolved)) + return true + } + case float64: + if resolved <= math.MaxUint64 && !out.OverflowUint(uint64(resolved)) { + out.SetUint(uint64(resolved)) + return true + } + } + case reflect.Bool: + switch resolved := resolved.(type) { + case bool: + out.SetBool(resolved) + return true + } + case reflect.Float32, reflect.Float64: + switch resolved := resolved.(type) { + case int: + out.SetFloat(float64(resolved)) + return true + case int64: + out.SetFloat(float64(resolved)) + return true + case uint64: + out.SetFloat(float64(resolved)) + return true + case float64: + out.SetFloat(resolved) + return true + } + case reflect.Struct: + if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() { + out.Set(resolvedv) + return true + } + case reflect.Ptr: + if out.Type().Elem() == reflect.TypeOf(resolved) { + // TODO DOes this make sense? When is out a Ptr except when decoding a nil value? + elem := reflect.New(out.Type().Elem()) + elem.Elem().Set(reflect.ValueOf(resolved)) + out.Set(elem) + return true + } + } + d.terror(n, tag, out) + return false +} + +func settableValueOf(i interface{}) reflect.Value { + v := reflect.ValueOf(i) + sv := reflect.New(v.Type()).Elem() + sv.Set(v) + return sv +} + +func (d *decoder) sequence(n *node, out reflect.Value) (good bool) { + l := len(n.children) + + var iface reflect.Value + switch out.Kind() { + case reflect.Slice: + out.Set(reflect.MakeSlice(out.Type(), l, l)) + case reflect.Array: + if l != out.Len() { + failf("invalid array: want %d elements but got %d", out.Len(), l) + } + case reflect.Interface: + // No type hints. Will have to use a generic sequence. 
+ iface = out + out = settableValueOf(make([]interface{}, l)) + default: + d.terror(n, yaml_SEQ_TAG, out) + return false + } + et := out.Type().Elem() + + j := 0 + for i := 0; i < l; i++ { + e := reflect.New(et).Elem() + if ok := d.unmarshal(n.children[i], e); ok { + out.Index(j).Set(e) + j++ + } + } + if out.Kind() != reflect.Array { + out.Set(out.Slice(0, j)) + } + if iface.IsValid() { + iface.Set(out) + } + return true +} + +func (d *decoder) mapping(n *node, out reflect.Value) (good bool) { + switch out.Kind() { + case reflect.Struct: + return d.mappingStruct(n, out) + case reflect.Slice: + return d.mappingSlice(n, out) + case reflect.Map: + // okay + case reflect.Interface: + if d.mapType.Kind() == reflect.Map { + iface := out + out = reflect.MakeMap(d.mapType) + iface.Set(out) + } else { + slicev := reflect.New(d.mapType).Elem() + if !d.mappingSlice(n, slicev) { + return false + } + out.Set(slicev) + return true + } + default: + d.terror(n, yaml_MAP_TAG, out) + return false + } + outt := out.Type() + kt := outt.Key() + et := outt.Elem() + + mapType := d.mapType + if outt.Key() == ifaceType && outt.Elem() == ifaceType { + d.mapType = outt + } + + if out.IsNil() { + out.Set(reflect.MakeMap(outt)) + } + l := len(n.children) + for i := 0; i < l; i += 2 { + if isMerge(n.children[i]) { + d.merge(n.children[i+1], out) + continue + } + k := reflect.New(kt).Elem() + if d.unmarshal(n.children[i], k) { + kkind := k.Kind() + if kkind == reflect.Interface { + kkind = k.Elem().Kind() + } + if kkind == reflect.Map || kkind == reflect.Slice { + failf("invalid map key: %#v", k.Interface()) + } + e := reflect.New(et).Elem() + if d.unmarshal(n.children[i+1], e) { + d.setMapIndex(n.children[i+1], out, k, e) + } + } + } + d.mapType = mapType + return true +} + +func (d *decoder) setMapIndex(n *node, out, k, v reflect.Value) { + if d.strict && out.MapIndex(k) != zeroValue { + d.terrors = append(d.terrors, fmt.Sprintf("line %d: key %#v already set in map", n.line+1, k.Interface())) + return + } + out.SetMapIndex(k, v) +} + +func (d *decoder) mappingSlice(n *node, out reflect.Value) (good bool) { + outt := out.Type() + if outt.Elem() != mapItemType { + d.terror(n, yaml_MAP_TAG, out) + return false + } + + mapType := d.mapType + d.mapType = outt + + var slice []MapItem + var l = len(n.children) + for i := 0; i < l; i += 2 { + if isMerge(n.children[i]) { + d.merge(n.children[i+1], out) + continue + } + item := MapItem{} + k := reflect.ValueOf(&item.Key).Elem() + if d.unmarshal(n.children[i], k) { + v := reflect.ValueOf(&item.Value).Elem() + if d.unmarshal(n.children[i+1], v) { + slice = append(slice, item) + } + } + } + out.Set(reflect.ValueOf(slice)) + d.mapType = mapType + return true +} + +func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) { + sinfo, err := getStructInfo(out.Type()) + if err != nil { + panic(err) + } + name := settableValueOf("") + l := len(n.children) + + var inlineMap reflect.Value + var elemType reflect.Type + if sinfo.InlineMap != -1 { + inlineMap = out.Field(sinfo.InlineMap) + inlineMap.Set(reflect.New(inlineMap.Type()).Elem()) + elemType = inlineMap.Type().Elem() + } + + var doneFields []bool + if d.strict { + doneFields = make([]bool, len(sinfo.FieldsList)) + } + for i := 0; i < l; i += 2 { + ni := n.children[i] + if isMerge(ni) { + d.merge(n.children[i+1], out) + continue + } + if !d.unmarshal(ni, name) { + continue + } + if info, ok := sinfo.FieldsMap[name.String()]; ok { + if d.strict { + if doneFields[info.Id] { + d.terrors = append(d.terrors, 
fmt.Sprintf("line %d: field %s already set in type %s", ni.line+1, name.String(), out.Type())) + continue + } + doneFields[info.Id] = true + } + var field reflect.Value + if info.Inline == nil { + field = out.Field(info.Num) + } else { + field = out.FieldByIndex(info.Inline) + } + d.unmarshal(n.children[i+1], field) + } else if sinfo.InlineMap != -1 { + if inlineMap.IsNil() { + inlineMap.Set(reflect.MakeMap(inlineMap.Type())) + } + value := reflect.New(elemType).Elem() + d.unmarshal(n.children[i+1], value) + d.setMapIndex(n.children[i+1], inlineMap, name, value) + } else if d.strict { + d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in type %s", ni.line+1, name.String(), out.Type())) + } + } + return true +} + +func failWantMap() { + failf("map merge requires map or sequence of maps as the value") +} + +func (d *decoder) merge(n *node, out reflect.Value) { + switch n.kind { + case mappingNode: + d.unmarshal(n, out) + case aliasNode: + if n.alias != nil && n.alias.kind != mappingNode { + failWantMap() + } + d.unmarshal(n, out) + case sequenceNode: + // Step backwards as earlier nodes take precedence. + for i := len(n.children) - 1; i >= 0; i-- { + ni := n.children[i] + if ni.kind == aliasNode { + if ni.alias != nil && ni.alias.kind != mappingNode { + failWantMap() + } + } else if ni.kind != mappingNode { + failWantMap() + } + d.unmarshal(ni, out) + } + default: + failWantMap() + } +} + +func isMerge(n *node) bool { + return n.kind == scalarNode && n.value == "<<" && (n.implicit == true || n.tag == yaml_MERGE_TAG) +} diff --git a/vendor/gopkg.in/yaml.v2/emitterc.go b/vendor/gopkg.in/yaml.v2/emitterc.go new file mode 100644 index 0000000..a1c2cc5 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/emitterc.go @@ -0,0 +1,1685 @@ +package yaml + +import ( + "bytes" + "fmt" +) + +// Flush the buffer if needed. +func flush(emitter *yaml_emitter_t) bool { + if emitter.buffer_pos+5 >= len(emitter.buffer) { + return yaml_emitter_flush(emitter) + } + return true +} + +// Put a character to the output buffer. +func put(emitter *yaml_emitter_t, value byte) bool { + if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { + return false + } + emitter.buffer[emitter.buffer_pos] = value + emitter.buffer_pos++ + emitter.column++ + return true +} + +// Put a line break to the output buffer. +func put_break(emitter *yaml_emitter_t) bool { + if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { + return false + } + switch emitter.line_break { + case yaml_CR_BREAK: + emitter.buffer[emitter.buffer_pos] = '\r' + emitter.buffer_pos += 1 + case yaml_LN_BREAK: + emitter.buffer[emitter.buffer_pos] = '\n' + emitter.buffer_pos += 1 + case yaml_CRLN_BREAK: + emitter.buffer[emitter.buffer_pos+0] = '\r' + emitter.buffer[emitter.buffer_pos+1] = '\n' + emitter.buffer_pos += 2 + default: + panic("unknown line break setting") + } + emitter.column = 0 + emitter.line++ + return true +} + +// Copy a character from a string into buffer. 
+func write(emitter *yaml_emitter_t, s []byte, i *int) bool { + if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { + return false + } + p := emitter.buffer_pos + w := width(s[*i]) + switch w { + case 4: + emitter.buffer[p+3] = s[*i+3] + fallthrough + case 3: + emitter.buffer[p+2] = s[*i+2] + fallthrough + case 2: + emitter.buffer[p+1] = s[*i+1] + fallthrough + case 1: + emitter.buffer[p+0] = s[*i+0] + default: + panic("unknown character width") + } + emitter.column++ + emitter.buffer_pos += w + *i += w + return true +} + +// Write a whole string into buffer. +func write_all(emitter *yaml_emitter_t, s []byte) bool { + for i := 0; i < len(s); { + if !write(emitter, s, &i) { + return false + } + } + return true +} + +// Copy a line break character from a string into buffer. +func write_break(emitter *yaml_emitter_t, s []byte, i *int) bool { + if s[*i] == '\n' { + if !put_break(emitter) { + return false + } + *i++ + } else { + if !write(emitter, s, i) { + return false + } + emitter.column = 0 + emitter.line++ + } + return true +} + +// Set an emitter error and return false. +func yaml_emitter_set_emitter_error(emitter *yaml_emitter_t, problem string) bool { + emitter.error = yaml_EMITTER_ERROR + emitter.problem = problem + return false +} + +// Emit an event. +func yaml_emitter_emit(emitter *yaml_emitter_t, event *yaml_event_t) bool { + emitter.events = append(emitter.events, *event) + for !yaml_emitter_need_more_events(emitter) { + event := &emitter.events[emitter.events_head] + if !yaml_emitter_analyze_event(emitter, event) { + return false + } + if !yaml_emitter_state_machine(emitter, event) { + return false + } + yaml_event_delete(event) + emitter.events_head++ + } + return true +} + +// Check if we need to accumulate more events before emitting. +// +// We accumulate extra +// - 1 event for DOCUMENT-START +// - 2 events for SEQUENCE-START +// - 3 events for MAPPING-START +// +func yaml_emitter_need_more_events(emitter *yaml_emitter_t) bool { + if emitter.events_head == len(emitter.events) { + return true + } + var accumulate int + switch emitter.events[emitter.events_head].typ { + case yaml_DOCUMENT_START_EVENT: + accumulate = 1 + break + case yaml_SEQUENCE_START_EVENT: + accumulate = 2 + break + case yaml_MAPPING_START_EVENT: + accumulate = 3 + break + default: + return false + } + if len(emitter.events)-emitter.events_head > accumulate { + return false + } + var level int + for i := emitter.events_head; i < len(emitter.events); i++ { + switch emitter.events[i].typ { + case yaml_STREAM_START_EVENT, yaml_DOCUMENT_START_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT: + level++ + case yaml_STREAM_END_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_END_EVENT, yaml_MAPPING_END_EVENT: + level-- + } + if level == 0 { + return false + } + } + return true +} + +// Append a directive to the directives stack. +func yaml_emitter_append_tag_directive(emitter *yaml_emitter_t, value *yaml_tag_directive_t, allow_duplicates bool) bool { + for i := 0; i < len(emitter.tag_directives); i++ { + if bytes.Equal(value.handle, emitter.tag_directives[i].handle) { + if allow_duplicates { + return true + } + return yaml_emitter_set_emitter_error(emitter, "duplicate %TAG directive") + } + } + + // [Go] Do we actually need to copy this given garbage collection + // and the lack of deallocating destructors? 
+ tag_copy := yaml_tag_directive_t{ + handle: make([]byte, len(value.handle)), + prefix: make([]byte, len(value.prefix)), + } + copy(tag_copy.handle, value.handle) + copy(tag_copy.prefix, value.prefix) + emitter.tag_directives = append(emitter.tag_directives, tag_copy) + return true +} + +// Increase the indentation level. +func yaml_emitter_increase_indent(emitter *yaml_emitter_t, flow, indentless bool) bool { + emitter.indents = append(emitter.indents, emitter.indent) + if emitter.indent < 0 { + if flow { + emitter.indent = emitter.best_indent + } else { + emitter.indent = 0 + } + } else if !indentless { + emitter.indent += emitter.best_indent + } + return true +} + +// State dispatcher. +func yaml_emitter_state_machine(emitter *yaml_emitter_t, event *yaml_event_t) bool { + switch emitter.state { + default: + case yaml_EMIT_STREAM_START_STATE: + return yaml_emitter_emit_stream_start(emitter, event) + + case yaml_EMIT_FIRST_DOCUMENT_START_STATE: + return yaml_emitter_emit_document_start(emitter, event, true) + + case yaml_EMIT_DOCUMENT_START_STATE: + return yaml_emitter_emit_document_start(emitter, event, false) + + case yaml_EMIT_DOCUMENT_CONTENT_STATE: + return yaml_emitter_emit_document_content(emitter, event) + + case yaml_EMIT_DOCUMENT_END_STATE: + return yaml_emitter_emit_document_end(emitter, event) + + case yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE: + return yaml_emitter_emit_flow_sequence_item(emitter, event, true) + + case yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE: + return yaml_emitter_emit_flow_sequence_item(emitter, event, false) + + case yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE: + return yaml_emitter_emit_flow_mapping_key(emitter, event, true) + + case yaml_EMIT_FLOW_MAPPING_KEY_STATE: + return yaml_emitter_emit_flow_mapping_key(emitter, event, false) + + case yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE: + return yaml_emitter_emit_flow_mapping_value(emitter, event, true) + + case yaml_EMIT_FLOW_MAPPING_VALUE_STATE: + return yaml_emitter_emit_flow_mapping_value(emitter, event, false) + + case yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE: + return yaml_emitter_emit_block_sequence_item(emitter, event, true) + + case yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE: + return yaml_emitter_emit_block_sequence_item(emitter, event, false) + + case yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE: + return yaml_emitter_emit_block_mapping_key(emitter, event, true) + + case yaml_EMIT_BLOCK_MAPPING_KEY_STATE: + return yaml_emitter_emit_block_mapping_key(emitter, event, false) + + case yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE: + return yaml_emitter_emit_block_mapping_value(emitter, event, true) + + case yaml_EMIT_BLOCK_MAPPING_VALUE_STATE: + return yaml_emitter_emit_block_mapping_value(emitter, event, false) + + case yaml_EMIT_END_STATE: + return yaml_emitter_set_emitter_error(emitter, "expected nothing after STREAM-END") + } + panic("invalid emitter state") +} + +// Expect STREAM-START. 
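+//
+// Besides checking the event type, this step normalizes the emitter settings:
+// an unspecified encoding defaults to UTF-8, best_indent is reset to 2 when it
+// falls outside the 2..9 range, best_width falls back to 80 when it is too
+// small (and to effectively unlimited when negative), an unset line break
+// defaults to '\n', and a BOM is written only for non-UTF-8 encodings.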
+func yaml_emitter_emit_stream_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if event.typ != yaml_STREAM_START_EVENT { + return yaml_emitter_set_emitter_error(emitter, "expected STREAM-START") + } + if emitter.encoding == yaml_ANY_ENCODING { + emitter.encoding = event.encoding + if emitter.encoding == yaml_ANY_ENCODING { + emitter.encoding = yaml_UTF8_ENCODING + } + } + if emitter.best_indent < 2 || emitter.best_indent > 9 { + emitter.best_indent = 2 + } + if emitter.best_width >= 0 && emitter.best_width <= emitter.best_indent*2 { + emitter.best_width = 80 + } + if emitter.best_width < 0 { + emitter.best_width = 1<<31 - 1 + } + if emitter.line_break == yaml_ANY_BREAK { + emitter.line_break = yaml_LN_BREAK + } + + emitter.indent = -1 + emitter.line = 0 + emitter.column = 0 + emitter.whitespace = true + emitter.indention = true + + if emitter.encoding != yaml_UTF8_ENCODING { + if !yaml_emitter_write_bom(emitter) { + return false + } + } + emitter.state = yaml_EMIT_FIRST_DOCUMENT_START_STATE + return true +} + +// Expect DOCUMENT-START or STREAM-END. +func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { + + if event.typ == yaml_DOCUMENT_START_EVENT { + + if event.version_directive != nil { + if !yaml_emitter_analyze_version_directive(emitter, event.version_directive) { + return false + } + } + + for i := 0; i < len(event.tag_directives); i++ { + tag_directive := &event.tag_directives[i] + if !yaml_emitter_analyze_tag_directive(emitter, tag_directive) { + return false + } + if !yaml_emitter_append_tag_directive(emitter, tag_directive, false) { + return false + } + } + + for i := 0; i < len(default_tag_directives); i++ { + tag_directive := &default_tag_directives[i] + if !yaml_emitter_append_tag_directive(emitter, tag_directive, true) { + return false + } + } + + implicit := event.implicit + if !first || emitter.canonical { + implicit = false + } + + if emitter.open_ended && (event.version_directive != nil || len(event.tag_directives) > 0) { + if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + + if event.version_directive != nil { + implicit = false + if !yaml_emitter_write_indicator(emitter, []byte("%YAML"), true, false, false) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte("1.1"), true, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + + if len(event.tag_directives) > 0 { + implicit = false + for i := 0; i < len(event.tag_directives); i++ { + tag_directive := &event.tag_directives[i] + if !yaml_emitter_write_indicator(emitter, []byte("%TAG"), true, false, false) { + return false + } + if !yaml_emitter_write_tag_handle(emitter, tag_directive.handle) { + return false + } + if !yaml_emitter_write_tag_content(emitter, tag_directive.prefix, true) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + } + + if yaml_emitter_check_empty_document(emitter) { + implicit = false + } + if !implicit { + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte("---"), true, false, false) { + return false + } + if emitter.canonical { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + } + + emitter.state = yaml_EMIT_DOCUMENT_CONTENT_STATE + return true + } + + if event.typ == yaml_STREAM_END_EVENT { + if emitter.open_ended { + if 
!yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_flush(emitter) { + return false + } + emitter.state = yaml_EMIT_END_STATE + return true + } + + return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-START or STREAM-END") +} + +// Expect the root node. +func yaml_emitter_emit_document_content(emitter *yaml_emitter_t, event *yaml_event_t) bool { + emitter.states = append(emitter.states, yaml_EMIT_DOCUMENT_END_STATE) + return yaml_emitter_emit_node(emitter, event, true, false, false, false) +} + +// Expect DOCUMENT-END. +func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if event.typ != yaml_DOCUMENT_END_EVENT { + return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-END") + } + if !yaml_emitter_write_indent(emitter) { + return false + } + if !event.implicit { + // [Go] Allocate the slice elsewhere. + if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_flush(emitter) { + return false + } + emitter.state = yaml_EMIT_DOCUMENT_START_STATE + emitter.tag_directives = emitter.tag_directives[:0] + return true +} + +// Expect a flow item node. +func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { + if first { + if !yaml_emitter_write_indicator(emitter, []byte{'['}, true, true, false) { + return false + } + if !yaml_emitter_increase_indent(emitter, true, false) { + return false + } + emitter.flow_level++ + } + + if event.typ == yaml_SEQUENCE_END_EVENT { + emitter.flow_level-- + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + if emitter.canonical && !first { + if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_write_indicator(emitter, []byte{']'}, false, false, false) { + return false + } + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + + return true + } + + if !first { + if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { + return false + } + } + + if emitter.canonical || emitter.column > emitter.best_width { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE) + return yaml_emitter_emit_node(emitter, event, false, true, false, false) +} + +// Expect a flow key node. 
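+//
+// Flow mappings are emitted as "{k1: v1, k2: v2}": the opening '{' is written
+// on the first key, entries are separated by commas, and a line break is
+// inserted when canonical output is requested or the current column exceeds
+// best_width. Keys that pass the simple-key check are written inline;
+// otherwise the explicit "?" key indicator is used.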
+func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { + if first { + if !yaml_emitter_write_indicator(emitter, []byte{'{'}, true, true, false) { + return false + } + if !yaml_emitter_increase_indent(emitter, true, false) { + return false + } + emitter.flow_level++ + } + + if event.typ == yaml_MAPPING_END_EVENT { + emitter.flow_level-- + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + if emitter.canonical && !first { + if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_write_indicator(emitter, []byte{'}'}, false, false, false) { + return false + } + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + return true + } + + if !first { + if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { + return false + } + } + if emitter.canonical || emitter.column > emitter.best_width { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + + if !emitter.canonical && yaml_emitter_check_simple_key(emitter) { + emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, true) + } + if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, false) { + return false + } + emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_VALUE_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, false) +} + +// Expect a flow value node. +func yaml_emitter_emit_flow_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { + if simple { + if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { + return false + } + } else { + if emitter.canonical || emitter.column > emitter.best_width { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, false) { + return false + } + } + emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, false) +} + +// Expect a block item node. +func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { + if first { + if !yaml_emitter_increase_indent(emitter, false, emitter.mapping_context && !emitter.indention) { + return false + } + } + if event.typ == yaml_SEQUENCE_END_EVENT { + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + return true + } + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte{'-'}, true, false, true) { + return false + } + emitter.states = append(emitter.states, yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE) + return yaml_emitter_emit_node(emitter, event, false, true, false, false) +} + +// Expect a block key node. 
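+//
+// A key that passes yaml_emitter_check_simple_key (a single line whose anchor,
+// tag and value together stay within 128 bytes) is emitted inline as
+// "key: value"; anything longer or multiline falls back to the explicit form,
+// where the key is introduced by "?" and the value follows on its own line
+// after ":".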
+func yaml_emitter_emit_block_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { + if first { + if !yaml_emitter_increase_indent(emitter, false, false) { + return false + } + } + if event.typ == yaml_MAPPING_END_EVENT { + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + return true + } + if !yaml_emitter_write_indent(emitter) { + return false + } + if yaml_emitter_check_simple_key(emitter) { + emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, true) + } + if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, true) { + return false + } + emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_VALUE_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, false) +} + +// Expect a block value node. +func yaml_emitter_emit_block_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { + if simple { + if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { + return false + } + } else { + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, true) { + return false + } + } + emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_KEY_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, false) +} + +// Expect a node. +func yaml_emitter_emit_node(emitter *yaml_emitter_t, event *yaml_event_t, + root bool, sequence bool, mapping bool, simple_key bool) bool { + + emitter.root_context = root + emitter.sequence_context = sequence + emitter.mapping_context = mapping + emitter.simple_key_context = simple_key + + switch event.typ { + case yaml_ALIAS_EVENT: + return yaml_emitter_emit_alias(emitter, event) + case yaml_SCALAR_EVENT: + return yaml_emitter_emit_scalar(emitter, event) + case yaml_SEQUENCE_START_EVENT: + return yaml_emitter_emit_sequence_start(emitter, event) + case yaml_MAPPING_START_EVENT: + return yaml_emitter_emit_mapping_start(emitter, event) + default: + return yaml_emitter_set_emitter_error(emitter, + fmt.Sprintf("expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS, but got %v", event.typ)) + } +} + +// Expect ALIAS. +func yaml_emitter_emit_alias(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if !yaml_emitter_process_anchor(emitter) { + return false + } + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + return true +} + +// Expect SCALAR. +func yaml_emitter_emit_scalar(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if !yaml_emitter_select_scalar_style(emitter, event) { + return false + } + if !yaml_emitter_process_anchor(emitter) { + return false + } + if !yaml_emitter_process_tag(emitter) { + return false + } + if !yaml_emitter_increase_indent(emitter, true, false) { + return false + } + if !yaml_emitter_process_scalar(emitter) { + return false + } + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + return true +} + +// Expect SEQUENCE-START. 
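+//
+// The sequence is rendered in flow style ("[a, b]") when the emitter is
+// already inside a flow collection, when canonical output is requested, when
+// the event explicitly asks for flow style, or when the sequence is empty;
+// otherwise it is rendered as a block sequence with "-" items.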
+func yaml_emitter_emit_sequence_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if !yaml_emitter_process_anchor(emitter) { + return false + } + if !yaml_emitter_process_tag(emitter) { + return false + } + if emitter.flow_level > 0 || emitter.canonical || event.sequence_style() == yaml_FLOW_SEQUENCE_STYLE || + yaml_emitter_check_empty_sequence(emitter) { + emitter.state = yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE + } else { + emitter.state = yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE + } + return true +} + +// Expect MAPPING-START. +func yaml_emitter_emit_mapping_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if !yaml_emitter_process_anchor(emitter) { + return false + } + if !yaml_emitter_process_tag(emitter) { + return false + } + if emitter.flow_level > 0 || emitter.canonical || event.mapping_style() == yaml_FLOW_MAPPING_STYLE || + yaml_emitter_check_empty_mapping(emitter) { + emitter.state = yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE + } else { + emitter.state = yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE + } + return true +} + +// Check if the document content is an empty scalar. +func yaml_emitter_check_empty_document(emitter *yaml_emitter_t) bool { + return false // [Go] Huh? +} + +// Check if the next events represent an empty sequence. +func yaml_emitter_check_empty_sequence(emitter *yaml_emitter_t) bool { + if len(emitter.events)-emitter.events_head < 2 { + return false + } + return emitter.events[emitter.events_head].typ == yaml_SEQUENCE_START_EVENT && + emitter.events[emitter.events_head+1].typ == yaml_SEQUENCE_END_EVENT +} + +// Check if the next events represent an empty mapping. +func yaml_emitter_check_empty_mapping(emitter *yaml_emitter_t) bool { + if len(emitter.events)-emitter.events_head < 2 { + return false + } + return emitter.events[emitter.events_head].typ == yaml_MAPPING_START_EVENT && + emitter.events[emitter.events_head+1].typ == yaml_MAPPING_END_EVENT +} + +// Check if the next node can be expressed as a simple key. +func yaml_emitter_check_simple_key(emitter *yaml_emitter_t) bool { + length := 0 + switch emitter.events[emitter.events_head].typ { + case yaml_ALIAS_EVENT: + length += len(emitter.anchor_data.anchor) + case yaml_SCALAR_EVENT: + if emitter.scalar_data.multiline { + return false + } + length += len(emitter.anchor_data.anchor) + + len(emitter.tag_data.handle) + + len(emitter.tag_data.suffix) + + len(emitter.scalar_data.value) + case yaml_SEQUENCE_START_EVENT: + if !yaml_emitter_check_empty_sequence(emitter) { + return false + } + length += len(emitter.anchor_data.anchor) + + len(emitter.tag_data.handle) + + len(emitter.tag_data.suffix) + case yaml_MAPPING_START_EVENT: + if !yaml_emitter_check_empty_mapping(emitter) { + return false + } + length += len(emitter.anchor_data.anchor) + + len(emitter.tag_data.handle) + + len(emitter.tag_data.suffix) + default: + return false + } + return length <= 128 +} + +// Determine an acceptable scalar style. 
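+//
+// Style resolution is conservative: the requested style is honored only when
+// the analyzed scalar allows it. Canonical output and multiline values used as
+// simple keys force double quotes; a plain scalar that is empty (in flow or
+// simple-key context) or not plain-safe degrades to single quotes; single
+// quotes degrade to double quotes when single quoting is not allowed; and the
+// literal/folded block styles are kept only outside flow and simple-key
+// context.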
+func yaml_emitter_select_scalar_style(emitter *yaml_emitter_t, event *yaml_event_t) bool { + + no_tag := len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 + if no_tag && !event.implicit && !event.quoted_implicit { + return yaml_emitter_set_emitter_error(emitter, "neither tag nor implicit flags are specified") + } + + style := event.scalar_style() + if style == yaml_ANY_SCALAR_STYLE { + style = yaml_PLAIN_SCALAR_STYLE + } + if emitter.canonical { + style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + } + if emitter.simple_key_context && emitter.scalar_data.multiline { + style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + } + + if style == yaml_PLAIN_SCALAR_STYLE { + if emitter.flow_level > 0 && !emitter.scalar_data.flow_plain_allowed || + emitter.flow_level == 0 && !emitter.scalar_data.block_plain_allowed { + style = yaml_SINGLE_QUOTED_SCALAR_STYLE + } + if len(emitter.scalar_data.value) == 0 && (emitter.flow_level > 0 || emitter.simple_key_context) { + style = yaml_SINGLE_QUOTED_SCALAR_STYLE + } + if no_tag && !event.implicit { + style = yaml_SINGLE_QUOTED_SCALAR_STYLE + } + } + if style == yaml_SINGLE_QUOTED_SCALAR_STYLE { + if !emitter.scalar_data.single_quoted_allowed { + style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + } + } + if style == yaml_LITERAL_SCALAR_STYLE || style == yaml_FOLDED_SCALAR_STYLE { + if !emitter.scalar_data.block_allowed || emitter.flow_level > 0 || emitter.simple_key_context { + style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + } + } + + if no_tag && !event.quoted_implicit && style != yaml_PLAIN_SCALAR_STYLE { + emitter.tag_data.handle = []byte{'!'} + } + emitter.scalar_data.style = style + return true +} + +// Write an anchor. +func yaml_emitter_process_anchor(emitter *yaml_emitter_t) bool { + if emitter.anchor_data.anchor == nil { + return true + } + c := []byte{'&'} + if emitter.anchor_data.alias { + c[0] = '*' + } + if !yaml_emitter_write_indicator(emitter, c, true, false, false) { + return false + } + return yaml_emitter_write_anchor(emitter, emitter.anchor_data.anchor) +} + +// Write a tag. +func yaml_emitter_process_tag(emitter *yaml_emitter_t) bool { + if len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 { + return true + } + if len(emitter.tag_data.handle) > 0 { + if !yaml_emitter_write_tag_handle(emitter, emitter.tag_data.handle) { + return false + } + if len(emitter.tag_data.suffix) > 0 { + if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { + return false + } + } + } else { + // [Go] Allocate these slices elsewhere. + if !yaml_emitter_write_indicator(emitter, []byte("!<"), true, false, false) { + return false + } + if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte{'>'}, false, false, false) { + return false + } + } + return true +} + +// Write a scalar. 
+func yaml_emitter_process_scalar(emitter *yaml_emitter_t) bool { + switch emitter.scalar_data.style { + case yaml_PLAIN_SCALAR_STYLE: + return yaml_emitter_write_plain_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) + + case yaml_SINGLE_QUOTED_SCALAR_STYLE: + return yaml_emitter_write_single_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) + + case yaml_DOUBLE_QUOTED_SCALAR_STYLE: + return yaml_emitter_write_double_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) + + case yaml_LITERAL_SCALAR_STYLE: + return yaml_emitter_write_literal_scalar(emitter, emitter.scalar_data.value) + + case yaml_FOLDED_SCALAR_STYLE: + return yaml_emitter_write_folded_scalar(emitter, emitter.scalar_data.value) + } + panic("unknown scalar style") +} + +// Check if a %YAML directive is valid. +func yaml_emitter_analyze_version_directive(emitter *yaml_emitter_t, version_directive *yaml_version_directive_t) bool { + if version_directive.major != 1 || version_directive.minor != 1 { + return yaml_emitter_set_emitter_error(emitter, "incompatible %YAML directive") + } + return true +} + +// Check if a %TAG directive is valid. +func yaml_emitter_analyze_tag_directive(emitter *yaml_emitter_t, tag_directive *yaml_tag_directive_t) bool { + handle := tag_directive.handle + prefix := tag_directive.prefix + if len(handle) == 0 { + return yaml_emitter_set_emitter_error(emitter, "tag handle must not be empty") + } + if handle[0] != '!' { + return yaml_emitter_set_emitter_error(emitter, "tag handle must start with '!'") + } + if handle[len(handle)-1] != '!' { + return yaml_emitter_set_emitter_error(emitter, "tag handle must end with '!'") + } + for i := 1; i < len(handle)-1; i += width(handle[i]) { + if !is_alpha(handle, i) { + return yaml_emitter_set_emitter_error(emitter, "tag handle must contain alphanumerical characters only") + } + } + if len(prefix) == 0 { + return yaml_emitter_set_emitter_error(emitter, "tag prefix must not be empty") + } + return true +} + +// Check if an anchor is valid. +func yaml_emitter_analyze_anchor(emitter *yaml_emitter_t, anchor []byte, alias bool) bool { + if len(anchor) == 0 { + problem := "anchor value must not be empty" + if alias { + problem = "alias value must not be empty" + } + return yaml_emitter_set_emitter_error(emitter, problem) + } + for i := 0; i < len(anchor); i += width(anchor[i]) { + if !is_alpha(anchor, i) { + problem := "anchor value must contain alphanumerical characters only" + if alias { + problem = "alias value must contain alphanumerical characters only" + } + return yaml_emitter_set_emitter_error(emitter, problem) + } + } + emitter.anchor_data.anchor = anchor + emitter.anchor_data.alias = alias + return true +} + +// Check if a tag is valid. +func yaml_emitter_analyze_tag(emitter *yaml_emitter_t, tag []byte) bool { + if len(tag) == 0 { + return yaml_emitter_set_emitter_error(emitter, "tag value must not be empty") + } + for i := 0; i < len(emitter.tag_directives); i++ { + tag_directive := &emitter.tag_directives[i] + if bytes.HasPrefix(tag, tag_directive.prefix) { + emitter.tag_data.handle = tag_directive.handle + emitter.tag_data.suffix = tag[len(tag_directive.prefix):] + return true + } + } + emitter.tag_data.suffix = tag + return true +} + +// Check if a scalar is valid. 
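+//
+// "Valid" here really means "classified": the value is scanned once and the
+// result records whether it is multiline and which styles remain allowed
+// (flow plain, block plain, single quoted, block). Leading or trailing
+// whitespace rules out the plain styles, unprintable characters (or non-ASCII
+// ones when unicode output is disabled) leave only double quoting, and flow
+// indicators such as ',' or '}' rule out plain scalars in flow context.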
+func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool { + var ( + block_indicators = false + flow_indicators = false + line_breaks = false + special_characters = false + + leading_space = false + leading_break = false + trailing_space = false + trailing_break = false + break_space = false + space_break = false + + preceded_by_whitespace = false + followed_by_whitespace = false + previous_space = false + previous_break = false + ) + + emitter.scalar_data.value = value + + if len(value) == 0 { + emitter.scalar_data.multiline = false + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = true + emitter.scalar_data.single_quoted_allowed = true + emitter.scalar_data.block_allowed = false + return true + } + + if len(value) >= 3 && ((value[0] == '-' && value[1] == '-' && value[2] == '-') || (value[0] == '.' && value[1] == '.' && value[2] == '.')) { + block_indicators = true + flow_indicators = true + } + + preceded_by_whitespace = true + for i, w := 0, 0; i < len(value); i += w { + w = width(value[i]) + followed_by_whitespace = i+w >= len(value) || is_blank(value, i+w) + + if i == 0 { + switch value[i] { + case '#', ',', '[', ']', '{', '}', '&', '*', '!', '|', '>', '\'', '"', '%', '@', '`': + flow_indicators = true + block_indicators = true + case '?', ':': + flow_indicators = true + if followed_by_whitespace { + block_indicators = true + } + case '-': + if followed_by_whitespace { + flow_indicators = true + block_indicators = true + } + } + } else { + switch value[i] { + case ',', '?', '[', ']', '{', '}': + flow_indicators = true + case ':': + flow_indicators = true + if followed_by_whitespace { + block_indicators = true + } + case '#': + if preceded_by_whitespace { + flow_indicators = true + block_indicators = true + } + } + } + + if !is_printable(value, i) || !is_ascii(value, i) && !emitter.unicode { + special_characters = true + } + if is_space(value, i) { + if i == 0 { + leading_space = true + } + if i+width(value[i]) == len(value) { + trailing_space = true + } + if previous_break { + break_space = true + } + previous_space = true + previous_break = false + } else if is_break(value, i) { + line_breaks = true + if i == 0 { + leading_break = true + } + if i+width(value[i]) == len(value) { + trailing_break = true + } + if previous_space { + space_break = true + } + previous_space = false + previous_break = true + } else { + previous_space = false + previous_break = false + } + + // [Go]: Why 'z'? Couldn't be the end of the string as that's the loop condition. 
+ preceded_by_whitespace = is_blankz(value, i) + } + + emitter.scalar_data.multiline = line_breaks + emitter.scalar_data.flow_plain_allowed = true + emitter.scalar_data.block_plain_allowed = true + emitter.scalar_data.single_quoted_allowed = true + emitter.scalar_data.block_allowed = true + + if leading_space || leading_break || trailing_space || trailing_break { + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = false + } + if trailing_space { + emitter.scalar_data.block_allowed = false + } + if break_space { + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = false + emitter.scalar_data.single_quoted_allowed = false + } + if space_break || special_characters { + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = false + emitter.scalar_data.single_quoted_allowed = false + emitter.scalar_data.block_allowed = false + } + if line_breaks { + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = false + } + if flow_indicators { + emitter.scalar_data.flow_plain_allowed = false + } + if block_indicators { + emitter.scalar_data.block_plain_allowed = false + } + return true +} + +// Check if the event data is valid. +func yaml_emitter_analyze_event(emitter *yaml_emitter_t, event *yaml_event_t) bool { + + emitter.anchor_data.anchor = nil + emitter.tag_data.handle = nil + emitter.tag_data.suffix = nil + emitter.scalar_data.value = nil + + switch event.typ { + case yaml_ALIAS_EVENT: + if !yaml_emitter_analyze_anchor(emitter, event.anchor, true) { + return false + } + + case yaml_SCALAR_EVENT: + if len(event.anchor) > 0 { + if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { + return false + } + } + if len(event.tag) > 0 && (emitter.canonical || (!event.implicit && !event.quoted_implicit)) { + if !yaml_emitter_analyze_tag(emitter, event.tag) { + return false + } + } + if !yaml_emitter_analyze_scalar(emitter, event.value) { + return false + } + + case yaml_SEQUENCE_START_EVENT: + if len(event.anchor) > 0 { + if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { + return false + } + } + if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { + if !yaml_emitter_analyze_tag(emitter, event.tag) { + return false + } + } + + case yaml_MAPPING_START_EVENT: + if len(event.anchor) > 0 { + if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { + return false + } + } + if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { + if !yaml_emitter_analyze_tag(emitter, event.tag) { + return false + } + } + } + return true +} + +// Write the BOM character. 
+func yaml_emitter_write_bom(emitter *yaml_emitter_t) bool { + if !flush(emitter) { + return false + } + pos := emitter.buffer_pos + emitter.buffer[pos+0] = '\xEF' + emitter.buffer[pos+1] = '\xBB' + emitter.buffer[pos+2] = '\xBF' + emitter.buffer_pos += 3 + return true +} + +func yaml_emitter_write_indent(emitter *yaml_emitter_t) bool { + indent := emitter.indent + if indent < 0 { + indent = 0 + } + if !emitter.indention || emitter.column > indent || (emitter.column == indent && !emitter.whitespace) { + if !put_break(emitter) { + return false + } + } + for emitter.column < indent { + if !put(emitter, ' ') { + return false + } + } + emitter.whitespace = true + emitter.indention = true + return true +} + +func yaml_emitter_write_indicator(emitter *yaml_emitter_t, indicator []byte, need_whitespace, is_whitespace, is_indention bool) bool { + if need_whitespace && !emitter.whitespace { + if !put(emitter, ' ') { + return false + } + } + if !write_all(emitter, indicator) { + return false + } + emitter.whitespace = is_whitespace + emitter.indention = (emitter.indention && is_indention) + emitter.open_ended = false + return true +} + +func yaml_emitter_write_anchor(emitter *yaml_emitter_t, value []byte) bool { + if !write_all(emitter, value) { + return false + } + emitter.whitespace = false + emitter.indention = false + return true +} + +func yaml_emitter_write_tag_handle(emitter *yaml_emitter_t, value []byte) bool { + if !emitter.whitespace { + if !put(emitter, ' ') { + return false + } + } + if !write_all(emitter, value) { + return false + } + emitter.whitespace = false + emitter.indention = false + return true +} + +func yaml_emitter_write_tag_content(emitter *yaml_emitter_t, value []byte, need_whitespace bool) bool { + if need_whitespace && !emitter.whitespace { + if !put(emitter, ' ') { + return false + } + } + for i := 0; i < len(value); { + var must_write bool + switch value[i] { + case ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', '.', '~', '*', '\'', '(', ')', '[', ']': + must_write = true + default: + must_write = is_alpha(value, i) + } + if must_write { + if !write(emitter, value, &i) { + return false + } + } else { + w := width(value[i]) + for k := 0; k < w; k++ { + octet := value[i] + i++ + if !put(emitter, '%') { + return false + } + + c := octet >> 4 + if c < 10 { + c += '0' + } else { + c += 'A' - 10 + } + if !put(emitter, c) { + return false + } + + c = octet & 0x0f + if c < 10 { + c += '0' + } else { + c += 'A' - 10 + } + if !put(emitter, c) { + return false + } + } + } + } + emitter.whitespace = false + emitter.indention = false + return true +} + +func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { + if !emitter.whitespace { + if !put(emitter, ' ') { + return false + } + } + + spaces := false + breaks := false + for i := 0; i < len(value); { + if is_space(value, i) { + if allow_breaks && !spaces && emitter.column > emitter.best_width && !is_space(value, i+1) { + if !yaml_emitter_write_indent(emitter) { + return false + } + i += width(value[i]) + } else { + if !write(emitter, value, &i) { + return false + } + } + spaces = true + } else if is_break(value, i) { + if !breaks && value[i] == '\n' { + if !put_break(emitter) { + return false + } + } + if !write_break(emitter, value, &i) { + return false + } + emitter.indention = true + breaks = true + } else { + if breaks { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !write(emitter, value, &i) { + return false + } + emitter.indention = false + spaces = 
false + breaks = false + } + } + + emitter.whitespace = false + emitter.indention = false + if emitter.root_context { + emitter.open_ended = true + } + + return true +} + +func yaml_emitter_write_single_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { + + if !yaml_emitter_write_indicator(emitter, []byte{'\''}, true, false, false) { + return false + } + + spaces := false + breaks := false + for i := 0; i < len(value); { + if is_space(value, i) { + if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 && !is_space(value, i+1) { + if !yaml_emitter_write_indent(emitter) { + return false + } + i += width(value[i]) + } else { + if !write(emitter, value, &i) { + return false + } + } + spaces = true + } else if is_break(value, i) { + if !breaks && value[i] == '\n' { + if !put_break(emitter) { + return false + } + } + if !write_break(emitter, value, &i) { + return false + } + emitter.indention = true + breaks = true + } else { + if breaks { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if value[i] == '\'' { + if !put(emitter, '\'') { + return false + } + } + if !write(emitter, value, &i) { + return false + } + emitter.indention = false + spaces = false + breaks = false + } + } + if !yaml_emitter_write_indicator(emitter, []byte{'\''}, false, false, false) { + return false + } + emitter.whitespace = false + emitter.indention = false + return true +} + +func yaml_emitter_write_double_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { + spaces := false + if !yaml_emitter_write_indicator(emitter, []byte{'"'}, true, false, false) { + return false + } + + for i := 0; i < len(value); { + if !is_printable(value, i) || (!emitter.unicode && !is_ascii(value, i)) || + is_bom(value, i) || is_break(value, i) || + value[i] == '"' || value[i] == '\\' { + + octet := value[i] + + var w int + var v rune + switch { + case octet&0x80 == 0x00: + w, v = 1, rune(octet&0x7F) + case octet&0xE0 == 0xC0: + w, v = 2, rune(octet&0x1F) + case octet&0xF0 == 0xE0: + w, v = 3, rune(octet&0x0F) + case octet&0xF8 == 0xF0: + w, v = 4, rune(octet&0x07) + } + for k := 1; k < w; k++ { + octet = value[i+k] + v = (v << 6) + (rune(octet) & 0x3F) + } + i += w + + if !put(emitter, '\\') { + return false + } + + var ok bool + switch v { + case 0x00: + ok = put(emitter, '0') + case 0x07: + ok = put(emitter, 'a') + case 0x08: + ok = put(emitter, 'b') + case 0x09: + ok = put(emitter, 't') + case 0x0A: + ok = put(emitter, 'n') + case 0x0b: + ok = put(emitter, 'v') + case 0x0c: + ok = put(emitter, 'f') + case 0x0d: + ok = put(emitter, 'r') + case 0x1b: + ok = put(emitter, 'e') + case 0x22: + ok = put(emitter, '"') + case 0x5c: + ok = put(emitter, '\\') + case 0x85: + ok = put(emitter, 'N') + case 0xA0: + ok = put(emitter, '_') + case 0x2028: + ok = put(emitter, 'L') + case 0x2029: + ok = put(emitter, 'P') + default: + if v <= 0xFF { + ok = put(emitter, 'x') + w = 2 + } else if v <= 0xFFFF { + ok = put(emitter, 'u') + w = 4 + } else { + ok = put(emitter, 'U') + w = 8 + } + for k := (w - 1) * 4; ok && k >= 0; k -= 4 { + digit := byte((v >> uint(k)) & 0x0F) + if digit < 10 { + ok = put(emitter, digit+'0') + } else { + ok = put(emitter, digit+'A'-10) + } + } + } + if !ok { + return false + } + spaces = false + } else if is_space(value, i) { + if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 { + if !yaml_emitter_write_indent(emitter) { + return false + } + if is_space(value, i+1) { + if 
!put(emitter, '\\') { + return false + } + } + i += width(value[i]) + } else if !write(emitter, value, &i) { + return false + } + spaces = true + } else { + if !write(emitter, value, &i) { + return false + } + spaces = false + } + } + if !yaml_emitter_write_indicator(emitter, []byte{'"'}, false, false, false) { + return false + } + emitter.whitespace = false + emitter.indention = false + return true +} + +func yaml_emitter_write_block_scalar_hints(emitter *yaml_emitter_t, value []byte) bool { + if is_space(value, 0) || is_break(value, 0) { + indent_hint := []byte{'0' + byte(emitter.best_indent)} + if !yaml_emitter_write_indicator(emitter, indent_hint, false, false, false) { + return false + } + } + + emitter.open_ended = false + + var chomp_hint [1]byte + if len(value) == 0 { + chomp_hint[0] = '-' + } else { + i := len(value) - 1 + for value[i]&0xC0 == 0x80 { + i-- + } + if !is_break(value, i) { + chomp_hint[0] = '-' + } else if i == 0 { + chomp_hint[0] = '+' + emitter.open_ended = true + } else { + i-- + for value[i]&0xC0 == 0x80 { + i-- + } + if is_break(value, i) { + chomp_hint[0] = '+' + emitter.open_ended = true + } + } + } + if chomp_hint[0] != 0 { + if !yaml_emitter_write_indicator(emitter, chomp_hint[:], false, false, false) { + return false + } + } + return true +} + +func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bool { + if !yaml_emitter_write_indicator(emitter, []byte{'|'}, true, false, false) { + return false + } + if !yaml_emitter_write_block_scalar_hints(emitter, value) { + return false + } + if !put_break(emitter) { + return false + } + emitter.indention = true + emitter.whitespace = true + breaks := true + for i := 0; i < len(value); { + if is_break(value, i) { + if !write_break(emitter, value, &i) { + return false + } + emitter.indention = true + breaks = true + } else { + if breaks { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !write(emitter, value, &i) { + return false + } + emitter.indention = false + breaks = false + } + } + + return true +} + +func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) bool { + if !yaml_emitter_write_indicator(emitter, []byte{'>'}, true, false, false) { + return false + } + if !yaml_emitter_write_block_scalar_hints(emitter, value) { + return false + } + + if !put_break(emitter) { + return false + } + emitter.indention = true + emitter.whitespace = true + + breaks := true + leading_spaces := true + for i := 0; i < len(value); { + if is_break(value, i) { + if !breaks && !leading_spaces && value[i] == '\n' { + k := 0 + for is_break(value, k) { + k += width(value[k]) + } + if !is_blankz(value, k) { + if !put_break(emitter) { + return false + } + } + } + if !write_break(emitter, value, &i) { + return false + } + emitter.indention = true + breaks = true + } else { + if breaks { + if !yaml_emitter_write_indent(emitter) { + return false + } + leading_spaces = is_blank(value, i) + } + if !breaks && is_space(value, i) && !is_space(value, i+1) && emitter.column > emitter.best_width { + if !yaml_emitter_write_indent(emitter) { + return false + } + i += width(value[i]) + } else { + if !write(emitter, value, &i) { + return false + } + } + emitter.indention = false + breaks = false + } + } + return true +} diff --git a/vendor/gopkg.in/yaml.v2/encode.go b/vendor/gopkg.in/yaml.v2/encode.go new file mode 100644 index 0000000..0ee738e --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/encode.go @@ -0,0 +1,390 @@ +package yaml + +import ( + "encoding" + "fmt" + "io" + "reflect" + 
"regexp" + "sort" + "strconv" + "strings" + "time" + "unicode/utf8" +) + +// jsonNumber is the interface of the encoding/json.Number datatype. +// Repeating the interface here avoids a dependency on encoding/json, and also +// supports other libraries like jsoniter, which use a similar datatype with +// the same interface. Detecting this interface is useful when dealing with +// structures containing json.Number, which is a string under the hood. The +// encoder should prefer the use of Int64(), Float64() and string(), in that +// order, when encoding this type. +type jsonNumber interface { + Float64() (float64, error) + Int64() (int64, error) + String() string +} + +type encoder struct { + emitter yaml_emitter_t + event yaml_event_t + out []byte + flow bool + // doneInit holds whether the initial stream_start_event has been + // emitted. + doneInit bool +} + +func newEncoder() *encoder { + e := &encoder{} + yaml_emitter_initialize(&e.emitter) + yaml_emitter_set_output_string(&e.emitter, &e.out) + yaml_emitter_set_unicode(&e.emitter, true) + return e +} + +func newEncoderWithWriter(w io.Writer) *encoder { + e := &encoder{} + yaml_emitter_initialize(&e.emitter) + yaml_emitter_set_output_writer(&e.emitter, w) + yaml_emitter_set_unicode(&e.emitter, true) + return e +} + +func (e *encoder) init() { + if e.doneInit { + return + } + yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING) + e.emit() + e.doneInit = true +} + +func (e *encoder) finish() { + e.emitter.open_ended = false + yaml_stream_end_event_initialize(&e.event) + e.emit() +} + +func (e *encoder) destroy() { + yaml_emitter_delete(&e.emitter) +} + +func (e *encoder) emit() { + // This will internally delete the e.event value. + e.must(yaml_emitter_emit(&e.emitter, &e.event)) +} + +func (e *encoder) must(ok bool) { + if !ok { + msg := e.emitter.problem + if msg == "" { + msg = "unknown problem generating YAML content" + } + failf("%s", msg) + } +} + +func (e *encoder) marshalDoc(tag string, in reflect.Value) { + e.init() + yaml_document_start_event_initialize(&e.event, nil, nil, true) + e.emit() + e.marshal(tag, in) + yaml_document_end_event_initialize(&e.event, true) + e.emit() +} + +func (e *encoder) marshal(tag string, in reflect.Value) { + if !in.IsValid() || in.Kind() == reflect.Ptr && in.IsNil() { + e.nilv() + return + } + iface := in.Interface() + switch m := iface.(type) { + case jsonNumber: + integer, err := m.Int64() + if err == nil { + // In this case the json.Number is a valid int64 + in = reflect.ValueOf(integer) + break + } + float, err := m.Float64() + if err == nil { + // In this case the json.Number is a valid float64 + in = reflect.ValueOf(float) + break + } + // fallback case - no number could be obtained + in = reflect.ValueOf(m.String()) + case time.Time, *time.Time: + // Although time.Time implements TextMarshaler, + // we don't want to treat it as a string for YAML + // purposes because YAML has special support for + // timestamps. 
+ case Marshaler: + v, err := m.MarshalYAML() + if err != nil { + fail(err) + } + if v == nil { + e.nilv() + return + } + in = reflect.ValueOf(v) + case encoding.TextMarshaler: + text, err := m.MarshalText() + if err != nil { + fail(err) + } + in = reflect.ValueOf(string(text)) + case nil: + e.nilv() + return + } + switch in.Kind() { + case reflect.Interface: + e.marshal(tag, in.Elem()) + case reflect.Map: + e.mapv(tag, in) + case reflect.Ptr: + if in.Type() == ptrTimeType { + e.timev(tag, in.Elem()) + } else { + e.marshal(tag, in.Elem()) + } + case reflect.Struct: + if in.Type() == timeType { + e.timev(tag, in) + } else { + e.structv(tag, in) + } + case reflect.Slice, reflect.Array: + if in.Type().Elem() == mapItemType { + e.itemsv(tag, in) + } else { + e.slicev(tag, in) + } + case reflect.String: + e.stringv(tag, in) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + if in.Type() == durationType { + e.stringv(tag, reflect.ValueOf(iface.(time.Duration).String())) + } else { + e.intv(tag, in) + } + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + e.uintv(tag, in) + case reflect.Float32, reflect.Float64: + e.floatv(tag, in) + case reflect.Bool: + e.boolv(tag, in) + default: + panic("cannot marshal type: " + in.Type().String()) + } +} + +func (e *encoder) mapv(tag string, in reflect.Value) { + e.mappingv(tag, func() { + keys := keyList(in.MapKeys()) + sort.Sort(keys) + for _, k := range keys { + e.marshal("", k) + e.marshal("", in.MapIndex(k)) + } + }) +} + +func (e *encoder) itemsv(tag string, in reflect.Value) { + e.mappingv(tag, func() { + slice := in.Convert(reflect.TypeOf([]MapItem{})).Interface().([]MapItem) + for _, item := range slice { + e.marshal("", reflect.ValueOf(item.Key)) + e.marshal("", reflect.ValueOf(item.Value)) + } + }) +} + +func (e *encoder) structv(tag string, in reflect.Value) { + sinfo, err := getStructInfo(in.Type()) + if err != nil { + panic(err) + } + e.mappingv(tag, func() { + for _, info := range sinfo.FieldsList { + var value reflect.Value + if info.Inline == nil { + value = in.Field(info.Num) + } else { + value = in.FieldByIndex(info.Inline) + } + if info.OmitEmpty && isZero(value) { + continue + } + e.marshal("", reflect.ValueOf(info.Key)) + e.flow = info.Flow + e.marshal("", value) + } + if sinfo.InlineMap >= 0 { + m := in.Field(sinfo.InlineMap) + if m.Len() > 0 { + e.flow = false + keys := keyList(m.MapKeys()) + sort.Sort(keys) + for _, k := range keys { + if _, found := sinfo.FieldsMap[k.String()]; found { + panic(fmt.Sprintf("Can't have key %q in inlined map; conflicts with struct field", k.String())) + } + e.marshal("", k) + e.flow = false + e.marshal("", m.MapIndex(k)) + } + } + } + }) +} + +func (e *encoder) mappingv(tag string, f func()) { + implicit := tag == "" + style := yaml_BLOCK_MAPPING_STYLE + if e.flow { + e.flow = false + style = yaml_FLOW_MAPPING_STYLE + } + yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style) + e.emit() + f() + yaml_mapping_end_event_initialize(&e.event) + e.emit() +} + +func (e *encoder) slicev(tag string, in reflect.Value) { + implicit := tag == "" + style := yaml_BLOCK_SEQUENCE_STYLE + if e.flow { + e.flow = false + style = yaml_FLOW_SEQUENCE_STYLE + } + e.must(yaml_sequence_start_event_initialize(&e.event, nil, []byte(tag), implicit, style)) + e.emit() + n := in.Len() + for i := 0; i < n; i++ { + e.marshal("", in.Index(i)) + } + e.must(yaml_sequence_end_event_initialize(&e.event)) + e.emit() +} + +// isBase60 
returns whether s is in base 60 notation as defined in YAML 1.1. +// +// The base 60 float notation in YAML 1.1 is a terrible idea and is unsupported +// in YAML 1.2 and by this package, but these should be marshalled quoted for +// the time being for compatibility with other parsers. +func isBase60Float(s string) (result bool) { + // Fast path. + if s == "" { + return false + } + c := s[0] + if !(c == '+' || c == '-' || c >= '0' && c <= '9') || strings.IndexByte(s, ':') < 0 { + return false + } + // Do the full match. + return base60float.MatchString(s) +} + +// From http://yaml.org/type/float.html, except the regular expression there +// is bogus. In practice parsers do not enforce the "\.[0-9_]*" suffix. +var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0-9_]*)?$`) + +func (e *encoder) stringv(tag string, in reflect.Value) { + var style yaml_scalar_style_t + s := in.String() + canUsePlain := true + switch { + case !utf8.ValidString(s): + if tag == yaml_BINARY_TAG { + failf("explicitly tagged !!binary data must be base64-encoded") + } + if tag != "" { + failf("cannot marshal invalid UTF-8 data as %s", shortTag(tag)) + } + // It can't be encoded directly as YAML so use a binary tag + // and encode it as base64. + tag = yaml_BINARY_TAG + s = encodeBase64(s) + case tag == "": + // Check to see if it would resolve to a specific + // tag when encoded unquoted. If it doesn't, + // there's no need to quote it. + rtag, _ := resolve("", s) + canUsePlain = rtag == yaml_STR_TAG && !isBase60Float(s) + } + // Note: it's possible for user code to emit invalid YAML + // if they explicitly specify a tag and a string containing + // text that's incompatible with that tag. + switch { + case strings.Contains(s, "\n"): + style = yaml_LITERAL_SCALAR_STYLE + case canUsePlain: + style = yaml_PLAIN_SCALAR_STYLE + default: + style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + } + e.emitScalar(s, "", tag, style) +} + +func (e *encoder) boolv(tag string, in reflect.Value) { + var s string + if in.Bool() { + s = "true" + } else { + s = "false" + } + e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) +} + +func (e *encoder) intv(tag string, in reflect.Value) { + s := strconv.FormatInt(in.Int(), 10) + e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) +} + +func (e *encoder) uintv(tag string, in reflect.Value) { + s := strconv.FormatUint(in.Uint(), 10) + e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) +} + +func (e *encoder) timev(tag string, in reflect.Value) { + t := in.Interface().(time.Time) + s := t.Format(time.RFC3339Nano) + e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) +} + +func (e *encoder) floatv(tag string, in reflect.Value) { + // Issue #352: When formatting, use the precision of the underlying value + precision := 64 + if in.Kind() == reflect.Float32 { + precision = 32 + } + + s := strconv.FormatFloat(in.Float(), 'g', -1, precision) + switch s { + case "+Inf": + s = ".inf" + case "-Inf": + s = "-.inf" + case "NaN": + s = ".nan" + } + e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) +} + +func (e *encoder) nilv() { + e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE) +} + +func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) { + implicit := tag == "" + e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style)) + e.emit() +} diff --git a/vendor/gopkg.in/yaml.v2/parserc.go b/vendor/gopkg.in/yaml.v2/parserc.go new file mode 100644 index 0000000..81d05df --- /dev/null +++ 
b/vendor/gopkg.in/yaml.v2/parserc.go @@ -0,0 +1,1095 @@ +package yaml + +import ( + "bytes" +) + +// The parser implements the following grammar: +// +// stream ::= STREAM-START implicit_document? explicit_document* STREAM-END +// implicit_document ::= block_node DOCUMENT-END* +// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* +// block_node_or_indentless_sequence ::= +// ALIAS +// | properties (block_content | indentless_block_sequence)? +// | block_content +// | indentless_block_sequence +// block_node ::= ALIAS +// | properties block_content? +// | block_content +// flow_node ::= ALIAS +// | properties flow_content? +// | flow_content +// properties ::= TAG ANCHOR? | ANCHOR TAG? +// block_content ::= block_collection | flow_collection | SCALAR +// flow_content ::= flow_collection | SCALAR +// block_collection ::= block_sequence | block_mapping +// flow_collection ::= flow_sequence | flow_mapping +// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END +// indentless_sequence ::= (BLOCK-ENTRY block_node?)+ +// block_mapping ::= BLOCK-MAPPING_START +// ((KEY block_node_or_indentless_sequence?)? +// (VALUE block_node_or_indentless_sequence?)?)* +// BLOCK-END +// flow_sequence ::= FLOW-SEQUENCE-START +// (flow_sequence_entry FLOW-ENTRY)* +// flow_sequence_entry? +// FLOW-SEQUENCE-END +// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? +// flow_mapping ::= FLOW-MAPPING-START +// (flow_mapping_entry FLOW-ENTRY)* +// flow_mapping_entry? +// FLOW-MAPPING-END +// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? + +// Peek the next token in the token queue. +func peek_token(parser *yaml_parser_t) *yaml_token_t { + if parser.token_available || yaml_parser_fetch_more_tokens(parser) { + return &parser.tokens[parser.tokens_head] + } + return nil +} + +// Remove the next token from the queue (must be called after peek_token). +func skip_token(parser *yaml_parser_t) { + parser.token_available = false + parser.tokens_parsed++ + parser.stream_end_produced = parser.tokens[parser.tokens_head].typ == yaml_STREAM_END_TOKEN + parser.tokens_head++ +} + +// Get the next event. +func yaml_parser_parse(parser *yaml_parser_t, event *yaml_event_t) bool { + // Erase the event object. + *event = yaml_event_t{} + + // No events after the end of the stream or error. + if parser.stream_end_produced || parser.error != yaml_NO_ERROR || parser.state == yaml_PARSE_END_STATE { + return true + } + + // Generate the next event. + return yaml_parser_state_machine(parser, event) +} + +// Set parser error. +func yaml_parser_set_parser_error(parser *yaml_parser_t, problem string, problem_mark yaml_mark_t) bool { + parser.error = yaml_PARSER_ERROR + parser.problem = problem + parser.problem_mark = problem_mark + return false +} + +func yaml_parser_set_parser_error_context(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string, problem_mark yaml_mark_t) bool { + parser.error = yaml_PARSER_ERROR + parser.context = context + parser.context_mark = context_mark + parser.problem = problem + parser.problem_mark = problem_mark + return false +} + +// State dispatcher. 
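+// For example, parsing the document `a: [1, 2]` walks these states to emit:
+// STREAM-START, DOCUMENT-START, MAPPING-START, SCALAR("a"), SEQUENCE-START,
+// SCALAR("1"), SCALAR("2"), SEQUENCE-END, MAPPING-END, DOCUMENT-END, STREAM-END.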
+func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool { + //trace("yaml_parser_state_machine", "state:", parser.state.String()) + + switch parser.state { + case yaml_PARSE_STREAM_START_STATE: + return yaml_parser_parse_stream_start(parser, event) + + case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE: + return yaml_parser_parse_document_start(parser, event, true) + + case yaml_PARSE_DOCUMENT_START_STATE: + return yaml_parser_parse_document_start(parser, event, false) + + case yaml_PARSE_DOCUMENT_CONTENT_STATE: + return yaml_parser_parse_document_content(parser, event) + + case yaml_PARSE_DOCUMENT_END_STATE: + return yaml_parser_parse_document_end(parser, event) + + case yaml_PARSE_BLOCK_NODE_STATE: + return yaml_parser_parse_node(parser, event, true, false) + + case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: + return yaml_parser_parse_node(parser, event, true, true) + + case yaml_PARSE_FLOW_NODE_STATE: + return yaml_parser_parse_node(parser, event, false, false) + + case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: + return yaml_parser_parse_block_sequence_entry(parser, event, true) + + case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: + return yaml_parser_parse_block_sequence_entry(parser, event, false) + + case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: + return yaml_parser_parse_indentless_sequence_entry(parser, event) + + case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: + return yaml_parser_parse_block_mapping_key(parser, event, true) + + case yaml_PARSE_BLOCK_MAPPING_KEY_STATE: + return yaml_parser_parse_block_mapping_key(parser, event, false) + + case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE: + return yaml_parser_parse_block_mapping_value(parser, event) + + case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: + return yaml_parser_parse_flow_sequence_entry(parser, event, true) + + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE: + return yaml_parser_parse_flow_sequence_entry(parser, event, false) + + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: + return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event) + + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: + return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event) + + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: + return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event) + + case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: + return yaml_parser_parse_flow_mapping_key(parser, event, true) + + case yaml_PARSE_FLOW_MAPPING_KEY_STATE: + return yaml_parser_parse_flow_mapping_key(parser, event, false) + + case yaml_PARSE_FLOW_MAPPING_VALUE_STATE: + return yaml_parser_parse_flow_mapping_value(parser, event, false) + + case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: + return yaml_parser_parse_flow_mapping_value(parser, event, true) + + default: + panic("invalid parser state") + } +} + +// Parse the production: +// stream ::= STREAM-START implicit_document? 
explicit_document* STREAM-END +// ************ +func yaml_parser_parse_stream_start(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_STREAM_START_TOKEN { + return yaml_parser_set_parser_error(parser, "did not find expected ", token.start_mark) + } + parser.state = yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE + *event = yaml_event_t{ + typ: yaml_STREAM_START_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + encoding: token.encoding, + } + skip_token(parser) + return true +} + +// Parse the productions: +// implicit_document ::= block_node DOCUMENT-END* +// * +// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* +// ************************* +func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t, implicit bool) bool { + + token := peek_token(parser) + if token == nil { + return false + } + + // Parse extra document end indicators. + if !implicit { + for token.typ == yaml_DOCUMENT_END_TOKEN { + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } + } + + if implicit && token.typ != yaml_VERSION_DIRECTIVE_TOKEN && + token.typ != yaml_TAG_DIRECTIVE_TOKEN && + token.typ != yaml_DOCUMENT_START_TOKEN && + token.typ != yaml_STREAM_END_TOKEN { + // Parse an implicit document. + if !yaml_parser_process_directives(parser, nil, nil) { + return false + } + parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE) + parser.state = yaml_PARSE_BLOCK_NODE_STATE + + *event = yaml_event_t{ + typ: yaml_DOCUMENT_START_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + + } else if token.typ != yaml_STREAM_END_TOKEN { + // Parse an explicit document. + var version_directive *yaml_version_directive_t + var tag_directives []yaml_tag_directive_t + start_mark := token.start_mark + if !yaml_parser_process_directives(parser, &version_directive, &tag_directives) { + return false + } + token = peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_DOCUMENT_START_TOKEN { + yaml_parser_set_parser_error(parser, + "did not find expected ", token.start_mark) + return false + } + parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE) + parser.state = yaml_PARSE_DOCUMENT_CONTENT_STATE + end_mark := token.end_mark + + *event = yaml_event_t{ + typ: yaml_DOCUMENT_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + version_directive: version_directive, + tag_directives: tag_directives, + implicit: false, + } + skip_token(parser) + + } else { + // Parse the stream end. + parser.state = yaml_PARSE_END_STATE + *event = yaml_event_t{ + typ: yaml_STREAM_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + skip_token(parser) + } + + return true +} + +// Parse the productions: +// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? 
DOCUMENT-END* +// *********** +// +func yaml_parser_parse_document_content(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + if token.typ == yaml_VERSION_DIRECTIVE_TOKEN || + token.typ == yaml_TAG_DIRECTIVE_TOKEN || + token.typ == yaml_DOCUMENT_START_TOKEN || + token.typ == yaml_DOCUMENT_END_TOKEN || + token.typ == yaml_STREAM_END_TOKEN { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + return yaml_parser_process_empty_scalar(parser, event, + token.start_mark) + } + return yaml_parser_parse_node(parser, event, true, false) +} + +// Parse the productions: +// implicit_document ::= block_node DOCUMENT-END* +// ************* +// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* +// +func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + + start_mark := token.start_mark + end_mark := token.start_mark + + implicit := true + if token.typ == yaml_DOCUMENT_END_TOKEN { + end_mark = token.end_mark + skip_token(parser) + implicit = false + } + + parser.tag_directives = parser.tag_directives[:0] + + parser.state = yaml_PARSE_DOCUMENT_START_STATE + *event = yaml_event_t{ + typ: yaml_DOCUMENT_END_EVENT, + start_mark: start_mark, + end_mark: end_mark, + implicit: implicit, + } + return true +} + +// Parse the productions: +// block_node_or_indentless_sequence ::= +// ALIAS +// ***** +// | properties (block_content | indentless_block_sequence)? +// ********** * +// | block_content | indentless_block_sequence +// * +// block_node ::= ALIAS +// ***** +// | properties block_content? +// ********** * +// | block_content +// * +// flow_node ::= ALIAS +// ***** +// | properties flow_content? +// ********** * +// | flow_content +// * +// properties ::= TAG ANCHOR? | ANCHOR TAG? 
+// ************************* +// block_content ::= block_collection | flow_collection | SCALAR +// ****** +// flow_content ::= flow_collection | SCALAR +// ****** +func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, indentless_sequence bool) bool { + //defer trace("yaml_parser_parse_node", "block:", block, "indentless_sequence:", indentless_sequence)() + + token := peek_token(parser) + if token == nil { + return false + } + + if token.typ == yaml_ALIAS_TOKEN { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + *event = yaml_event_t{ + typ: yaml_ALIAS_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + anchor: token.value, + } + skip_token(parser) + return true + } + + start_mark := token.start_mark + end_mark := token.start_mark + + var tag_token bool + var tag_handle, tag_suffix, anchor []byte + var tag_mark yaml_mark_t + if token.typ == yaml_ANCHOR_TOKEN { + anchor = token.value + start_mark = token.start_mark + end_mark = token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ == yaml_TAG_TOKEN { + tag_token = true + tag_handle = token.value + tag_suffix = token.suffix + tag_mark = token.start_mark + end_mark = token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } + } else if token.typ == yaml_TAG_TOKEN { + tag_token = true + tag_handle = token.value + tag_suffix = token.suffix + start_mark = token.start_mark + tag_mark = token.start_mark + end_mark = token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ == yaml_ANCHOR_TOKEN { + anchor = token.value + end_mark = token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } + } + + var tag []byte + if tag_token { + if len(tag_handle) == 0 { + tag = tag_suffix + tag_suffix = nil + } else { + for i := range parser.tag_directives { + if bytes.Equal(parser.tag_directives[i].handle, tag_handle) { + tag = append([]byte(nil), parser.tag_directives[i].prefix...) + tag = append(tag, tag_suffix...) 
+ break + } + } + if len(tag) == 0 { + yaml_parser_set_parser_error_context(parser, + "while parsing a node", start_mark, + "found undefined tag handle", tag_mark) + return false + } + } + } + + implicit := len(tag) == 0 + if indentless_sequence && token.typ == yaml_BLOCK_ENTRY_TOKEN { + end_mark = token.end_mark + parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE + *event = yaml_event_t{ + typ: yaml_SEQUENCE_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), + } + return true + } + if token.typ == yaml_SCALAR_TOKEN { + var plain_implicit, quoted_implicit bool + end_mark = token.end_mark + if (len(tag) == 0 && token.style == yaml_PLAIN_SCALAR_STYLE) || (len(tag) == 1 && tag[0] == '!') { + plain_implicit = true + } else if len(tag) == 0 { + quoted_implicit = true + } + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + + *event = yaml_event_t{ + typ: yaml_SCALAR_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + value: token.value, + implicit: plain_implicit, + quoted_implicit: quoted_implicit, + style: yaml_style_t(token.style), + } + skip_token(parser) + return true + } + if token.typ == yaml_FLOW_SEQUENCE_START_TOKEN { + // [Go] Some of the events below can be merged as they differ only on style. + end_mark = token.end_mark + parser.state = yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE + *event = yaml_event_t{ + typ: yaml_SEQUENCE_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_FLOW_SEQUENCE_STYLE), + } + return true + } + if token.typ == yaml_FLOW_MAPPING_START_TOKEN { + end_mark = token.end_mark + parser.state = yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE + *event = yaml_event_t{ + typ: yaml_MAPPING_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), + } + return true + } + if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN { + end_mark = token.end_mark + parser.state = yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE + *event = yaml_event_t{ + typ: yaml_SEQUENCE_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), + } + return true + } + if block && token.typ == yaml_BLOCK_MAPPING_START_TOKEN { + end_mark = token.end_mark + parser.state = yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE + *event = yaml_event_t{ + typ: yaml_MAPPING_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_BLOCK_MAPPING_STYLE), + } + return true + } + if len(anchor) > 0 || len(tag) > 0 { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + + *event = yaml_event_t{ + typ: yaml_SCALAR_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + quoted_implicit: false, + style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), + } + return true + } + + context := "while parsing a flow node" + if block { + context = "while parsing a block node" + } + yaml_parser_set_parser_error_context(parser, context, start_mark, + "did not find expected node content", token.start_mark) + return false +} + +// Parse the productions: +// 
block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END +// ******************** *********** * ********* +// +func yaml_parser_parse_block_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { + if first { + token := peek_token(parser) + parser.marks = append(parser.marks, token.start_mark) + skip_token(parser) + } + + token := peek_token(parser) + if token == nil { + return false + } + + if token.typ == yaml_BLOCK_ENTRY_TOKEN { + mark := token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_BLOCK_ENTRY_TOKEN && token.typ != yaml_BLOCK_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE) + return yaml_parser_parse_node(parser, event, true, false) + } else { + parser.state = yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) + } + } + if token.typ == yaml_BLOCK_END_TOKEN { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + + *event = yaml_event_t{ + typ: yaml_SEQUENCE_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + + skip_token(parser) + return true + } + + context_mark := parser.marks[len(parser.marks)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + return yaml_parser_set_parser_error_context(parser, + "while parsing a block collection", context_mark, + "did not find expected '-' indicator", token.start_mark) +} + +// Parse the productions: +// indentless_sequence ::= (BLOCK-ENTRY block_node?)+ +// *********** * +func yaml_parser_parse_indentless_sequence_entry(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + + if token.typ == yaml_BLOCK_ENTRY_TOKEN { + mark := token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_BLOCK_ENTRY_TOKEN && + token.typ != yaml_KEY_TOKEN && + token.typ != yaml_VALUE_TOKEN && + token.typ != yaml_BLOCK_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE) + return yaml_parser_parse_node(parser, event, true, false) + } + parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) + } + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + + *event = yaml_event_t{ + typ: yaml_SEQUENCE_END_EVENT, + start_mark: token.start_mark, + end_mark: token.start_mark, // [Go] Shouldn't this be token.end_mark? + } + return true +} + +// Parse the productions: +// block_mapping ::= BLOCK-MAPPING_START +// ******************* +// ((KEY block_node_or_indentless_sequence?)? 
+// *** * +// (VALUE block_node_or_indentless_sequence?)?)* +// +// BLOCK-END +// ********* +// +func yaml_parser_parse_block_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { + if first { + token := peek_token(parser) + parser.marks = append(parser.marks, token.start_mark) + skip_token(parser) + } + + token := peek_token(parser) + if token == nil { + return false + } + + if token.typ == yaml_KEY_TOKEN { + mark := token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_KEY_TOKEN && + token.typ != yaml_VALUE_TOKEN && + token.typ != yaml_BLOCK_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_VALUE_STATE) + return yaml_parser_parse_node(parser, event, true, true) + } else { + parser.state = yaml_PARSE_BLOCK_MAPPING_VALUE_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) + } + } else if token.typ == yaml_BLOCK_END_TOKEN { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + *event = yaml_event_t{ + typ: yaml_MAPPING_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + skip_token(parser) + return true + } + + context_mark := parser.marks[len(parser.marks)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + return yaml_parser_set_parser_error_context(parser, + "while parsing a block mapping", context_mark, + "did not find expected key", token.start_mark) +} + +// Parse the productions: +// block_mapping ::= BLOCK-MAPPING_START +// +// ((KEY block_node_or_indentless_sequence?)? +// +// (VALUE block_node_or_indentless_sequence?)?)* +// ***** * +// BLOCK-END +// +// +func yaml_parser_parse_block_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + if token.typ == yaml_VALUE_TOKEN { + mark := token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_KEY_TOKEN && + token.typ != yaml_VALUE_TOKEN && + token.typ != yaml_BLOCK_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_KEY_STATE) + return yaml_parser_parse_node(parser, event, true, true) + } + parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) + } + parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE + return yaml_parser_process_empty_scalar(parser, event, token.start_mark) +} + +// Parse the productions: +// flow_sequence ::= FLOW-SEQUENCE-START +// ******************* +// (flow_sequence_entry FLOW-ENTRY)* +// * ********** +// flow_sequence_entry? +// * +// FLOW-SEQUENCE-END +// ***************** +// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
+// * +// +func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { + if first { + token := peek_token(parser) + parser.marks = append(parser.marks, token.start_mark) + skip_token(parser) + } + token := peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { + if !first { + if token.typ == yaml_FLOW_ENTRY_TOKEN { + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } else { + context_mark := parser.marks[len(parser.marks)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + return yaml_parser_set_parser_error_context(parser, + "while parsing a flow sequence", context_mark, + "did not find expected ',' or ']'", token.start_mark) + } + } + + if token.typ == yaml_KEY_TOKEN { + parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE + *event = yaml_event_t{ + typ: yaml_MAPPING_START_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + implicit: true, + style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), + } + skip_token(parser) + return true + } else if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } + } + + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + + *event = yaml_event_t{ + typ: yaml_SEQUENCE_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + + skip_token(parser) + return true +} + +// +// Parse the productions: +// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? +// *** * +// +func yaml_parser_parse_flow_sequence_entry_mapping_key(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_VALUE_TOKEN && + token.typ != yaml_FLOW_ENTRY_TOKEN && + token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } + mark := token.end_mark + skip_token(parser) + parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) +} + +// Parse the productions: +// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? +// ***** * +// +func yaml_parser_parse_flow_sequence_entry_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + if token.typ == yaml_VALUE_TOKEN { + skip_token(parser) + token := peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } + } + parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE + return yaml_parser_process_empty_scalar(parser, event, token.start_mark) +} + +// Parse the productions: +// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
+// * +// +func yaml_parser_parse_flow_sequence_entry_mapping_end(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE + *event = yaml_event_t{ + typ: yaml_MAPPING_END_EVENT, + start_mark: token.start_mark, + end_mark: token.start_mark, // [Go] Shouldn't this be end_mark? + } + return true +} + +// Parse the productions: +// flow_mapping ::= FLOW-MAPPING-START +// ****************** +// (flow_mapping_entry FLOW-ENTRY)* +// * ********** +// flow_mapping_entry? +// ****************** +// FLOW-MAPPING-END +// **************** +// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? +// * *** * +// +func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { + if first { + token := peek_token(parser) + parser.marks = append(parser.marks, token.start_mark) + skip_token(parser) + } + + token := peek_token(parser) + if token == nil { + return false + } + + if token.typ != yaml_FLOW_MAPPING_END_TOKEN { + if !first { + if token.typ == yaml_FLOW_ENTRY_TOKEN { + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } else { + context_mark := parser.marks[len(parser.marks)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + return yaml_parser_set_parser_error_context(parser, + "while parsing a flow mapping", context_mark, + "did not find expected ',' or '}'", token.start_mark) + } + } + + if token.typ == yaml_KEY_TOKEN { + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_VALUE_TOKEN && + token.typ != yaml_FLOW_ENTRY_TOKEN && + token.typ != yaml_FLOW_MAPPING_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_VALUE_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } else { + parser.state = yaml_PARSE_FLOW_MAPPING_VALUE_STATE + return yaml_parser_process_empty_scalar(parser, event, token.start_mark) + } + } else if token.typ != yaml_FLOW_MAPPING_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } + } + + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + *event = yaml_event_t{ + typ: yaml_MAPPING_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + skip_token(parser) + return true +} + +// Parse the productions: +// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? +// * ***** * +// +func yaml_parser_parse_flow_mapping_value(parser *yaml_parser_t, event *yaml_event_t, empty bool) bool { + token := peek_token(parser) + if token == nil { + return false + } + if empty { + parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE + return yaml_parser_process_empty_scalar(parser, event, token.start_mark) + } + if token.typ == yaml_VALUE_TOKEN { + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_MAPPING_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_KEY_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } + } + parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE + return yaml_parser_process_empty_scalar(parser, event, token.start_mark) +} + +// Generate an empty scalar event. 
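+// An empty scalar stands in for an omitted node, e.g. the missing value in
+// `key:` or the missing entry before a ',' in a flow collection.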
+func yaml_parser_process_empty_scalar(parser *yaml_parser_t, event *yaml_event_t, mark yaml_mark_t) bool { + *event = yaml_event_t{ + typ: yaml_SCALAR_EVENT, + start_mark: mark, + end_mark: mark, + value: nil, // Empty + implicit: true, + style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), + } + return true +} + +var default_tag_directives = []yaml_tag_directive_t{ + {[]byte("!"), []byte("!")}, + {[]byte("!!"), []byte("tag:yaml.org,2002:")}, +} + +// Parse directives. +func yaml_parser_process_directives(parser *yaml_parser_t, + version_directive_ref **yaml_version_directive_t, + tag_directives_ref *[]yaml_tag_directive_t) bool { + + var version_directive *yaml_version_directive_t + var tag_directives []yaml_tag_directive_t + + token := peek_token(parser) + if token == nil { + return false + } + + for token.typ == yaml_VERSION_DIRECTIVE_TOKEN || token.typ == yaml_TAG_DIRECTIVE_TOKEN { + if token.typ == yaml_VERSION_DIRECTIVE_TOKEN { + if version_directive != nil { + yaml_parser_set_parser_error(parser, + "found duplicate %YAML directive", token.start_mark) + return false + } + if token.major != 1 || token.minor != 1 { + yaml_parser_set_parser_error(parser, + "found incompatible YAML document", token.start_mark) + return false + } + version_directive = &yaml_version_directive_t{ + major: token.major, + minor: token.minor, + } + } else if token.typ == yaml_TAG_DIRECTIVE_TOKEN { + value := yaml_tag_directive_t{ + handle: token.value, + prefix: token.prefix, + } + if !yaml_parser_append_tag_directive(parser, value, false, token.start_mark) { + return false + } + tag_directives = append(tag_directives, value) + } + + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } + + for i := range default_tag_directives { + if !yaml_parser_append_tag_directive(parser, default_tag_directives[i], true, token.start_mark) { + return false + } + } + + if version_directive_ref != nil { + *version_directive_ref = version_directive + } + if tag_directives_ref != nil { + *tag_directives_ref = tag_directives + } + return true +} + +// Append a tag directive to the directives stack. +func yaml_parser_append_tag_directive(parser *yaml_parser_t, value yaml_tag_directive_t, allow_duplicates bool, mark yaml_mark_t) bool { + for i := range parser.tag_directives { + if bytes.Equal(value.handle, parser.tag_directives[i].handle) { + if allow_duplicates { + return true + } + return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark) + } + } + + // [Go] I suspect the copy is unnecessary. This was likely done + // because there was no way to track ownership of the data. + value_copy := yaml_tag_directive_t{ + handle: make([]byte, len(value.handle)), + prefix: make([]byte, len(value.prefix)), + } + copy(value_copy.handle, value.handle) + copy(value_copy.prefix, value.prefix) + parser.tag_directives = append(parser.tag_directives, value_copy) + return true +} diff --git a/vendor/gopkg.in/yaml.v2/readerc.go b/vendor/gopkg.in/yaml.v2/readerc.go new file mode 100644 index 0000000..7c1f5fa --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/readerc.go @@ -0,0 +1,412 @@ +package yaml + +import ( + "io" +) + +// Set the reader error and return 0. +func yaml_parser_set_reader_error(parser *yaml_parser_t, problem string, offset int, value int) bool { + parser.error = yaml_READER_ERROR + parser.problem = problem + parser.problem_offset = offset + parser.problem_value = value + return false +} + +// Byte order marks. 
+const ( + bom_UTF8 = "\xef\xbb\xbf" + bom_UTF16LE = "\xff\xfe" + bom_UTF16BE = "\xfe\xff" +) + +// Determine the input stream encoding by checking the BOM symbol. If no BOM is +// found, the UTF-8 encoding is assumed. Return 1 on success, 0 on failure. +func yaml_parser_determine_encoding(parser *yaml_parser_t) bool { + // Ensure that we had enough bytes in the raw buffer. + for !parser.eof && len(parser.raw_buffer)-parser.raw_buffer_pos < 3 { + if !yaml_parser_update_raw_buffer(parser) { + return false + } + } + + // Determine the encoding. + buf := parser.raw_buffer + pos := parser.raw_buffer_pos + avail := len(buf) - pos + if avail >= 2 && buf[pos] == bom_UTF16LE[0] && buf[pos+1] == bom_UTF16LE[1] { + parser.encoding = yaml_UTF16LE_ENCODING + parser.raw_buffer_pos += 2 + parser.offset += 2 + } else if avail >= 2 && buf[pos] == bom_UTF16BE[0] && buf[pos+1] == bom_UTF16BE[1] { + parser.encoding = yaml_UTF16BE_ENCODING + parser.raw_buffer_pos += 2 + parser.offset += 2 + } else if avail >= 3 && buf[pos] == bom_UTF8[0] && buf[pos+1] == bom_UTF8[1] && buf[pos+2] == bom_UTF8[2] { + parser.encoding = yaml_UTF8_ENCODING + parser.raw_buffer_pos += 3 + parser.offset += 3 + } else { + parser.encoding = yaml_UTF8_ENCODING + } + return true +} + +// Update the raw buffer. +func yaml_parser_update_raw_buffer(parser *yaml_parser_t) bool { + size_read := 0 + + // Return if the raw buffer is full. + if parser.raw_buffer_pos == 0 && len(parser.raw_buffer) == cap(parser.raw_buffer) { + return true + } + + // Return on EOF. + if parser.eof { + return true + } + + // Move the remaining bytes in the raw buffer to the beginning. + if parser.raw_buffer_pos > 0 && parser.raw_buffer_pos < len(parser.raw_buffer) { + copy(parser.raw_buffer, parser.raw_buffer[parser.raw_buffer_pos:]) + } + parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)-parser.raw_buffer_pos] + parser.raw_buffer_pos = 0 + + // Call the read handler to fill the buffer. + size_read, err := parser.read_handler(parser, parser.raw_buffer[len(parser.raw_buffer):cap(parser.raw_buffer)]) + parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)+size_read] + if err == io.EOF { + parser.eof = true + } else if err != nil { + return yaml_parser_set_reader_error(parser, "input error: "+err.Error(), parser.offset, -1) + } + return true +} + +// Ensure that the buffer contains at least `length` characters. +// Return true on success, false on failure. +// +// The length is supposed to be significantly less that the buffer size. +func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool { + if parser.read_handler == nil { + panic("read handler must be set") + } + + // [Go] This function was changed to guarantee the requested length size at EOF. + // The fact we need to do this is pretty awful, but the description above implies + // for that to be the case, and there are tests + + // If the EOF flag is set and the raw buffer is empty, do nothing. + if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) { + // [Go] ACTUALLY! Read the documentation of this function above. + // This is just broken. To return true, we need to have the + // given length in the buffer. Not doing that means every single + // check that calls this function to make sure the buffer has a + // given length is Go) panicking; or C) accessing invalid memory. + //return true + } + + // Return if the buffer contains enough characters. + if parser.unread >= length { + return true + } + + // Determine the input encoding if it is not known yet. 
+ if parser.encoding == yaml_ANY_ENCODING { + if !yaml_parser_determine_encoding(parser) { + return false + } + } + + // Move the unread characters to the beginning of the buffer. + buffer_len := len(parser.buffer) + if parser.buffer_pos > 0 && parser.buffer_pos < buffer_len { + copy(parser.buffer, parser.buffer[parser.buffer_pos:]) + buffer_len -= parser.buffer_pos + parser.buffer_pos = 0 + } else if parser.buffer_pos == buffer_len { + buffer_len = 0 + parser.buffer_pos = 0 + } + + // Open the whole buffer for writing, and cut it before returning. + parser.buffer = parser.buffer[:cap(parser.buffer)] + + // Fill the buffer until it has enough characters. + first := true + for parser.unread < length { + + // Fill the raw buffer if necessary. + if !first || parser.raw_buffer_pos == len(parser.raw_buffer) { + if !yaml_parser_update_raw_buffer(parser) { + parser.buffer = parser.buffer[:buffer_len] + return false + } + } + first = false + + // Decode the raw buffer. + inner: + for parser.raw_buffer_pos != len(parser.raw_buffer) { + var value rune + var width int + + raw_unread := len(parser.raw_buffer) - parser.raw_buffer_pos + + // Decode the next character. + switch parser.encoding { + case yaml_UTF8_ENCODING: + // Decode a UTF-8 character. Check RFC 3629 + // (http://www.ietf.org/rfc/rfc3629.txt) for more details. + // + // The following table (taken from the RFC) is used for + // decoding. + // + // Char. number range | UTF-8 octet sequence + // (hexadecimal) | (binary) + // --------------------+------------------------------------ + // 0000 0000-0000 007F | 0xxxxxxx + // 0000 0080-0000 07FF | 110xxxxx 10xxxxxx + // 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx + // 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx + // + // Additionally, the characters in the range 0xD800-0xDFFF + // are prohibited as they are reserved for use with UTF-16 + // surrogate pairs. + + // Determine the length of the UTF-8 sequence. + octet := parser.raw_buffer[parser.raw_buffer_pos] + switch { + case octet&0x80 == 0x00: + width = 1 + case octet&0xE0 == 0xC0: + width = 2 + case octet&0xF0 == 0xE0: + width = 3 + case octet&0xF8 == 0xF0: + width = 4 + default: + // The leading octet is invalid. + return yaml_parser_set_reader_error(parser, + "invalid leading UTF-8 octet", + parser.offset, int(octet)) + } + + // Check if the raw buffer contains an incomplete character. + if width > raw_unread { + if parser.eof { + return yaml_parser_set_reader_error(parser, + "incomplete UTF-8 octet sequence", + parser.offset, -1) + } + break inner + } + + // Decode the leading octet. + switch { + case octet&0x80 == 0x00: + value = rune(octet & 0x7F) + case octet&0xE0 == 0xC0: + value = rune(octet & 0x1F) + case octet&0xF0 == 0xE0: + value = rune(octet & 0x0F) + case octet&0xF8 == 0xF0: + value = rune(octet & 0x07) + default: + value = 0 + } + + // Check and decode the trailing octets. + for k := 1; k < width; k++ { + octet = parser.raw_buffer[parser.raw_buffer_pos+k] + + // Check if the octet is valid. + if (octet & 0xC0) != 0x80 { + return yaml_parser_set_reader_error(parser, + "invalid trailing UTF-8 octet", + parser.offset+k, int(octet)) + } + + // Decode the octet. + value = (value << 6) + rune(octet&0x3F) + } + + // Check the length of the sequence against the value. 
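+				// (e.g. the overlong two-byte form 0xC0 0x80 for U+0000 is rejected here.)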
+ switch { + case width == 1: + case width == 2 && value >= 0x80: + case width == 3 && value >= 0x800: + case width == 4 && value >= 0x10000: + default: + return yaml_parser_set_reader_error(parser, + "invalid length of a UTF-8 sequence", + parser.offset, -1) + } + + // Check the range of the value. + if value >= 0xD800 && value <= 0xDFFF || value > 0x10FFFF { + return yaml_parser_set_reader_error(parser, + "invalid Unicode character", + parser.offset, int(value)) + } + + case yaml_UTF16LE_ENCODING, yaml_UTF16BE_ENCODING: + var low, high int + if parser.encoding == yaml_UTF16LE_ENCODING { + low, high = 0, 1 + } else { + low, high = 1, 0 + } + + // The UTF-16 encoding is not as simple as one might + // naively think. Check RFC 2781 + // (http://www.ietf.org/rfc/rfc2781.txt). + // + // Normally, two subsequent bytes describe a Unicode + // character. However a special technique (called a + // surrogate pair) is used for specifying character + // values larger than 0xFFFF. + // + // A surrogate pair consists of two pseudo-characters: + // high surrogate area (0xD800-0xDBFF) + // low surrogate area (0xDC00-0xDFFF) + // + // The following formulas are used for decoding + // and encoding characters using surrogate pairs: + // + // U = U' + 0x10000 (0x01 00 00 <= U <= 0x10 FF FF) + // U' = yyyyyyyyyyxxxxxxxxxx (0 <= U' <= 0x0F FF FF) + // W1 = 110110yyyyyyyyyy + // W2 = 110111xxxxxxxxxx + // + // where U is the character value, W1 is the high surrogate + // area, W2 is the low surrogate area. + + // Check for incomplete UTF-16 character. + if raw_unread < 2 { + if parser.eof { + return yaml_parser_set_reader_error(parser, + "incomplete UTF-16 character", + parser.offset, -1) + } + break inner + } + + // Get the character. + value = rune(parser.raw_buffer[parser.raw_buffer_pos+low]) + + (rune(parser.raw_buffer[parser.raw_buffer_pos+high]) << 8) + + // Check for unexpected low surrogate area. + if value&0xFC00 == 0xDC00 { + return yaml_parser_set_reader_error(parser, + "unexpected low surrogate area", + parser.offset, int(value)) + } + + // Check for a high surrogate area. + if value&0xFC00 == 0xD800 { + width = 4 + + // Check for incomplete surrogate pair. + if raw_unread < 4 { + if parser.eof { + return yaml_parser_set_reader_error(parser, + "incomplete UTF-16 surrogate pair", + parser.offset, -1) + } + break inner + } + + // Get the next character. + value2 := rune(parser.raw_buffer[parser.raw_buffer_pos+low+2]) + + (rune(parser.raw_buffer[parser.raw_buffer_pos+high+2]) << 8) + + // Check for a low surrogate area. + if value2&0xFC00 != 0xDC00 { + return yaml_parser_set_reader_error(parser, + "expected low surrogate area", + parser.offset+2, int(value2)) + } + + // Generate the value of the surrogate pair. + value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF) + } else { + width = 2 + } + + default: + panic("impossible") + } + + // Check if the character is in the allowed range: + // #x9 | #xA | #xD | [#x20-#x7E] (8 bit) + // | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD] (16 bit) + // | [#x10000-#x10FFFF] (32 bit) + switch { + case value == 0x09: + case value == 0x0A: + case value == 0x0D: + case value >= 0x20 && value <= 0x7E: + case value == 0x85: + case value >= 0xA0 && value <= 0xD7FF: + case value >= 0xE000 && value <= 0xFFFD: + case value >= 0x10000 && value <= 0x10FFFF: + default: + return yaml_parser_set_reader_error(parser, + "control characters are not allowed", + parser.offset, int(value)) + } + + // Move the raw pointers. 
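+			// (Illustration: the UTF-16LE bytes 3D D8 00 DE decode to the surrogate
+			// pair D83D/DE00, i.e. U+1F600, and are re-encoded below as the UTF-8
+			// bytes F0 9F 98 80.)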
+ parser.raw_buffer_pos += width + parser.offset += width + + // Finally put the character into the buffer. + if value <= 0x7F { + // 0000 0000-0000 007F . 0xxxxxxx + parser.buffer[buffer_len+0] = byte(value) + buffer_len += 1 + } else if value <= 0x7FF { + // 0000 0080-0000 07FF . 110xxxxx 10xxxxxx + parser.buffer[buffer_len+0] = byte(0xC0 + (value >> 6)) + parser.buffer[buffer_len+1] = byte(0x80 + (value & 0x3F)) + buffer_len += 2 + } else if value <= 0xFFFF { + // 0000 0800-0000 FFFF . 1110xxxx 10xxxxxx 10xxxxxx + parser.buffer[buffer_len+0] = byte(0xE0 + (value >> 12)) + parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 6) & 0x3F)) + parser.buffer[buffer_len+2] = byte(0x80 + (value & 0x3F)) + buffer_len += 3 + } else { + // 0001 0000-0010 FFFF . 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx + parser.buffer[buffer_len+0] = byte(0xF0 + (value >> 18)) + parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 12) & 0x3F)) + parser.buffer[buffer_len+2] = byte(0x80 + ((value >> 6) & 0x3F)) + parser.buffer[buffer_len+3] = byte(0x80 + (value & 0x3F)) + buffer_len += 4 + } + + parser.unread++ + } + + // On EOF, put NUL into the buffer and return. + if parser.eof { + parser.buffer[buffer_len] = 0 + buffer_len++ + parser.unread++ + break + } + } + // [Go] Read the documentation of this function above. To return true, + // we need to have the given length in the buffer. Not doing that means + // every single check that calls this function to make sure the buffer + // has a given length is Go) panicking; or C) accessing invalid memory. + // This happens here due to the EOF above breaking early. + for buffer_len < length { + parser.buffer[buffer_len] = 0 + buffer_len++ + } + parser.buffer = parser.buffer[:buffer_len] + return true +} diff --git a/vendor/gopkg.in/yaml.v2/resolve.go b/vendor/gopkg.in/yaml.v2/resolve.go new file mode 100644 index 0000000..4120e0c --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/resolve.go @@ -0,0 +1,258 @@ +package yaml + +import ( + "encoding/base64" + "math" + "regexp" + "strconv" + "strings" + "time" +) + +type resolveMapItem struct { + value interface{} + tag string +} + +var resolveTable = make([]byte, 256) +var resolveMap = make(map[string]resolveMapItem) + +func init() { + t := resolveTable + t[int('+')] = 'S' // Sign + t[int('-')] = 'S' + for _, c := range "0123456789" { + t[int(c)] = 'D' // Digit + } + for _, c := range "yYnNtTfFoO~" { + t[int(c)] = 'M' // In map + } + t[int('.')] = '.' 
// Float (potentially in map) + + var resolveMapList = []struct { + v interface{} + tag string + l []string + }{ + {true, yaml_BOOL_TAG, []string{"y", "Y", "yes", "Yes", "YES"}}, + {true, yaml_BOOL_TAG, []string{"true", "True", "TRUE"}}, + {true, yaml_BOOL_TAG, []string{"on", "On", "ON"}}, + {false, yaml_BOOL_TAG, []string{"n", "N", "no", "No", "NO"}}, + {false, yaml_BOOL_TAG, []string{"false", "False", "FALSE"}}, + {false, yaml_BOOL_TAG, []string{"off", "Off", "OFF"}}, + {nil, yaml_NULL_TAG, []string{"", "~", "null", "Null", "NULL"}}, + {math.NaN(), yaml_FLOAT_TAG, []string{".nan", ".NaN", ".NAN"}}, + {math.Inf(+1), yaml_FLOAT_TAG, []string{".inf", ".Inf", ".INF"}}, + {math.Inf(+1), yaml_FLOAT_TAG, []string{"+.inf", "+.Inf", "+.INF"}}, + {math.Inf(-1), yaml_FLOAT_TAG, []string{"-.inf", "-.Inf", "-.INF"}}, + {"<<", yaml_MERGE_TAG, []string{"<<"}}, + } + + m := resolveMap + for _, item := range resolveMapList { + for _, s := range item.l { + m[s] = resolveMapItem{item.v, item.tag} + } + } +} + +const longTagPrefix = "tag:yaml.org,2002:" + +func shortTag(tag string) string { + // TODO This can easily be made faster and produce less garbage. + if strings.HasPrefix(tag, longTagPrefix) { + return "!!" + tag[len(longTagPrefix):] + } + return tag +} + +func longTag(tag string) string { + if strings.HasPrefix(tag, "!!") { + return longTagPrefix + tag[2:] + } + return tag +} + +func resolvableTag(tag string) bool { + switch tag { + case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG, yaml_TIMESTAMP_TAG: + return true + } + return false +} + +var yamlStyleFloat = regexp.MustCompile(`^[-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?$`) + +func resolve(tag string, in string) (rtag string, out interface{}) { + if !resolvableTag(tag) { + return tag, in + } + + defer func() { + switch tag { + case "", rtag, yaml_STR_TAG, yaml_BINARY_TAG: + return + case yaml_FLOAT_TAG: + if rtag == yaml_INT_TAG { + switch v := out.(type) { + case int64: + rtag = yaml_FLOAT_TAG + out = float64(v) + return + case int: + rtag = yaml_FLOAT_TAG + out = float64(v) + return + } + } + } + failf("cannot decode %s `%s` as a %s", shortTag(rtag), in, shortTag(tag)) + }() + + // Any data is accepted as a !!str or !!binary. + // Otherwise, the prefix is enough of a hint about what it might be. + hint := byte('N') + if in != "" { + hint = resolveTable[in[0]] + } + if hint != 0 && tag != yaml_STR_TAG && tag != yaml_BINARY_TAG { + // Handle things we can lookup in a map. + if item, ok := resolveMap[in]; ok { + return item.tag, item.value + } + + // Base 60 floats are a bad idea, were dropped in YAML 1.2, and + // are purposefully unsupported here. They're still quoted on + // the way out for compatibility with other parser, though. + + switch hint { + case 'M': + // We've already checked the map above. + + case '.': + // Not in the map, so maybe a normal float. + floatv, err := strconv.ParseFloat(in, 64) + if err == nil { + return yaml_FLOAT_TAG, floatv + } + + case 'D', 'S': + // Int, float, or timestamp. + // Only try values as a timestamp if the value is unquoted or there's an explicit + // !!timestamp tag. 
+ if tag == "" || tag == yaml_TIMESTAMP_TAG { + t, ok := parseTimestamp(in) + if ok { + return yaml_TIMESTAMP_TAG, t + } + } + + plain := strings.Replace(in, "_", "", -1) + intv, err := strconv.ParseInt(plain, 0, 64) + if err == nil { + if intv == int64(int(intv)) { + return yaml_INT_TAG, int(intv) + } else { + return yaml_INT_TAG, intv + } + } + uintv, err := strconv.ParseUint(plain, 0, 64) + if err == nil { + return yaml_INT_TAG, uintv + } + if yamlStyleFloat.MatchString(plain) { + floatv, err := strconv.ParseFloat(plain, 64) + if err == nil { + return yaml_FLOAT_TAG, floatv + } + } + if strings.HasPrefix(plain, "0b") { + intv, err := strconv.ParseInt(plain[2:], 2, 64) + if err == nil { + if intv == int64(int(intv)) { + return yaml_INT_TAG, int(intv) + } else { + return yaml_INT_TAG, intv + } + } + uintv, err := strconv.ParseUint(plain[2:], 2, 64) + if err == nil { + return yaml_INT_TAG, uintv + } + } else if strings.HasPrefix(plain, "-0b") { + intv, err := strconv.ParseInt("-" + plain[3:], 2, 64) + if err == nil { + if true || intv == int64(int(intv)) { + return yaml_INT_TAG, int(intv) + } else { + return yaml_INT_TAG, intv + } + } + } + default: + panic("resolveTable item not yet handled: " + string(rune(hint)) + " (with " + in + ")") + } + } + return yaml_STR_TAG, in +} + +// encodeBase64 encodes s as base64 that is broken up into multiple lines +// as appropriate for the resulting length. +func encodeBase64(s string) string { + const lineLen = 70 + encLen := base64.StdEncoding.EncodedLen(len(s)) + lines := encLen/lineLen + 1 + buf := make([]byte, encLen*2+lines) + in := buf[0:encLen] + out := buf[encLen:] + base64.StdEncoding.Encode(in, []byte(s)) + k := 0 + for i := 0; i < len(in); i += lineLen { + j := i + lineLen + if j > len(in) { + j = len(in) + } + k += copy(out[k:], in[i:j]) + if lines > 1 { + out[k] = '\n' + k++ + } + } + return string(out[:k]) +} + +// This is a subset of the formats allowed by the regular expression +// defined at http://yaml.org/type/timestamp.html. +var allowedTimestampFormats = []string{ + "2006-1-2T15:4:5.999999999Z07:00", // RCF3339Nano with short date fields. + "2006-1-2t15:4:5.999999999Z07:00", // RFC3339Nano with short date fields and lower-case "t". + "2006-1-2 15:4:5.999999999", // space separated with no time zone + "2006-1-2", // date only + // Notable exception: time.Parse cannot handle: "2001-12-14 21:59:43.10 -5" + // from the set of examples. +} + +// parseTimestamp parses s as a timestamp string and +// returns the timestamp and reports whether it succeeded. +// Timestamp formats are defined at http://yaml.org/type/timestamp.html +func parseTimestamp(s string) (time.Time, bool) { + // TODO write code to check all the formats supported by + // http://yaml.org/type/timestamp.html instead of using time.Parse. + + // Quick check: all date formats start with YYYY-. 
+ i := 0 + for ; i < len(s); i++ { + if c := s[i]; c < '0' || c > '9' { + break + } + } + if i != 4 || i == len(s) || s[i] != '-' { + return time.Time{}, false + } + for _, format := range allowedTimestampFormats { + if t, err := time.Parse(format, s); err == nil { + return t, true + } + } + return time.Time{}, false +} diff --git a/vendor/gopkg.in/yaml.v2/scannerc.go b/vendor/gopkg.in/yaml.v2/scannerc.go new file mode 100644 index 0000000..0b9bb60 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/scannerc.go @@ -0,0 +1,2711 @@ +package yaml + +import ( + "bytes" + "fmt" +) + +// Introduction +// ************ +// +// The following notes assume that you are familiar with the YAML specification +// (http://yaml.org/spec/1.2/spec.html). We mostly follow it, although in +// some cases we are less restrictive that it requires. +// +// The process of transforming a YAML stream into a sequence of events is +// divided on two steps: Scanning and Parsing. +// +// The Scanner transforms the input stream into a sequence of tokens, while the +// parser transform the sequence of tokens produced by the Scanner into a +// sequence of parsing events. +// +// The Scanner is rather clever and complicated. The Parser, on the contrary, +// is a straightforward implementation of a recursive-descendant parser (or, +// LL(1) parser, as it is usually called). +// +// Actually there are two issues of Scanning that might be called "clever", the +// rest is quite straightforward. The issues are "block collection start" and +// "simple keys". Both issues are explained below in details. +// +// Here the Scanning step is explained and implemented. We start with the list +// of all the tokens produced by the Scanner together with short descriptions. +// +// Now, tokens: +// +// STREAM-START(encoding) # The stream start. +// STREAM-END # The stream end. +// VERSION-DIRECTIVE(major,minor) # The '%YAML' directive. +// TAG-DIRECTIVE(handle,prefix) # The '%TAG' directive. +// DOCUMENT-START # '---' +// DOCUMENT-END # '...' +// BLOCK-SEQUENCE-START # Indentation increase denoting a block +// BLOCK-MAPPING-START # sequence or a block mapping. +// BLOCK-END # Indentation decrease. +// FLOW-SEQUENCE-START # '[' +// FLOW-SEQUENCE-END # ']' +// BLOCK-SEQUENCE-START # '{' +// BLOCK-SEQUENCE-END # '}' +// BLOCK-ENTRY # '-' +// FLOW-ENTRY # ',' +// KEY # '?' or nothing (simple keys). +// VALUE # ':' +// ALIAS(anchor) # '*anchor' +// ANCHOR(anchor) # '&anchor' +// TAG(handle,suffix) # '!handle!suffix' +// SCALAR(value,style) # A scalar. +// +// The following two tokens are "virtual" tokens denoting the beginning and the +// end of the stream: +// +// STREAM-START(encoding) +// STREAM-END +// +// We pass the information about the input stream encoding with the +// STREAM-START token. +// +// The next two tokens are responsible for tags: +// +// VERSION-DIRECTIVE(major,minor) +// TAG-DIRECTIVE(handle,prefix) +// +// Example: +// +// %YAML 1.1 +// %TAG ! !foo +// %TAG !yaml! tag:yaml.org,2002: +// --- +// +// The correspoding sequence of tokens: +// +// STREAM-START(utf-8) +// VERSION-DIRECTIVE(1,1) +// TAG-DIRECTIVE("!","!foo") +// TAG-DIRECTIVE("!yaml","tag:yaml.org,2002:") +// DOCUMENT-START +// STREAM-END +// +// Note that the VERSION-DIRECTIVE and TAG-DIRECTIVE tokens occupy a whole +// line. +// +// The document start and end indicators are represented by: +// +// DOCUMENT-START +// DOCUMENT-END +// +// Note that if a YAML stream contains an implicit document (without '---' +// and '...' 
indicators), no DOCUMENT-START and DOCUMENT-END tokens will be +// produced. +// +// In the following examples, we present whole documents together with the +// produced tokens. +// +// 1. An implicit document: +// +// 'a scalar' +// +// Tokens: +// +// STREAM-START(utf-8) +// SCALAR("a scalar",single-quoted) +// STREAM-END +// +// 2. An explicit document: +// +// --- +// 'a scalar' +// ... +// +// Tokens: +// +// STREAM-START(utf-8) +// DOCUMENT-START +// SCALAR("a scalar",single-quoted) +// DOCUMENT-END +// STREAM-END +// +// 3. Several documents in a stream: +// +// 'a scalar' +// --- +// 'another scalar' +// --- +// 'yet another scalar' +// +// Tokens: +// +// STREAM-START(utf-8) +// SCALAR("a scalar",single-quoted) +// DOCUMENT-START +// SCALAR("another scalar",single-quoted) +// DOCUMENT-START +// SCALAR("yet another scalar",single-quoted) +// STREAM-END +// +// We have already introduced the SCALAR token above. The following tokens are +// used to describe aliases, anchors, tag, and scalars: +// +// ALIAS(anchor) +// ANCHOR(anchor) +// TAG(handle,suffix) +// SCALAR(value,style) +// +// The following series of examples illustrate the usage of these tokens: +// +// 1. A recursive sequence: +// +// &A [ *A ] +// +// Tokens: +// +// STREAM-START(utf-8) +// ANCHOR("A") +// FLOW-SEQUENCE-START +// ALIAS("A") +// FLOW-SEQUENCE-END +// STREAM-END +// +// 2. A tagged scalar: +// +// !!float "3.14" # A good approximation. +// +// Tokens: +// +// STREAM-START(utf-8) +// TAG("!!","float") +// SCALAR("3.14",double-quoted) +// STREAM-END +// +// 3. Various scalar styles: +// +// --- # Implicit empty plain scalars do not produce tokens. +// --- a plain scalar +// --- 'a single-quoted scalar' +// --- "a double-quoted scalar" +// --- |- +// a literal scalar +// --- >- +// a folded +// scalar +// +// Tokens: +// +// STREAM-START(utf-8) +// DOCUMENT-START +// DOCUMENT-START +// SCALAR("a plain scalar",plain) +// DOCUMENT-START +// SCALAR("a single-quoted scalar",single-quoted) +// DOCUMENT-START +// SCALAR("a double-quoted scalar",double-quoted) +// DOCUMENT-START +// SCALAR("a literal scalar",literal) +// DOCUMENT-START +// SCALAR("a folded scalar",folded) +// STREAM-END +// +// Now it's time to review collection-related tokens. We will start with +// flow collections: +// +// FLOW-SEQUENCE-START +// FLOW-SEQUENCE-END +// FLOW-MAPPING-START +// FLOW-MAPPING-END +// FLOW-ENTRY +// KEY +// VALUE +// +// The tokens FLOW-SEQUENCE-START, FLOW-SEQUENCE-END, FLOW-MAPPING-START, and +// FLOW-MAPPING-END represent the indicators '[', ']', '{', and '}' +// correspondingly. FLOW-ENTRY represent the ',' indicator. Finally the +// indicators '?' and ':', which are used for denoting mapping keys and values, +// are represented by the KEY and VALUE tokens. +// +// The following examples show flow collections: +// +// 1. A flow sequence: +// +// [item 1, item 2, item 3] +// +// Tokens: +// +// STREAM-START(utf-8) +// FLOW-SEQUENCE-START +// SCALAR("item 1",plain) +// FLOW-ENTRY +// SCALAR("item 2",plain) +// FLOW-ENTRY +// SCALAR("item 3",plain) +// FLOW-SEQUENCE-END +// STREAM-END +// +// 2. A flow mapping: +// +// { +// a simple key: a value, # Note that the KEY token is produced. +// ? 
a complex key: another value, +// } +// +// Tokens: +// +// STREAM-START(utf-8) +// FLOW-MAPPING-START +// KEY +// SCALAR("a simple key",plain) +// VALUE +// SCALAR("a value",plain) +// FLOW-ENTRY +// KEY +// SCALAR("a complex key",plain) +// VALUE +// SCALAR("another value",plain) +// FLOW-ENTRY +// FLOW-MAPPING-END +// STREAM-END +// +// A simple key is a key which is not denoted by the '?' indicator. Note that +// the Scanner still produce the KEY token whenever it encounters a simple key. +// +// For scanning block collections, the following tokens are used (note that we +// repeat KEY and VALUE here): +// +// BLOCK-SEQUENCE-START +// BLOCK-MAPPING-START +// BLOCK-END +// BLOCK-ENTRY +// KEY +// VALUE +// +// The tokens BLOCK-SEQUENCE-START and BLOCK-MAPPING-START denote indentation +// increase that precedes a block collection (cf. the INDENT token in Python). +// The token BLOCK-END denote indentation decrease that ends a block collection +// (cf. the DEDENT token in Python). However YAML has some syntax pecularities +// that makes detections of these tokens more complex. +// +// The tokens BLOCK-ENTRY, KEY, and VALUE are used to represent the indicators +// '-', '?', and ':' correspondingly. +// +// The following examples show how the tokens BLOCK-SEQUENCE-START, +// BLOCK-MAPPING-START, and BLOCK-END are emitted by the Scanner: +// +// 1. Block sequences: +// +// - item 1 +// - item 2 +// - +// - item 3.1 +// - item 3.2 +// - +// key 1: value 1 +// key 2: value 2 +// +// Tokens: +// +// STREAM-START(utf-8) +// BLOCK-SEQUENCE-START +// BLOCK-ENTRY +// SCALAR("item 1",plain) +// BLOCK-ENTRY +// SCALAR("item 2",plain) +// BLOCK-ENTRY +// BLOCK-SEQUENCE-START +// BLOCK-ENTRY +// SCALAR("item 3.1",plain) +// BLOCK-ENTRY +// SCALAR("item 3.2",plain) +// BLOCK-END +// BLOCK-ENTRY +// BLOCK-MAPPING-START +// KEY +// SCALAR("key 1",plain) +// VALUE +// SCALAR("value 1",plain) +// KEY +// SCALAR("key 2",plain) +// VALUE +// SCALAR("value 2",plain) +// BLOCK-END +// BLOCK-END +// STREAM-END +// +// 2. Block mappings: +// +// a simple key: a value # The KEY token is produced here. +// ? a complex key +// : another value +// a mapping: +// key 1: value 1 +// key 2: value 2 +// a sequence: +// - item 1 +// - item 2 +// +// Tokens: +// +// STREAM-START(utf-8) +// BLOCK-MAPPING-START +// KEY +// SCALAR("a simple key",plain) +// VALUE +// SCALAR("a value",plain) +// KEY +// SCALAR("a complex key",plain) +// VALUE +// SCALAR("another value",plain) +// KEY +// SCALAR("a mapping",plain) +// BLOCK-MAPPING-START +// KEY +// SCALAR("key 1",plain) +// VALUE +// SCALAR("value 1",plain) +// KEY +// SCALAR("key 2",plain) +// VALUE +// SCALAR("value 2",plain) +// BLOCK-END +// KEY +// SCALAR("a sequence",plain) +// VALUE +// BLOCK-SEQUENCE-START +// BLOCK-ENTRY +// SCALAR("item 1",plain) +// BLOCK-ENTRY +// SCALAR("item 2",plain) +// BLOCK-END +// BLOCK-END +// STREAM-END +// +// YAML does not always require to start a new block collection from a new +// line. If the current line contains only '-', '?', and ':' indicators, a new +// block collection may start at the current line. The following examples +// illustrate this case: +// +// 1. Collections in a sequence: +// +// - - item 1 +// - item 2 +// - key 1: value 1 +// key 2: value 2 +// - ? 
complex key +// : complex value +// +// Tokens: +// +// STREAM-START(utf-8) +// BLOCK-SEQUENCE-START +// BLOCK-ENTRY +// BLOCK-SEQUENCE-START +// BLOCK-ENTRY +// SCALAR("item 1",plain) +// BLOCK-ENTRY +// SCALAR("item 2",plain) +// BLOCK-END +// BLOCK-ENTRY +// BLOCK-MAPPING-START +// KEY +// SCALAR("key 1",plain) +// VALUE +// SCALAR("value 1",plain) +// KEY +// SCALAR("key 2",plain) +// VALUE +// SCALAR("value 2",plain) +// BLOCK-END +// BLOCK-ENTRY +// BLOCK-MAPPING-START +// KEY +// SCALAR("complex key") +// VALUE +// SCALAR("complex value") +// BLOCK-END +// BLOCK-END +// STREAM-END +// +// 2. Collections in a mapping: +// +// ? a sequence +// : - item 1 +// - item 2 +// ? a mapping +// : key 1: value 1 +// key 2: value 2 +// +// Tokens: +// +// STREAM-START(utf-8) +// BLOCK-MAPPING-START +// KEY +// SCALAR("a sequence",plain) +// VALUE +// BLOCK-SEQUENCE-START +// BLOCK-ENTRY +// SCALAR("item 1",plain) +// BLOCK-ENTRY +// SCALAR("item 2",plain) +// BLOCK-END +// KEY +// SCALAR("a mapping",plain) +// VALUE +// BLOCK-MAPPING-START +// KEY +// SCALAR("key 1",plain) +// VALUE +// SCALAR("value 1",plain) +// KEY +// SCALAR("key 2",plain) +// VALUE +// SCALAR("value 2",plain) +// BLOCK-END +// BLOCK-END +// STREAM-END +// +// YAML also permits non-indented sequences if they are included into a block +// mapping. In this case, the token BLOCK-SEQUENCE-START is not produced: +// +// key: +// - item 1 # BLOCK-SEQUENCE-START is NOT produced here. +// - item 2 +// +// Tokens: +// +// STREAM-START(utf-8) +// BLOCK-MAPPING-START +// KEY +// SCALAR("key",plain) +// VALUE +// BLOCK-ENTRY +// SCALAR("item 1",plain) +// BLOCK-ENTRY +// SCALAR("item 2",plain) +// BLOCK-END +// + +// Ensure that the buffer contains the required number of characters. +// Return true on success, false on failure (reader error or memory error). +func cache(parser *yaml_parser_t, length int) bool { + // [Go] This was inlined: !cache(A, B) -> unread < B && !update(A, B) + return parser.unread >= length || yaml_parser_update_buffer(parser, length) +} + +// Advance the buffer pointer. +func skip(parser *yaml_parser_t) { + parser.mark.index++ + parser.mark.column++ + parser.unread-- + parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) +} + +func skip_line(parser *yaml_parser_t) { + if is_crlf(parser.buffer, parser.buffer_pos) { + parser.mark.index += 2 + parser.mark.column = 0 + parser.mark.line++ + parser.unread -= 2 + parser.buffer_pos += 2 + } else if is_break(parser.buffer, parser.buffer_pos) { + parser.mark.index++ + parser.mark.column = 0 + parser.mark.line++ + parser.unread-- + parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) + } +} + +// Copy a character to a string buffer and advance pointers. +func read(parser *yaml_parser_t, s []byte) []byte { + w := width(parser.buffer[parser.buffer_pos]) + if w == 0 { + panic("invalid character sequence") + } + if len(s) == 0 { + s = make([]byte, 0, 32) + } + if w == 1 && len(s)+w <= cap(s) { + s = s[:len(s)+1] + s[len(s)-1] = parser.buffer[parser.buffer_pos] + parser.buffer_pos++ + } else { + s = append(s, parser.buffer[parser.buffer_pos:parser.buffer_pos+w]...) + parser.buffer_pos += w + } + parser.mark.index++ + parser.mark.column++ + parser.unread-- + return s +} + +// Copy a line break character to a string buffer and advance pointers. +func read_line(parser *yaml_parser_t, s []byte) []byte { + buf := parser.buffer + pos := parser.buffer_pos + switch { + case buf[pos] == '\r' && buf[pos+1] == '\n': + // CR LF . 
LF + s = append(s, '\n') + parser.buffer_pos += 2 + parser.mark.index++ + parser.unread-- + case buf[pos] == '\r' || buf[pos] == '\n': + // CR|LF . LF + s = append(s, '\n') + parser.buffer_pos += 1 + case buf[pos] == '\xC2' && buf[pos+1] == '\x85': + // NEL . LF + s = append(s, '\n') + parser.buffer_pos += 2 + case buf[pos] == '\xE2' && buf[pos+1] == '\x80' && (buf[pos+2] == '\xA8' || buf[pos+2] == '\xA9'): + // LS|PS . LS|PS + s = append(s, buf[parser.buffer_pos:pos+3]...) + parser.buffer_pos += 3 + default: + return s + } + parser.mark.index++ + parser.mark.column = 0 + parser.mark.line++ + parser.unread-- + return s +} + +// Get the next token. +func yaml_parser_scan(parser *yaml_parser_t, token *yaml_token_t) bool { + // Erase the token object. + *token = yaml_token_t{} // [Go] Is this necessary? + + // No tokens after STREAM-END or error. + if parser.stream_end_produced || parser.error != yaml_NO_ERROR { + return true + } + + // Ensure that the tokens queue contains enough tokens. + if !parser.token_available { + if !yaml_parser_fetch_more_tokens(parser) { + return false + } + } + + // Fetch the next token from the queue. + *token = parser.tokens[parser.tokens_head] + parser.tokens_head++ + parser.tokens_parsed++ + parser.token_available = false + + if token.typ == yaml_STREAM_END_TOKEN { + parser.stream_end_produced = true + } + return true +} + +// Set the scanner error and return false. +func yaml_parser_set_scanner_error(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string) bool { + parser.error = yaml_SCANNER_ERROR + parser.context = context + parser.context_mark = context_mark + parser.problem = problem + parser.problem_mark = parser.mark + return false +} + +func yaml_parser_set_scanner_tag_error(parser *yaml_parser_t, directive bool, context_mark yaml_mark_t, problem string) bool { + context := "while parsing a tag" + if directive { + context = "while parsing a %TAG directive" + } + return yaml_parser_set_scanner_error(parser, context, context_mark, problem) +} + +func trace(args ...interface{}) func() { + pargs := append([]interface{}{"+++"}, args...) + fmt.Println(pargs...) + pargs = append([]interface{}{"---"}, args...) + return func() { fmt.Println(pargs...) } +} + +// Ensure that the tokens queue contains at least one token which can be +// returned to the Parser. +func yaml_parser_fetch_more_tokens(parser *yaml_parser_t) bool { + // While we need more tokens to fetch, do it. + for { + if parser.tokens_head != len(parser.tokens) { + // If queue is non-empty, check if any potential simple key may + // occupy the head position. + head_tok_idx, ok := parser.simple_keys_by_tok[parser.tokens_parsed] + if !ok { + break + } else if valid, ok := yaml_simple_key_is_valid(parser, &parser.simple_keys[head_tok_idx]); !ok { + return false + } else if !valid { + break + } + } + // Fetch the next token. + if !yaml_parser_fetch_next_token(parser) { + return false + } + } + + parser.token_available = true + return true +} + +// The dispatcher for token fetchers. +func yaml_parser_fetch_next_token(parser *yaml_parser_t) bool { + // Ensure that the buffer is initialized. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + // Check if we just started scanning. Fetch STREAM-START then. + if !parser.stream_start_produced { + return yaml_parser_fetch_stream_start(parser) + } + + // Eat whitespaces and comments until we reach the next token. 
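read_line folds the various YAML line-break forms into a single '\n' while copying them into a scalar. A rough standalone sketch of that folding (foldBreaks is an illustrative helper, not part of the parser):

```go
package main

import (
	"fmt"
	"strings"
)

// foldBreaks mirrors what read_line does while copying a line break: CRLF,
// a lone CR and NEL all become a single '\n', while LS/PS pass through.
func foldBreaks(s string) string {
	return strings.NewReplacer("\r\n", "\n", "\r", "\n", "\u0085", "\n").Replace(s)
}

func main() {
	fmt.Printf("%q\n", foldBreaks("a\r\nb\rc\u0085d\u2028e"))
}
```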
+ if !yaml_parser_scan_to_next_token(parser) { + return false + } + + // Check the indentation level against the current column. + if !yaml_parser_unroll_indent(parser, parser.mark.column) { + return false + } + + // Ensure that the buffer contains at least 4 characters. 4 is the length + // of the longest indicators ('--- ' and '... '). + if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { + return false + } + + // Is it the end of the stream? + if is_z(parser.buffer, parser.buffer_pos) { + return yaml_parser_fetch_stream_end(parser) + } + + // Is it a directive? + if parser.mark.column == 0 && parser.buffer[parser.buffer_pos] == '%' { + return yaml_parser_fetch_directive(parser) + } + + buf := parser.buffer + pos := parser.buffer_pos + + // Is it the document start indicator? + if parser.mark.column == 0 && buf[pos] == '-' && buf[pos+1] == '-' && buf[pos+2] == '-' && is_blankz(buf, pos+3) { + return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_START_TOKEN) + } + + // Is it the document end indicator? + if parser.mark.column == 0 && buf[pos] == '.' && buf[pos+1] == '.' && buf[pos+2] == '.' && is_blankz(buf, pos+3) { + return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_END_TOKEN) + } + + // Is it the flow sequence start indicator? + if buf[pos] == '[' { + return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_SEQUENCE_START_TOKEN) + } + + // Is it the flow mapping start indicator? + if parser.buffer[parser.buffer_pos] == '{' { + return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_MAPPING_START_TOKEN) + } + + // Is it the flow sequence end indicator? + if parser.buffer[parser.buffer_pos] == ']' { + return yaml_parser_fetch_flow_collection_end(parser, + yaml_FLOW_SEQUENCE_END_TOKEN) + } + + // Is it the flow mapping end indicator? + if parser.buffer[parser.buffer_pos] == '}' { + return yaml_parser_fetch_flow_collection_end(parser, + yaml_FLOW_MAPPING_END_TOKEN) + } + + // Is it the flow entry indicator? + if parser.buffer[parser.buffer_pos] == ',' { + return yaml_parser_fetch_flow_entry(parser) + } + + // Is it the block entry indicator? + if parser.buffer[parser.buffer_pos] == '-' && is_blankz(parser.buffer, parser.buffer_pos+1) { + return yaml_parser_fetch_block_entry(parser) + } + + // Is it the key indicator? + if parser.buffer[parser.buffer_pos] == '?' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) { + return yaml_parser_fetch_key(parser) + } + + // Is it the value indicator? + if parser.buffer[parser.buffer_pos] == ':' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) { + return yaml_parser_fetch_value(parser) + } + + // Is it an alias? + if parser.buffer[parser.buffer_pos] == '*' { + return yaml_parser_fetch_anchor(parser, yaml_ALIAS_TOKEN) + } + + // Is it an anchor? + if parser.buffer[parser.buffer_pos] == '&' { + return yaml_parser_fetch_anchor(parser, yaml_ANCHOR_TOKEN) + } + + // Is it a tag? + if parser.buffer[parser.buffer_pos] == '!' { + return yaml_parser_fetch_tag(parser) + } + + // Is it a literal scalar? + if parser.buffer[parser.buffer_pos] == '|' && parser.flow_level == 0 { + return yaml_parser_fetch_block_scalar(parser, true) + } + + // Is it a folded scalar? + if parser.buffer[parser.buffer_pos] == '>' && parser.flow_level == 0 { + return yaml_parser_fetch_block_scalar(parser, false) + } + + // Is it a single-quoted scalar? 
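The dispatcher above selects a token fetcher from the next character. A compact, simplified sketch of that mapping (tokenFor is an illustrative helper and ignores the look-ahead and flow-level checks the real scanner performs):

```go
package main

import "fmt"

// tokenFor summarizes the dispatch in yaml_parser_fetch_next_token: once
// whitespace and comments are skipped, the next character picks the fetcher.
func tokenFor(b byte) string {
	switch b {
	case '[':
		return "FLOW-SEQUENCE-START"
	case ']':
		return "FLOW-SEQUENCE-END"
	case '{':
		return "FLOW-MAPPING-START"
	case '}':
		return "FLOW-MAPPING-END"
	case ',':
		return "FLOW-ENTRY"
	case '*':
		return "ALIAS"
	case '&':
		return "ANCHOR"
	case '!':
		return "TAG"
	case '\'', '"':
		return "SCALAR (flow)"
	case '|', '>':
		return "SCALAR (block)"
	case '%':
		return "DIRECTIVE (at column 0)"
	default:
		return "BLOCK-ENTRY/KEY/VALUE or plain SCALAR (context-dependent)"
	}
}

func main() {
	for _, b := range []byte("[{*&!'|%x") {
		fmt.Printf("%q -> %s\n", b, tokenFor(b))
	}
}
```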
+ if parser.buffer[parser.buffer_pos] == '\'' { + return yaml_parser_fetch_flow_scalar(parser, true) + } + + // Is it a double-quoted scalar? + if parser.buffer[parser.buffer_pos] == '"' { + return yaml_parser_fetch_flow_scalar(parser, false) + } + + // Is it a plain scalar? + // + // A plain scalar may start with any non-blank characters except + // + // '-', '?', ':', ',', '[', ']', '{', '}', + // '#', '&', '*', '!', '|', '>', '\'', '\"', + // '%', '@', '`'. + // + // In the block context (and, for the '-' indicator, in the flow context + // too), it may also start with the characters + // + // '-', '?', ':' + // + // if it is followed by a non-space character. + // + // The last rule is more restrictive than the specification requires. + // [Go] Make this logic more reasonable. + //switch parser.buffer[parser.buffer_pos] { + //case '-', '?', ':', ',', '?', '-', ',', ':', ']', '[', '}', '{', '&', '#', '!', '*', '>', '|', '"', '\'', '@', '%', '-', '`': + //} + if !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '-' || + parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':' || + parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '[' || + parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' || + parser.buffer[parser.buffer_pos] == '}' || parser.buffer[parser.buffer_pos] == '#' || + parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '*' || + parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '|' || + parser.buffer[parser.buffer_pos] == '>' || parser.buffer[parser.buffer_pos] == '\'' || + parser.buffer[parser.buffer_pos] == '"' || parser.buffer[parser.buffer_pos] == '%' || + parser.buffer[parser.buffer_pos] == '@' || parser.buffer[parser.buffer_pos] == '`') || + (parser.buffer[parser.buffer_pos] == '-' && !is_blank(parser.buffer, parser.buffer_pos+1)) || + (parser.flow_level == 0 && + (parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':') && + !is_blankz(parser.buffer, parser.buffer_pos+1)) { + return yaml_parser_fetch_plain_scalar(parser) + } + + // If we don't determine the token type so far, it is an error. + return yaml_parser_set_scanner_error(parser, + "while scanning for the next token", parser.mark, + "found character that cannot start any token") +} + +func yaml_simple_key_is_valid(parser *yaml_parser_t, simple_key *yaml_simple_key_t) (valid, ok bool) { + if !simple_key.possible { + return false, true + } + + // The 1.2 specification says: + // + // "If the ? indicator is omitted, parsing needs to see past the + // implicit key to recognize it as such. To limit the amount of + // lookahead required, the “:” indicator must appear at most 1024 + // Unicode characters beyond the start of the key. In addition, the key + // is restricted to a single line." + // + if simple_key.mark.line < parser.mark.line || simple_key.mark.index+1024 < parser.mark.index { + // Check if the potential simple key to be removed is required. + if simple_key.required { + return false, yaml_parser_set_scanner_error(parser, + "while scanning a simple key", simple_key.mark, + "could not find expected ':'") + } + simple_key.possible = false + return false, true + } + return true, true +} + +// Check if a simple key may start at the current position and add it if +// needed. 
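The 1024-character, single-line restriction on implicit keys fits in a few lines. A sketch mirroring the check in yaml_simple_key_is_valid (the mark type and field names are illustrative):

```go
package main

import "fmt"

// mark mirrors the fields of yaml_mark_t that matter for simple keys.
type mark struct{ index, line int }

// simpleKeyStillPossible sketches the rule: an implicit key must stay on one
// line and its ':' must appear within 1024 characters of the key start.
func simpleKeyStillPossible(key, current mark) bool {
	return key.line == current.line && key.index+1024 >= current.index
}

func main() {
	key := mark{index: 10, line: 3}
	fmt.Println(simpleKeyStillPossible(key, mark{index: 500, line: 3}))  // true
	fmt.Println(simpleKeyStillPossible(key, mark{index: 2000, line: 3})) // false: ':' too far away
	fmt.Println(simpleKeyStillPossible(key, mark{index: 20, line: 4}))   // false: key spans lines
}
```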
+func yaml_parser_save_simple_key(parser *yaml_parser_t) bool { + // A simple key is required at the current position if the scanner is in + // the block context and the current column coincides with the indentation + // level. + + required := parser.flow_level == 0 && parser.indent == parser.mark.column + + // + // If the current position may start a simple key, save it. + // + if parser.simple_key_allowed { + simple_key := yaml_simple_key_t{ + possible: true, + required: required, + token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head), + mark: parser.mark, + } + + if !yaml_parser_remove_simple_key(parser) { + return false + } + parser.simple_keys[len(parser.simple_keys)-1] = simple_key + parser.simple_keys_by_tok[simple_key.token_number] = len(parser.simple_keys) - 1 + } + return true +} + +// Remove a potential simple key at the current flow level. +func yaml_parser_remove_simple_key(parser *yaml_parser_t) bool { + i := len(parser.simple_keys) - 1 + if parser.simple_keys[i].possible { + // If the key is required, it is an error. + if parser.simple_keys[i].required { + return yaml_parser_set_scanner_error(parser, + "while scanning a simple key", parser.simple_keys[i].mark, + "could not find expected ':'") + } + // Remove the key from the stack. + parser.simple_keys[i].possible = false + delete(parser.simple_keys_by_tok, parser.simple_keys[i].token_number) + } + return true +} + +// max_flow_level limits the flow_level +const max_flow_level = 10000 + +// Increase the flow level and resize the simple key list if needed. +func yaml_parser_increase_flow_level(parser *yaml_parser_t) bool { + // Reset the simple key on the next level. + parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{ + possible: false, + required: false, + token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head), + mark: parser.mark, + }) + + // Increase the flow level. + parser.flow_level++ + if parser.flow_level > max_flow_level { + return yaml_parser_set_scanner_error(parser, + "while increasing flow level", parser.simple_keys[len(parser.simple_keys)-1].mark, + fmt.Sprintf("exceeded max depth of %d", max_flow_level)) + } + return true +} + +// Decrease the flow level. +func yaml_parser_decrease_flow_level(parser *yaml_parser_t) bool { + if parser.flow_level > 0 { + parser.flow_level-- + last := len(parser.simple_keys) - 1 + delete(parser.simple_keys_by_tok, parser.simple_keys[last].token_number) + parser.simple_keys = parser.simple_keys[:last] + } + return true +} + +// max_indents limits the indents stack size +const max_indents = 10000 + +// Push the current indentation level to the stack and set the new level +// the current column is greater than the indentation level. In this case, +// append or insert the specified token into the token queue. +func yaml_parser_roll_indent(parser *yaml_parser_t, column, number int, typ yaml_token_type_t, mark yaml_mark_t) bool { + // In the flow context, do nothing. + if parser.flow_level > 0 { + return true + } + + if parser.indent < column { + // Push the current indentation level to the stack and set the new + // indentation level. + parser.indents = append(parser.indents, parser.indent) + parser.indent = column + if len(parser.indents) > max_indents { + return yaml_parser_set_scanner_error(parser, + "while increasing indent level", parser.simple_keys[len(parser.simple_keys)-1].mark, + fmt.Sprintf("exceeded max depth of %d", max_indents)) + } + + // Create a token and insert it into the queue. 
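max_flow_level bounds how deeply flow collections may nest. A sketch of the observable effect through the public API, assuming the vendored copy matches upstream gopkg.in/yaml.v2:

```go
package main

import (
	"fmt"
	"strings"

	"gopkg.in/yaml.v2"
)

func main() {
	// yaml_parser_increase_flow_level caps nesting at max_flow_level (10000),
	// so absurdly deep flow collections fail with a scanner error instead of
	// exhausting resources.
	deep := strings.Repeat("[", 20000) + strings.Repeat("]", 20000)
	var out interface{}
	err := yaml.Unmarshal([]byte(deep), &out)
	fmt.Println(err) // expect a scanner error about exceeding the max depth of 10000
}
```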
+ token := yaml_token_t{ + typ: typ, + start_mark: mark, + end_mark: mark, + } + if number > -1 { + number -= parser.tokens_parsed + } + yaml_insert_token(parser, number, &token) + } + return true +} + +// Pop indentation levels from the indents stack until the current level +// becomes less or equal to the column. For each indentation level, append +// the BLOCK-END token. +func yaml_parser_unroll_indent(parser *yaml_parser_t, column int) bool { + // In the flow context, do nothing. + if parser.flow_level > 0 { + return true + } + + // Loop through the indentation levels in the stack. + for parser.indent > column { + // Create a token and append it to the queue. + token := yaml_token_t{ + typ: yaml_BLOCK_END_TOKEN, + start_mark: parser.mark, + end_mark: parser.mark, + } + yaml_insert_token(parser, -1, &token) + + // Pop the indentation level. + parser.indent = parser.indents[len(parser.indents)-1] + parser.indents = parser.indents[:len(parser.indents)-1] + } + return true +} + +// Initialize the scanner and produce the STREAM-START token. +func yaml_parser_fetch_stream_start(parser *yaml_parser_t) bool { + + // Set the initial indentation. + parser.indent = -1 + + // Initialize the simple key stack. + parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{}) + + parser.simple_keys_by_tok = make(map[int]int) + + // A simple key is allowed at the beginning of the stream. + parser.simple_key_allowed = true + + // We have started. + parser.stream_start_produced = true + + // Create the STREAM-START token and append it to the queue. + token := yaml_token_t{ + typ: yaml_STREAM_START_TOKEN, + start_mark: parser.mark, + end_mark: parser.mark, + encoding: parser.encoding, + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the STREAM-END token and shut down the scanner. +func yaml_parser_fetch_stream_end(parser *yaml_parser_t) bool { + + // Force new line. + if parser.mark.column != 0 { + parser.mark.column = 0 + parser.mark.line++ + } + + // Reset the indentation level. + if !yaml_parser_unroll_indent(parser, -1) { + return false + } + + // Reset simple keys. + if !yaml_parser_remove_simple_key(parser) { + return false + } + + parser.simple_key_allowed = false + + // Create the STREAM-END token and append it to the queue. + token := yaml_token_t{ + typ: yaml_STREAM_END_TOKEN, + start_mark: parser.mark, + end_mark: parser.mark, + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce a VERSION-DIRECTIVE or TAG-DIRECTIVE token. +func yaml_parser_fetch_directive(parser *yaml_parser_t) bool { + // Reset the indentation level. + if !yaml_parser_unroll_indent(parser, -1) { + return false + } + + // Reset simple keys. + if !yaml_parser_remove_simple_key(parser) { + return false + } + + parser.simple_key_allowed = false + + // Create the YAML-DIRECTIVE or TAG-DIRECTIVE token. + token := yaml_token_t{} + if !yaml_parser_scan_directive(parser, &token) { + return false + } + // Append the token to the queue. + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the DOCUMENT-START or DOCUMENT-END token. +func yaml_parser_fetch_document_indicator(parser *yaml_parser_t, typ yaml_token_type_t) bool { + // Reset the indentation level. + if !yaml_parser_unroll_indent(parser, -1) { + return false + } + + // Reset simple keys. + if !yaml_parser_remove_simple_key(parser) { + return false + } + + parser.simple_key_allowed = false + + // Consume the token. 
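Unrolling indentation is popping a stack and emitting one BLOCK-END per popped level. A minimal sketch with illustrative stack contents:

```go
package main

import "fmt"

func main() {
	// A sketch of yaml_parser_unroll_indent: pop every indentation level that
	// is deeper than the current column; each pop stands for one BLOCK-END
	// token appended to the queue.
	indents := []int{-1, 0, 2, 4} // enclosing indentation levels, deepest last
	indent := 6                   // current indentation level
	column := 0                   // column where the next token starts

	for indent > column {
		fmt.Println("emit BLOCK-END, closing indent", indent)
		indent = indents[len(indents)-1]
		indents = indents[:len(indents)-1]
	}
	fmt.Println("indent after unroll:", indent) // 0
}
```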
+ start_mark := parser.mark + + skip(parser) + skip(parser) + skip(parser) + + end_mark := parser.mark + + // Create the DOCUMENT-START or DOCUMENT-END token. + token := yaml_token_t{ + typ: typ, + start_mark: start_mark, + end_mark: end_mark, + } + // Append the token to the queue. + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the FLOW-SEQUENCE-START or FLOW-MAPPING-START token. +func yaml_parser_fetch_flow_collection_start(parser *yaml_parser_t, typ yaml_token_type_t) bool { + // The indicators '[' and '{' may start a simple key. + if !yaml_parser_save_simple_key(parser) { + return false + } + + // Increase the flow level. + if !yaml_parser_increase_flow_level(parser) { + return false + } + + // A simple key may follow the indicators '[' and '{'. + parser.simple_key_allowed = true + + // Consume the token. + start_mark := parser.mark + skip(parser) + end_mark := parser.mark + + // Create the FLOW-SEQUENCE-START of FLOW-MAPPING-START token. + token := yaml_token_t{ + typ: typ, + start_mark: start_mark, + end_mark: end_mark, + } + // Append the token to the queue. + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the FLOW-SEQUENCE-END or FLOW-MAPPING-END token. +func yaml_parser_fetch_flow_collection_end(parser *yaml_parser_t, typ yaml_token_type_t) bool { + // Reset any potential simple key on the current flow level. + if !yaml_parser_remove_simple_key(parser) { + return false + } + + // Decrease the flow level. + if !yaml_parser_decrease_flow_level(parser) { + return false + } + + // No simple keys after the indicators ']' and '}'. + parser.simple_key_allowed = false + + // Consume the token. + + start_mark := parser.mark + skip(parser) + end_mark := parser.mark + + // Create the FLOW-SEQUENCE-END of FLOW-MAPPING-END token. + token := yaml_token_t{ + typ: typ, + start_mark: start_mark, + end_mark: end_mark, + } + // Append the token to the queue. + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the FLOW-ENTRY token. +func yaml_parser_fetch_flow_entry(parser *yaml_parser_t) bool { + // Reset any potential simple keys on the current flow level. + if !yaml_parser_remove_simple_key(parser) { + return false + } + + // Simple keys are allowed after ','. + parser.simple_key_allowed = true + + // Consume the token. + start_mark := parser.mark + skip(parser) + end_mark := parser.mark + + // Create the FLOW-ENTRY token and append it to the queue. + token := yaml_token_t{ + typ: yaml_FLOW_ENTRY_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the BLOCK-ENTRY token. +func yaml_parser_fetch_block_entry(parser *yaml_parser_t) bool { + // Check if the scanner is in the block context. + if parser.flow_level == 0 { + // Check if we are allowed to start a new entry. + if !parser.simple_key_allowed { + return yaml_parser_set_scanner_error(parser, "", parser.mark, + "block sequence entries are not allowed in this context") + } + // Add the BLOCK-SEQUENCE-START token if needed. + if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_SEQUENCE_START_TOKEN, parser.mark) { + return false + } + } else { + // It is an error for the '-' indicator to occur in the flow context, + // but we let the Parser detect and report about it because the Parser + // is able to point to the context. + } + + // Reset any potential simple keys on the current flow level. 
+ if !yaml_parser_remove_simple_key(parser) { + return false + } + + // Simple keys are allowed after '-'. + parser.simple_key_allowed = true + + // Consume the token. + start_mark := parser.mark + skip(parser) + end_mark := parser.mark + + // Create the BLOCK-ENTRY token and append it to the queue. + token := yaml_token_t{ + typ: yaml_BLOCK_ENTRY_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the KEY token. +func yaml_parser_fetch_key(parser *yaml_parser_t) bool { + + // In the block context, additional checks are required. + if parser.flow_level == 0 { + // Check if we are allowed to start a new key (not nessesary simple). + if !parser.simple_key_allowed { + return yaml_parser_set_scanner_error(parser, "", parser.mark, + "mapping keys are not allowed in this context") + } + // Add the BLOCK-MAPPING-START token if needed. + if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) { + return false + } + } + + // Reset any potential simple keys on the current flow level. + if !yaml_parser_remove_simple_key(parser) { + return false + } + + // Simple keys are allowed after '?' in the block context. + parser.simple_key_allowed = parser.flow_level == 0 + + // Consume the token. + start_mark := parser.mark + skip(parser) + end_mark := parser.mark + + // Create the KEY token and append it to the queue. + token := yaml_token_t{ + typ: yaml_KEY_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the VALUE token. +func yaml_parser_fetch_value(parser *yaml_parser_t) bool { + + simple_key := &parser.simple_keys[len(parser.simple_keys)-1] + + // Have we found a simple key? + if valid, ok := yaml_simple_key_is_valid(parser, simple_key); !ok { + return false + + } else if valid { + + // Create the KEY token and insert it into the queue. + token := yaml_token_t{ + typ: yaml_KEY_TOKEN, + start_mark: simple_key.mark, + end_mark: simple_key.mark, + } + yaml_insert_token(parser, simple_key.token_number-parser.tokens_parsed, &token) + + // In the block context, we may need to add the BLOCK-MAPPING-START token. + if !yaml_parser_roll_indent(parser, simple_key.mark.column, + simple_key.token_number, + yaml_BLOCK_MAPPING_START_TOKEN, simple_key.mark) { + return false + } + + // Remove the simple key. + simple_key.possible = false + delete(parser.simple_keys_by_tok, simple_key.token_number) + + // A simple key cannot follow another simple key. + parser.simple_key_allowed = false + + } else { + // The ':' indicator follows a complex key. + + // In the block context, extra checks are required. + if parser.flow_level == 0 { + + // Check if we are allowed to start a complex value. + if !parser.simple_key_allowed { + return yaml_parser_set_scanner_error(parser, "", parser.mark, + "mapping values are not allowed in this context") + } + + // Add the BLOCK-MAPPING-START token if needed. + if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) { + return false + } + } + + // Simple keys after ':' are allowed in the block context. + parser.simple_key_allowed = parser.flow_level == 0 + } + + // Consume the token. + start_mark := parser.mark + skip(parser) + end_mark := parser.mark + + // Create the VALUE token and append it to the queue. 
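A simple key only becomes a KEY token once the ':' is seen, whereas the explicit '?' form emits KEY up front; both spellings decode to the same mapping. A sketch, assuming the vendored copy matches upstream gopkg.in/yaml.v2:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	// Implicit (simple) key: the KEY token is inserted retroactively by
	// yaml_parser_fetch_value. Explicit key: '?' triggers yaml_parser_fetch_key.
	implicit := []byte("a simple key: a value\n")
	explicit := []byte("? a simple key\n: a value\n")

	var m1, m2 map[string]string
	if err := yaml.Unmarshal(implicit, &m1); err != nil {
		panic(err)
	}
	if err := yaml.Unmarshal(explicit, &m2); err != nil {
		panic(err)
	}
	fmt.Println(m1, m2) // both: map[a simple key:a value]
}
```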
+ token := yaml_token_t{ + typ: yaml_VALUE_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the ALIAS or ANCHOR token. +func yaml_parser_fetch_anchor(parser *yaml_parser_t, typ yaml_token_type_t) bool { + // An anchor or an alias could be a simple key. + if !yaml_parser_save_simple_key(parser) { + return false + } + + // A simple key cannot follow an anchor or an alias. + parser.simple_key_allowed = false + + // Create the ALIAS or ANCHOR token and append it to the queue. + var token yaml_token_t + if !yaml_parser_scan_anchor(parser, &token, typ) { + return false + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the TAG token. +func yaml_parser_fetch_tag(parser *yaml_parser_t) bool { + // A tag could be a simple key. + if !yaml_parser_save_simple_key(parser) { + return false + } + + // A simple key cannot follow a tag. + parser.simple_key_allowed = false + + // Create the TAG token and append it to the queue. + var token yaml_token_t + if !yaml_parser_scan_tag(parser, &token) { + return false + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the SCALAR(...,literal) or SCALAR(...,folded) tokens. +func yaml_parser_fetch_block_scalar(parser *yaml_parser_t, literal bool) bool { + // Remove any potential simple keys. + if !yaml_parser_remove_simple_key(parser) { + return false + } + + // A simple key may follow a block scalar. + parser.simple_key_allowed = true + + // Create the SCALAR token and append it to the queue. + var token yaml_token_t + if !yaml_parser_scan_block_scalar(parser, &token, literal) { + return false + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the SCALAR(...,single-quoted) or SCALAR(...,double-quoted) tokens. +func yaml_parser_fetch_flow_scalar(parser *yaml_parser_t, single bool) bool { + // A plain scalar could be a simple key. + if !yaml_parser_save_simple_key(parser) { + return false + } + + // A simple key cannot follow a flow scalar. + parser.simple_key_allowed = false + + // Create the SCALAR token and append it to the queue. + var token yaml_token_t + if !yaml_parser_scan_flow_scalar(parser, &token, single) { + return false + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the SCALAR(...,plain) token. +func yaml_parser_fetch_plain_scalar(parser *yaml_parser_t) bool { + // A plain scalar could be a simple key. + if !yaml_parser_save_simple_key(parser) { + return false + } + + // A simple key cannot follow a flow scalar. + parser.simple_key_allowed = false + + // Create the SCALAR token and append it to the queue. + var token yaml_token_t + if !yaml_parser_scan_plain_scalar(parser, &token) { + return false + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Eat whitespaces and comments until the next token is found. +func yaml_parser_scan_to_next_token(parser *yaml_parser_t) bool { + + // Until the next token is not found. + for { + // Allow the BOM mark to start a line. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if parser.mark.column == 0 && is_bom(parser.buffer, parser.buffer_pos) { + skip(parser) + } + + // Eat whitespaces. + // Tabs are allowed: + // - in the flow context + // - in the block context, but not at the beginning of the line or + // after '-', '?', or ':' (complex value). 
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + for parser.buffer[parser.buffer_pos] == ' ' || ((parser.flow_level > 0 || !parser.simple_key_allowed) && parser.buffer[parser.buffer_pos] == '\t') { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Eat a comment until a line break. + if parser.buffer[parser.buffer_pos] == '#' { + for !is_breakz(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + } + + // If it is a line break, eat it. + if is_break(parser.buffer, parser.buffer_pos) { + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + skip_line(parser) + + // In the block context, a new line may start a simple key. + if parser.flow_level == 0 { + parser.simple_key_allowed = true + } + } else { + break // We have found a token. + } + } + + return true +} + +// Scan a YAML-DIRECTIVE or TAG-DIRECTIVE token. +// +// Scope: +// %YAML 1.1 # a comment \n +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +// %TAG !yaml! tag:yaml.org,2002: \n +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +// +func yaml_parser_scan_directive(parser *yaml_parser_t, token *yaml_token_t) bool { + // Eat '%'. + start_mark := parser.mark + skip(parser) + + // Scan the directive name. + var name []byte + if !yaml_parser_scan_directive_name(parser, start_mark, &name) { + return false + } + + // Is it a YAML directive? + if bytes.Equal(name, []byte("YAML")) { + // Scan the VERSION directive value. + var major, minor int8 + if !yaml_parser_scan_version_directive_value(parser, start_mark, &major, &minor) { + return false + } + end_mark := parser.mark + + // Create a VERSION-DIRECTIVE token. + *token = yaml_token_t{ + typ: yaml_VERSION_DIRECTIVE_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + major: major, + minor: minor, + } + + // Is it a TAG directive? + } else if bytes.Equal(name, []byte("TAG")) { + // Scan the TAG directive value. + var handle, prefix []byte + if !yaml_parser_scan_tag_directive_value(parser, start_mark, &handle, &prefix) { + return false + } + end_mark := parser.mark + + // Create a TAG-DIRECTIVE token. + *token = yaml_token_t{ + typ: yaml_TAG_DIRECTIVE_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: handle, + prefix: prefix, + } + + // Unknown directive. + } else { + yaml_parser_set_scanner_error(parser, "while scanning a directive", + start_mark, "found unknown directive name") + return false + } + + // Eat the rest of the line including any comments. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + for is_blank(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + if parser.buffer[parser.buffer_pos] == '#' { + for !is_breakz(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + } + + // Check if we are at the end of the line. + if !is_breakz(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a directive", + start_mark, "did not find expected comment or line break") + return false + } + + // Eat a line break. + if is_break(parser.buffer, parser.buffer_pos) { + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + skip_line(parser) + } + + return true +} + +// Scan the directive name. 
+// +// Scope: +// %YAML 1.1 # a comment \n +// ^^^^ +// %TAG !yaml! tag:yaml.org,2002: \n +// ^^^ +// +func yaml_parser_scan_directive_name(parser *yaml_parser_t, start_mark yaml_mark_t, name *[]byte) bool { + // Consume the directive name. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + var s []byte + for is_alpha(parser.buffer, parser.buffer_pos) { + s = read(parser, s) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Check if the name is empty. + if len(s) == 0 { + yaml_parser_set_scanner_error(parser, "while scanning a directive", + start_mark, "could not find expected directive name") + return false + } + + // Check for an blank character after the name. + if !is_blankz(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a directive", + start_mark, "found unexpected non-alphabetical character") + return false + } + *name = s + return true +} + +// Scan the value of VERSION-DIRECTIVE. +// +// Scope: +// %YAML 1.1 # a comment \n +// ^^^^^^ +func yaml_parser_scan_version_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, major, minor *int8) bool { + // Eat whitespaces. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + for is_blank(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Consume the major version number. + if !yaml_parser_scan_version_directive_number(parser, start_mark, major) { + return false + } + + // Eat '.'. + if parser.buffer[parser.buffer_pos] != '.' { + return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", + start_mark, "did not find expected digit or '.' character") + } + + skip(parser) + + // Consume the minor version number. + if !yaml_parser_scan_version_directive_number(parser, start_mark, minor) { + return false + } + return true +} + +const max_number_length = 2 + +// Scan the version number of VERSION-DIRECTIVE. +// +// Scope: +// %YAML 1.1 # a comment \n +// ^ +// %YAML 1.1 # a comment \n +// ^ +func yaml_parser_scan_version_directive_number(parser *yaml_parser_t, start_mark yaml_mark_t, number *int8) bool { + + // Repeat while the next character is digit. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + var value, length int8 + for is_digit(parser.buffer, parser.buffer_pos) { + // Check if the number is too long. + length++ + if length > max_number_length { + return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", + start_mark, "found extremely long version number") + } + value = value*10 + int8(as_digit(parser.buffer, parser.buffer_pos)) + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Check if the number was present. + if length == 0 { + return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", + start_mark, "did not find expected version number") + } + *number = value + return true +} + +// Scan the value of a TAG-DIRECTIVE token. +// +// Scope: +// %TAG !yaml! tag:yaml.org,2002: \n +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +// +func yaml_parser_scan_tag_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, handle, prefix *[]byte) bool { + var handle_value, prefix_value []byte + + // Eat whitespaces. 
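Version-directive components are limited to max_number_length digits. A standalone sketch of that scan (scanVersionNumber is an illustrative helper):

```go
package main

import "fmt"

// scanVersionNumber sketches yaml_parser_scan_version_directive_number:
// accumulate at most two decimal digits into an int8, stopping at the first
// non-digit (such as the '.' between major and minor).
func scanVersionNumber(s string) (int8, error) {
	var value, length int8
	for i := 0; i < len(s) && s[i] >= '0' && s[i] <= '9'; i++ {
		length++
		if length > 2 {
			return 0, fmt.Errorf("found extremely long version number")
		}
		value = value*10 + int8(s[i]-'0')
	}
	if length == 0 {
		return 0, fmt.Errorf("did not find expected version number")
	}
	return value, nil
}

func main() {
	fmt.Println(scanVersionNumber("1.1")) // 1 <nil> (stops at '.')
	fmt.Println(scanVersionNumber("12"))  // 12 <nil>
	fmt.Println(scanVersionNumber("123")) // 0 with an error
}
```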
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + for is_blank(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Scan a handle. + if !yaml_parser_scan_tag_handle(parser, true, start_mark, &handle_value) { + return false + } + + // Expect a whitespace. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if !is_blank(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", + start_mark, "did not find expected whitespace") + return false + } + + // Eat whitespaces. + for is_blank(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Scan a prefix. + if !yaml_parser_scan_tag_uri(parser, true, nil, start_mark, &prefix_value) { + return false + } + + // Expect a whitespace or line break. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if !is_blankz(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", + start_mark, "did not find expected whitespace or line break") + return false + } + + *handle = handle_value + *prefix = prefix_value + return true +} + +func yaml_parser_scan_anchor(parser *yaml_parser_t, token *yaml_token_t, typ yaml_token_type_t) bool { + var s []byte + + // Eat the indicator character. + start_mark := parser.mark + skip(parser) + + // Consume the value. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + for is_alpha(parser.buffer, parser.buffer_pos) { + s = read(parser, s) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + end_mark := parser.mark + + /* + * Check if length of the anchor is greater than 0 and it is followed by + * a whitespace character or one of the indicators: + * + * '?', ':', ',', ']', '}', '%', '@', '`'. + */ + + if len(s) == 0 || + !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '?' || + parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == ',' || + parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '}' || + parser.buffer[parser.buffer_pos] == '%' || parser.buffer[parser.buffer_pos] == '@' || + parser.buffer[parser.buffer_pos] == '`') { + context := "while scanning an alias" + if typ == yaml_ANCHOR_TOKEN { + context = "while scanning an anchor" + } + yaml_parser_set_scanner_error(parser, context, start_mark, + "did not find expected alphabetic or numeric character") + return false + } + + // Create a token. + *token = yaml_token_t{ + typ: typ, + start_mark: start_mark, + end_mark: end_mark, + value: s, + } + + return true +} + +/* + * Scan a TAG token. + */ + +func yaml_parser_scan_tag(parser *yaml_parser_t, token *yaml_token_t) bool { + var handle, suffix []byte + + start_mark := parser.mark + + // Check if the tag is in the canonical form. + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + + if parser.buffer[parser.buffer_pos+1] == '<' { + // Keep the handle as '' + + // Eat '!<' + skip(parser) + skip(parser) + + // Consume the tag value. + if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) { + return false + } + + // Check for '>' and eat it. 
+ if parser.buffer[parser.buffer_pos] != '>' { + yaml_parser_set_scanner_error(parser, "while scanning a tag", + start_mark, "did not find the expected '>'") + return false + } + + skip(parser) + } else { + // The tag has either the '!suffix' or the '!handle!suffix' form. + + // First, try to scan a handle. + if !yaml_parser_scan_tag_handle(parser, false, start_mark, &handle) { + return false + } + + // Check if it is, indeed, handle. + if handle[0] == '!' && len(handle) > 1 && handle[len(handle)-1] == '!' { + // Scan the suffix now. + if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) { + return false + } + } else { + // It wasn't a handle after all. Scan the rest of the tag. + if !yaml_parser_scan_tag_uri(parser, false, handle, start_mark, &suffix) { + return false + } + + // Set the handle to '!'. + handle = []byte{'!'} + + // A special case: the '!' tag. Set the handle to '' and the + // suffix to '!'. + if len(suffix) == 0 { + handle, suffix = suffix, handle + } + } + } + + // Check the character which ends the tag. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if !is_blankz(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a tag", + start_mark, "did not find expected whitespace or line break") + return false + } + + end_mark := parser.mark + + // Create a token. + *token = yaml_token_t{ + typ: yaml_TAG_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: handle, + suffix: suffix, + } + return true +} + +// Scan a tag handle. +func yaml_parser_scan_tag_handle(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, handle *[]byte) bool { + // Check the initial '!' character. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if parser.buffer[parser.buffer_pos] != '!' { + yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "did not find expected '!'") + return false + } + + var s []byte + + // Copy the '!' character. + s = read(parser, s) + + // Copy all subsequent alphabetical and numerical characters. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + for is_alpha(parser.buffer, parser.buffer_pos) { + s = read(parser, s) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Check if the trailing character is '!' and copy it. + if parser.buffer[parser.buffer_pos] == '!' { + s = read(parser, s) + } else { + // It's either the '!' tag or not really a tag handle. If it's a %TAG + // directive, it's an error. If it's a tag token, it must be a part of URI. + if directive && string(s) != "!" { + yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "did not find expected '!'") + return false + } + } + + *handle = s + return true +} + +// Scan a tag. +func yaml_parser_scan_tag_uri(parser *yaml_parser_t, directive bool, head []byte, start_mark yaml_mark_t, uri *[]byte) bool { + //size_t length = head ? strlen((char *)head) : 0 + var s []byte + hasTag := len(head) > 0 + + // Copy the head if needed. + // + // Note that we don't copy the leading '!' character. + if len(head) > 1 { + s = append(s, head[1:]...) + } + + // Scan the tag. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + // The set of characters that may appear in URI is as follows: + // + // '0'-'9', 'A'-'Z', 'a'-'z', '_', '-', ';', '/', '?', ':', '@', '&', + // '=', '+', '$', ',', '.', '!', '~', '*', '\'', '(', ')', '[', ']', + // '%'. 
+ // [Go] Convert this into more reasonable logic. + for is_alpha(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == ';' || + parser.buffer[parser.buffer_pos] == '/' || parser.buffer[parser.buffer_pos] == '?' || + parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == '@' || + parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '=' || + parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '$' || + parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '.' || + parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '~' || + parser.buffer[parser.buffer_pos] == '*' || parser.buffer[parser.buffer_pos] == '\'' || + parser.buffer[parser.buffer_pos] == '(' || parser.buffer[parser.buffer_pos] == ')' || + parser.buffer[parser.buffer_pos] == '[' || parser.buffer[parser.buffer_pos] == ']' || + parser.buffer[parser.buffer_pos] == '%' { + // Check if it is a URI-escape sequence. + if parser.buffer[parser.buffer_pos] == '%' { + if !yaml_parser_scan_uri_escapes(parser, directive, start_mark, &s) { + return false + } + } else { + s = read(parser, s) + } + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + hasTag = true + } + + if !hasTag { + yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "did not find expected tag URI") + return false + } + *uri = s + return true +} + +// Decode an URI-escape sequence corresponding to a single UTF-8 character. +func yaml_parser_scan_uri_escapes(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, s *[]byte) bool { + + // Decode the required number of characters. + w := 1024 + for w > 0 { + // Check for a URI-escaped octet. + if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) { + return false + } + + if !(parser.buffer[parser.buffer_pos] == '%' && + is_hex(parser.buffer, parser.buffer_pos+1) && + is_hex(parser.buffer, parser.buffer_pos+2)) { + return yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "did not find URI escaped octet") + } + + // Get the octet. + octet := byte((as_hex(parser.buffer, parser.buffer_pos+1) << 4) + as_hex(parser.buffer, parser.buffer_pos+2)) + + // If it is the leading octet, determine the length of the UTF-8 sequence. + if w == 1024 { + w = width(octet) + if w == 0 { + return yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "found an incorrect leading UTF-8 octet") + } + } else { + // Check if the trailing octet is correct. + if octet&0xC0 != 0x80 { + return yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "found an incorrect trailing UTF-8 octet") + } + } + + // Copy the octet and move the pointers. + *s = append(*s, octet) + skip(parser) + skip(parser) + skip(parser) + w-- + } + return true +} + +// Scan a block scalar. +func yaml_parser_scan_block_scalar(parser *yaml_parser_t, token *yaml_token_t, literal bool) bool { + // Eat the indicator '|' or '>'. + start_mark := parser.mark + skip(parser) + + // Scan the additional block scalar indicators. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + // Check for a chomping indicator. + var chomping, increment int + if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { + // Set the chomping method and eat the indicator. 
+ if parser.buffer[parser.buffer_pos] == '+' { + chomping = +1 + } else { + chomping = -1 + } + skip(parser) + + // Check for an indentation indicator. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if is_digit(parser.buffer, parser.buffer_pos) { + // Check that the indentation is greater than 0. + if parser.buffer[parser.buffer_pos] == '0' { + yaml_parser_set_scanner_error(parser, "while scanning a block scalar", + start_mark, "found an indentation indicator equal to 0") + return false + } + + // Get the indentation level and eat the indicator. + increment = as_digit(parser.buffer, parser.buffer_pos) + skip(parser) + } + + } else if is_digit(parser.buffer, parser.buffer_pos) { + // Do the same as above, but in the opposite order. + + if parser.buffer[parser.buffer_pos] == '0' { + yaml_parser_set_scanner_error(parser, "while scanning a block scalar", + start_mark, "found an indentation indicator equal to 0") + return false + } + increment = as_digit(parser.buffer, parser.buffer_pos) + skip(parser) + + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { + if parser.buffer[parser.buffer_pos] == '+' { + chomping = +1 + } else { + chomping = -1 + } + skip(parser) + } + } + + // Eat whitespaces and comments to the end of the line. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + for is_blank(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + if parser.buffer[parser.buffer_pos] == '#' { + for !is_breakz(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + } + + // Check if we are at the end of the line. + if !is_breakz(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a block scalar", + start_mark, "did not find expected comment or line break") + return false + } + + // Eat a line break. + if is_break(parser.buffer, parser.buffer_pos) { + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + skip_line(parser) + } + + end_mark := parser.mark + + // Set the indentation level if it was specified. + var indent int + if increment > 0 { + if parser.indent >= 0 { + indent = parser.indent + increment + } else { + indent = increment + } + } + + // Scan the leading line breaks and determine the indentation level if needed. + var s, leading_break, trailing_breaks []byte + if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) { + return false + } + + // Scan the block scalar content. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + var leading_blank, trailing_blank bool + for parser.mark.column == indent && !is_z(parser.buffer, parser.buffer_pos) { + // We are at the beginning of a non-empty line. + + // Is it a trailing whitespace? + trailing_blank = is_blank(parser.buffer, parser.buffer_pos) + + // Check if we need to fold the leading line break. + if !literal && !leading_blank && !trailing_blank && len(leading_break) > 0 && leading_break[0] == '\n' { + // Do we need to join the lines by space? + if len(trailing_breaks) == 0 { + s = append(s, ' ') + } + } else { + s = append(s, leading_break...) + } + leading_break = leading_break[:0] + + // Append the remaining line breaks. 
+ s = append(s, trailing_breaks...) + trailing_breaks = trailing_breaks[:0] + + // Is it a leading whitespace? + leading_blank = is_blank(parser.buffer, parser.buffer_pos) + + // Consume the current line. + for !is_breakz(parser.buffer, parser.buffer_pos) { + s = read(parser, s) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Consume the line break. + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + + leading_break = read_line(parser, leading_break) + + // Eat the following indentation spaces and line breaks. + if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) { + return false + } + } + + // Chomp the tail. + if chomping != -1 { + s = append(s, leading_break...) + } + if chomping == 1 { + s = append(s, trailing_breaks...) + } + + // Create a token. + *token = yaml_token_t{ + typ: yaml_SCALAR_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: s, + style: yaml_LITERAL_SCALAR_STYLE, + } + if !literal { + token.style = yaml_FOLDED_SCALAR_STYLE + } + return true +} + +// Scan indentation spaces and line breaks for a block scalar. Determine the +// indentation level if needed. +func yaml_parser_scan_block_scalar_breaks(parser *yaml_parser_t, indent *int, breaks *[]byte, start_mark yaml_mark_t, end_mark *yaml_mark_t) bool { + *end_mark = parser.mark + + // Eat the indentation spaces and line breaks. + max_indent := 0 + for { + // Eat the indentation spaces. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + for (*indent == 0 || parser.mark.column < *indent) && is_space(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + if parser.mark.column > max_indent { + max_indent = parser.mark.column + } + + // Check for a tab character messing the indentation. + if (*indent == 0 || parser.mark.column < *indent) && is_tab(parser.buffer, parser.buffer_pos) { + return yaml_parser_set_scanner_error(parser, "while scanning a block scalar", + start_mark, "found a tab character where an indentation space is expected") + } + + // Have we found a non-empty line? + if !is_break(parser.buffer, parser.buffer_pos) { + break + } + + // Consume the line break. + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + // [Go] Should really be returning breaks instead. + *breaks = read_line(parser, *breaks) + *end_mark = parser.mark + } + + // Determine the indentation level if needed. + if *indent == 0 { + *indent = max_indent + if *indent < parser.indent+1 { + *indent = parser.indent + 1 + } + if *indent < 1 { + *indent = 1 + } + } + return true +} + +// Scan a quoted scalar. +func yaml_parser_scan_flow_scalar(parser *yaml_parser_t, token *yaml_token_t, single bool) bool { + // Eat the left quote. + start_mark := parser.mark + skip(parser) + + // Consume the content of the quoted scalar. + var s, leading_break, trailing_breaks, whitespaces []byte + for { + // Check that there are no document indicators at the beginning of the line. + if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { + return false + } + + if parser.mark.column == 0 && + ((parser.buffer[parser.buffer_pos+0] == '-' && + parser.buffer[parser.buffer_pos+1] == '-' && + parser.buffer[parser.buffer_pos+2] == '-') || + (parser.buffer[parser.buffer_pos+0] == '.' && + parser.buffer[parser.buffer_pos+1] == '.' 
&& + parser.buffer[parser.buffer_pos+2] == '.')) && + is_blankz(parser.buffer, parser.buffer_pos+3) { + yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", + start_mark, "found unexpected document indicator") + return false + } + + // Check for EOF. + if is_z(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", + start_mark, "found unexpected end of stream") + return false + } + + // Consume non-blank characters. + leading_blanks := false + for !is_blankz(parser.buffer, parser.buffer_pos) { + if single && parser.buffer[parser.buffer_pos] == '\'' && parser.buffer[parser.buffer_pos+1] == '\'' { + // Is is an escaped single quote. + s = append(s, '\'') + skip(parser) + skip(parser) + + } else if single && parser.buffer[parser.buffer_pos] == '\'' { + // It is a right single quote. + break + } else if !single && parser.buffer[parser.buffer_pos] == '"' { + // It is a right double quote. + break + + } else if !single && parser.buffer[parser.buffer_pos] == '\\' && is_break(parser.buffer, parser.buffer_pos+1) { + // It is an escaped line break. + if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) { + return false + } + skip(parser) + skip_line(parser) + leading_blanks = true + break + + } else if !single && parser.buffer[parser.buffer_pos] == '\\' { + // It is an escape sequence. + code_length := 0 + + // Check the escape character. + switch parser.buffer[parser.buffer_pos+1] { + case '0': + s = append(s, 0) + case 'a': + s = append(s, '\x07') + case 'b': + s = append(s, '\x08') + case 't', '\t': + s = append(s, '\x09') + case 'n': + s = append(s, '\x0A') + case 'v': + s = append(s, '\x0B') + case 'f': + s = append(s, '\x0C') + case 'r': + s = append(s, '\x0D') + case 'e': + s = append(s, '\x1B') + case ' ': + s = append(s, '\x20') + case '"': + s = append(s, '"') + case '\'': + s = append(s, '\'') + case '\\': + s = append(s, '\\') + case 'N': // NEL (#x85) + s = append(s, '\xC2') + s = append(s, '\x85') + case '_': // #xA0 + s = append(s, '\xC2') + s = append(s, '\xA0') + case 'L': // LS (#x2028) + s = append(s, '\xE2') + s = append(s, '\x80') + s = append(s, '\xA8') + case 'P': // PS (#x2029) + s = append(s, '\xE2') + s = append(s, '\x80') + s = append(s, '\xA9') + case 'x': + code_length = 2 + case 'u': + code_length = 4 + case 'U': + code_length = 8 + default: + yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", + start_mark, "found unknown escape character") + return false + } + + skip(parser) + skip(parser) + + // Consume an arbitrary escape code. + if code_length > 0 { + var value int + + // Scan the character value. + if parser.unread < code_length && !yaml_parser_update_buffer(parser, code_length) { + return false + } + for k := 0; k < code_length; k++ { + if !is_hex(parser.buffer, parser.buffer_pos+k) { + yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", + start_mark, "did not find expected hexdecimal number") + return false + } + value = (value << 4) + as_hex(parser.buffer, parser.buffer_pos+k) + } + + // Check the value and write the character. 
+ if (value >= 0xD800 && value <= 0xDFFF) || value > 0x10FFFF { + yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", + start_mark, "found invalid Unicode character escape code") + return false + } + if value <= 0x7F { + s = append(s, byte(value)) + } else if value <= 0x7FF { + s = append(s, byte(0xC0+(value>>6))) + s = append(s, byte(0x80+(value&0x3F))) + } else if value <= 0xFFFF { + s = append(s, byte(0xE0+(value>>12))) + s = append(s, byte(0x80+((value>>6)&0x3F))) + s = append(s, byte(0x80+(value&0x3F))) + } else { + s = append(s, byte(0xF0+(value>>18))) + s = append(s, byte(0x80+((value>>12)&0x3F))) + s = append(s, byte(0x80+((value>>6)&0x3F))) + s = append(s, byte(0x80+(value&0x3F))) + } + + // Advance the pointer. + for k := 0; k < code_length; k++ { + skip(parser) + } + } + } else { + // It is a non-escaped non-blank character. + s = read(parser, s) + } + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + } + + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + // Check if we are at the end of the scalar. + if single { + if parser.buffer[parser.buffer_pos] == '\'' { + break + } + } else { + if parser.buffer[parser.buffer_pos] == '"' { + break + } + } + + // Consume blank characters. + for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) { + if is_blank(parser.buffer, parser.buffer_pos) { + // Consume a space or a tab character. + if !leading_blanks { + whitespaces = read(parser, whitespaces) + } else { + skip(parser) + } + } else { + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + + // Check if it is a first line break. + if !leading_blanks { + whitespaces = whitespaces[:0] + leading_break = read_line(parser, leading_break) + leading_blanks = true + } else { + trailing_breaks = read_line(parser, trailing_breaks) + } + } + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Join the whitespaces or fold line breaks. + if leading_blanks { + // Do we need to fold line breaks? + if len(leading_break) > 0 && leading_break[0] == '\n' { + if len(trailing_breaks) == 0 { + s = append(s, ' ') + } else { + s = append(s, trailing_breaks...) + } + } else { + s = append(s, leading_break...) + s = append(s, trailing_breaks...) + } + trailing_breaks = trailing_breaks[:0] + leading_break = leading_break[:0] + } else { + s = append(s, whitespaces...) + whitespaces = whitespaces[:0] + } + } + + // Eat the right quote. + skip(parser) + end_mark := parser.mark + + // Create a token. + *token = yaml_token_t{ + typ: yaml_SCALAR_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: s, + style: yaml_SINGLE_QUOTED_SCALAR_STYLE, + } + if !single { + token.style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + } + return true +} + +// Scan a plain scalar. +func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) bool { + + var s, leading_break, trailing_breaks, whitespaces []byte + var leading_blanks bool + var indent = parser.indent + 1 + + start_mark := parser.mark + end_mark := parser.mark + + // Consume the content of the plain scalar. + for { + // Check for a document indicator. + if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { + return false + } + if parser.mark.column == 0 && + ((parser.buffer[parser.buffer_pos+0] == '-' && + parser.buffer[parser.buffer_pos+1] == '-' && + parser.buffer[parser.buffer_pos+2] == '-') || + (parser.buffer[parser.buffer_pos+0] == '.' 
&& + parser.buffer[parser.buffer_pos+1] == '.' && + parser.buffer[parser.buffer_pos+2] == '.')) && + is_blankz(parser.buffer, parser.buffer_pos+3) { + break + } + + // Check for a comment. + if parser.buffer[parser.buffer_pos] == '#' { + break + } + + // Consume non-blank characters. + for !is_blankz(parser.buffer, parser.buffer_pos) { + + // Check for indicators that may end a plain scalar. + if (parser.buffer[parser.buffer_pos] == ':' && is_blankz(parser.buffer, parser.buffer_pos+1)) || + (parser.flow_level > 0 && + (parser.buffer[parser.buffer_pos] == ',' || + parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == '[' || + parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' || + parser.buffer[parser.buffer_pos] == '}')) { + break + } + + // Check if we need to join whitespaces and breaks. + if leading_blanks || len(whitespaces) > 0 { + if leading_blanks { + // Do we need to fold line breaks? + if leading_break[0] == '\n' { + if len(trailing_breaks) == 0 { + s = append(s, ' ') + } else { + s = append(s, trailing_breaks...) + } + } else { + s = append(s, leading_break...) + s = append(s, trailing_breaks...) + } + trailing_breaks = trailing_breaks[:0] + leading_break = leading_break[:0] + leading_blanks = false + } else { + s = append(s, whitespaces...) + whitespaces = whitespaces[:0] + } + } + + // Copy the character. + s = read(parser, s) + + end_mark = parser.mark + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + } + + // Is it the end? + if !(is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos)) { + break + } + + // Consume blank characters. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) { + if is_blank(parser.buffer, parser.buffer_pos) { + + // Check for tab characters that abuse indentation. + if leading_blanks && parser.mark.column < indent && is_tab(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a plain scalar", + start_mark, "found a tab character that violates indentation") + return false + } + + // Consume a space or a tab character. + if !leading_blanks { + whitespaces = read(parser, whitespaces) + } else { + skip(parser) + } + } else { + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + + // Check if it is a first line break. + if !leading_blanks { + whitespaces = whitespaces[:0] + leading_break = read_line(parser, leading_break) + leading_blanks = true + } else { + trailing_breaks = read_line(parser, trailing_breaks) + } + } + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Check indentation level. + if parser.flow_level == 0 && parser.mark.column < indent { + break + } + } + + // Create a token. + *token = yaml_token_t{ + typ: yaml_SCALAR_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: s, + style: yaml_PLAIN_SCALAR_STYLE, + } + + // Note that we change the 'simple_key_allowed' flag. 
+ if leading_blanks { + parser.simple_key_allowed = true + } + return true +} diff --git a/vendor/gopkg.in/yaml.v2/sorter.go b/vendor/gopkg.in/yaml.v2/sorter.go new file mode 100644 index 0000000..4c45e66 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/sorter.go @@ -0,0 +1,113 @@ +package yaml + +import ( + "reflect" + "unicode" +) + +type keyList []reflect.Value + +func (l keyList) Len() int { return len(l) } +func (l keyList) Swap(i, j int) { l[i], l[j] = l[j], l[i] } +func (l keyList) Less(i, j int) bool { + a := l[i] + b := l[j] + ak := a.Kind() + bk := b.Kind() + for (ak == reflect.Interface || ak == reflect.Ptr) && !a.IsNil() { + a = a.Elem() + ak = a.Kind() + } + for (bk == reflect.Interface || bk == reflect.Ptr) && !b.IsNil() { + b = b.Elem() + bk = b.Kind() + } + af, aok := keyFloat(a) + bf, bok := keyFloat(b) + if aok && bok { + if af != bf { + return af < bf + } + if ak != bk { + return ak < bk + } + return numLess(a, b) + } + if ak != reflect.String || bk != reflect.String { + return ak < bk + } + ar, br := []rune(a.String()), []rune(b.String()) + for i := 0; i < len(ar) && i < len(br); i++ { + if ar[i] == br[i] { + continue + } + al := unicode.IsLetter(ar[i]) + bl := unicode.IsLetter(br[i]) + if al && bl { + return ar[i] < br[i] + } + if al || bl { + return bl + } + var ai, bi int + var an, bn int64 + if ar[i] == '0' || br[i] == '0' { + for j := i-1; j >= 0 && unicode.IsDigit(ar[j]); j-- { + if ar[j] != '0' { + an = 1 + bn = 1 + break + } + } + } + for ai = i; ai < len(ar) && unicode.IsDigit(ar[ai]); ai++ { + an = an*10 + int64(ar[ai]-'0') + } + for bi = i; bi < len(br) && unicode.IsDigit(br[bi]); bi++ { + bn = bn*10 + int64(br[bi]-'0') + } + if an != bn { + return an < bn + } + if ai != bi { + return ai < bi + } + return ar[i] < br[i] + } + return len(ar) < len(br) +} + +// keyFloat returns a float value for v if it is a number/bool +// and whether it is a number/bool or not. +func keyFloat(v reflect.Value) (f float64, ok bool) { + switch v.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return float64(v.Int()), true + case reflect.Float32, reflect.Float64: + return v.Float(), true + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return float64(v.Uint()), true + case reflect.Bool: + if v.Bool() { + return 1, true + } + return 0, true + } + return 0, false +} + +// numLess returns whether a < b. +// a and b must necessarily have the same kind. +func numLess(a, b reflect.Value) bool { + switch a.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return a.Int() < b.Int() + case reflect.Float32, reflect.Float64: + return a.Float() < b.Float() + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return a.Uint() < b.Uint() + case reflect.Bool: + return !a.Bool() && b.Bool() + } + panic("not a number") +} diff --git a/vendor/gopkg.in/yaml.v2/writerc.go b/vendor/gopkg.in/yaml.v2/writerc.go new file mode 100644 index 0000000..a2dde60 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/writerc.go @@ -0,0 +1,26 @@ +package yaml + +// Set the writer error and return false. +func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool { + emitter.error = yaml_WRITER_ERROR + emitter.problem = problem + return false +} + +// Flush the output buffer. 
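For context, the keyList ordering defined in sorter.go above is what the encoder uses when emitting map keys, so keys with numeric suffixes compare by value rather than lexically. A minimal sketch of the observable behaviour, assuming illustrative key names (output shown as the expected shape, not captured output):

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	// Map keys are sorted with the numeric-aware comparison in keyList.Less,
	// so "item2" sorts before "item10" (2 < 10), not lexicographically.
	m := map[string]int{"item10": 10, "item2": 2, "item1": 1}

	out, err := yaml.Marshal(m)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
	// Expected shape:
	// item1: 1
	// item2: 2
	// item10: 10
}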
+func yaml_emitter_flush(emitter *yaml_emitter_t) bool { + if emitter.write_handler == nil { + panic("write handler not set") + } + + // Check if the buffer is empty. + if emitter.buffer_pos == 0 { + return true + } + + if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil { + return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error()) + } + emitter.buffer_pos = 0 + return true +} diff --git a/vendor/gopkg.in/yaml.v2/yaml.go b/vendor/gopkg.in/yaml.v2/yaml.go new file mode 100644 index 0000000..3081388 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/yaml.go @@ -0,0 +1,478 @@ +// Package yaml implements YAML support for the Go language. +// +// Source code and other details for the project are available at GitHub: +// +// https://github.com/go-yaml/yaml +// +package yaml + +import ( + "errors" + "fmt" + "io" + "reflect" + "strings" + "sync" +) + +// MapSlice encodes and decodes as a YAML map. +// The order of keys is preserved when encoding and decoding. +type MapSlice []MapItem + +// MapItem is an item in a MapSlice. +type MapItem struct { + Key, Value interface{} +} + +// The Unmarshaler interface may be implemented by types to customize their +// behavior when being unmarshaled from a YAML document. The UnmarshalYAML +// method receives a function that may be called to unmarshal the original +// YAML value into a field or variable. It is safe to call the unmarshal +// function parameter more than once if necessary. +type Unmarshaler interface { + UnmarshalYAML(unmarshal func(interface{}) error) error +} + +// The Marshaler interface may be implemented by types to customize their +// behavior when being marshaled into a YAML document. The returned value +// is marshaled in place of the original value implementing Marshaler. +// +// If an error is returned by MarshalYAML, the marshaling procedure stops +// and returns with the provided error. +type Marshaler interface { + MarshalYAML() (interface{}, error) +} + +// Unmarshal decodes the first document found within the in byte slice +// and assigns decoded values into the out value. +// +// Maps and pointers (to a struct, string, int, etc) are accepted as out +// values. If an internal pointer within a struct is not initialized, +// the yaml package will initialize it if necessary for unmarshalling +// the provided data. The out parameter must not be nil. +// +// The type of the decoded values should be compatible with the respective +// values in out. If one or more values cannot be decoded due to a type +// mismatches, decoding continues partially until the end of the YAML +// content, and a *yaml.TypeError is returned with details for all +// missed values. +// +// Struct fields are only unmarshalled if they are exported (have an +// upper case first letter), and are unmarshalled using the field name +// lowercased as the default key. Custom keys may be defined via the +// "yaml" name in the field tag: the content preceding the first comma +// is used as the key, and the following comma-separated options are +// used to tweak the marshalling process (see Marshal). +// Conflicting names result in a runtime error. +// +// For example: +// +// type T struct { +// F int `yaml:"a,omitempty"` +// B int +// } +// var t T +// yaml.Unmarshal([]byte("a: 1\nb: 2"), &t) +// +// See the documentation of Marshal for the format of tags and a list of +// supported tag options. 
+// +func Unmarshal(in []byte, out interface{}) (err error) { + return unmarshal(in, out, false) +} + +// UnmarshalStrict is like Unmarshal except that any fields that are found +// in the data that do not have corresponding struct members, or mapping +// keys that are duplicates, will result in +// an error. +func UnmarshalStrict(in []byte, out interface{}) (err error) { + return unmarshal(in, out, true) +} + +// A Decoder reads and decodes YAML values from an input stream. +type Decoder struct { + strict bool + parser *parser +} + +// NewDecoder returns a new decoder that reads from r. +// +// The decoder introduces its own buffering and may read +// data from r beyond the YAML values requested. +func NewDecoder(r io.Reader) *Decoder { + return &Decoder{ + parser: newParserFromReader(r), + } +} + +// SetStrict sets whether strict decoding behaviour is enabled when +// decoding items in the data (see UnmarshalStrict). By default, decoding is not strict. +func (dec *Decoder) SetStrict(strict bool) { + dec.strict = strict +} + +// Decode reads the next YAML-encoded value from its input +// and stores it in the value pointed to by v. +// +// See the documentation for Unmarshal for details about the +// conversion of YAML into a Go value. +func (dec *Decoder) Decode(v interface{}) (err error) { + d := newDecoder(dec.strict) + defer handleErr(&err) + node := dec.parser.parse() + if node == nil { + return io.EOF + } + out := reflect.ValueOf(v) + if out.Kind() == reflect.Ptr && !out.IsNil() { + out = out.Elem() + } + d.unmarshal(node, out) + if len(d.terrors) > 0 { + return &TypeError{d.terrors} + } + return nil +} + +func unmarshal(in []byte, out interface{}, strict bool) (err error) { + defer handleErr(&err) + d := newDecoder(strict) + p := newParser(in) + defer p.destroy() + node := p.parse() + if node != nil { + v := reflect.ValueOf(out) + if v.Kind() == reflect.Ptr && !v.IsNil() { + v = v.Elem() + } + d.unmarshal(node, v) + } + if len(d.terrors) > 0 { + return &TypeError{d.terrors} + } + return nil +} + +// Marshal serializes the value provided into a YAML document. The structure +// of the generated document will reflect the structure of the value itself. +// Maps and pointers (to struct, string, int, etc) are accepted as the in value. +// +// Struct fields are only marshalled if they are exported (have an upper case +// first letter), and are marshalled using the field name lowercased as the +// default key. Custom keys may be defined via the "yaml" name in the field +// tag: the content preceding the first comma is used as the key, and the +// following comma-separated options are used to tweak the marshalling process. +// Conflicting names result in a runtime error. +// +// The field tag format accepted is: +// +// `(...) yaml:"[][,[,]]" (...)` +// +// The following flags are currently supported: +// +// omitempty Only include the field if it's not set to the zero +// value for the type or to empty slices or maps. +// Zero valued structs will be omitted if all their public +// fields are zero, unless they implement an IsZero +// method (see the IsZeroer interface type), in which +// case the field will be excluded if IsZero returns true. +// +// flow Marshal using a flow style (useful for structs, +// sequences and maps). +// +// inline Inline the field, which must be a struct or a map, +// causing all of its fields or keys to be processed as if +// they were part of the outer struct. For maps, keys must +// not conflict with the yaml keys of other struct fields. 
+// +// In addition, if the key is "-", the field is ignored. +// +// For example: +// +// type T struct { +// F int `yaml:"a,omitempty"` +// B int +// } +// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n" +// yaml.Marshal(&T{F: 1}} // Returns "a: 1\nb: 0\n" +// +func Marshal(in interface{}) (out []byte, err error) { + defer handleErr(&err) + e := newEncoder() + defer e.destroy() + e.marshalDoc("", reflect.ValueOf(in)) + e.finish() + out = e.out + return +} + +// An Encoder writes YAML values to an output stream. +type Encoder struct { + encoder *encoder +} + +// NewEncoder returns a new encoder that writes to w. +// The Encoder should be closed after use to flush all data +// to w. +func NewEncoder(w io.Writer) *Encoder { + return &Encoder{ + encoder: newEncoderWithWriter(w), + } +} + +// Encode writes the YAML encoding of v to the stream. +// If multiple items are encoded to the stream, the +// second and subsequent document will be preceded +// with a "---" document separator, but the first will not. +// +// See the documentation for Marshal for details about the conversion of Go +// values to YAML. +func (e *Encoder) Encode(v interface{}) (err error) { + defer handleErr(&err) + e.encoder.marshalDoc("", reflect.ValueOf(v)) + return nil +} + +// Close closes the encoder by writing any remaining data. +// It does not write a stream terminating string "...". +func (e *Encoder) Close() (err error) { + defer handleErr(&err) + e.encoder.finish() + return nil +} + +func handleErr(err *error) { + if v := recover(); v != nil { + if e, ok := v.(yamlError); ok { + *err = e.err + } else { + panic(v) + } + } +} + +type yamlError struct { + err error +} + +func fail(err error) { + panic(yamlError{err}) +} + +func failf(format string, args ...interface{}) { + panic(yamlError{fmt.Errorf("yaml: "+format, args...)}) +} + +// A TypeError is returned by Unmarshal when one or more fields in +// the YAML document cannot be properly decoded into the requested +// types. When this error is returned, the value is still +// unmarshaled partially. +type TypeError struct { + Errors []string +} + +func (e *TypeError) Error() string { + return fmt.Sprintf("yaml: unmarshal errors:\n %s", strings.Join(e.Errors, "\n ")) +} + +// -------------------------------------------------------------------------- +// Maintain a mapping of keys to structure field indexes + +// The code in this section was copied from mgo/bson. + +// structInfo holds details for the serialization of fields of +// a given struct. +type structInfo struct { + FieldsMap map[string]fieldInfo + FieldsList []fieldInfo + + // InlineMap is the number of the field in the struct that + // contains an ,inline map, or -1 if there's none. + InlineMap int +} + +type fieldInfo struct { + Key string + Num int + OmitEmpty bool + Flow bool + // Id holds the unique field identifier, so we can cheaply + // check for field duplicates without maintaining an extra map. + Id int + + // Inline holds the field index if the field is part of an inlined struct. 
+ Inline []int +} + +var structMap = make(map[reflect.Type]*structInfo) +var fieldMapMutex sync.RWMutex + +func getStructInfo(st reflect.Type) (*structInfo, error) { + fieldMapMutex.RLock() + sinfo, found := structMap[st] + fieldMapMutex.RUnlock() + if found { + return sinfo, nil + } + + n := st.NumField() + fieldsMap := make(map[string]fieldInfo) + fieldsList := make([]fieldInfo, 0, n) + inlineMap := -1 + for i := 0; i != n; i++ { + field := st.Field(i) + if field.PkgPath != "" && !field.Anonymous { + continue // Private field + } + + info := fieldInfo{Num: i} + + tag := field.Tag.Get("yaml") + if tag == "" && strings.Index(string(field.Tag), ":") < 0 { + tag = string(field.Tag) + } + if tag == "-" { + continue + } + + inline := false + fields := strings.Split(tag, ",") + if len(fields) > 1 { + for _, flag := range fields[1:] { + switch flag { + case "omitempty": + info.OmitEmpty = true + case "flow": + info.Flow = true + case "inline": + inline = true + default: + return nil, errors.New(fmt.Sprintf("Unsupported flag %q in tag %q of type %s", flag, tag, st)) + } + } + tag = fields[0] + } + + if inline { + switch field.Type.Kind() { + case reflect.Map: + if inlineMap >= 0 { + return nil, errors.New("Multiple ,inline maps in struct " + st.String()) + } + if field.Type.Key() != reflect.TypeOf("") { + return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String()) + } + inlineMap = info.Num + case reflect.Struct: + sinfo, err := getStructInfo(field.Type) + if err != nil { + return nil, err + } + for _, finfo := range sinfo.FieldsList { + if _, found := fieldsMap[finfo.Key]; found { + msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String() + return nil, errors.New(msg) + } + if finfo.Inline == nil { + finfo.Inline = []int{i, finfo.Num} + } else { + finfo.Inline = append([]int{i}, finfo.Inline...) + } + finfo.Id = len(fieldsList) + fieldsMap[finfo.Key] = finfo + fieldsList = append(fieldsList, finfo) + } + default: + //return nil, errors.New("Option ,inline needs a struct value or map field") + return nil, errors.New("Option ,inline needs a struct value field") + } + continue + } + + if tag != "" { + info.Key = tag + } else { + info.Key = strings.ToLower(field.Name) + } + + if _, found = fieldsMap[info.Key]; found { + msg := "Duplicated key '" + info.Key + "' in struct " + st.String() + return nil, errors.New(msg) + } + + info.Id = len(fieldsList) + fieldsList = append(fieldsList, info) + fieldsMap[info.Key] = info + } + + sinfo = &structInfo{ + FieldsMap: fieldsMap, + FieldsList: fieldsList, + InlineMap: inlineMap, + } + + fieldMapMutex.Lock() + structMap[st] = sinfo + fieldMapMutex.Unlock() + return sinfo, nil +} + +// IsZeroer is used to check whether an object is zero to +// determine whether it should be omitted when marshaling +// with the omitempty flag. One notable implementation +// is time.Time. 
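The tag flags parsed by getStructInfo above (omitempty, flow, inline) are the same ones documented for Marshal and Unmarshal earlier in yaml.go. A minimal usage sketch, assuming illustrative type and field names; the commented output is the expected shape rather than captured output:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// Extra is inlined into Config below, so its keys appear at the top level.
// Both type names are hypothetical examples.
type Extra struct {
	Region string `yaml:"region,omitempty"`
}

type Config struct {
	Name  string            `yaml:"name"`
	Ports []int             `yaml:"ports,flow"`      // emitted in flow style: [80, 443]
	Debug bool              `yaml:"debug,omitempty"` // omitted while it holds the zero value
	Extra Extra             `yaml:",inline"`         // struct fields merged into the parent mapping
	Meta  map[string]string `yaml:",inline"`         // map keys merged into the parent mapping
}

func main() {
	c := Config{
		Name:  "api",
		Ports: []int{80, 443},
		Extra: Extra{Region: "eu-west-1"},
		Meta:  map[string]string{"team": "core"},
	}

	out, err := yaml.Marshal(&c)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
	// Expected shape (inlined map keys are appended after the struct fields):
	// name: api
	// ports: [80, 443]
	// region: eu-west-1
	// team: core
}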
+type IsZeroer interface { + IsZero() bool +} + +func isZero(v reflect.Value) bool { + kind := v.Kind() + if z, ok := v.Interface().(IsZeroer); ok { + if (kind == reflect.Ptr || kind == reflect.Interface) && v.IsNil() { + return true + } + return z.IsZero() + } + switch kind { + case reflect.String: + return len(v.String()) == 0 + case reflect.Interface, reflect.Ptr: + return v.IsNil() + case reflect.Slice: + return v.Len() == 0 + case reflect.Map: + return v.Len() == 0 + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Bool: + return !v.Bool() + case reflect.Struct: + vt := v.Type() + for i := v.NumField() - 1; i >= 0; i-- { + if vt.Field(i).PkgPath != "" { + continue // Private field + } + if !isZero(v.Field(i)) { + return false + } + } + return true + } + return false +} + +// FutureLineWrap globally disables line wrapping when encoding long strings. +// This is a temporary and thus deprecated method introduced to faciliate +// migration towards v3, which offers more control of line lengths on +// individual encodings, and has a default matching the behavior introduced +// by this function. +// +// The default formatting of v2 was erroneously changed in v2.3.0 and reverted +// in v2.4.0, at which point this function was introduced to help migration. +func FutureLineWrap() { + disableLineWrapping = true +} diff --git a/vendor/gopkg.in/yaml.v2/yamlh.go b/vendor/gopkg.in/yaml.v2/yamlh.go new file mode 100644 index 0000000..f6a9c8e --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/yamlh.go @@ -0,0 +1,739 @@ +package yaml + +import ( + "fmt" + "io" +) + +// The version directive data. +type yaml_version_directive_t struct { + major int8 // The major version number. + minor int8 // The minor version number. +} + +// The tag directive data. +type yaml_tag_directive_t struct { + handle []byte // The tag handle. + prefix []byte // The tag prefix. +} + +type yaml_encoding_t int + +// The stream encoding. +const ( + // Let the parser choose the encoding. + yaml_ANY_ENCODING yaml_encoding_t = iota + + yaml_UTF8_ENCODING // The default UTF-8 encoding. + yaml_UTF16LE_ENCODING // The UTF-16-LE encoding with BOM. + yaml_UTF16BE_ENCODING // The UTF-16-BE encoding with BOM. +) + +type yaml_break_t int + +// Line break types. +const ( + // Let the parser choose the break type. + yaml_ANY_BREAK yaml_break_t = iota + + yaml_CR_BREAK // Use CR for line breaks (Mac style). + yaml_LN_BREAK // Use LN for line breaks (Unix style). + yaml_CRLN_BREAK // Use CR LN for line breaks (DOS style). +) + +type yaml_error_type_t int + +// Many bad things could happen with the parser and emitter. +const ( + // No error is produced. + yaml_NO_ERROR yaml_error_type_t = iota + + yaml_MEMORY_ERROR // Cannot allocate or reallocate a block of memory. + yaml_READER_ERROR // Cannot read or decode the input stream. + yaml_SCANNER_ERROR // Cannot scan the input stream. + yaml_PARSER_ERROR // Cannot parse the input stream. + yaml_COMPOSER_ERROR // Cannot compose a YAML document. + yaml_WRITER_ERROR // Cannot write to the output stream. + yaml_EMITTER_ERROR // Cannot emit a YAML stream. +) + +// The pointer position. +type yaml_mark_t struct { + index int // The position index. + line int // The position line. + column int // The position column. 
+} + +// Node Styles + +type yaml_style_t int8 + +type yaml_scalar_style_t yaml_style_t + +// Scalar styles. +const ( + // Let the emitter choose the style. + yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = iota + + yaml_PLAIN_SCALAR_STYLE // The plain scalar style. + yaml_SINGLE_QUOTED_SCALAR_STYLE // The single-quoted scalar style. + yaml_DOUBLE_QUOTED_SCALAR_STYLE // The double-quoted scalar style. + yaml_LITERAL_SCALAR_STYLE // The literal scalar style. + yaml_FOLDED_SCALAR_STYLE // The folded scalar style. +) + +type yaml_sequence_style_t yaml_style_t + +// Sequence styles. +const ( + // Let the emitter choose the style. + yaml_ANY_SEQUENCE_STYLE yaml_sequence_style_t = iota + + yaml_BLOCK_SEQUENCE_STYLE // The block sequence style. + yaml_FLOW_SEQUENCE_STYLE // The flow sequence style. +) + +type yaml_mapping_style_t yaml_style_t + +// Mapping styles. +const ( + // Let the emitter choose the style. + yaml_ANY_MAPPING_STYLE yaml_mapping_style_t = iota + + yaml_BLOCK_MAPPING_STYLE // The block mapping style. + yaml_FLOW_MAPPING_STYLE // The flow mapping style. +) + +// Tokens + +type yaml_token_type_t int + +// Token types. +const ( + // An empty token. + yaml_NO_TOKEN yaml_token_type_t = iota + + yaml_STREAM_START_TOKEN // A STREAM-START token. + yaml_STREAM_END_TOKEN // A STREAM-END token. + + yaml_VERSION_DIRECTIVE_TOKEN // A VERSION-DIRECTIVE token. + yaml_TAG_DIRECTIVE_TOKEN // A TAG-DIRECTIVE token. + yaml_DOCUMENT_START_TOKEN // A DOCUMENT-START token. + yaml_DOCUMENT_END_TOKEN // A DOCUMENT-END token. + + yaml_BLOCK_SEQUENCE_START_TOKEN // A BLOCK-SEQUENCE-START token. + yaml_BLOCK_MAPPING_START_TOKEN // A BLOCK-SEQUENCE-END token. + yaml_BLOCK_END_TOKEN // A BLOCK-END token. + + yaml_FLOW_SEQUENCE_START_TOKEN // A FLOW-SEQUENCE-START token. + yaml_FLOW_SEQUENCE_END_TOKEN // A FLOW-SEQUENCE-END token. + yaml_FLOW_MAPPING_START_TOKEN // A FLOW-MAPPING-START token. + yaml_FLOW_MAPPING_END_TOKEN // A FLOW-MAPPING-END token. + + yaml_BLOCK_ENTRY_TOKEN // A BLOCK-ENTRY token. + yaml_FLOW_ENTRY_TOKEN // A FLOW-ENTRY token. + yaml_KEY_TOKEN // A KEY token. + yaml_VALUE_TOKEN // A VALUE token. + + yaml_ALIAS_TOKEN // An ALIAS token. + yaml_ANCHOR_TOKEN // An ANCHOR token. + yaml_TAG_TOKEN // A TAG token. + yaml_SCALAR_TOKEN // A SCALAR token. 
+) + +func (tt yaml_token_type_t) String() string { + switch tt { + case yaml_NO_TOKEN: + return "yaml_NO_TOKEN" + case yaml_STREAM_START_TOKEN: + return "yaml_STREAM_START_TOKEN" + case yaml_STREAM_END_TOKEN: + return "yaml_STREAM_END_TOKEN" + case yaml_VERSION_DIRECTIVE_TOKEN: + return "yaml_VERSION_DIRECTIVE_TOKEN" + case yaml_TAG_DIRECTIVE_TOKEN: + return "yaml_TAG_DIRECTIVE_TOKEN" + case yaml_DOCUMENT_START_TOKEN: + return "yaml_DOCUMENT_START_TOKEN" + case yaml_DOCUMENT_END_TOKEN: + return "yaml_DOCUMENT_END_TOKEN" + case yaml_BLOCK_SEQUENCE_START_TOKEN: + return "yaml_BLOCK_SEQUENCE_START_TOKEN" + case yaml_BLOCK_MAPPING_START_TOKEN: + return "yaml_BLOCK_MAPPING_START_TOKEN" + case yaml_BLOCK_END_TOKEN: + return "yaml_BLOCK_END_TOKEN" + case yaml_FLOW_SEQUENCE_START_TOKEN: + return "yaml_FLOW_SEQUENCE_START_TOKEN" + case yaml_FLOW_SEQUENCE_END_TOKEN: + return "yaml_FLOW_SEQUENCE_END_TOKEN" + case yaml_FLOW_MAPPING_START_TOKEN: + return "yaml_FLOW_MAPPING_START_TOKEN" + case yaml_FLOW_MAPPING_END_TOKEN: + return "yaml_FLOW_MAPPING_END_TOKEN" + case yaml_BLOCK_ENTRY_TOKEN: + return "yaml_BLOCK_ENTRY_TOKEN" + case yaml_FLOW_ENTRY_TOKEN: + return "yaml_FLOW_ENTRY_TOKEN" + case yaml_KEY_TOKEN: + return "yaml_KEY_TOKEN" + case yaml_VALUE_TOKEN: + return "yaml_VALUE_TOKEN" + case yaml_ALIAS_TOKEN: + return "yaml_ALIAS_TOKEN" + case yaml_ANCHOR_TOKEN: + return "yaml_ANCHOR_TOKEN" + case yaml_TAG_TOKEN: + return "yaml_TAG_TOKEN" + case yaml_SCALAR_TOKEN: + return "yaml_SCALAR_TOKEN" + } + return "" +} + +// The token structure. +type yaml_token_t struct { + // The token type. + typ yaml_token_type_t + + // The start/end of the token. + start_mark, end_mark yaml_mark_t + + // The stream encoding (for yaml_STREAM_START_TOKEN). + encoding yaml_encoding_t + + // The alias/anchor/scalar value or tag/tag directive handle + // (for yaml_ALIAS_TOKEN, yaml_ANCHOR_TOKEN, yaml_SCALAR_TOKEN, yaml_TAG_TOKEN, yaml_TAG_DIRECTIVE_TOKEN). + value []byte + + // The tag suffix (for yaml_TAG_TOKEN). + suffix []byte + + // The tag directive prefix (for yaml_TAG_DIRECTIVE_TOKEN). + prefix []byte + + // The scalar style (for yaml_SCALAR_TOKEN). + style yaml_scalar_style_t + + // The version directive major/minor (for yaml_VERSION_DIRECTIVE_TOKEN). + major, minor int8 +} + +// Events + +type yaml_event_type_t int8 + +// Event types. +const ( + // An empty event. + yaml_NO_EVENT yaml_event_type_t = iota + + yaml_STREAM_START_EVENT // A STREAM-START event. + yaml_STREAM_END_EVENT // A STREAM-END event. + yaml_DOCUMENT_START_EVENT // A DOCUMENT-START event. + yaml_DOCUMENT_END_EVENT // A DOCUMENT-END event. + yaml_ALIAS_EVENT // An ALIAS event. + yaml_SCALAR_EVENT // A SCALAR event. + yaml_SEQUENCE_START_EVENT // A SEQUENCE-START event. + yaml_SEQUENCE_END_EVENT // A SEQUENCE-END event. + yaml_MAPPING_START_EVENT // A MAPPING-START event. + yaml_MAPPING_END_EVENT // A MAPPING-END event. 
+) + +var eventStrings = []string{ + yaml_NO_EVENT: "none", + yaml_STREAM_START_EVENT: "stream start", + yaml_STREAM_END_EVENT: "stream end", + yaml_DOCUMENT_START_EVENT: "document start", + yaml_DOCUMENT_END_EVENT: "document end", + yaml_ALIAS_EVENT: "alias", + yaml_SCALAR_EVENT: "scalar", + yaml_SEQUENCE_START_EVENT: "sequence start", + yaml_SEQUENCE_END_EVENT: "sequence end", + yaml_MAPPING_START_EVENT: "mapping start", + yaml_MAPPING_END_EVENT: "mapping end", +} + +func (e yaml_event_type_t) String() string { + if e < 0 || int(e) >= len(eventStrings) { + return fmt.Sprintf("unknown event %d", e) + } + return eventStrings[e] +} + +// The event structure. +type yaml_event_t struct { + + // The event type. + typ yaml_event_type_t + + // The start and end of the event. + start_mark, end_mark yaml_mark_t + + // The document encoding (for yaml_STREAM_START_EVENT). + encoding yaml_encoding_t + + // The version directive (for yaml_DOCUMENT_START_EVENT). + version_directive *yaml_version_directive_t + + // The list of tag directives (for yaml_DOCUMENT_START_EVENT). + tag_directives []yaml_tag_directive_t + + // The anchor (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_ALIAS_EVENT). + anchor []byte + + // The tag (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). + tag []byte + + // The scalar value (for yaml_SCALAR_EVENT). + value []byte + + // Is the document start/end indicator implicit, or the tag optional? + // (for yaml_DOCUMENT_START_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_SCALAR_EVENT). + implicit bool + + // Is the tag optional for any non-plain style? (for yaml_SCALAR_EVENT). + quoted_implicit bool + + // The style (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). + style yaml_style_t +} + +func (e *yaml_event_t) scalar_style() yaml_scalar_style_t { return yaml_scalar_style_t(e.style) } +func (e *yaml_event_t) sequence_style() yaml_sequence_style_t { return yaml_sequence_style_t(e.style) } +func (e *yaml_event_t) mapping_style() yaml_mapping_style_t { return yaml_mapping_style_t(e.style) } + +// Nodes + +const ( + yaml_NULL_TAG = "tag:yaml.org,2002:null" // The tag !!null with the only possible value: null. + yaml_BOOL_TAG = "tag:yaml.org,2002:bool" // The tag !!bool with the values: true and false. + yaml_STR_TAG = "tag:yaml.org,2002:str" // The tag !!str for string values. + yaml_INT_TAG = "tag:yaml.org,2002:int" // The tag !!int for integer values. + yaml_FLOAT_TAG = "tag:yaml.org,2002:float" // The tag !!float for float values. + yaml_TIMESTAMP_TAG = "tag:yaml.org,2002:timestamp" // The tag !!timestamp for date and time values. + + yaml_SEQ_TAG = "tag:yaml.org,2002:seq" // The tag !!seq is used to denote sequences. + yaml_MAP_TAG = "tag:yaml.org,2002:map" // The tag !!map is used to denote mapping. + + // Not in original libyaml. + yaml_BINARY_TAG = "tag:yaml.org,2002:binary" + yaml_MERGE_TAG = "tag:yaml.org,2002:merge" + + yaml_DEFAULT_SCALAR_TAG = yaml_STR_TAG // The default scalar tag is !!str. + yaml_DEFAULT_SEQUENCE_TAG = yaml_SEQ_TAG // The default sequence tag is !!seq. + yaml_DEFAULT_MAPPING_TAG = yaml_MAP_TAG // The default mapping tag is !!map. +) + +type yaml_node_type_t int + +// Node types. +const ( + // An empty node. + yaml_NO_NODE yaml_node_type_t = iota + + yaml_SCALAR_NODE // A scalar node. + yaml_SEQUENCE_NODE // A sequence node. + yaml_MAPPING_NODE // A mapping node. +) + +// An element of a sequence node. 
+type yaml_node_item_t int + +// An element of a mapping node. +type yaml_node_pair_t struct { + key int // The key of the element. + value int // The value of the element. +} + +// The node structure. +type yaml_node_t struct { + typ yaml_node_type_t // The node type. + tag []byte // The node tag. + + // The node data. + + // The scalar parameters (for yaml_SCALAR_NODE). + scalar struct { + value []byte // The scalar value. + length int // The length of the scalar value. + style yaml_scalar_style_t // The scalar style. + } + + // The sequence parameters (for YAML_SEQUENCE_NODE). + sequence struct { + items_data []yaml_node_item_t // The stack of sequence items. + style yaml_sequence_style_t // The sequence style. + } + + // The mapping parameters (for yaml_MAPPING_NODE). + mapping struct { + pairs_data []yaml_node_pair_t // The stack of mapping pairs (key, value). + pairs_start *yaml_node_pair_t // The beginning of the stack. + pairs_end *yaml_node_pair_t // The end of the stack. + pairs_top *yaml_node_pair_t // The top of the stack. + style yaml_mapping_style_t // The mapping style. + } + + start_mark yaml_mark_t // The beginning of the node. + end_mark yaml_mark_t // The end of the node. + +} + +// The document structure. +type yaml_document_t struct { + + // The document nodes. + nodes []yaml_node_t + + // The version directive. + version_directive *yaml_version_directive_t + + // The list of tag directives. + tag_directives_data []yaml_tag_directive_t + tag_directives_start int // The beginning of the tag directives list. + tag_directives_end int // The end of the tag directives list. + + start_implicit int // Is the document start indicator implicit? + end_implicit int // Is the document end indicator implicit? + + // The start/end of the document. + start_mark, end_mark yaml_mark_t +} + +// The prototype of a read handler. +// +// The read handler is called when the parser needs to read more bytes from the +// source. The handler should write not more than size bytes to the buffer. +// The number of written bytes should be set to the size_read variable. +// +// [in,out] data A pointer to an application data specified by +// yaml_parser_set_input(). +// [out] buffer The buffer to write the data from the source. +// [in] size The size of the buffer. +// [out] size_read The actual number of bytes read from the source. +// +// On success, the handler should return 1. If the handler failed, +// the returned value should be 0. On EOF, the handler should set the +// size_read to 0 and return 1. +type yaml_read_handler_t func(parser *yaml_parser_t, buffer []byte) (n int, err error) + +// This structure holds information about a potential simple key. +type yaml_simple_key_t struct { + possible bool // Is a simple key possible? + required bool // Is a simple key required? + token_number int // The number of the token. + mark yaml_mark_t // The position mark. +} + +// The states of the parser. +type yaml_parser_state_t int + +const ( + yaml_PARSE_STREAM_START_STATE yaml_parser_state_t = iota + + yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE // Expect the beginning of an implicit document. + yaml_PARSE_DOCUMENT_START_STATE // Expect DOCUMENT-START. + yaml_PARSE_DOCUMENT_CONTENT_STATE // Expect the content of a document. + yaml_PARSE_DOCUMENT_END_STATE // Expect DOCUMENT-END. + yaml_PARSE_BLOCK_NODE_STATE // Expect a block node. + yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE // Expect a block node or indentless sequence. + yaml_PARSE_FLOW_NODE_STATE // Expect a flow node. 
+ yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a block sequence. + yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE // Expect an entry of a block sequence. + yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE // Expect an entry of an indentless sequence. + yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping. + yaml_PARSE_BLOCK_MAPPING_KEY_STATE // Expect a block mapping key. + yaml_PARSE_BLOCK_MAPPING_VALUE_STATE // Expect a block mapping value. + yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a flow sequence. + yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE // Expect an entry of a flow sequence. + yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE // Expect a key of an ordered mapping. + yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE // Expect a value of an ordered mapping. + yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE // Expect the and of an ordered mapping entry. + yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping. + yaml_PARSE_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping. + yaml_PARSE_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping. + yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE // Expect an empty value of a flow mapping. + yaml_PARSE_END_STATE // Expect nothing. +) + +func (ps yaml_parser_state_t) String() string { + switch ps { + case yaml_PARSE_STREAM_START_STATE: + return "yaml_PARSE_STREAM_START_STATE" + case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE: + return "yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE" + case yaml_PARSE_DOCUMENT_START_STATE: + return "yaml_PARSE_DOCUMENT_START_STATE" + case yaml_PARSE_DOCUMENT_CONTENT_STATE: + return "yaml_PARSE_DOCUMENT_CONTENT_STATE" + case yaml_PARSE_DOCUMENT_END_STATE: + return "yaml_PARSE_DOCUMENT_END_STATE" + case yaml_PARSE_BLOCK_NODE_STATE: + return "yaml_PARSE_BLOCK_NODE_STATE" + case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: + return "yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE" + case yaml_PARSE_FLOW_NODE_STATE: + return "yaml_PARSE_FLOW_NODE_STATE" + case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: + return "yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE" + case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: + return "yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE" + case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: + return "yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE" + case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: + return "yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE" + case yaml_PARSE_BLOCK_MAPPING_KEY_STATE: + return "yaml_PARSE_BLOCK_MAPPING_KEY_STATE" + case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE: + return "yaml_PARSE_BLOCK_MAPPING_VALUE_STATE" + case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: + return "yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE" + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE: + return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE" + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: + return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE" + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: + return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE" + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: + return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE" + case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: + return "yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE" + case yaml_PARSE_FLOW_MAPPING_KEY_STATE: + return "yaml_PARSE_FLOW_MAPPING_KEY_STATE" + case yaml_PARSE_FLOW_MAPPING_VALUE_STATE: + return "yaml_PARSE_FLOW_MAPPING_VALUE_STATE" + case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: + return 
"yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE" + case yaml_PARSE_END_STATE: + return "yaml_PARSE_END_STATE" + } + return "" +} + +// This structure holds aliases data. +type yaml_alias_data_t struct { + anchor []byte // The anchor. + index int // The node id. + mark yaml_mark_t // The anchor mark. +} + +// The parser structure. +// +// All members are internal. Manage the structure using the +// yaml_parser_ family of functions. +type yaml_parser_t struct { + + // Error handling + + error yaml_error_type_t // Error type. + + problem string // Error description. + + // The byte about which the problem occurred. + problem_offset int + problem_value int + problem_mark yaml_mark_t + + // The error context. + context string + context_mark yaml_mark_t + + // Reader stuff + + read_handler yaml_read_handler_t // Read handler. + + input_reader io.Reader // File input data. + input []byte // String input data. + input_pos int + + eof bool // EOF flag + + buffer []byte // The working buffer. + buffer_pos int // The current position of the buffer. + + unread int // The number of unread characters in the buffer. + + raw_buffer []byte // The raw buffer. + raw_buffer_pos int // The current position of the buffer. + + encoding yaml_encoding_t // The input encoding. + + offset int // The offset of the current position (in bytes). + mark yaml_mark_t // The mark of the current position. + + // Scanner stuff + + stream_start_produced bool // Have we started to scan the input stream? + stream_end_produced bool // Have we reached the end of the input stream? + + flow_level int // The number of unclosed '[' and '{' indicators. + + tokens []yaml_token_t // The tokens queue. + tokens_head int // The head of the tokens queue. + tokens_parsed int // The number of tokens fetched from the queue. + token_available bool // Does the tokens queue contain a token ready for dequeueing. + + indent int // The current indentation level. + indents []int // The indentation levels stack. + + simple_key_allowed bool // May a simple key occur at the current position? + simple_keys []yaml_simple_key_t // The stack of simple keys. + simple_keys_by_tok map[int]int // possible simple_key indexes indexed by token_number + + // Parser stuff + + state yaml_parser_state_t // The current parser state. + states []yaml_parser_state_t // The parser states stack. + marks []yaml_mark_t // The stack of marks. + tag_directives []yaml_tag_directive_t // The list of TAG directives. + + // Dumper stuff + + aliases []yaml_alias_data_t // The alias data. + + document *yaml_document_t // The currently parsed document. +} + +// Emitter Definitions + +// The prototype of a write handler. +// +// The write handler is called when the emitter needs to flush the accumulated +// characters to the output. The handler should write @a size bytes of the +// @a buffer to the output. +// +// @param[in,out] data A pointer to an application data specified by +// yaml_emitter_set_output(). +// @param[in] buffer The buffer with bytes to be written. +// @param[in] size The size of the buffer. +// +// @returns On success, the handler should return @c 1. If the handler failed, +// the returned value should be @c 0. +// +type yaml_write_handler_t func(emitter *yaml_emitter_t, buffer []byte) error + +type yaml_emitter_state_t int + +// The emitter states. +const ( + // Expect STREAM-START. + yaml_EMIT_STREAM_START_STATE yaml_emitter_state_t = iota + + yaml_EMIT_FIRST_DOCUMENT_START_STATE // Expect the first DOCUMENT-START or STREAM-END. 
+ yaml_EMIT_DOCUMENT_START_STATE // Expect DOCUMENT-START or STREAM-END. + yaml_EMIT_DOCUMENT_CONTENT_STATE // Expect the content of a document. + yaml_EMIT_DOCUMENT_END_STATE // Expect DOCUMENT-END. + yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a flow sequence. + yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE // Expect an item of a flow sequence. + yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping. + yaml_EMIT_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping. + yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a flow mapping. + yaml_EMIT_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping. + yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a block sequence. + yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE // Expect an item of a block sequence. + yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping. + yaml_EMIT_BLOCK_MAPPING_KEY_STATE // Expect the key of a block mapping. + yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a block mapping. + yaml_EMIT_BLOCK_MAPPING_VALUE_STATE // Expect a value of a block mapping. + yaml_EMIT_END_STATE // Expect nothing. +) + +// The emitter structure. +// +// All members are internal. Manage the structure using the @c yaml_emitter_ +// family of functions. +type yaml_emitter_t struct { + + // Error handling + + error yaml_error_type_t // Error type. + problem string // Error description. + + // Writer stuff + + write_handler yaml_write_handler_t // Write handler. + + output_buffer *[]byte // String output data. + output_writer io.Writer // File output data. + + buffer []byte // The working buffer. + buffer_pos int // The current position of the buffer. + + raw_buffer []byte // The raw buffer. + raw_buffer_pos int // The current position of the buffer. + + encoding yaml_encoding_t // The stream encoding. + + // Emitter stuff + + canonical bool // If the output is in the canonical style? + best_indent int // The number of indentation spaces. + best_width int // The preferred width of the output lines. + unicode bool // Allow unescaped non-ASCII characters? + line_break yaml_break_t // The preferred line break. + + state yaml_emitter_state_t // The current emitter state. + states []yaml_emitter_state_t // The stack of states. + + events []yaml_event_t // The event queue. + events_head int // The head of the event queue. + + indents []int // The stack of indentation levels. + + tag_directives []yaml_tag_directive_t // The list of tag directives. + + indent int // The current indentation level. + + flow_level int // The current flow level. + + root_context bool // Is it the document root context? + sequence_context bool // Is it a sequence context? + mapping_context bool // Is it a mapping context? + simple_key_context bool // Is it a simple mapping key context? + + line int // The current line. + column int // The current column. + whitespace bool // If the last character was a whitespace? + indention bool // If the last character was an indentation character (' ', '-', '?', ':')? + open_ended bool // If an explicit document end is required? + + // Anchor analysis. + anchor_data struct { + anchor []byte // The anchor value. + alias bool // Is it an alias? + } + + // Tag analysis. + tag_data struct { + handle []byte // The tag handle. + suffix []byte // The tag suffix. + } + + // Scalar analysis. + scalar_data struct { + value []byte // The scalar value. 
+ multiline bool // Does the scalar contain line breaks? + flow_plain_allowed bool // Can the scalar be expessed in the flow plain style? + block_plain_allowed bool // Can the scalar be expressed in the block plain style? + single_quoted_allowed bool // Can the scalar be expressed in the single quoted style? + block_allowed bool // Can the scalar be expressed in the literal or folded styles? + style yaml_scalar_style_t // The output style. + } + + // Dumper stuff + + opened bool // If the stream was already opened? + closed bool // If the stream was already closed? + + // The information associated with the document nodes. + anchors *struct { + references int // The number of references. + anchor int // The anchor id. + serialized bool // If the node has been emitted? + } + + last_anchor_id int // The last assigned anchor id. + + document *yaml_document_t // The currently emitted document. +} diff --git a/vendor/gopkg.in/yaml.v2/yamlprivateh.go b/vendor/gopkg.in/yaml.v2/yamlprivateh.go new file mode 100644 index 0000000..8110ce3 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/yamlprivateh.go @@ -0,0 +1,173 @@ +package yaml + +const ( + // The size of the input raw buffer. + input_raw_buffer_size = 512 + + // The size of the input buffer. + // It should be possible to decode the whole raw buffer. + input_buffer_size = input_raw_buffer_size * 3 + + // The size of the output buffer. + output_buffer_size = 128 + + // The size of the output raw buffer. + // It should be possible to encode the whole output buffer. + output_raw_buffer_size = (output_buffer_size*2 + 2) + + // The size of other stacks and queues. + initial_stack_size = 16 + initial_queue_size = 16 + initial_string_size = 16 +) + +// Check if the character at the specified position is an alphabetical +// character, a digit, '_', or '-'. +func is_alpha(b []byte, i int) bool { + return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'Z' || b[i] >= 'a' && b[i] <= 'z' || b[i] == '_' || b[i] == '-' +} + +// Check if the character at the specified position is a digit. +func is_digit(b []byte, i int) bool { + return b[i] >= '0' && b[i] <= '9' +} + +// Get the value of a digit. +func as_digit(b []byte, i int) int { + return int(b[i]) - '0' +} + +// Check if the character at the specified position is a hex-digit. +func is_hex(b []byte, i int) bool { + return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'F' || b[i] >= 'a' && b[i] <= 'f' +} + +// Get the value of a hex-digit. +func as_hex(b []byte, i int) int { + bi := b[i] + if bi >= 'A' && bi <= 'F' { + return int(bi) - 'A' + 10 + } + if bi >= 'a' && bi <= 'f' { + return int(bi) - 'a' + 10 + } + return int(bi) - '0' +} + +// Check if the character is ASCII. +func is_ascii(b []byte, i int) bool { + return b[i] <= 0x7F +} + +// Check if the character at the start of the buffer can be printed unescaped. +func is_printable(b []byte, i int) bool { + return ((b[i] == 0x0A) || // . == #x0A + (b[i] >= 0x20 && b[i] <= 0x7E) || // #x20 <= . <= #x7E + (b[i] == 0xC2 && b[i+1] >= 0xA0) || // #0xA0 <= . <= #xD7FF + (b[i] > 0xC2 && b[i] < 0xED) || + (b[i] == 0xED && b[i+1] < 0xA0) || + (b[i] == 0xEE) || + (b[i] == 0xEF && // #xE000 <= . <= #xFFFD + !(b[i+1] == 0xBB && b[i+2] == 0xBF) && // && . != #xFEFF + !(b[i+1] == 0xBF && (b[i+2] == 0xBE || b[i+2] == 0xBF)))) +} + +// Check if the character at the specified position is NUL. +func is_z(b []byte, i int) bool { + return b[i] == 0x00 +} + +// Check if the beginning of the buffer is a BOM. 
+func is_bom(b []byte, i int) bool { + return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF +} + +// Check if the character at the specified position is space. +func is_space(b []byte, i int) bool { + return b[i] == ' ' +} + +// Check if the character at the specified position is tab. +func is_tab(b []byte, i int) bool { + return b[i] == '\t' +} + +// Check if the character at the specified position is blank (space or tab). +func is_blank(b []byte, i int) bool { + //return is_space(b, i) || is_tab(b, i) + return b[i] == ' ' || b[i] == '\t' +} + +// Check if the character at the specified position is a line break. +func is_break(b []byte, i int) bool { + return (b[i] == '\r' || // CR (#xD) + b[i] == '\n' || // LF (#xA) + b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9) // PS (#x2029) +} + +func is_crlf(b []byte, i int) bool { + return b[i] == '\r' && b[i+1] == '\n' +} + +// Check if the character is a line break or NUL. +func is_breakz(b []byte, i int) bool { + //return is_break(b, i) || is_z(b, i) + return ( // is_break: + b[i] == '\r' || // CR (#xD) + b[i] == '\n' || // LF (#xA) + b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) + // is_z: + b[i] == 0) +} + +// Check if the character is a line break, space, or NUL. +func is_spacez(b []byte, i int) bool { + //return is_space(b, i) || is_breakz(b, i) + return ( // is_space: + b[i] == ' ' || + // is_breakz: + b[i] == '\r' || // CR (#xD) + b[i] == '\n' || // LF (#xA) + b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) + b[i] == 0) +} + +// Check if the character is a line break, space, tab, or NUL. +func is_blankz(b []byte, i int) bool { + //return is_blank(b, i) || is_breakz(b, i) + return ( // is_blank: + b[i] == ' ' || b[i] == '\t' || + // is_breakz: + b[i] == '\r' || // CR (#xD) + b[i] == '\n' || // LF (#xA) + b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) + b[i] == 0) +} + +// Determine the width of the character. +func width(b byte) int { + // Don't replace these by a switch without first + // confirming that it is being inlined. + if b&0x80 == 0x00 { + return 1 + } + if b&0xE0 == 0xC0 { + return 2 + } + if b&0xF0 == 0xE0 { + return 3 + } + if b&0xF8 == 0xF0 { + return 4 + } + return 0 + +} diff --git a/vendor/gopkg.in/yaml.v3/emitterc.go b/vendor/gopkg.in/yaml.v3/emitterc.go index dde20e5..0f47c9c 100644 --- a/vendor/gopkg.in/yaml.v3/emitterc.go +++ b/vendor/gopkg.in/yaml.v3/emitterc.go @@ -162,9 +162,10 @@ func yaml_emitter_emit(emitter *yaml_emitter_t, event *yaml_event_t) bool { // Check if we need to accumulate more events before emitting. 
// // We accumulate extra -// - 1 event for DOCUMENT-START -// - 2 events for SEQUENCE-START -// - 3 events for MAPPING-START +// - 1 event for DOCUMENT-START +// - 2 events for SEQUENCE-START +// - 3 events for MAPPING-START +// func yaml_emitter_need_more_events(emitter *yaml_emitter_t) bool { if emitter.events_head == len(emitter.events) { return true @@ -240,7 +241,7 @@ func yaml_emitter_increase_indent(emitter *yaml_emitter_t, flow, indentless bool emitter.indent += 2 } else { // Everything else aligns to the chosen indentation. - emitter.indent = emitter.best_indent * ((emitter.indent + emitter.best_indent) / emitter.best_indent) + emitter.indent = emitter.best_indent*((emitter.indent+emitter.best_indent)/emitter.best_indent) } } return true diff --git a/vendor/gopkg.in/yaml.v3/scannerc.go b/vendor/gopkg.in/yaml.v3/scannerc.go index 30b1f08..ca00701 100644 --- a/vendor/gopkg.in/yaml.v3/scannerc.go +++ b/vendor/gopkg.in/yaml.v3/scannerc.go @@ -1614,11 +1614,11 @@ func yaml_parser_scan_to_next_token(parser *yaml_parser_t) bool { // Scan a YAML-DIRECTIVE or TAG-DIRECTIVE token. // // Scope: +// %YAML 1.1 # a comment \n +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +// %TAG !yaml! tag:yaml.org,2002: \n +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ // -// %YAML 1.1 # a comment \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -// %TAG !yaml! tag:yaml.org,2002: \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ func yaml_parser_scan_directive(parser *yaml_parser_t, token *yaml_token_t) bool { // Eat '%'. start_mark := parser.mark @@ -1719,11 +1719,11 @@ func yaml_parser_scan_directive(parser *yaml_parser_t, token *yaml_token_t) bool // Scan the directive name. // // Scope: +// %YAML 1.1 # a comment \n +// ^^^^ +// %TAG !yaml! tag:yaml.org,2002: \n +// ^^^ // -// %YAML 1.1 # a comment \n -// ^^^^ -// %TAG !yaml! tag:yaml.org,2002: \n -// ^^^ func yaml_parser_scan_directive_name(parser *yaml_parser_t, start_mark yaml_mark_t, name *[]byte) bool { // Consume the directive name. if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { @@ -1758,9 +1758,8 @@ func yaml_parser_scan_directive_name(parser *yaml_parser_t, start_mark yaml_mark // Scan the value of VERSION-DIRECTIVE. // // Scope: -// -// %YAML 1.1 # a comment \n -// ^^^^^^ +// %YAML 1.1 # a comment \n +// ^^^^^^ func yaml_parser_scan_version_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, major, minor *int8) bool { // Eat whitespaces. if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { @@ -1798,11 +1797,10 @@ const max_number_length = 2 // Scan the version number of VERSION-DIRECTIVE. // // Scope: -// -// %YAML 1.1 # a comment \n -// ^ -// %YAML 1.1 # a comment \n -// ^ +// %YAML 1.1 # a comment \n +// ^ +// %YAML 1.1 # a comment \n +// ^ func yaml_parser_scan_version_directive_number(parser *yaml_parser_t, start_mark yaml_mark_t, number *int8) bool { // Repeat while the next character is digit. @@ -1836,9 +1834,9 @@ func yaml_parser_scan_version_directive_number(parser *yaml_parser_t, start_mark // Scan the value of a TAG-DIRECTIVE token. // // Scope: +// %TAG !yaml! tag:yaml.org,2002: \n +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ // -// %TAG !yaml! 
tag:yaml.org,2002: \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ func yaml_parser_scan_tag_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, handle, prefix *[]byte) bool { var handle_value, prefix_value []byte @@ -2849,7 +2847,7 @@ func yaml_parser_scan_line_comment(parser *yaml_parser_t, token_mark yaml_mark_t continue } if parser.buffer[parser.buffer_pos+peek] == '#' { - seen := parser.mark.index + peek + seen := parser.mark.index+peek for { if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { return false @@ -2878,7 +2876,7 @@ func yaml_parser_scan_line_comment(parser *yaml_parser_t, token_mark yaml_mark_t parser.comments = append(parser.comments, yaml_comment_t{ token_mark: token_mark, start_mark: start_mark, - line: text, + line: text, }) } return true @@ -2912,7 +2910,7 @@ func yaml_parser_scan_comments(parser *yaml_parser_t, scan_mark yaml_mark_t) boo // the foot is the line below it. var foot_line = -1 if scan_mark.line > 0 { - foot_line = parser.mark.line - parser.newlines + 1 + foot_line = parser.mark.line-parser.newlines+1 if parser.newlines == 0 && parser.mark.column > 1 { foot_line++ } @@ -2998,7 +2996,7 @@ func yaml_parser_scan_comments(parser *yaml_parser_t, scan_mark yaml_mark_t) boo recent_empty = false // Consume until after the consumed comment line. - seen := parser.mark.index + peek + seen := parser.mark.index+peek for { if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { return false diff --git a/vendor/gopkg.in/yaml.v3/yaml.go b/vendor/gopkg.in/yaml.v3/yaml.go index f0bedf3..8cec6da 100644 --- a/vendor/gopkg.in/yaml.v3/yaml.go +++ b/vendor/gopkg.in/yaml.v3/yaml.go @@ -17,7 +17,8 @@ // // Source code and other details for the project are available at GitHub: // -// https://github.com/go-yaml/yaml +// https://github.com/go-yaml/yaml +// package yaml import ( @@ -74,15 +75,16 @@ type Marshaler interface { // // For example: // -// type T struct { -// F int `yaml:"a,omitempty"` -// B int -// } -// var t T -// yaml.Unmarshal([]byte("a: 1\nb: 2"), &t) +// type T struct { +// F int `yaml:"a,omitempty"` +// B int +// } +// var t T +// yaml.Unmarshal([]byte("a: 1\nb: 2"), &t) // // See the documentation of Marshal for the format of tags and a list of // supported tag options. +// func Unmarshal(in []byte, out interface{}) (err error) { return unmarshal(in, out, false) } @@ -183,35 +185,36 @@ func unmarshal(in []byte, out interface{}, strict bool) (err error) { // // The field tag format accepted is: // -// `(...) yaml:"[][,[,]]" (...)` +// `(...) yaml:"[][,[,]]" (...)` // // The following flags are currently supported: // -// omitempty Only include the field if it's not set to the zero -// value for the type or to empty slices or maps. -// Zero valued structs will be omitted if all their public -// fields are zero, unless they implement an IsZero -// method (see the IsZeroer interface type), in which -// case the field will be excluded if IsZero returns true. +// omitempty Only include the field if it's not set to the zero +// value for the type or to empty slices or maps. +// Zero valued structs will be omitted if all their public +// fields are zero, unless they implement an IsZero +// method (see the IsZeroer interface type), in which +// case the field will be excluded if IsZero returns true. // -// flow Marshal using a flow style (useful for structs, -// sequences and maps). +// flow Marshal using a flow style (useful for structs, +// sequences and maps). 
// -// inline Inline the field, which must be a struct or a map, -// causing all of its fields or keys to be processed as if -// they were part of the outer struct. For maps, keys must -// not conflict with the yaml keys of other struct fields. +// inline Inline the field, which must be a struct or a map, +// causing all of its fields or keys to be processed as if +// they were part of the outer struct. For maps, keys must +// not conflict with the yaml keys of other struct fields. // // In addition, if the key is "-", the field is ignored. // // For example: // -// type T struct { -// F int `yaml:"a,omitempty"` -// B int -// } -// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n" -// yaml.Marshal(&T{F: 1}} // Returns "a: 1\nb: 0\n" +// type T struct { +// F int `yaml:"a,omitempty"` +// B int +// } +// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n" +// yaml.Marshal(&T{F: 1}} // Returns "a: 1\nb: 0\n" +// func Marshal(in interface{}) (out []byte, err error) { defer handleErr(&err) e := newEncoder() @@ -355,21 +358,22 @@ const ( // // For example: // -// var person struct { -// Name string -// Address yaml.Node -// } -// err := yaml.Unmarshal(data, &person) -// +// var person struct { +// Name string +// Address yaml.Node +// } +// err := yaml.Unmarshal(data, &person) +// // Or by itself: // -// var person Node -// err := yaml.Unmarshal(data, &person) +// var person Node +// err := yaml.Unmarshal(data, &person) +// type Node struct { // Kind defines whether the node is a document, a mapping, a sequence, // a scalar value, or an alias to another node. The specific data type of // scalar nodes may be obtained via the ShortTag and LongTag methods. - Kind Kind + Kind Kind // Style allows customizing the apperance of the node in the tree. Style Style @@ -417,6 +421,7 @@ func (n *Node) IsZero() bool { n.HeadComment == "" && n.LineComment == "" && n.FootComment == "" && n.Line == 0 && n.Column == 0 } + // LongTag returns the long form of the tag that indicates the data type for // the node. If the Tag field isn't explicitly defined, one will be computed // based on the node properties. diff --git a/vendor/gopkg.in/yaml.v3/yamlh.go b/vendor/gopkg.in/yaml.v3/yamlh.go index ddcd551..7c6d007 100644 --- a/vendor/gopkg.in/yaml.v3/yamlh.go +++ b/vendor/gopkg.in/yaml.v3/yamlh.go @@ -438,9 +438,7 @@ type yaml_document_t struct { // The number of written bytes should be set to the size_read variable. // // [in,out] data A pointer to an application data specified by -// -// yaml_parser_set_input(). -// +// yaml_parser_set_input(). // [out] buffer The buffer to write the data from the source. // [in] size The size of the buffer. // [out] size_read The actual number of bytes read from the source. @@ -641,6 +639,7 @@ type yaml_parser_t struct { } type yaml_comment_t struct { + scan_mark yaml_mark_t // Position where scanning for comments started token_mark yaml_mark_t // Position after which tokens will be associated with this comment start_mark yaml_mark_t // Position of '#' comment mark @@ -660,14 +659,13 @@ type yaml_comment_t struct { // @a buffer to the output. // // @param[in,out] data A pointer to an application data specified by -// -// yaml_emitter_set_output(). -// +// yaml_emitter_set_output(). // @param[in] buffer The buffer with bytes to be written. // @param[in] size The size of the buffer. // // @returns On success, the handler should return @c 1. If the handler failed, // the returned value should be @c 0. 
+// type yaml_write_handler_t func(emitter *yaml_emitter_t, buffer []byte) error type yaml_emitter_state_t int diff --git a/vendor/modules.txt b/vendor/modules.txt index 13d6445..443e284 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -1,3 +1,6 @@ +# github.com/KyleBanks/depth v1.2.1 +## explicit +github.com/KyleBanks/depth # github.com/asaskevich/govalidator v0.0.0-20200108200545-475eaeb16496 ## explicit; go 1.12 github.com/asaskevich/govalidator @@ -20,9 +23,25 @@ github.com/fatih/structs # github.com/fsnotify/fsnotify v1.7.0 ## explicit; go 1.17 github.com/fsnotify/fsnotify +# github.com/ghodss/yaml v1.0.0 +## explicit +github.com/ghodss/yaml # github.com/go-gorp/gorp/v3 v3.1.0 ## explicit; go 1.18 github.com/go-gorp/gorp/v3 +# github.com/go-openapi/jsonpointer v0.21.0 +## explicit; go 1.20 +github.com/go-openapi/jsonpointer +# github.com/go-openapi/jsonreference v0.21.0 +## explicit; go 1.20 +github.com/go-openapi/jsonreference +github.com/go-openapi/jsonreference/internal +# github.com/go-openapi/spec v0.21.0 +## explicit; go 1.20 +github.com/go-openapi/spec +# github.com/go-openapi/swag v0.23.0 +## explicit; go 1.20 +github.com/go-openapi/swag # github.com/go-ozzo/ozzo-validation v3.6.0+incompatible ## explicit github.com/go-ozzo/ozzo-validation @@ -42,6 +61,9 @@ github.com/golang-jwt/jwt/v4 # github.com/golang-jwt/jwt/v5 v5.0.0 ## explicit; go 1.18 github.com/golang-jwt/jwt/v5 +# github.com/josharian/intern v1.0.0 +## explicit; go 1.5 +github.com/josharian/intern # github.com/kavenegar/kavenegar-go v0.0.0-20221124112814-40341057b5ca ## explicit; go 1.14 github.com/kavenegar/kavenegar-go @@ -56,7 +78,7 @@ github.com/knadh/koanf/providers/structs # github.com/labstack/echo-jwt/v4 v4.2.0 ## explicit; go 1.17 github.com/labstack/echo-jwt/v4 -# github.com/labstack/echo/v4 v4.11.4 +# github.com/labstack/echo/v4 v4.12.0 ## explicit; go 1.18 github.com/labstack/echo/v4 github.com/labstack/echo/v4/middleware @@ -65,6 +87,11 @@ github.com/labstack/echo/v4/middleware github.com/labstack/gommon/bytes github.com/labstack/gommon/color github.com/labstack/gommon/log +# github.com/mailru/easyjson v0.7.7 +## explicit; go 1.12 +github.com/mailru/easyjson/buffer +github.com/mailru/easyjson/jlexer +github.com/mailru/easyjson/jwriter # github.com/mattn/go-colorable v0.1.13 ## explicit; go 1.15 github.com/mattn/go-colorable @@ -80,8 +107,6 @@ github.com/mitchellh/mapstructure # github.com/mitchellh/reflectwalk v1.0.2 ## explicit github.com/mitchellh/reflectwalk -# github.com/oklog/ulid/v2 v2.1.0 -## explicit # github.com/pmezard/go-difflib v1.0.0 ## explicit github.com/pmezard/go-difflib/difflib @@ -99,33 +124,43 @@ github.com/redis/go-redis/v9/internal/util ## explicit; go 1.21 github.com/rubenv/sql-migrate github.com/rubenv/sql-migrate/sqlparse -# github.com/stretchr/testify v1.8.4 -## explicit; go 1.20 +# github.com/stretchr/testify v1.9.0 +## explicit; go 1.17 github.com/stretchr/testify/assert +# github.com/swaggo/echo-swagger v1.4.1 +## explicit; go 1.17 +github.com/swaggo/echo-swagger +github.com/swaggo/echo-swagger/example/docs +# github.com/swaggo/files/v2 v2.0.0 +## explicit; go 1.16 +github.com/swaggo/files/v2 +# github.com/swaggo/swag v1.16.3 +## explicit; go 1.18 +github.com/swaggo/swag # github.com/valyala/bytebufferpool v1.0.0 ## explicit github.com/valyala/bytebufferpool # github.com/valyala/fasttemplate v1.2.2 ## explicit; go 1.12 github.com/valyala/fasttemplate -# golang.org/x/crypto v0.17.0 +# golang.org/x/crypto v0.23.0 ## explicit; go 1.18 golang.org/x/crypto/acme 
golang.org/x/crypto/acme/autocert golang.org/x/crypto/bcrypt golang.org/x/crypto/blowfish -# golang.org/x/net v0.19.0 +# golang.org/x/net v0.25.0 ## explicit; go 1.18 golang.org/x/net/http/httpguts golang.org/x/net/http2 golang.org/x/net/http2/h2c golang.org/x/net/http2/hpack golang.org/x/net/idna -# golang.org/x/sys v0.15.0 +# golang.org/x/sys v0.20.0 ## explicit; go 1.18 golang.org/x/sys/unix golang.org/x/sys/windows -# golang.org/x/text v0.14.0 +# golang.org/x/text v0.15.0 ## explicit; go 1.18 golang.org/x/text/secure/bidirule golang.org/x/text/transform @@ -134,9 +169,19 @@ golang.org/x/text/unicode/norm # golang.org/x/time v0.5.0 ## explicit; go 1.18 golang.org/x/time/rate +# golang.org/x/tools v0.21.0 +## explicit; go 1.19 +golang.org/x/tools/go/ast/astutil +golang.org/x/tools/go/buildutil +golang.org/x/tools/go/internal/cgo +golang.org/x/tools/go/loader +golang.org/x/tools/internal/versions # gopkg.in/natefinch/lumberjack.v2 v2.2.1 ## explicit; go 1.13 gopkg.in/natefinch/lumberjack.v2 +# gopkg.in/yaml.v2 v2.4.0 +## explicit; go 1.15 +gopkg.in/yaml.v2 # gopkg.in/yaml.v3 v3.0.1 ## explicit gopkg.in/yaml.v3
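
Note on the newly vendored go-yaml packages above: the Marshal/Unmarshal doc comments in the gopkg.in/yaml.v3/yaml.go hunk describe the `yaml:"name[,flag...]"` struct-tag format (omitempty, flow, inline, and "-" to ignore a field). The sketch below is a minimal, self-contained illustration of that tag behaviour and is not taken from this repository; the Config type, its field names, and the sample values are hypothetical and only assume the vendored gopkg.in/yaml.v3 API (Unmarshal and Marshal) shown in the diff.

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// Config is a hypothetical example type; the fields and tags are
// illustrative only and do not come from this repository.
type Config struct {
	Host  string   `yaml:"host"`
	Port  int      `yaml:"port,omitempty"` // omitted from output when zero
	Tags  []string `yaml:"tags,flow"`      // emitted in flow style: [a, b]
	Debug bool     `yaml:"-"`              // ignored by the yaml package
}

func main() {
	in := []byte("host: localhost\nport: 8080\ntags: [db, cache]\n")

	// Decode YAML into the struct using the vendored yaml.v3 package.
	var c Config
	if err := yaml.Unmarshal(in, &c); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", c)

	// Re-encode: port is kept (non-zero) and tags come out in flow style.
	out, err := yaml.Marshal(c)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
}

If Port were left at its zero value, the omitempty flag would drop the "port:" key from the marshalled output entirely, which is the behaviour the vendored doc comments describe.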