diff --git a/.gitignore b/.gitignore
index e5a35fea..d66177eb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,6 +4,7 @@
 .idea
 .vscode
 *.log
-git.sh
-gomod.sh
-/*/*_test.go
+*.sh
+*_test.go
+/service/*_test.go
+/utils/*_test.go
diff --git a/gojobs/grpc_build.sh b/gojobs/grpc_build.sh
deleted file mode 100644
index 90ad8f73..00000000
--- a/gojobs/grpc_build.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative --grpc-gateway_out . --grpc-gateway_opt paths=source_relative ./pb/basics.proto
-
-protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative ./pb/basics.proto
-protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative ./pb/task.proto
-protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative ./pb/pubsub.proto
\ No newline at end of file
diff --git a/library_test.go b/library_test.go
deleted file mode 100644
index ab95d972..00000000
--- a/library_test.go
+++ /dev/null
@@ -1,13 +0,0 @@
-package go_library
-
-import "testing"
-
-func TestVersion(t *testing.T) {
-	t.Log(Version())
-}
-
-func BenchmarkVersion(b *testing.B) {
-	for i := 0; i < b.N; i++ {
-		Version()
-	}
-}
diff --git a/cloudflare/client.go b/service/cloudflare/client.go
similarity index 100%
rename from cloudflare/client.go
rename to service/cloudflare/client.go
diff --git a/cloudflare/const.go b/service/cloudflare/const.go
similarity index 100%
rename from cloudflare/const.go
rename to service/cloudflare/const.go
diff --git a/dingdanxia/app.go b/service/dingdanxia/app.go
similarity index 92%
rename from dingdanxia/app.go
rename to service/dingdanxia/app.go
index 2590fde4..e0209cec 100644
--- a/dingdanxia/app.go
+++ b/service/dingdanxia/app.go
@@ -1,9 +1,9 @@
 package dingdanxia
 
 import (
-	"go.dtapp.net/library/golog"
-	"go.dtapp.net/library/gomongo"
-	"go.dtapp.net/library/gorequest"
+	"go.dtapp.net/library/utils/golog"
+	"go.dtapp.net/library/utils/gomongo"
+	"go.dtapp.net/library/utils/gorequest"
 	"gorm.io/gorm"
 )
 
diff --git a/dingdanxia/jd.jy_order_details.go b/service/dingdanxia/jd.jy_order_details.go
similarity index 98%
rename from dingdanxia/jd.jy_order_details.go
rename to service/dingdanxia/jd.jy_order_details.go
index 1fb2c3af..fd67c8b4 100644
--- a/dingdanxia/jd.jy_order_details.go
+++ b/service/dingdanxia/jd.jy_order_details.go
@@ -2,7 +2,7 @@ package dingdanxia
 
 import (
 	"encoding/json"
-	"go.dtapp.net/library/gorequest"
+	"go.dtapp.net/library/utils/gorequest"
 	"net/http"
 )
 
diff --git a/dingdanxia/jd.order_details2.go b/service/dingdanxia/jd.order_details2.go
similarity index 100%
rename from dingdanxia/jd.order_details2.go
rename to service/dingdanxia/jd.order_details2.go
diff --git a/dingdanxia/params.go b/service/dingdanxia/params.go
similarity index 100%
rename from dingdanxia/params.go
rename to service/dingdanxia/params.go
diff --git a/dingdanxia/pgsql.go b/service/dingdanxia/pgsql.go
similarity index 93%
rename from dingdanxia/pgsql.go
rename to service/dingdanxia/pgsql.go
index 67289057..42e9d505 100644
--- a/dingdanxia/pgsql.go
+++ b/service/dingdanxia/pgsql.go
@@ -1,9 +1,9 @@
 package dingdanxia
 
 import (
-	"go.dtapp.net/library/gojson"
-	golog "go.dtapp.net/library/golog"
-	gorequest "go.dtapp.net/library/gorequest"
+	"go.dtapp.net/library/utils/gojson"
+	"go.dtapp.net/library/utils/golog"
+	"go.dtapp.net/library/utils/gorequest"
 	"gorm.io/datatypes"
 )
 
diff --git a/dingdanxia/waimai.meituan_orderid.go b/service/dingdanxia/waimai.meituan_orderid.go
similarity index 98%
rename from dingdanxia/waimai.meituan_orderid.go
rename to service/dingdanxia/waimai.meituan_orderid.go
index 9ff6f99b..86049930 100644
--- a/dingdanxia/waimai.meituan_orderid.go
+++ b/service/dingdanxia/waimai.meituan_orderid.go
@@ -2,7 +2,7 @@ package dingdanxia
 
 import (
 	"encoding/json"
-	"go.dtapp.net/library/gorequest"
+	"go.dtapp.net/library/utils/gorequest"
 	"net/http"
 )
 
diff --git a/dingdanxia/waimai.meituan_orders.go b/service/dingdanxia/waimai.meituan_orders.go
similarity index 98%
rename from dingdanxia/waimai.meituan_orders.go
rename to service/dingdanxia/waimai.meituan_orders.go
index 63e1a0b5..116cbdee 100644
--- a/dingdanxia/waimai.meituan_orders.go
+++ b/service/dingdanxia/waimai.meituan_orders.go
@@ -2,7 +2,7 @@ package dingdanxia
 
 import (
 	"encoding/json"
-	"go.dtapp.net/library/gorequest"
+	"go.dtapp.net/library/utils/gorequest"
 	"net/http"
 )
 
diff --git a/dingdanxia/waimai.meituan_privilege.go b/service/dingdanxia/waimai.meituan_privilege.go
similarity index 97%
rename from dingdanxia/waimai.meituan_privilege.go
rename to service/dingdanxia/waimai.meituan_privilege.go
index b4c6b871..5c1eeade 100644
--- a/dingdanxia/waimai.meituan_privilege.go
+++ b/service/dingdanxia/waimai.meituan_privilege.go
@@ -2,7 +2,7 @@ package dingdanxia
 
 import (
 	"encoding/json"
-	"go.dtapp.net/library/gorequest"
+	"go.dtapp.net/library/utils/gorequest"
 	"net/http"
 )
 
diff --git a/dingdanxia/waimai.meituan_sg_privilege.go b/service/dingdanxia/waimai.meituan_sg_privilege.go
similarity index 97%
rename from dingdanxia/waimai.meituan_sg_privilege.go
rename to service/dingdanxia/waimai.meituan_sg_privilege.go
index 78310d0e..3bc2b96e 100644
--- a/dingdanxia/waimai.meituan_sg_privilege.go
+++ b/service/dingdanxia/waimai.meituan_sg_privilege.go
@@ -2,7 +2,7 @@ package dingdanxia
 
 import (
 	"encoding/json"
-	"go.dtapp.net/library/gorequest"
+	"go.dtapp.net/library/utils/gorequest"
 	"net/http"
 )
 
diff --git a/dingtalk/app.go b/service/dingtalk/app.go
similarity index 94%
rename from dingtalk/app.go
rename to service/dingtalk/app.go
index f05f15fe..861ab112 100644
--- a/dingtalk/app.go
+++ b/service/dingtalk/app.go
@@ -1,8 +1,8 @@
 package dingtalk
 
 import (
-	"go.dtapp.net/library/golog"
-	"go.dtapp.net/library/gorequest"
+	"go.dtapp.net/library/utils/golog"
+	"go.dtapp.net/library/utils/gorequest"
 	"gorm.io/gorm"
 )
 
diff --git a/dingtalk/params.go b/service/dingtalk/params.go
similarity index 100%
rename from dingtalk/params.go
rename to service/dingtalk/params.go
diff --git a/dingtalk/pgsql.go b/service/dingtalk/pgsql.go
similarity index 93%
rename from dingtalk/pgsql.go
rename to service/dingtalk/pgsql.go
index 89b43d9b..7ab9147e 100644
--- a/dingtalk/pgsql.go
+++ b/service/dingtalk/pgsql.go
@@ -1,9 +1,9 @@
 package dingtalk
 
 import (
-	"go.dtapp.net/library/gojson"
-	golog "go.dtapp.net/library/golog"
-	gorequest "go.dtapp.net/library/gorequest"
+	"go.dtapp.net/library/utils/gojson"
+	"go.dtapp.net/library/utils/golog"
+	"go.dtapp.net/library/utils/gorequest"
 	"gorm.io/datatypes"
 )
 
diff --git a/dingtalk/robot.send.go b/service/dingtalk/robot.send.go
similarity index 96%
rename from dingtalk/robot.send.go
rename to service/dingtalk/robot.send.go
index e737613c..33f87aa7 100644
--- a/dingtalk/robot.send.go
+++ b/service/dingtalk/robot.send.go
@@ -3,7 +3,7 @@ package dingtalk
 import (
 	"encoding/json"
 	"fmt"
-	"go.dtapp.net/library/gorequest"
+	"go.dtapp.net/library/utils/gorequest"
 	"net/http"
 	"time"
 )
diff --git a/dingtalk/sign.go b/service/dingtalk/sign.go
similarity index 100%
rename from dingtalk/sign.go
rename to service/dingtalk/sign.go
diff --git a/douyin/api.go b/service/douyin/api.go
similarity index 99%
rename from douyin/api.go
rename to service/douyin/api.go
index def0b6ee..6dffd173 100644
--- a/douyin/api.go
+++ b/service/douyin/api.go
@@ -4,7 +4,7 @@ import (
 	"encoding/json"
 	"errors"
 	"github.com/mvdan/xurls"
-	"go.dtapp.net/library/gorequest"
+	"go.dtapp.net/library/utils/gorequest"
 	"net/http"
 	"regexp"
 	"strings"
diff --git a/douyin/app.go b/service/douyin/app.go
similarity index 96%
rename from douyin/app.go
rename to service/douyin/app.go
index ac25387f..69666757 100644
--- a/douyin/app.go
+++ b/service/douyin/app.go
@@ -2,8 +2,8 @@ package douyin
 
 import (
 	"errors"
-	"go.dtapp.net/library/golog"
-	"go.dtapp.net/library/gorequest"
+	"go.dtapp.net/library/utils/golog"
+	"go.dtapp.net/library/utils/gorequest"
 	"gorm.io/gorm"
 	"net/http"
 	"strings"
diff --git a/douyin/pgsql.go b/service/douyin/pgsql.go
similarity index 93%
rename from douyin/pgsql.go
rename to service/douyin/pgsql.go
index 5ec994d9..57f12fa3 100644
--- a/douyin/pgsql.go
+++ b/service/douyin/pgsql.go
@@ -1,9 +1,9 @@
 package douyin
 
 import (
-	"go.dtapp.net/library/gojson"
-	golog "go.dtapp.net/library/golog"
-	gorequest "go.dtapp.net/library/gorequest"
+	"go.dtapp.net/library/utils/gojson"
+	"go.dtapp.net/library/utils/golog"
+	"go.dtapp.net/library/utils/gorequest"
 	"gorm.io/datatypes"
 )
 
diff --git a/eastiot/Iot_api.query_ordered_pkgInfo.go b/service/eastiot/Iot_api.query_ordered_pkgInfo.go
similarity index 97%
rename from eastiot/Iot_api.query_ordered_pkgInfo.go
rename to service/eastiot/Iot_api.query_ordered_pkgInfo.go
index bab44ce6..ac543ed1 100644
--- a/eastiot/Iot_api.query_ordered_pkgInfo.go
+++ b/service/eastiot/Iot_api.query_ordered_pkgInfo.go
@@ -2,7 +2,7 @@ package eastiot
 
 import (
 	"encoding/json"
-	"go.dtapp.net/library/gorequest"
+	"go.dtapp.net/library/utils/gorequest"
 	"net/http"
 )
 
diff --git a/eastiot/Iot_api.query_sim_pkgInfo.go b/service/eastiot/Iot_api.query_sim_pkgInfo.go
similarity index 98%
rename from eastiot/Iot_api.query_sim_pkgInfo.go
rename to service/eastiot/Iot_api.query_sim_pkgInfo.go
index 3434cb5b..c8ba1f9a 100644
--- a/eastiot/Iot_api.query_sim_pkgInfo.go
+++ b/service/eastiot/Iot_api.query_sim_pkgInfo.go
@@ -2,7 +2,7 @@ package eastiot
 
 import (
 	"encoding/json"
-	"go.dtapp.net/library/gorequest"
+	"go.dtapp.net/library/utils/gorequest"
 	"net/http"
 )
 
diff --git a/eastiot/Iot_api.query_user_pkgInfo.go b/service/eastiot/Iot_api.query_user_pkgInfo.go
similarity index 97%
rename from eastiot/Iot_api.query_user_pkgInfo.go
rename to service/eastiot/Iot_api.query_user_pkgInfo.go
index a158c53a..6d6309a6 100644
--- a/eastiot/Iot_api.query_user_pkgInfo.go
+++ b/service/eastiot/Iot_api.query_user_pkgInfo.go
@@ -2,7 +2,7 @@ package eastiot
 
 import (
 	"encoding/json"
-	"go.dtapp.net/library/gorequest"
+	"go.dtapp.net/library/utils/gorequest"
 	"net/http"
 )
 
diff --git a/eastiot/Iot_api.recharge_sim.go b/service/eastiot/Iot_api.recharge_sim.go
similarity index 96%
rename from eastiot/Iot_api.recharge_sim.go
rename to service/eastiot/Iot_api.recharge_sim.go
index dd2373d6..872987c9 100644
--- a/eastiot/Iot_api.recharge_sim.go
+++ b/service/eastiot/Iot_api.recharge_sim.go
@@ -2,7 +2,7 @@ package eastiot
 
 import (
 	"encoding/json"
-	"go.dtapp.net/library/gorequest"
+	"go.dtapp.net/library/utils/gorequest"
 	"net/http"
 )
 
diff --git a/eastiot/api.Iot_api.get_all_sim_type.go b/service/eastiot/api.Iot_api.get_all_sim_type.go
similarity index 97% rename from eastiot/api.Iot_api.get_all_sim_type.go rename to service/eastiot/api.Iot_api.get_all_sim_type.go index 2678a869..54a50760 100644 --- a/eastiot/api.Iot_api.get_all_sim_type.go +++ b/service/eastiot/api.Iot_api.get_all_sim_type.go @@ -2,7 +2,7 @@ package eastiot import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/eastiot/app.go b/service/eastiot/app.go similarity index 92% rename from eastiot/app.go rename to service/eastiot/app.go index 9553c7a1..5d6b462c 100644 --- a/eastiot/app.go +++ b/service/eastiot/app.go @@ -1,9 +1,9 @@ package eastiot import ( - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gomongo" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gomongo" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" "time" ) diff --git a/eastiot/lot_api.query_user_balance.go b/service/eastiot/lot_api.query_user_balance.go similarity index 96% rename from eastiot/lot_api.query_user_balance.go rename to service/eastiot/lot_api.query_user_balance.go index e847fccb..b9986f8e 100644 --- a/eastiot/lot_api.query_user_balance.go +++ b/service/eastiot/lot_api.query_user_balance.go @@ -2,7 +2,7 @@ package eastiot import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/eastiot/params.go b/service/eastiot/params.go similarity index 100% rename from eastiot/params.go rename to service/eastiot/params.go diff --git a/eastiot/pgsql.go b/service/eastiot/pgsql.go similarity index 58% rename from eastiot/pgsql.go rename to service/eastiot/pgsql.go index 965b1b5c..d71cfe3a 100644 --- a/eastiot/pgsql.go +++ b/service/eastiot/pgsql.go @@ -1,19 +1,19 @@ package eastiot import ( - "go.dtapp.net/library/gojson" - golog "go.dtapp.net/library/golog" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(request gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 @@ -21,6 +21,6 @@ func (app *App) postgresqlLog(request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/eastiot/sign.go b/service/eastiot/sign.go similarity index 94% rename from eastiot/sign.go rename to service/eastiot/sign.go index f605d3eb..5c0662c9 100644 --- 
a/eastiot/sign.go +++ b/service/eastiot/sign.go @@ -3,7 +3,7 @@ package eastiot import ( "encoding/json" "fmt" - "go.dtapp.net/library/gomd5" + "go.dtapp.net/library/utils/gomd5" "sort" "strconv" ) diff --git a/ejiaofei/app.go b/service/ejiaofei/app.go similarity index 91% rename from ejiaofei/app.go rename to service/ejiaofei/app.go index 47ca10dd..17812c8c 100644 --- a/ejiaofei/app.go +++ b/service/ejiaofei/app.go @@ -2,10 +2,10 @@ package ejiaofei import ( "fmt" - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gomd5" - "go.dtapp.net/library/gomongo" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gomd5" + "go.dtapp.net/library/utils/gomongo" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" ) diff --git a/ejiaofei/check_cost.go b/service/ejiaofei/check_cost.go similarity index 97% rename from ejiaofei/check_cost.go rename to service/ejiaofei/check_cost.go index ff54c0b8..2653cb46 100644 --- a/ejiaofei/check_cost.go +++ b/service/ejiaofei/check_cost.go @@ -3,7 +3,7 @@ package ejiaofei import ( "encoding/xml" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/ejiaofei/chongzhi_jkorders.go b/service/ejiaofei/chongzhi_jkorders.go similarity index 98% rename from ejiaofei/chongzhi_jkorders.go rename to service/ejiaofei/chongzhi_jkorders.go index ebd64113..0a63928f 100644 --- a/ejiaofei/chongzhi_jkorders.go +++ b/service/ejiaofei/chongzhi_jkorders.go @@ -3,7 +3,7 @@ package ejiaofei import ( "encoding/xml" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/ejiaofei/gprs_chongzhi_advance.go b/service/ejiaofei/gprs_chongzhi_advance.go similarity index 98% rename from ejiaofei/gprs_chongzhi_advance.go rename to service/ejiaofei/gprs_chongzhi_advance.go index bc874736..5dd9a380 100644 --- a/ejiaofei/gprs_chongzhi_advance.go +++ b/service/ejiaofei/gprs_chongzhi_advance.go @@ -3,7 +3,7 @@ package ejiaofei import ( "encoding/xml" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/ejiaofei/money_jkuser.go b/service/ejiaofei/money_jkuser.go similarity index 96% rename from ejiaofei/money_jkuser.go rename to service/ejiaofei/money_jkuser.go index 32e45c6b..19b0a748 100644 --- a/ejiaofei/money_jkuser.go +++ b/service/ejiaofei/money_jkuser.go @@ -3,7 +3,7 @@ package ejiaofei import ( "encoding/xml" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/ejiaofei/params.go b/service/ejiaofei/params.go similarity index 100% rename from ejiaofei/params.go rename to service/ejiaofei/params.go diff --git a/ejiaofei/pgsql.go b/service/ejiaofei/pgsql.go similarity index 60% rename from ejiaofei/pgsql.go rename to service/ejiaofei/pgsql.go index e1d15a7f..893f3b74 100644 --- a/ejiaofei/pgsql.go +++ b/service/ejiaofei/pgsql.go @@ -1,20 +1,20 @@ package ejiaofei import ( - "go.dtapp.net/library/gojson" - golog "go.dtapp.net/library/golog" - "go.dtapp.net/library/gomongo" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gomongo" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(request 
gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 @@ -22,6 +22,6 @@ func (app *App) postgresqlLog(request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: datatypes.JSON(gojson.JsonEncodeNoError(gomongo.XmlDecodeNoError(request.ResponseBody))), //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/ejiaofei/query_jkorders.go b/service/ejiaofei/query_jkorders.go similarity index 97% rename from ejiaofei/query_jkorders.go rename to service/ejiaofei/query_jkorders.go index 71a962d9..8403d12c 100644 --- a/ejiaofei/query_jkorders.go +++ b/service/ejiaofei/query_jkorders.go @@ -3,7 +3,7 @@ package ejiaofei import ( "encoding/xml" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/ejiaofei/query_txproduct.go b/service/ejiaofei/query_txproduct.go similarity index 96% rename from ejiaofei/query_txproduct.go rename to service/ejiaofei/query_txproduct.go index c0655866..ba371a18 100644 --- a/ejiaofei/query_txproduct.go +++ b/service/ejiaofei/query_txproduct.go @@ -3,7 +3,7 @@ package ejiaofei import ( "encoding/xml" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/ejiaofei/txchongzhi.go b/service/ejiaofei/txchongzhi.go similarity index 98% rename from ejiaofei/txchongzhi.go rename to service/ejiaofei/txchongzhi.go index 70319260..66871b02 100644 --- a/ejiaofei/txchongzhi.go +++ b/service/ejiaofei/txchongzhi.go @@ -3,7 +3,7 @@ package ejiaofei import ( "encoding/xml" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/feishu/app.go b/service/feishu/app.go similarity index 93% rename from feishu/app.go rename to service/feishu/app.go index bfc860e4..03b7601d 100644 --- a/feishu/app.go +++ b/service/feishu/app.go @@ -1,8 +1,8 @@ package feishu import ( - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" ) diff --git a/feishu/params.go b/service/feishu/params.go similarity index 100% rename from feishu/params.go rename to service/feishu/params.go diff --git a/feishu/pgsql.go b/service/feishu/pgsql.go similarity index 58% rename from feishu/pgsql.go rename to service/feishu/pgsql.go index 3069e853..50aeb8a2 100644 --- a/feishu/pgsql.go +++ b/service/feishu/pgsql.go @@ -1,19 +1,19 @@ package feishu import ( - "go.dtapp.net/library/gojson" - golog "go.dtapp.net/library/golog" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) 
postgresqlLog(request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(request gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 @@ -21,6 +21,6 @@ func (app *App) postgresqlLog(request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/feishu/webhook.go b/service/feishu/webhook.go similarity index 96% rename from feishu/webhook.go rename to service/feishu/webhook.go index df71d69d..8178114b 100644 --- a/feishu/webhook.go +++ b/service/feishu/webhook.go @@ -3,7 +3,7 @@ package feishu import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type WebhookSendResponse struct { diff --git a/jd/app.go b/service/jd/app.go similarity index 95% rename from jd/app.go rename to service/jd/app.go index 3083ac8d..38575bf6 100644 --- a/jd/app.go +++ b/service/jd/app.go @@ -1,9 +1,9 @@ package jd import ( - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gorequest" - "go.dtapp.net/library/gostring" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gorequest" + "go.dtapp.net/library/utils/gostring" "gorm.io/gorm" ) diff --git a/jd/crypto.go b/service/jd/crypto.go similarity index 100% rename from jd/crypto.go rename to service/jd/crypto.go diff --git a/jd/jd.union.open.activity.query.go b/service/jd/jd.union.open.activity.query.go similarity index 98% rename from jd/jd.union.open.activity.query.go rename to service/jd/jd.union.open.activity.query.go index 2d2c8156..550dbeae 100644 --- a/jd/jd.union.open.activity.query.go +++ b/service/jd/jd.union.open.activity.query.go @@ -2,7 +2,7 @@ package jd import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type UnionOpenActivityQueryResultResponse struct { diff --git a/jd/jd.union.open.category.goods.get.go b/service/jd/jd.union.open.category.goods.get.go similarity index 98% rename from jd/jd.union.open.category.goods.get.go rename to service/jd/jd.union.open.category.goods.get.go index 50087b46..444ced5b 100644 --- a/jd/jd.union.open.category.goods.get.go +++ b/service/jd/jd.union.open.category.goods.get.go @@ -2,7 +2,7 @@ package jd import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type UnionOpenCategoryGoodsGetResultResponse struct { diff --git a/jd/jd.union.open.coupon.gift.get.go b/service/jd/jd.union.open.coupon.gift.get.go similarity index 100% rename from jd/jd.union.open.coupon.gift.get.go rename to service/jd/jd.union.open.coupon.gift.get.go diff --git 
a/jd/jd.union.open.coupon.gift.stop.go b/service/jd/jd.union.open.coupon.gift.stop.go similarity index 100% rename from jd/jd.union.open.coupon.gift.stop.go rename to service/jd/jd.union.open.coupon.gift.stop.go diff --git a/jd/jd.union.open.goods.bigfield.query.go b/service/jd/jd.union.open.goods.bigfield.query.go similarity index 98% rename from jd/jd.union.open.goods.bigfield.query.go rename to service/jd/jd.union.open.goods.bigfield.query.go index aa8c22b7..38bb4a0f 100644 --- a/jd/jd.union.open.goods.bigfield.query.go +++ b/service/jd/jd.union.open.goods.bigfield.query.go @@ -2,7 +2,7 @@ package jd import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type UnionOpenGoodsBigfieldQueryResultResponse struct { diff --git a/jd/jd.union.open.goods.jingfen.query.go b/service/jd/jd.union.open.goods.jingfen.query.go similarity index 99% rename from jd/jd.union.open.goods.jingfen.query.go rename to service/jd/jd.union.open.goods.jingfen.query.go index 709513ff..b0edca17 100644 --- a/jd/jd.union.open.goods.jingfen.query.go +++ b/service/jd/jd.union.open.goods.jingfen.query.go @@ -2,7 +2,7 @@ package jd import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type UnionOpenGoodsJIngFenQueryResultResponse struct { diff --git a/jd/jd.union.open.goods.material.query.go b/service/jd/jd.union.open.goods.material.query.go similarity index 99% rename from jd/jd.union.open.goods.material.query.go rename to service/jd/jd.union.open.goods.material.query.go index d4773d89..41fadfa8 100644 --- a/jd/jd.union.open.goods.material.query.go +++ b/service/jd/jd.union.open.goods.material.query.go @@ -2,7 +2,7 @@ package jd import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type UnionOpenGoodsMaterialQueryResultResponse struct { diff --git a/jd/jd.union.open.goods.promotiongoodsinfo.query.go b/service/jd/jd.union.open.goods.promotiongoodsinfo.query.go similarity index 98% rename from jd/jd.union.open.goods.promotiongoodsinfo.query.go rename to service/jd/jd.union.open.goods.promotiongoodsinfo.query.go index aec33b0a..d21a33d4 100644 --- a/jd/jd.union.open.goods.promotiongoodsinfo.query.go +++ b/service/jd/jd.union.open.goods.promotiongoodsinfo.query.go @@ -2,7 +2,7 @@ package jd import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type UnionOpenGoodsPromotionGoodsInfoQueryResultResponse struct { diff --git a/jd/jd.union.open.order.query.go b/service/jd/jd.union.open.order.query.go similarity index 98% rename from jd/jd.union.open.order.query.go rename to service/jd/jd.union.open.order.query.go index d1156bc4..895c9514 100644 --- a/jd/jd.union.open.order.query.go +++ b/service/jd/jd.union.open.order.query.go @@ -2,7 +2,7 @@ package jd import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type UnionOpenOrderQueryResultResponse struct { diff --git a/jd/jd.union.open.order.row.query.go b/service/jd/jd.union.open.order.row.query.go similarity index 99% rename from jd/jd.union.open.order.row.query.go rename to service/jd/jd.union.open.order.row.query.go index 833932fd..c42bf97c 100644 --- a/jd/jd.union.open.order.row.query.go +++ b/service/jd/jd.union.open.order.row.query.go @@ -2,7 +2,7 @@ package jd import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type UnionOpenOrderRowQueryResultResponse struct { diff --git 
a/jd/jd.union.open.promotion.bysubunionid.get.go b/service/jd/jd.union.open.promotion.bysubunionid.get.go similarity index 98% rename from jd/jd.union.open.promotion.bysubunionid.get.go rename to service/jd/jd.union.open.promotion.bysubunionid.get.go index 566bdda2..11295b3f 100644 --- a/jd/jd.union.open.promotion.bysubunionid.get.go +++ b/service/jd/jd.union.open.promotion.bysubunionid.get.go @@ -2,7 +2,7 @@ package jd import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type UnionOpenPromotionBySubUnionIdGetResultResponse struct { diff --git a/jd/jd.union.open.promotion.common.get.go b/service/jd/jd.union.open.promotion.common.get.go similarity index 98% rename from jd/jd.union.open.promotion.common.get.go rename to service/jd/jd.union.open.promotion.common.get.go index f46f7d46..6f45418d 100644 --- a/jd/jd.union.open.promotion.common.get.go +++ b/service/jd/jd.union.open.promotion.common.get.go @@ -2,7 +2,7 @@ package jd import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type UnionOpenPromotionCommonGetResultResponse struct { diff --git a/jd/jd.union.open.selling.goods.query.go b/service/jd/jd.union.open.selling.goods.query.go similarity index 100% rename from jd/jd.union.open.selling.goods.query.go rename to service/jd/jd.union.open.selling.goods.query.go diff --git a/jd/jd.union.open.selling.order.row.query.go b/service/jd/jd.union.open.selling.order.row.query.go similarity index 100% rename from jd/jd.union.open.selling.order.row.query.go rename to service/jd/jd.union.open.selling.order.row.query.go diff --git a/jd/jd.union.open.selling.promotion.get.go b/service/jd/jd.union.open.selling.promotion.get.go similarity index 100% rename from jd/jd.union.open.selling.promotion.get.go rename to service/jd/jd.union.open.selling.promotion.get.go diff --git a/jd/jd.union.open.statistics.giftcoupon.query.go b/service/jd/jd.union.open.statistics.giftcoupon.query.go similarity index 100% rename from jd/jd.union.open.statistics.giftcoupon.query.go rename to service/jd/jd.union.open.statistics.giftcoupon.query.go diff --git a/jd/jd.union.open.statistics.redpacket.query.go b/service/jd/jd.union.open.statistics.redpacket.query.go similarity index 100% rename from jd/jd.union.open.statistics.redpacket.query.go rename to service/jd/jd.union.open.statistics.redpacket.query.go diff --git a/jd/params.go b/service/jd/params.go similarity index 100% rename from jd/params.go rename to service/jd/params.go diff --git a/jd/pgsql.go b/service/jd/pgsql.go similarity index 64% rename from jd/pgsql.go rename to service/jd/pgsql.go index 951a0fd0..970438c3 100644 --- a/jd/pgsql.go +++ b/service/jd/pgsql.go @@ -1,18 +1,18 @@ package jd import ( - "go.dtapp.net/library/gojson" - golog "go.dtapp.net/library/golog" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(api string, request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(api string, request gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 + RequestUrl: 
gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 RequestApi: api, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 @@ -21,6 +21,6 @@ func (app *App) postgresqlLog(api string, request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/kashangwl/api.buy.go b/service/kashangwl/api.buy.go similarity index 97% rename from kashangwl/api.buy.go rename to service/kashangwl/api.buy.go index 91853560..5f43e6bb 100644 --- a/kashangwl/api.buy.go +++ b/service/kashangwl/api.buy.go @@ -2,7 +2,7 @@ package kashangwl import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type ApiBuyResponse struct { diff --git a/kashangwl/api.customer.go b/service/kashangwl/api.customer.go similarity index 96% rename from kashangwl/api.customer.go rename to service/kashangwl/api.customer.go index d7e2af71..094aafbc 100644 --- a/kashangwl/api.customer.go +++ b/service/kashangwl/api.customer.go @@ -2,7 +2,7 @@ package kashangwl import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type ApiCustomerResponse struct { diff --git a/kashangwl/api.order.go b/service/kashangwl/api.order.go similarity index 98% rename from kashangwl/api.order.go rename to service/kashangwl/api.order.go index 6a428f91..b7617c5b 100644 --- a/kashangwl/api.order.go +++ b/service/kashangwl/api.order.go @@ -2,7 +2,7 @@ package kashangwl import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type ApiOrderResponse struct { diff --git a/kashangwl/api.outer-order.go b/service/kashangwl/api.outer-order.go similarity index 98% rename from kashangwl/api.outer-order.go rename to service/kashangwl/api.outer-order.go index f7bd0470..323bc779 100644 --- a/kashangwl/api.outer-order.go +++ b/service/kashangwl/api.outer-order.go @@ -2,7 +2,7 @@ package kashangwl import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type ApiOuterOrderResponse struct { diff --git a/kashangwl/api.product.go b/service/kashangwl/api.product.go similarity index 97% rename from kashangwl/api.product.go rename to service/kashangwl/api.product.go index c8ac521f..eec53a44 100644 --- a/kashangwl/api.product.go +++ b/service/kashangwl/api.product.go @@ -2,7 +2,7 @@ package kashangwl import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type ApiProductResponse struct { diff --git a/kashangwl/api.product.recharge-params.go b/service/kashangwl/api.product.recharge-params.go similarity index 97% rename from kashangwl/api.product.recharge-params.go rename to service/kashangwl/api.product.recharge-params.go index 155e67c3..c5eeb011 100644 --- a/kashangwl/api.product.recharge-params.go +++ b/service/kashangwl/api.product.recharge-params.go @@ -2,7 +2,7 @@ package kashangwl import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type ApiProductRechargeParamsResponse struct { diff --git a/kashangwl/app.go b/service/kashangwl/app.go similarity index 95% rename from kashangwl/app.go rename to service/kashangwl/app.go index 
4aaefc65..a6e62c38 100644 --- a/kashangwl/app.go +++ b/service/kashangwl/app.go @@ -1,8 +1,8 @@ package kashangwl import ( - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" "time" ) diff --git a/kashangwl/params.go b/service/kashangwl/params.go similarity index 100% rename from kashangwl/params.go rename to service/kashangwl/params.go diff --git a/kashangwl/pgsql.go b/service/kashangwl/pgsql.go similarity index 58% rename from kashangwl/pgsql.go rename to service/kashangwl/pgsql.go index 36bcf756..dc213a67 100644 --- a/kashangwl/pgsql.go +++ b/service/kashangwl/pgsql.go @@ -1,19 +1,19 @@ package kashangwl import ( - "go.dtapp.net/library/gojson" - golog "go.dtapp.net/library/golog" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(request gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 @@ -21,6 +21,6 @@ func (app *App) postgresqlLog(request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/kashangwl/sign.go b/service/kashangwl/sign.go similarity index 96% rename from kashangwl/sign.go rename to service/kashangwl/sign.go index 47ab40dc..3d43f7c1 100644 --- a/kashangwl/sign.go +++ b/service/kashangwl/sign.go @@ -5,7 +5,7 @@ import ( "crypto/md5" "encoding/hex" "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "io" "net/url" "sort" diff --git a/kuaishou/api.go b/service/kuaishou/api.go similarity index 100% rename from kuaishou/api.go rename to service/kuaishou/api.go diff --git a/kuaishou/app.go b/service/kuaishou/app.go similarity index 87% rename from kuaishou/app.go rename to service/kuaishou/app.go index 15e8e1a2..a527c29b 100644 --- a/kuaishou/app.go +++ b/service/kuaishou/app.go @@ -1,9 +1,9 @@ package kuaishou import ( - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gomongo" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gomongo" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" ) diff --git a/kuaishou/extract_image_link.go b/service/kuaishou/extract_image_link.go similarity index 100% rename from kuaishou/extract_image_link.go rename to service/kuaishou/extract_image_link.go diff --git 
a/kuaishou/extract_video_link.go b/service/kuaishou/extract_video_link.go similarity index 100% rename from kuaishou/extract_video_link.go rename to service/kuaishou/extract_video_link.go diff --git a/kuaishou/get_video_html.go b/service/kuaishou/get_video_html.go similarity index 100% rename from kuaishou/get_video_html.go rename to service/kuaishou/get_video_html.go diff --git a/kuaishou/get_video_link.go b/service/kuaishou/get_video_link.go similarity index 100% rename from kuaishou/get_video_link.go rename to service/kuaishou/get_video_link.go diff --git a/wechatpayapiv2/mongodb.go b/service/kuaishou/mongo.go similarity index 67% rename from wechatpayapiv2/mongodb.go rename to service/kuaishou/mongo.go index 8552be8d..4f6d7e06 100644 --- a/wechatpayapiv2/mongodb.go +++ b/service/kuaishou/mongo.go @@ -1,26 +1,25 @@ -package wechatpayapiv2 +package kuaishou import ( - gomongo2 "go.dtapp.net/library/gomongo" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gomongo" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) // 日志 type mongoZap struct { - RequestTime gomongo2.BsonTime `json:"request_time" bson:"request_time"` //【请求】时间 + RequestTime gomongo.BsonTime `json:"request_time" bson:"request_time"` //【请求】时间 RequestUri string `json:"request_uri" bson:"request_uri"` //【请求】链接 RequestUrl string `json:"request_url" bson:"request_url"` //【请求】链接 RequestApi string `json:"request_api" bson:"request_api"` //【请求】接口 RequestMethod string `json:"request_method" bson:"request_method"` //【请求】方式 - RequestParams gorequest.Params `json:"request_params" bson:"request_params"` //【请求】参数 - RequestHeader gorequest.Headers `json:"request_header" bson:"request_header"` //【请求】头部 + RequestParams gorequest2.Params `json:"request_params" bson:"request_params"` //【请求】参数 + RequestHeader gorequest2.Headers `json:"request_header" bson:"request_header"` //【请求】头部 ResponseHeader http.Header `json:"response_header" bson:"response_header"` //【返回】头部 ResponseStatusCode int `json:"response_status_code" bson:"response_status_code"` //【返回】状态码 ResponseBody map[string]interface{} `json:"response_body" bson:"response_body"` //【返回】内容 - ResponseXml string `json:"response_xml" bson:"response_xml"` //【返回】内容 ResponseContentLength int64 `json:"response_content_length" bson:"response_content_length"` //【返回】大小 - ResponseTime gomongo2.BsonTime `json:"response_time" bson:"response_time"` //【返回】时间 + ResponseTime gomongo.BsonTime `json:"response_time" bson:"response_time"` //【返回】时间 } func (m *mongoZap) Database() string { @@ -28,23 +27,22 @@ func (m *mongoZap) Database() string { } func (m *mongoZap) TableName() string { - return "wechatpayapiv2" + return "kuaishou" } -func (app *App) mongoLog(request gorequest.Response) { +func (app *App) mongoLog(request gorequest2.Response) { _, _ = app.mongo.Model(&mongoZap{}).InsertOne(mongoZap{ - RequestTime: gomongo2.BsonTime(request.RequestTime), //【请求】时间 + RequestTime: gomongo.BsonTime(request.RequestTime), //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: request.RequestParams, //【请求】参数 RequestHeader: request.RequestHeader, //【请求】头部 ResponseHeader: request.ResponseHeader, //【返回】头部 ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 
- ResponseBody: gomongo2.XmlDecodeNoError(request.ResponseBody), //【返回】内容 - ResponseXml: string(request.ResponseBody), //【返回】内容 + ResponseBody: gomongo.JsonDecodeNoError(request.ResponseBody), //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: gomongo2.BsonTime(request.ResponseTime), //【返回】时间 + ResponseTime: gomongo.BsonTime(request.ResponseTime), //【返回】时间 }) } diff --git a/kuaishou/pgsql.go b/service/kuaishou/pgsql.go similarity index 58% rename from kuaishou/pgsql.go rename to service/kuaishou/pgsql.go index fcfa424f..a97fe6ab 100644 --- a/kuaishou/pgsql.go +++ b/service/kuaishou/pgsql.go @@ -1,19 +1,19 @@ package kuaishou import ( - "go.dtapp.net/library/gojson" - golog "go.dtapp.net/library/golog" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(request gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 @@ -21,6 +21,6 @@ func (app *App) postgresqlLog(request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/leshuazf/apiv2.merchant.updateAuthority.go b/service/leshuazf/apiv2.merchant.updateAuthority.go similarity index 97% rename from leshuazf/apiv2.merchant.updateAuthority.go rename to service/leshuazf/apiv2.merchant.updateAuthority.go index 635dde10..207af115 100644 --- a/leshuazf/apiv2.merchant.updateAuthority.go +++ b/service/leshuazf/apiv2.merchant.updateAuthority.go @@ -2,7 +2,7 @@ package leshuazf import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/leshuazf/app.go b/service/leshuazf/app.go similarity index 91% rename from leshuazf/app.go rename to service/leshuazf/app.go index cfa13dc1..7d582cdb 100644 --- a/leshuazf/app.go +++ b/service/leshuazf/app.go @@ -1,11 +1,11 @@ package leshuazf import ( - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gomongo" - "go.dtapp.net/library/gorandom" - "go.dtapp.net/library/gorequest" - "go.dtapp.net/library/gotime" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gomongo" + "go.dtapp.net/library/utils/gorandom" + "go.dtapp.net/library/utils/gorequest" + "go.dtapp.net/library/utils/gotime" "gorm.io/gorm" ) diff --git a/leshuazf/data.area.go b/service/leshuazf/data.area.go similarity index 96% rename from leshuazf/data.area.go rename to 
service/leshuazf/data.area.go index 3231526a..d7f413a2 100644 --- a/leshuazf/data.area.go +++ b/service/leshuazf/data.area.go @@ -2,7 +2,7 @@ package leshuazf import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/leshuazf/data.bankbranch2.go b/service/leshuazf/data.bankbranch2.go similarity index 97% rename from leshuazf/data.bankbranch2.go rename to service/leshuazf/data.bankbranch2.go index b3a4ee86..5cad0481 100644 --- a/leshuazf/data.bankbranch2.go +++ b/service/leshuazf/data.bankbranch2.go @@ -2,7 +2,7 @@ package leshuazf import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/leshuazf/data.mcc.go b/service/leshuazf/data.mcc.go similarity index 96% rename from leshuazf/data.mcc.go rename to service/leshuazf/data.mcc.go index 9cb73be7..29609b0d 100644 --- a/leshuazf/data.mcc.go +++ b/service/leshuazf/data.mcc.go @@ -2,7 +2,7 @@ package leshuazf import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/leshuazf/mongo.go b/service/leshuazf/mongo.go similarity index 64% rename from leshuazf/mongo.go rename to service/leshuazf/mongo.go index 74a0a717..a05d444d 100644 --- a/leshuazf/mongo.go +++ b/service/leshuazf/mongo.go @@ -1,25 +1,25 @@ package leshuazf import ( - gomongo2 "go.dtapp.net/library/gomongo" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gomongo" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) // 日志 type mongoZap struct { - RequestTime gomongo2.BsonTime `json:"request_time" bson:"request_time"` //【请求】时间 + RequestTime gomongo.BsonTime `json:"request_time" bson:"request_time"` //【请求】时间 RequestUri string `json:"request_uri" bson:"request_uri"` //【请求】链接 RequestUrl string `json:"request_url" bson:"request_url"` //【请求】链接 RequestApi string `json:"request_api" bson:"request_api"` //【请求】接口 RequestMethod string `json:"request_method" bson:"request_method"` //【请求】方式 - RequestParams gorequest.Params `json:"request_params" bson:"request_params"` //【请求】参数 - RequestHeader gorequest.Headers `json:"request_header" bson:"request_header"` //【请求】头部 + RequestParams gorequest2.Params `json:"request_params" bson:"request_params"` //【请求】参数 + RequestHeader gorequest2.Headers `json:"request_header" bson:"request_header"` //【请求】头部 ResponseHeader http.Header `json:"response_header" bson:"response_header"` //【返回】头部 ResponseStatusCode int `json:"response_status_code" bson:"response_status_code"` //【返回】状态码 ResponseBody map[string]interface{} `json:"response_body" bson:"response_body"` //【返回】内容 ResponseContentLength int64 `json:"response_content_length" bson:"response_content_length"` //【返回】大小 - ResponseTime gomongo2.BsonTime `json:"response_time" bson:"response_time"` //【返回】时间 + ResponseTime gomongo.BsonTime `json:"response_time" bson:"response_time"` //【返回】时间 } func (m *mongoZap) Database() string { @@ -30,19 +30,19 @@ func (m *mongoZap) TableName() string { return "leshuazf" } -func (app *App) mongoLog(request gorequest.Response) { +func (app *App) mongoLog(request gorequest2.Response) { _, _ = app.mongo.Model(&mongoZap{}).InsertOne(mongoZap{ - RequestTime: gomongo2.BsonTime(request.RequestTime), //【请求】时间 - RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 - RequestMethod: request.RequestMethod, //【请求】方式 - RequestParams: 
request.RequestParams, //【请求】参数 - RequestHeader: request.RequestHeader, //【请求】头部 - ResponseHeader: request.ResponseHeader, //【返回】头部 - ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 - ResponseBody: gomongo2.JsonDecodeNoError(request.ResponseBody), //【返回】内容 - ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: gomongo2.BsonTime(request.ResponseTime), //【返回】时间 + RequestTime: gomongo.BsonTime(request.RequestTime), //【请求】时间 + RequestUri: request.RequestUri, //【请求】链接 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 + RequestMethod: request.RequestMethod, //【请求】方式 + RequestParams: request.RequestParams, //【请求】参数 + RequestHeader: request.RequestHeader, //【请求】头部 + ResponseHeader: request.ResponseHeader, //【返回】头部 + ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 + ResponseBody: gomongo.JsonDecodeNoError(request.ResponseBody), //【返回】内容 + ResponseContentLength: request.ResponseContentLength, //【返回】大小 + ResponseTime: gomongo.BsonTime(request.ResponseTime), //【返回】时间 }) } diff --git a/leshuazf/params.go b/service/leshuazf/params.go similarity index 100% rename from leshuazf/params.go rename to service/leshuazf/params.go diff --git a/leshuazf/pgsql.go b/service/leshuazf/pgsql.go similarity index 75% rename from leshuazf/pgsql.go rename to service/leshuazf/pgsql.go index ec3c975b..5df8053c 100644 --- a/leshuazf/pgsql.go +++ b/service/leshuazf/pgsql.go @@ -1,19 +1,19 @@ package leshuazf import ( - "go.dtapp.net/library/gojson" - "go.dtapp.net/library/golog" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { +func (app *App) postgresqlLog(request gorequest2.Response) { app.log.Api.Record(golog.ApiPostgresqlLog{ RequestTime: request.RequestTime, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 diff --git a/leshuazf/picture.upload.go b/service/leshuazf/picture.upload.go similarity index 100% rename from leshuazf/picture.upload.go rename to service/leshuazf/picture.upload.go diff --git a/leshuazf/sign.go b/service/leshuazf/sign.go similarity index 85% rename from leshuazf/sign.go rename to service/leshuazf/sign.go index 1da08653..f48949f3 100644 --- a/leshuazf/sign.go +++ b/service/leshuazf/sign.go @@ -2,8 +2,8 @@ package leshuazf import ( "encoding/json" - "go.dtapp.net/library/gobase64" - "go.dtapp.net/library/gomd5" + "go.dtapp.net/library/utils/gobase64" + "go.dtapp.net/library/utils/gomd5" "sort" ) diff --git a/meituan/api.generate_link.go b/service/meituan/api.generate_link.go similarity index 87% rename from meituan/api.generate_link.go rename to service/meituan/api.generate_link.go index 85d88f03..45f95d9b 100644 --- a/meituan/api.generate_link.go +++ b/service/meituan/api.generate_link.go @@ -2,7 +2,7 @@ package meituan import ( "encoding/json" - gorequest 
"go.dtapp.net/library/gorequest" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) @@ -15,11 +15,11 @@ type ApiGenerateLinkResponse struct { type ApiGenerateLinkResult struct { Result ApiGenerateLinkResponse // 结果 Body []byte // 内容 - Http gorequest.Response // 请求 + Http gorequest2.Response // 请求 Err error // 错误 } -func NewApiGenerateLinkResult(result ApiGenerateLinkResponse, body []byte, http gorequest.Response, err error) *ApiGenerateLinkResult { +func NewApiGenerateLinkResult(result ApiGenerateLinkResponse, body []byte, http gorequest2.Response, err error) *ApiGenerateLinkResult { return &ApiGenerateLinkResult{Result: result, Body: body, Http: http, Err: err} } @@ -27,14 +27,14 @@ func NewApiGenerateLinkResult(result ApiGenerateLinkResponse, body []byte, http // https://union.meituan.com/v2/apiDetail?id=25 func (app *App) ApiGenerateLink(actId int64, sid string, linkType, shortLink int) *ApiGenerateLinkResult { // 参数 - param := gorequest.NewParams() + param := gorequest2.NewParams() param.Set("actId", actId) // 活动id,可以在联盟活动列表中查看获取 param.Set("appkey", app.appKey) // 媒体名称,可在推广者备案-媒体管理中查询 param.Set("sid", sid) // 推广位sid,支持通过接口自定义创建,不受平台200个上限限制,长度不能超过64个字符,支持小写字母和数字,历史已创建的推广位不受这个约束 param.Set("linkType", linkType) // 投放链接的类型 param.Set("shortLink", shortLink) // 获取长链还是短链 // 转换 - params := gorequest.NewParamsWith(param) + params := gorequest2.NewParamsWith(param) params["sign"] = app.getSign(app.secret, params) // 请求 request, err := app.request("https://openapi.meituan.com/api/generateLink", params, http.MethodGet) diff --git a/meituan/api.getqualityscorebysid.go b/service/meituan/api.getqualityscorebysid.go similarity index 83% rename from meituan/api.getqualityscorebysid.go rename to service/meituan/api.getqualityscorebysid.go index 81fe1c43..6a74f667 100644 --- a/meituan/api.getqualityscorebysid.go +++ b/service/meituan/api.getqualityscorebysid.go @@ -2,8 +2,8 @@ package meituan import ( "encoding/json" - gorequest "go.dtapp.net/library/gorequest" - "go.dtapp.net/library/gotime" + gorequest2 "go.dtapp.net/library/utils/gorequest" + "go.dtapp.net/library/utils/gotime" "net/http" ) @@ -24,19 +24,19 @@ type ApiGetQuaLitYsCoreBySidResponse struct { type ApiGetQuaLitYsCoreBySidResult struct { Result ApiGetQuaLitYsCoreBySidResponse // 结果 Body []byte // 内容 - Http gorequest.Response // 请求 + Http gorequest2.Response // 请求 Err error // 错误 } -func NewApiGetQuaLitYsCoreBySidResult(result ApiGetQuaLitYsCoreBySidResponse, body []byte, http gorequest.Response, err error) *ApiGetQuaLitYsCoreBySidResult { +func NewApiGetQuaLitYsCoreBySidResult(result ApiGetQuaLitYsCoreBySidResponse, body []byte, http gorequest2.Response, err error) *ApiGetQuaLitYsCoreBySidResult { return &ApiGetQuaLitYsCoreBySidResult{Result: result, Body: body, Http: http, Err: err} } // ApiGetQuaLitYsCoreBySid 优选sid质量分&复购率查询 // https://union.meituan.com/v2/apiDetail?id=28 -func (app *App) ApiGetQuaLitYsCoreBySid(notMustParams ...gorequest.Params) *ApiGetQuaLitYsCoreBySidResult { +func (app *App) ApiGetQuaLitYsCoreBySid(notMustParams ...gorequest2.Params) *ApiGetQuaLitYsCoreBySidResult { // 参数 - params := gorequest.NewParamsWith(notMustParams...) + params := gorequest2.NewParamsWith(notMustParams...) 
// 请求时刻10位时间戳(秒级),有效期60s params["ts"] = gotime.Current().Timestamp() params["appkey"] = app.appKey diff --git a/meituan/api.mini_code.go b/service/meituan/api.mini_code.go similarity index 84% rename from meituan/api.mini_code.go rename to service/meituan/api.mini_code.go index 244d23fe..850d9318 100644 --- a/meituan/api.mini_code.go +++ b/service/meituan/api.mini_code.go @@ -2,7 +2,7 @@ package meituan import ( "encoding/json" - gorequest "go.dtapp.net/library/gorequest" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) @@ -15,11 +15,11 @@ type ApiMiniCodeResponse struct { type ApiMiniCodeResult struct { Result ApiMiniCodeResponse // 结果 Body []byte // 内容 - Http gorequest.Response // 请求 + Http gorequest2.Response // 请求 Err error // 错误 } -func NewApiMiniCodeResult(result ApiMiniCodeResponse, body []byte, http gorequest.Response, err error) *ApiMiniCodeResult { +func NewApiMiniCodeResult(result ApiMiniCodeResponse, body []byte, http gorequest2.Response, err error) *ApiMiniCodeResult { return &ApiMiniCodeResult{Result: result, Body: body, Http: http, Err: err} } @@ -27,12 +27,12 @@ func NewApiMiniCodeResult(result ApiMiniCodeResponse, body []byte, http goreques // https://union.meituan.com/v2/apiDetail?id=26 func (app *App) ApiMiniCode(actId int64, sid string) *ApiMiniCodeResult { // 参数 - param := gorequest.NewParams() + param := gorequest2.NewParams() param.Set("appkey", app.appKey) param.Set("sid", sid) param.Set("actId", actId) // 转换 - params := gorequest.NewParamsWith(param) + params := gorequest2.NewParamsWith(param) params["sign"] = app.getSign(app.secret, params) // 请求 request, err := app.request("https://openapi.meituan.com/api/miniCode", params, http.MethodGet) diff --git a/meituan/api.order.go b/service/meituan/api.order.go similarity index 88% rename from meituan/api.order.go rename to service/meituan/api.order.go index ab08df6c..2c4e437f 100644 --- a/meituan/api.order.go +++ b/service/meituan/api.order.go @@ -2,7 +2,7 @@ package meituan import ( "encoding/json" - gorequest "go.dtapp.net/library/gorequest" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) @@ -35,21 +35,21 @@ type ApiOrderResponse struct { } type ApiOrderResult struct { - Result ApiOrderResponse // 结果 - Body []byte // 内容 - Http gorequest.Response // 请求 - Err error // 错误 + Result ApiOrderResponse // 结果 + Body []byte // 内容 + Http gorequest2.Response // 请求 + Err error // 错误 } -func NewApiOrderResult(result ApiOrderResponse, body []byte, http gorequest.Response, err error) *ApiOrderResult { +func NewApiOrderResult(result ApiOrderResponse, body []byte, http gorequest2.Response, err error) *ApiOrderResult { return &ApiOrderResult{Result: result, Body: body, Http: http, Err: err} } // ApiOrder 单订单查询接口(新版) // https://union.meituan.com/v2/apiDetail?id=24 -func (app *App) ApiOrder(notMustParams ...gorequest.Params) *ApiOrderResult { +func (app *App) ApiOrder(notMustParams ...gorequest2.Params) *ApiOrderResult { // 参数 - params := gorequest.NewParamsWith(notMustParams...) + params := gorequest2.NewParamsWith(notMustParams...) 
// 请求时刻10位时间戳(秒级),有效期60s params["appkey"] = app.appKey params["sign"] = app.getSign(app.secret, params) diff --git a/meituan/api.order_list.go b/service/meituan/api.order_list.go similarity index 91% rename from meituan/api.order_list.go rename to service/meituan/api.order_list.go index c3b63a97..413eccdf 100644 --- a/meituan/api.order_list.go +++ b/service/meituan/api.order_list.go @@ -2,8 +2,8 @@ package meituan import ( "encoding/json" - gorequest "go.dtapp.net/library/gorequest" - "go.dtapp.net/library/gotime" + gorequest2 "go.dtapp.net/library/utils/gorequest" + "go.dtapp.net/library/utils/gotime" "net/http" ) @@ -36,19 +36,19 @@ type ApiOrderListResponse struct { type ApiOrderListResult struct { Result ApiOrderListResponse // 结果 Body []byte // 内容 - Http gorequest.Response // 请求 + Http gorequest2.Response // 请求 Err error // 错误 } -func NewApiOrderListResult(result ApiOrderListResponse, body []byte, http gorequest.Response, err error) *ApiOrderListResult { +func NewApiOrderListResult(result ApiOrderListResponse, body []byte, http gorequest2.Response, err error) *ApiOrderListResult { return &ApiOrderListResult{Result: result, Body: body, Http: http, Err: err} } // ApiOrderList 订单列表查询接口(新版) // https://union.meituan.com/v2/apiDetail?id=23 -func (app *App) ApiOrderList(notMustParams ...gorequest.Params) *ApiOrderListResult { +func (app *App) ApiOrderList(notMustParams ...gorequest2.Params) *ApiOrderListResult { // 参数 - params := gorequest.NewParamsWith(notMustParams...) + params := gorequest2.NewParamsWith(notMustParams...) // 请求时刻10位时间戳(秒级),有效期60s params["ts"] = gotime.Current().Timestamp() params["appkey"] = app.appKey diff --git a/meituan/app.go b/service/meituan/app.go similarity index 94% rename from meituan/app.go rename to service/meituan/app.go index 74540a38..3f140a2d 100644 --- a/meituan/app.go +++ b/service/meituan/app.go @@ -1,8 +1,8 @@ package meituan import ( - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" ) diff --git a/meituan/pgsql.go b/service/meituan/pgsql.go similarity index 58% rename from meituan/pgsql.go rename to service/meituan/pgsql.go index 6ed7d7cb..4650883c 100644 --- a/meituan/pgsql.go +++ b/service/meituan/pgsql.go @@ -1,19 +1,19 @@ package meituan import ( - "go.dtapp.net/library/gojson" - golog "go.dtapp.net/library/golog" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(request gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 @@ -21,6 +21,6 @@ func (app *App) postgresqlLog(request 
gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/meituan/poi.area.go b/service/meituan/poi.area.go similarity index 76% rename from meituan/poi.area.go rename to service/meituan/poi.area.go index 15521542..2da93b0d 100644 --- a/meituan/poi.area.go +++ b/service/meituan/poi.area.go @@ -2,7 +2,7 @@ package meituan import ( "encoding/json" - gorequest "go.dtapp.net/library/gorequest" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) @@ -19,13 +19,13 @@ type PoiAreaResponse struct { } type PoiAreaResult struct { - Result PoiAreaResponse // 结果 - Body []byte // 内容 - Http gorequest.Response // 请求 - Err error // 错误 + Result PoiAreaResponse // 结果 + Body []byte // 内容 + Http gorequest2.Response // 请求 + Err error // 错误 } -func NewPoiAreaResult(result PoiAreaResponse, body []byte, http gorequest.Response, err error) *PoiAreaResult { +func NewPoiAreaResult(result PoiAreaResponse, body []byte, http gorequest2.Response, err error) *PoiAreaResult { return &PoiAreaResult{Result: result, Body: body, Http: http, Err: err} } @@ -33,9 +33,9 @@ func NewPoiAreaResult(result PoiAreaResponse, body []byte, http gorequest.Respon // https://openapi.meituan.com/#api-0.%E5%9F%BA%E7%A1%80%E6%95%B0%E6%8D%AE-GetHttpsOpenapiMeituanComPoiAreaCityid1 func (app *App) PoiArea(cityID int) *PoiAreaResult { // 参数 - param := gorequest.NewParams() + param := gorequest2.NewParams() param.Set("cityid", cityID) - params := gorequest.NewParamsWith(param) + params := gorequest2.NewParamsWith(param) // 请求 request, err := app.request("https://openapi.meituan.com/poi/area", params, http.MethodGet) // 定义 diff --git a/meituan/poi.category.go b/service/meituan/poi.category.go similarity index 83% rename from meituan/poi.category.go rename to service/meituan/poi.category.go index 1bda2e56..01bb3c26 100644 --- a/meituan/poi.category.go +++ b/service/meituan/poi.category.go @@ -2,7 +2,7 @@ package meituan import ( "encoding/json" - gorequest "go.dtapp.net/library/gorequest" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) @@ -21,11 +21,11 @@ type PoiCategoryResponse struct { type PoiCategoryResult struct { Result PoiCategoryResponse // 结果 Body []byte // 内容 - Http gorequest.Response // 请求 + Http gorequest2.Response // 请求 Err error // 错误 } -func NewPoiCategoryResult(result PoiCategoryResponse, body []byte, http gorequest.Response, err error) *PoiCategoryResult { +func NewPoiCategoryResult(result PoiCategoryResponse, body []byte, http gorequest2.Response, err error) *PoiCategoryResult { return &PoiCategoryResult{Result: result, Body: body, Http: http, Err: err} } @@ -33,9 +33,9 @@ func NewPoiCategoryResult(result PoiCategoryResponse, body []byte, http goreques // https://openapi.meituan.com/#api-0.%E5%9F%BA%E7%A1%80%E6%95%B0%E6%8D%AE-GetHttpsOpenapiMeituanComPoiDistrictCityid1 func (app *App) PoiCategory(cityID int) *PoiCategoryResult { // 参数 - param := gorequest.NewParams() + param := gorequest2.NewParams() param.Set("cityid", cityID) - params := gorequest.NewParamsWith(param) + params := gorequest2.NewParamsWith(param) // 请求 request, err := app.request("https://openapi.meituan.com/poi/category", params, http.MethodGet) // 定义 diff --git a/meituan/poi.city.go b/service/meituan/poi.city.go similarity index 96% rename from 
meituan/poi.city.go rename to service/meituan/poi.city.go index 7de2ebd9..713b132a 100644 --- a/meituan/poi.city.go +++ b/service/meituan/poi.city.go @@ -2,7 +2,7 @@ package meituan import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/meituan/poi.district.go b/service/meituan/poi.district.go similarity index 83% rename from meituan/poi.district.go rename to service/meituan/poi.district.go index d5b5d620..0e8bc0c9 100644 --- a/meituan/poi.district.go +++ b/service/meituan/poi.district.go @@ -2,7 +2,7 @@ package meituan import ( "encoding/json" - gorequest "go.dtapp.net/library/gorequest" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) @@ -17,11 +17,11 @@ type PoiDistrictResponse struct { type PoiDistrictResult struct { Result PoiDistrictResponse // 结果 Body []byte // 内容 - Http gorequest.Response // 请求 + Http gorequest2.Response // 请求 Err error // 错误 } -func NewPoiDistrictResult(result PoiDistrictResponse, body []byte, http gorequest.Response, err error) *PoiDistrictResult { +func NewPoiDistrictResult(result PoiDistrictResponse, body []byte, http gorequest2.Response, err error) *PoiDistrictResult { return &PoiDistrictResult{Result: result, Body: body, Http: http, Err: err} } @@ -29,9 +29,9 @@ func NewPoiDistrictResult(result PoiDistrictResponse, body []byte, http goreques // https://openapi.meituan.com/#api-0.%E5%9F%BA%E7%A1%80%E6%95%B0%E6%8D%AE-GetHttpsOpenapiMeituanComPoiDistrictCityid1 func (app *App) PoiDistrict(cityID int) *PoiDistrictResult { // 参数 - param := gorequest.NewParams() + param := gorequest2.NewParams() param.Set("cityid", cityID) - params := gorequest.NewParamsWith(param) + params := gorequest2.NewParamsWith(param) // 请求 request, err := app.request("https://openapi.meituan.com/poi/district", params, http.MethodGet) // 定义 diff --git a/meituan/service_http.order.go b/service/meituan/service_http.order.go similarity index 100% rename from meituan/service_http.order.go rename to service/meituan/service_http.order.go diff --git a/meituan/sign.go b/service/meituan/sign.go similarity index 100% rename from meituan/sign.go rename to service/meituan/sign.go diff --git a/oauth/gitee/access_token.go b/service/oauth/gitee/access_token.go similarity index 100% rename from oauth/gitee/access_token.go rename to service/oauth/gitee/access_token.go diff --git a/oauth/gitee/api_user.go b/service/oauth/gitee/api_user.go similarity index 100% rename from oauth/gitee/api_user.go rename to service/oauth/gitee/api_user.go diff --git a/oauth/gitee/app.go b/service/oauth/gitee/app.go similarity index 100% rename from oauth/gitee/app.go rename to service/oauth/gitee/app.go diff --git a/oauth/gitee/authorize.go b/service/oauth/gitee/authorize.go similarity index 100% rename from oauth/gitee/authorize.go rename to service/oauth/gitee/authorize.go diff --git a/oauth/gitee/config.go b/service/oauth/gitee/config.go similarity index 100% rename from oauth/gitee/config.go rename to service/oauth/gitee/config.go diff --git a/oauth/workwechat/access_token.go b/service/oauth/workwechat/access_token.go similarity index 100% rename from oauth/workwechat/access_token.go rename to service/oauth/workwechat/access_token.go diff --git a/oauth/workwechat/app.go b/service/oauth/workwechat/app.go similarity index 100% rename from oauth/workwechat/app.go rename to service/oauth/workwechat/app.go diff --git a/oauth/workwechat/authorize.go b/service/oauth/workwechat/authorize.go similarity index 100% rename from 
oauth/workwechat/authorize.go rename to service/oauth/workwechat/authorize.go diff --git a/oauth/workwechat/get_user_info.go b/service/oauth/workwechat/get_user_info.go similarity index 100% rename from oauth/workwechat/get_user_info.go rename to service/oauth/workwechat/get_user_info.go diff --git a/oauth/workwechat/qr_connect.go b/service/oauth/workwechat/qr_connect.go similarity index 100% rename from oauth/workwechat/qr_connect.go rename to service/oauth/workwechat/qr_connect.go diff --git a/pconline/app.go b/service/pconline/app.go similarity index 85% rename from pconline/app.go rename to service/pconline/app.go index 6376e28f..902b2afd 100644 --- a/pconline/app.go +++ b/service/pconline/app.go @@ -2,8 +2,9 @@ package pconline import ( "go.dtapp.net/library/golog" - "go.dtapp.net/library/gomongo" - "go.dtapp.net/library/gorequest" + golog2 "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gomongo" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" ) @@ -11,7 +12,7 @@ type App struct { mongo *gomongo.Client // 日志数据库 pgsql *gorm.DB // pgsql数据库 client *gorequest.App // 请求客户端 - log *golog.Api // 日志服务 + log *golog2.Api // 日志服务 logTableName string // 日志表名 logStatus bool // 日志状态 } diff --git a/pconline/ip.go b/service/pconline/ip.go similarity index 97% rename from pconline/ip.go rename to service/pconline/ip.go index e41ce830..60aad1df 100644 --- a/pconline/ip.go +++ b/service/pconline/ip.go @@ -3,7 +3,7 @@ package pconline import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "golang.org/x/text/encoding/simplifiedchinese" ) diff --git a/pconline/mongo.go b/service/pconline/mongo.go similarity index 64% rename from pconline/mongo.go rename to service/pconline/mongo.go index 2afbb594..46eaa471 100644 --- a/pconline/mongo.go +++ b/service/pconline/mongo.go @@ -1,25 +1,25 @@ package pconline import ( - gomongo2 "go.dtapp.net/library/gomongo" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gomongo" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) // 日志 type mongoZap struct { - RequestTime gomongo2.BsonTime `json:"request_time" bson:"request_time"` //【请求】时间 + RequestTime gomongo.BsonTime `json:"request_time" bson:"request_time"` //【请求】时间 RequestUri string `json:"request_uri" bson:"request_uri"` //【请求】链接 RequestUrl string `json:"request_url" bson:"request_url"` //【请求】链接 RequestApi string `json:"request_api" bson:"request_api"` //【请求】接口 RequestMethod string `json:"request_method" bson:"request_method"` //【请求】方式 - RequestParams gorequest.Params `json:"request_params" bson:"request_params"` //【请求】参数 - RequestHeader gorequest.Headers `json:"request_header" bson:"request_header"` //【请求】头部 + RequestParams gorequest2.Params `json:"request_params" bson:"request_params"` //【请求】参数 + RequestHeader gorequest2.Headers `json:"request_header" bson:"request_header"` //【请求】头部 ResponseHeader http.Header `json:"response_header" bson:"response_header"` //【返回】头部 ResponseStatusCode int `json:"response_status_code" bson:"response_status_code"` //【返回】状态码 ResponseBody map[string]interface{} `json:"response_body" bson:"response_body"` //【返回】内容 ResponseContentLength int64 `json:"response_content_length" bson:"response_content_length"` //【返回】大小 - ResponseTime gomongo2.BsonTime `json:"response_time" bson:"response_time"` //【返回】时间 + ResponseTime gomongo.BsonTime `json:"response_time" bson:"response_time"` //【返回】时间 } func (m *mongoZap) Database() string { @@ -30,19 +30,19 @@ func (m *mongoZap) TableName() string 
{ return "pconline" } -func (app *App) mongoLog(request gorequest.Response) { +func (app *App) mongoLog(request gorequest2.Response) { _, _ = app.mongo.Model(&mongoZap{}).InsertOne(mongoZap{ - RequestTime: gomongo2.BsonTime(request.RequestTime), //【请求】时间 - RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 - RequestMethod: request.RequestMethod, //【请求】方式 - RequestParams: request.RequestParams, //【请求】参数 - RequestHeader: request.RequestHeader, //【请求】头部 - ResponseHeader: request.ResponseHeader, //【返回】头部 - ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 - ResponseBody: gomongo2.JsonDecodeNoError(request.ResponseBody), //【返回】内容 - ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: gomongo2.BsonTime(request.ResponseTime), //【返回】时间 + RequestTime: gomongo.BsonTime(request.RequestTime), //【请求】时间 + RequestUri: request.RequestUri, //【请求】链接 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 + RequestMethod: request.RequestMethod, //【请求】方式 + RequestParams: request.RequestParams, //【请求】参数 + RequestHeader: request.RequestHeader, //【请求】头部 + ResponseHeader: request.ResponseHeader, //【返回】头部 + ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 + ResponseBody: gomongo.JsonDecodeNoError(request.ResponseBody), //【返回】内容 + ResponseContentLength: request.ResponseContentLength, //【返回】大小 + ResponseTime: gomongo.BsonTime(request.ResponseTime), //【返回】时间 }) } diff --git a/pconline/pgsql.go b/service/pconline/pgsql.go similarity index 58% rename from pconline/pgsql.go rename to service/pconline/pgsql.go index 03ad0b4d..f5e67714 100644 --- a/pconline/pgsql.go +++ b/service/pconline/pgsql.go @@ -1,19 +1,19 @@ package pconline import ( - "go.dtapp.net/library/gojson" - golog "go.dtapp.net/library/golog" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { - app.log.Api.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(request gorequest2.Response) { + app.log.Api.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 @@ -21,6 +21,6 @@ func (app *App) postgresqlLog(request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/pinduoduo/app.go b/service/pinduoduo/app.go similarity index 95% rename from pinduoduo/app.go 
rename to service/pinduoduo/app.go index 14aa0f87..d6e1dc1b 100644 --- a/pinduoduo/app.go +++ b/service/pinduoduo/app.go @@ -2,9 +2,9 @@ package pinduoduo import ( "fmt" - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gorequest" - "go.dtapp.net/library/gostring" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gorequest" + "go.dtapp.net/library/utils/gostring" "gorm.io/gorm" "regexp" "strconv" diff --git a/pinduoduo/crypto.go b/service/pinduoduo/crypto.go similarity index 100% rename from pinduoduo/crypto.go rename to service/pinduoduo/crypto.go diff --git a/pinduoduo/param.go b/service/pinduoduo/param.go similarity index 100% rename from pinduoduo/param.go rename to service/pinduoduo/param.go diff --git a/pinduoduo/params.go b/service/pinduoduo/params.go similarity index 100% rename from pinduoduo/params.go rename to service/pinduoduo/params.go diff --git a/pinduoduo/pdd.ddk.cms.prom.url.generate.go b/service/pinduoduo/pdd.ddk.cms.prom.url.generate.go similarity index 98% rename from pinduoduo/pdd.ddk.cms.prom.url.generate.go rename to service/pinduoduo/pdd.ddk.cms.prom.url.generate.go index 7914bab0..ebdb9a09 100644 --- a/pinduoduo/pdd.ddk.cms.prom.url.generate.go +++ b/service/pinduoduo/pdd.ddk.cms.prom.url.generate.go @@ -2,7 +2,7 @@ package pinduoduo import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type CmsPromUrlGenerateResponse struct { diff --git a/pinduoduo/pdd.ddk.goods.detail.go b/service/pinduoduo/pdd.ddk.goods.detail.go similarity index 99% rename from pinduoduo/pdd.ddk.goods.detail.go rename to service/pinduoduo/pdd.ddk.goods.detail.go index f5f662e4..bbb1d497 100644 --- a/pinduoduo/pdd.ddk.goods.detail.go +++ b/service/pinduoduo/pdd.ddk.goods.detail.go @@ -2,7 +2,7 @@ package pinduoduo import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GoodsDetailResponse struct { diff --git a/pinduoduo/pdd.ddk.goods.pid.generate.go b/service/pinduoduo/pdd.ddk.goods.pid.generate.go similarity index 97% rename from pinduoduo/pdd.ddk.goods.pid.generate.go rename to service/pinduoduo/pdd.ddk.goods.pid.generate.go index eaec3ceb..d5d93ca6 100644 --- a/pinduoduo/pdd.ddk.goods.pid.generate.go +++ b/service/pinduoduo/pdd.ddk.goods.pid.generate.go @@ -2,7 +2,7 @@ package pinduoduo import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GoodsPidGenerateResponse struct { diff --git a/pinduoduo/pdd.ddk.goods.promotion.url.generate.go b/service/pinduoduo/pdd.ddk.goods.promotion.url.generate.go similarity index 98% rename from pinduoduo/pdd.ddk.goods.promotion.url.generate.go rename to service/pinduoduo/pdd.ddk.goods.promotion.url.generate.go index ead1aa34..51e414c9 100644 --- a/pinduoduo/pdd.ddk.goods.promotion.url.generate.go +++ b/service/pinduoduo/pdd.ddk.goods.promotion.url.generate.go @@ -2,7 +2,7 @@ package pinduoduo import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GoodsPromotionUrlGenerateResponse struct { diff --git a/pinduoduo/pdd.ddk.goods.recommend.get.go b/service/pinduoduo/pdd.ddk.goods.recommend.get.go similarity index 99% rename from pinduoduo/pdd.ddk.goods.recommend.get.go rename to service/pinduoduo/pdd.ddk.goods.recommend.get.go index 21e3164a..40917b6a 100644 --- a/pinduoduo/pdd.ddk.goods.recommend.get.go +++ b/service/pinduoduo/pdd.ddk.goods.recommend.get.go @@ -2,7 +2,7 @@ package pinduoduo import ( "encoding/json" - 
"go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GoodsRecommendGetResponse struct { diff --git a/pinduoduo/pdd.ddk.goods.search.go b/service/pinduoduo/pdd.ddk.goods.search.go similarity index 99% rename from pinduoduo/pdd.ddk.goods.search.go rename to service/pinduoduo/pdd.ddk.goods.search.go index 20fa59c4..48c1ccd1 100644 --- a/pinduoduo/pdd.ddk.goods.search.go +++ b/service/pinduoduo/pdd.ddk.goods.search.go @@ -2,7 +2,7 @@ package pinduoduo import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GoodsSearchResponse struct { diff --git a/pinduoduo/pdd.ddk.member.authority.query.go b/service/pinduoduo/pdd.ddk.member.authority.query.go similarity index 97% rename from pinduoduo/pdd.ddk.member.authority.query.go rename to service/pinduoduo/pdd.ddk.member.authority.query.go index b16323b6..696c365b 100644 --- a/pinduoduo/pdd.ddk.member.authority.query.go +++ b/service/pinduoduo/pdd.ddk.member.authority.query.go @@ -2,7 +2,7 @@ package pinduoduo import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type MemberAuthorityQueryResponse struct { diff --git a/pinduoduo/pdd.ddk.order.detail.get.go b/service/pinduoduo/pdd.ddk.order.detail.get.go similarity index 98% rename from pinduoduo/pdd.ddk.order.detail.get.go rename to service/pinduoduo/pdd.ddk.order.detail.get.go index 69d3636d..e25af15f 100644 --- a/pinduoduo/pdd.ddk.order.detail.get.go +++ b/service/pinduoduo/pdd.ddk.order.detail.get.go @@ -2,7 +2,7 @@ package pinduoduo import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type OrderDetailGetResponse struct { diff --git a/pinduoduo/pdd.ddk.order.list.increment.get.go b/service/pinduoduo/pdd.ddk.order.list.increment.get.go similarity index 98% rename from pinduoduo/pdd.ddk.order.list.increment.get.go rename to service/pinduoduo/pdd.ddk.order.list.increment.get.go index c3335014..3f332dbe 100644 --- a/pinduoduo/pdd.ddk.order.list.increment.get.go +++ b/service/pinduoduo/pdd.ddk.order.list.increment.get.go @@ -2,7 +2,7 @@ package pinduoduo import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type OrderListIncrementGetResponse struct { diff --git a/pinduoduo/pdd.ddk.order.list.range.get.go b/service/pinduoduo/pdd.ddk.order.list.range.get.go similarity index 98% rename from pinduoduo/pdd.ddk.order.list.range.get.go rename to service/pinduoduo/pdd.ddk.order.list.range.get.go index 9dd7167e..444c86df 100644 --- a/pinduoduo/pdd.ddk.order.list.range.get.go +++ b/service/pinduoduo/pdd.ddk.order.list.range.get.go @@ -2,7 +2,7 @@ package pinduoduo import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type OrderListRangeGetResponse struct { diff --git a/pinduoduo/pdd.ddk.resource.url.gen.go b/service/pinduoduo/pdd.ddk.resource.url.gen.go similarity index 98% rename from pinduoduo/pdd.ddk.resource.url.gen.go rename to service/pinduoduo/pdd.ddk.resource.url.gen.go index 3e591cdb..5f74d412 100644 --- a/pinduoduo/pdd.ddk.resource.url.gen.go +++ b/service/pinduoduo/pdd.ddk.resource.url.gen.go @@ -2,7 +2,7 @@ package pinduoduo import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type ResourceUrlGenResponse struct { diff --git a/pinduoduo/pdd.ddk.rp.prom.url.generate.go b/service/pinduoduo/pdd.ddk.rp.prom.url.generate.go similarity index 99% rename from 
pinduoduo/pdd.ddk.rp.prom.url.generate.go rename to service/pinduoduo/pdd.ddk.rp.prom.url.generate.go index 8390aa36..84ca6ed4 100644 --- a/pinduoduo/pdd.ddk.rp.prom.url.generate.go +++ b/service/pinduoduo/pdd.ddk.rp.prom.url.generate.go @@ -2,7 +2,7 @@ package pinduoduo import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type RpPromUrlGenerateResponse struct { diff --git a/pinduoduo/pdd.ddk.top.goods.list.query.go b/service/pinduoduo/pdd.ddk.top.goods.list.query.go similarity index 98% rename from pinduoduo/pdd.ddk.top.goods.list.query.go rename to service/pinduoduo/pdd.ddk.top.goods.list.query.go index 392b217e..f2c047e5 100644 --- a/pinduoduo/pdd.ddk.top.goods.list.query.go +++ b/service/pinduoduo/pdd.ddk.top.goods.list.query.go @@ -2,7 +2,7 @@ package pinduoduo import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type TopGoodsListQueryResponse struct { diff --git a/pinduoduo/pdd.goods.cats.get.go b/service/pinduoduo/pdd.goods.cats.get.go similarity index 97% rename from pinduoduo/pdd.goods.cats.get.go rename to service/pinduoduo/pdd.goods.cats.get.go index 709f9635..34aac9ae 100644 --- a/pinduoduo/pdd.goods.cats.get.go +++ b/service/pinduoduo/pdd.goods.cats.get.go @@ -2,7 +2,7 @@ package pinduoduo import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GoodsCatsGetResponse struct { diff --git a/pinduoduo/pdd.goods.opt.get.go b/service/pinduoduo/pdd.goods.opt.get.go similarity index 97% rename from pinduoduo/pdd.goods.opt.get.go rename to service/pinduoduo/pdd.goods.opt.get.go index b17982c2..d11afd9e 100644 --- a/pinduoduo/pdd.goods.opt.get.go +++ b/service/pinduoduo/pdd.goods.opt.get.go @@ -2,7 +2,7 @@ package pinduoduo import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GoodsOptGetResponse struct { diff --git a/pinduoduo/pgsql.go b/service/pinduoduo/pgsql.go similarity index 65% rename from pinduoduo/pgsql.go rename to service/pinduoduo/pgsql.go index dab694d4..863b3b74 100644 --- a/pinduoduo/pgsql.go +++ b/service/pinduoduo/pgsql.go @@ -1,18 +1,18 @@ package pinduoduo import ( - "go.dtapp.net/library/gojson" - golog "go.dtapp.net/library/golog" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(api string, request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(api string, request gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 RequestApi: api, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 @@ -21,6 +21,6 @@ func (app *App) postgresqlLog(api string, request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + 
ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/pintoto/api.order.create-soon-order.go b/service/pintoto/api.order.create-soon-order.go similarity index 98% rename from pintoto/api.order.create-soon-order.go rename to service/pintoto/api.order.create-soon-order.go index c0a3e9b2..70dce46d 100644 --- a/pintoto/api.order.create-soon-order.go +++ b/service/pintoto/api.order.create-soon-order.go @@ -2,7 +2,7 @@ package pintoto import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type ApiOrderCreateSoonOrder struct { diff --git a/pintoto/api.order.create.go b/service/pintoto/api.order.create.go similarity index 96% rename from pintoto/api.order.create.go rename to service/pintoto/api.order.create.go index b85c0e87..5c96b778 100644 --- a/pintoto/api.order.create.go +++ b/service/pintoto/api.order.create.go @@ -2,7 +2,7 @@ package pintoto import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type ApiOrderCreateResponse struct { diff --git a/pintoto/api.order.query.go b/service/pintoto/api.order.query.go similarity index 98% rename from pintoto/api.order.query.go rename to service/pintoto/api.order.query.go index 6fcc284b..3ed11c01 100644 --- a/pintoto/api.order.query.go +++ b/service/pintoto/api.order.query.go @@ -2,7 +2,7 @@ package pintoto import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type ApiOrderQueryResponse struct { diff --git a/pintoto/api.user.info.go b/service/pintoto/api.user.info.go similarity index 96% rename from pintoto/api.user.info.go rename to service/pintoto/api.user.info.go index 9477e8d3..49005275 100644 --- a/pintoto/api.user.info.go +++ b/service/pintoto/api.user.info.go @@ -2,7 +2,7 @@ package pintoto import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type ApiUserInfoResponse struct { diff --git a/pintoto/app.go b/service/pintoto/app.go similarity index 93% rename from pintoto/app.go rename to service/pintoto/app.go index 8ee1e404..7f39ec47 100644 --- a/pintoto/app.go +++ b/service/pintoto/app.go @@ -1,9 +1,9 @@ package pintoto import ( - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gomongo" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gomongo" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" "math" "strconv" diff --git a/pintoto/movie_info_get_cinema_list.go b/service/pintoto/movie_info_get_cinema_list.go similarity index 98% rename from pintoto/movie_info_get_cinema_list.go rename to service/pintoto/movie_info_get_cinema_list.go index b4481570..59793a6e 100644 --- a/pintoto/movie_info_get_cinema_list.go +++ b/service/pintoto/movie_info_get_cinema_list.go @@ -2,7 +2,7 @@ package pintoto import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GetCinemaListResponse struct { diff --git a/pintoto/movie_info_get_city_area.go b/service/pintoto/movie_info_get_city_area.go similarity index 96% rename from pintoto/movie_info_get_city_area.go rename to service/pintoto/movie_info_get_city_area.go index bda32e79..b6859174 100644 --- a/pintoto/movie_info_get_city_area.go +++ b/service/pintoto/movie_info_get_city_area.go @@ -2,7 +2,7 @@ package pintoto import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GetCityAreaResponse struct { diff --git 
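The bulk of this change is mechanical: every utility package moves from go.dtapp.net/library/<pkg> to go.dtapp.net/library/utils/<pkg>, and each integration moves under service/, while the call sites themselves stay the same. A minimal caller-side sketch of the new import path, reusing the NewParams/NewParamsWith pattern visible in the meituan hunks above (the helper name buildParams, its arguments, and the assumption that NewParamsWith returns gorequest.Params are illustrative, not taken from the source):

package example

// Before this commit: import "go.dtapp.net/library/gorequest"
// After this commit the same package lives under utils/:
import "go.dtapp.net/library/utils/gorequest"

// buildParams mirrors the NewParams / NewParamsWith call pattern from the hunks above.
// appKey and sid are placeholder inputs, not values from the source.
func buildParams(appKey, sid string) gorequest.Params {
	param := gorequest.NewParams()
	param.Set("appkey", appKey)
	param.Set("sid", sid)
	return gorequest.NewParamsWith(param)
}
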
a/pintoto/movie_info_get_city_list.go b/service/pintoto/movie_info_get_city_list.go similarity index 96% rename from pintoto/movie_info_get_city_list.go rename to service/pintoto/movie_info_get_city_list.go index e07b1d6f..fd51f7f9 100644 --- a/pintoto/movie_info_get_city_list.go +++ b/service/pintoto/movie_info_get_city_list.go @@ -2,7 +2,7 @@ package pintoto import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GetCityListResponse struct { diff --git a/pintoto/movie_info_get_hot_list.go b/service/pintoto/movie_info_get_hot_list.go similarity index 98% rename from pintoto/movie_info_get_hot_list.go rename to service/pintoto/movie_info_get_hot_list.go index 4d33d07f..d9240e91 100644 --- a/pintoto/movie_info_get_hot_list.go +++ b/service/pintoto/movie_info_get_hot_list.go @@ -2,7 +2,7 @@ package pintoto import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GetHotListResponse struct { diff --git a/pintoto/movie_info_get_schedule_list.go b/service/pintoto/movie_info_get_schedule_list.go similarity index 98% rename from pintoto/movie_info_get_schedule_list.go rename to service/pintoto/movie_info_get_schedule_list.go index 2ef84538..958d1a98 100644 --- a/pintoto/movie_info_get_schedule_list.go +++ b/service/pintoto/movie_info_get_schedule_list.go @@ -2,7 +2,7 @@ package pintoto import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GetScheduleListResponse struct { diff --git a/pintoto/movie_info_get_sea.go b/service/pintoto/movie_info_get_sea.go similarity index 97% rename from pintoto/movie_info_get_sea.go rename to service/pintoto/movie_info_get_sea.go index b2b74aeb..ff3e942d 100644 --- a/pintoto/movie_info_get_sea.go +++ b/service/pintoto/movie_info_get_sea.go @@ -2,7 +2,7 @@ package pintoto import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GetSeat struct { diff --git a/pintoto/movie_info_get_show_date.go b/service/pintoto/movie_info_get_show_date.go similarity index 96% rename from pintoto/movie_info_get_show_date.go rename to service/pintoto/movie_info_get_show_date.go index 6db14f2d..9e911a42 100644 --- a/pintoto/movie_info_get_show_date.go +++ b/service/pintoto/movie_info_get_show_date.go @@ -2,7 +2,7 @@ package pintoto import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GetShowDateResponse struct { diff --git a/pintoto/movie_info_get_show_list.go b/service/pintoto/movie_info_get_show_list.go similarity index 98% rename from pintoto/movie_info_get_show_list.go rename to service/pintoto/movie_info_get_show_list.go index 443e5142..bb5cc411 100644 --- a/pintoto/movie_info_get_show_list.go +++ b/service/pintoto/movie_info_get_show_list.go @@ -2,7 +2,7 @@ package pintoto import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GetShowList struct { diff --git a/pintoto/movie_info_get_soon_list.go b/service/pintoto/movie_info_get_soon_list.go similarity index 98% rename from pintoto/movie_info_get_soon_list.go rename to service/pintoto/movie_info_get_soon_list.go index 1af00beb..417678cd 100644 --- a/pintoto/movie_info_get_soon_list.go +++ b/service/pintoto/movie_info_get_soon_list.go @@ -2,7 +2,7 @@ package pintoto import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GetSoonListResponse struct { diff --git 
a/pintoto/movieapi.movie-info.get-version.go b/service/pintoto/movieapi.movie-info.get-version.go similarity index 96% rename from pintoto/movieapi.movie-info.get-version.go rename to service/pintoto/movieapi.movie-info.get-version.go index 2f814c27..2c1719c4 100644 --- a/pintoto/movieapi.movie-info.get-version.go +++ b/service/pintoto/movieapi.movie-info.get-version.go @@ -2,7 +2,7 @@ package pintoto import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GetVersionResponse struct { diff --git a/pintoto/params.go b/service/pintoto/params.go similarity index 100% rename from pintoto/params.go rename to service/pintoto/params.go diff --git a/pintoto/pgsql.go b/service/pintoto/pgsql.go similarity index 58% rename from pintoto/pgsql.go rename to service/pintoto/pgsql.go index ce693fcc..f2ef6ef4 100644 --- a/pintoto/pgsql.go +++ b/service/pintoto/pgsql.go @@ -1,19 +1,19 @@ package pintoto import ( - "go.dtapp.net/library/gojson" - golog "go.dtapp.net/library/golog" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(request gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 @@ -21,6 +21,6 @@ func (app *App) postgresqlLog(request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/pintoto/sign.go b/service/pintoto/sign.go similarity index 100% rename from pintoto/sign.go rename to service/pintoto/sign.go diff --git a/sendcloud/apiv2.userinfo.get.go b/service/sendcloud/apiv2.userinfo.get.go similarity index 98% rename from sendcloud/apiv2.userinfo.get.go rename to service/sendcloud/apiv2.userinfo.get.go index 3a7d8bb9..5a6924e2 100644 --- a/sendcloud/apiv2.userinfo.get.go +++ b/service/sendcloud/apiv2.userinfo.get.go @@ -2,7 +2,7 @@ package sendcloud import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/sendcloud/app.go b/service/sendcloud/app.go similarity index 93% rename from sendcloud/app.go rename to service/sendcloud/app.go index 3396250e..9195a752 100644 --- a/sendcloud/app.go +++ b/service/sendcloud/app.go @@ -1,9 +1,9 @@ package sendcloud import ( - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gomongo" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/golog" + 
"go.dtapp.net/library/utils/gomongo" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" ) diff --git a/sendcloud/mongo.go b/service/sendcloud/mongo.go similarity index 84% rename from sendcloud/mongo.go rename to service/sendcloud/mongo.go index 9f1fc54d..6a333b7b 100644 --- a/sendcloud/mongo.go +++ b/service/sendcloud/mongo.go @@ -1,8 +1,8 @@ package sendcloud import ( - gomongo2 "go.dtapp.net/library/gomongo" - gorequest "go.dtapp.net/library/gorequest" + gomongo2 "go.dtapp.net/library/utils/gomongo" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) @@ -13,8 +13,8 @@ type mongoZap struct { RequestUrl string `json:"request_url" bson:"request_url"` //【请求】链接 RequestApi string `json:"request_api" bson:"request_api"` //【请求】接口 RequestMethod string `json:"request_method" bson:"request_method"` //【请求】方式 - RequestParams gorequest.Params `json:"request_params" bson:"request_params"` //【请求】参数 - RequestHeader gorequest.Headers `json:"request_header" bson:"request_header"` //【请求】头部 + RequestParams gorequest2.Params `json:"request_params" bson:"request_params"` //【请求】参数 + RequestHeader gorequest2.Headers `json:"request_header" bson:"request_header"` //【请求】头部 ResponseHeader http.Header `json:"response_header" bson:"response_header"` //【返回】头部 ResponseStatusCode int `json:"response_status_code" bson:"response_status_code"` //【返回】状态码 ResponseBody map[string]interface{} `json:"response_body" bson:"response_body"` //【返回】内容 @@ -30,12 +30,12 @@ func (m *mongoZap) TableName() string { return "sendcloud" } -func (app *App) mongoLog(request gorequest.Response) { +func (app *App) mongoLog(request gorequest2.Response) { _, _ = app.mongo.Model(&mongoZap{}).InsertOne(mongoZap{ RequestTime: gomongo2.BsonTime(request.RequestTime), //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: request.RequestParams, //【请求】参数 RequestHeader: request.RequestHeader, //【请求】头部 diff --git a/sendcloud/params.go b/service/sendcloud/params.go similarity index 100% rename from sendcloud/params.go rename to service/sendcloud/params.go diff --git a/sendcloud/pgsql.go b/service/sendcloud/pgsql.go similarity index 75% rename from sendcloud/pgsql.go rename to service/sendcloud/pgsql.go index 26258948..89cdc2bb 100644 --- a/sendcloud/pgsql.go +++ b/service/sendcloud/pgsql.go @@ -1,19 +1,19 @@ package sendcloud import ( - "go.dtapp.net/library/gojson" - "go.dtapp.net/library/golog" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { +func (app *App) postgresqlLog(request gorequest2.Response) { app.log.Record(golog.ApiPostgresqlLog{ RequestTime: request.RequestTime, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: 
datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 diff --git a/taobao/app.go b/service/taobao/app.go similarity index 96% rename from taobao/app.go rename to service/taobao/app.go index 067d56eb..2329fa47 100644 --- a/taobao/app.go +++ b/service/taobao/app.go @@ -2,9 +2,9 @@ package taobao import ( "fmt" - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gorequest" - "go.dtapp.net/library/gostring" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gorequest" + "go.dtapp.net/library/utils/gostring" "gorm.io/gorm" "regexp" "strconv" diff --git a/taobao/crypto.go b/service/taobao/crypto.go similarity index 100% rename from taobao/crypto.go rename to service/taobao/crypto.go diff --git a/taobao/help.md b/service/taobao/help.md similarity index 100% rename from taobao/help.md rename to service/taobao/help.md diff --git a/taobao/params.go b/service/taobao/params.go similarity index 100% rename from taobao/params.go rename to service/taobao/params.go diff --git a/taobao/pgsql.go b/service/taobao/pgsql.go similarity index 65% rename from taobao/pgsql.go rename to service/taobao/pgsql.go index ab2746f3..b5b73182 100644 --- a/taobao/pgsql.go +++ b/service/taobao/pgsql.go @@ -1,18 +1,18 @@ package taobao import ( - "go.dtapp.net/library/gojson" - golog "go.dtapp.net/library/golog" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(api string, request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(api string, request gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 RequestApi: api, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 @@ -21,6 +21,6 @@ func (app *App) postgresqlLog(api string, request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/taobao/taobao.tbk.dg.newuser.order.get.go b/service/taobao/taobao.tbk.dg.newuser.order.get.go similarity index 97% rename from taobao/taobao.tbk.dg.newuser.order.get.go rename to service/taobao/taobao.tbk.dg.newuser.order.get.go index bed6eb96..04456ee9 100644 --- a/taobao/taobao.tbk.dg.newuser.order.get.go +++ b/service/taobao/taobao.tbk.dg.newuser.order.get.go @@ -2,7 +2,7 @@ package taobao import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type TbkDgNewuserOrderGetResponse struct { diff --git a/taobao/taobao.tbk.order.details.get.go b/service/taobao/taobao.tbk.order.details.get.go similarity index 99% rename from taobao/taobao.tbk.order.details.get.go rename to service/taobao/taobao.tbk.order.details.get.go index 9652115a..3d1919c3 
100644 --- a/taobao/taobao.tbk.order.details.get.go +++ b/service/taobao/taobao.tbk.order.details.get.go @@ -2,7 +2,7 @@ package taobao import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type TbkOrderDetailsGetResponse struct { diff --git a/taobao/tbk.activity.info.get.go b/service/taobao/tbk.activity.info.get.go similarity index 97% rename from taobao/tbk.activity.info.get.go rename to service/taobao/tbk.activity.info.get.go index c9119e27..e6fbea91 100644 --- a/taobao/tbk.activity.info.get.go +++ b/service/taobao/tbk.activity.info.get.go @@ -2,7 +2,7 @@ package taobao import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type TbkActivityInfoGetResponse struct { diff --git a/taobao/tbk.coupon.get.go b/service/taobao/tbk.coupon.get.go similarity index 98% rename from taobao/tbk.coupon.get.go rename to service/taobao/tbk.coupon.get.go index 151e6321..fd658eb9 100644 --- a/taobao/tbk.coupon.get.go +++ b/service/taobao/tbk.coupon.get.go @@ -2,7 +2,7 @@ package taobao import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type TbkCouponGetResponse struct { diff --git a/taobao/tbk.dg.material.optional.go b/service/taobao/tbk.dg.material.optional.go similarity index 99% rename from taobao/tbk.dg.material.optional.go rename to service/taobao/tbk.dg.material.optional.go index 20b10a5c..631b352f 100644 --- a/taobao/tbk.dg.material.optional.go +++ b/service/taobao/tbk.dg.material.optional.go @@ -2,7 +2,7 @@ package taobao import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type TbkDgMaterialOptionalResponse struct { diff --git a/taobao/tbk.dg.optimus.material.go b/service/taobao/tbk.dg.optimus.material.go similarity index 98% rename from taobao/tbk.dg.optimus.material.go rename to service/taobao/tbk.dg.optimus.material.go index 9c294865..075fcfe0 100644 --- a/taobao/tbk.dg.optimus.material.go +++ b/service/taobao/tbk.dg.optimus.material.go @@ -2,7 +2,7 @@ package taobao import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type TbkDgOptimusMaterialResponse struct { diff --git a/taobao/tbk.item.info.get.go b/service/taobao/tbk.item.info.get.go similarity index 98% rename from taobao/tbk.item.info.get.go rename to service/taobao/tbk.item.info.get.go index 87cefbb1..53415272 100644 --- a/taobao/tbk.item.info.get.go +++ b/service/taobao/tbk.item.info.get.go @@ -2,7 +2,7 @@ package taobao import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type TbkItemInfoGetResponse struct { diff --git a/taobao/tbk.shop.get.go b/service/taobao/tbk.shop.get.go similarity index 97% rename from taobao/tbk.shop.get.go rename to service/taobao/tbk.shop.get.go index 4c0ef66b..5196d219 100644 --- a/taobao/tbk.shop.get.go +++ b/service/taobao/tbk.shop.get.go @@ -2,7 +2,7 @@ package taobao import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type TbkShopGetResponse struct { diff --git a/taobao/tbk.shop.recommend.get.go b/service/taobao/tbk.shop.recommend.get.go similarity index 97% rename from taobao/tbk.shop.recommend.get.go rename to service/taobao/tbk.shop.recommend.get.go index b76a190f..c7df9bc7 100644 --- a/taobao/tbk.shop.recommend.get.go +++ b/service/taobao/tbk.shop.recommend.get.go @@ -2,7 +2,7 @@ package taobao import ( "encoding/json" - "go.dtapp.net/library/gorequest" + 
"go.dtapp.net/library/utils/gorequest" ) type TbkShopRecommendGetResponse struct { diff --git a/taobao/tbk.spread.get.go b/service/taobao/tbk.spread.get.go similarity index 97% rename from taobao/tbk.spread.get.go rename to service/taobao/tbk.spread.get.go index ddccefca..e74ac71a 100644 --- a/taobao/tbk.spread.get.go +++ b/service/taobao/tbk.spread.get.go @@ -2,7 +2,7 @@ package taobao import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type TbkSpreadGetResponse struct { diff --git a/taobao/tbk.tpwd.create.go b/service/taobao/tbk.tpwd.create.go similarity index 96% rename from taobao/tbk.tpwd.create.go rename to service/taobao/tbk.tpwd.create.go index 32140d2d..f3e1fd42 100644 --- a/taobao/tbk.tpwd.create.go +++ b/service/taobao/tbk.tpwd.create.go @@ -2,7 +2,7 @@ package taobao import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type TbkTPwdCreateResponse struct { diff --git a/service/tencent/lighthouse.go b/service/tencent/lighthouse.go new file mode 100644 index 00000000..58d86c23 --- /dev/null +++ b/service/tencent/lighthouse.go @@ -0,0 +1,10 @@ +package tencent + +import "log" + +type lighthouse struct { +} + +func (l lighthouse) DescribeFirewallRules() { + log.Println("DescribeFirewallRules") +} diff --git a/service/tencent/tencent.go b/service/tencent/tencent.go new file mode 100644 index 00000000..49b5cacb --- /dev/null +++ b/service/tencent/tencent.go @@ -0,0 +1,18 @@ +package tencent + +type ConfigTencent struct { + SecretId string + SecretKey string +} + +type Tencent struct { + config ConfigTencent + Lighthouse lighthouse +} + +func NewTencent(config *ConfigTencent) *Tencent { + t := &Tencent{} + t.config.SecretId = config.SecretId + t.config.SecretKey = config.SecretKey + return t +} diff --git a/wechatminiprogram/app.go b/service/wechatminiprogram/app.go similarity index 94% rename from wechatminiprogram/app.go rename to service/wechatminiprogram/app.go index 693bc50f..f8d5d2aa 100644 --- a/wechatminiprogram/app.go +++ b/service/wechatminiprogram/app.go @@ -1,9 +1,9 @@ package wechatminiprogram import ( - "go.dtapp.net/library/golog" - "go.dtapp.net/library/goredis" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/goredis" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" ) diff --git a/wechatminiprogram/cgi-bin.getcallbackip.go b/service/wechatminiprogram/cgi-bin.getcallbackip.go similarity index 97% rename from wechatminiprogram/cgi-bin.getcallbackip.go rename to service/wechatminiprogram/cgi-bin.getcallbackip.go index 949bdfb4..0ba99ccf 100644 --- a/wechatminiprogram/cgi-bin.getcallbackip.go +++ b/service/wechatminiprogram/cgi-bin.getcallbackip.go @@ -3,7 +3,7 @@ package wechatminiprogram import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatminiprogram/cgi-bin.message.subscribe.send.go b/service/wechatminiprogram/cgi-bin.message.subscribe.send.go similarity index 97% rename from wechatminiprogram/cgi-bin.message.subscribe.send.go rename to service/wechatminiprogram/cgi-bin.message.subscribe.send.go index cf97166b..2a7b69d0 100644 --- a/wechatminiprogram/cgi-bin.message.subscribe.send.go +++ b/service/wechatminiprogram/cgi-bin.message.subscribe.send.go @@ -3,7 +3,7 @@ package wechatminiprogram import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git 
a/wechatminiprogram/cgi-bin.token.go b/service/wechatminiprogram/cgi-bin.token.go similarity index 97% rename from wechatminiprogram/cgi-bin.token.go rename to service/wechatminiprogram/cgi-bin.token.go index fc1f38da..d371858e 100644 --- a/wechatminiprogram/cgi-bin.token.go +++ b/service/wechatminiprogram/cgi-bin.token.go @@ -3,7 +3,7 @@ package wechatminiprogram import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatminiprogram/cgi-bin.token.monitor.go b/service/wechatminiprogram/cgi-bin.token.monitor.go similarity index 100% rename from wechatminiprogram/cgi-bin.token.monitor.go rename to service/wechatminiprogram/cgi-bin.token.monitor.go diff --git a/wechatminiprogram/cgi-bin.token.rdb.go b/service/wechatminiprogram/cgi-bin.token.rdb.go similarity index 100% rename from wechatminiprogram/cgi-bin.token.rdb.go rename to service/wechatminiprogram/cgi-bin.token.rdb.go diff --git a/wechatminiprogram/cgi-bin.wxaapp.createwxaqrcode.go b/service/wechatminiprogram/cgi-bin.wxaapp.createwxaqrcode.go similarity index 97% rename from wechatminiprogram/cgi-bin.wxaapp.createwxaqrcode.go rename to service/wechatminiprogram/cgi-bin.wxaapp.createwxaqrcode.go index 2afe7dbe..acf9b3e7 100644 --- a/wechatminiprogram/cgi-bin.wxaapp.createwxaqrcode.go +++ b/service/wechatminiprogram/cgi-bin.wxaapp.createwxaqrcode.go @@ -3,7 +3,7 @@ package wechatminiprogram import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatminiprogram/params.go b/service/wechatminiprogram/params.go similarity index 100% rename from wechatminiprogram/params.go rename to service/wechatminiprogram/params.go diff --git a/wechatminiprogram/pgsql.go b/service/wechatminiprogram/pgsql.go similarity index 69% rename from wechatminiprogram/pgsql.go rename to service/wechatminiprogram/pgsql.go index 5c879e42..cfc8af70 100644 --- a/wechatminiprogram/pgsql.go +++ b/service/wechatminiprogram/pgsql.go @@ -1,26 +1,26 @@ package wechatminiprogram import ( - "go.dtapp.net/library/gojson" - golog "go.dtapp.net/library/golog" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { - body := golog.ApiPostgresqlLog{} - body.RequestTime = golog.TimeString{Time: request.RequestTime} //【请求】时间 +func (app *App) postgresqlLog(request gorequest2.Response) { + body := golog2.ApiPostgresqlLog{} + body.RequestTime = golog2.TimeString{Time: request.RequestTime} //【请求】时间 body.RequestUri = request.RequestUri //【请求】链接 - body.RequestUrl = gorequest.UriParse(request.RequestUri).Url //【请求】链接 - body.RequestApi = gorequest.UriParse(request.RequestUri).Path //【请求】接口 + body.RequestUrl = gorequest2.UriParse(request.RequestUri).Url //【请求】链接 + body.RequestApi = gorequest2.UriParse(request.RequestUri).Path //【请求】接口 body.RequestMethod = request.RequestMethod //【请求】方式 body.RequestParams = datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)) //【请求】参数 body.RequestHeader = datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)) //【请求】头部 body.ResponseHeader = datatypes.JSON(gojson.JsonEncodeNoError(request.ResponseHeader)) //【返回】头部 body.ResponseStatusCode = request.ResponseStatusCode //【返回】状态码 body.ResponseContentLength = request.ResponseContentLength //【返回】大小 - body.ResponseTime = 
golog.TimeString{Time: request.ResponseTime} //【返回】时间 + body.ResponseTime = golog2.TimeString{Time: request.ResponseTime} //【返回】时间 if request.ResponseHeader.Get("Content-Type") == "image/jpeg" || request.ResponseHeader.Get("Content-Type") == "image/png" { } else { body.ResponseBody = request.ResponseBody //【返回】内容 diff --git a/wechatminiprogram/save_img.go b/service/wechatminiprogram/save_img.go similarity index 95% rename from wechatminiprogram/save_img.go rename to service/wechatminiprogram/save_img.go index e53ff7e4..77e62675 100644 --- a/wechatminiprogram/save_img.go +++ b/service/wechatminiprogram/save_img.go @@ -1,7 +1,7 @@ package wechatminiprogram import ( - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "log" "os" ) diff --git a/wechatminiprogram/sign.go b/service/wechatminiprogram/sign.go similarity index 100% rename from wechatminiprogram/sign.go rename to service/wechatminiprogram/sign.go diff --git a/wechatminiprogram/sns.jscode2session.go b/service/wechatminiprogram/sns.jscode2session.go similarity index 97% rename from wechatminiprogram/sns.jscode2session.go rename to service/wechatminiprogram/sns.jscode2session.go index 900c014b..48a33952 100644 --- a/wechatminiprogram/sns.jscode2session.go +++ b/service/wechatminiprogram/sns.jscode2session.go @@ -3,7 +3,7 @@ package wechatminiprogram import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatminiprogram/user_info.go b/service/wechatminiprogram/user_info.go similarity index 100% rename from wechatminiprogram/user_info.go rename to service/wechatminiprogram/user_info.go diff --git a/wechatminiprogram/user_phone.go b/service/wechatminiprogram/user_phone.go similarity index 100% rename from wechatminiprogram/user_phone.go rename to service/wechatminiprogram/user_phone.go diff --git a/wechatminiprogram/wxa.business.getliveinfo.go b/service/wechatminiprogram/wxa.business.getliveinfo.go similarity index 98% rename from wechatminiprogram/wxa.business.getliveinfo.go rename to service/wechatminiprogram/wxa.business.getliveinfo.go index c9978a12..bda45a7d 100644 --- a/wechatminiprogram/wxa.business.getliveinfo.go +++ b/service/wechatminiprogram/wxa.business.getliveinfo.go @@ -3,7 +3,7 @@ package wechatminiprogram import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatminiprogram/wxa.business.getuserphonenumber.go b/service/wechatminiprogram/wxa.business.getuserphonenumber.go similarity index 98% rename from wechatminiprogram/wxa.business.getuserphonenumber.go rename to service/wechatminiprogram/wxa.business.getuserphonenumber.go index 0041191f..82d1fbf0 100644 --- a/wechatminiprogram/wxa.business.getuserphonenumber.go +++ b/service/wechatminiprogram/wxa.business.getuserphonenumber.go @@ -3,7 +3,7 @@ package wechatminiprogram import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatminiprogram/wxa.generate_urllink.go b/service/wechatminiprogram/wxa.generate_urllink.go similarity index 97% rename from wechatminiprogram/wxa.generate_urllink.go rename to service/wechatminiprogram/wxa.generate_urllink.go index 913d013f..c43e90a3 100644 --- a/wechatminiprogram/wxa.generate_urllink.go +++ b/service/wechatminiprogram/wxa.generate_urllink.go @@ -3,7 +3,7 @@ package wechatminiprogram import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + 
"go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatminiprogram/wxa.generatescheme.go b/service/wechatminiprogram/wxa.generatescheme.go similarity index 97% rename from wechatminiprogram/wxa.generatescheme.go rename to service/wechatminiprogram/wxa.generatescheme.go index b9f3b26d..237f7af8 100644 --- a/wechatminiprogram/wxa.generatescheme.go +++ b/service/wechatminiprogram/wxa.generatescheme.go @@ -3,7 +3,7 @@ package wechatminiprogram import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatminiprogram/wxa.getwxacode.go b/service/wechatminiprogram/wxa.getwxacode.go similarity index 91% rename from wechatminiprogram/wxa.getwxacode.go rename to service/wechatminiprogram/wxa.getwxacode.go index d5a33a40..81361343 100644 --- a/wechatminiprogram/wxa.getwxacode.go +++ b/service/wechatminiprogram/wxa.getwxacode.go @@ -5,8 +5,8 @@ import ( "encoding/json" "errors" "fmt" - "go.dtapp.net/library/gorequest" - gostorage2 "go.dtapp.net/library/gostorage" + "go.dtapp.net/library/utils/gorequest" + "go.dtapp.net/library/utils/gostorage" "net/http" ) @@ -63,6 +63,6 @@ func (resp *WxaGetWxaCodeResult) Check() error { } // Update 上传 -func (resp *WxaGetWxaCodeResult) Update(storage *gostorage2.AliYun, filePath, fileName string) (gostorage2.FileInfo, error) { +func (resp *WxaGetWxaCodeResult) Update(storage *gostorage.AliYun, filePath, fileName string) (gostorage.FileInfo, error) { return storage.PutObject(bytes.NewReader(resp.Body), filePath, fileName) } diff --git a/wechatminiprogram/wxa.getwxacodeunlimit.go b/service/wechatminiprogram/wxa.getwxacodeunlimit.go similarity index 94% rename from wechatminiprogram/wxa.getwxacodeunlimit.go rename to service/wechatminiprogram/wxa.getwxacodeunlimit.go index 940c6603..a35efbeb 100644 --- a/wechatminiprogram/wxa.getwxacodeunlimit.go +++ b/service/wechatminiprogram/wxa.getwxacodeunlimit.go @@ -5,8 +5,8 @@ import ( "encoding/json" "errors" "fmt" - "go.dtapp.net/library/gorequest" - gostorage2 "go.dtapp.net/library/gostorage" + "go.dtapp.net/library/utils/gorequest" + "go.dtapp.net/library/utils/gostorage" "net/http" ) @@ -65,6 +65,6 @@ func (resp *WxaGetWxaCodeUnLimitResult) Check() error { } // Update 上传 -func (resp *WxaGetWxaCodeUnLimitResult) Update(storage *gostorage2.AliYun, filePath, fileName string) (gostorage2.FileInfo, error) { +func (resp *WxaGetWxaCodeUnLimitResult) Update(storage *gostorage.AliYun, filePath, fileName string) (gostorage.FileInfo, error) { return storage.PutObject(bytes.NewReader(resp.Body), filePath, fileName) } diff --git a/wechatminiprogram/wxa.query_urllink.go b/service/wechatminiprogram/wxa.query_urllink.go similarity index 98% rename from wechatminiprogram/wxa.query_urllink.go rename to service/wechatminiprogram/wxa.query_urllink.go index e783713d..9ac8e857 100644 --- a/wechatminiprogram/wxa.query_urllink.go +++ b/service/wechatminiprogram/wxa.query_urllink.go @@ -3,7 +3,7 @@ package wechatminiprogram import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatminiprogram/wxa.queryscheme.go b/service/wechatminiprogram/wxa.queryscheme.go similarity index 97% rename from wechatminiprogram/wxa.queryscheme.go rename to service/wechatminiprogram/wxa.queryscheme.go index 9a26ef2b..2cda3872 100644 --- a/wechatminiprogram/wxa.queryscheme.go +++ b/service/wechatminiprogram/wxa.queryscheme.go @@ -3,7 +3,7 @@ package wechatminiprogram import ( "encoding/json" "fmt" - 
"go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatminiprogram/wxaapi.feedback.list.go b/service/wechatminiprogram/wxaapi.feedback.list.go similarity index 97% rename from wechatminiprogram/wxaapi.feedback.list.go rename to service/wechatminiprogram/wxaapi.feedback.list.go index 5c4d1700..ec469605 100644 --- a/wechatminiprogram/wxaapi.feedback.list.go +++ b/service/wechatminiprogram/wxaapi.feedback.list.go @@ -2,7 +2,7 @@ package wechatminiprogram import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatoffice/app.go b/service/wechatoffice/app.go similarity index 94% rename from wechatoffice/app.go rename to service/wechatoffice/app.go index b96a610c..b07d9431 100644 --- a/wechatoffice/app.go +++ b/service/wechatoffice/app.go @@ -1,9 +1,9 @@ package wechatoffice import ( - "go.dtapp.net/library/golog" - "go.dtapp.net/library/goredis" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/goredis" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" ) diff --git a/wechatoffice/cgi-bin.ticket.getticket.go b/service/wechatoffice/cgi-bin.ticket.getticket.go similarity index 97% rename from wechatoffice/cgi-bin.ticket.getticket.go rename to service/wechatoffice/cgi-bin.ticket.getticket.go index c5aee82c..76326c5c 100644 --- a/wechatoffice/cgi-bin.ticket.getticket.go +++ b/service/wechatoffice/cgi-bin.ticket.getticket.go @@ -3,7 +3,7 @@ package wechatoffice import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatoffice/cgi-bin.ticket.getticket.monitor.go b/service/wechatoffice/cgi-bin.ticket.getticket.monitor.go similarity index 100% rename from wechatoffice/cgi-bin.ticket.getticket.monitor.go rename to service/wechatoffice/cgi-bin.ticket.getticket.monitor.go diff --git a/wechatoffice/cgi-bin.ticket.getticket.rdb.go b/service/wechatoffice/cgi-bin.ticket.getticket.rdb.go similarity index 100% rename from wechatoffice/cgi-bin.ticket.getticket.rdb.go rename to service/wechatoffice/cgi-bin.ticket.getticket.rdb.go diff --git a/wechatoffice/cgi-bin.token.go b/service/wechatoffice/cgi-bin.token.go similarity index 97% rename from wechatoffice/cgi-bin.token.go rename to service/wechatoffice/cgi-bin.token.go index 32eebd01..9ddf66b2 100644 --- a/wechatoffice/cgi-bin.token.go +++ b/service/wechatoffice/cgi-bin.token.go @@ -3,7 +3,7 @@ package wechatoffice import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatoffice/cgi-bin.token.monitor.go b/service/wechatoffice/cgi-bin.token.monitor.go similarity index 100% rename from wechatoffice/cgi-bin.token.monitor.go rename to service/wechatoffice/cgi-bin.token.monitor.go diff --git a/wechatoffice/cgi-bin.token.rdb.go b/service/wechatoffice/cgi-bin.token.rdb.go similarity index 100% rename from wechatoffice/cgi-bin.token.rdb.go rename to service/wechatoffice/cgi-bin.token.rdb.go diff --git a/wechatoffice/cgi-bin.user.get.go b/service/wechatoffice/cgi-bin.user.get.go similarity index 97% rename from wechatoffice/cgi-bin.user.get.go rename to service/wechatoffice/cgi-bin.user.get.go index 540bfcd7..b472c379 100644 --- a/wechatoffice/cgi-bin.user.get.go +++ b/service/wechatoffice/cgi-bin.user.get.go @@ -3,7 +3,7 @@ package wechatoffice import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + 
"go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatoffice/cgi-bin.user.info.go b/service/wechatoffice/cgi-bin.user.info.go similarity index 98% rename from wechatoffice/cgi-bin.user.info.go rename to service/wechatoffice/cgi-bin.user.info.go index c39f6038..7e67b7b9 100644 --- a/wechatoffice/cgi-bin.user.info.go +++ b/service/wechatoffice/cgi-bin.user.info.go @@ -3,7 +3,7 @@ package wechatoffice import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatoffice/debug.cgi-bin.ticket.check.go b/service/wechatoffice/debug.cgi-bin.ticket.check.go similarity index 96% rename from wechatoffice/debug.cgi-bin.ticket.check.go rename to service/wechatoffice/debug.cgi-bin.ticket.check.go index 2f305cc7..640f50dd 100644 --- a/wechatoffice/debug.cgi-bin.ticket.check.go +++ b/service/wechatoffice/debug.cgi-bin.ticket.check.go @@ -3,7 +3,7 @@ package wechatoffice import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatoffice/get.go b/service/wechatoffice/get.go similarity index 100% rename from wechatoffice/get.go rename to service/wechatoffice/get.go diff --git a/wechatoffice/getcallbackip.go b/service/wechatoffice/getcallbackip.go similarity index 97% rename from wechatoffice/getcallbackip.go rename to service/wechatoffice/getcallbackip.go index 60fabb9f..3d879b00 100644 --- a/wechatoffice/getcallbackip.go +++ b/service/wechatoffice/getcallbackip.go @@ -3,7 +3,7 @@ package wechatoffice import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatoffice/message.template.send.go b/service/wechatoffice/message.template.send.go similarity index 97% rename from wechatoffice/message.template.send.go rename to service/wechatoffice/message.template.send.go index 9c902124..82ccdfd2 100644 --- a/wechatoffice/message.template.send.go +++ b/service/wechatoffice/message.template.send.go @@ -3,7 +3,7 @@ package wechatoffice import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatoffice/oauth2.go b/service/wechatoffice/oauth2.go similarity index 100% rename from wechatoffice/oauth2.go rename to service/wechatoffice/oauth2.go diff --git a/wechatoffice/params.go b/service/wechatoffice/params.go similarity index 100% rename from wechatoffice/params.go rename to service/wechatoffice/params.go diff --git a/wechatoffice/pgsql.go b/service/wechatoffice/pgsql.go similarity index 58% rename from wechatoffice/pgsql.go rename to service/wechatoffice/pgsql.go index 5fac43ed..ce1a9b7c 100644 --- a/wechatoffice/pgsql.go +++ b/service/wechatoffice/pgsql.go @@ -1,19 +1,19 @@ package wechatoffice import ( - "go.dtapp.net/library/gojson" - golog "go.dtapp.net/library/golog" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(request gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: 
gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 @@ -21,6 +21,6 @@ func (app *App) postgresqlLog(request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/wechatoffice/share.go b/service/wechatoffice/share.go similarity index 96% rename from wechatoffice/share.go rename to service/wechatoffice/share.go index 2ab54380..610b6048 100644 --- a/wechatoffice/share.go +++ b/service/wechatoffice/share.go @@ -3,7 +3,7 @@ package wechatoffice import ( "crypto/sha1" "fmt" - "go.dtapp.net/library/gorandom" + "go.dtapp.net/library/utils/gorandom" "io" "time" ) diff --git a/wechatoffice/sign.go b/service/wechatoffice/sign.go similarity index 100% rename from wechatoffice/sign.go rename to service/wechatoffice/sign.go diff --git a/wechatoffice/sns.jscode2session.go b/service/wechatoffice/sns.jscode2session.go similarity index 97% rename from wechatoffice/sns.jscode2session.go rename to service/wechatoffice/sns.jscode2session.go index b09840f6..f792df05 100644 --- a/wechatoffice/sns.jscode2session.go +++ b/service/wechatoffice/sns.jscode2session.go @@ -3,7 +3,7 @@ package wechatoffice import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatoffice/sns.oauth2.access_token.go b/service/wechatoffice/sns.oauth2.access_token.go similarity index 97% rename from wechatoffice/sns.oauth2.access_token.go rename to service/wechatoffice/sns.oauth2.access_token.go index 3d4df582..4ceccafb 100644 --- a/wechatoffice/sns.oauth2.access_token.go +++ b/service/wechatoffice/sns.oauth2.access_token.go @@ -3,7 +3,7 @@ package wechatoffice import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatoffice/sns.userinfo.go b/service/wechatoffice/sns.userinfo.go similarity index 98% rename from wechatoffice/sns.userinfo.go rename to service/wechatoffice/sns.userinfo.go index 5a474265..06959dad 100644 --- a/wechatoffice/sns.userinfo.go +++ b/service/wechatoffice/sns.userinfo.go @@ -3,7 +3,7 @@ package wechatoffice import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/aes_crypto.go b/service/wechatopen/aes_crypto.go similarity index 100% rename from wechatopen/aes_crypto.go rename to service/wechatopen/aes_crypto.go diff --git a/wechatopen/app.go b/service/wechatopen/app.go similarity index 95% rename from wechatopen/app.go rename to service/wechatopen/app.go index 9cc4b009..9ca8854a 100644 --- a/wechatopen/app.go +++ b/service/wechatopen/app.go @@ -1,9 +1,9 @@ package wechatopen import ( - "go.dtapp.net/library/golog" - "go.dtapp.net/library/goredis" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/goredis" + 
"go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" ) diff --git a/wechatopen/cgi-bin.account.getaccountbasicinfo.go b/service/wechatopen/cgi-bin.account.getaccountbasicinfo.go similarity index 98% rename from wechatopen/cgi-bin.account.getaccountbasicinfo.go rename to service/wechatopen/cgi-bin.account.getaccountbasicinfo.go index 3be7bfd3..22482bc8 100644 --- a/wechatopen/cgi-bin.account.getaccountbasicinfo.go +++ b/service/wechatopen/cgi-bin.account.getaccountbasicinfo.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/cgi-bin.component.api_authorizer_token.go b/service/wechatopen/cgi-bin.component.api_authorizer_token.go similarity index 98% rename from wechatopen/cgi-bin.component.api_authorizer_token.go rename to service/wechatopen/cgi-bin.component.api_authorizer_token.go index 5fe2735a..af295287 100644 --- a/wechatopen/cgi-bin.component.api_authorizer_token.go +++ b/service/wechatopen/cgi-bin.component.api_authorizer_token.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/cgi-bin.component.api_component_token.go b/service/wechatopen/cgi-bin.component.api_component_token.go similarity index 97% rename from wechatopen/cgi-bin.component.api_component_token.go rename to service/wechatopen/cgi-bin.component.api_component_token.go index b8b5fa17..144d96d5 100644 --- a/wechatopen/cgi-bin.component.api_component_token.go +++ b/service/wechatopen/cgi-bin.component.api_component_token.go @@ -2,7 +2,7 @@ package wechatopen import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/cgi-bin.component.api_create_preauthcode.go b/service/wechatopen/cgi-bin.component.api_create_preauthcode.go similarity index 97% rename from wechatopen/cgi-bin.component.api_create_preauthcode.go rename to service/wechatopen/cgi-bin.component.api_create_preauthcode.go index 33b8d79f..327b3d41 100644 --- a/wechatopen/cgi-bin.component.api_create_preauthcode.go +++ b/service/wechatopen/cgi-bin.component.api_create_preauthcode.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/cgi-bin.component.api_get_authorizer_info.go b/service/wechatopen/cgi-bin.component.api_get_authorizer_info.go similarity index 99% rename from wechatopen/cgi-bin.component.api_get_authorizer_info.go rename to service/wechatopen/cgi-bin.component.api_get_authorizer_info.go index 42e23913..19b8684f 100644 --- a/wechatopen/cgi-bin.component.api_get_authorizer_info.go +++ b/service/wechatopen/cgi-bin.component.api_get_authorizer_info.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/cgi-bin.component.api_query_auth.go b/service/wechatopen/cgi-bin.component.api_query_auth.go similarity index 98% rename from wechatopen/cgi-bin.component.api_query_auth.go rename to service/wechatopen/cgi-bin.component.api_query_auth.go index dd83d33a..eda53b8a 100644 --- a/wechatopen/cgi-bin.component.api_query_auth.go +++ b/service/wechatopen/cgi-bin.component.api_query_auth.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - 
"go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/cgi-bin.component.api_start_push_ticket.go b/service/wechatopen/cgi-bin.component.api_start_push_ticket.go similarity index 97% rename from wechatopen/cgi-bin.component.api_start_push_ticket.go rename to service/wechatopen/cgi-bin.component.api_start_push_ticket.go index 71292ad1..704948fe 100644 --- a/wechatopen/cgi-bin.component.api_start_push_ticket.go +++ b/service/wechatopen/cgi-bin.component.api_start_push_ticket.go @@ -2,7 +2,7 @@ package wechatopen import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/cgi-bin.component.getprivacysetting.go b/service/wechatopen/cgi-bin.component.getprivacysetting.go similarity index 99% rename from wechatopen/cgi-bin.component.getprivacysetting.go rename to service/wechatopen/cgi-bin.component.getprivacysetting.go index 8828b8c5..de100875 100644 --- a/wechatopen/cgi-bin.component.getprivacysetting.go +++ b/service/wechatopen/cgi-bin.component.getprivacysetting.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/cgi-bin.component.setprivacysetting.go b/service/wechatopen/cgi-bin.component.setprivacysetting.go similarity index 98% rename from wechatopen/cgi-bin.component.setprivacysetting.go rename to service/wechatopen/cgi-bin.component.setprivacysetting.go index 7a8c79cd..29e96a26 100644 --- a/wechatopen/cgi-bin.component.setprivacysetting.go +++ b/service/wechatopen/cgi-bin.component.setprivacysetting.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/cgi-bin.get_api_domain_ip.go b/service/wechatopen/cgi-bin.get_api_domain_ip.go similarity index 96% rename from wechatopen/cgi-bin.get_api_domain_ip.go rename to service/wechatopen/cgi-bin.get_api_domain_ip.go index 7db23645..43ec64e5 100644 --- a/wechatopen/cgi-bin.get_api_domain_ip.go +++ b/service/wechatopen/cgi-bin.get_api_domain_ip.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/cgi-bin.shorturl.go b/service/wechatopen/cgi-bin.shorturl.go similarity index 97% rename from wechatopen/cgi-bin.shorturl.go rename to service/wechatopen/cgi-bin.shorturl.go index ecf2dae1..74b2540e 100644 --- a/wechatopen/cgi-bin.shorturl.go +++ b/service/wechatopen/cgi-bin.shorturl.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/cgi-bin.wxopen.getcategory.go b/service/wechatopen/cgi-bin.wxopen.getcategory.go similarity index 100% rename from wechatopen/cgi-bin.wxopen.getcategory.go rename to service/wechatopen/cgi-bin.wxopen.getcategory.go diff --git a/wechatopen/cgi-bin.wxopen.qrcodejumpadd.go b/service/wechatopen/cgi-bin.wxopen.qrcodejumpadd.go similarity index 98% rename from wechatopen/cgi-bin.wxopen.qrcodejumpadd.go rename to service/wechatopen/cgi-bin.wxopen.qrcodejumpadd.go index c311e1ec..f98ebf52 100644 --- a/wechatopen/cgi-bin.wxopen.qrcodejumpadd.go +++ b/service/wechatopen/cgi-bin.wxopen.qrcodejumpadd.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - 
"go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/cgi-bin.wxopen.qrcodejumpdelete.go b/service/wechatopen/cgi-bin.wxopen.qrcodejumpdelete.go similarity index 97% rename from wechatopen/cgi-bin.wxopen.qrcodejumpdelete.go rename to service/wechatopen/cgi-bin.wxopen.qrcodejumpdelete.go index ac30ed81..91501edf 100644 --- a/wechatopen/cgi-bin.wxopen.qrcodejumpdelete.go +++ b/service/wechatopen/cgi-bin.wxopen.qrcodejumpdelete.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/cgi-bin.wxopen.qrcodejumpdownloa.go b/service/wechatopen/cgi-bin.wxopen.qrcodejumpdownloa.go similarity index 97% rename from wechatopen/cgi-bin.wxopen.qrcodejumpdownloa.go rename to service/wechatopen/cgi-bin.wxopen.qrcodejumpdownloa.go index 05c539ea..67fac972 100644 --- a/wechatopen/cgi-bin.wxopen.qrcodejumpdownloa.go +++ b/service/wechatopen/cgi-bin.wxopen.qrcodejumpdownloa.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/cgi-bin.wxopen.qrcodejumpget.go b/service/wechatopen/cgi-bin.wxopen.qrcodejumpget.go similarity index 98% rename from wechatopen/cgi-bin.wxopen.qrcodejumpget.go rename to service/wechatopen/cgi-bin.wxopen.qrcodejumpget.go index 7c78edd1..cdaf1114 100644 --- a/wechatopen/cgi-bin.wxopen.qrcodejumpget.go +++ b/service/wechatopen/cgi-bin.wxopen.qrcodejumpget.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/cgi-bin.wxopen.qrcodejumppublish.go b/service/wechatopen/cgi-bin.wxopen.qrcodejumppublish.go similarity index 98% rename from wechatopen/cgi-bin.wxopen.qrcodejumppublish.go rename to service/wechatopen/cgi-bin.wxopen.qrcodejumppublish.go index 98279fcc..a90ae601 100644 --- a/wechatopen/cgi-bin.wxopen.qrcodejumppublish.go +++ b/service/wechatopen/cgi-bin.wxopen.qrcodejumppublish.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/get.go b/service/wechatopen/get.go similarity index 100% rename from wechatopen/get.go rename to service/wechatopen/get.go diff --git a/wechatopen/params.go b/service/wechatopen/params.go similarity index 100% rename from wechatopen/params.go rename to service/wechatopen/params.go diff --git a/wechatopen/pgsql.go b/service/wechatopen/pgsql.go similarity index 58% rename from wechatopen/pgsql.go rename to service/wechatopen/pgsql.go index 6c33e069..83028800 100644 --- a/wechatopen/pgsql.go +++ b/service/wechatopen/pgsql.go @@ -1,19 +1,19 @@ package wechatopen import ( - "go.dtapp.net/library/gojson" - golog "go.dtapp.net/library/golog" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(request gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: 
request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 @@ -21,6 +21,6 @@ func (app *App) postgresqlLog(request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/wechatopen/redis.go b/service/wechatopen/redis.go similarity index 100% rename from wechatopen/redis.go rename to service/wechatopen/redis.go diff --git a/wechatopen/save_img.go b/service/wechatopen/save_img.go similarity index 95% rename from wechatopen/save_img.go rename to service/wechatopen/save_img.go index 0a78fe4c..69c4acef 100644 --- a/wechatopen/save_img.go +++ b/service/wechatopen/save_img.go @@ -1,7 +1,7 @@ package wechatopen import ( - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "log" "os" ) diff --git a/wechatopen/service_http.authorizer_appid.go b/service/wechatopen/service_http.authorizer_appid.go similarity index 100% rename from wechatopen/service_http.authorizer_appid.go rename to service/wechatopen/service_http.authorizer_appid.go diff --git a/wechatopen/service_http.verify_ticket.go b/service/wechatopen/service_http.verify_ticket.go similarity index 100% rename from wechatopen/service_http.verify_ticket.go rename to service/wechatopen/service_http.verify_ticket.go diff --git a/wechatopen/set.go b/service/wechatopen/set.go similarity index 100% rename from wechatopen/set.go rename to service/wechatopen/set.go diff --git a/wechatopen/sign.go b/service/wechatopen/sign.go similarity index 100% rename from wechatopen/sign.go rename to service/wechatopen/sign.go diff --git a/wechatopen/sns.component.jscode2session.go b/service/wechatopen/sns.component.jscode2session.go similarity index 99% rename from wechatopen/sns.component.jscode2session.go rename to service/wechatopen/sns.component.jscode2session.go index b1cf97d6..b47df7c2 100644 --- a/wechatopen/sns.component.jscode2session.go +++ b/service/wechatopen/sns.component.jscode2session.go @@ -6,7 +6,7 @@ import ( "encoding/base64" "encoding/json" "errors" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" "strings" ) diff --git a/service/wechatopen/version.go b/service/wechatopen/version.go new file mode 100644 index 00000000..d047e40b --- /dev/null +++ b/service/wechatopen/version.go @@ -0,0 +1,3 @@ +package wechatopen + +const Version = "1.0.0" diff --git a/wechatopen/wxa.addtotemplate.go b/service/wechatopen/wxa.addtotemplate.go similarity index 97% rename from wechatopen/wxa.addtotemplate.go rename to service/wechatopen/wxa.addtotemplate.go index 402cc8ea..66d558fc 100644 --- a/wechatopen/wxa.addtotemplate.go +++ b/service/wechatopen/wxa.addtotemplate.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git 
a/wechatopen/wxa.bind_tester.go b/service/wechatopen/wxa.bind_tester.go similarity index 97% rename from wechatopen/wxa.bind_tester.go rename to service/wechatopen/wxa.bind_tester.go index 9e941576..619101d7 100644 --- a/wechatopen/wxa.bind_tester.go +++ b/service/wechatopen/wxa.bind_tester.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.business.getuserphonenumber.go b/service/wechatopen/wxa.business.getuserphonenumber.go similarity index 91% rename from wechatopen/wxa.business.getuserphonenumber.go rename to service/wechatopen/wxa.business.getuserphonenumber.go index ada8c91b..41aab071 100644 --- a/wechatopen/wxa.business.getuserphonenumber.go +++ b/service/wechatopen/wxa.business.getuserphonenumber.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) @@ -24,11 +24,11 @@ type WxaBusinessGetUserPhoneNumberResponse struct { type WxaBusinessGetUserPhoneNumberResult struct { Result WxaBusinessGetUserPhoneNumberResponse // 结果 Body []byte // 内容 - Http gorequest.Response // 请求 + Http gorequest2.Response // 请求 Err error // 错误 } -func NewWxaBusinessGetUserPhoneNumberResult(result WxaBusinessGetUserPhoneNumberResponse, body []byte, http gorequest.Response, err error) *WxaBusinessGetUserPhoneNumberResult { +func NewWxaBusinessGetUserPhoneNumberResult(result WxaBusinessGetUserPhoneNumberResponse, body []byte, http gorequest2.Response, err error) *WxaBusinessGetUserPhoneNumberResult { return &WxaBusinessGetUserPhoneNumberResult{Result: result, Body: body, Http: http, Err: err} } @@ -36,7 +36,7 @@ func NewWxaBusinessGetUserPhoneNumberResult(result WxaBusinessGetUserPhoneNumber // https://developers.weixin.qq.com/miniprogram/dev/api-backend/open-api/phonenumber/phonenumber.getPhoneNumber.html func (app *App) WxaBusinessGetUserPhoneNumber(code string) *WxaBusinessGetUserPhoneNumberResult { // 参数 - params := gorequest.NewParams() + params := gorequest2.NewParams() params.Set("code", code) // 请求 request, err := app.request(fmt.Sprintf("https://api.weixin.qq.com/wxa/business/getuserphonenumber?access_token=%s", app.GetAuthorizerAccessToken()), params, http.MethodPost) diff --git a/wechatopen/wxa.commit.go b/service/wechatopen/wxa.commit.go similarity index 81% rename from wechatopen/wxa.commit.go rename to service/wechatopen/wxa.commit.go index 237ab1cb..a25a4083 100644 --- a/wechatopen/wxa.commit.go +++ b/service/wechatopen/wxa.commit.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - gorequest "go.dtapp.net/library/gorequest" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) @@ -13,21 +13,21 @@ type WxaCommitResponse struct { } type WxaCommitResult struct { - Result WxaCommitResponse // 结果 - Body []byte // 内容 - Http gorequest.Response // 请求 - Err error // 错误 + Result WxaCommitResponse // 结果 + Body []byte // 内容 + Http gorequest2.Response // 请求 + Err error // 错误 } -func NewWxaCommitResult(result WxaCommitResponse, body []byte, http gorequest.Response, err error) *WxaCommitResult { +func NewWxaCommitResult(result WxaCommitResponse, body []byte, http gorequest2.Response, err error) *WxaCommitResult { return &WxaCommitResult{Result: result, Body: body, Http: http, Err: err} } // WxaCommit 上传小程序代码并生成体验版 // https://developers.weixin.qq.com/doc/oplatform/Third-party_Platforms/2.0/api/code/commit.html -func (app *App) 
WxaCommit(notMustParams ...gorequest.Params) *WxaCommitResult { +func (app *App) WxaCommit(notMustParams ...gorequest2.Params) *WxaCommitResult { // 参数 - params := gorequest.NewParamsWith(notMustParams...) + params := gorequest2.NewParamsWith(notMustParams...) // 请求 request, err := app.request(fmt.Sprintf("https://api.weixin.qq.com/wxa/commit?access_token=%s", app.GetAuthorizerAccessToken()), params, http.MethodPost) // 定义 diff --git a/wechatopen/wxa.deletetemplate.go b/service/wechatopen/wxa.deletetemplate.go similarity index 97% rename from wechatopen/wxa.deletetemplate.go rename to service/wechatopen/wxa.deletetemplate.go index 0b5494a2..d3d8db93 100644 --- a/wechatopen/wxa.deletetemplate.go +++ b/service/wechatopen/wxa.deletetemplate.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.get_auditstatus.go b/service/wechatopen/wxa.get_auditstatus.go similarity index 98% rename from wechatopen/wxa.get_auditstatus.go rename to service/wechatopen/wxa.get_auditstatus.go index 268dc21d..5ecd9b7f 100644 --- a/wechatopen/wxa.get_auditstatus.go +++ b/service/wechatopen/wxa.get_auditstatus.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.get_category.go b/service/wechatopen/wxa.get_category.go similarity index 97% rename from wechatopen/wxa.get_category.go rename to service/wechatopen/wxa.get_category.go index 78edac60..eabf2e7f 100644 --- a/wechatopen/wxa.get_category.go +++ b/service/wechatopen/wxa.get_category.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.get_effective_domain.go b/service/wechatopen/wxa.get_effective_domain.go similarity index 98% rename from wechatopen/wxa.get_effective_domain.go rename to service/wechatopen/wxa.get_effective_domain.go index 39ddb41d..b6174842 100644 --- a/wechatopen/wxa.get_effective_domain.go +++ b/service/wechatopen/wxa.get_effective_domain.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.get_latest_auditstatus.go b/service/wechatopen/wxa.get_latest_auditstatus.go similarity index 98% rename from wechatopen/wxa.get_latest_auditstatus.go rename to service/wechatopen/wxa.get_latest_auditstatus.go index beed4235..1c438c0f 100644 --- a/wechatopen/wxa.get_latest_auditstatus.go +++ b/service/wechatopen/wxa.get_latest_auditstatus.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.get_page.go b/service/wechatopen/wxa.get_page.go similarity index 96% rename from wechatopen/wxa.get_page.go rename to service/wechatopen/wxa.get_page.go index acdf5aa0..fce16155 100644 --- a/wechatopen/wxa.get_page.go +++ b/service/wechatopen/wxa.get_page.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.get_qrcode.go b/service/wechatopen/wxa.get_qrcode.go similarity index 96% rename from wechatopen/wxa.get_qrcode.go rename to service/wechatopen/wxa.get_qrcode.go index 
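wxa.commit.go above shows the variadic gorequest.Params pattern shared by most wechatopen endpoints, and version.go adds a package Version constant. A usage sketch under those assumptions; the App constructor and the commit parameter keys are not part of this diff, so they are placeholders.

package example

import (
	"log"

	"go.dtapp.net/library/service/wechatopen"
	"go.dtapp.net/library/utils/gorequest"
)

// commitExperienceVersion uploads code for a trial build; parameter keys are placeholders.
func commitExperienceVersion(app *wechatopen.App) {
	log.Println("wechatopen client version:", wechatopen.Version) // constant added in version.go

	params := gorequest.NewParams()
	params.Set("user_version", "v0.0.1")   // placeholder key/value
	params.Set("user_desc", "trial build") // placeholder key/value

	result := app.WxaCommit(params)
	if result.Err != nil {
		log.Println("commit failed:", result.Err)
		return
	}
	log.Println("status:", result.Http.ResponseStatusCode, "body:", string(result.Body))
}
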
70a28a49..5fe17003 100644 --- a/wechatopen/wxa.get_qrcode.go +++ b/service/wechatopen/wxa.get_qrcode.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.gettemplatedraftlist.go b/service/wechatopen/wxa.gettemplatedraftlist.go similarity index 98% rename from wechatopen/wxa.gettemplatedraftlist.go rename to service/wechatopen/wxa.gettemplatedraftlist.go index 0510e991..a58b24f3 100644 --- a/wechatopen/wxa.gettemplatedraftlist.go +++ b/service/wechatopen/wxa.gettemplatedraftlist.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.gettemplatelist.go b/service/wechatopen/wxa.gettemplatelist.go similarity index 98% rename from wechatopen/wxa.gettemplatelist.go rename to service/wechatopen/wxa.gettemplatelist.go index 6ad30133..91b23180 100644 --- a/wechatopen/wxa.gettemplatelist.go +++ b/service/wechatopen/wxa.gettemplatelist.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.getversioninfo.go b/service/wechatopen/wxa.getversioninfo.go similarity index 97% rename from wechatopen/wxa.getversioninfo.go rename to service/wechatopen/wxa.getversioninfo.go index d46c8355..5eb42df9 100644 --- a/wechatopen/wxa.getversioninfo.go +++ b/service/wechatopen/wxa.getversioninfo.go @@ -4,7 +4,7 @@ import ( "encoding/json" "errors" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.getwxacodeunlimit.go b/service/wechatopen/wxa.getwxacodeunlimit.go similarity index 98% rename from wechatopen/wxa.getwxacodeunlimit.go rename to service/wechatopen/wxa.getwxacodeunlimit.go index 6b5f7fb8..66aa3cf7 100644 --- a/wechatopen/wxa.getwxacodeunlimit.go +++ b/service/wechatopen/wxa.getwxacodeunlimit.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.memberauth.go b/service/wechatopen/wxa.memberauth.go similarity index 97% rename from wechatopen/wxa.memberauth.go rename to service/wechatopen/wxa.memberauth.go index 204e4efd..50e99814 100644 --- a/wechatopen/wxa.memberauth.go +++ b/service/wechatopen/wxa.memberauth.go @@ -4,7 +4,7 @@ import ( "encoding/json" "errors" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.modify_domain.go b/service/wechatopen/wxa.modify_domain.go similarity index 98% rename from wechatopen/wxa.modify_domain.go rename to service/wechatopen/wxa.modify_domain.go index 4160481f..6adcc283 100644 --- a/wechatopen/wxa.modify_domain.go +++ b/service/wechatopen/wxa.modify_domain.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.modify_domain_directly.go b/service/wechatopen/wxa.modify_domain_directly.go similarity index 98% rename from wechatopen/wxa.modify_domain_directly.go rename to service/wechatopen/wxa.modify_domain_directly.go index a41cf35c..b281f58d 100644 --- a/wechatopen/wxa.modify_domain_directly.go +++ b/service/wechatopen/wxa.modify_domain_directly.go @@ -3,7 +3,7 @@ package wechatopen 
import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.release.go b/service/wechatopen/wxa.release.go similarity index 97% rename from wechatopen/wxa.release.go rename to service/wechatopen/wxa.release.go index 7a4a5759..6995ac8d 100644 --- a/wechatopen/wxa.release.go +++ b/service/wechatopen/wxa.release.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.revertcoderelease.go b/service/wechatopen/wxa.revertcoderelease.go similarity index 100% rename from wechatopen/wxa.revertcoderelease.go rename to service/wechatopen/wxa.revertcoderelease.go diff --git a/wechatopen/wxa.security.apply_privacy_interface.go b/service/wechatopen/wxa.security.apply_privacy_interface.go similarity index 98% rename from wechatopen/wxa.security.apply_privacy_interface.go rename to service/wechatopen/wxa.security.apply_privacy_interface.go index b75bd14c..bcd1f716 100644 --- a/wechatopen/wxa.security.apply_privacy_interface.go +++ b/service/wechatopen/wxa.security.apply_privacy_interface.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.security.get_privacy_interface.go b/service/wechatopen/wxa.security.get_privacy_interface.go similarity index 98% rename from wechatopen/wxa.security.get_privacy_interface.go rename to service/wechatopen/wxa.security.get_privacy_interface.go index 13e17ef7..6798bfe7 100644 --- a/wechatopen/wxa.security.get_privacy_interface.go +++ b/service/wechatopen/wxa.security.get_privacy_interface.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.submit_audit.go b/service/wechatopen/wxa.submit_audit.go similarity index 96% rename from wechatopen/wxa.submit_audit.go rename to service/wechatopen/wxa.submit_audit.go index e10e6c09..5323cf75 100644 --- a/wechatopen/wxa.submit_audit.go +++ b/service/wechatopen/wxa.submit_audit.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.unbind_tester.go b/service/wechatopen/wxa.unbind_tester.go similarity index 97% rename from wechatopen/wxa.unbind_tester.go rename to service/wechatopen/wxa.unbind_tester.go index 3db3b4ff..0871ef8c 100644 --- a/wechatopen/wxa.unbind_tester.go +++ b/service/wechatopen/wxa.unbind_tester.go @@ -3,7 +3,7 @@ package wechatopen import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatopen/wxa.undocodeaudit.go b/service/wechatopen/wxa.undocodeaudit.go similarity index 100% rename from wechatopen/wxa.undocodeaudit.go rename to service/wechatopen/wxa.undocodeaudit.go diff --git a/wechatopen/xml.go b/service/wechatopen/xml.go similarity index 100% rename from wechatopen/xml.go rename to service/wechatopen/xml.go diff --git a/wechatpayapiv2/app.go b/service/wechatpayapiv2/app.go similarity index 94% rename from wechatpayapiv2/app.go rename to service/wechatpayapiv2/app.go index 2ec98177..6f046743 100644 --- a/wechatpayapiv2/app.go +++ b/service/wechatpayapiv2/app.go @@ -2,9 +2,9 @@ package wechatpayapiv2 import ( "crypto/tls" - 
"go.dtapp.net/library/golog" - "go.dtapp.net/library/gomongo" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gomongo" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" ) diff --git a/wechatpayapiv2/mmpaymkttransfers.gettransferinfo.go b/service/wechatpayapiv2/mmpaymkttransfers.gettransferinfo.go similarity index 97% rename from wechatpayapiv2/mmpaymkttransfers.gettransferinfo.go rename to service/wechatpayapiv2/mmpaymkttransfers.gettransferinfo.go index ebe17900..d3dea9d3 100644 --- a/wechatpayapiv2/mmpaymkttransfers.gettransferinfo.go +++ b/service/wechatpayapiv2/mmpaymkttransfers.gettransferinfo.go @@ -2,8 +2,8 @@ package wechatpayapiv2 import ( "encoding/xml" - "go.dtapp.net/library/gorandom" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorandom" + "go.dtapp.net/library/utils/gorequest" ) type TransfersQueryResponse struct { diff --git a/wechatpayapiv2/mmpaymkttransfers.promotion.transfers.go b/service/wechatpayapiv2/mmpaymkttransfers.promotion.transfers.go similarity index 97% rename from wechatpayapiv2/mmpaymkttransfers.promotion.transfers.go rename to service/wechatpayapiv2/mmpaymkttransfers.promotion.transfers.go index 13bb84f5..f3e0b3dc 100644 --- a/wechatpayapiv2/mmpaymkttransfers.promotion.transfers.go +++ b/service/wechatpayapiv2/mmpaymkttransfers.promotion.transfers.go @@ -2,8 +2,8 @@ package wechatpayapiv2 import ( "encoding/xml" - "go.dtapp.net/library/gorandom" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorandom" + "go.dtapp.net/library/utils/gorequest" ) type TransfersResponse struct { diff --git a/kuaishou/mongo.go b/service/wechatpayapiv2/mongodb.go similarity index 59% rename from kuaishou/mongo.go rename to service/wechatpayapiv2/mongodb.go index 4e5acdfe..caaa771e 100644 --- a/kuaishou/mongo.go +++ b/service/wechatpayapiv2/mongodb.go @@ -1,25 +1,26 @@ -package kuaishou +package wechatpayapiv2 import ( - gomongo2 "go.dtapp.net/library/gomongo" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gomongo" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) // 日志 type mongoZap struct { - RequestTime gomongo2.BsonTime `json:"request_time" bson:"request_time"` //【请求】时间 + RequestTime gomongo.BsonTime `json:"request_time" bson:"request_time"` //【请求】时间 RequestUri string `json:"request_uri" bson:"request_uri"` //【请求】链接 RequestUrl string `json:"request_url" bson:"request_url"` //【请求】链接 RequestApi string `json:"request_api" bson:"request_api"` //【请求】接口 RequestMethod string `json:"request_method" bson:"request_method"` //【请求】方式 - RequestParams gorequest.Params `json:"request_params" bson:"request_params"` //【请求】参数 - RequestHeader gorequest.Headers `json:"request_header" bson:"request_header"` //【请求】头部 + RequestParams gorequest2.Params `json:"request_params" bson:"request_params"` //【请求】参数 + RequestHeader gorequest2.Headers `json:"request_header" bson:"request_header"` //【请求】头部 ResponseHeader http.Header `json:"response_header" bson:"response_header"` //【返回】头部 ResponseStatusCode int `json:"response_status_code" bson:"response_status_code"` //【返回】状态码 ResponseBody map[string]interface{} `json:"response_body" bson:"response_body"` //【返回】内容 + ResponseXml string `json:"response_xml" bson:"response_xml"` //【返回】内容 ResponseContentLength int64 `json:"response_content_length" bson:"response_content_length"` //【返回】大小 - ResponseTime gomongo2.BsonTime `json:"response_time" bson:"response_time"` //【返回】时间 + ResponseTime gomongo.BsonTime `json:"response_time" 
bson:"response_time"` //【返回】时间 } func (m *mongoZap) Database() string { @@ -27,22 +28,23 @@ func (m *mongoZap) Database() string { } func (m *mongoZap) TableName() string { - return "kuaishou" + return "wechatpayapiv2" } -func (app *App) mongoLog(request gorequest.Response) { +func (app *App) mongoLog(request gorequest2.Response) { _, _ = app.mongo.Model(&mongoZap{}).InsertOne(mongoZap{ - RequestTime: gomongo2.BsonTime(request.RequestTime), //【请求】时间 - RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 - RequestMethod: request.RequestMethod, //【请求】方式 - RequestParams: request.RequestParams, //【请求】参数 - RequestHeader: request.RequestHeader, //【请求】头部 - ResponseHeader: request.ResponseHeader, //【返回】头部 - ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 - ResponseBody: gomongo2.JsonDecodeNoError(request.ResponseBody), //【返回】内容 - ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: gomongo2.BsonTime(request.ResponseTime), //【返回】时间 + RequestTime: gomongo.BsonTime(request.RequestTime), //【请求】时间 + RequestUri: request.RequestUri, //【请求】链接 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 + RequestMethod: request.RequestMethod, //【请求】方式 + RequestParams: request.RequestParams, //【请求】参数 + RequestHeader: request.RequestHeader, //【请求】头部 + ResponseHeader: request.ResponseHeader, //【返回】头部 + ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 + ResponseBody: gomongo.XmlDecodeNoError(request.ResponseBody), //【返回】内容 + ResponseXml: string(request.ResponseBody), //【返回】内容 + ResponseContentLength: request.ResponseContentLength, //【返回】大小 + ResponseTime: gomongo.BsonTime(request.ResponseTime), //【返回】时间 }) } diff --git a/wechatpayapiv2/params.go b/service/wechatpayapiv2/params.go similarity index 100% rename from wechatpayapiv2/params.go rename to service/wechatpayapiv2/params.go diff --git a/wechatpayapiv2/pgsql.go b/service/wechatpayapiv2/pgsql.go similarity index 60% rename from wechatpayapiv2/pgsql.go rename to service/wechatpayapiv2/pgsql.go index 6b6f45c7..cbe23bd2 100644 --- a/wechatpayapiv2/pgsql.go +++ b/service/wechatpayapiv2/pgsql.go @@ -1,20 +1,20 @@ package wechatpayapiv2 import ( - "go.dtapp.net/library/gojson" - golog "go.dtapp.net/library/golog" - "go.dtapp.net/library/gomongo" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gomongo" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(request gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: 
datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 @@ -22,6 +22,6 @@ func (app *App) postgresqlLog(request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: datatypes.JSON(gojson.JsonEncodeNoError(gomongo.XmlDecodeNoError(request.ResponseBody))), //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/wechatpayapiv2/sign.go b/service/wechatpayapiv2/sign.go similarity index 96% rename from wechatpayapiv2/sign.go rename to service/wechatpayapiv2/sign.go index da3de488..0fa69369 100644 --- a/wechatpayapiv2/sign.go +++ b/service/wechatpayapiv2/sign.go @@ -3,7 +3,7 @@ package wechatpayapiv2 import ( "bytes" "fmt" - "go.dtapp.net/library/gomd5" + "go.dtapp.net/library/utils/gomd5" "sort" "strings" ) diff --git a/wechatpayapiv3/app.go b/service/wechatpayapiv3/app.go similarity index 95% rename from wechatpayapiv3/app.go rename to service/wechatpayapiv3/app.go index 117bbb4b..ce23aeda 100644 --- a/wechatpayapiv3/app.go +++ b/service/wechatpayapiv3/app.go @@ -1,9 +1,9 @@ package wechatpayapiv3 import ( - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gomongo" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gomongo" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" "net/http" ) diff --git a/wechatpayapiv3/get.go b/service/wechatpayapiv3/get.go similarity index 100% rename from wechatpayapiv3/get.go rename to service/wechatpayapiv3/get.go diff --git a/wechatpayapiv3/merchant-service.complaints-v2.go b/service/wechatpayapiv3/merchant-service.complaints-v2.go similarity index 98% rename from wechatpayapiv3/merchant-service.complaints-v2.go rename to service/wechatpayapiv3/merchant-service.complaints-v2.go index d48810d2..cfbb40d1 100644 --- a/wechatpayapiv3/merchant-service.complaints-v2.go +++ b/service/wechatpayapiv3/merchant-service.complaints-v2.go @@ -2,7 +2,7 @@ package wechatpayapiv3 import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatpayapiv3/mongo.go b/service/wechatpayapiv3/mongo.go similarity index 64% rename from wechatpayapiv3/mongo.go rename to service/wechatpayapiv3/mongo.go index 1afa90bd..ca8b6404 100644 --- a/wechatpayapiv3/mongo.go +++ b/service/wechatpayapiv3/mongo.go @@ -1,25 +1,25 @@ package wechatpayapiv3 import ( - gomongo2 "go.dtapp.net/library/gomongo" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gomongo" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) // 日志 type mongoZap struct { - RequestTime gomongo2.BsonTime `json:"request_time" bson:"request_time"` //【请求】时间 + RequestTime gomongo.BsonTime `json:"request_time" bson:"request_time"` //【请求】时间 RequestUri string `json:"request_uri" bson:"request_uri"` //【请求】链接 RequestUrl string `json:"request_url" bson:"request_url"` //【请求】链接 RequestApi string `json:"request_api" bson:"request_api"` //【请求】接口 RequestMethod string `json:"request_method" bson:"request_method"` //【请求】方式 - RequestParams gorequest.Params `json:"request_params" bson:"request_params"` //【请求】参数 - RequestHeader gorequest.Headers `json:"request_header" bson:"request_header"` //【请求】头部 + RequestParams gorequest2.Params `json:"request_params" bson:"request_params"` //【请求】参数 + RequestHeader gorequest2.Headers `json:"request_header" 
bson:"request_header"` //【请求】头部 ResponseHeader http.Header `json:"response_header" bson:"response_header"` //【返回】头部 ResponseStatusCode int `json:"response_status_code" bson:"response_status_code"` //【返回】状态码 ResponseBody map[string]interface{} `json:"response_body" bson:"response_body"` //【返回】内容 ResponseContentLength int64 `json:"response_content_length" bson:"response_content_length"` //【返回】大小 - ResponseTime gomongo2.BsonTime `json:"response_time" bson:"response_time"` //【返回】时间 + ResponseTime gomongo.BsonTime `json:"response_time" bson:"response_time"` //【返回】时间 } func (m *mongoZap) Database() string { @@ -30,19 +30,19 @@ func (m *mongoZap) TableName() string { return "wechatpayapiv3" } -func (app *App) mongoLog(request gorequest.Response) { +func (app *App) mongoLog(request gorequest2.Response) { _, _ = app.mongo.Model(&mongoZap{}).InsertOne(mongoZap{ - RequestTime: gomongo2.BsonTime(request.RequestTime), //【请求】时间 - RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 - RequestMethod: request.RequestMethod, //【请求】方式 - RequestParams: request.RequestParams, //【请求】参数 - RequestHeader: request.RequestHeader, //【请求】头部 - ResponseHeader: request.ResponseHeader, //【返回】头部 - ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 - ResponseBody: gomongo2.JsonDecodeNoError(request.ResponseBody), //【返回】内容 - ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: gomongo2.BsonTime(request.ResponseTime), //【返回】时间 + RequestTime: gomongo.BsonTime(request.RequestTime), //【请求】时间 + RequestUri: request.RequestUri, //【请求】链接 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 + RequestMethod: request.RequestMethod, //【请求】方式 + RequestParams: request.RequestParams, //【请求】参数 + RequestHeader: request.RequestHeader, //【请求】头部 + ResponseHeader: request.ResponseHeader, //【返回】头部 + ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 + ResponseBody: gomongo.JsonDecodeNoError(request.ResponseBody), //【返回】内容 + ResponseContentLength: request.ResponseContentLength, //【返回】大小 + ResponseTime: gomongo.BsonTime(request.ResponseTime), //【返回】时间 }) } diff --git a/wechatpayapiv3/params.go b/service/wechatpayapiv3/params.go similarity index 100% rename from wechatpayapiv3/params.go rename to service/wechatpayapiv3/params.go diff --git a/wechatpayapiv3/pay.jsapi.go b/service/wechatpayapiv3/pay.jsapi.go similarity index 96% rename from wechatpayapiv3/pay.jsapi.go rename to service/wechatpayapiv3/pay.jsapi.go index ebd6ec95..1d09c1f2 100644 --- a/wechatpayapiv3/pay.jsapi.go +++ b/service/wechatpayapiv3/pay.jsapi.go @@ -2,7 +2,7 @@ package wechatpayapiv3 import ( "fmt" - "go.dtapp.net/library/gorandom" + "go.dtapp.net/library/utils/gorandom" "time" ) diff --git a/wechatpayapiv3/pay.transactions.id.go b/service/wechatpayapiv3/pay.transactions.id.go similarity index 98% rename from wechatpayapiv3/pay.transactions.id.go rename to service/wechatpayapiv3/pay.transactions.id.go index 56a83fa4..944dd3c8 100644 --- a/wechatpayapiv3/pay.transactions.id.go +++ b/service/wechatpayapiv3/pay.transactions.id.go @@ -3,7 +3,7 @@ package wechatpayapiv3 import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatpayapiv3/pay.transactions.jsapi.go b/service/wechatpayapiv3/pay.transactions.jsapi.go similarity index 97% rename from 
wechatpayapiv3/pay.transactions.jsapi.go rename to service/wechatpayapiv3/pay.transactions.jsapi.go index b5034430..abea9bb8 100644 --- a/wechatpayapiv3/pay.transactions.jsapi.go +++ b/service/wechatpayapiv3/pay.transactions.jsapi.go @@ -2,7 +2,7 @@ package wechatpayapiv3 import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatpayapiv3/pay.transactions.out-trade-no.close.go b/service/wechatpayapiv3/pay.transactions.out-trade-no.close.go similarity index 96% rename from wechatpayapiv3/pay.transactions.out-trade-no.close.go rename to service/wechatpayapiv3/pay.transactions.out-trade-no.close.go index 5fc9fc8c..efb0c9c0 100644 --- a/wechatpayapiv3/pay.transactions.out-trade-no.close.go +++ b/service/wechatpayapiv3/pay.transactions.out-trade-no.close.go @@ -2,7 +2,7 @@ package wechatpayapiv3 import ( "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatpayapiv3/pay.transactions.out-trade-no.go b/service/wechatpayapiv3/pay.transactions.out-trade-no.go similarity index 98% rename from wechatpayapiv3/pay.transactions.out-trade-no.go rename to service/wechatpayapiv3/pay.transactions.out-trade-no.go index edcb39a0..ecf716a2 100644 --- a/wechatpayapiv3/pay.transactions.out-trade-no.go +++ b/service/wechatpayapiv3/pay.transactions.out-trade-no.go @@ -3,7 +3,7 @@ package wechatpayapiv3 import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatpayapiv3/pgsql.go b/service/wechatpayapiv3/pgsql.go similarity index 58% rename from wechatpayapiv3/pgsql.go rename to service/wechatpayapiv3/pgsql.go index 61876f65..3afa2d14 100644 --- a/wechatpayapiv3/pgsql.go +++ b/service/wechatpayapiv3/pgsql.go @@ -1,19 +1,19 @@ package wechatpayapiv3 import ( - "go.dtapp.net/library/gojson" - golog "go.dtapp.net/library/golog" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(request gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 @@ -21,6 +21,6 @@ func (app *App) postgresqlLog(request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/wechatpayapiv3/refund.domestic.refunds.go b/service/wechatpayapiv3/refund.domestic.refunds.go similarity index 98% 
rename from wechatpayapiv3/refund.domestic.refunds.go rename to service/wechatpayapiv3/refund.domestic.refunds.go index 1d9ffba2..14f6de31 100644 --- a/wechatpayapiv3/refund.domestic.refunds.go +++ b/service/wechatpayapiv3/refund.domestic.refunds.go @@ -2,7 +2,7 @@ package wechatpayapiv3 import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatpayapiv3/refund.domestic.refunds.out_refund_no.go b/service/wechatpayapiv3/refund.domestic.refunds.out_refund_no.go similarity index 99% rename from wechatpayapiv3/refund.domestic.refunds.out_refund_no.go rename to service/wechatpayapiv3/refund.domestic.refunds.out_refund_no.go index 0a6cd4c5..c1c7c8d2 100644 --- a/wechatpayapiv3/refund.domestic.refunds.out_refund_no.go +++ b/service/wechatpayapiv3/refund.domestic.refunds.out_refund_no.go @@ -3,7 +3,7 @@ package wechatpayapiv3 import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatpayapiv3/set.go b/service/wechatpayapiv3/set.go similarity index 100% rename from wechatpayapiv3/set.go rename to service/wechatpayapiv3/set.go diff --git a/wechatpayapiv3/sign.go b/service/wechatpayapiv3/sign.go similarity index 98% rename from wechatpayapiv3/sign.go rename to service/wechatpayapiv3/sign.go index c62f8190..5aa8c090 100644 --- a/wechatpayapiv3/sign.go +++ b/service/wechatpayapiv3/sign.go @@ -13,7 +13,7 @@ import ( "encoding/pem" "errors" "fmt" - "go.dtapp.net/library/gorandom" + "go.dtapp.net/library/utils/gorandom" "net/url" "time" ) diff --git a/wechatpayopen/app.go b/service/wechatpayopen/app.go similarity index 96% rename from wechatpayopen/app.go rename to service/wechatpayopen/app.go index 1dc69ca9..fa860a00 100644 --- a/wechatpayopen/app.go +++ b/service/wechatpayopen/app.go @@ -1,8 +1,8 @@ package wechatpayopen import ( - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" ) diff --git a/wechatpayopen/certificates.go b/service/wechatpayopen/certificates.go similarity index 97% rename from wechatpayopen/certificates.go rename to service/wechatpayopen/certificates.go index b120c0ad..305a852b 100644 --- a/wechatpayopen/certificates.go +++ b/service/wechatpayopen/certificates.go @@ -2,7 +2,7 @@ package wechatpayopen import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" "time" ) diff --git a/wechatpayopen/const.go b/service/wechatpayopen/const.go similarity index 100% rename from wechatpayopen/const.go rename to service/wechatpayopen/const.go diff --git a/wechatpayopen/pay.jsapi.go b/service/wechatpayopen/pay.jsapi.go similarity index 97% rename from wechatpayopen/pay.jsapi.go rename to service/wechatpayopen/pay.jsapi.go index 35de352d..b0daf57d 100644 --- a/wechatpayopen/pay.jsapi.go +++ b/service/wechatpayopen/pay.jsapi.go @@ -2,7 +2,7 @@ package wechatpayopen import ( "fmt" - "go.dtapp.net/library/gorandom" + "go.dtapp.net/library/utils/gorandom" "time" ) diff --git a/wechatpayopen/pay.partner.transactions.id.go b/service/wechatpayopen/pay.partner.transactions.id.go similarity index 92% rename from wechatpayopen/pay.partner.transactions.id.go rename to service/wechatpayopen/pay.partner.transactions.id.go index d1213ec4..db7d8f4f 100644 --- a/wechatpayopen/pay.partner.transactions.id.go +++ b/service/wechatpayopen/pay.partner.transactions.id.go @@ -3,7 +3,7 @@ package wechatpayopen 
import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) @@ -54,11 +54,11 @@ type PayPartnerTransactionsIdResponse struct { type PayPartnerTransactionsIdResult struct { Result PayPartnerTransactionsIdResponse // 结果 Body []byte // 内容 - Http gorequest.Response // 请求 + Http gorequest2.Response // 请求 Err error // 错误 } -func NewPayPartnerTransactionsIdResult(result PayPartnerTransactionsIdResponse, body []byte, http gorequest.Response, err error) *PayPartnerTransactionsIdResult { +func NewPayPartnerTransactionsIdResult(result PayPartnerTransactionsIdResponse, body []byte, http gorequest2.Response, err error) *PayPartnerTransactionsIdResult { return &PayPartnerTransactionsIdResult{Result: result, Body: body, Http: http, Err: err} } @@ -66,7 +66,7 @@ func NewPayPartnerTransactionsIdResult(result PayPartnerTransactionsIdResponse, // https://pay.weixin.qq.com/wiki/doc/apiv3_partner/apis/chapter4_5_2.shtml func (app *App) PayPartnerTransactionsId(transactionId string) *PayPartnerTransactionsIdResult { // 参数 - params := gorequest.NewParams() + params := gorequest2.NewParams() // 请求 // 请求 request, err := app.request(fmt.Sprintf("https://api.mch.weixin.qq.com/v3/pay/partner/transactions/id/%s?sp_mchid=%s&sub_mchid=%s", transactionId, app.spMchId, app.subMchId), params, http.MethodGet) diff --git a/wechatpayopen/pay.partner.transactions.jsapi.go b/service/wechatpayopen/pay.partner.transactions.jsapi.go similarity index 81% rename from wechatpayopen/pay.partner.transactions.jsapi.go rename to service/wechatpayopen/pay.partner.transactions.jsapi.go index 94b4905e..6d7b356d 100644 --- a/wechatpayopen/pay.partner.transactions.jsapi.go +++ b/service/wechatpayopen/pay.partner.transactions.jsapi.go @@ -2,7 +2,7 @@ package wechatpayopen import ( "encoding/json" - gorequest "go.dtapp.net/library/gorequest" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) @@ -13,19 +13,19 @@ type PayPartnerTransactionsJsapiResponse struct { type PayPartnerTransactionsJsapiResult struct { Result PayPartnerTransactionsJsapiResponse // 结果 Body []byte // 内容 - Http gorequest.Response // 请求 + Http gorequest2.Response // 请求 Err error // 错误 } -func NewPayPartnerTransactionsJsapiResult(result PayPartnerTransactionsJsapiResponse, body []byte, http gorequest.Response, err error) *PayPartnerTransactionsJsapiResult { +func NewPayPartnerTransactionsJsapiResult(result PayPartnerTransactionsJsapiResponse, body []byte, http gorequest2.Response, err error) *PayPartnerTransactionsJsapiResult { return &PayPartnerTransactionsJsapiResult{Result: result, Body: body, Http: http, Err: err} } // PayPartnerTransactionsJsapi JSAPI下单 // https://pay.weixin.qq.com/wiki/doc/apiv3_partner/apis/chapter4_5_1.shtml -func (app *App) PayPartnerTransactionsJsapi(notMustParams ...gorequest.Params) *PayPartnerTransactionsJsapiResult { +func (app *App) PayPartnerTransactionsJsapi(notMustParams ...gorequest2.Params) *PayPartnerTransactionsJsapiResult { // 参数 - params := gorequest.NewParamsWith(notMustParams...) + params := gorequest2.NewParamsWith(notMustParams...) 
params.Set("sp_appid", app.spAppid) // 服务商应用ID params.Set("sp_mchid", app.spMchId) // 服务商户号 params.Set("sub_appid", app.subAppid) // 子商户应用ID diff --git a/wechatpayopen/pay.partner.transactions.out-trade-no.close.go b/service/wechatpayopen/pay.partner.transactions.out-trade-no.close.go similarity index 78% rename from wechatpayopen/pay.partner.transactions.out-trade-no.close.go rename to service/wechatpayopen/pay.partner.transactions.out-trade-no.close.go index c001ada5..47e23c19 100644 --- a/wechatpayopen/pay.partner.transactions.out-trade-no.close.go +++ b/service/wechatpayopen/pay.partner.transactions.out-trade-no.close.go @@ -2,17 +2,17 @@ package wechatpayopen import ( "fmt" - "go.dtapp.net/library/gorequest" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) type PayPartnerTransactionsOutTradeNoCloseResult struct { - Body []byte // 内容 - Http gorequest.Response // 请求 - Err error // 错误 + Body []byte // 内容 + Http gorequest2.Response // 请求 + Err error // 错误 } -func NewPayPartnerTransactionsOutTradeNoCloseResult(body []byte, http gorequest.Response, err error) *PayPartnerTransactionsOutTradeNoCloseResult { +func NewPayPartnerTransactionsOutTradeNoCloseResult(body []byte, http gorequest2.Response, err error) *PayPartnerTransactionsOutTradeNoCloseResult { return &PayPartnerTransactionsOutTradeNoCloseResult{Body: body, Http: http, Err: err} } @@ -20,7 +20,7 @@ func NewPayPartnerTransactionsOutTradeNoCloseResult(body []byte, http gorequest. // https://pay.weixin.qq.com/wiki/doc/apiv3_partner/apis/chapter4_5_3.shtml func (app *App) PayPartnerTransactionsOutTradeNoClose(outTradeNo string) *PayPartnerTransactionsOutTradeNoCloseResult { // 参数 - params := gorequest.NewParams() + params := gorequest2.NewParams() params.Set("sp_mchid", app.spMchId) // 服务商户号 params.Set("sub_mchid", app.subMchId) // 子商户号 // 请求 diff --git a/wechatpayopen/pay.partner.transactions.out-trade-no.go b/service/wechatpayopen/pay.partner.transactions.out-trade-no.go similarity index 93% rename from wechatpayopen/pay.partner.transactions.out-trade-no.go rename to service/wechatpayopen/pay.partner.transactions.out-trade-no.go index b49434dd..aaf6f2b5 100644 --- a/wechatpayopen/pay.partner.transactions.out-trade-no.go +++ b/service/wechatpayopen/pay.partner.transactions.out-trade-no.go @@ -3,7 +3,7 @@ package wechatpayopen import ( "encoding/json" "fmt" - gorequest "go.dtapp.net/library/gorequest" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" ) @@ -57,11 +57,11 @@ type PayPartnerTransactionsOutTradeNoResponse struct { type PayPartnerTransactionsOutTradeNoResult struct { Result PayPartnerTransactionsOutTradeNoResponse // 结果 Body []byte // 内容 - Http gorequest.Response // 请求 + Http gorequest2.Response // 请求 Err error // 错误 } -func NewPayPartnerTransactionsOutTradeNoResult(result PayPartnerTransactionsOutTradeNoResponse, body []byte, http gorequest.Response, err error) *PayPartnerTransactionsOutTradeNoResult { +func NewPayPartnerTransactionsOutTradeNoResult(result PayPartnerTransactionsOutTradeNoResponse, body []byte, http gorequest2.Response, err error) *PayPartnerTransactionsOutTradeNoResult { return &PayPartnerTransactionsOutTradeNoResult{Result: result, Body: body, Http: http, Err: err} } @@ -69,7 +69,7 @@ func NewPayPartnerTransactionsOutTradeNoResult(result PayPartnerTransactionsOutT // https://pay.weixin.qq.com/wiki/doc/apiv3_partner/apis/chapter4_5_2.shtml func (app *App) PayPartnerTransactionsOutTradeNo(outTradeNo string) *PayPartnerTransactionsOutTradeNoResult { // 参数 - params := 
gorequest.NewParams() + params := gorequest2.NewParams() // 请求 request, err := app.request(fmt.Sprintf("https://api.mch.weixin.qq.com/v3/pay/partner/transactions/out-trade-no/%s?sp_mchid=%s&sub_mchid=%s", outTradeNo, app.spMchId, app.subMchId), params, http.MethodGet) if err != nil { diff --git a/wechatpayopen/pgsql.go b/service/wechatpayopen/pgsql.go similarity index 58% rename from wechatpayopen/pgsql.go rename to service/wechatpayopen/pgsql.go index 5d167b59..79c36557 100644 --- a/wechatpayopen/pgsql.go +++ b/service/wechatpayopen/pgsql.go @@ -1,19 +1,19 @@ package wechatpayopen import ( - "go.dtapp.net/library/gojson" - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(request gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 @@ -21,6 +21,6 @@ func (app *App) postgresqlLog(request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/wechatpayopen/refund.domestic.refunds.go b/service/wechatpayopen/refund.domestic.refunds.go similarity index 90% rename from wechatpayopen/refund.domestic.refunds.go rename to service/wechatpayopen/refund.domestic.refunds.go index cabb4ffe..a16b3778 100644 --- a/wechatpayopen/refund.domestic.refunds.go +++ b/service/wechatpayopen/refund.domestic.refunds.go @@ -2,7 +2,7 @@ package wechatpayopen import ( "encoding/json" - "go.dtapp.net/library/gorequest" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" "time" ) @@ -52,19 +52,19 @@ type RefundDomesticRefundsResponse struct { type RefundDomesticRefundsResult struct { Result RefundDomesticRefundsResponse // 结果 Body []byte // 内容 - Http gorequest.Response // 请求 + Http gorequest2.Response // 请求 Err error // 错误 } -func NewRefundDomesticRefundsResult(result RefundDomesticRefundsResponse, body []byte, http gorequest.Response, err error) *RefundDomesticRefundsResult { +func NewRefundDomesticRefundsResult(result RefundDomesticRefundsResponse, body []byte, http gorequest2.Response, err error) *RefundDomesticRefundsResult { return &RefundDomesticRefundsResult{Result: result, Body: body, Http: http, Err: err} } // RefundDomesticRefunds 申请退款API // https://pay.weixin.qq.com/wiki/doc/apiv3_partner/apis/chapter4_5_9.shtml -func (app *App) RefundDomesticRefunds(notMustParams ...gorequest.Params) *RefundDomesticRefundsResult { +func (app 
*App) RefundDomesticRefunds(notMustParams ...gorequest2.Params) *RefundDomesticRefundsResult { // 参数 - params := gorequest.NewParamsWith(notMustParams...) + params := gorequest2.NewParamsWith(notMustParams...) params.Set("sub_mchid", app.subMchId) // 子商户号 // 请求 request, err := app.request("https://api.mch.weixin.qq.com/v3/refund/domestic/refunds", params, http.MethodPost) diff --git a/wechatpayopen/refund.domestic.refunds.out_refund_no.go b/service/wechatpayopen/refund.domestic.refunds.out_refund_no.go similarity index 91% rename from wechatpayopen/refund.domestic.refunds.out_refund_no.go rename to service/wechatpayopen/refund.domestic.refunds.out_refund_no.go index ab5e5b80..34d9740d 100644 --- a/wechatpayopen/refund.domestic.refunds.out_refund_no.go +++ b/service/wechatpayopen/refund.domestic.refunds.out_refund_no.go @@ -2,7 +2,7 @@ package wechatpayopen import ( "encoding/json" - "go.dtapp.net/library/gorequest" + gorequest2 "go.dtapp.net/library/utils/gorequest" "net/http" "time" ) @@ -52,11 +52,11 @@ type RefundDomesticRefundsOutRefundNoResponse struct { type RefundDomesticRefundsOutRefundNoResult struct { Result RefundDomesticRefundsOutRefundNoResponse // 结果 Body []byte // 内容 - Http gorequest.Response // 请求 + Http gorequest2.Response // 请求 Err error // 错误 } -func NewRefundDomesticRefundsOutRefundNoResult(result RefundDomesticRefundsOutRefundNoResponse, body []byte, http gorequest.Response, err error) *RefundDomesticRefundsOutRefundNoResult { +func NewRefundDomesticRefundsOutRefundNoResult(result RefundDomesticRefundsOutRefundNoResponse, body []byte, http gorequest2.Response, err error) *RefundDomesticRefundsOutRefundNoResult { return &RefundDomesticRefundsOutRefundNoResult{Result: result, Body: body, Http: http, Err: err} } @@ -64,7 +64,7 @@ func NewRefundDomesticRefundsOutRefundNoResult(result RefundDomesticRefundsOutRe // https://pay.weixin.qq.com/wiki/doc/apiv3_partner/apis/chapter4_5_9.shtml func (app *App) RefundDomesticRefundsOutRefundNo(outRefundNo string) *RefundDomesticRefundsOutRefundNoResult { // 参数 - params := gorequest.NewParams() + params := gorequest2.NewParams() // 请求 request, err := app.request("https://api.mch.weixin.qq.com/v3/refund/domestic/refunds/"+outRefundNo+"?sub_mchid="+app.subMchId, params, http.MethodGet) if err != nil { diff --git a/wechatpayopen/sign.go b/service/wechatpayopen/sign.go similarity index 98% rename from wechatpayopen/sign.go rename to service/wechatpayopen/sign.go index 74037f9f..0cea68ad 100644 --- a/wechatpayopen/sign.go +++ b/service/wechatpayopen/sign.go @@ -13,7 +13,7 @@ import ( "encoding/pem" "errors" "fmt" - "go.dtapp.net/library/gorandom" + "go.dtapp.net/library/utils/gorandom" "net/url" "time" ) diff --git a/wechatqy/params.go b/service/wechatqy/params.go similarity index 100% rename from wechatqy/params.go rename to service/wechatqy/params.go diff --git a/wechatqy/pgsql.go b/service/wechatqy/pgsql.go similarity index 58% rename from wechatqy/pgsql.go rename to service/wechatqy/pgsql.go index cc7ee266..928a26d5 100644 --- a/wechatqy/pgsql.go +++ b/service/wechatqy/pgsql.go @@ -1,19 +1,19 @@ package wechatqy import ( - "go.dtapp.net/library/gojson" - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: 
request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(request gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 @@ -21,6 +21,6 @@ func (app *App) postgresqlLog(request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/wechatqy/webhook.send.go b/service/wechatqy/webhook.send.go similarity index 77% rename from wechatqy/webhook.send.go rename to service/wechatqy/webhook.send.go index 16096ffa..6a4257ca 100644 --- a/wechatqy/webhook.send.go +++ b/service/wechatqy/webhook.send.go @@ -3,7 +3,7 @@ package wechatqy import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + gorequest2 "go.dtapp.net/library/utils/gorequest" ) type WebhookSendResponse struct { @@ -17,19 +17,19 @@ type WebhookSendResponse struct { type WebhookSendResult struct { Result WebhookSendResponse // 结果 Body []byte // 内容 - Http gorequest.Response // 请求 + Http gorequest2.Response // 请求 Err error // 错误 } -func NewWebhookSendResult(result WebhookSendResponse, body []byte, http gorequest.Response, err error) *WebhookSendResult { +func NewWebhookSendResult(result WebhookSendResponse, body []byte, http gorequest2.Response, err error) *WebhookSendResult { return &WebhookSendResult{Result: result, Body: body, Http: http, Err: err} } // WebhookSend 发送应用消息 // https://developer.work.weixin.qq.com/document/path/90372 -func (app *App) WebhookSend(notMustParams ...gorequest.Params) *WebhookSendResult { +func (app *App) WebhookSend(notMustParams ...gorequest2.Params) *WebhookSendResult { // 参数 - params := gorequest.NewParamsWith(notMustParams...) + params := gorequest2.NewParamsWith(notMustParams...) 
// 请求 request, err := app.request(fmt.Sprintf("https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=%s&type=%s", app.key, "text"), params) // 定义 diff --git a/wechatqy/wechatqy.go b/service/wechatqy/wechatqy.go similarity index 93% rename from wechatqy/wechatqy.go rename to service/wechatqy/wechatqy.go index 8da3bde5..218ac66a 100644 --- a/wechatqy/wechatqy.go +++ b/service/wechatqy/wechatqy.go @@ -1,8 +1,8 @@ package wechatqy import ( - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" ) diff --git a/wechatunion/app.go b/service/wechatunion/app.go similarity index 93% rename from wechatunion/app.go rename to service/wechatunion/app.go index 2ffe4c64..3a8aeec0 100644 --- a/wechatunion/app.go +++ b/service/wechatunion/app.go @@ -1,9 +1,9 @@ package wechatunion import ( - "go.dtapp.net/library/golog" - "go.dtapp.net/library/goredis" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/goredis" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" ) diff --git a/wechatunion/cgi-bin.token.monitor.go b/service/wechatunion/cgi-bin.token.monitor.go similarity index 100% rename from wechatunion/cgi-bin.token.monitor.go rename to service/wechatunion/cgi-bin.token.monitor.go diff --git a/wechatunion/cgi-bin.token.rdb.go b/service/wechatunion/cgi-bin.token.rdb.go similarity index 100% rename from wechatunion/cgi-bin.token.rdb.go rename to service/wechatunion/cgi-bin.token.rdb.go diff --git a/wechatunion/cgi_bin.token.go b/service/wechatunion/cgi_bin.token.go similarity index 97% rename from wechatunion/cgi_bin.token.go rename to service/wechatunion/cgi_bin.token.go index 6cc67603..3fc67cf4 100644 --- a/wechatunion/cgi_bin.token.go +++ b/service/wechatunion/cgi_bin.token.go @@ -3,7 +3,7 @@ package wechatunion import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatunion/getcallbackip.go b/service/wechatunion/getcallbackip.go similarity index 96% rename from wechatunion/getcallbackip.go rename to service/wechatunion/getcallbackip.go index 4023a088..046336d5 100644 --- a/wechatunion/getcallbackip.go +++ b/service/wechatunion/getcallbackip.go @@ -3,7 +3,7 @@ package wechatunion import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type GetCallBackIpResponse struct { diff --git a/wechatunion/order.go b/service/wechatunion/order.go similarity index 100% rename from wechatunion/order.go rename to service/wechatunion/order.go diff --git a/wechatunion/params.go b/service/wechatunion/params.go similarity index 100% rename from wechatunion/params.go rename to service/wechatunion/params.go diff --git a/wechatunion/pgsql.go b/service/wechatunion/pgsql.go similarity index 58% rename from wechatunion/pgsql.go rename to service/wechatunion/pgsql.go index dd21be19..d11c9d4c 100644 --- a/wechatunion/pgsql.go +++ b/service/wechatunion/pgsql.go @@ -1,19 +1,19 @@ package wechatunion import ( - "go.dtapp.net/library/gojson" - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app 
*App) postgresqlLog(request gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 @@ -21,6 +21,6 @@ func (app *App) postgresqlLog(request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/wechatunion/promoter.order.info.go b/service/wechatunion/promoter.order.info.go similarity index 98% rename from wechatunion/promoter.order.info.go rename to service/wechatunion/promoter.order.info.go index d418cd9e..ab944e09 100644 --- a/wechatunion/promoter.order.info.go +++ b/service/wechatunion/promoter.order.info.go @@ -3,7 +3,7 @@ package wechatunion import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatunion/promoter.order.search.go b/service/wechatunion/promoter.order.search.go similarity index 99% rename from wechatunion/promoter.order.search.go rename to service/wechatunion/promoter.order.search.go index 50e893e9..79acf6fd 100644 --- a/wechatunion/promoter.order.search.go +++ b/service/wechatunion/promoter.order.search.go @@ -3,7 +3,7 @@ package wechatunion import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatunion/promoter.product.category.go b/service/wechatunion/promoter.product.category.go similarity index 97% rename from wechatunion/promoter.product.category.go rename to service/wechatunion/promoter.product.category.go index 231a6517..18ea2c67 100644 --- a/wechatunion/promoter.product.category.go +++ b/service/wechatunion/promoter.product.category.go @@ -3,7 +3,7 @@ package wechatunion import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatunion/promoter.product.generate.go b/service/wechatunion/promoter.product.generate.go similarity index 98% rename from wechatunion/promoter.product.generate.go rename to service/wechatunion/promoter.product.generate.go index a05ac34e..335a7fcb 100644 --- a/wechatunion/promoter.product.generate.go +++ b/service/wechatunion/promoter.product.generate.go @@ -3,7 +3,7 @@ package wechatunion import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatunion/promoter.product.list.go b/service/wechatunion/promoter.product.list.go similarity index 99% rename from wechatunion/promoter.product.list.go rename to service/wechatunion/promoter.product.list.go index 7fe66f58..ec4be461 100644 --- a/wechatunion/promoter.product.list.go +++ b/service/wechatunion/promoter.product.list.go @@ -3,7 +3,7 @@ package wechatunion import ( "encoding/json" 
"fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatunion/promoter.product.select.go b/service/wechatunion/promoter.product.select.go similarity index 99% rename from wechatunion/promoter.product.select.go rename to service/wechatunion/promoter.product.select.go index 5a06b430..037c7c26 100644 --- a/wechatunion/promoter.product.select.go +++ b/service/wechatunion/promoter.product.select.go @@ -3,7 +3,7 @@ package wechatunion import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatunion/promoter.promotion.add.go b/service/wechatunion/promoter.promotion.add.go similarity index 97% rename from wechatunion/promoter.promotion.add.go rename to service/wechatunion/promoter.promotion.add.go index 4c9f24e0..e3c83c93 100644 --- a/wechatunion/promoter.promotion.add.go +++ b/service/wechatunion/promoter.promotion.add.go @@ -3,7 +3,7 @@ package wechatunion import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatunion/promoter.promotion.del.go b/service/wechatunion/promoter.promotion.del.go similarity index 97% rename from wechatunion/promoter.promotion.del.go rename to service/wechatunion/promoter.promotion.del.go index f9100df5..04b512f9 100644 --- a/wechatunion/promoter.promotion.del.go +++ b/service/wechatunion/promoter.promotion.del.go @@ -3,7 +3,7 @@ package wechatunion import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatunion/promoter.promotion.list.go b/service/wechatunion/promoter.promotion.list.go similarity index 97% rename from wechatunion/promoter.promotion.list.go rename to service/wechatunion/promoter.promotion.list.go index 9eec83a2..db0a2d28 100644 --- a/wechatunion/promoter.promotion.list.go +++ b/service/wechatunion/promoter.promotion.list.go @@ -3,7 +3,7 @@ package wechatunion import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/wechatunion/promoter.promotion.upd.go b/service/wechatunion/promoter.promotion.upd.go similarity index 96% rename from wechatunion/promoter.promotion.upd.go rename to service/wechatunion/promoter.promotion.upd.go index 50732d3c..e426d86c 100644 --- a/wechatunion/promoter.promotion.upd.go +++ b/service/wechatunion/promoter.promotion.upd.go @@ -3,7 +3,7 @@ package wechatunion import ( "encoding/json" "fmt" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "net/http" ) diff --git a/weishi/api.go b/service/weishi/api.go similarity index 99% rename from weishi/api.go rename to service/weishi/api.go index 71585d47..275f0c5d 100644 --- a/weishi/api.go +++ b/service/weishi/api.go @@ -4,7 +4,7 @@ import ( "encoding/json" "errors" "github.com/mvdan/xurls" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" "regexp" "strings" ) diff --git a/weishi/pgsql.go b/service/weishi/pgsql.go similarity index 58% rename from weishi/pgsql.go rename to service/weishi/pgsql.go index 184c85a6..897a32c8 100644 --- a/weishi/pgsql.go +++ b/service/weishi/pgsql.go @@ -1,19 +1,19 @@ package weishi import ( - "go.dtapp.net/library/gojson" - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" 
) // 记录日志 -func (ws *WeiShi) postgresqlLog(request gorequest.Response) { - ws.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (ws *WeiShi) postgresqlLog(request gorequest2.Response) { + ws.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 @@ -21,6 +21,6 @@ func (ws *WeiShi) postgresqlLog(request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/weishi/weishi.go b/service/weishi/weishi.go similarity index 96% rename from weishi/weishi.go rename to service/weishi/weishi.go index 1aba3aad..14c3e5a8 100644 --- a/weishi/weishi.go +++ b/service/weishi/weishi.go @@ -2,8 +2,8 @@ package weishi import ( "errors" - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" "net/http" "strings" diff --git a/wikeyun/app.go b/service/wikeyun/app.go similarity index 93% rename from wikeyun/app.go rename to service/wikeyun/app.go index 7d44cdbd..db22b06b 100644 --- a/wikeyun/app.go +++ b/service/wikeyun/app.go @@ -2,9 +2,9 @@ package wikeyun import ( "fmt" - "go.dtapp.net/library/goip" - "go.dtapp.net/library/golog" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/goip" + "go.dtapp.net/library/utils/golog" + "go.dtapp.net/library/utils/gorequest" "gorm.io/gorm" ) diff --git a/wikeyun/const.go b/service/wikeyun/const.go similarity index 100% rename from wikeyun/const.go rename to service/wikeyun/const.go diff --git a/wikeyun/params.go b/service/wikeyun/params.go similarity index 100% rename from wikeyun/params.go rename to service/wikeyun/params.go diff --git a/wikeyun/pgsql.go b/service/wikeyun/pgsql.go similarity index 58% rename from wikeyun/pgsql.go rename to service/wikeyun/pgsql.go index 72c01a5a..f1d1a797 100644 --- a/wikeyun/pgsql.go +++ b/service/wikeyun/pgsql.go @@ -1,19 +1,19 @@ package wikeyun import ( - "go.dtapp.net/library/gojson" - golog "go.dtapp.net/library/golog" - gorequest "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gojson" + golog2 "go.dtapp.net/library/utils/golog" + gorequest2 "go.dtapp.net/library/utils/gorequest" "gorm.io/datatypes" ) // 记录日志 -func (app *App) postgresqlLog(request gorequest.Response) { - app.log.Record(golog.ApiPostgresqlLog{ - RequestTime: golog.TimeString{Time: request.RequestTime}, //【请求】时间 +func (app *App) postgresqlLog(request gorequest2.Response) { + app.log.Record(golog2.ApiPostgresqlLog{ + RequestTime: golog2.TimeString{Time: request.RequestTime}, //【请求】时间 RequestUri: request.RequestUri, //【请求】链接 - RequestUrl: gorequest.UriParse(request.RequestUri).Url, //【请求】链接 - RequestApi: 
gorequest.UriParse(request.RequestUri).Path, //【请求】接口 + RequestUrl: gorequest2.UriParse(request.RequestUri).Url, //【请求】链接 + RequestApi: gorequest2.UriParse(request.RequestUri).Path, //【请求】接口 RequestMethod: request.RequestMethod, //【请求】方式 RequestParams: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestParams)), //【请求】参数 RequestHeader: datatypes.JSON(gojson.JsonEncodeNoError(request.RequestHeader)), //【返回】头部 @@ -21,6 +21,6 @@ func (app *App) postgresqlLog(request gorequest.Response) { ResponseStatusCode: request.ResponseStatusCode, //【返回】状态码 ResponseBody: request.ResponseBody, //【返回】内容 ResponseContentLength: request.ResponseContentLength, //【返回】大小 - ResponseTime: golog.TimeString{Time: request.ResponseTime}, //【返回】时间 + ResponseTime: golog2.TimeString{Time: request.ResponseTime}, //【返回】时间 }) } diff --git a/wikeyun/rest.oil.add_card.go b/service/wikeyun/rest.oil.add_card.go similarity index 100% rename from wikeyun/rest.oil.add_card.go rename to service/wikeyun/rest.oil.add_card.go diff --git a/wikeyun/rest.oil.card_info.go b/service/wikeyun/rest.oil.card_info.go similarity index 100% rename from wikeyun/rest.oil.card_info.go rename to service/wikeyun/rest.oil.card_info.go diff --git a/wikeyun/rest.oil.del_card.go b/service/wikeyun/rest.oil.del_card.go similarity index 100% rename from wikeyun/rest.oil.del_card.go rename to service/wikeyun/rest.oil.del_card.go diff --git a/wikeyun/rest.oil.push_order.go b/service/wikeyun/rest.oil.push_order.go similarity index 100% rename from wikeyun/rest.oil.push_order.go rename to service/wikeyun/rest.oil.push_order.go diff --git a/wikeyun/rest.oil.query.go b/service/wikeyun/rest.oil.query.go similarity index 100% rename from wikeyun/rest.oil.query.go rename to service/wikeyun/rest.oil.query.go diff --git a/wikeyun/rest.oiledit_card.go b/service/wikeyun/rest.oiledit_card.go similarity index 100% rename from wikeyun/rest.oiledit_card.go rename to service/wikeyun/rest.oiledit_card.go diff --git a/wikeyun/rest.power.add_card.go b/service/wikeyun/rest.power.add_card.go similarity index 97% rename from wikeyun/rest.power.add_card.go rename to service/wikeyun/rest.power.add_card.go index b7381831..234f11a1 100644 --- a/wikeyun/rest.power.add_card.go +++ b/service/wikeyun/rest.power.add_card.go @@ -2,7 +2,7 @@ package wikeyun import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type RestPowerAddCardResponse struct { diff --git a/wikeyun/rest.power.cancel.go b/service/wikeyun/rest.power.cancel.go similarity index 96% rename from wikeyun/rest.power.cancel.go rename to service/wikeyun/rest.power.cancel.go index 6387b061..dde1a220 100644 --- a/wikeyun/rest.power.cancel.go +++ b/service/wikeyun/rest.power.cancel.go @@ -2,7 +2,7 @@ package wikeyun import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type RestPowerCancelResponse struct { diff --git a/wikeyun/rest.power.card_info.go b/service/wikeyun/rest.power.card_info.go similarity index 100% rename from wikeyun/rest.power.card_info.go rename to service/wikeyun/rest.power.card_info.go diff --git a/wikeyun/rest.power.del_card.go b/service/wikeyun/rest.power.del_card.go similarity index 96% rename from wikeyun/rest.power.del_card.go rename to service/wikeyun/rest.power.del_card.go index cc81846f..66ccdf42 100644 --- a/wikeyun/rest.power.del_card.go +++ b/service/wikeyun/rest.power.del_card.go @@ -2,7 +2,7 @@ package wikeyun import ( "encoding/json" - "go.dtapp.net/library/gorequest" + 
"go.dtapp.net/library/utils/gorequest" ) type RestPowerDelCardResponse struct { diff --git a/wikeyun/rest.power.edit_card.go b/service/wikeyun/rest.power.edit_card.go similarity index 100% rename from wikeyun/rest.power.edit_card.go rename to service/wikeyun/rest.power.edit_card.go diff --git a/wikeyun/rest.power.push_order.go b/service/wikeyun/rest.power.push_order.go similarity index 96% rename from wikeyun/rest.power.push_order.go rename to service/wikeyun/rest.power.push_order.go index d2f34130..bfa0dc19 100644 --- a/wikeyun/rest.power.push_order.go +++ b/service/wikeyun/rest.power.push_order.go @@ -2,7 +2,7 @@ package wikeyun import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type RestPowerPushOrderResponse struct { diff --git a/wikeyun/rest.power.query.go b/service/wikeyun/rest.power.query.go similarity index 97% rename from wikeyun/rest.power.query.go rename to service/wikeyun/rest.power.query.go index eb49f012..e98551e9 100644 --- a/wikeyun/rest.power.query.go +++ b/service/wikeyun/rest.power.query.go @@ -2,7 +2,7 @@ package wikeyun import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type RestPowerQueryResponse struct { diff --git a/wikeyun/rest.recharge.cancel.go b/service/wikeyun/rest.recharge.cancel.go similarity index 96% rename from wikeyun/rest.recharge.cancel.go rename to service/wikeyun/rest.recharge.cancel.go index 71d44452..3540cc67 100644 --- a/wikeyun/rest.recharge.cancel.go +++ b/service/wikeyun/rest.recharge.cancel.go @@ -2,7 +2,7 @@ package wikeyun import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type RestRechargeCancelResponse struct { diff --git a/wikeyun/rest.recharge.mobileInfo.go b/service/wikeyun/rest.recharge.mobileInfo.go similarity index 98% rename from wikeyun/rest.recharge.mobileInfo.go rename to service/wikeyun/rest.recharge.mobileInfo.go index 0ed612e9..87e440d0 100644 --- a/wikeyun/rest.recharge.mobileInfo.go +++ b/service/wikeyun/rest.recharge.mobileInfo.go @@ -2,7 +2,7 @@ package wikeyun import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type RestRechargeMobileInfoResponse struct { diff --git a/wikeyun/rest.recharge.push_order.go b/service/wikeyun/rest.recharge.push_order.go similarity index 97% rename from wikeyun/rest.recharge.push_order.go rename to service/wikeyun/rest.recharge.push_order.go index 794b73a2..e70817e0 100644 --- a/wikeyun/rest.recharge.push_order.go +++ b/service/wikeyun/rest.recharge.push_order.go @@ -2,7 +2,7 @@ package wikeyun import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type RestRechargePushOrderResponse struct { diff --git a/wikeyun/rest.recharge.query.go b/service/wikeyun/rest.recharge.query.go similarity index 97% rename from wikeyun/rest.recharge.query.go rename to service/wikeyun/rest.recharge.query.go index 0374e889..03b7b1e8 100644 --- a/wikeyun/rest.recharge.query.go +++ b/service/wikeyun/rest.recharge.query.go @@ -2,7 +2,7 @@ package wikeyun import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type RestRechargeQueryResponse struct { diff --git a/wikeyun/rest.user.query.go b/service/wikeyun/rest.user.query.go similarity index 96% rename from wikeyun/rest.user.query.go rename to service/wikeyun/rest.user.query.go index 69f4f579..552181da 100644 --- a/wikeyun/rest.user.query.go +++ 
b/service/wikeyun/rest.user.query.go @@ -2,7 +2,7 @@ package wikeyun import ( "encoding/json" - "go.dtapp.net/library/gorequest" + "go.dtapp.net/library/utils/gorequest" ) type RestUserQueryResponse struct { diff --git a/wikeyun/sign.go b/service/wikeyun/sign.go similarity index 100% rename from wikeyun/sign.go rename to service/wikeyun/sign.go diff --git a/vendor/github.com/dgraph-io/ristretto/test.sh b/vendor/github.com/dgraph-io/ristretto/test.sh deleted file mode 100644 index d53b32d4..00000000 --- a/vendor/github.com/dgraph-io/ristretto/test.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /bin/sh - -starttest() { - set -e - GO111MODULE=on go test -race ./... -} - -if [ -z "${TEAMCITY_VERSION}" ]; then - # running locally, so start test in a container - # TEAMCITY_VERSION=local will avoid recursive calls, when it would be running in container - docker run --rm --name ristretto-test -ti \ - -v `pwd`:/go/src/github.com/dgraph-io/ristretto \ - --workdir /go/src/github.com/dgraph-io/ristretto \ - --env TEAMCITY_VERSION=local \ - golang:1.13 \ - sh test.sh -else - # running in teamcity, since teamcity itself run this in container, let's simply run this - starttest -fi diff --git a/vendor/github.com/go-playground/locales/logo.png b/vendor/github.com/go-playground/locales/logo.png index 3038276e..e69de29b 100644 Binary files a/vendor/github.com/go-playground/locales/logo.png and b/vendor/github.com/go-playground/locales/logo.png differ diff --git a/vendor/github.com/go-playground/universal-translator/logo.png b/vendor/github.com/go-playground/universal-translator/logo.png index a37aa8c0..e69de29b 100644 Binary files a/vendor/github.com/go-playground/universal-translator/logo.png and b/vendor/github.com/go-playground/universal-translator/logo.png differ diff --git a/vendor/github.com/go-playground/validator/v10/logo.png b/vendor/github.com/go-playground/validator/v10/logo.png index 355000f5..e69de29b 100644 Binary files a/vendor/github.com/go-playground/validator/v10/logo.png and b/vendor/github.com/go-playground/validator/v10/logo.png differ diff --git a/vendor/github.com/jackc/pgtype/typed_array_gen.sh b/vendor/github.com/jackc/pgtype/typed_array_gen.sh deleted file mode 100644 index d922f1cb..00000000 --- a/vendor/github.com/jackc/pgtype/typed_array_gen.sh +++ /dev/null @@ -1,30 +0,0 @@ -erb pgtype_array_type=Int2Array pgtype_element_type=Int2 go_array_types=[]int16,[]*int16,[]uint16,[]*uint16,[]int32,[]*int32,[]uint32,[]*uint32,[]int64,[]*int64,[]uint64,[]*uint64,[]int,[]*int,[]uint,[]*uint element_type_name=int2 typed_array.go.erb > int2_array.go -erb pgtype_array_type=Int4Array pgtype_element_type=Int4 go_array_types=[]int16,[]*int16,[]uint16,[]*uint16,[]int32,[]*int32,[]uint32,[]*uint32,[]int64,[]*int64,[]uint64,[]*uint64,[]int,[]*int,[]uint,[]*uint element_type_name=int4 typed_array.go.erb > int4_array.go -erb pgtype_array_type=Int8Array pgtype_element_type=Int8 go_array_types=[]int16,[]*int16,[]uint16,[]*uint16,[]int32,[]*int32,[]uint32,[]*uint32,[]int64,[]*int64,[]uint64,[]*uint64,[]int,[]*int,[]uint,[]*uint element_type_name=int8 typed_array.go.erb > int8_array.go -erb pgtype_array_type=BoolArray pgtype_element_type=Bool go_array_types=[]bool,[]*bool element_type_name=bool typed_array.go.erb > bool_array.go -erb pgtype_array_type=DateArray pgtype_element_type=Date go_array_types=[]time.Time,[]*time.Time element_type_name=date typed_array.go.erb > date_array.go -erb pgtype_array_type=TimestamptzArray pgtype_element_type=Timestamptz go_array_types=[]time.Time,[]*time.Time 
element_type_name=timestamptz typed_array.go.erb > timestamptz_array.go -erb pgtype_array_type=TstzrangeArray pgtype_element_type=Tstzrange go_array_types=[]Tstzrange element_type_name=tstzrange typed_array.go.erb > tstzrange_array.go -erb pgtype_array_type=TsrangeArray pgtype_element_type=Tsrange go_array_types=[]Tsrange element_type_name=tsrange typed_array.go.erb > tsrange_array.go -erb pgtype_array_type=TimestampArray pgtype_element_type=Timestamp go_array_types=[]time.Time,[]*time.Time element_type_name=timestamp typed_array.go.erb > timestamp_array.go -erb pgtype_array_type=Float4Array pgtype_element_type=Float4 go_array_types=[]float32,[]*float32 element_type_name=float4 typed_array.go.erb > float4_array.go -erb pgtype_array_type=Float8Array pgtype_element_type=Float8 go_array_types=[]float64,[]*float64 element_type_name=float8 typed_array.go.erb > float8_array.go -erb pgtype_array_type=InetArray pgtype_element_type=Inet go_array_types=[]*net.IPNet,[]net.IP,[]*net.IP element_type_name=inet typed_array.go.erb > inet_array.go -erb pgtype_array_type=MacaddrArray pgtype_element_type=Macaddr go_array_types=[]net.HardwareAddr,[]*net.HardwareAddr element_type_name=macaddr typed_array.go.erb > macaddr_array.go -erb pgtype_array_type=CIDRArray pgtype_element_type=CIDR go_array_types=[]*net.IPNet,[]net.IP,[]*net.IP element_type_name=cidr typed_array.go.erb > cidr_array.go -erb pgtype_array_type=TextArray pgtype_element_type=Text go_array_types=[]string,[]*string element_type_name=text typed_array.go.erb > text_array.go -erb pgtype_array_type=VarcharArray pgtype_element_type=Varchar go_array_types=[]string,[]*string element_type_name=varchar typed_array.go.erb > varchar_array.go -erb pgtype_array_type=BPCharArray pgtype_element_type=BPChar go_array_types=[]string,[]*string element_type_name=bpchar typed_array.go.erb > bpchar_array.go -erb pgtype_array_type=ByteaArray pgtype_element_type=Bytea go_array_types=[][]byte element_type_name=bytea typed_array.go.erb > bytea_array.go -erb pgtype_array_type=ACLItemArray pgtype_element_type=ACLItem go_array_types=[]string,[]*string element_type_name=aclitem binary_format=false typed_array.go.erb > aclitem_array.go -erb pgtype_array_type=HstoreArray pgtype_element_type=Hstore go_array_types=[]map[string]string element_type_name=hstore typed_array.go.erb > hstore_array.go -erb pgtype_array_type=NumericArray pgtype_element_type=Numeric go_array_types=[]float32,[]*float32,[]float64,[]*float64,[]int64,[]*int64,[]uint64,[]*uint64 element_type_name=numeric typed_array.go.erb > numeric_array.go -erb pgtype_array_type=UUIDArray pgtype_element_type=UUID go_array_types=[][16]byte,[][]byte,[]string,[]*string element_type_name=uuid typed_array.go.erb > uuid_array.go -erb pgtype_array_type=JSONBArray pgtype_element_type=JSONB go_array_types=[]string,[][]byte,[]json.RawMessage element_type_name=jsonb typed_array.go.erb > jsonb_array.go - -# While the binary format is theoretically possible it is only practical to use the text format. 
-erb pgtype_array_type=EnumArray pgtype_element_type=GenericText go_array_types=[]string,[]*string binary_format=false typed_array.go.erb > enum_array.go - -erb pgtype_array_type=RecordArray pgtype_element_type=Record go_array_types=[][]Value element_type_name=record text_null=NULL encode_binary=false text_format=false typed_array.go.erb > record_array.go - -goimports -w *_array.go diff --git a/vendor/github.com/jackc/pgtype/typed_multirange_gen.sh b/vendor/github.com/jackc/pgtype/typed_multirange_gen.sh deleted file mode 100644 index 610f40a1..00000000 --- a/vendor/github.com/jackc/pgtype/typed_multirange_gen.sh +++ /dev/null @@ -1,8 +0,0 @@ -erb range_type=Numrange multirange_type=Nummultirange typed_multirange.go.erb > num_multirange.go -erb range_type=Int4range multirange_type=Int4multirange typed_multirange.go.erb > int4_multirange.go -erb range_type=Int8range multirange_type=Int8multirange typed_multirange.go.erb > int8_multirange.go -# TODO -# erb range_type=Tsrange multirange_type=Tsmultirange typed_multirange.go.erb > ts_multirange.go -# erb range_type=Tstzrange multirange_type=Tstzmultirange typed_multirange.go.erb > tstz_multirange.go -# erb range_type=Daterange multirange_type=Datemultirange typed_multirange.go.erb > date_multirange.go -goimports -w *multirange.go \ No newline at end of file diff --git a/vendor/github.com/jackc/pgtype/typed_range_gen.sh b/vendor/github.com/jackc/pgtype/typed_range_gen.sh deleted file mode 100644 index bedda292..00000000 --- a/vendor/github.com/jackc/pgtype/typed_range_gen.sh +++ /dev/null @@ -1,7 +0,0 @@ -erb range_type=Int4range element_type=Int4 typed_range.go.erb > int4range.go -erb range_type=Int8range element_type=Int8 typed_range.go.erb > int8range.go -erb range_type=Tsrange element_type=Timestamp typed_range.go.erb > tsrange.go -erb range_type=Tstzrange element_type=Timestamptz typed_range.go.erb > tstzrange.go -erb range_type=Daterange element_type=Date typed_range.go.erb > daterange.go -erb range_type=Numrange element_type=Numeric typed_range.go.erb > numrange.go -goimports -w *range.go diff --git a/vendor/github.com/json-iterator/go/build.sh b/vendor/github.com/json-iterator/go/build.sh deleted file mode 100644 index b45ef688..00000000 --- a/vendor/github.com/json-iterator/go/build.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -set -e -set -x - -if [ ! -d /tmp/build-golang/src/github.com/json-iterator ]; then - mkdir -p /tmp/build-golang/src/github.com/json-iterator - ln -s $PWD /tmp/build-golang/src/github.com/json-iterator/go -fi -export GOPATH=/tmp/build-golang -go get -u github.com/golang/dep/cmd/dep -cd /tmp/build-golang/src/github.com/json-iterator/go -exec $GOPATH/bin/dep ensure -update diff --git a/vendor/github.com/json-iterator/go/test.sh b/vendor/github.com/json-iterator/go/test.sh deleted file mode 100644 index f4e7c0b2..00000000 --- a/vendor/github.com/json-iterator/go/test.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env bash - -set -e -echo "" > coverage.txt - -for d in $(go list ./... | grep -v vendor); do - go test -coverprofile=profile.out -coverpkg=github.com/json-iterator/go $d - if [ -f profile.out ]; then - cat profile.out >> coverage.txt - rm profile.out - fi -done diff --git a/vendor/github.com/klauspost/compress/gen.sh b/vendor/github.com/klauspost/compress/gen.sh deleted file mode 100644 index aff94220..00000000 --- a/vendor/github.com/klauspost/compress/gen.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/sh - -cd s2/cmd/_s2sx/ || exit 1 -go generate . 
diff --git a/vendor/github.com/mattn/go-isatty/go.test.sh b/vendor/github.com/mattn/go-isatty/go.test.sh deleted file mode 100644 index 012162b0..00000000 --- a/vendor/github.com/mattn/go-isatty/go.test.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env bash - -set -e -echo "" > coverage.txt - -for d in $(go list ./... | grep -v vendor); do - go test -race -coverprofile=profile.out -covermode=atomic "$d" - if [ -f profile.out ]; then - cat profile.out >> coverage.txt - rm profile.out - fi -done diff --git a/vendor/github.com/modern-go/concurrent/test.sh b/vendor/github.com/modern-go/concurrent/test.sh deleted file mode 100644 index d1e6b2ec..00000000 --- a/vendor/github.com/modern-go/concurrent/test.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env bash - -set -e -echo "" > coverage.txt - -for d in $(go list ./... | grep -v vendor); do - go test -coverprofile=profile.out -coverpkg=github.com/modern-go/concurrent $d - if [ -f profile.out ]; then - cat profile.out >> coverage.txt - rm profile.out - fi -done diff --git a/vendor/github.com/pelletier/go-toml/v2/ci.sh b/vendor/github.com/pelletier/go-toml/v2/ci.sh deleted file mode 100644 index d916c5f2..00000000 --- a/vendor/github.com/pelletier/go-toml/v2/ci.sh +++ /dev/null @@ -1,279 +0,0 @@ -#!/usr/bin/env bash - - -stderr() { - echo "$@" 1>&2 -} - -usage() { - b=$(basename "$0") - echo $b: ERROR: "$@" 1>&2 - - cat 1>&2 < coverage.out - go tool cover -func=coverage.out - popd - - if [ "${branch}" != "HEAD" ]; then - git worktree remove --force "$dir" - fi -} - -coverage() { - case "$1" in - -d) - shift - target="${1?Need to provide a target branch argument}" - - output_dir="$(mktemp -d)" - target_out="${output_dir}/target.txt" - head_out="${output_dir}/head.txt" - - cover "${target}" > "${target_out}" - cover "HEAD" > "${head_out}" - - cat "${target_out}" - cat "${head_out}" - - echo "" - - target_pct="$(tail -n2 ${target_out} | head -n1 | sed -E 's/.*total.*\t([0-9.]+)%.*/\1/')" - head_pct="$(tail -n2 ${head_out} | head -n1 | sed -E 's/.*total.*\t([0-9.]+)%/\1/')" - echo "Results: ${target} ${target_pct}% HEAD ${head_pct}%" - - delta_pct=$(echo "$head_pct - $target_pct" | bc -l) - echo "Delta: ${delta_pct}" - - if [[ $delta_pct = \-* ]]; then - echo "Regression!"; - - target_diff="${output_dir}/target.diff.txt" - head_diff="${output_dir}/head.diff.txt" - cat "${target_out}" | grep -E '^github.com/pelletier/go-toml' | tr -s "\t " | cut -f 2,3 | sort > "${target_diff}" - cat "${head_out}" | grep -E '^github.com/pelletier/go-toml' | tr -s "\t " | cut -f 2,3 | sort > "${head_diff}" - - diff --side-by-side --suppress-common-lines "${target_diff}" "${head_diff}" - return 1 - fi - return 0 - ;; - esac - - cover "${1-HEAD}" -} - -bench() { - branch="${1}" - out="${2}" - replace="${3}" - dir="$(mktemp -d)" - - stderr "Executing benchmark for ${branch} at ${dir}" - - if [ "${branch}" = "HEAD" ]; then - cp -r . "${dir}/" - else - git worktree add "$dir" "$branch" - fi - - pushd "$dir" - - if [ "${replace}" != "" ]; then - find ./benchmark/ -iname '*.go' -exec sed -i -E "s|github.com/pelletier/go-toml/v2|${replace}|g" {} \; - go get "${replace}" - fi - - export GOMAXPROCS=2 - nice -n -19 taskset --cpu-list 0,1 go test '-bench=^Benchmark(Un)?[mM]arshal' -count=5 -run=Nothing ./... 
| tee "${out}" - popd - - if [ "${branch}" != "HEAD" ]; then - git worktree remove --force "$dir" - fi -} - -fmktemp() { - if mktemp --version|grep GNU >/dev/null; then - mktemp --suffix=-$1; - else - mktemp -t $1; - fi -} - -benchstathtml() { -python3 - $1 <<'EOF' -import sys - -lines = [] -stop = False - -with open(sys.argv[1]) as f: - for line in f.readlines(): - line = line.strip() - if line == "": - stop = True - if not stop: - lines.append(line.split(',')) - -results = [] -for line in reversed(lines[1:]): - v2 = float(line[1]) - results.append([ - line[0].replace("-32", ""), - "%.1fx" % (float(line[3])/v2), # v1 - "%.1fx" % (float(line[5])/v2), # bs - ]) -# move geomean to the end -results.append(results[0]) -del results[0] - - -def printtable(data): - print(""" - - - - - """) - - for r in data: - print(" ".format(*r)) - - print(""" -
Benchmarkgo-toml v1BurntSushi/toml
{}{}{}
""") - - -def match(x): - return "ReferenceFile" in x[0] or "HugoFrontMatter" in x[0] - -above = [x for x in results if match(x)] -below = [x for x in results if not match(x)] - -printtable(above) -print("
<details><summary>See more</summary>") -print("""<p>The table above has the results of the most common use-cases. The table below -contains the results of all benchmarks, including unrealistic ones. It is -provided for completeness.</p>""") -printtable(below) -print('<p>This table can be generated with <code>./ci.sh benchmark -a -html</code>.</p>') -print("</details>
") - -EOF -} - -benchmark() { - case "$1" in - -d) - shift - target="${1?Need to provide a target branch argument}" - - old=`fmktemp ${target}` - bench "${target}" "${old}" - - new=`fmktemp HEAD` - bench HEAD "${new}" - - benchstat "${old}" "${new}" - return 0 - ;; - -a) - shift - - v2stats=`fmktemp go-toml-v2` - bench HEAD "${v2stats}" "github.com/pelletier/go-toml/v2" - v1stats=`fmktemp go-toml-v1` - bench HEAD "${v1stats}" "github.com/pelletier/go-toml" - bsstats=`fmktemp bs-toml` - bench HEAD "${bsstats}" "github.com/BurntSushi/toml" - - cp "${v2stats}" go-toml-v2.txt - cp "${v1stats}" go-toml-v1.txt - cp "${bsstats}" bs-toml.txt - - if [ "$1" = "-html" ]; then - tmpcsv=`fmktemp csv` - benchstat -csv -geomean go-toml-v2.txt go-toml-v1.txt bs-toml.txt > $tmpcsv - benchstathtml $tmpcsv - else - benchstat -geomean go-toml-v2.txt go-toml-v1.txt bs-toml.txt - fi - - rm -f go-toml-v2.txt go-toml-v1.txt bs-toml.txt - return $? - esac - - bench "${1-HEAD}" `mktemp` -} - -case "$1" in - coverage) shift; coverage $@;; - benchmark) shift; benchmark $@;; - *) usage "bad argument $1";; -esac diff --git a/vendor/github.com/ugorji/go/codec/build.sh b/vendor/github.com/ugorji/go/codec/build.sh deleted file mode 100644 index c7704f75..00000000 --- a/vendor/github.com/ugorji/go/codec/build.sh +++ /dev/null @@ -1,368 +0,0 @@ -#!/bin/bash - -# Run all the different permutations of all the tests and other things -# This helps ensure that nothing gets broken. - -_tests() { - local vet="" # TODO: make it off - local gover=$( ${gocmd} version | cut -f 3 -d ' ' ) - [[ $( ${gocmd} version ) == *"gccgo"* ]] && zcover=0 - [[ $( ${gocmd} version ) == *"gollvm"* ]] && zcover=0 - case $gover in - go1.[7-9]*|go1.1[0-9]*|go2.*|devel*) true ;; - *) return 1 - esac - # note that codecgen requires fastpath, so you cannot do "codecgen codec.notfastpath" - # we test the following permutations wnich all execute different code paths as below. - echo "TestCodecSuite: (fastpath/unsafe), (!fastpath/unsafe), (fastpath/!unsafe), (!fastpath/!unsafe), (codecgen/unsafe)" - local echo=1 - local nc=2 # count - local cpus="1,$(nproc)" - # if using the race detector, then set nc to - if [[ " ${zargs[@]} " =~ "-race" ]]; then - cpus="$(nproc)" - fi - local a=( "" "codec.notfastpath" "codec.safe" "codec.notfastpath codec.safe" "codecgen" ) - local b=() - local c=() - for i in "${a[@]}" - do - local i2=${i:-default} - [[ "$zwait" == "1" ]] && echo ">>>> TAGS: 'alltests $i'; RUN: 'TestCodecSuite'" - [[ "$zcover" == "1" ]] && c=( -coverprofile "${i2// /-}.cov.out" ) - true && - ${gocmd} vet -printfuncs "errorf" "$@" && - if [[ "$echo" == 1 ]]; then set -o xtrace; fi && - ${gocmd} test ${zargs[*]} ${ztestargs[*]} -vet "$vet" -tags "alltests $i" -count $nc -cpu $cpus -run "TestCodecSuite" "${c[@]}" "$@" & - if [[ "$echo" == 1 ]]; then set +o xtrace; fi - b+=("${i2// /-}.cov.out") - [[ "$zwait" == "1" ]] && wait - - # if [[ "$?" 
!= 0 ]]; then return 1; fi - done - if [[ "$zextra" == "1" ]]; then - [[ "$zwait" == "1" ]] && echo ">>>> TAGS: 'codec.notfastpath x'; RUN: 'Test.*X$'" - [[ "$zcover" == "1" ]] && c=( -coverprofile "x.cov.out" ) - ${gocmd} test ${zargs[*]} ${ztestargs[*]} -vet "$vet" -tags "codec.notfastpath x" -count $nc -run 'Test.*X$' "${c[@]}" & - b+=("x.cov.out") - [[ "$zwait" == "1" ]] && wait - fi - wait - # go tool cover is not supported for gccgo, gollvm, other non-standard go compilers - [[ "$zcover" == "1" ]] && - command -v gocovmerge && - gocovmerge "${b[@]}" > __merge.cov.out && - ${gocmd} tool cover -html=__merge.cov.out -} - -# is a generation needed? -_ng() { - local a="$1" - if [[ ! -e "$a" ]]; then echo 1; return; fi - for i in `ls -1 *.go.tmpl gen.go values_test.go` - do - if [[ "$a" -ot "$i" ]]; then echo 1; return; fi - done -} - -_prependbt() { - cat > ${2} <> ${2} - rm -f ${1} -} - -# _build generates fast-path.go and gen-helper.go. -_build() { - if ! [[ "${zforce}" || $(_ng "fast-path.generated.go") || $(_ng "gen-helper.generated.go") || $(_ng "gen.generated.go") ]]; then return 0; fi - - if [ "${zbak}" ]; then - _zts=`date '+%m%d%Y_%H%M%S'` - _gg=".generated.go" - [ -e "gen-helper${_gg}" ] && mv gen-helper${_gg} gen-helper${_gg}__${_zts}.bak - [ -e "fast-path${_gg}" ] && mv fast-path${_gg} fast-path${_gg}__${_zts}.bak - [ -e "gen${_gg}" ] && mv gen${_gg} gen${_gg}__${_zts}.bak - fi - rm -f gen-helper.generated.go fast-path.generated.go gen.generated.go \ - *safe.generated.go *_generated_test.go *.generated_ffjson_expose.go - - cat > gen.generated.go <> gen.generated.go < gen-dec-map.go.tmpl - cat >> gen.generated.go <> gen.generated.go < gen-dec-array.go.tmpl - cat >> gen.generated.go <> gen.generated.go < gen-enc-chan.go.tmpl - cat >> gen.generated.go < gen-from-tmpl.codec.generated.go < gen-from-tmpl.sort-slice-stubs.generated.go <> gen-from-tmpl.sort-slice-stubs.generated.go < bench/shared_test.go - - # explicitly return 0 if this passes, else return 1 - local btags="codec.notfastpath codec.safe codecgen.exec" - rm -f sort-slice.generated.go fast-path.generated.go gen-helper.generated.go mammoth_generated_test.go mammoth2_generated_test.go - - cat > gen-from-tmpl.sort-slice.generated.go < gen-from-tmpl.generated.go < $f <>$f - if [[ "$i" != "master" ]]; then i="release-branch.go$i"; fi - (false || - (echo "===== BUILDING GO SDK for branch: $i ... =====" && - cd $GOROOT && - git checkout -f $i && git reset --hard && git clean -f . && - cd src && ./make.bash >>$f 2>&1 && sleep 1 ) ) && - echo "===== GO SDK BUILD DONE =====" && - _prebuild && - echo "===== PREBUILD DONE with exit: $? =====" && - _tests "$@" - if [[ "$?" 
!= 0 ]]; then return 1; fi - done - zforce=${makeforce} - echo "++++++++ RELEASE TEST SUITES ALL PASSED ++++++++" -} - -_usage() { - # hidden args: - # -pf [p=prebuild (f=force)] - - cat < [t=tests (e=extra, s=short, o=cover, w=wait), m=make, n=inlining diagnostics, l=mid-stack inlining, d=race detector] - -v -> v=verbose -EOF - if [[ "$(type -t _usage_run)" = "function" ]]; then _usage_run ; fi -} - -_main() { - if [[ -z "$1" ]]; then _usage; return 1; fi - local x # determines the main action to run in this build - local zforce # force - local zcover # generate cover profile and show in browser when done - local zwait # run tests in sequence, not parallel ie wait for one to finish before starting another - local zextra # means run extra (python based tests, etc) during testing - - local ztestargs=() - local zargs=() - local zverbose=() - local zbenchflags="" - - local gocmd=${MYGOCMD:-go} - - OPTIND=1 - while getopts ":cetmnrgpfvldsowkxyzb:" flag - do - case "x$flag" in - 'xo') zcover=1 ;; - 'xe') zextra=1 ;; - 'xw') zwait=1 ;; - 'xf') zforce=1 ;; - 'xs') ztestargs+=("-short") ;; - 'xv') zverbose+=(1) ;; - 'xl') zargs+=("-gcflags"); zargs+=("-l=4") ;; - 'xn') zargs+=("-gcflags"); zargs+=("-m=2") ;; - 'xd') zargs+=("-race") ;; - 'xb') x='b'; zbenchflags=${OPTARG} ;; - x\?) _usage; return 1 ;; - *) x=$flag ;; - esac - done - shift $((OPTIND-1)) - # echo ">>>> _main: extra args: $@" - case "x$x" in - 'xt') _tests "$@" ;; - 'xm') _make "$@" ;; - 'xr') _release "$@" ;; - 'xg') _go ;; - 'xp') _prebuild "$@" ;; - 'xc') _clean "$@" ;; - 'xx') _analyze_checks "$@" ;; - 'xy') _analyze_debug_types "$@" ;; - 'xz') _analyze_do_inlining_and_more "$@" ;; - 'xk') _go_compiler_validation_suite ;; - 'xb') _bench "$@" ;; - esac - # unset zforce zargs zbenchflags -} - -[ "." = `dirname $0` ] && _main "$@" - diff --git a/vendor/github.com/ulikunitz/xz/lzma/fox.lzma b/vendor/github.com/ulikunitz/xz/lzma/fox.lzma index 5edad633..e69de29b 100644 Binary files a/vendor/github.com/ulikunitz/xz/lzma/fox.lzma and b/vendor/github.com/ulikunitz/xz/lzma/fox.lzma differ diff --git a/vendor/go.uber.org/zap/checklicense.sh b/vendor/go.uber.org/zap/checklicense.sh deleted file mode 100644 index 345ac8b8..00000000 --- a/vendor/go.uber.org/zap/checklicense.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -e - -ERROR_COUNT=0 -while read -r file -do - case "$(head -1 "${file}")" in - *"Copyright (c) "*" Uber Technologies, Inc.") - # everything's cool - ;; - *) - echo "$file is missing license header." - (( ERROR_COUNT++ )) - ;; - esac -done < <(git ls-files "*\.go") - -exit $ERROR_COUNT diff --git a/vendor/golang.org/x/crypto/curve25519/internal/field/sync.sh b/vendor/golang.org/x/crypto/curve25519/internal/field/sync.sh deleted file mode 100644 index 1ba22a8b..00000000 --- a/vendor/golang.org/x/crypto/curve25519/internal/field/sync.sh +++ /dev/null @@ -1,19 +0,0 @@ -#! /bin/bash -set -euo pipefail - -cd "$(git rev-parse --show-toplevel)" - -STD_PATH=src/crypto/ed25519/internal/edwards25519/field -LOCAL_PATH=curve25519/internal/field -LAST_SYNC_REF=$(cat $LOCAL_PATH/sync.checkpoint) - -git fetch https://go.googlesource.com/go master - -if git diff --quiet $LAST_SYNC_REF:$STD_PATH FETCH_HEAD:$STD_PATH; then - echo "No changes." -else - NEW_REF=$(git rev-parse FETCH_HEAD | tee $LOCAL_PATH/sync.checkpoint) - echo "Applying changes from $LAST_SYNC_REF to $NEW_REF..." 
- git diff $LAST_SYNC_REF:$STD_PATH FETCH_HEAD:$STD_PATH | \ - git apply -3 --directory=$LOCAL_PATH -fi diff --git a/vendor/golang.org/x/sys/unix/mkall.sh b/vendor/golang.org/x/sys/unix/mkall.sh deleted file mode 100644 index ee736234..00000000 --- a/vendor/golang.org/x/sys/unix/mkall.sh +++ /dev/null @@ -1,231 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2009 The Go Authors. All rights reserved. -# Use of this source code is governed by a BSD-style -# license that can be found in the LICENSE file. - -# This script runs or (given -n) prints suggested commands to generate files for -# the Architecture/OS specified by the GOARCH and GOOS environment variables. -# See README.md for more information about how the build system works. - -GOOSARCH="${GOOS}_${GOARCH}" - -# defaults -mksyscall="go run mksyscall.go" -mkerrors="./mkerrors.sh" -zerrors="zerrors_$GOOSARCH.go" -mksysctl="" -zsysctl="zsysctl_$GOOSARCH.go" -mksysnum= -mktypes= -mkasm= -run="sh" -cmd="" - -case "$1" in --syscalls) - for i in zsyscall*go - do - # Run the command line that appears in the first line - # of the generated file to regenerate it. - sed 1q $i | sed 's;^// ;;' | sh > _$i && gofmt < _$i > $i - rm _$i - done - exit 0 - ;; --n) - run="cat" - cmd="echo" - shift -esac - -case "$#" in -0) - ;; -*) - echo 'usage: mkall.sh [-n]' 1>&2 - exit 2 -esac - -if [[ "$GOOS" = "linux" ]]; then - # Use the Docker-based build system - # Files generated through docker (use $cmd so you can Ctl-C the build or run) - $cmd docker build --tag generate:$GOOS $GOOS - $cmd docker run --interactive --tty --volume $(cd -- "$(dirname -- "$0")/.." && /bin/pwd):/build generate:$GOOS - exit -fi - -GOOSARCH_in=syscall_$GOOSARCH.go -case "$GOOSARCH" in -_* | *_ | _) - echo 'undefined $GOOS_$GOARCH:' "$GOOSARCH" 1>&2 - exit 1 - ;; -aix_ppc) - mkerrors="$mkerrors -maix32" - mksyscall="go run mksyscall_aix_ppc.go -aix" - mktypes="GOARCH=$GOARCH go tool cgo -godefs" - ;; -aix_ppc64) - mkerrors="$mkerrors -maix64" - mksyscall="go run mksyscall_aix_ppc64.go -aix" - mktypes="GOARCH=$GOARCH go tool cgo -godefs" - ;; -darwin_amd64) - mkerrors="$mkerrors -m64" - mktypes="GOARCH=$GOARCH go tool cgo -godefs" - mkasm="go run mkasm_darwin.go" - ;; -darwin_arm64) - mkerrors="$mkerrors -m64" - mktypes="GOARCH=$GOARCH go tool cgo -godefs" - mkasm="go run mkasm_darwin.go" - ;; -dragonfly_amd64) - mkerrors="$mkerrors -m64" - mksyscall="go run mksyscall.go -dragonfly" - mksysnum="go run mksysnum.go 'https://gitweb.dragonflybsd.org/dragonfly.git/blob_plain/HEAD:/sys/kern/syscalls.master'" - mktypes="GOARCH=$GOARCH go tool cgo -godefs" - ;; -freebsd_386) - mkerrors="$mkerrors -m32" - mksyscall="go run mksyscall.go -l32" - mksysnum="go run mksysnum.go 'https://svn.freebsd.org/base/stable/11/sys/kern/syscalls.master'" - mktypes="GOARCH=$GOARCH go tool cgo -godefs" - ;; -freebsd_amd64) - mkerrors="$mkerrors -m64" - mksysnum="go run mksysnum.go 'https://svn.freebsd.org/base/stable/11/sys/kern/syscalls.master'" - mktypes="GOARCH=$GOARCH go tool cgo -godefs" - ;; -freebsd_arm) - mkerrors="$mkerrors" - mksyscall="go run mksyscall.go -l32 -arm" - mksysnum="go run mksysnum.go 'https://svn.freebsd.org/base/stable/11/sys/kern/syscalls.master'" - # Let the type of C char be signed for making the bare syscall - # API consistent across platforms. 
- mktypes="GOARCH=$GOARCH go tool cgo -godefs -- -fsigned-char" - ;; -freebsd_arm64) - mkerrors="$mkerrors -m64" - mksysnum="go run mksysnum.go 'https://svn.freebsd.org/base/stable/11/sys/kern/syscalls.master'" - mktypes="GOARCH=$GOARCH go tool cgo -godefs -- -fsigned-char" - ;; -netbsd_386) - mkerrors="$mkerrors -m32" - mksyscall="go run mksyscall.go -l32 -netbsd" - mksysnum="go run mksysnum.go 'http://cvsweb.netbsd.org/bsdweb.cgi/~checkout~/src/sys/kern/syscalls.master'" - mktypes="GOARCH=$GOARCH go tool cgo -godefs" - ;; -netbsd_amd64) - mkerrors="$mkerrors -m64" - mksyscall="go run mksyscall.go -netbsd" - mksysnum="go run mksysnum.go 'http://cvsweb.netbsd.org/bsdweb.cgi/~checkout~/src/sys/kern/syscalls.master'" - mktypes="GOARCH=$GOARCH go tool cgo -godefs" - ;; -netbsd_arm) - mkerrors="$mkerrors" - mksyscall="go run mksyscall.go -l32 -netbsd -arm" - mksysnum="go run mksysnum.go 'http://cvsweb.netbsd.org/bsdweb.cgi/~checkout~/src/sys/kern/syscalls.master'" - # Let the type of C char be signed for making the bare syscall - # API consistent across platforms. - mktypes="GOARCH=$GOARCH go tool cgo -godefs -- -fsigned-char" - ;; -netbsd_arm64) - mkerrors="$mkerrors -m64" - mksyscall="go run mksyscall.go -netbsd" - mksysnum="go run mksysnum.go 'http://cvsweb.netbsd.org/bsdweb.cgi/~checkout~/src/sys/kern/syscalls.master'" - mktypes="GOARCH=$GOARCH go tool cgo -godefs" - ;; -openbsd_386) - mkerrors="$mkerrors -m32" - mksyscall="go run mksyscall.go -l32 -openbsd" - mksysctl="go run mksysctl_openbsd.go" - mksysnum="go run mksysnum.go 'https://cvsweb.openbsd.org/cgi-bin/cvsweb/~checkout~/src/sys/kern/syscalls.master'" - mktypes="GOARCH=$GOARCH go tool cgo -godefs" - ;; -openbsd_amd64) - mkerrors="$mkerrors -m64" - mksyscall="go run mksyscall.go -openbsd" - mksysctl="go run mksysctl_openbsd.go" - mksysnum="go run mksysnum.go 'https://cvsweb.openbsd.org/cgi-bin/cvsweb/~checkout~/src/sys/kern/syscalls.master'" - mktypes="GOARCH=$GOARCH go tool cgo -godefs" - ;; -openbsd_arm) - mkerrors="$mkerrors" - mksyscall="go run mksyscall.go -l32 -openbsd -arm" - mksysctl="go run mksysctl_openbsd.go" - mksysnum="go run mksysnum.go 'https://cvsweb.openbsd.org/cgi-bin/cvsweb/~checkout~/src/sys/kern/syscalls.master'" - # Let the type of C char be signed for making the bare syscall - # API consistent across platforms. - mktypes="GOARCH=$GOARCH go tool cgo -godefs -- -fsigned-char" - ;; -openbsd_arm64) - mkerrors="$mkerrors -m64" - mksyscall="go run mksyscall.go -openbsd" - mksysctl="go run mksysctl_openbsd.go" - mksysnum="go run mksysnum.go 'https://cvsweb.openbsd.org/cgi-bin/cvsweb/~checkout~/src/sys/kern/syscalls.master'" - # Let the type of C char be signed for making the bare syscall - # API consistent across platforms. - mktypes="GOARCH=$GOARCH go tool cgo -godefs -- -fsigned-char" - ;; -openbsd_mips64) - mkerrors="$mkerrors -m64" - mksyscall="go run mksyscall.go -openbsd" - mksysctl="go run mksysctl_openbsd.go" - mksysnum="go run mksysnum.go 'https://cvsweb.openbsd.org/cgi-bin/cvsweb/~checkout~/src/sys/kern/syscalls.master'" - # Let the type of C char be signed for making the bare syscall - # API consistent across platforms. 
- mktypes="GOARCH=$GOARCH go tool cgo -godefs -- -fsigned-char" - ;; -solaris_amd64) - mksyscall="go run mksyscall_solaris.go" - mkerrors="$mkerrors -m64" - mksysnum= - mktypes="GOARCH=$GOARCH go tool cgo -godefs" - ;; -illumos_amd64) - mksyscall="go run mksyscall_solaris.go" - mkerrors= - mksysnum= - mktypes="GOARCH=$GOARCH go tool cgo -godefs" - ;; -*) - echo 'unrecognized $GOOS_$GOARCH: ' "$GOOSARCH" 1>&2 - exit 1 - ;; -esac - -( - if [ -n "$mkerrors" ]; then echo "$mkerrors |gofmt >$zerrors"; fi - case "$GOOS" in - *) - syscall_goos="syscall_$GOOS.go" - case "$GOOS" in - darwin | dragonfly | freebsd | netbsd | openbsd) - syscall_goos="syscall_bsd.go $syscall_goos" - ;; - esac - if [ -n "$mksyscall" ]; then - if [ "$GOOSARCH" == "aix_ppc64" ]; then - # aix/ppc64 script generates files instead of writing to stdin. - echo "$mksyscall -tags $GOOS,$GOARCH $syscall_goos $GOOSARCH_in && gofmt -w zsyscall_$GOOSARCH.go && gofmt -w zsyscall_"$GOOSARCH"_gccgo.go && gofmt -w zsyscall_"$GOOSARCH"_gc.go " ; - elif [ "$GOOS" == "darwin" ]; then - # 1.12 and later, syscalls via libSystem - echo "$mksyscall -tags $GOOS,$GOARCH,go1.12 $syscall_goos $GOOSARCH_in |gofmt >zsyscall_$GOOSARCH.go"; - # 1.13 and later, syscalls via libSystem (including syscallPtr) - echo "$mksyscall -tags $GOOS,$GOARCH,go1.13 syscall_darwin.1_13.go |gofmt >zsyscall_$GOOSARCH.1_13.go"; - elif [ "$GOOS" == "illumos" ]; then - # illumos code generation requires a --illumos switch - echo "$mksyscall -illumos -tags illumos,$GOARCH syscall_illumos.go |gofmt > zsyscall_illumos_$GOARCH.go"; - # illumos implies solaris, so solaris code generation is also required - echo "$mksyscall -tags solaris,$GOARCH syscall_solaris.go syscall_solaris_$GOARCH.go |gofmt >zsyscall_solaris_$GOARCH.go"; - else - echo "$mksyscall -tags $GOOS,$GOARCH $syscall_goos $GOOSARCH_in |gofmt >zsyscall_$GOOSARCH.go"; - fi - fi - esac - if [ -n "$mksysctl" ]; then echo "$mksysctl |gofmt >$zsysctl"; fi - if [ -n "$mksysnum" ]; then echo "$mksysnum |gofmt >zsysnum_$GOOSARCH.go"; fi - if [ -n "$mktypes" ]; then echo "$mktypes types_$GOOS.go | go run mkpost.go > ztypes_$GOOSARCH.go"; fi - if [ -n "$mkasm" ]; then echo "$mkasm $GOARCH"; fi -) | $run diff --git a/vendor/golang.org/x/sys/unix/mkerrors.sh b/vendor/golang.org/x/sys/unix/mkerrors.sh deleted file mode 100644 index d888fb77..00000000 --- a/vendor/golang.org/x/sys/unix/mkerrors.sh +++ /dev/null @@ -1,772 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2009 The Go Authors. All rights reserved. -# Use of this source code is governed by a BSD-style -# license that can be found in the LICENSE file. - -# Generate Go code listing errors and other #defined constant -# values (ENAMETOOLONG etc.), by asking the preprocessor -# about the definitions. - -unset LANG -export LC_ALL=C -export LC_CTYPE=C - -if test -z "$GOARCH" -o -z "$GOOS"; then - echo 1>&2 "GOARCH or GOOS not defined in environment" - exit 1 -fi - -# Check that we are using the new build system if we should -if [[ "$GOOS" = "linux" ]] && [[ "$GOLANG_SYS_BUILD" != "docker" ]]; then - echo 1>&2 "In the Docker based build system, mkerrors should not be called directly." - echo 1>&2 "See README.md" - exit 1 -fi - -if [[ "$GOOS" = "aix" ]]; then - CC=${CC:-gcc} -else - CC=${CC:-cc} -fi - -if [[ "$GOOS" = "solaris" ]]; then - # Assumes GNU versions of utilities in PATH. 
- export PATH=/usr/gnu/bin:$PATH -fi - -uname=$(uname) - -includes_AIX=' -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#define AF_LOCAL AF_UNIX -' - -includes_Darwin=' -#define _DARWIN_C_SOURCE -#define KERNEL 1 -#define _DARWIN_USE_64_BIT_INODE -#define __APPLE_USE_RFC_3542 -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -// for backwards compatibility because moved TIOCREMOTE to Kernel.framework after MacOSX12.0.sdk. -#define TIOCREMOTE 0x80047469 -' - -includes_DragonFly=' -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -' - -includes_FreeBSD=' -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#if __FreeBSD__ >= 10 -#define IFT_CARP 0xf8 // IFT_CARP is deprecated in FreeBSD 10 -#undef SIOCAIFADDR -#define SIOCAIFADDR _IOW(105, 26, struct oifaliasreq) // ifaliasreq contains if_data -#undef SIOCSIFPHYADDR -#define SIOCSIFPHYADDR _IOW(105, 70, struct oifaliasreq) // ifaliasreq contains if_data -#endif -' - -includes_Linux=' -#define _LARGEFILE_SOURCE -#define _LARGEFILE64_SOURCE -#ifndef __LP64__ -#define _FILE_OFFSET_BITS 64 -#endif -#define _GNU_SOURCE - -// is broken on powerpc64, as it fails to include definitions of -// these structures. We just include them copied from . -#if defined(__powerpc__) -struct sgttyb { - char sg_ispeed; - char sg_ospeed; - char sg_erase; - char sg_kill; - short sg_flags; -}; - -struct tchars { - char t_intrc; - char t_quitc; - char t_startc; - char t_stopc; - char t_eofc; - char t_brkc; -}; - -struct ltchars { - char t_suspc; - char t_dsuspc; - char t_rprntc; - char t_flushc; - char t_werasc; - char t_lnextc; -}; -#endif - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include - -#if defined(__sparc__) -// On sparc{,64}, the kernel defines struct termios2 itself which clashes with the -// definition in glibc. As only the error constants are needed here, include the -// generic termibits.h (which is included by termbits.h on sparc). 
-#include -#else -#include -#endif - -#ifndef MSG_FASTOPEN -#define MSG_FASTOPEN 0x20000000 -#endif - -#ifndef PTRACE_GETREGS -#define PTRACE_GETREGS 0xc -#endif - -#ifndef PTRACE_SETREGS -#define PTRACE_SETREGS 0xd -#endif - -#ifndef SOL_NETLINK -#define SOL_NETLINK 270 -#endif - -#ifdef SOL_BLUETOOTH -// SPARC includes this in /usr/include/sparc64-linux-gnu/bits/socket.h -// but it is already in bluetooth_linux.go -#undef SOL_BLUETOOTH -#endif - -// Certain constants are missing from the fs/crypto UAPI -#define FS_KEY_DESC_PREFIX "fscrypt:" -#define FS_KEY_DESC_PREFIX_SIZE 8 -#define FS_MAX_KEY_SIZE 64 - -// The code generator produces -0x1 for (~0), but an unsigned value is necessary -// for the tipc_subscr timeout __u32 field. -#undef TIPC_WAIT_FOREVER -#define TIPC_WAIT_FOREVER 0xffffffff - -// Copied from linux/l2tp.h -// Including linux/l2tp.h here causes conflicts between linux/in.h -// and netinet/in.h included via net/route.h above. -#define IPPROTO_L2TP 115 - -// Copied from linux/hid.h. -// Keep in sync with the size of the referenced fields. -#define _HIDIOCGRAWNAME_LEN 128 // sizeof_field(struct hid_device, name) -#define _HIDIOCGRAWPHYS_LEN 64 // sizeof_field(struct hid_device, phys) -#define _HIDIOCGRAWUNIQ_LEN 64 // sizeof_field(struct hid_device, uniq) - -#define _HIDIOCGRAWNAME HIDIOCGRAWNAME(_HIDIOCGRAWNAME_LEN) -#define _HIDIOCGRAWPHYS HIDIOCGRAWPHYS(_HIDIOCGRAWPHYS_LEN) -#define _HIDIOCGRAWUNIQ HIDIOCGRAWUNIQ(_HIDIOCGRAWUNIQ_LEN) - -' - -includes_NetBSD=' -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -// Needed since refers to it... -#define schedppq 1 -' - -includes_OpenBSD=' -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -// We keep some constants not supported in OpenBSD 5.5 and beyond for -// the promise of compatibility. -#define EMUL_ENABLED 0x1 -#define EMUL_NATIVE 0x2 -#define IPV6_FAITH 0x1d -#define IPV6_OPTIONS 0x1 -#define IPV6_RTHDR_STRICT 0x1 -#define IPV6_SOCKOPT_RESERVED1 0x3 -#define SIOCGIFGENERIC 0xc020693a -#define SIOCSIFGENERIC 0x80206939 -#define WALTSIG 0x4 -' - -includes_SunOS=' -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -' - - -includes=' -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -' -ccflags="$@" - -# Write go tool cgo -godefs input. -( - echo package unix - echo - echo '/*' - indirect="includes_$(uname)" - echo "${!indirect} $includes" - echo '*/' - echo 'import "C"' - echo 'import "syscall"' - echo - echo 'const (' - - # The gcc command line prints all the #defines - # it encounters while processing the input - echo "${!indirect} $includes" | $CC -x c - -E -dM $ccflags | - awk ' - $1 != "#define" || $2 ~ /\(/ || $3 == "" {next} - - $2 ~ /^E([ABCD]X|[BIS]P|[SD]I|S|FL)$/ {next} # 386 registers - $2 ~ /^(SIGEV_|SIGSTKSZ|SIGRT(MIN|MAX))/ {next} - $2 ~ /^(SCM_SRCRT)$/ {next} - $2 ~ /^(MAP_FAILED)$/ {next} - $2 ~ /^ELF_.*$/ {next}# contains ELF_ARCH, etc. 
- - $2 ~ /^EXTATTR_NAMESPACE_NAMES/ || - $2 ~ /^EXTATTR_NAMESPACE_[A-Z]+_STRING/ {next} - - $2 !~ /^ECCAPBITS/ && - $2 !~ /^ETH_/ && - $2 !~ /^EPROC_/ && - $2 !~ /^EQUIV_/ && - $2 !~ /^EXPR_/ && - $2 !~ /^EVIOC/ && - $2 ~ /^E[A-Z0-9_]+$/ || - $2 ~ /^B[0-9_]+$/ || - $2 ~ /^(OLD|NEW)DEV$/ || - $2 == "BOTHER" || - $2 ~ /^CI?BAUD(EX)?$/ || - $2 == "IBSHIFT" || - $2 ~ /^V[A-Z0-9]+$/ || - $2 ~ /^CS[A-Z0-9]/ || - $2 ~ /^I(SIG|CANON|CRNL|UCLC|EXTEN|MAXBEL|STRIP|UTF8)$/ || - $2 ~ /^IGN/ || - $2 ~ /^IX(ON|ANY|OFF)$/ || - $2 ~ /^IN(LCR|PCK)$/ || - $2 !~ "X86_CR3_PCID_NOFLUSH" && - $2 ~ /(^FLU?SH)|(FLU?SH$)/ || - $2 ~ /^C(LOCAL|READ|MSPAR|RTSCTS)$/ || - $2 == "BRKINT" || - $2 == "HUPCL" || - $2 == "PENDIN" || - $2 == "TOSTOP" || - $2 == "XCASE" || - $2 == "ALTWERASE" || - $2 == "NOKERNINFO" || - $2 == "NFDBITS" || - $2 ~ /^PAR/ || - $2 ~ /^SIG[^_]/ || - $2 ~ /^O[CNPFPL][A-Z]+[^_][A-Z]+$/ || - $2 ~ /^(NL|CR|TAB|BS|VT|FF)DLY$/ || - $2 ~ /^(NL|CR|TAB|BS|VT|FF)[0-9]$/ || - $2 ~ /^O?XTABS$/ || - $2 ~ /^TC[IO](ON|OFF)$/ || - $2 ~ /^IN_/ || - $2 ~ /^KCM/ || - $2 ~ /^LANDLOCK_/ || - $2 ~ /^LOCK_(SH|EX|NB|UN)$/ || - $2 ~ /^LO_(KEY|NAME)_SIZE$/ || - $2 ~ /^LOOP_(CLR|CTL|GET|SET)_/ || - $2 ~ /^(AF|SOCK|SO|SOL|IPPROTO|IP|IPV6|TCP|MCAST|EVFILT|NOTE|SHUT|PROT|MAP|MFD|T?PACKET|MSG|SCM|MCL|DT|MADV|PR|LOCAL|TCPOPT)_/ || - $2 ~ /^NFC_(GENL|PROTO|COMM|RF|SE|DIRECTION|LLCP|SOCKPROTO)_/ || - $2 ~ /^NFC_.*_(MAX)?SIZE$/ || - $2 ~ /^RAW_PAYLOAD_/ || - $2 ~ /^TP_STATUS_/ || - $2 ~ /^FALLOC_/ || - $2 ~ /^ICMPV?6?_(FILTER|SEC)/ || - $2 == "SOMAXCONN" || - $2 == "NAME_MAX" || - $2 == "IFNAMSIZ" || - $2 ~ /^CTL_(HW|KERN|MAXNAME|NET|QUERY)$/ || - $2 ~ /^KERN_(HOSTNAME|OS(RELEASE|TYPE)|VERSION)$/ || - $2 ~ /^HW_MACHINE$/ || - $2 ~ /^SYSCTL_VERS/ || - $2 !~ "MNT_BITS" && - $2 ~ /^(MS|MNT|MOUNT|UMOUNT)_/ || - $2 ~ /^NS_GET_/ || - $2 ~ /^TUN(SET|GET|ATTACH|DETACH)/ || - $2 ~ /^(O|F|[ES]?FD|NAME|S|PTRACE|PT|TFD)_/ || - $2 ~ /^KEXEC_/ || - $2 ~ /^LINUX_REBOOT_CMD_/ || - $2 ~ /^LINUX_REBOOT_MAGIC[12]$/ || - $2 ~ /^MODULE_INIT_/ || - $2 !~ "NLA_TYPE_MASK" && - $2 !~ /^RTC_VL_(ACCURACY|BACKUP|DATA)/ && - $2 ~ /^(NETLINK|NLM|NLMSG|NLA|IFA|IFAN|RT|RTC|RTCF|RTN|RTPROT|RTNH|ARPHRD|ETH_P|NETNSA)_/ || - $2 ~ /^FIORDCHK$/ || - $2 ~ /^SIOC/ || - $2 ~ /^TIOC/ || - $2 ~ /^TCGET/ || - $2 ~ /^TCSET/ || - $2 ~ /^TC(FLSH|SBRKP?|XONC)$/ || - $2 !~ "RTF_BITS" && - $2 ~ /^(IFF|IFT|NET_RT|RTM(GRP)?|RTF|RTV|RTA|RTAX)_/ || - $2 ~ /^BIOC/ || - $2 ~ /^DIOC/ || - $2 ~ /^RUSAGE_(SELF|CHILDREN|THREAD)/ || - $2 ~ /^RLIMIT_(AS|CORE|CPU|DATA|FSIZE|LOCKS|MEMLOCK|MSGQUEUE|NICE|NOFILE|NPROC|RSS|RTPRIO|RTTIME|SIGPENDING|STACK)|RLIM_INFINITY/ || - $2 ~ /^PRIO_(PROCESS|PGRP|USER)/ || - $2 ~ /^CLONE_[A-Z_]+/ || - $2 !~ /^(BPF_TIMEVAL|BPF_FIB_LOOKUP_[A-Z]+)$/ && - $2 ~ /^(BPF|DLT)_/ || - $2 ~ /^(CLOCK|TIMER)_/ || - $2 ~ /^CAN_/ || - $2 ~ /^CAP_/ || - $2 ~ /^CP_/ || - $2 ~ /^CPUSTATES$/ || - $2 ~ /^CTLIOCGINFO$/ || - $2 ~ /^ALG_/ || - $2 ~ /^FI(CLONE|DEDUPERANGE)/ || - $2 ~ /^FS_(POLICY_FLAGS|KEY_DESC|ENCRYPTION_MODE|[A-Z0-9_]+_KEY_SIZE)/ || - $2 ~ /^FS_IOC_.*(ENCRYPTION|VERITY|[GS]ETFLAGS)/ || - $2 ~ /^FS_VERITY_/ || - $2 ~ /^FSCRYPT_/ || - $2 ~ /^DM_/ || - $2 ~ /^GRND_/ || - $2 ~ /^RND/ || - $2 ~ /^KEY_(SPEC|REQKEY_DEFL)_/ || - $2 ~ /^KEYCTL_/ || - $2 ~ /^PERF_/ || - $2 ~ /^SECCOMP_MODE_/ || - $2 ~ /^SEEK_/ || - $2 ~ /^SPLICE_/ || - $2 ~ /^SYNC_FILE_RANGE_/ || - $2 !~ /^AUDIT_RECORD_MAGIC/ && - $2 !~ /IOC_MAGIC/ && - $2 ~ /^[A-Z][A-Z0-9_]+_MAGIC2?$/ || - $2 ~ /^(VM|VMADDR)_/ || - $2 ~ /^IOCTL_VM_SOCKETS_/ || - $2 ~ /^(TASKSTATS|TS)_/ || - $2 ~ /^CGROUPSTATS_/ || - $2 ~ 
/^GENL_/ || - $2 ~ /^STATX_/ || - $2 ~ /^RENAME/ || - $2 ~ /^UBI_IOC[A-Z]/ || - $2 ~ /^UTIME_/ || - $2 ~ /^XATTR_(CREATE|REPLACE|NO(DEFAULT|FOLLOW|SECURITY)|SHOWCOMPRESSION)/ || - $2 ~ /^ATTR_(BIT_MAP_COUNT|(CMN|VOL|FILE)_)/ || - $2 ~ /^FSOPT_/ || - $2 ~ /^WDIO[CFS]_/ || - $2 ~ /^NFN/ || - $2 ~ /^XDP_/ || - $2 ~ /^RWF_/ || - $2 ~ /^(HDIO|WIN|SMART)_/ || - $2 ~ /^CRYPTO_/ || - $2 ~ /^TIPC_/ || - $2 !~ "DEVLINK_RELOAD_LIMITS_VALID_MASK" && - $2 ~ /^DEVLINK_/ || - $2 ~ /^ETHTOOL_/ || - $2 ~ /^LWTUNNEL_IP/ || - $2 ~ /^ITIMER_/ || - $2 !~ "WMESGLEN" && - $2 ~ /^W[A-Z0-9]+$/ || - $2 ~ /^P_/ || - $2 ~/^PPPIOC/ || - $2 ~ /^FAN_|FANOTIFY_/ || - $2 == "HID_MAX_DESCRIPTOR_SIZE" || - $2 ~ /^_?HIDIOC/ || - $2 ~ /^BUS_(USB|HIL|BLUETOOTH|VIRTUAL)$/ || - $2 ~ /^MTD/ || - $2 ~ /^OTP/ || - $2 ~ /^MEM/ || - $2 ~ /^WG/ || - $2 ~ /^FIB_RULE_/ || - $2 ~ /^BLK[A-Z]*(GET$|SET$|BUF$|PART$|SIZE)/ {printf("\t%s = C.%s\n", $2, $2)} - $2 ~ /^__WCOREFLAG$/ {next} - $2 ~ /^__W[A-Z0-9]+$/ {printf("\t%s = C.%s\n", substr($2,3), $2)} - - {next} - ' | sort - - echo ')' -) >_const.go - -# Pull out the error names for later. -errors=$( - echo '#include ' | $CC -x c - -E -dM $ccflags | - awk '$1=="#define" && $2 ~ /^E[A-Z0-9_]+$/ { print $2 }' | - sort -) - -# Pull out the signal names for later. -signals=$( - echo '#include ' | $CC -x c - -E -dM $ccflags | - awk '$1=="#define" && $2 ~ /^SIG[A-Z0-9]+$/ { print $2 }' | - egrep -v '(SIGSTKSIZE|SIGSTKSZ|SIGRT|SIGMAX64)' | - sort -) - -# Again, writing regexps to a file. -echo '#include ' | $CC -x c - -E -dM $ccflags | - awk '$1=="#define" && $2 ~ /^E[A-Z0-9_]+$/ { print "^\t" $2 "[ \t]*=" }' | - sort >_error.grep -echo '#include ' | $CC -x c - -E -dM $ccflags | - awk '$1=="#define" && $2 ~ /^SIG[A-Z0-9]+$/ { print "^\t" $2 "[ \t]*=" }' | - egrep -v '(SIGSTKSIZE|SIGSTKSZ|SIGRT|SIGMAX64)' | - sort >_signal.grep - -echo '// mkerrors.sh' "$@" -echo '// Code generated by the command above; see README.md. DO NOT EDIT.' -echo -echo "//go:build ${GOARCH} && ${GOOS}" -echo "// +build ${GOARCH},${GOOS}" -echo -go tool cgo -godefs -- "$@" _const.go >_error.out -cat _error.out | grep -vf _error.grep | grep -vf _signal.grep -echo -echo '// Errors' -echo 'const (' -cat _error.out | grep -f _error.grep | sed 's/=\(.*\)/= syscall.Errno(\1)/' -echo ')' - -echo -echo '// Signals' -echo 'const (' -cat _error.out | grep -f _signal.grep | sed 's/=\(.*\)/= syscall.Signal(\1)/' -echo ')' - -# Run C program to print error and syscall strings. -( - echo -E " -#include -#include -#include -#include -#include -#include - -#define nelem(x) (sizeof(x)/sizeof((x)[0])) - -enum { A = 'A', Z = 'Z', a = 'a', z = 'z' }; // avoid need for single quotes below - -struct tuple { - int num; - const char *name; -}; - -struct tuple errors[] = { -" - for i in $errors - do - echo -E ' {'$i', "'$i'" },' - done - - echo -E " -}; - -struct tuple signals[] = { -" - for i in $signals - do - echo -E ' {'$i', "'$i'" },' - done - - # Use -E because on some systems bash builtin interprets \n itself. 
- echo -E ' -}; - -static int -tuplecmp(const void *a, const void *b) -{ - return ((struct tuple *)a)->num - ((struct tuple *)b)->num; -} - -int -main(void) -{ - int i, e; - char buf[1024], *p; - - printf("\n\n// Error table\n"); - printf("var errorList = [...]struct {\n"); - printf("\tnum syscall.Errno\n"); - printf("\tname string\n"); - printf("\tdesc string\n"); - printf("} {\n"); - qsort(errors, nelem(errors), sizeof errors[0], tuplecmp); - for(i=0; i 0 && errors[i-1].num == e) - continue; - strcpy(buf, strerror(e)); - // lowercase first letter: Bad -> bad, but STREAM -> STREAM. - if(A <= buf[0] && buf[0] <= Z && a <= buf[1] && buf[1] <= z) - buf[0] += a - A; - printf("\t{ %d, \"%s\", \"%s\" },\n", e, errors[i].name, buf); - } - printf("}\n\n"); - - printf("\n\n// Signal table\n"); - printf("var signalList = [...]struct {\n"); - printf("\tnum syscall.Signal\n"); - printf("\tname string\n"); - printf("\tdesc string\n"); - printf("} {\n"); - qsort(signals, nelem(signals), sizeof signals[0], tuplecmp); - for(i=0; i 0 && signals[i-1].num == e) - continue; - strcpy(buf, strsignal(e)); - // lowercase first letter: Bad -> bad, but STREAM -> STREAM. - if(A <= buf[0] && buf[0] <= Z && a <= buf[1] && buf[1] <= z) - buf[0] += a - A; - // cut trailing : number. - p = strrchr(buf, ":"[0]); - if(p) - *p = '\0'; - printf("\t{ %d, \"%s\", \"%s\" },\n", e, signals[i].name, buf); - } - printf("}\n\n"); - - return 0; -} - -' -) >_errors.c - -$CC $ccflags -o _errors _errors.c && $GORUN ./_errors && rm -f _errors.c _errors _const.go _error.grep _signal.grep _error.out diff --git a/vendor/google.golang.org/grpc/codegen.sh b/vendor/google.golang.org/grpc/codegen.sh deleted file mode 100644 index 4cdc6ba7..00000000 --- a/vendor/google.golang.org/grpc/codegen.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env bash - -# This script serves as an example to demonstrate how to generate the gRPC-Go -# interface and the related messages from .proto file. -# -# It assumes the installation of i) Google proto buffer compiler at -# https://github.com/google/protobuf (after v2.6.1) and ii) the Go codegen -# plugin at https://github.com/golang/protobuf (after 2015-02-20). If you have -# not, please install them first. -# -# We recommend running this script at $GOPATH/src. -# -# If this is not what you need, feel free to make your own scripts. Again, this -# script is for demonstration purpose. -# -proto=$1 -protoc --go_out=plugins=grpc:. $proto diff --git a/vendor/google.golang.org/grpc/regenerate.sh b/vendor/google.golang.org/grpc/regenerate.sh deleted file mode 100644 index 978b89f3..00000000 --- a/vendor/google.golang.org/grpc/regenerate.sh +++ /dev/null @@ -1,131 +0,0 @@ -#!/bin/bash -# Copyright 2020 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -eu -o pipefail - -WORKDIR=$(mktemp -d) - -function finish { - rm -rf "$WORKDIR" -} -trap finish EXIT - -export GOBIN=${WORKDIR}/bin -export PATH=${GOBIN}:${PATH} -mkdir -p ${GOBIN} - -echo "remove existing generated files" -# grpc_testing_not_regenerate/*.pb.go is not re-generated, -# see grpc_testing_not_regenerate/README.md for details. -rm -f $(find . -name '*.pb.go' | grep -v 'grpc_testing_not_regenerate') - -echo "go install google.golang.org/protobuf/cmd/protoc-gen-go" -(cd test/tools && go install google.golang.org/protobuf/cmd/protoc-gen-go) - -echo "go install cmd/protoc-gen-go-grpc" -(cd cmd/protoc-gen-go-grpc && go install .) - -echo "git clone https://github.com/grpc/grpc-proto" -git clone --quiet https://github.com/grpc/grpc-proto ${WORKDIR}/grpc-proto - -echo "git clone https://github.com/protocolbuffers/protobuf" -git clone --quiet https://github.com/protocolbuffers/protobuf ${WORKDIR}/protobuf - -# Pull in code.proto as a proto dependency -mkdir -p ${WORKDIR}/googleapis/google/rpc -echo "curl https://raw.githubusercontent.com/googleapis/googleapis/master/google/rpc/code.proto" -curl --silent https://raw.githubusercontent.com/googleapis/googleapis/master/google/rpc/code.proto > ${WORKDIR}/googleapis/google/rpc/code.proto - -mkdir -p ${WORKDIR}/out - -# Generates sources without the embed requirement -LEGACY_SOURCES=( - ${WORKDIR}/grpc-proto/grpc/binlog/v1/binarylog.proto - ${WORKDIR}/grpc-proto/grpc/channelz/v1/channelz.proto - ${WORKDIR}/grpc-proto/grpc/health/v1/health.proto - ${WORKDIR}/grpc-proto/grpc/lb/v1/load_balancer.proto - profiling/proto/service.proto - reflection/grpc_reflection_v1alpha/reflection.proto -) - -# Generates only the new gRPC Service symbols -SOURCES=( - $(git ls-files --exclude-standard --cached --others "*.proto" | grep -v '^\(profiling/proto/service.proto\|reflection/grpc_reflection_v1alpha/reflection.proto\)$') - ${WORKDIR}/grpc-proto/grpc/gcp/altscontext.proto - ${WORKDIR}/grpc-proto/grpc/gcp/handshaker.proto - ${WORKDIR}/grpc-proto/grpc/gcp/transport_security_common.proto - ${WORKDIR}/grpc-proto/grpc/lookup/v1/rls.proto - ${WORKDIR}/grpc-proto/grpc/lookup/v1/rls_config.proto - ${WORKDIR}/grpc-proto/grpc/service_config/service_config.proto - ${WORKDIR}/grpc-proto/grpc/testing/*.proto - ${WORKDIR}/grpc-proto/grpc/core/*.proto -) - -# These options of the form 'Mfoo.proto=bar' instruct the codegen to use an -# import path of 'bar' in the generated code when 'foo.proto' is imported in -# one of the sources. -# -# Note that the protos listed here are all for testing purposes. All protos to -# be used externally should have a go_package option (and they don't need to be -# listed here). 
-OPTS=Mgrpc/service_config/service_config.proto=/internal/proto/grpc_service_config,\ -Mgrpc/core/stats.proto=google.golang.org/grpc/interop/grpc_testing/core,\ -Mgrpc/testing/benchmark_service.proto=google.golang.org/grpc/interop/grpc_testing,\ -Mgrpc/testing/stats.proto=google.golang.org/grpc/interop/grpc_testing,\ -Mgrpc/testing/report_qps_scenario_service.proto=google.golang.org/grpc/interop/grpc_testing,\ -Mgrpc/testing/messages.proto=google.golang.org/grpc/interop/grpc_testing,\ -Mgrpc/testing/worker_service.proto=google.golang.org/grpc/interop/grpc_testing,\ -Mgrpc/testing/control.proto=google.golang.org/grpc/interop/grpc_testing,\ -Mgrpc/testing/test.proto=google.golang.org/grpc/interop/grpc_testing,\ -Mgrpc/testing/payloads.proto=google.golang.org/grpc/interop/grpc_testing,\ -Mgrpc/testing/empty.proto=google.golang.org/grpc/interop/grpc_testing - -for src in ${SOURCES[@]}; do - echo "protoc ${src}" - protoc --go_out=${OPTS}:${WORKDIR}/out --go-grpc_out=${OPTS}:${WORKDIR}/out \ - -I"." \ - -I${WORKDIR}/grpc-proto \ - -I${WORKDIR}/googleapis \ - -I${WORKDIR}/protobuf/src \ - ${src} -done - -for src in ${LEGACY_SOURCES[@]}; do - echo "protoc ${src}" - protoc --go_out=${OPTS}:${WORKDIR}/out --go-grpc_out=${OPTS},require_unimplemented_servers=false:${WORKDIR}/out \ - -I"." \ - -I${WORKDIR}/grpc-proto \ - -I${WORKDIR}/googleapis \ - -I${WORKDIR}/protobuf/src \ - ${src} -done - -# The go_package option in grpc/lookup/v1/rls.proto doesn't match the -# current location. Move it into the right place. -mkdir -p ${WORKDIR}/out/google.golang.org/grpc/internal/proto/grpc_lookup_v1 -mv ${WORKDIR}/out/google.golang.org/grpc/lookup/grpc_lookup_v1/* ${WORKDIR}/out/google.golang.org/grpc/internal/proto/grpc_lookup_v1 - -# grpc_testing_not_regenerate/*.pb.go are not re-generated, -# see grpc_testing_not_regenerate/README.md for details. -rm ${WORKDIR}/out/google.golang.org/grpc/reflection/grpc_testing_not_regenerate/*.pb.go - -# grpc/service_config/service_config.proto does not have a go_package option. -mv ${WORKDIR}/out/grpc/service_config/service_config.pb.go internal/proto/grpc_service_config - -# grpc/testing does not have a go_package option. -mv ${WORKDIR}/out/grpc/testing/*.pb.go interop/grpc_testing/ -mv ${WORKDIR}/out/grpc/core/*.pb.go interop/grpc_testing/core/ - -cp -R ${WORKDIR}/out/google.golang.org/grpc/* . diff --git a/vendor/google.golang.org/grpc/vet.sh b/vendor/google.golang.org/grpc/vet.sh deleted file mode 100644 index ceb436c6..00000000 --- a/vendor/google.golang.org/grpc/vet.sh +++ /dev/null @@ -1,211 +0,0 @@ -#!/bin/bash - -set -ex # Exit on error; debugging enabled. -set -o pipefail # Fail a pipe if any sub-command fails. - -# not makes sure the command passed to it does not exit with a return code of 0. -not() { - # This is required instead of the earlier (! $COMMAND) because subshells and - # pipefail don't work the same on Darwin as in Linux. - ! "$@" -} - -die() { - echo "$@" >&2 - exit 1 -} - -fail_on_output() { - tee /dev/stderr | not read -} - -# Check to make sure it's safe to modify the user's git repo. -git status --porcelain | fail_on_output - -# Undo any edits made by this script. -cleanup() { - git reset --hard HEAD -} -trap cleanup EXIT - -PATH="${HOME}/go/bin:${GOROOT}/bin:${PATH}" -go version - -if [[ "$1" = "-install" ]]; then - # Install the pinned versions as defined in module tools. 
- pushd ./test/tools - go install \ - golang.org/x/lint/golint \ - golang.org/x/tools/cmd/goimports \ - honnef.co/go/tools/cmd/staticcheck \ - github.com/client9/misspell/cmd/misspell - popd - if [[ -z "${VET_SKIP_PROTO}" ]]; then - if [[ "${TRAVIS}" = "true" ]]; then - PROTOBUF_VERSION=3.14.0 - PROTOC_FILENAME=protoc-${PROTOBUF_VERSION}-linux-x86_64.zip - pushd /home/travis - wget https://github.com/google/protobuf/releases/download/v${PROTOBUF_VERSION}/${PROTOC_FILENAME} - unzip ${PROTOC_FILENAME} - bin/protoc --version - popd - elif [[ "${GITHUB_ACTIONS}" = "true" ]]; then - PROTOBUF_VERSION=3.14.0 - PROTOC_FILENAME=protoc-${PROTOBUF_VERSION}-linux-x86_64.zip - pushd /home/runner/go - wget https://github.com/google/protobuf/releases/download/v${PROTOBUF_VERSION}/${PROTOC_FILENAME} - unzip ${PROTOC_FILENAME} - bin/protoc --version - popd - elif not which protoc > /dev/null; then - die "Please install protoc into your path" - fi - fi - exit 0 -elif [[ "$#" -ne 0 ]]; then - die "Unknown argument(s): $*" -fi - -# - Ensure all source files contain a copyright message. -not git grep -L "\(Copyright [0-9]\{4,\} gRPC authors\)\|DO NOT EDIT" -- '*.go' - -# - Make sure all tests in grpc and grpc/test use leakcheck via Teardown. -not grep 'func Test[^(]' *_test.go -not grep 'func Test[^(]' test/*.go - -# - Do not import x/net/context. -not git grep -l 'x/net/context' -- "*.go" - -# - Do not import math/rand for real library code. Use internal/grpcrand for -# thread safety. -git grep -l '"math/rand"' -- "*.go" 2>&1 | not grep -v '^examples\|^stress\|grpcrand\|^benchmark\|wrr_test' - -# - Do not call grpclog directly. Use grpclog.Component instead. -git grep -l 'grpclog.I\|grpclog.W\|grpclog.E\|grpclog.F\|grpclog.V' -- "*.go" | not grep -v '^grpclog/component.go\|^internal/grpctest/tlogger_test.go' - -# - Ensure all ptypes proto packages are renamed when importing. -not git grep "\(import \|^\s*\)\"github.com/golang/protobuf/ptypes/" -- "*.go" - -# - Ensure all xds proto imports are renamed to *pb or *grpc. -git grep '"github.com/envoyproxy/go-control-plane/envoy' -- '*.go' ':(exclude)*.pb.go' | not grep -v 'pb "\|grpc "' - -misspell -error . - -# - Check that generated proto files are up to date. -if [[ -z "${VET_SKIP_PROTO}" ]]; then - PATH="/home/travis/bin:${PATH}" make proto && \ - git status --porcelain 2>&1 | fail_on_output || \ - (git status; git --no-pager diff; exit 1) -fi - -# - gofmt, goimports, golint (with exceptions for generated code), go vet, -# go mod tidy. -# Perform these checks on each module inside gRPC. -for MOD_FILE in $(find . -name 'go.mod'); do - MOD_DIR=$(dirname ${MOD_FILE}) - pushd ${MOD_DIR} - go vet -all ./... | fail_on_output - gofmt -s -d -l . 2>&1 | fail_on_output - goimports -l . 2>&1 | not grep -vE "\.pb\.go" - golint ./... 2>&1 | not grep -vE "/grpc_testing_not_regenerate/.*\.pb\.go:" - - go mod tidy - git status --porcelain 2>&1 | fail_on_output || \ - (git status; git --no-pager diff; exit 1) - popd -done - -# - Collection of static analysis checks -# -# TODO(dfawley): don't use deprecated functions in examples or first-party -# plugins. -SC_OUT="$(mktemp)" -staticcheck -go 1.9 -checks 'inherit,-ST1015' ./... > "${SC_OUT}" || true -# Error if anything other than deprecation warnings are printed. -not grep -v "is deprecated:.*SA1019" "${SC_OUT}" -# Only ignore the following deprecated types/fields/functions. 
-not grep -Fv '.CredsBundle -.HeaderMap -.Metadata is deprecated: use Attributes -.NewAddress -.NewServiceConfig -.Type is deprecated: use Attributes -BuildVersion is deprecated -balancer.ErrTransientFailure -balancer.Picker -extDesc.Filename is deprecated -github.com/golang/protobuf/jsonpb is deprecated -grpc.CallCustomCodec -grpc.Code -grpc.Compressor -grpc.CustomCodec -grpc.Decompressor -grpc.MaxMsgSize -grpc.MethodConfig -grpc.NewGZIPCompressor -grpc.NewGZIPDecompressor -grpc.RPCCompressor -grpc.RPCDecompressor -grpc.ServiceConfig -grpc.WithBalancerName -grpc.WithCompressor -grpc.WithDecompressor -grpc.WithDialer -grpc.WithMaxMsgSize -grpc.WithServiceConfig -grpc.WithTimeout -http.CloseNotifier -info.SecurityVersion -proto is deprecated -proto.InternalMessageInfo is deprecated -proto.EnumName is deprecated -proto.ErrInternalBadWireType is deprecated -proto.FileDescriptor is deprecated -proto.Marshaler is deprecated -proto.MessageType is deprecated -proto.RegisterEnum is deprecated -proto.RegisterFile is deprecated -proto.RegisterType is deprecated -proto.RegisterExtension is deprecated -proto.RegisteredExtension is deprecated -proto.RegisteredExtensions is deprecated -proto.RegisterMapType is deprecated -proto.Unmarshaler is deprecated -resolver.Backend -resolver.GRPCLB -Target is deprecated: Use the Target field in the BuildOptions instead. -xxx_messageInfo_ -' "${SC_OUT}" - -# - special golint on package comments. -lint_package_comment_per_package() { - # Number of files in this go package. - fileCount=$(go list -f '{{len .GoFiles}}' $1) - if [ ${fileCount} -eq 0 ]; then - return 0 - fi - # Number of package errors generated by golint. - lintPackageCommentErrorsCount=$(golint --min_confidence 0 $1 | grep -c "should have a package comment") - # golint complains about every file that's missing the package comment. If the - # number of files for this package is greater than the number of errors, there's - # at least one file with package comment, good. Otherwise, fail. - if [ ${fileCount} -le ${lintPackageCommentErrorsCount} ]; then - echo "Package $1 (with ${fileCount} files) is missing package comment" - return 1 - fi -} -lint_package_comment() { - set +ex - - count=0 - for i in $(go list ./...); do - lint_package_comment_per_package "$i" - ((count += $?)) - done - - set -ex - return $count -} -lint_package_comment - -echo SUCCESS diff --git a/vendor/gopkg.in/alexcesaro/quotedprintable.v3/LICENSE b/vendor/gopkg.in/alexcesaro/quotedprintable.v3/LICENSE index 5f5c12af..e69de29b 100644 --- a/vendor/gopkg.in/alexcesaro/quotedprintable.v3/LICENSE +++ b/vendor/gopkg.in/alexcesaro/quotedprintable.v3/LICENSE @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Alexandre Cesaro - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/gopkg.in/alexcesaro/quotedprintable.v3/README.md b/vendor/gopkg.in/alexcesaro/quotedprintable.v3/README.md index 98ddf829..e69de29b 100644 --- a/vendor/gopkg.in/alexcesaro/quotedprintable.v3/README.md +++ b/vendor/gopkg.in/alexcesaro/quotedprintable.v3/README.md @@ -1,16 +0,0 @@ -# quotedprintable - -## Introduction - -Package quotedprintable implements quoted-printable and message header encoding -as specified by RFC 2045 and RFC 2047. - -It is a copy of the Go 1.5 package `mime/quotedprintable`. It also includes -the new functions of package `mime` concerning RFC 2047. - -This code has minor changes with the standard library code in order to work -with Go 1.0 and newer. - -## Documentation - -https://godoc.org/gopkg.in/alexcesaro/quotedprintable.v3 diff --git a/vendor/gopkg.in/alexcesaro/quotedprintable.v3/encodedword.go b/vendor/gopkg.in/alexcesaro/quotedprintable.v3/encodedword.go index cfd02617..e69de29b 100644 --- a/vendor/gopkg.in/alexcesaro/quotedprintable.v3/encodedword.go +++ b/vendor/gopkg.in/alexcesaro/quotedprintable.v3/encodedword.go @@ -1,279 +0,0 @@ -package quotedprintable - -import ( - "bytes" - "encoding/base64" - "errors" - "fmt" - "io" - "strings" - "unicode" - "unicode/utf8" -) - -// A WordEncoder is a RFC 2047 encoded-word encoder. -type WordEncoder byte - -const ( - // BEncoding represents Base64 encoding scheme as defined by RFC 2045. - BEncoding = WordEncoder('b') - // QEncoding represents the Q-encoding scheme as defined by RFC 2047. - QEncoding = WordEncoder('q') -) - -var ( - errInvalidWord = errors.New("mime: invalid RFC 2047 encoded-word") -) - -// Encode returns the encoded-word form of s. If s is ASCII without special -// characters, it is returned unchanged. The provided charset is the IANA -// charset name of s. It is case insensitive. -func (e WordEncoder) Encode(charset, s string) string { - if !needsEncoding(s) { - return s - } - return e.encodeWord(charset, s) -} - -func needsEncoding(s string) bool { - for _, b := range s { - if (b < ' ' || b > '~') && b != '\t' { - return true - } - } - return false -} - -// encodeWord encodes a string into an encoded-word. -func (e WordEncoder) encodeWord(charset, s string) string { - buf := getBuffer() - defer putBuffer(buf) - - buf.WriteString("=?") - buf.WriteString(charset) - buf.WriteByte('?') - buf.WriteByte(byte(e)) - buf.WriteByte('?') - - if e == BEncoding { - w := base64.NewEncoder(base64.StdEncoding, buf) - io.WriteString(w, s) - w.Close() - } else { - enc := make([]byte, 3) - for i := 0; i < len(s); i++ { - b := s[i] - switch { - case b == ' ': - buf.WriteByte('_') - case b <= '~' && b >= '!' && b != '=' && b != '?' && b != '_': - buf.WriteByte(b) - default: - enc[0] = '=' - enc[1] = upperhex[b>>4] - enc[2] = upperhex[b&0x0f] - buf.Write(enc) - } - } - } - buf.WriteString("?=") - return buf.String() -} - -const upperhex = "0123456789ABCDEF" - -// A WordDecoder decodes MIME headers containing RFC 2047 encoded-words. -type WordDecoder struct { - // CharsetReader, if non-nil, defines a function to generate - // charset-conversion readers, converting from the provided - // charset into UTF-8. - // Charsets are always lower-case. utf-8, iso-8859-1 and us-ascii charsets - // are handled by default. 
- // One of the the CharsetReader's result values must be non-nil. - CharsetReader func(charset string, input io.Reader) (io.Reader, error) -} - -// Decode decodes an encoded-word. If word is not a valid RFC 2047 encoded-word, -// word is returned unchanged. -func (d *WordDecoder) Decode(word string) (string, error) { - fields := strings.Split(word, "?") // TODO: remove allocation? - if len(fields) != 5 || fields[0] != "=" || fields[4] != "=" || len(fields[2]) != 1 { - return "", errInvalidWord - } - - content, err := decode(fields[2][0], fields[3]) - if err != nil { - return "", err - } - - buf := getBuffer() - defer putBuffer(buf) - - if err := d.convert(buf, fields[1], content); err != nil { - return "", err - } - - return buf.String(), nil -} - -// DecodeHeader decodes all encoded-words of the given string. It returns an -// error if and only if CharsetReader of d returns an error. -func (d *WordDecoder) DecodeHeader(header string) (string, error) { - // If there is no encoded-word, returns before creating a buffer. - i := strings.Index(header, "=?") - if i == -1 { - return header, nil - } - - buf := getBuffer() - defer putBuffer(buf) - - buf.WriteString(header[:i]) - header = header[i:] - - betweenWords := false - for { - start := strings.Index(header, "=?") - if start == -1 { - break - } - cur := start + len("=?") - - i := strings.Index(header[cur:], "?") - if i == -1 { - break - } - charset := header[cur : cur+i] - cur += i + len("?") - - if len(header) < cur+len("Q??=") { - break - } - encoding := header[cur] - cur++ - - if header[cur] != '?' { - break - } - cur++ - - j := strings.Index(header[cur:], "?=") - if j == -1 { - break - } - text := header[cur : cur+j] - end := cur + j + len("?=") - - content, err := decode(encoding, text) - if err != nil { - betweenWords = false - buf.WriteString(header[:start+2]) - header = header[start+2:] - continue - } - - // Write characters before the encoded-word. White-space and newline - // characters separating two encoded-words must be deleted. - if start > 0 && (!betweenWords || hasNonWhitespace(header[:start])) { - buf.WriteString(header[:start]) - } - - if err := d.convert(buf, charset, content); err != nil { - return "", err - } - - header = header[end:] - betweenWords = true - } - - if len(header) > 0 { - buf.WriteString(header) - } - - return buf.String(), nil -} - -func decode(encoding byte, text string) ([]byte, error) { - switch encoding { - case 'B', 'b': - return base64.StdEncoding.DecodeString(text) - case 'Q', 'q': - return qDecode(text) - } - return nil, errInvalidWord -} - -func (d *WordDecoder) convert(buf *bytes.Buffer, charset string, content []byte) error { - switch { - case strings.EqualFold("utf-8", charset): - buf.Write(content) - case strings.EqualFold("iso-8859-1", charset): - for _, c := range content { - buf.WriteRune(rune(c)) - } - case strings.EqualFold("us-ascii", charset): - for _, c := range content { - if c >= utf8.RuneSelf { - buf.WriteRune(unicode.ReplacementChar) - } else { - buf.WriteByte(c) - } - } - default: - if d.CharsetReader == nil { - return fmt.Errorf("mime: unhandled charset %q", charset) - } - r, err := d.CharsetReader(strings.ToLower(charset), bytes.NewReader(content)) - if err != nil { - return err - } - if _, err = buf.ReadFrom(r); err != nil { - return err - } - } - return nil -} - -// hasNonWhitespace reports whether s (assumed to be ASCII) contains at least -// one byte of non-whitespace. 
-func hasNonWhitespace(s string) bool { - for _, b := range s { - switch b { - // Encoded-words can only be separated by linear white spaces which does - // not include vertical tabs (\v). - case ' ', '\t', '\n', '\r': - default: - return true - } - } - return false -} - -// qDecode decodes a Q encoded string. -func qDecode(s string) ([]byte, error) { - dec := make([]byte, len(s)) - n := 0 - for i := 0; i < len(s); i++ { - switch c := s[i]; { - case c == '_': - dec[n] = ' ' - case c == '=': - if i+2 >= len(s) { - return nil, errInvalidWord - } - b, err := readHexByte(s[i+1], s[i+2]) - if err != nil { - return nil, err - } - dec[n] = b - i += 2 - case (c <= '~' && c >= ' ') || c == '\n' || c == '\r' || c == '\t': - dec[n] = c - default: - return nil, errInvalidWord - } - n++ - } - - return dec[:n], nil -} diff --git a/vendor/gopkg.in/alexcesaro/quotedprintable.v3/pool.go b/vendor/gopkg.in/alexcesaro/quotedprintable.v3/pool.go index 24283c52..e69de29b 100644 --- a/vendor/gopkg.in/alexcesaro/quotedprintable.v3/pool.go +++ b/vendor/gopkg.in/alexcesaro/quotedprintable.v3/pool.go @@ -1,26 +0,0 @@ -// +build go1.3 - -package quotedprintable - -import ( - "bytes" - "sync" -) - -var bufPool = sync.Pool{ - New: func() interface{} { - return new(bytes.Buffer) - }, -} - -func getBuffer() *bytes.Buffer { - return bufPool.Get().(*bytes.Buffer) -} - -func putBuffer(buf *bytes.Buffer) { - if buf.Len() > 1024 { - return - } - buf.Reset() - bufPool.Put(buf) -} diff --git a/vendor/gopkg.in/alexcesaro/quotedprintable.v3/pool_go12.go b/vendor/gopkg.in/alexcesaro/quotedprintable.v3/pool_go12.go index d335b4ab..e69de29b 100644 --- a/vendor/gopkg.in/alexcesaro/quotedprintable.v3/pool_go12.go +++ b/vendor/gopkg.in/alexcesaro/quotedprintable.v3/pool_go12.go @@ -1,24 +0,0 @@ -// +build !go1.3 - -package quotedprintable - -import "bytes" - -var ch = make(chan *bytes.Buffer, 32) - -func getBuffer() *bytes.Buffer { - select { - case buf := <-ch: - return buf - default: - } - return new(bytes.Buffer) -} - -func putBuffer(buf *bytes.Buffer) { - buf.Reset() - select { - case ch <- buf: - default: - } -} diff --git a/vendor/gopkg.in/gomail.v2/.travis.yml b/vendor/gopkg.in/gomail.v2/.travis.yml index 48915e73..e69de29b 100644 --- a/vendor/gopkg.in/gomail.v2/.travis.yml +++ b/vendor/gopkg.in/gomail.v2/.travis.yml @@ -1,9 +0,0 @@ -language: go - -go: - - 1.2 - - 1.3 - - 1.4 - - 1.5 - - 1.6 - - tip diff --git a/vendor/gopkg.in/gomail.v2/CHANGELOG.md b/vendor/gopkg.in/gomail.v2/CHANGELOG.md index a797ab4c..e69de29b 100644 --- a/vendor/gopkg.in/gomail.v2/CHANGELOG.md +++ b/vendor/gopkg.in/gomail.v2/CHANGELOG.md @@ -1,20 +0,0 @@ -# Change Log -All notable changes to this project will be documented in this file. -This project adheres to [Semantic Versioning](http://semver.org/). - -## [2.0.0] - 2015-09-02 - -- Mailer has been removed. It has been replaced by Dialer and Sender. -- `File` type and the `CreateFile` and `OpenFile` functions have been removed. -- `Message.Attach` and `Message.Embed` have a new signature. -- `Message.GetBodyWriter` has been removed. Use `Message.AddAlternativeWriter` -instead. -- `Message.Export` has been removed. `Message.WriteTo` can be used instead. -- `Message.DelHeader` has been removed. -- The `Bcc` header field is no longer sent. It is far more simpler and -efficient: the same message is sent to all recipients instead of sending a -different email to each Bcc address. -- LoginAuth has been removed. `NewPlainDialer` now implements the LOGIN -authentication mechanism when needed. 
-- Go 1.2 is now required instead of Go 1.3. No external dependency are used when -using Go 1.5. diff --git a/vendor/gopkg.in/gomail.v2/CONTRIBUTING.md b/vendor/gopkg.in/gomail.v2/CONTRIBUTING.md index d5601c25..e69de29b 100644 --- a/vendor/gopkg.in/gomail.v2/CONTRIBUTING.md +++ b/vendor/gopkg.in/gomail.v2/CONTRIBUTING.md @@ -1,20 +0,0 @@ -Thank you for contributing to Gomail! Here are a few guidelines: - -## Bugs - -If you think you found a bug, create an issue and supply the minimum amount -of code triggering the bug so it can be reproduced. - - -## Fixing a bug - -If you want to fix a bug, you can send a pull request. It should contains a -new test or update an existing one to cover that bug. - - -## New feature proposal - -If you think Gomail lacks a feature, you can open an issue or send a pull -request. I want to keep Gomail code and API as simple as possible so please -describe your needs so we can discuss whether this feature should be added to -Gomail or not. diff --git a/vendor/gopkg.in/gomail.v2/LICENSE b/vendor/gopkg.in/gomail.v2/LICENSE index 5f5c12af..e69de29b 100644 --- a/vendor/gopkg.in/gomail.v2/LICENSE +++ b/vendor/gopkg.in/gomail.v2/LICENSE @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Alexandre Cesaro - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/gopkg.in/gomail.v2/README.md b/vendor/gopkg.in/gomail.v2/README.md index b3be9e14..e69de29b 100644 --- a/vendor/gopkg.in/gomail.v2/README.md +++ b/vendor/gopkg.in/gomail.v2/README.md @@ -1,92 +0,0 @@ -# Gomail -[![Build Status](https://travis-ci.org/go-gomail/gomail.svg?branch=v2)](https://travis-ci.org/go-gomail/gomail) [![Code Coverage](http://gocover.io/_badge/gopkg.in/gomail.v2)](http://gocover.io/gopkg.in/gomail.v2) [![Documentation](https://godoc.org/gopkg.in/gomail.v2?status.svg)](https://godoc.org/gopkg.in/gomail.v2) - -## Introduction - -Gomail is a simple and efficient package to send emails. It is well tested and -documented. - -Gomail can only send emails using an SMTP server. But the API is flexible and it -is easy to implement other methods for sending emails using a local Postfix, an -API, etc. - -It is versioned using [gopkg.in](https://gopkg.in) so I promise -there will never be backward incompatible changes within each version. - -It requires Go 1.2 or newer. With Go 1.5, no external dependencies are used. 
- - -## Features - -Gomail supports: -- Attachments -- Embedded images -- HTML and text templates -- Automatic encoding of special characters -- SSL and TLS -- Sending multiple emails with the same SMTP connection - - -## Documentation - -https://godoc.org/gopkg.in/gomail.v2 - - -## Download - - go get gopkg.in/gomail.v2 - - -## Examples - -See the [examples in the documentation](https://godoc.org/gopkg.in/gomail.v2#example-package). - - -## FAQ - -### x509: certificate signed by unknown authority - -If you get this error it means the certificate used by the SMTP server is not -considered valid by the client running Gomail. As a quick workaround you can -bypass the verification of the server's certificate chain and host name by using -`SetTLSConfig`: - - package main - - import ( - "crypto/tls" - - "gopkg.in/gomail.v2" - ) - - func main() { - d := gomail.NewDialer("smtp.example.com", 587, "user", "123456") - d.TLSConfig = &tls.Config{InsecureSkipVerify: true} - - // Send emails using d. - } - -Note, however, that this is insecure and should not be used in production. - - -## Contribute - -Contributions are more than welcome! See [CONTRIBUTING.md](CONTRIBUTING.md) for -more info. - - -## Change log - -See [CHANGELOG.md](CHANGELOG.md). - - -## License - -[MIT](LICENSE) - - -## Contact - -You can ask questions on the [Gomail -thread](https://groups.google.com/d/topic/golang-nuts/jMxZHzvvEVg/discussion) -in the Go mailing-list. diff --git a/vendor/gopkg.in/gomail.v2/message.go b/vendor/gopkg.in/gomail.v2/message.go index 4bffb1e7..e69de29b 100644 --- a/vendor/gopkg.in/gomail.v2/message.go +++ b/vendor/gopkg.in/gomail.v2/message.go @@ -1,322 +0,0 @@ -package gomail - -import ( - "bytes" - "io" - "os" - "path/filepath" - "time" -) - -// Message represents an email. -type Message struct { - header header - parts []*part - attachments []*file - embedded []*file - charset string - encoding Encoding - hEncoder mimeEncoder - buf bytes.Buffer -} - -type header map[string][]string - -type part struct { - contentType string - copier func(io.Writer) error - encoding Encoding -} - -// NewMessage creates a new message. It uses UTF-8 and quoted-printable encoding -// by default. -func NewMessage(settings ...MessageSetting) *Message { - m := &Message{ - header: make(header), - charset: "UTF-8", - encoding: QuotedPrintable, - } - - m.applySettings(settings) - - if m.encoding == Base64 { - m.hEncoder = bEncoding - } else { - m.hEncoder = qEncoding - } - - return m -} - -// Reset resets the message so it can be reused. The message keeps its previous -// settings so it is in the same state that after a call to NewMessage. -func (m *Message) Reset() { - for k := range m.header { - delete(m.header, k) - } - m.parts = nil - m.attachments = nil - m.embedded = nil -} - -func (m *Message) applySettings(settings []MessageSetting) { - for _, s := range settings { - s(m) - } -} - -// A MessageSetting can be used as an argument in NewMessage to configure an -// email. -type MessageSetting func(m *Message) - -// SetCharset is a message setting to set the charset of the email. -func SetCharset(charset string) MessageSetting { - return func(m *Message) { - m.charset = charset - } -} - -// SetEncoding is a message setting to set the encoding of the email. -func SetEncoding(enc Encoding) MessageSetting { - return func(m *Message) { - m.encoding = enc - } -} - -// Encoding represents a MIME encoding scheme like quoted-printable or base64. 
-type Encoding string - -const ( - // QuotedPrintable represents the quoted-printable encoding as defined in - // RFC 2045. - QuotedPrintable Encoding = "quoted-printable" - // Base64 represents the base64 encoding as defined in RFC 2045. - Base64 Encoding = "base64" - // Unencoded can be used to avoid encoding the body of an email. The headers - // will still be encoded using quoted-printable encoding. - Unencoded Encoding = "8bit" -) - -// SetHeader sets a value to the given header field. -func (m *Message) SetHeader(field string, value ...string) { - m.encodeHeader(value) - m.header[field] = value -} - -func (m *Message) encodeHeader(values []string) { - for i := range values { - values[i] = m.encodeString(values[i]) - } -} - -func (m *Message) encodeString(value string) string { - return m.hEncoder.Encode(m.charset, value) -} - -// SetHeaders sets the message headers. -func (m *Message) SetHeaders(h map[string][]string) { - for k, v := range h { - m.SetHeader(k, v...) - } -} - -// SetAddressHeader sets an address to the given header field. -func (m *Message) SetAddressHeader(field, address, name string) { - m.header[field] = []string{m.FormatAddress(address, name)} -} - -// FormatAddress formats an address and a name as a valid RFC 5322 address. -func (m *Message) FormatAddress(address, name string) string { - if name == "" { - return address - } - - enc := m.encodeString(name) - if enc == name { - m.buf.WriteByte('"') - for i := 0; i < len(name); i++ { - b := name[i] - if b == '\\' || b == '"' { - m.buf.WriteByte('\\') - } - m.buf.WriteByte(b) - } - m.buf.WriteByte('"') - } else if hasSpecials(name) { - m.buf.WriteString(bEncoding.Encode(m.charset, name)) - } else { - m.buf.WriteString(enc) - } - m.buf.WriteString(" <") - m.buf.WriteString(address) - m.buf.WriteByte('>') - - addr := m.buf.String() - m.buf.Reset() - return addr -} - -func hasSpecials(text string) bool { - for i := 0; i < len(text); i++ { - switch c := text[i]; c { - case '(', ')', '<', '>', '[', ']', ':', ';', '@', '\\', ',', '.', '"': - return true - } - } - - return false -} - -// SetDateHeader sets a date to the given header field. -func (m *Message) SetDateHeader(field string, date time.Time) { - m.header[field] = []string{m.FormatDate(date)} -} - -// FormatDate formats a date as a valid RFC 5322 date. -func (m *Message) FormatDate(date time.Time) string { - return date.Format(time.RFC1123Z) -} - -// GetHeader gets a header field. -func (m *Message) GetHeader(field string) []string { - return m.header[field] -} - -// SetBody sets the body of the message. It replaces any content previously set -// by SetBody, AddAlternative or AddAlternativeWriter. -func (m *Message) SetBody(contentType, body string, settings ...PartSetting) { - m.parts = []*part{m.newPart(contentType, newCopier(body), settings)} -} - -// AddAlternative adds an alternative part to the message. -// -// It is commonly used to send HTML emails that default to the plain text -// version for backward compatibility. AddAlternative appends the new part to -// the end of the message. So the plain text part should be added before the -// HTML part. See http://en.wikipedia.org/wiki/MIME#Alternative -func (m *Message) AddAlternative(contentType, body string, settings ...PartSetting) { - m.AddAlternativeWriter(contentType, newCopier(body), settings...) -} - -func newCopier(s string) func(io.Writer) error { - return func(w io.Writer) error { - _, err := io.WriteString(w, s) - return err - } -} - -// AddAlternativeWriter adds an alternative part to the message. 
It can be -// useful with the text/template or html/template packages. -func (m *Message) AddAlternativeWriter(contentType string, f func(io.Writer) error, settings ...PartSetting) { - m.parts = append(m.parts, m.newPart(contentType, f, settings)) -} - -func (m *Message) newPart(contentType string, f func(io.Writer) error, settings []PartSetting) *part { - p := &part{ - contentType: contentType, - copier: f, - encoding: m.encoding, - } - - for _, s := range settings { - s(p) - } - - return p -} - -// A PartSetting can be used as an argument in Message.SetBody, -// Message.AddAlternative or Message.AddAlternativeWriter to configure the part -// added to a message. -type PartSetting func(*part) - -// SetPartEncoding sets the encoding of the part added to the message. By -// default, parts use the same encoding than the message. -func SetPartEncoding(e Encoding) PartSetting { - return PartSetting(func(p *part) { - p.encoding = e - }) -} - -type file struct { - Name string - Header map[string][]string - CopyFunc func(w io.Writer) error -} - -func (f *file) setHeader(field, value string) { - f.Header[field] = []string{value} -} - -// A FileSetting can be used as an argument in Message.Attach or Message.Embed. -type FileSetting func(*file) - -// SetHeader is a file setting to set the MIME header of the message part that -// contains the file content. -// -// Mandatory headers are automatically added if they are not set when sending -// the email. -func SetHeader(h map[string][]string) FileSetting { - return func(f *file) { - for k, v := range h { - f.Header[k] = v - } - } -} - -// Rename is a file setting to set the name of the attachment if the name is -// different than the filename on disk. -func Rename(name string) FileSetting { - return func(f *file) { - f.Name = name - } -} - -// SetCopyFunc is a file setting to replace the function that runs when the -// message is sent. It should copy the content of the file to the io.Writer. -// -// The default copy function opens the file with the given filename, and copy -// its content to the io.Writer. -func SetCopyFunc(f func(io.Writer) error) FileSetting { - return func(fi *file) { - fi.CopyFunc = f - } -} - -func (m *Message) appendFile(list []*file, name string, settings []FileSetting) []*file { - f := &file{ - Name: filepath.Base(name), - Header: make(map[string][]string), - CopyFunc: func(w io.Writer) error { - h, err := os.Open(name) - if err != nil { - return err - } - if _, err := io.Copy(w, h); err != nil { - h.Close() - return err - } - return h.Close() - }, - } - - for _, s := range settings { - s(f) - } - - if list == nil { - return []*file{f} - } - - return append(list, f) -} - -// Attach attaches the files to the email. -func (m *Message) Attach(filename string, settings ...FileSetting) { - m.attachments = m.appendFile(m.attachments, filename, settings) -} - -// Embed embeds the images to the email. -func (m *Message) Embed(filename string, settings ...FileSetting) { - m.embedded = m.appendFile(m.embedded, filename, settings) -} diff --git a/vendor/gopkg.in/gomail.v2/smtp.go b/vendor/gopkg.in/gomail.v2/smtp.go index 2aa49c8b..e69de29b 100644 --- a/vendor/gopkg.in/gomail.v2/smtp.go +++ b/vendor/gopkg.in/gomail.v2/smtp.go @@ -1,202 +0,0 @@ -package gomail - -import ( - "crypto/tls" - "fmt" - "io" - "net" - "net/smtp" - "strings" - "time" -) - -// A Dialer is a dialer to an SMTP server. -type Dialer struct { - // Host represents the host of the SMTP server. - Host string - // Port represents the port of the SMTP server. 
- Port int - // Username is the username to use to authenticate to the SMTP server. - Username string - // Password is the password to use to authenticate to the SMTP server. - Password string - // Auth represents the authentication mechanism used to authenticate to the - // SMTP server. - Auth smtp.Auth - // SSL defines whether an SSL connection is used. It should be false in - // most cases since the authentication mechanism should use the STARTTLS - // extension instead. - SSL bool - // TSLConfig represents the TLS configuration used for the TLS (when the - // STARTTLS extension is used) or SSL connection. - TLSConfig *tls.Config - // LocalName is the hostname sent to the SMTP server with the HELO command. - // By default, "localhost" is sent. - LocalName string -} - -// NewDialer returns a new SMTP Dialer. The given parameters are used to connect -// to the SMTP server. -func NewDialer(host string, port int, username, password string) *Dialer { - return &Dialer{ - Host: host, - Port: port, - Username: username, - Password: password, - SSL: port == 465, - } -} - -// NewPlainDialer returns a new SMTP Dialer. The given parameters are used to -// connect to the SMTP server. -// -// Deprecated: Use NewDialer instead. -func NewPlainDialer(host string, port int, username, password string) *Dialer { - return NewDialer(host, port, username, password) -} - -// Dial dials and authenticates to an SMTP server. The returned SendCloser -// should be closed when done using it. -func (d *Dialer) Dial() (SendCloser, error) { - conn, err := netDialTimeout("tcp", addr(d.Host, d.Port), 10*time.Second) - if err != nil { - return nil, err - } - - if d.SSL { - conn = tlsClient(conn, d.tlsConfig()) - } - - c, err := smtpNewClient(conn, d.Host) - if err != nil { - return nil, err - } - - if d.LocalName != "" { - if err := c.Hello(d.LocalName); err != nil { - return nil, err - } - } - - if !d.SSL { - if ok, _ := c.Extension("STARTTLS"); ok { - if err := c.StartTLS(d.tlsConfig()); err != nil { - c.Close() - return nil, err - } - } - } - - if d.Auth == nil && d.Username != "" { - if ok, auths := c.Extension("AUTH"); ok { - if strings.Contains(auths, "CRAM-MD5") { - d.Auth = smtp.CRAMMD5Auth(d.Username, d.Password) - } else if strings.Contains(auths, "LOGIN") && - !strings.Contains(auths, "PLAIN") { - d.Auth = &loginAuth{ - username: d.Username, - password: d.Password, - host: d.Host, - } - } else { - d.Auth = smtp.PlainAuth("", d.Username, d.Password, d.Host) - } - } - } - - if d.Auth != nil { - if err = c.Auth(d.Auth); err != nil { - c.Close() - return nil, err - } - } - - return &smtpSender{c, d}, nil -} - -func (d *Dialer) tlsConfig() *tls.Config { - if d.TLSConfig == nil { - return &tls.Config{ServerName: d.Host} - } - return d.TLSConfig -} - -func addr(host string, port int) string { - return fmt.Sprintf("%s:%d", host, port) -} - -// DialAndSend opens a connection to the SMTP server, sends the given emails and -// closes the connection. -func (d *Dialer) DialAndSend(m ...*Message) error { - s, err := d.Dial() - if err != nil { - return err - } - defer s.Close() - - return Send(s, m...) -} - -type smtpSender struct { - smtpClient - d *Dialer -} - -func (c *smtpSender) Send(from string, to []string, msg io.WriterTo) error { - if err := c.Mail(from); err != nil { - if err == io.EOF { - // This is probably due to a timeout, so reconnect and try again. 
- sc, derr := c.d.Dial() - if derr == nil { - if s, ok := sc.(*smtpSender); ok { - *c = *s - return c.Send(from, to, msg) - } - } - } - return err - } - - for _, addr := range to { - if err := c.Rcpt(addr); err != nil { - return err - } - } - - w, err := c.Data() - if err != nil { - return err - } - - if _, err = msg.WriteTo(w); err != nil { - w.Close() - return err - } - - return w.Close() -} - -func (c *smtpSender) Close() error { - return c.Quit() -} - -// Stubbed out for tests. -var ( - netDialTimeout = net.DialTimeout - tlsClient = tls.Client - smtpNewClient = func(conn net.Conn, host string) (smtpClient, error) { - return smtp.NewClient(conn, host) - } -) - -type smtpClient interface { - Hello(string) error - Extension(string) (bool, string) - StartTLS(*tls.Config) error - Auth(smtp.Auth) error - Mail(string) error - Rcpt(string) error - Data() (io.WriteCloser, error) - Quit() error - Close() error -} diff --git a/vendor/gopkg.in/gomail.v2/writeto.go b/vendor/gopkg.in/gomail.v2/writeto.go index 9fb6b86e..e69de29b 100644 --- a/vendor/gopkg.in/gomail.v2/writeto.go +++ b/vendor/gopkg.in/gomail.v2/writeto.go @@ -1,306 +0,0 @@ -package gomail - -import ( - "encoding/base64" - "errors" - "io" - "mime" - "mime/multipart" - "path/filepath" - "strings" - "time" -) - -// WriteTo implements io.WriterTo. It dumps the whole message into w. -func (m *Message) WriteTo(w io.Writer) (int64, error) { - mw := &messageWriter{w: w} - mw.writeMessage(m) - return mw.n, mw.err -} - -func (w *messageWriter) writeMessage(m *Message) { - if _, ok := m.header["Mime-Version"]; !ok { - w.writeString("Mime-Version: 1.0\r\n") - } - if _, ok := m.header["Date"]; !ok { - w.writeHeader("Date", m.FormatDate(now())) - } - w.writeHeaders(m.header) - - if m.hasMixedPart() { - w.openMultipart("mixed") - } - - if m.hasRelatedPart() { - w.openMultipart("related") - } - - if m.hasAlternativePart() { - w.openMultipart("alternative") - } - for _, part := range m.parts { - w.writePart(part, m.charset) - } - if m.hasAlternativePart() { - w.closeMultipart() - } - - w.addFiles(m.embedded, false) - if m.hasRelatedPart() { - w.closeMultipart() - } - - w.addFiles(m.attachments, true) - if m.hasMixedPart() { - w.closeMultipart() - } -} - -func (m *Message) hasMixedPart() bool { - return (len(m.parts) > 0 && len(m.attachments) > 0) || len(m.attachments) > 1 -} - -func (m *Message) hasRelatedPart() bool { - return (len(m.parts) > 0 && len(m.embedded) > 0) || len(m.embedded) > 1 -} - -func (m *Message) hasAlternativePart() bool { - return len(m.parts) > 1 -} - -type messageWriter struct { - w io.Writer - n int64 - writers [3]*multipart.Writer - partWriter io.Writer - depth uint8 - err error -} - -func (w *messageWriter) openMultipart(mimeType string) { - mw := multipart.NewWriter(w) - contentType := "multipart/" + mimeType + ";\r\n boundary=" + mw.Boundary() - w.writers[w.depth] = mw - - if w.depth == 0 { - w.writeHeader("Content-Type", contentType) - w.writeString("\r\n") - } else { - w.createPart(map[string][]string{ - "Content-Type": {contentType}, - }) - } - w.depth++ -} - -func (w *messageWriter) createPart(h map[string][]string) { - w.partWriter, w.err = w.writers[w.depth-1].CreatePart(h) -} - -func (w *messageWriter) closeMultipart() { - if w.depth > 0 { - w.writers[w.depth-1].Close() - w.depth-- - } -} - -func (w *messageWriter) writePart(p *part, charset string) { - w.writeHeaders(map[string][]string{ - "Content-Type": {p.contentType + "; charset=" + charset}, - "Content-Transfer-Encoding": {string(p.encoding)}, - }) - 
w.writeBody(p.copier, p.encoding) -} - -func (w *messageWriter) addFiles(files []*file, isAttachment bool) { - for _, f := range files { - if _, ok := f.Header["Content-Type"]; !ok { - mediaType := mime.TypeByExtension(filepath.Ext(f.Name)) - if mediaType == "" { - mediaType = "application/octet-stream" - } - f.setHeader("Content-Type", mediaType+`; name="`+f.Name+`"`) - } - - if _, ok := f.Header["Content-Transfer-Encoding"]; !ok { - f.setHeader("Content-Transfer-Encoding", string(Base64)) - } - - if _, ok := f.Header["Content-Disposition"]; !ok { - var disp string - if isAttachment { - disp = "attachment" - } else { - disp = "inline" - } - f.setHeader("Content-Disposition", disp+`; filename="`+f.Name+`"`) - } - - if !isAttachment { - if _, ok := f.Header["Content-ID"]; !ok { - f.setHeader("Content-ID", "<"+f.Name+">") - } - } - w.writeHeaders(f.Header) - w.writeBody(f.CopyFunc, Base64) - } -} - -func (w *messageWriter) Write(p []byte) (int, error) { - if w.err != nil { - return 0, errors.New("gomail: cannot write as writer is in error") - } - - var n int - n, w.err = w.w.Write(p) - w.n += int64(n) - return n, w.err -} - -func (w *messageWriter) writeString(s string) { - n, _ := io.WriteString(w.w, s) - w.n += int64(n) -} - -func (w *messageWriter) writeHeader(k string, v ...string) { - w.writeString(k) - if len(v) == 0 { - w.writeString(":\r\n") - return - } - w.writeString(": ") - - // Max header line length is 78 characters in RFC 5322 and 76 characters - // in RFC 2047. So for the sake of simplicity we use the 76 characters - // limit. - charsLeft := 76 - len(k) - len(": ") - - for i, s := range v { - // If the line is already too long, insert a newline right away. - if charsLeft < 1 { - if i == 0 { - w.writeString("\r\n ") - } else { - w.writeString(",\r\n ") - } - charsLeft = 75 - } else if i != 0 { - w.writeString(", ") - charsLeft -= 2 - } - - // While the header content is too long, fold it by inserting a newline. - for len(s) > charsLeft { - s = w.writeLine(s, charsLeft) - charsLeft = 75 - } - w.writeString(s) - if i := lastIndexByte(s, '\n'); i != -1 { - charsLeft = 75 - (len(s) - i - 1) - } else { - charsLeft -= len(s) - } - } - w.writeString("\r\n") -} - -func (w *messageWriter) writeLine(s string, charsLeft int) string { - // If there is already a newline before the limit. Write the line. - if i := strings.IndexByte(s, '\n'); i != -1 && i < charsLeft { - w.writeString(s[:i+1]) - return s[i+1:] - } - - for i := charsLeft - 1; i >= 0; i-- { - if s[i] == ' ' { - w.writeString(s[:i]) - w.writeString("\r\n ") - return s[i+1:] - } - } - - // We could not insert a newline cleanly so look for a space or a newline - // even if it is after the limit. - for i := 75; i < len(s); i++ { - if s[i] == ' ' { - w.writeString(s[:i]) - w.writeString("\r\n ") - return s[i+1:] - } - if s[i] == '\n' { - w.writeString(s[:i+1]) - return s[i+1:] - } - } - - // Too bad, no space or newline in the whole string. Just write everything. - w.writeString(s) - return "" -} - -func (w *messageWriter) writeHeaders(h map[string][]string) { - if w.depth == 0 { - for k, v := range h { - if k != "Bcc" { - w.writeHeader(k, v...) 
- } - } - } else { - w.createPart(h) - } -} - -func (w *messageWriter) writeBody(f func(io.Writer) error, enc Encoding) { - var subWriter io.Writer - if w.depth == 0 { - w.writeString("\r\n") - subWriter = w.w - } else { - subWriter = w.partWriter - } - - if enc == Base64 { - wc := base64.NewEncoder(base64.StdEncoding, newBase64LineWriter(subWriter)) - w.err = f(wc) - wc.Close() - } else if enc == Unencoded { - w.err = f(subWriter) - } else { - wc := newQPWriter(subWriter) - w.err = f(wc) - wc.Close() - } -} - -// As required by RFC 2045, 6.7. (page 21) for quoted-printable, and -// RFC 2045, 6.8. (page 25) for base64. -const maxLineLen = 76 - -// base64LineWriter limits text encoded in base64 to 76 characters per line -type base64LineWriter struct { - w io.Writer - lineLen int -} - -func newBase64LineWriter(w io.Writer) *base64LineWriter { - return &base64LineWriter{w: w} -} - -func (w *base64LineWriter) Write(p []byte) (int, error) { - n := 0 - for len(p)+w.lineLen > maxLineLen { - w.w.Write(p[:maxLineLen-w.lineLen]) - w.w.Write([]byte("\r\n")) - p = p[maxLineLen-w.lineLen:] - n += maxLineLen - w.lineLen - w.lineLen = 0 - } - - w.w.Write(p) - w.lineLen += len(p) - - return n + len(p), nil -} - -// Stubbed out for testing. -var now = time.Now diff --git a/vendor/gopkg.in/yaml.v2/.travis.yml b/vendor/gopkg.in/yaml.v2/.travis.yml index 7348c50c..e69de29b 100644 --- a/vendor/gopkg.in/yaml.v2/.travis.yml +++ b/vendor/gopkg.in/yaml.v2/.travis.yml @@ -1,17 +0,0 @@ -language: go - -go: - - "1.4.x" - - "1.5.x" - - "1.6.x" - - "1.7.x" - - "1.8.x" - - "1.9.x" - - "1.10.x" - - "1.11.x" - - "1.12.x" - - "1.13.x" - - "1.14.x" - - "tip" - -go_import_path: gopkg.in/yaml.v2 diff --git a/vendor/gopkg.in/yaml.v2/LICENSE b/vendor/gopkg.in/yaml.v2/LICENSE index 8dada3ed..e69de29b 100644 --- a/vendor/gopkg.in/yaml.v2/LICENSE +++ b/vendor/gopkg.in/yaml.v2/LICENSE @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/vendor/gopkg.in/yaml.v2/LICENSE.libyaml b/vendor/gopkg.in/yaml.v2/LICENSE.libyaml index 8da58fbf..e69de29b 100644 --- a/vendor/gopkg.in/yaml.v2/LICENSE.libyaml +++ b/vendor/gopkg.in/yaml.v2/LICENSE.libyaml @@ -1,31 +0,0 @@ -The following files were ported to Go from C files of libyaml, and thus -are still covered by their original copyright and license: - - apic.go - emitterc.go - parserc.go - readerc.go - scannerc.go - writerc.go - yamlh.go - yamlprivateh.go - -Copyright (c) 2006 Kirill Simonov - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/gopkg.in/yaml.v2/NOTICE b/vendor/gopkg.in/yaml.v2/NOTICE index 866d74a7..e69de29b 100644 --- a/vendor/gopkg.in/yaml.v2/NOTICE +++ b/vendor/gopkg.in/yaml.v2/NOTICE @@ -1,13 +0,0 @@ -Copyright 2011-2016 Canonical Ltd. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/vendor/gopkg.in/yaml.v2/README.md b/vendor/gopkg.in/yaml.v2/README.md index b50c6e87..e69de29b 100644 --- a/vendor/gopkg.in/yaml.v2/README.md +++ b/vendor/gopkg.in/yaml.v2/README.md @@ -1,133 +0,0 @@ -# YAML support for the Go language - -Introduction ------------- - -The yaml package enables Go programs to comfortably encode and decode YAML -values. It was developed within [Canonical](https://www.canonical.com) as -part of the [juju](https://juju.ubuntu.com) project, and is based on a -pure Go port of the well-known [libyaml](http://pyyaml.org/wiki/LibYAML) -C library to parse and generate YAML data quickly and reliably. - -Compatibility -------------- - -The yaml package supports most of YAML 1.1 and 1.2, including support for -anchors, tags, map merging, etc. Multi-document unmarshalling is not yet -implemented, and base-60 floats from YAML 1.1 are purposefully not -supported since they're a poor design and are gone in YAML 1.2. - -Installation and usage ----------------------- - -The import path for the package is *gopkg.in/yaml.v2*. 
- -To install it, run: - - go get gopkg.in/yaml.v2 - -API documentation ------------------ - -If opened in a browser, the import path itself leads to the API documentation: - - * [https://gopkg.in/yaml.v2](https://gopkg.in/yaml.v2) - -API stability -------------- - -The package API for yaml v2 will remain stable as described in [gopkg.in](https://gopkg.in). - - -License -------- - -The yaml package is licensed under the Apache License 2.0. Please see the LICENSE file for details. - - -Example -------- - -```Go -package main - -import ( - "fmt" - "log" - - "gopkg.in/yaml.v2" -) - -var data = ` -a: Easy! -b: - c: 2 - d: [3, 4] -` - -// Note: struct fields must be public in order for unmarshal to -// correctly populate the data. -type T struct { - A string - B struct { - RenamedC int `yaml:"c"` - D []int `yaml:",flow"` - } -} - -func main() { - t := T{} - - err := yaml.Unmarshal([]byte(data), &t) - if err != nil { - log.Fatalf("error: %v", err) - } - fmt.Printf("--- t:\n%v\n\n", t) - - d, err := yaml.Marshal(&t) - if err != nil { - log.Fatalf("error: %v", err) - } - fmt.Printf("--- t dump:\n%s\n\n", string(d)) - - m := make(map[interface{}]interface{}) - - err = yaml.Unmarshal([]byte(data), &m) - if err != nil { - log.Fatalf("error: %v", err) - } - fmt.Printf("--- m:\n%v\n\n", m) - - d, err = yaml.Marshal(&m) - if err != nil { - log.Fatalf("error: %v", err) - } - fmt.Printf("--- m dump:\n%s\n\n", string(d)) -} -``` - -This example will generate the following output: - -``` ---- t: -{Easy! {2 [3 4]}} - ---- t dump: -a: Easy! -b: - c: 2 - d: [3, 4] - - ---- m: -map[a:Easy! b:map[c:2 d:[3 4]]] - ---- m dump: -a: Easy! -b: - c: 2 - d: - - 3 - - 4 -``` - diff --git a/vendor/gopkg.in/yaml.v2/apic.go b/vendor/gopkg.in/yaml.v2/apic.go index acf71402..e69de29b 100644 --- a/vendor/gopkg.in/yaml.v2/apic.go +++ b/vendor/gopkg.in/yaml.v2/apic.go @@ -1,744 +0,0 @@ -package yaml - -import ( - "io" -) - -func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) { - //fmt.Println("yaml_insert_token", "pos:", pos, "typ:", token.typ, "head:", parser.tokens_head, "len:", len(parser.tokens)) - - // Check if we can move the queue at the beginning of the buffer. - if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) { - if parser.tokens_head != len(parser.tokens) { - copy(parser.tokens, parser.tokens[parser.tokens_head:]) - } - parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head] - parser.tokens_head = 0 - } - parser.tokens = append(parser.tokens, *token) - if pos < 0 { - return - } - copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:]) - parser.tokens[parser.tokens_head+pos] = *token -} - -// Create a new parser object. -func yaml_parser_initialize(parser *yaml_parser_t) bool { - *parser = yaml_parser_t{ - raw_buffer: make([]byte, 0, input_raw_buffer_size), - buffer: make([]byte, 0, input_buffer_size), - } - return true -} - -// Destroy a parser object. -func yaml_parser_delete(parser *yaml_parser_t) { - *parser = yaml_parser_t{} -} - -// String read handler. -func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { - if parser.input_pos == len(parser.input) { - return 0, io.EOF - } - n = copy(buffer, parser.input[parser.input_pos:]) - parser.input_pos += n - return n, nil -} - -// Reader read handler. -func yaml_reader_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { - return parser.input_reader.Read(buffer) -} - -// Set a string input. 
-func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) { - if parser.read_handler != nil { - panic("must set the input source only once") - } - parser.read_handler = yaml_string_read_handler - parser.input = input - parser.input_pos = 0 -} - -// Set a file input. -func yaml_parser_set_input_reader(parser *yaml_parser_t, r io.Reader) { - if parser.read_handler != nil { - panic("must set the input source only once") - } - parser.read_handler = yaml_reader_read_handler - parser.input_reader = r -} - -// Set the source encoding. -func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) { - if parser.encoding != yaml_ANY_ENCODING { - panic("must set the encoding only once") - } - parser.encoding = encoding -} - -var disableLineWrapping = false - -// Create a new emitter object. -func yaml_emitter_initialize(emitter *yaml_emitter_t) { - *emitter = yaml_emitter_t{ - buffer: make([]byte, output_buffer_size), - raw_buffer: make([]byte, 0, output_raw_buffer_size), - states: make([]yaml_emitter_state_t, 0, initial_stack_size), - events: make([]yaml_event_t, 0, initial_queue_size), - } - if disableLineWrapping { - emitter.best_width = -1 - } -} - -// Destroy an emitter object. -func yaml_emitter_delete(emitter *yaml_emitter_t) { - *emitter = yaml_emitter_t{} -} - -// String write handler. -func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error { - *emitter.output_buffer = append(*emitter.output_buffer, buffer...) - return nil -} - -// yaml_writer_write_handler uses emitter.output_writer to write the -// emitted text. -func yaml_writer_write_handler(emitter *yaml_emitter_t, buffer []byte) error { - _, err := emitter.output_writer.Write(buffer) - return err -} - -// Set a string output. -func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]byte) { - if emitter.write_handler != nil { - panic("must set the output target only once") - } - emitter.write_handler = yaml_string_write_handler - emitter.output_buffer = output_buffer -} - -// Set a file output. -func yaml_emitter_set_output_writer(emitter *yaml_emitter_t, w io.Writer) { - if emitter.write_handler != nil { - panic("must set the output target only once") - } - emitter.write_handler = yaml_writer_write_handler - emitter.output_writer = w -} - -// Set the output encoding. -func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) { - if emitter.encoding != yaml_ANY_ENCODING { - panic("must set the output encoding only once") - } - emitter.encoding = encoding -} - -// Set the canonical output style. -func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) { - emitter.canonical = canonical -} - -//// Set the indentation increment. -func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) { - if indent < 2 || indent > 9 { - indent = 2 - } - emitter.best_indent = indent -} - -// Set the preferred line width. -func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) { - if width < 0 { - width = -1 - } - emitter.best_width = width -} - -// Set if unescaped non-ASCII characters are allowed. -func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) { - emitter.unicode = unicode -} - -// Set the preferred line break character. -func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) { - emitter.line_break = line_break -} - -///* -// * Destroy a token object. 
-// */ -// -//YAML_DECLARE(void) -//yaml_token_delete(yaml_token_t *token) -//{ -// assert(token); // Non-NULL token object expected. -// -// switch (token.type) -// { -// case YAML_TAG_DIRECTIVE_TOKEN: -// yaml_free(token.data.tag_directive.handle); -// yaml_free(token.data.tag_directive.prefix); -// break; -// -// case YAML_ALIAS_TOKEN: -// yaml_free(token.data.alias.value); -// break; -// -// case YAML_ANCHOR_TOKEN: -// yaml_free(token.data.anchor.value); -// break; -// -// case YAML_TAG_TOKEN: -// yaml_free(token.data.tag.handle); -// yaml_free(token.data.tag.suffix); -// break; -// -// case YAML_SCALAR_TOKEN: -// yaml_free(token.data.scalar.value); -// break; -// -// default: -// break; -// } -// -// memset(token, 0, sizeof(yaml_token_t)); -//} -// -///* -// * Check if a string is a valid UTF-8 sequence. -// * -// * Check 'reader.c' for more details on UTF-8 encoding. -// */ -// -//static int -//yaml_check_utf8(yaml_char_t *start, size_t length) -//{ -// yaml_char_t *end = start+length; -// yaml_char_t *pointer = start; -// -// while (pointer < end) { -// unsigned char octet; -// unsigned int width; -// unsigned int value; -// size_t k; -// -// octet = pointer[0]; -// width = (octet & 0x80) == 0x00 ? 1 : -// (octet & 0xE0) == 0xC0 ? 2 : -// (octet & 0xF0) == 0xE0 ? 3 : -// (octet & 0xF8) == 0xF0 ? 4 : 0; -// value = (octet & 0x80) == 0x00 ? octet & 0x7F : -// (octet & 0xE0) == 0xC0 ? octet & 0x1F : -// (octet & 0xF0) == 0xE0 ? octet & 0x0F : -// (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0; -// if (!width) return 0; -// if (pointer+width > end) return 0; -// for (k = 1; k < width; k ++) { -// octet = pointer[k]; -// if ((octet & 0xC0) != 0x80) return 0; -// value = (value << 6) + (octet & 0x3F); -// } -// if (!((width == 1) || -// (width == 2 && value >= 0x80) || -// (width == 3 && value >= 0x800) || -// (width == 4 && value >= 0x10000))) return 0; -// -// pointer += width; -// } -// -// return 1; -//} -// - -// Create STREAM-START. -func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) { - *event = yaml_event_t{ - typ: yaml_STREAM_START_EVENT, - encoding: encoding, - } -} - -// Create STREAM-END. -func yaml_stream_end_event_initialize(event *yaml_event_t) { - *event = yaml_event_t{ - typ: yaml_STREAM_END_EVENT, - } -} - -// Create DOCUMENT-START. -func yaml_document_start_event_initialize( - event *yaml_event_t, - version_directive *yaml_version_directive_t, - tag_directives []yaml_tag_directive_t, - implicit bool, -) { - *event = yaml_event_t{ - typ: yaml_DOCUMENT_START_EVENT, - version_directive: version_directive, - tag_directives: tag_directives, - implicit: implicit, - } -} - -// Create DOCUMENT-END. -func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) { - *event = yaml_event_t{ - typ: yaml_DOCUMENT_END_EVENT, - implicit: implicit, - } -} - -///* -// * Create ALIAS. -// */ -// -//YAML_DECLARE(int) -//yaml_alias_event_initialize(event *yaml_event_t, anchor *yaml_char_t) -//{ -// mark yaml_mark_t = { 0, 0, 0 } -// anchor_copy *yaml_char_t = NULL -// -// assert(event) // Non-NULL event object is expected. -// assert(anchor) // Non-NULL anchor is expected. -// -// if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0 -// -// anchor_copy = yaml_strdup(anchor) -// if (!anchor_copy) -// return 0 -// -// ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark) -// -// return 1 -//} - -// Create SCALAR. 
-func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool { - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - anchor: anchor, - tag: tag, - value: value, - implicit: plain_implicit, - quoted_implicit: quoted_implicit, - style: yaml_style_t(style), - } - return true -} - -// Create SEQUENCE-START. -func yaml_sequence_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_sequence_style_t) bool { - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(style), - } - return true -} - -// Create SEQUENCE-END. -func yaml_sequence_end_event_initialize(event *yaml_event_t) bool { - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - } - return true -} - -// Create MAPPING-START. -func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) { - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(style), - } -} - -// Create MAPPING-END. -func yaml_mapping_end_event_initialize(event *yaml_event_t) { - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - } -} - -// Destroy an event object. -func yaml_event_delete(event *yaml_event_t) { - *event = yaml_event_t{} -} - -///* -// * Create a document object. -// */ -// -//YAML_DECLARE(int) -//yaml_document_initialize(document *yaml_document_t, -// version_directive *yaml_version_directive_t, -// tag_directives_start *yaml_tag_directive_t, -// tag_directives_end *yaml_tag_directive_t, -// start_implicit int, end_implicit int) -//{ -// struct { -// error yaml_error_type_t -// } context -// struct { -// start *yaml_node_t -// end *yaml_node_t -// top *yaml_node_t -// } nodes = { NULL, NULL, NULL } -// version_directive_copy *yaml_version_directive_t = NULL -// struct { -// start *yaml_tag_directive_t -// end *yaml_tag_directive_t -// top *yaml_tag_directive_t -// } tag_directives_copy = { NULL, NULL, NULL } -// value yaml_tag_directive_t = { NULL, NULL } -// mark yaml_mark_t = { 0, 0, 0 } -// -// assert(document) // Non-NULL document object is expected. -// assert((tag_directives_start && tag_directives_end) || -// (tag_directives_start == tag_directives_end)) -// // Valid tag directives are expected. 
-// -// if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error -// -// if (version_directive) { -// version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t)) -// if (!version_directive_copy) goto error -// version_directive_copy.major = version_directive.major -// version_directive_copy.minor = version_directive.minor -// } -// -// if (tag_directives_start != tag_directives_end) { -// tag_directive *yaml_tag_directive_t -// if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE)) -// goto error -// for (tag_directive = tag_directives_start -// tag_directive != tag_directives_end; tag_directive ++) { -// assert(tag_directive.handle) -// assert(tag_directive.prefix) -// if (!yaml_check_utf8(tag_directive.handle, -// strlen((char *)tag_directive.handle))) -// goto error -// if (!yaml_check_utf8(tag_directive.prefix, -// strlen((char *)tag_directive.prefix))) -// goto error -// value.handle = yaml_strdup(tag_directive.handle) -// value.prefix = yaml_strdup(tag_directive.prefix) -// if (!value.handle || !value.prefix) goto error -// if (!PUSH(&context, tag_directives_copy, value)) -// goto error -// value.handle = NULL -// value.prefix = NULL -// } -// } -// -// DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy, -// tag_directives_copy.start, tag_directives_copy.top, -// start_implicit, end_implicit, mark, mark) -// -// return 1 -// -//error: -// STACK_DEL(&context, nodes) -// yaml_free(version_directive_copy) -// while (!STACK_EMPTY(&context, tag_directives_copy)) { -// value yaml_tag_directive_t = POP(&context, tag_directives_copy) -// yaml_free(value.handle) -// yaml_free(value.prefix) -// } -// STACK_DEL(&context, tag_directives_copy) -// yaml_free(value.handle) -// yaml_free(value.prefix) -// -// return 0 -//} -// -///* -// * Destroy a document object. -// */ -// -//YAML_DECLARE(void) -//yaml_document_delete(document *yaml_document_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// tag_directive *yaml_tag_directive_t -// -// context.error = YAML_NO_ERROR // Eliminate a compiler warning. -// -// assert(document) // Non-NULL document object is expected. -// -// while (!STACK_EMPTY(&context, document.nodes)) { -// node yaml_node_t = POP(&context, document.nodes) -// yaml_free(node.tag) -// switch (node.type) { -// case YAML_SCALAR_NODE: -// yaml_free(node.data.scalar.value) -// break -// case YAML_SEQUENCE_NODE: -// STACK_DEL(&context, node.data.sequence.items) -// break -// case YAML_MAPPING_NODE: -// STACK_DEL(&context, node.data.mapping.pairs) -// break -// default: -// assert(0) // Should not happen. -// } -// } -// STACK_DEL(&context, document.nodes) -// -// yaml_free(document.version_directive) -// for (tag_directive = document.tag_directives.start -// tag_directive != document.tag_directives.end -// tag_directive++) { -// yaml_free(tag_directive.handle) -// yaml_free(tag_directive.prefix) -// } -// yaml_free(document.tag_directives.start) -// -// memset(document, 0, sizeof(yaml_document_t)) -//} -// -///** -// * Get a document node. -// */ -// -//YAML_DECLARE(yaml_node_t *) -//yaml_document_get_node(document *yaml_document_t, index int) -//{ -// assert(document) // Non-NULL document object is expected. -// -// if (index > 0 && document.nodes.start + index <= document.nodes.top) { -// return document.nodes.start + index - 1 -// } -// return NULL -//} -// -///** -// * Get the root object. 
-// */ -// -//YAML_DECLARE(yaml_node_t *) -//yaml_document_get_root_node(document *yaml_document_t) -//{ -// assert(document) // Non-NULL document object is expected. -// -// if (document.nodes.top != document.nodes.start) { -// return document.nodes.start -// } -// return NULL -//} -// -///* -// * Add a scalar node to a document. -// */ -// -//YAML_DECLARE(int) -//yaml_document_add_scalar(document *yaml_document_t, -// tag *yaml_char_t, value *yaml_char_t, length int, -// style yaml_scalar_style_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// mark yaml_mark_t = { 0, 0, 0 } -// tag_copy *yaml_char_t = NULL -// value_copy *yaml_char_t = NULL -// node yaml_node_t -// -// assert(document) // Non-NULL document object is expected. -// assert(value) // Non-NULL value is expected. -// -// if (!tag) { -// tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG -// } -// -// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error -// tag_copy = yaml_strdup(tag) -// if (!tag_copy) goto error -// -// if (length < 0) { -// length = strlen((char *)value) -// } -// -// if (!yaml_check_utf8(value, length)) goto error -// value_copy = yaml_malloc(length+1) -// if (!value_copy) goto error -// memcpy(value_copy, value, length) -// value_copy[length] = '\0' -// -// SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark) -// if (!PUSH(&context, document.nodes, node)) goto error -// -// return document.nodes.top - document.nodes.start -// -//error: -// yaml_free(tag_copy) -// yaml_free(value_copy) -// -// return 0 -//} -// -///* -// * Add a sequence node to a document. -// */ -// -//YAML_DECLARE(int) -//yaml_document_add_sequence(document *yaml_document_t, -// tag *yaml_char_t, style yaml_sequence_style_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// mark yaml_mark_t = { 0, 0, 0 } -// tag_copy *yaml_char_t = NULL -// struct { -// start *yaml_node_item_t -// end *yaml_node_item_t -// top *yaml_node_item_t -// } items = { NULL, NULL, NULL } -// node yaml_node_t -// -// assert(document) // Non-NULL document object is expected. -// -// if (!tag) { -// tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG -// } -// -// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error -// tag_copy = yaml_strdup(tag) -// if (!tag_copy) goto error -// -// if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error -// -// SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end, -// style, mark, mark) -// if (!PUSH(&context, document.nodes, node)) goto error -// -// return document.nodes.top - document.nodes.start -// -//error: -// STACK_DEL(&context, items) -// yaml_free(tag_copy) -// -// return 0 -//} -// -///* -// * Add a mapping node to a document. -// */ -// -//YAML_DECLARE(int) -//yaml_document_add_mapping(document *yaml_document_t, -// tag *yaml_char_t, style yaml_mapping_style_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// mark yaml_mark_t = { 0, 0, 0 } -// tag_copy *yaml_char_t = NULL -// struct { -// start *yaml_node_pair_t -// end *yaml_node_pair_t -// top *yaml_node_pair_t -// } pairs = { NULL, NULL, NULL } -// node yaml_node_t -// -// assert(document) // Non-NULL document object is expected. 
-// -// if (!tag) { -// tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG -// } -// -// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error -// tag_copy = yaml_strdup(tag) -// if (!tag_copy) goto error -// -// if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error -// -// MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end, -// style, mark, mark) -// if (!PUSH(&context, document.nodes, node)) goto error -// -// return document.nodes.top - document.nodes.start -// -//error: -// STACK_DEL(&context, pairs) -// yaml_free(tag_copy) -// -// return 0 -//} -// -///* -// * Append an item to a sequence node. -// */ -// -//YAML_DECLARE(int) -//yaml_document_append_sequence_item(document *yaml_document_t, -// sequence int, item int) -//{ -// struct { -// error yaml_error_type_t -// } context -// -// assert(document) // Non-NULL document is required. -// assert(sequence > 0 -// && document.nodes.start + sequence <= document.nodes.top) -// // Valid sequence id is required. -// assert(document.nodes.start[sequence-1].type == YAML_SEQUENCE_NODE) -// // A sequence node is required. -// assert(item > 0 && document.nodes.start + item <= document.nodes.top) -// // Valid item id is required. -// -// if (!PUSH(&context, -// document.nodes.start[sequence-1].data.sequence.items, item)) -// return 0 -// -// return 1 -//} -// -///* -// * Append a pair of a key and a value to a mapping node. -// */ -// -//YAML_DECLARE(int) -//yaml_document_append_mapping_pair(document *yaml_document_t, -// mapping int, key int, value int) -//{ -// struct { -// error yaml_error_type_t -// } context -// -// pair yaml_node_pair_t -// -// assert(document) // Non-NULL document is required. -// assert(mapping > 0 -// && document.nodes.start + mapping <= document.nodes.top) -// // Valid mapping id is required. -// assert(document.nodes.start[mapping-1].type == YAML_MAPPING_NODE) -// // A mapping node is required. -// assert(key > 0 && document.nodes.start + key <= document.nodes.top) -// // Valid key id is required. -// assert(value > 0 && document.nodes.start + value <= document.nodes.top) -// // Valid value id is required. -// -// pair.key = key -// pair.value = value -// -// if (!PUSH(&context, -// document.nodes.start[mapping-1].data.mapping.pairs, pair)) -// return 0 -// -// return 1 -//} -// -// diff --git a/vendor/gopkg.in/yaml.v2/decode.go b/vendor/gopkg.in/yaml.v2/decode.go index 129bc2a9..e69de29b 100644 --- a/vendor/gopkg.in/yaml.v2/decode.go +++ b/vendor/gopkg.in/yaml.v2/decode.go @@ -1,815 +0,0 @@ -package yaml - -import ( - "encoding" - "encoding/base64" - "fmt" - "io" - "math" - "reflect" - "strconv" - "time" -) - -const ( - documentNode = 1 << iota - mappingNode - sequenceNode - scalarNode - aliasNode -) - -type node struct { - kind int - line, column int - tag string - // For an alias node, alias holds the resolved alias. - alias *node - value string - implicit bool - children []*node - anchors map[string]*node -} - -// ---------------------------------------------------------------------------- -// Parser, produces a node tree out of a libyaml event stream. 
- -type parser struct { - parser yaml_parser_t - event yaml_event_t - doc *node - doneInit bool -} - -func newParser(b []byte) *parser { - p := parser{} - if !yaml_parser_initialize(&p.parser) { - panic("failed to initialize YAML emitter") - } - if len(b) == 0 { - b = []byte{'\n'} - } - yaml_parser_set_input_string(&p.parser, b) - return &p -} - -func newParserFromReader(r io.Reader) *parser { - p := parser{} - if !yaml_parser_initialize(&p.parser) { - panic("failed to initialize YAML emitter") - } - yaml_parser_set_input_reader(&p.parser, r) - return &p -} - -func (p *parser) init() { - if p.doneInit { - return - } - p.expect(yaml_STREAM_START_EVENT) - p.doneInit = true -} - -func (p *parser) destroy() { - if p.event.typ != yaml_NO_EVENT { - yaml_event_delete(&p.event) - } - yaml_parser_delete(&p.parser) -} - -// expect consumes an event from the event stream and -// checks that it's of the expected type. -func (p *parser) expect(e yaml_event_type_t) { - if p.event.typ == yaml_NO_EVENT { - if !yaml_parser_parse(&p.parser, &p.event) { - p.fail() - } - } - if p.event.typ == yaml_STREAM_END_EVENT { - failf("attempted to go past the end of stream; corrupted value?") - } - if p.event.typ != e { - p.parser.problem = fmt.Sprintf("expected %s event but got %s", e, p.event.typ) - p.fail() - } - yaml_event_delete(&p.event) - p.event.typ = yaml_NO_EVENT -} - -// peek peeks at the next event in the event stream, -// puts the results into p.event and returns the event type. -func (p *parser) peek() yaml_event_type_t { - if p.event.typ != yaml_NO_EVENT { - return p.event.typ - } - if !yaml_parser_parse(&p.parser, &p.event) { - p.fail() - } - return p.event.typ -} - -func (p *parser) fail() { - var where string - var line int - if p.parser.problem_mark.line != 0 { - line = p.parser.problem_mark.line - // Scanner errors don't iterate line before returning error - if p.parser.error == yaml_SCANNER_ERROR { - line++ - } - } else if p.parser.context_mark.line != 0 { - line = p.parser.context_mark.line - } - if line != 0 { - where = "line " + strconv.Itoa(line) + ": " - } - var msg string - if len(p.parser.problem) > 0 { - msg = p.parser.problem - } else { - msg = "unknown problem parsing YAML content" - } - failf("%s%s", where, msg) -} - -func (p *parser) anchor(n *node, anchor []byte) { - if anchor != nil { - p.doc.anchors[string(anchor)] = n - } -} - -func (p *parser) parse() *node { - p.init() - switch p.peek() { - case yaml_SCALAR_EVENT: - return p.scalar() - case yaml_ALIAS_EVENT: - return p.alias() - case yaml_MAPPING_START_EVENT: - return p.mapping() - case yaml_SEQUENCE_START_EVENT: - return p.sequence() - case yaml_DOCUMENT_START_EVENT: - return p.document() - case yaml_STREAM_END_EVENT: - // Happens when attempting to decode an empty buffer. 
- return nil - default: - panic("attempted to parse unknown event: " + p.event.typ.String()) - } -} - -func (p *parser) node(kind int) *node { - return &node{ - kind: kind, - line: p.event.start_mark.line, - column: p.event.start_mark.column, - } -} - -func (p *parser) document() *node { - n := p.node(documentNode) - n.anchors = make(map[string]*node) - p.doc = n - p.expect(yaml_DOCUMENT_START_EVENT) - n.children = append(n.children, p.parse()) - p.expect(yaml_DOCUMENT_END_EVENT) - return n -} - -func (p *parser) alias() *node { - n := p.node(aliasNode) - n.value = string(p.event.anchor) - n.alias = p.doc.anchors[n.value] - if n.alias == nil { - failf("unknown anchor '%s' referenced", n.value) - } - p.expect(yaml_ALIAS_EVENT) - return n -} - -func (p *parser) scalar() *node { - n := p.node(scalarNode) - n.value = string(p.event.value) - n.tag = string(p.event.tag) - n.implicit = p.event.implicit - p.anchor(n, p.event.anchor) - p.expect(yaml_SCALAR_EVENT) - return n -} - -func (p *parser) sequence() *node { - n := p.node(sequenceNode) - p.anchor(n, p.event.anchor) - p.expect(yaml_SEQUENCE_START_EVENT) - for p.peek() != yaml_SEQUENCE_END_EVENT { - n.children = append(n.children, p.parse()) - } - p.expect(yaml_SEQUENCE_END_EVENT) - return n -} - -func (p *parser) mapping() *node { - n := p.node(mappingNode) - p.anchor(n, p.event.anchor) - p.expect(yaml_MAPPING_START_EVENT) - for p.peek() != yaml_MAPPING_END_EVENT { - n.children = append(n.children, p.parse(), p.parse()) - } - p.expect(yaml_MAPPING_END_EVENT) - return n -} - -// ---------------------------------------------------------------------------- -// Decoder, unmarshals a node into a provided value. - -type decoder struct { - doc *node - aliases map[*node]bool - mapType reflect.Type - terrors []string - strict bool - - decodeCount int - aliasCount int - aliasDepth int -} - -var ( - mapItemType = reflect.TypeOf(MapItem{}) - durationType = reflect.TypeOf(time.Duration(0)) - defaultMapType = reflect.TypeOf(map[interface{}]interface{}{}) - ifaceType = defaultMapType.Elem() - timeType = reflect.TypeOf(time.Time{}) - ptrTimeType = reflect.TypeOf(&time.Time{}) -) - -func newDecoder(strict bool) *decoder { - d := &decoder{mapType: defaultMapType, strict: strict} - d.aliases = make(map[*node]bool) - return d -} - -func (d *decoder) terror(n *node, tag string, out reflect.Value) { - if n.tag != "" { - tag = n.tag - } - value := n.value - if tag != yaml_SEQ_TAG && tag != yaml_MAP_TAG { - if len(value) > 10 { - value = " `" + value[:7] + "...`" - } else { - value = " `" + value + "`" - } - } - d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.line+1, shortTag(tag), value, out.Type())) -} - -func (d *decoder) callUnmarshaler(n *node, u Unmarshaler) (good bool) { - terrlen := len(d.terrors) - err := u.UnmarshalYAML(func(v interface{}) (err error) { - defer handleErr(&err) - d.unmarshal(n, reflect.ValueOf(v)) - if len(d.terrors) > terrlen { - issues := d.terrors[terrlen:] - d.terrors = d.terrors[:terrlen] - return &TypeError{issues} - } - return nil - }) - if e, ok := err.(*TypeError); ok { - d.terrors = append(d.terrors, e.Errors...) - return false - } - if err != nil { - fail(err) - } - return true -} - -// d.prepare initializes and dereferences pointers and calls UnmarshalYAML -// if a value is found to implement it. -// It returns the initialized and dereferenced out value, whether -// unmarshalling was already done by UnmarshalYAML, and if so whether -// its types unmarshalled appropriately. 
-// -// If n holds a null value, prepare returns before doing anything. -func (d *decoder) prepare(n *node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) { - if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "~" || n.value == "" && n.implicit) { - return out, false, false - } - again := true - for again { - again = false - if out.Kind() == reflect.Ptr { - if out.IsNil() { - out.Set(reflect.New(out.Type().Elem())) - } - out = out.Elem() - again = true - } - if out.CanAddr() { - if u, ok := out.Addr().Interface().(Unmarshaler); ok { - good = d.callUnmarshaler(n, u) - return out, true, good - } - } - } - return out, false, false -} - -const ( - // 400,000 decode operations is ~500kb of dense object declarations, or - // ~5kb of dense object declarations with 10000% alias expansion - alias_ratio_range_low = 400000 - - // 4,000,000 decode operations is ~5MB of dense object declarations, or - // ~4.5MB of dense object declarations with 10% alias expansion - alias_ratio_range_high = 4000000 - - // alias_ratio_range is the range over which we scale allowed alias ratios - alias_ratio_range = float64(alias_ratio_range_high - alias_ratio_range_low) -) - -func allowedAliasRatio(decodeCount int) float64 { - switch { - case decodeCount <= alias_ratio_range_low: - // allow 99% to come from alias expansion for small-to-medium documents - return 0.99 - case decodeCount >= alias_ratio_range_high: - // allow 10% to come from alias expansion for very large documents - return 0.10 - default: - // scale smoothly from 99% down to 10% over the range. - // this maps to 396,000 - 400,000 allowed alias-driven decodes over the range. - // 400,000 decode operations is ~100MB of allocations in worst-case scenarios (single-item maps). - return 0.99 - 0.89*(float64(decodeCount-alias_ratio_range_low)/alias_ratio_range) - } -} - -func (d *decoder) unmarshal(n *node, out reflect.Value) (good bool) { - d.decodeCount++ - if d.aliasDepth > 0 { - d.aliasCount++ - } - if d.aliasCount > 100 && d.decodeCount > 1000 && float64(d.aliasCount)/float64(d.decodeCount) > allowedAliasRatio(d.decodeCount) { - failf("document contains excessive aliasing") - } - switch n.kind { - case documentNode: - return d.document(n, out) - case aliasNode: - return d.alias(n, out) - } - out, unmarshaled, good := d.prepare(n, out) - if unmarshaled { - return good - } - switch n.kind { - case scalarNode: - good = d.scalar(n, out) - case mappingNode: - good = d.mapping(n, out) - case sequenceNode: - good = d.sequence(n, out) - default: - panic("internal error: unknown node kind: " + strconv.Itoa(n.kind)) - } - return good -} - -func (d *decoder) document(n *node, out reflect.Value) (good bool) { - if len(n.children) == 1 { - d.doc = n - d.unmarshal(n.children[0], out) - return true - } - return false -} - -func (d *decoder) alias(n *node, out reflect.Value) (good bool) { - if d.aliases[n] { - // TODO this could actually be allowed in some circumstances. 
- failf("anchor '%s' value contains itself", n.value) - } - d.aliases[n] = true - d.aliasDepth++ - good = d.unmarshal(n.alias, out) - d.aliasDepth-- - delete(d.aliases, n) - return good -} - -var zeroValue reflect.Value - -func resetMap(out reflect.Value) { - for _, k := range out.MapKeys() { - out.SetMapIndex(k, zeroValue) - } -} - -func (d *decoder) scalar(n *node, out reflect.Value) bool { - var tag string - var resolved interface{} - if n.tag == "" && !n.implicit { - tag = yaml_STR_TAG - resolved = n.value - } else { - tag, resolved = resolve(n.tag, n.value) - if tag == yaml_BINARY_TAG { - data, err := base64.StdEncoding.DecodeString(resolved.(string)) - if err != nil { - failf("!!binary value contains invalid base64 data") - } - resolved = string(data) - } - } - if resolved == nil { - if out.Kind() == reflect.Map && !out.CanAddr() { - resetMap(out) - } else { - out.Set(reflect.Zero(out.Type())) - } - return true - } - if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() { - // We've resolved to exactly the type we want, so use that. - out.Set(resolvedv) - return true - } - // Perhaps we can use the value as a TextUnmarshaler to - // set its value. - if out.CanAddr() { - u, ok := out.Addr().Interface().(encoding.TextUnmarshaler) - if ok { - var text []byte - if tag == yaml_BINARY_TAG { - text = []byte(resolved.(string)) - } else { - // We let any value be unmarshaled into TextUnmarshaler. - // That might be more lax than we'd like, but the - // TextUnmarshaler itself should bowl out any dubious values. - text = []byte(n.value) - } - err := u.UnmarshalText(text) - if err != nil { - fail(err) - } - return true - } - } - switch out.Kind() { - case reflect.String: - if tag == yaml_BINARY_TAG { - out.SetString(resolved.(string)) - return true - } - if resolved != nil { - out.SetString(n.value) - return true - } - case reflect.Interface: - if resolved == nil { - out.Set(reflect.Zero(out.Type())) - } else if tag == yaml_TIMESTAMP_TAG { - // It looks like a timestamp but for backward compatibility - // reasons we set it as a string, so that code that unmarshals - // timestamp-like values into interface{} will continue to - // see a string and not a time.Time. - // TODO(v3) Drop this. 
- out.Set(reflect.ValueOf(n.value)) - } else { - out.Set(reflect.ValueOf(resolved)) - } - return true - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - switch resolved := resolved.(type) { - case int: - if !out.OverflowInt(int64(resolved)) { - out.SetInt(int64(resolved)) - return true - } - case int64: - if !out.OverflowInt(resolved) { - out.SetInt(resolved) - return true - } - case uint64: - if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) { - out.SetInt(int64(resolved)) - return true - } - case float64: - if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) { - out.SetInt(int64(resolved)) - return true - } - case string: - if out.Type() == durationType { - d, err := time.ParseDuration(resolved) - if err == nil { - out.SetInt(int64(d)) - return true - } - } - } - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - switch resolved := resolved.(type) { - case int: - if resolved >= 0 && !out.OverflowUint(uint64(resolved)) { - out.SetUint(uint64(resolved)) - return true - } - case int64: - if resolved >= 0 && !out.OverflowUint(uint64(resolved)) { - out.SetUint(uint64(resolved)) - return true - } - case uint64: - if !out.OverflowUint(uint64(resolved)) { - out.SetUint(uint64(resolved)) - return true - } - case float64: - if resolved <= math.MaxUint64 && !out.OverflowUint(uint64(resolved)) { - out.SetUint(uint64(resolved)) - return true - } - } - case reflect.Bool: - switch resolved := resolved.(type) { - case bool: - out.SetBool(resolved) - return true - } - case reflect.Float32, reflect.Float64: - switch resolved := resolved.(type) { - case int: - out.SetFloat(float64(resolved)) - return true - case int64: - out.SetFloat(float64(resolved)) - return true - case uint64: - out.SetFloat(float64(resolved)) - return true - case float64: - out.SetFloat(resolved) - return true - } - case reflect.Struct: - if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() { - out.Set(resolvedv) - return true - } - case reflect.Ptr: - if out.Type().Elem() == reflect.TypeOf(resolved) { - // TODO DOes this make sense? When is out a Ptr except when decoding a nil value? - elem := reflect.New(out.Type().Elem()) - elem.Elem().Set(reflect.ValueOf(resolved)) - out.Set(elem) - return true - } - } - d.terror(n, tag, out) - return false -} - -func settableValueOf(i interface{}) reflect.Value { - v := reflect.ValueOf(i) - sv := reflect.New(v.Type()).Elem() - sv.Set(v) - return sv -} - -func (d *decoder) sequence(n *node, out reflect.Value) (good bool) { - l := len(n.children) - - var iface reflect.Value - switch out.Kind() { - case reflect.Slice: - out.Set(reflect.MakeSlice(out.Type(), l, l)) - case reflect.Array: - if l != out.Len() { - failf("invalid array: want %d elements but got %d", out.Len(), l) - } - case reflect.Interface: - // No type hints. Will have to use a generic sequence. 
- iface = out - out = settableValueOf(make([]interface{}, l)) - default: - d.terror(n, yaml_SEQ_TAG, out) - return false - } - et := out.Type().Elem() - - j := 0 - for i := 0; i < l; i++ { - e := reflect.New(et).Elem() - if ok := d.unmarshal(n.children[i], e); ok { - out.Index(j).Set(e) - j++ - } - } - if out.Kind() != reflect.Array { - out.Set(out.Slice(0, j)) - } - if iface.IsValid() { - iface.Set(out) - } - return true -} - -func (d *decoder) mapping(n *node, out reflect.Value) (good bool) { - switch out.Kind() { - case reflect.Struct: - return d.mappingStruct(n, out) - case reflect.Slice: - return d.mappingSlice(n, out) - case reflect.Map: - // okay - case reflect.Interface: - if d.mapType.Kind() == reflect.Map { - iface := out - out = reflect.MakeMap(d.mapType) - iface.Set(out) - } else { - slicev := reflect.New(d.mapType).Elem() - if !d.mappingSlice(n, slicev) { - return false - } - out.Set(slicev) - return true - } - default: - d.terror(n, yaml_MAP_TAG, out) - return false - } - outt := out.Type() - kt := outt.Key() - et := outt.Elem() - - mapType := d.mapType - if outt.Key() == ifaceType && outt.Elem() == ifaceType { - d.mapType = outt - } - - if out.IsNil() { - out.Set(reflect.MakeMap(outt)) - } - l := len(n.children) - for i := 0; i < l; i += 2 { - if isMerge(n.children[i]) { - d.merge(n.children[i+1], out) - continue - } - k := reflect.New(kt).Elem() - if d.unmarshal(n.children[i], k) { - kkind := k.Kind() - if kkind == reflect.Interface { - kkind = k.Elem().Kind() - } - if kkind == reflect.Map || kkind == reflect.Slice { - failf("invalid map key: %#v", k.Interface()) - } - e := reflect.New(et).Elem() - if d.unmarshal(n.children[i+1], e) { - d.setMapIndex(n.children[i+1], out, k, e) - } - } - } - d.mapType = mapType - return true -} - -func (d *decoder) setMapIndex(n *node, out, k, v reflect.Value) { - if d.strict && out.MapIndex(k) != zeroValue { - d.terrors = append(d.terrors, fmt.Sprintf("line %d: key %#v already set in map", n.line+1, k.Interface())) - return - } - out.SetMapIndex(k, v) -} - -func (d *decoder) mappingSlice(n *node, out reflect.Value) (good bool) { - outt := out.Type() - if outt.Elem() != mapItemType { - d.terror(n, yaml_MAP_TAG, out) - return false - } - - mapType := d.mapType - d.mapType = outt - - var slice []MapItem - var l = len(n.children) - for i := 0; i < l; i += 2 { - if isMerge(n.children[i]) { - d.merge(n.children[i+1], out) - continue - } - item := MapItem{} - k := reflect.ValueOf(&item.Key).Elem() - if d.unmarshal(n.children[i], k) { - v := reflect.ValueOf(&item.Value).Elem() - if d.unmarshal(n.children[i+1], v) { - slice = append(slice, item) - } - } - } - out.Set(reflect.ValueOf(slice)) - d.mapType = mapType - return true -} - -func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) { - sinfo, err := getStructInfo(out.Type()) - if err != nil { - panic(err) - } - name := settableValueOf("") - l := len(n.children) - - var inlineMap reflect.Value - var elemType reflect.Type - if sinfo.InlineMap != -1 { - inlineMap = out.Field(sinfo.InlineMap) - inlineMap.Set(reflect.New(inlineMap.Type()).Elem()) - elemType = inlineMap.Type().Elem() - } - - var doneFields []bool - if d.strict { - doneFields = make([]bool, len(sinfo.FieldsList)) - } - for i := 0; i < l; i += 2 { - ni := n.children[i] - if isMerge(ni) { - d.merge(n.children[i+1], out) - continue - } - if !d.unmarshal(ni, name) { - continue - } - if info, ok := sinfo.FieldsMap[name.String()]; ok { - if d.strict { - if doneFields[info.Id] { - d.terrors = append(d.terrors, 
fmt.Sprintf("line %d: field %s already set in type %s", ni.line+1, name.String(), out.Type())) - continue - } - doneFields[info.Id] = true - } - var field reflect.Value - if info.Inline == nil { - field = out.Field(info.Num) - } else { - field = out.FieldByIndex(info.Inline) - } - d.unmarshal(n.children[i+1], field) - } else if sinfo.InlineMap != -1 { - if inlineMap.IsNil() { - inlineMap.Set(reflect.MakeMap(inlineMap.Type())) - } - value := reflect.New(elemType).Elem() - d.unmarshal(n.children[i+1], value) - d.setMapIndex(n.children[i+1], inlineMap, name, value) - } else if d.strict { - d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in type %s", ni.line+1, name.String(), out.Type())) - } - } - return true -} - -func failWantMap() { - failf("map merge requires map or sequence of maps as the value") -} - -func (d *decoder) merge(n *node, out reflect.Value) { - switch n.kind { - case mappingNode: - d.unmarshal(n, out) - case aliasNode: - if n.alias != nil && n.alias.kind != mappingNode { - failWantMap() - } - d.unmarshal(n, out) - case sequenceNode: - // Step backwards as earlier nodes take precedence. - for i := len(n.children) - 1; i >= 0; i-- { - ni := n.children[i] - if ni.kind == aliasNode { - if ni.alias != nil && ni.alias.kind != mappingNode { - failWantMap() - } - } else if ni.kind != mappingNode { - failWantMap() - } - d.unmarshal(ni, out) - } - default: - failWantMap() - } -} - -func isMerge(n *node) bool { - return n.kind == scalarNode && n.value == "<<" && (n.implicit == true || n.tag == yaml_MERGE_TAG) -} diff --git a/vendor/gopkg.in/yaml.v2/emitterc.go b/vendor/gopkg.in/yaml.v2/emitterc.go index a1c2cc52..e69de29b 100644 --- a/vendor/gopkg.in/yaml.v2/emitterc.go +++ b/vendor/gopkg.in/yaml.v2/emitterc.go @@ -1,1685 +0,0 @@ -package yaml - -import ( - "bytes" - "fmt" -) - -// Flush the buffer if needed. -func flush(emitter *yaml_emitter_t) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) { - return yaml_emitter_flush(emitter) - } - return true -} - -// Put a character to the output buffer. -func put(emitter *yaml_emitter_t, value byte) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { - return false - } - emitter.buffer[emitter.buffer_pos] = value - emitter.buffer_pos++ - emitter.column++ - return true -} - -// Put a line break to the output buffer. -func put_break(emitter *yaml_emitter_t) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { - return false - } - switch emitter.line_break { - case yaml_CR_BREAK: - emitter.buffer[emitter.buffer_pos] = '\r' - emitter.buffer_pos += 1 - case yaml_LN_BREAK: - emitter.buffer[emitter.buffer_pos] = '\n' - emitter.buffer_pos += 1 - case yaml_CRLN_BREAK: - emitter.buffer[emitter.buffer_pos+0] = '\r' - emitter.buffer[emitter.buffer_pos+1] = '\n' - emitter.buffer_pos += 2 - default: - panic("unknown line break setting") - } - emitter.column = 0 - emitter.line++ - return true -} - -// Copy a character from a string into buffer. 
-func write(emitter *yaml_emitter_t, s []byte, i *int) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { - return false - } - p := emitter.buffer_pos - w := width(s[*i]) - switch w { - case 4: - emitter.buffer[p+3] = s[*i+3] - fallthrough - case 3: - emitter.buffer[p+2] = s[*i+2] - fallthrough - case 2: - emitter.buffer[p+1] = s[*i+1] - fallthrough - case 1: - emitter.buffer[p+0] = s[*i+0] - default: - panic("unknown character width") - } - emitter.column++ - emitter.buffer_pos += w - *i += w - return true -} - -// Write a whole string into buffer. -func write_all(emitter *yaml_emitter_t, s []byte) bool { - for i := 0; i < len(s); { - if !write(emitter, s, &i) { - return false - } - } - return true -} - -// Copy a line break character from a string into buffer. -func write_break(emitter *yaml_emitter_t, s []byte, i *int) bool { - if s[*i] == '\n' { - if !put_break(emitter) { - return false - } - *i++ - } else { - if !write(emitter, s, i) { - return false - } - emitter.column = 0 - emitter.line++ - } - return true -} - -// Set an emitter error and return false. -func yaml_emitter_set_emitter_error(emitter *yaml_emitter_t, problem string) bool { - emitter.error = yaml_EMITTER_ERROR - emitter.problem = problem - return false -} - -// Emit an event. -func yaml_emitter_emit(emitter *yaml_emitter_t, event *yaml_event_t) bool { - emitter.events = append(emitter.events, *event) - for !yaml_emitter_need_more_events(emitter) { - event := &emitter.events[emitter.events_head] - if !yaml_emitter_analyze_event(emitter, event) { - return false - } - if !yaml_emitter_state_machine(emitter, event) { - return false - } - yaml_event_delete(event) - emitter.events_head++ - } - return true -} - -// Check if we need to accumulate more events before emitting. -// -// We accumulate extra -// - 1 event for DOCUMENT-START -// - 2 events for SEQUENCE-START -// - 3 events for MAPPING-START -// -func yaml_emitter_need_more_events(emitter *yaml_emitter_t) bool { - if emitter.events_head == len(emitter.events) { - return true - } - var accumulate int - switch emitter.events[emitter.events_head].typ { - case yaml_DOCUMENT_START_EVENT: - accumulate = 1 - break - case yaml_SEQUENCE_START_EVENT: - accumulate = 2 - break - case yaml_MAPPING_START_EVENT: - accumulate = 3 - break - default: - return false - } - if len(emitter.events)-emitter.events_head > accumulate { - return false - } - var level int - for i := emitter.events_head; i < len(emitter.events); i++ { - switch emitter.events[i].typ { - case yaml_STREAM_START_EVENT, yaml_DOCUMENT_START_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT: - level++ - case yaml_STREAM_END_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_END_EVENT, yaml_MAPPING_END_EVENT: - level-- - } - if level == 0 { - return false - } - } - return true -} - -// Append a directive to the directives stack. -func yaml_emitter_append_tag_directive(emitter *yaml_emitter_t, value *yaml_tag_directive_t, allow_duplicates bool) bool { - for i := 0; i < len(emitter.tag_directives); i++ { - if bytes.Equal(value.handle, emitter.tag_directives[i].handle) { - if allow_duplicates { - return true - } - return yaml_emitter_set_emitter_error(emitter, "duplicate %TAG directive") - } - } - - // [Go] Do we actually need to copy this given garbage collection - // and the lack of deallocating destructors? 
- tag_copy := yaml_tag_directive_t{ - handle: make([]byte, len(value.handle)), - prefix: make([]byte, len(value.prefix)), - } - copy(tag_copy.handle, value.handle) - copy(tag_copy.prefix, value.prefix) - emitter.tag_directives = append(emitter.tag_directives, tag_copy) - return true -} - -// Increase the indentation level. -func yaml_emitter_increase_indent(emitter *yaml_emitter_t, flow, indentless bool) bool { - emitter.indents = append(emitter.indents, emitter.indent) - if emitter.indent < 0 { - if flow { - emitter.indent = emitter.best_indent - } else { - emitter.indent = 0 - } - } else if !indentless { - emitter.indent += emitter.best_indent - } - return true -} - -// State dispatcher. -func yaml_emitter_state_machine(emitter *yaml_emitter_t, event *yaml_event_t) bool { - switch emitter.state { - default: - case yaml_EMIT_STREAM_START_STATE: - return yaml_emitter_emit_stream_start(emitter, event) - - case yaml_EMIT_FIRST_DOCUMENT_START_STATE: - return yaml_emitter_emit_document_start(emitter, event, true) - - case yaml_EMIT_DOCUMENT_START_STATE: - return yaml_emitter_emit_document_start(emitter, event, false) - - case yaml_EMIT_DOCUMENT_CONTENT_STATE: - return yaml_emitter_emit_document_content(emitter, event) - - case yaml_EMIT_DOCUMENT_END_STATE: - return yaml_emitter_emit_document_end(emitter, event) - - case yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE: - return yaml_emitter_emit_flow_sequence_item(emitter, event, true) - - case yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE: - return yaml_emitter_emit_flow_sequence_item(emitter, event, false) - - case yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE: - return yaml_emitter_emit_flow_mapping_key(emitter, event, true) - - case yaml_EMIT_FLOW_MAPPING_KEY_STATE: - return yaml_emitter_emit_flow_mapping_key(emitter, event, false) - - case yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE: - return yaml_emitter_emit_flow_mapping_value(emitter, event, true) - - case yaml_EMIT_FLOW_MAPPING_VALUE_STATE: - return yaml_emitter_emit_flow_mapping_value(emitter, event, false) - - case yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE: - return yaml_emitter_emit_block_sequence_item(emitter, event, true) - - case yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE: - return yaml_emitter_emit_block_sequence_item(emitter, event, false) - - case yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE: - return yaml_emitter_emit_block_mapping_key(emitter, event, true) - - case yaml_EMIT_BLOCK_MAPPING_KEY_STATE: - return yaml_emitter_emit_block_mapping_key(emitter, event, false) - - case yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE: - return yaml_emitter_emit_block_mapping_value(emitter, event, true) - - case yaml_EMIT_BLOCK_MAPPING_VALUE_STATE: - return yaml_emitter_emit_block_mapping_value(emitter, event, false) - - case yaml_EMIT_END_STATE: - return yaml_emitter_set_emitter_error(emitter, "expected nothing after STREAM-END") - } - panic("invalid emitter state") -} - -// Expect STREAM-START. 
-func yaml_emitter_emit_stream_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if event.typ != yaml_STREAM_START_EVENT { - return yaml_emitter_set_emitter_error(emitter, "expected STREAM-START") - } - if emitter.encoding == yaml_ANY_ENCODING { - emitter.encoding = event.encoding - if emitter.encoding == yaml_ANY_ENCODING { - emitter.encoding = yaml_UTF8_ENCODING - } - } - if emitter.best_indent < 2 || emitter.best_indent > 9 { - emitter.best_indent = 2 - } - if emitter.best_width >= 0 && emitter.best_width <= emitter.best_indent*2 { - emitter.best_width = 80 - } - if emitter.best_width < 0 { - emitter.best_width = 1<<31 - 1 - } - if emitter.line_break == yaml_ANY_BREAK { - emitter.line_break = yaml_LN_BREAK - } - - emitter.indent = -1 - emitter.line = 0 - emitter.column = 0 - emitter.whitespace = true - emitter.indention = true - - if emitter.encoding != yaml_UTF8_ENCODING { - if !yaml_emitter_write_bom(emitter) { - return false - } - } - emitter.state = yaml_EMIT_FIRST_DOCUMENT_START_STATE - return true -} - -// Expect DOCUMENT-START or STREAM-END. -func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - - if event.typ == yaml_DOCUMENT_START_EVENT { - - if event.version_directive != nil { - if !yaml_emitter_analyze_version_directive(emitter, event.version_directive) { - return false - } - } - - for i := 0; i < len(event.tag_directives); i++ { - tag_directive := &event.tag_directives[i] - if !yaml_emitter_analyze_tag_directive(emitter, tag_directive) { - return false - } - if !yaml_emitter_append_tag_directive(emitter, tag_directive, false) { - return false - } - } - - for i := 0; i < len(default_tag_directives); i++ { - tag_directive := &default_tag_directives[i] - if !yaml_emitter_append_tag_directive(emitter, tag_directive, true) { - return false - } - } - - implicit := event.implicit - if !first || emitter.canonical { - implicit = false - } - - if emitter.open_ended && (event.version_directive != nil || len(event.tag_directives) > 0) { - if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - - if event.version_directive != nil { - implicit = false - if !yaml_emitter_write_indicator(emitter, []byte("%YAML"), true, false, false) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte("1.1"), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - - if len(event.tag_directives) > 0 { - implicit = false - for i := 0; i < len(event.tag_directives); i++ { - tag_directive := &event.tag_directives[i] - if !yaml_emitter_write_indicator(emitter, []byte("%TAG"), true, false, false) { - return false - } - if !yaml_emitter_write_tag_handle(emitter, tag_directive.handle) { - return false - } - if !yaml_emitter_write_tag_content(emitter, tag_directive.prefix, true) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - } - - if yaml_emitter_check_empty_document(emitter) { - implicit = false - } - if !implicit { - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte("---"), true, false, false) { - return false - } - if emitter.canonical { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - } - - emitter.state = yaml_EMIT_DOCUMENT_CONTENT_STATE - return true - } - - if event.typ == yaml_STREAM_END_EVENT { - if emitter.open_ended { - if 
!yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_flush(emitter) { - return false - } - emitter.state = yaml_EMIT_END_STATE - return true - } - - return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-START or STREAM-END") -} - -// Expect the root node. -func yaml_emitter_emit_document_content(emitter *yaml_emitter_t, event *yaml_event_t) bool { - emitter.states = append(emitter.states, yaml_EMIT_DOCUMENT_END_STATE) - return yaml_emitter_emit_node(emitter, event, true, false, false, false) -} - -// Expect DOCUMENT-END. -func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if event.typ != yaml_DOCUMENT_END_EVENT { - return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-END") - } - if !yaml_emitter_write_indent(emitter) { - return false - } - if !event.implicit { - // [Go] Allocate the slice elsewhere. - if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_flush(emitter) { - return false - } - emitter.state = yaml_EMIT_DOCUMENT_START_STATE - emitter.tag_directives = emitter.tag_directives[:0] - return true -} - -// Expect a flow item node. -func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_write_indicator(emitter, []byte{'['}, true, true, false) { - return false - } - if !yaml_emitter_increase_indent(emitter, true, false) { - return false - } - emitter.flow_level++ - } - - if event.typ == yaml_SEQUENCE_END_EVENT { - emitter.flow_level-- - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - if emitter.canonical && !first { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{']'}, false, false, false) { - return false - } - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - - return true - } - - if !first { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - } - - if emitter.canonical || emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE) - return yaml_emitter_emit_node(emitter, event, false, true, false, false) -} - -// Expect a flow key node. 
-func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_write_indicator(emitter, []byte{'{'}, true, true, false) { - return false - } - if !yaml_emitter_increase_indent(emitter, true, false) { - return false - } - emitter.flow_level++ - } - - if event.typ == yaml_MAPPING_END_EVENT { - emitter.flow_level-- - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - if emitter.canonical && !first { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{'}'}, false, false, false) { - return false - } - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true - } - - if !first { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - } - if emitter.canonical || emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - - if !emitter.canonical && yaml_emitter_check_simple_key(emitter) { - emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, true) - } - if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, false) { - return false - } - emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a flow value node. -func yaml_emitter_emit_flow_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { - if simple { - if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { - return false - } - } else { - if emitter.canonical || emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, false) { - return false - } - } - emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a block item node. -func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_increase_indent(emitter, false, emitter.mapping_context && !emitter.indention) { - return false - } - } - if event.typ == yaml_SEQUENCE_END_EVENT { - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true - } - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte{'-'}, true, false, true) { - return false - } - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE) - return yaml_emitter_emit_node(emitter, event, false, true, false, false) -} - -// Expect a block key node. 
-func yaml_emitter_emit_block_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_increase_indent(emitter, false, false) { - return false - } - } - if event.typ == yaml_MAPPING_END_EVENT { - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true - } - if !yaml_emitter_write_indent(emitter) { - return false - } - if yaml_emitter_check_simple_key(emitter) { - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, true) - } - if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, true) { - return false - } - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a block value node. -func yaml_emitter_emit_block_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { - if simple { - if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { - return false - } - } else { - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, true) { - return false - } - } - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_KEY_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a node. -func yaml_emitter_emit_node(emitter *yaml_emitter_t, event *yaml_event_t, - root bool, sequence bool, mapping bool, simple_key bool) bool { - - emitter.root_context = root - emitter.sequence_context = sequence - emitter.mapping_context = mapping - emitter.simple_key_context = simple_key - - switch event.typ { - case yaml_ALIAS_EVENT: - return yaml_emitter_emit_alias(emitter, event) - case yaml_SCALAR_EVENT: - return yaml_emitter_emit_scalar(emitter, event) - case yaml_SEQUENCE_START_EVENT: - return yaml_emitter_emit_sequence_start(emitter, event) - case yaml_MAPPING_START_EVENT: - return yaml_emitter_emit_mapping_start(emitter, event) - default: - return yaml_emitter_set_emitter_error(emitter, - fmt.Sprintf("expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS, but got %v", event.typ)) - } -} - -// Expect ALIAS. -func yaml_emitter_emit_alias(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_process_anchor(emitter) { - return false - } - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true -} - -// Expect SCALAR. -func yaml_emitter_emit_scalar(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_select_scalar_style(emitter, event) { - return false - } - if !yaml_emitter_process_anchor(emitter) { - return false - } - if !yaml_emitter_process_tag(emitter) { - return false - } - if !yaml_emitter_increase_indent(emitter, true, false) { - return false - } - if !yaml_emitter_process_scalar(emitter) { - return false - } - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true -} - -// Expect SEQUENCE-START. 
-func yaml_emitter_emit_sequence_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_process_anchor(emitter) { - return false - } - if !yaml_emitter_process_tag(emitter) { - return false - } - if emitter.flow_level > 0 || emitter.canonical || event.sequence_style() == yaml_FLOW_SEQUENCE_STYLE || - yaml_emitter_check_empty_sequence(emitter) { - emitter.state = yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE - } else { - emitter.state = yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE - } - return true -} - -// Expect MAPPING-START. -func yaml_emitter_emit_mapping_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_process_anchor(emitter) { - return false - } - if !yaml_emitter_process_tag(emitter) { - return false - } - if emitter.flow_level > 0 || emitter.canonical || event.mapping_style() == yaml_FLOW_MAPPING_STYLE || - yaml_emitter_check_empty_mapping(emitter) { - emitter.state = yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE - } else { - emitter.state = yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE - } - return true -} - -// Check if the document content is an empty scalar. -func yaml_emitter_check_empty_document(emitter *yaml_emitter_t) bool { - return false // [Go] Huh? -} - -// Check if the next events represent an empty sequence. -func yaml_emitter_check_empty_sequence(emitter *yaml_emitter_t) bool { - if len(emitter.events)-emitter.events_head < 2 { - return false - } - return emitter.events[emitter.events_head].typ == yaml_SEQUENCE_START_EVENT && - emitter.events[emitter.events_head+1].typ == yaml_SEQUENCE_END_EVENT -} - -// Check if the next events represent an empty mapping. -func yaml_emitter_check_empty_mapping(emitter *yaml_emitter_t) bool { - if len(emitter.events)-emitter.events_head < 2 { - return false - } - return emitter.events[emitter.events_head].typ == yaml_MAPPING_START_EVENT && - emitter.events[emitter.events_head+1].typ == yaml_MAPPING_END_EVENT -} - -// Check if the next node can be expressed as a simple key. -func yaml_emitter_check_simple_key(emitter *yaml_emitter_t) bool { - length := 0 - switch emitter.events[emitter.events_head].typ { - case yaml_ALIAS_EVENT: - length += len(emitter.anchor_data.anchor) - case yaml_SCALAR_EVENT: - if emitter.scalar_data.multiline { - return false - } - length += len(emitter.anchor_data.anchor) + - len(emitter.tag_data.handle) + - len(emitter.tag_data.suffix) + - len(emitter.scalar_data.value) - case yaml_SEQUENCE_START_EVENT: - if !yaml_emitter_check_empty_sequence(emitter) { - return false - } - length += len(emitter.anchor_data.anchor) + - len(emitter.tag_data.handle) + - len(emitter.tag_data.suffix) - case yaml_MAPPING_START_EVENT: - if !yaml_emitter_check_empty_mapping(emitter) { - return false - } - length += len(emitter.anchor_data.anchor) + - len(emitter.tag_data.handle) + - len(emitter.tag_data.suffix) - default: - return false - } - return length <= 128 -} - -// Determine an acceptable scalar style. 
-func yaml_emitter_select_scalar_style(emitter *yaml_emitter_t, event *yaml_event_t) bool { - - no_tag := len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 - if no_tag && !event.implicit && !event.quoted_implicit { - return yaml_emitter_set_emitter_error(emitter, "neither tag nor implicit flags are specified") - } - - style := event.scalar_style() - if style == yaml_ANY_SCALAR_STYLE { - style = yaml_PLAIN_SCALAR_STYLE - } - if emitter.canonical { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - if emitter.simple_key_context && emitter.scalar_data.multiline { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - - if style == yaml_PLAIN_SCALAR_STYLE { - if emitter.flow_level > 0 && !emitter.scalar_data.flow_plain_allowed || - emitter.flow_level == 0 && !emitter.scalar_data.block_plain_allowed { - style = yaml_SINGLE_QUOTED_SCALAR_STYLE - } - if len(emitter.scalar_data.value) == 0 && (emitter.flow_level > 0 || emitter.simple_key_context) { - style = yaml_SINGLE_QUOTED_SCALAR_STYLE - } - if no_tag && !event.implicit { - style = yaml_SINGLE_QUOTED_SCALAR_STYLE - } - } - if style == yaml_SINGLE_QUOTED_SCALAR_STYLE { - if !emitter.scalar_data.single_quoted_allowed { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - } - if style == yaml_LITERAL_SCALAR_STYLE || style == yaml_FOLDED_SCALAR_STYLE { - if !emitter.scalar_data.block_allowed || emitter.flow_level > 0 || emitter.simple_key_context { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - } - - if no_tag && !event.quoted_implicit && style != yaml_PLAIN_SCALAR_STYLE { - emitter.tag_data.handle = []byte{'!'} - } - emitter.scalar_data.style = style - return true -} - -// Write an anchor. -func yaml_emitter_process_anchor(emitter *yaml_emitter_t) bool { - if emitter.anchor_data.anchor == nil { - return true - } - c := []byte{'&'} - if emitter.anchor_data.alias { - c[0] = '*' - } - if !yaml_emitter_write_indicator(emitter, c, true, false, false) { - return false - } - return yaml_emitter_write_anchor(emitter, emitter.anchor_data.anchor) -} - -// Write a tag. -func yaml_emitter_process_tag(emitter *yaml_emitter_t) bool { - if len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 { - return true - } - if len(emitter.tag_data.handle) > 0 { - if !yaml_emitter_write_tag_handle(emitter, emitter.tag_data.handle) { - return false - } - if len(emitter.tag_data.suffix) > 0 { - if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { - return false - } - } - } else { - // [Go] Allocate these slices elsewhere. - if !yaml_emitter_write_indicator(emitter, []byte("!<"), true, false, false) { - return false - } - if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte{'>'}, false, false, false) { - return false - } - } - return true -} - -// Write a scalar. 
-func yaml_emitter_process_scalar(emitter *yaml_emitter_t) bool { - switch emitter.scalar_data.style { - case yaml_PLAIN_SCALAR_STYLE: - return yaml_emitter_write_plain_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) - - case yaml_SINGLE_QUOTED_SCALAR_STYLE: - return yaml_emitter_write_single_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) - - case yaml_DOUBLE_QUOTED_SCALAR_STYLE: - return yaml_emitter_write_double_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) - - case yaml_LITERAL_SCALAR_STYLE: - return yaml_emitter_write_literal_scalar(emitter, emitter.scalar_data.value) - - case yaml_FOLDED_SCALAR_STYLE: - return yaml_emitter_write_folded_scalar(emitter, emitter.scalar_data.value) - } - panic("unknown scalar style") -} - -// Check if a %YAML directive is valid. -func yaml_emitter_analyze_version_directive(emitter *yaml_emitter_t, version_directive *yaml_version_directive_t) bool { - if version_directive.major != 1 || version_directive.minor != 1 { - return yaml_emitter_set_emitter_error(emitter, "incompatible %YAML directive") - } - return true -} - -// Check if a %TAG directive is valid. -func yaml_emitter_analyze_tag_directive(emitter *yaml_emitter_t, tag_directive *yaml_tag_directive_t) bool { - handle := tag_directive.handle - prefix := tag_directive.prefix - if len(handle) == 0 { - return yaml_emitter_set_emitter_error(emitter, "tag handle must not be empty") - } - if handle[0] != '!' { - return yaml_emitter_set_emitter_error(emitter, "tag handle must start with '!'") - } - if handle[len(handle)-1] != '!' { - return yaml_emitter_set_emitter_error(emitter, "tag handle must end with '!'") - } - for i := 1; i < len(handle)-1; i += width(handle[i]) { - if !is_alpha(handle, i) { - return yaml_emitter_set_emitter_error(emitter, "tag handle must contain alphanumerical characters only") - } - } - if len(prefix) == 0 { - return yaml_emitter_set_emitter_error(emitter, "tag prefix must not be empty") - } - return true -} - -// Check if an anchor is valid. -func yaml_emitter_analyze_anchor(emitter *yaml_emitter_t, anchor []byte, alias bool) bool { - if len(anchor) == 0 { - problem := "anchor value must not be empty" - if alias { - problem = "alias value must not be empty" - } - return yaml_emitter_set_emitter_error(emitter, problem) - } - for i := 0; i < len(anchor); i += width(anchor[i]) { - if !is_alpha(anchor, i) { - problem := "anchor value must contain alphanumerical characters only" - if alias { - problem = "alias value must contain alphanumerical characters only" - } - return yaml_emitter_set_emitter_error(emitter, problem) - } - } - emitter.anchor_data.anchor = anchor - emitter.anchor_data.alias = alias - return true -} - -// Check if a tag is valid. -func yaml_emitter_analyze_tag(emitter *yaml_emitter_t, tag []byte) bool { - if len(tag) == 0 { - return yaml_emitter_set_emitter_error(emitter, "tag value must not be empty") - } - for i := 0; i < len(emitter.tag_directives); i++ { - tag_directive := &emitter.tag_directives[i] - if bytes.HasPrefix(tag, tag_directive.prefix) { - emitter.tag_data.handle = tag_directive.handle - emitter.tag_data.suffix = tag[len(tag_directive.prefix):] - return true - } - } - emitter.tag_data.suffix = tag - return true -} - -// Check if a scalar is valid. 
-func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool { - var ( - block_indicators = false - flow_indicators = false - line_breaks = false - special_characters = false - - leading_space = false - leading_break = false - trailing_space = false - trailing_break = false - break_space = false - space_break = false - - preceded_by_whitespace = false - followed_by_whitespace = false - previous_space = false - previous_break = false - ) - - emitter.scalar_data.value = value - - if len(value) == 0 { - emitter.scalar_data.multiline = false - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = true - emitter.scalar_data.single_quoted_allowed = true - emitter.scalar_data.block_allowed = false - return true - } - - if len(value) >= 3 && ((value[0] == '-' && value[1] == '-' && value[2] == '-') || (value[0] == '.' && value[1] == '.' && value[2] == '.')) { - block_indicators = true - flow_indicators = true - } - - preceded_by_whitespace = true - for i, w := 0, 0; i < len(value); i += w { - w = width(value[i]) - followed_by_whitespace = i+w >= len(value) || is_blank(value, i+w) - - if i == 0 { - switch value[i] { - case '#', ',', '[', ']', '{', '}', '&', '*', '!', '|', '>', '\'', '"', '%', '@', '`': - flow_indicators = true - block_indicators = true - case '?', ':': - flow_indicators = true - if followed_by_whitespace { - block_indicators = true - } - case '-': - if followed_by_whitespace { - flow_indicators = true - block_indicators = true - } - } - } else { - switch value[i] { - case ',', '?', '[', ']', '{', '}': - flow_indicators = true - case ':': - flow_indicators = true - if followed_by_whitespace { - block_indicators = true - } - case '#': - if preceded_by_whitespace { - flow_indicators = true - block_indicators = true - } - } - } - - if !is_printable(value, i) || !is_ascii(value, i) && !emitter.unicode { - special_characters = true - } - if is_space(value, i) { - if i == 0 { - leading_space = true - } - if i+width(value[i]) == len(value) { - trailing_space = true - } - if previous_break { - break_space = true - } - previous_space = true - previous_break = false - } else if is_break(value, i) { - line_breaks = true - if i == 0 { - leading_break = true - } - if i+width(value[i]) == len(value) { - trailing_break = true - } - if previous_space { - space_break = true - } - previous_space = false - previous_break = true - } else { - previous_space = false - previous_break = false - } - - // [Go]: Why 'z'? Couldn't be the end of the string as that's the loop condition. 
- preceded_by_whitespace = is_blankz(value, i) - } - - emitter.scalar_data.multiline = line_breaks - emitter.scalar_data.flow_plain_allowed = true - emitter.scalar_data.block_plain_allowed = true - emitter.scalar_data.single_quoted_allowed = true - emitter.scalar_data.block_allowed = true - - if leading_space || leading_break || trailing_space || trailing_break { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - } - if trailing_space { - emitter.scalar_data.block_allowed = false - } - if break_space { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - emitter.scalar_data.single_quoted_allowed = false - } - if space_break || special_characters { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - emitter.scalar_data.single_quoted_allowed = false - emitter.scalar_data.block_allowed = false - } - if line_breaks { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - } - if flow_indicators { - emitter.scalar_data.flow_plain_allowed = false - } - if block_indicators { - emitter.scalar_data.block_plain_allowed = false - } - return true -} - -// Check if the event data is valid. -func yaml_emitter_analyze_event(emitter *yaml_emitter_t, event *yaml_event_t) bool { - - emitter.anchor_data.anchor = nil - emitter.tag_data.handle = nil - emitter.tag_data.suffix = nil - emitter.scalar_data.value = nil - - switch event.typ { - case yaml_ALIAS_EVENT: - if !yaml_emitter_analyze_anchor(emitter, event.anchor, true) { - return false - } - - case yaml_SCALAR_EVENT: - if len(event.anchor) > 0 { - if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { - return false - } - } - if len(event.tag) > 0 && (emitter.canonical || (!event.implicit && !event.quoted_implicit)) { - if !yaml_emitter_analyze_tag(emitter, event.tag) { - return false - } - } - if !yaml_emitter_analyze_scalar(emitter, event.value) { - return false - } - - case yaml_SEQUENCE_START_EVENT: - if len(event.anchor) > 0 { - if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { - return false - } - } - if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { - if !yaml_emitter_analyze_tag(emitter, event.tag) { - return false - } - } - - case yaml_MAPPING_START_EVENT: - if len(event.anchor) > 0 { - if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { - return false - } - } - if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { - if !yaml_emitter_analyze_tag(emitter, event.tag) { - return false - } - } - } - return true -} - -// Write the BOM character. 
-func yaml_emitter_write_bom(emitter *yaml_emitter_t) bool { - if !flush(emitter) { - return false - } - pos := emitter.buffer_pos - emitter.buffer[pos+0] = '\xEF' - emitter.buffer[pos+1] = '\xBB' - emitter.buffer[pos+2] = '\xBF' - emitter.buffer_pos += 3 - return true -} - -func yaml_emitter_write_indent(emitter *yaml_emitter_t) bool { - indent := emitter.indent - if indent < 0 { - indent = 0 - } - if !emitter.indention || emitter.column > indent || (emitter.column == indent && !emitter.whitespace) { - if !put_break(emitter) { - return false - } - } - for emitter.column < indent { - if !put(emitter, ' ') { - return false - } - } - emitter.whitespace = true - emitter.indention = true - return true -} - -func yaml_emitter_write_indicator(emitter *yaml_emitter_t, indicator []byte, need_whitespace, is_whitespace, is_indention bool) bool { - if need_whitespace && !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - if !write_all(emitter, indicator) { - return false - } - emitter.whitespace = is_whitespace - emitter.indention = (emitter.indention && is_indention) - emitter.open_ended = false - return true -} - -func yaml_emitter_write_anchor(emitter *yaml_emitter_t, value []byte) bool { - if !write_all(emitter, value) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_tag_handle(emitter *yaml_emitter_t, value []byte) bool { - if !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - if !write_all(emitter, value) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_tag_content(emitter *yaml_emitter_t, value []byte, need_whitespace bool) bool { - if need_whitespace && !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - for i := 0; i < len(value); { - var must_write bool - switch value[i] { - case ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', '.', '~', '*', '\'', '(', ')', '[', ']': - must_write = true - default: - must_write = is_alpha(value, i) - } - if must_write { - if !write(emitter, value, &i) { - return false - } - } else { - w := width(value[i]) - for k := 0; k < w; k++ { - octet := value[i] - i++ - if !put(emitter, '%') { - return false - } - - c := octet >> 4 - if c < 10 { - c += '0' - } else { - c += 'A' - 10 - } - if !put(emitter, c) { - return false - } - - c = octet & 0x0f - if c < 10 { - c += '0' - } else { - c += 'A' - 10 - } - if !put(emitter, c) { - return false - } - } - } - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { - if !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - - spaces := false - breaks := false - for i := 0; i < len(value); { - if is_space(value, i) { - if allow_breaks && !spaces && emitter.column > emitter.best_width && !is_space(value, i+1) { - if !yaml_emitter_write_indent(emitter) { - return false - } - i += width(value[i]) - } else { - if !write(emitter, value, &i) { - return false - } - } - spaces = true - } else if is_break(value, i) { - if !breaks && value[i] == '\n' { - if !put_break(emitter) { - return false - } - } - if !write_break(emitter, value, &i) { - return false - } - emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !write(emitter, value, &i) { - return false - } - emitter.indention = false - spaces = 
false - breaks = false - } - } - - emitter.whitespace = false - emitter.indention = false - if emitter.root_context { - emitter.open_ended = true - } - - return true -} - -func yaml_emitter_write_single_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { - - if !yaml_emitter_write_indicator(emitter, []byte{'\''}, true, false, false) { - return false - } - - spaces := false - breaks := false - for i := 0; i < len(value); { - if is_space(value, i) { - if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 && !is_space(value, i+1) { - if !yaml_emitter_write_indent(emitter) { - return false - } - i += width(value[i]) - } else { - if !write(emitter, value, &i) { - return false - } - } - spaces = true - } else if is_break(value, i) { - if !breaks && value[i] == '\n' { - if !put_break(emitter) { - return false - } - } - if !write_break(emitter, value, &i) { - return false - } - emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if value[i] == '\'' { - if !put(emitter, '\'') { - return false - } - } - if !write(emitter, value, &i) { - return false - } - emitter.indention = false - spaces = false - breaks = false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{'\''}, false, false, false) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_double_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { - spaces := false - if !yaml_emitter_write_indicator(emitter, []byte{'"'}, true, false, false) { - return false - } - - for i := 0; i < len(value); { - if !is_printable(value, i) || (!emitter.unicode && !is_ascii(value, i)) || - is_bom(value, i) || is_break(value, i) || - value[i] == '"' || value[i] == '\\' { - - octet := value[i] - - var w int - var v rune - switch { - case octet&0x80 == 0x00: - w, v = 1, rune(octet&0x7F) - case octet&0xE0 == 0xC0: - w, v = 2, rune(octet&0x1F) - case octet&0xF0 == 0xE0: - w, v = 3, rune(octet&0x0F) - case octet&0xF8 == 0xF0: - w, v = 4, rune(octet&0x07) - } - for k := 1; k < w; k++ { - octet = value[i+k] - v = (v << 6) + (rune(octet) & 0x3F) - } - i += w - - if !put(emitter, '\\') { - return false - } - - var ok bool - switch v { - case 0x00: - ok = put(emitter, '0') - case 0x07: - ok = put(emitter, 'a') - case 0x08: - ok = put(emitter, 'b') - case 0x09: - ok = put(emitter, 't') - case 0x0A: - ok = put(emitter, 'n') - case 0x0b: - ok = put(emitter, 'v') - case 0x0c: - ok = put(emitter, 'f') - case 0x0d: - ok = put(emitter, 'r') - case 0x1b: - ok = put(emitter, 'e') - case 0x22: - ok = put(emitter, '"') - case 0x5c: - ok = put(emitter, '\\') - case 0x85: - ok = put(emitter, 'N') - case 0xA0: - ok = put(emitter, '_') - case 0x2028: - ok = put(emitter, 'L') - case 0x2029: - ok = put(emitter, 'P') - default: - if v <= 0xFF { - ok = put(emitter, 'x') - w = 2 - } else if v <= 0xFFFF { - ok = put(emitter, 'u') - w = 4 - } else { - ok = put(emitter, 'U') - w = 8 - } - for k := (w - 1) * 4; ok && k >= 0; k -= 4 { - digit := byte((v >> uint(k)) & 0x0F) - if digit < 10 { - ok = put(emitter, digit+'0') - } else { - ok = put(emitter, digit+'A'-10) - } - } - } - if !ok { - return false - } - spaces = false - } else if is_space(value, i) { - if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 { - if !yaml_emitter_write_indent(emitter) { - return false - } - if is_space(value, i+1) { - if 
!put(emitter, '\\') { - return false - } - } - i += width(value[i]) - } else if !write(emitter, value, &i) { - return false - } - spaces = true - } else { - if !write(emitter, value, &i) { - return false - } - spaces = false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{'"'}, false, false, false) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_block_scalar_hints(emitter *yaml_emitter_t, value []byte) bool { - if is_space(value, 0) || is_break(value, 0) { - indent_hint := []byte{'0' + byte(emitter.best_indent)} - if !yaml_emitter_write_indicator(emitter, indent_hint, false, false, false) { - return false - } - } - - emitter.open_ended = false - - var chomp_hint [1]byte - if len(value) == 0 { - chomp_hint[0] = '-' - } else { - i := len(value) - 1 - for value[i]&0xC0 == 0x80 { - i-- - } - if !is_break(value, i) { - chomp_hint[0] = '-' - } else if i == 0 { - chomp_hint[0] = '+' - emitter.open_ended = true - } else { - i-- - for value[i]&0xC0 == 0x80 { - i-- - } - if is_break(value, i) { - chomp_hint[0] = '+' - emitter.open_ended = true - } - } - } - if chomp_hint[0] != 0 { - if !yaml_emitter_write_indicator(emitter, chomp_hint[:], false, false, false) { - return false - } - } - return true -} - -func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bool { - if !yaml_emitter_write_indicator(emitter, []byte{'|'}, true, false, false) { - return false - } - if !yaml_emitter_write_block_scalar_hints(emitter, value) { - return false - } - if !put_break(emitter) { - return false - } - emitter.indention = true - emitter.whitespace = true - breaks := true - for i := 0; i < len(value); { - if is_break(value, i) { - if !write_break(emitter, value, &i) { - return false - } - emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !write(emitter, value, &i) { - return false - } - emitter.indention = false - breaks = false - } - } - - return true -} - -func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) bool { - if !yaml_emitter_write_indicator(emitter, []byte{'>'}, true, false, false) { - return false - } - if !yaml_emitter_write_block_scalar_hints(emitter, value) { - return false - } - - if !put_break(emitter) { - return false - } - emitter.indention = true - emitter.whitespace = true - - breaks := true - leading_spaces := true - for i := 0; i < len(value); { - if is_break(value, i) { - if !breaks && !leading_spaces && value[i] == '\n' { - k := 0 - for is_break(value, k) { - k += width(value[k]) - } - if !is_blankz(value, k) { - if !put_break(emitter) { - return false - } - } - } - if !write_break(emitter, value, &i) { - return false - } - emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - leading_spaces = is_blank(value, i) - } - if !breaks && is_space(value, i) && !is_space(value, i+1) && emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - i += width(value[i]) - } else { - if !write(emitter, value, &i) { - return false - } - } - emitter.indention = false - breaks = false - } - } - return true -} diff --git a/vendor/gopkg.in/yaml.v2/encode.go b/vendor/gopkg.in/yaml.v2/encode.go index 0ee738e1..e69de29b 100644 --- a/vendor/gopkg.in/yaml.v2/encode.go +++ b/vendor/gopkg.in/yaml.v2/encode.go @@ -1,390 +0,0 @@ -package yaml - -import ( - "encoding" - "fmt" - "io" - 
"reflect" - "regexp" - "sort" - "strconv" - "strings" - "time" - "unicode/utf8" -) - -// jsonNumber is the interface of the encoding/json.Number datatype. -// Repeating the interface here avoids a dependency on encoding/json, and also -// supports other libraries like jsoniter, which use a similar datatype with -// the same interface. Detecting this interface is useful when dealing with -// structures containing json.Number, which is a string under the hood. The -// encoder should prefer the use of Int64(), Float64() and string(), in that -// order, when encoding this type. -type jsonNumber interface { - Float64() (float64, error) - Int64() (int64, error) - String() string -} - -type encoder struct { - emitter yaml_emitter_t - event yaml_event_t - out []byte - flow bool - // doneInit holds whether the initial stream_start_event has been - // emitted. - doneInit bool -} - -func newEncoder() *encoder { - e := &encoder{} - yaml_emitter_initialize(&e.emitter) - yaml_emitter_set_output_string(&e.emitter, &e.out) - yaml_emitter_set_unicode(&e.emitter, true) - return e -} - -func newEncoderWithWriter(w io.Writer) *encoder { - e := &encoder{} - yaml_emitter_initialize(&e.emitter) - yaml_emitter_set_output_writer(&e.emitter, w) - yaml_emitter_set_unicode(&e.emitter, true) - return e -} - -func (e *encoder) init() { - if e.doneInit { - return - } - yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING) - e.emit() - e.doneInit = true -} - -func (e *encoder) finish() { - e.emitter.open_ended = false - yaml_stream_end_event_initialize(&e.event) - e.emit() -} - -func (e *encoder) destroy() { - yaml_emitter_delete(&e.emitter) -} - -func (e *encoder) emit() { - // This will internally delete the e.event value. - e.must(yaml_emitter_emit(&e.emitter, &e.event)) -} - -func (e *encoder) must(ok bool) { - if !ok { - msg := e.emitter.problem - if msg == "" { - msg = "unknown problem generating YAML content" - } - failf("%s", msg) - } -} - -func (e *encoder) marshalDoc(tag string, in reflect.Value) { - e.init() - yaml_document_start_event_initialize(&e.event, nil, nil, true) - e.emit() - e.marshal(tag, in) - yaml_document_end_event_initialize(&e.event, true) - e.emit() -} - -func (e *encoder) marshal(tag string, in reflect.Value) { - if !in.IsValid() || in.Kind() == reflect.Ptr && in.IsNil() { - e.nilv() - return - } - iface := in.Interface() - switch m := iface.(type) { - case jsonNumber: - integer, err := m.Int64() - if err == nil { - // In this case the json.Number is a valid int64 - in = reflect.ValueOf(integer) - break - } - float, err := m.Float64() - if err == nil { - // In this case the json.Number is a valid float64 - in = reflect.ValueOf(float) - break - } - // fallback case - no number could be obtained - in = reflect.ValueOf(m.String()) - case time.Time, *time.Time: - // Although time.Time implements TextMarshaler, - // we don't want to treat it as a string for YAML - // purposes because YAML has special support for - // timestamps. 
- case Marshaler: - v, err := m.MarshalYAML() - if err != nil { - fail(err) - } - if v == nil { - e.nilv() - return - } - in = reflect.ValueOf(v) - case encoding.TextMarshaler: - text, err := m.MarshalText() - if err != nil { - fail(err) - } - in = reflect.ValueOf(string(text)) - case nil: - e.nilv() - return - } - switch in.Kind() { - case reflect.Interface: - e.marshal(tag, in.Elem()) - case reflect.Map: - e.mapv(tag, in) - case reflect.Ptr: - if in.Type() == ptrTimeType { - e.timev(tag, in.Elem()) - } else { - e.marshal(tag, in.Elem()) - } - case reflect.Struct: - if in.Type() == timeType { - e.timev(tag, in) - } else { - e.structv(tag, in) - } - case reflect.Slice, reflect.Array: - if in.Type().Elem() == mapItemType { - e.itemsv(tag, in) - } else { - e.slicev(tag, in) - } - case reflect.String: - e.stringv(tag, in) - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - if in.Type() == durationType { - e.stringv(tag, reflect.ValueOf(iface.(time.Duration).String())) - } else { - e.intv(tag, in) - } - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - e.uintv(tag, in) - case reflect.Float32, reflect.Float64: - e.floatv(tag, in) - case reflect.Bool: - e.boolv(tag, in) - default: - panic("cannot marshal type: " + in.Type().String()) - } -} - -func (e *encoder) mapv(tag string, in reflect.Value) { - e.mappingv(tag, func() { - keys := keyList(in.MapKeys()) - sort.Sort(keys) - for _, k := range keys { - e.marshal("", k) - e.marshal("", in.MapIndex(k)) - } - }) -} - -func (e *encoder) itemsv(tag string, in reflect.Value) { - e.mappingv(tag, func() { - slice := in.Convert(reflect.TypeOf([]MapItem{})).Interface().([]MapItem) - for _, item := range slice { - e.marshal("", reflect.ValueOf(item.Key)) - e.marshal("", reflect.ValueOf(item.Value)) - } - }) -} - -func (e *encoder) structv(tag string, in reflect.Value) { - sinfo, err := getStructInfo(in.Type()) - if err != nil { - panic(err) - } - e.mappingv(tag, func() { - for _, info := range sinfo.FieldsList { - var value reflect.Value - if info.Inline == nil { - value = in.Field(info.Num) - } else { - value = in.FieldByIndex(info.Inline) - } - if info.OmitEmpty && isZero(value) { - continue - } - e.marshal("", reflect.ValueOf(info.Key)) - e.flow = info.Flow - e.marshal("", value) - } - if sinfo.InlineMap >= 0 { - m := in.Field(sinfo.InlineMap) - if m.Len() > 0 { - e.flow = false - keys := keyList(m.MapKeys()) - sort.Sort(keys) - for _, k := range keys { - if _, found := sinfo.FieldsMap[k.String()]; found { - panic(fmt.Sprintf("Can't have key %q in inlined map; conflicts with struct field", k.String())) - } - e.marshal("", k) - e.flow = false - e.marshal("", m.MapIndex(k)) - } - } - } - }) -} - -func (e *encoder) mappingv(tag string, f func()) { - implicit := tag == "" - style := yaml_BLOCK_MAPPING_STYLE - if e.flow { - e.flow = false - style = yaml_FLOW_MAPPING_STYLE - } - yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style) - e.emit() - f() - yaml_mapping_end_event_initialize(&e.event) - e.emit() -} - -func (e *encoder) slicev(tag string, in reflect.Value) { - implicit := tag == "" - style := yaml_BLOCK_SEQUENCE_STYLE - if e.flow { - e.flow = false - style = yaml_FLOW_SEQUENCE_STYLE - } - e.must(yaml_sequence_start_event_initialize(&e.event, nil, []byte(tag), implicit, style)) - e.emit() - n := in.Len() - for i := 0; i < n; i++ { - e.marshal("", in.Index(i)) - } - e.must(yaml_sequence_end_event_initialize(&e.event)) - e.emit() -} - -// isBase60 
returns whether s is in base 60 notation as defined in YAML 1.1. -// -// The base 60 float notation in YAML 1.1 is a terrible idea and is unsupported -// in YAML 1.2 and by this package, but these should be marshalled quoted for -// the time being for compatibility with other parsers. -func isBase60Float(s string) (result bool) { - // Fast path. - if s == "" { - return false - } - c := s[0] - if !(c == '+' || c == '-' || c >= '0' && c <= '9') || strings.IndexByte(s, ':') < 0 { - return false - } - // Do the full match. - return base60float.MatchString(s) -} - -// From http://yaml.org/type/float.html, except the regular expression there -// is bogus. In practice parsers do not enforce the "\.[0-9_]*" suffix. -var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0-9_]*)?$`) - -func (e *encoder) stringv(tag string, in reflect.Value) { - var style yaml_scalar_style_t - s := in.String() - canUsePlain := true - switch { - case !utf8.ValidString(s): - if tag == yaml_BINARY_TAG { - failf("explicitly tagged !!binary data must be base64-encoded") - } - if tag != "" { - failf("cannot marshal invalid UTF-8 data as %s", shortTag(tag)) - } - // It can't be encoded directly as YAML so use a binary tag - // and encode it as base64. - tag = yaml_BINARY_TAG - s = encodeBase64(s) - case tag == "": - // Check to see if it would resolve to a specific - // tag when encoded unquoted. If it doesn't, - // there's no need to quote it. - rtag, _ := resolve("", s) - canUsePlain = rtag == yaml_STR_TAG && !isBase60Float(s) - } - // Note: it's possible for user code to emit invalid YAML - // if they explicitly specify a tag and a string containing - // text that's incompatible with that tag. - switch { - case strings.Contains(s, "\n"): - style = yaml_LITERAL_SCALAR_STYLE - case canUsePlain: - style = yaml_PLAIN_SCALAR_STYLE - default: - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - e.emitScalar(s, "", tag, style) -} - -func (e *encoder) boolv(tag string, in reflect.Value) { - var s string - if in.Bool() { - s = "true" - } else { - s = "false" - } - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) intv(tag string, in reflect.Value) { - s := strconv.FormatInt(in.Int(), 10) - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) uintv(tag string, in reflect.Value) { - s := strconv.FormatUint(in.Uint(), 10) - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) timev(tag string, in reflect.Value) { - t := in.Interface().(time.Time) - s := t.Format(time.RFC3339Nano) - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) floatv(tag string, in reflect.Value) { - // Issue #352: When formatting, use the precision of the underlying value - precision := 64 - if in.Kind() == reflect.Float32 { - precision = 32 - } - - s := strconv.FormatFloat(in.Float(), 'g', -1, precision) - switch s { - case "+Inf": - s = ".inf" - case "-Inf": - s = "-.inf" - case "NaN": - s = ".nan" - } - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) nilv() { - e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) { - implicit := tag == "" - e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style)) - e.emit() -} diff --git a/vendor/gopkg.in/yaml.v2/parserc.go b/vendor/gopkg.in/yaml.v2/parserc.go index 81d05dfe..e69de29b 100644 --- a/vendor/gopkg.in/yaml.v2/parserc.go +++ 
b/vendor/gopkg.in/yaml.v2/parserc.go @@ -1,1095 +0,0 @@ -package yaml - -import ( - "bytes" -) - -// The parser implements the following grammar: -// -// stream ::= STREAM-START implicit_document? explicit_document* STREAM-END -// implicit_document ::= block_node DOCUMENT-END* -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -// block_node_or_indentless_sequence ::= -// ALIAS -// | properties (block_content | indentless_block_sequence)? -// | block_content -// | indentless_block_sequence -// block_node ::= ALIAS -// | properties block_content? -// | block_content -// flow_node ::= ALIAS -// | properties flow_content? -// | flow_content -// properties ::= TAG ANCHOR? | ANCHOR TAG? -// block_content ::= block_collection | flow_collection | SCALAR -// flow_content ::= flow_collection | SCALAR -// block_collection ::= block_sequence | block_mapping -// flow_collection ::= flow_sequence | flow_mapping -// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END -// indentless_sequence ::= (BLOCK-ENTRY block_node?)+ -// block_mapping ::= BLOCK-MAPPING_START -// ((KEY block_node_or_indentless_sequence?)? -// (VALUE block_node_or_indentless_sequence?)?)* -// BLOCK-END -// flow_sequence ::= FLOW-SEQUENCE-START -// (flow_sequence_entry FLOW-ENTRY)* -// flow_sequence_entry? -// FLOW-SEQUENCE-END -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// flow_mapping ::= FLOW-MAPPING-START -// (flow_mapping_entry FLOW-ENTRY)* -// flow_mapping_entry? -// FLOW-MAPPING-END -// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? - -// Peek the next token in the token queue. -func peek_token(parser *yaml_parser_t) *yaml_token_t { - if parser.token_available || yaml_parser_fetch_more_tokens(parser) { - return &parser.tokens[parser.tokens_head] - } - return nil -} - -// Remove the next token from the queue (must be called after peek_token). -func skip_token(parser *yaml_parser_t) { - parser.token_available = false - parser.tokens_parsed++ - parser.stream_end_produced = parser.tokens[parser.tokens_head].typ == yaml_STREAM_END_TOKEN - parser.tokens_head++ -} - -// Get the next event. -func yaml_parser_parse(parser *yaml_parser_t, event *yaml_event_t) bool { - // Erase the event object. - *event = yaml_event_t{} - - // No events after the end of the stream or error. - if parser.stream_end_produced || parser.error != yaml_NO_ERROR || parser.state == yaml_PARSE_END_STATE { - return true - } - - // Generate the next event. - return yaml_parser_state_machine(parser, event) -} - -// Set parser error. -func yaml_parser_set_parser_error(parser *yaml_parser_t, problem string, problem_mark yaml_mark_t) bool { - parser.error = yaml_PARSER_ERROR - parser.problem = problem - parser.problem_mark = problem_mark - return false -} - -func yaml_parser_set_parser_error_context(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string, problem_mark yaml_mark_t) bool { - parser.error = yaml_PARSER_ERROR - parser.context = context - parser.context_mark = context_mark - parser.problem = problem - parser.problem_mark = problem_mark - return false -} - -// State dispatcher. 
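The deleted parserc.go implements the grammar quoted above as a token-driven state machine that the package's decoder drives internally. As a point of reference, a minimal sketch of the same parse exercised through the public gopkg.in/yaml.v2 API (assuming the un-vendored module is consumed after this change; the struct and input are illustrative only):

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	// The vendored parser turns this byte stream into tokens and events;
	// Unmarshal drives that state machine and maps nodes onto Go values.
	src := []byte("name: library\nversion: 1.0\ntags: [go, yaml]\n")

	var doc struct {
		Name    string   `yaml:"name"`
		Version string   `yaml:"version"`
		Tags    []string `yaml:"tags"`
	}
	if err := yaml.Unmarshal(src, &doc); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", doc)
}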
-func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool { - //trace("yaml_parser_state_machine", "state:", parser.state.String()) - - switch parser.state { - case yaml_PARSE_STREAM_START_STATE: - return yaml_parser_parse_stream_start(parser, event) - - case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE: - return yaml_parser_parse_document_start(parser, event, true) - - case yaml_PARSE_DOCUMENT_START_STATE: - return yaml_parser_parse_document_start(parser, event, false) - - case yaml_PARSE_DOCUMENT_CONTENT_STATE: - return yaml_parser_parse_document_content(parser, event) - - case yaml_PARSE_DOCUMENT_END_STATE: - return yaml_parser_parse_document_end(parser, event) - - case yaml_PARSE_BLOCK_NODE_STATE: - return yaml_parser_parse_node(parser, event, true, false) - - case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: - return yaml_parser_parse_node(parser, event, true, true) - - case yaml_PARSE_FLOW_NODE_STATE: - return yaml_parser_parse_node(parser, event, false, false) - - case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: - return yaml_parser_parse_block_sequence_entry(parser, event, true) - - case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: - return yaml_parser_parse_block_sequence_entry(parser, event, false) - - case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: - return yaml_parser_parse_indentless_sequence_entry(parser, event) - - case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: - return yaml_parser_parse_block_mapping_key(parser, event, true) - - case yaml_PARSE_BLOCK_MAPPING_KEY_STATE: - return yaml_parser_parse_block_mapping_key(parser, event, false) - - case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE: - return yaml_parser_parse_block_mapping_value(parser, event) - - case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: - return yaml_parser_parse_flow_sequence_entry(parser, event, true) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE: - return yaml_parser_parse_flow_sequence_entry(parser, event, false) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: - return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: - return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: - return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event) - - case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: - return yaml_parser_parse_flow_mapping_key(parser, event, true) - - case yaml_PARSE_FLOW_MAPPING_KEY_STATE: - return yaml_parser_parse_flow_mapping_key(parser, event, false) - - case yaml_PARSE_FLOW_MAPPING_VALUE_STATE: - return yaml_parser_parse_flow_mapping_value(parser, event, false) - - case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: - return yaml_parser_parse_flow_mapping_value(parser, event, true) - - default: - panic("invalid parser state") - } -} - -// Parse the production: -// stream ::= STREAM-START implicit_document? 
explicit_document* STREAM-END -// ************ -func yaml_parser_parse_stream_start(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_STREAM_START_TOKEN { - return yaml_parser_set_parser_error(parser, "did not find expected ", token.start_mark) - } - parser.state = yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE - *event = yaml_event_t{ - typ: yaml_STREAM_START_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - encoding: token.encoding, - } - skip_token(parser) - return true -} - -// Parse the productions: -// implicit_document ::= block_node DOCUMENT-END* -// * -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -// ************************* -func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t, implicit bool) bool { - - token := peek_token(parser) - if token == nil { - return false - } - - // Parse extra document end indicators. - if !implicit { - for token.typ == yaml_DOCUMENT_END_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - } - - if implicit && token.typ != yaml_VERSION_DIRECTIVE_TOKEN && - token.typ != yaml_TAG_DIRECTIVE_TOKEN && - token.typ != yaml_DOCUMENT_START_TOKEN && - token.typ != yaml_STREAM_END_TOKEN { - // Parse an implicit document. - if !yaml_parser_process_directives(parser, nil, nil) { - return false - } - parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE) - parser.state = yaml_PARSE_BLOCK_NODE_STATE - - *event = yaml_event_t{ - typ: yaml_DOCUMENT_START_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - - } else if token.typ != yaml_STREAM_END_TOKEN { - // Parse an explicit document. - var version_directive *yaml_version_directive_t - var tag_directives []yaml_tag_directive_t - start_mark := token.start_mark - if !yaml_parser_process_directives(parser, &version_directive, &tag_directives) { - return false - } - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_DOCUMENT_START_TOKEN { - yaml_parser_set_parser_error(parser, - "did not find expected ", token.start_mark) - return false - } - parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE) - parser.state = yaml_PARSE_DOCUMENT_CONTENT_STATE - end_mark := token.end_mark - - *event = yaml_event_t{ - typ: yaml_DOCUMENT_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - version_directive: version_directive, - tag_directives: tag_directives, - implicit: false, - } - skip_token(parser) - - } else { - // Parse the stream end. - parser.state = yaml_PARSE_END_STATE - *event = yaml_event_t{ - typ: yaml_STREAM_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - skip_token(parser) - } - - return true -} - -// Parse the productions: -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? 
DOCUMENT-END* -// *********** -// -func yaml_parser_parse_document_content(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_VERSION_DIRECTIVE_TOKEN || - token.typ == yaml_TAG_DIRECTIVE_TOKEN || - token.typ == yaml_DOCUMENT_START_TOKEN || - token.typ == yaml_DOCUMENT_END_TOKEN || - token.typ == yaml_STREAM_END_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - return yaml_parser_process_empty_scalar(parser, event, - token.start_mark) - } - return yaml_parser_parse_node(parser, event, true, false) -} - -// Parse the productions: -// implicit_document ::= block_node DOCUMENT-END* -// ************* -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -// -func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - - start_mark := token.start_mark - end_mark := token.start_mark - - implicit := true - if token.typ == yaml_DOCUMENT_END_TOKEN { - end_mark = token.end_mark - skip_token(parser) - implicit = false - } - - parser.tag_directives = parser.tag_directives[:0] - - parser.state = yaml_PARSE_DOCUMENT_START_STATE - *event = yaml_event_t{ - typ: yaml_DOCUMENT_END_EVENT, - start_mark: start_mark, - end_mark: end_mark, - implicit: implicit, - } - return true -} - -// Parse the productions: -// block_node_or_indentless_sequence ::= -// ALIAS -// ***** -// | properties (block_content | indentless_block_sequence)? -// ********** * -// | block_content | indentless_block_sequence -// * -// block_node ::= ALIAS -// ***** -// | properties block_content? -// ********** * -// | block_content -// * -// flow_node ::= ALIAS -// ***** -// | properties flow_content? -// ********** * -// | flow_content -// * -// properties ::= TAG ANCHOR? | ANCHOR TAG? 
-// ************************* -// block_content ::= block_collection | flow_collection | SCALAR -// ****** -// flow_content ::= flow_collection | SCALAR -// ****** -func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, indentless_sequence bool) bool { - //defer trace("yaml_parser_parse_node", "block:", block, "indentless_sequence:", indentless_sequence)() - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_ALIAS_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - *event = yaml_event_t{ - typ: yaml_ALIAS_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - anchor: token.value, - } - skip_token(parser) - return true - } - - start_mark := token.start_mark - end_mark := token.start_mark - - var tag_token bool - var tag_handle, tag_suffix, anchor []byte - var tag_mark yaml_mark_t - if token.typ == yaml_ANCHOR_TOKEN { - anchor = token.value - start_mark = token.start_mark - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_TAG_TOKEN { - tag_token = true - tag_handle = token.value - tag_suffix = token.suffix - tag_mark = token.start_mark - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - } else if token.typ == yaml_TAG_TOKEN { - tag_token = true - tag_handle = token.value - tag_suffix = token.suffix - start_mark = token.start_mark - tag_mark = token.start_mark - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_ANCHOR_TOKEN { - anchor = token.value - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - } - - var tag []byte - if tag_token { - if len(tag_handle) == 0 { - tag = tag_suffix - tag_suffix = nil - } else { - for i := range parser.tag_directives { - if bytes.Equal(parser.tag_directives[i].handle, tag_handle) { - tag = append([]byte(nil), parser.tag_directives[i].prefix...) - tag = append(tag, tag_suffix...) 
- break - } - } - if len(tag) == 0 { - yaml_parser_set_parser_error_context(parser, - "while parsing a node", start_mark, - "found undefined tag handle", tag_mark) - return false - } - } - } - - implicit := len(tag) == 0 - if indentless_sequence && token.typ == yaml_BLOCK_ENTRY_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), - } - return true - } - if token.typ == yaml_SCALAR_TOKEN { - var plain_implicit, quoted_implicit bool - end_mark = token.end_mark - if (len(tag) == 0 && token.style == yaml_PLAIN_SCALAR_STYLE) || (len(tag) == 1 && tag[0] == '!') { - plain_implicit = true - } else if len(tag) == 0 { - quoted_implicit = true - } - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - value: token.value, - implicit: plain_implicit, - quoted_implicit: quoted_implicit, - style: yaml_style_t(token.style), - } - skip_token(parser) - return true - } - if token.typ == yaml_FLOW_SEQUENCE_START_TOKEN { - // [Go] Some of the events below can be merged as they differ only on style. - end_mark = token.end_mark - parser.state = yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_FLOW_SEQUENCE_STYLE), - } - return true - } - if token.typ == yaml_FLOW_MAPPING_START_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), - } - return true - } - if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), - } - return true - } - if block && token.typ == yaml_BLOCK_MAPPING_START_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_BLOCK_MAPPING_STYLE), - } - return true - } - if len(anchor) > 0 || len(tag) > 0 { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - quoted_implicit: false, - style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), - } - return true - } - - context := "while parsing a flow node" - if block { - context = "while parsing a block node" - } - yaml_parser_set_parser_error_context(parser, context, start_mark, - "did not find expected node content", token.start_mark) - return false -} - -// Parse the productions: -// 
block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END -// ******************** *********** * ********* -// -func yaml_parser_parse_block_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_BLOCK_ENTRY_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_BLOCK_ENTRY_TOKEN && token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE) - return yaml_parser_parse_node(parser, event, true, false) - } else { - parser.state = yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - } - if token.typ == yaml_BLOCK_END_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - - skip_token(parser) - return true - } - - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a block collection", context_mark, - "did not find expected '-' indicator", token.start_mark) -} - -// Parse the productions: -// indentless_sequence ::= (BLOCK-ENTRY block_node?)+ -// *********** * -func yaml_parser_parse_indentless_sequence_entry(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_BLOCK_ENTRY_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_BLOCK_ENTRY_TOKEN && - token.typ != yaml_KEY_TOKEN && - token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE) - return yaml_parser_parse_node(parser, event, true, false) - } - parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - start_mark: token.start_mark, - end_mark: token.start_mark, // [Go] Shouldn't this be token.end_mark? - } - return true -} - -// Parse the productions: -// block_mapping ::= BLOCK-MAPPING_START -// ******************* -// ((KEY block_node_or_indentless_sequence?)? 
-// *** * -// (VALUE block_node_or_indentless_sequence?)?)* -// -// BLOCK-END -// ********* -// -func yaml_parser_parse_block_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_KEY_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_KEY_TOKEN && - token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_VALUE_STATE) - return yaml_parser_parse_node(parser, event, true, true) - } else { - parser.state = yaml_PARSE_BLOCK_MAPPING_VALUE_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - } else if token.typ == yaml_BLOCK_END_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - skip_token(parser) - return true - } - - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a block mapping", context_mark, - "did not find expected key", token.start_mark) -} - -// Parse the productions: -// block_mapping ::= BLOCK-MAPPING_START -// -// ((KEY block_node_or_indentless_sequence?)? -// -// (VALUE block_node_or_indentless_sequence?)?)* -// ***** * -// BLOCK-END -// -// -func yaml_parser_parse_block_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_VALUE_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_KEY_TOKEN && - token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_KEY_STATE) - return yaml_parser_parse_node(parser, event, true, true) - } - parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) -} - -// Parse the productions: -// flow_sequence ::= FLOW-SEQUENCE-START -// ******************* -// (flow_sequence_entry FLOW-ENTRY)* -// * ********** -// flow_sequence_entry? -// * -// FLOW-SEQUENCE-END -// ***************** -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
-// * -// -func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - if !first { - if token.typ == yaml_FLOW_ENTRY_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } else { - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a flow sequence", context_mark, - "did not find expected ',' or ']'", token.start_mark) - } - } - - if token.typ == yaml_KEY_TOKEN { - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - implicit: true, - style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), - } - skip_token(parser) - return true - } else if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - - skip_token(parser) - return true -} - -// -// Parse the productions: -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// *** * -// -func yaml_parser_parse_flow_sequence_entry_mapping_key(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_FLOW_ENTRY_TOKEN && - token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - mark := token.end_mark - skip_token(parser) - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) -} - -// Parse the productions: -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// ***** * -// -func yaml_parser_parse_flow_sequence_entry_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_VALUE_TOKEN { - skip_token(parser) - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) -} - -// Parse the productions: -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
-// * -// -func yaml_parser_parse_flow_sequence_entry_mapping_end(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - start_mark: token.start_mark, - end_mark: token.start_mark, // [Go] Shouldn't this be end_mark? - } - return true -} - -// Parse the productions: -// flow_mapping ::= FLOW-MAPPING-START -// ****************** -// (flow_mapping_entry FLOW-ENTRY)* -// * ********** -// flow_mapping_entry? -// ****************** -// FLOW-MAPPING-END -// **************** -// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// * *** * -// -func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ != yaml_FLOW_MAPPING_END_TOKEN { - if !first { - if token.typ == yaml_FLOW_ENTRY_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } else { - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a flow mapping", context_mark, - "did not find expected ',' or '}'", token.start_mark) - } - } - - if token.typ == yaml_KEY_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_FLOW_ENTRY_TOKEN && - token.typ != yaml_FLOW_MAPPING_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_VALUE_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } else { - parser.state = yaml_PARSE_FLOW_MAPPING_VALUE_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) - } - } else if token.typ != yaml_FLOW_MAPPING_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - skip_token(parser) - return true -} - -// Parse the productions: -// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// * ***** * -// -func yaml_parser_parse_flow_mapping_value(parser *yaml_parser_t, event *yaml_event_t, empty bool) bool { - token := peek_token(parser) - if token == nil { - return false - } - if empty { - parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) - } - if token.typ == yaml_VALUE_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_MAPPING_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_KEY_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) -} - -// Generate an empty scalar event. 
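The flow-mapping states above route a key that has no value through the empty-value state, which emits a null scalar for it. A small hedged example of the observable behaviour via the public API (input chosen only to show the empty value):

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	// "b" has no value in the flow mapping, so the parser goes through the
	// empty-value state and the key decodes to a nil interface value.
	var m map[string]interface{}
	if err := yaml.Unmarshal([]byte("{a: 1, b: }"), &m); err != nil {
		panic(err)
	}
	fmt.Println(m["a"], m["b"] == nil) // 1 true
}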
-func yaml_parser_process_empty_scalar(parser *yaml_parser_t, event *yaml_event_t, mark yaml_mark_t) bool { - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - start_mark: mark, - end_mark: mark, - value: nil, // Empty - implicit: true, - style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), - } - return true -} - -var default_tag_directives = []yaml_tag_directive_t{ - {[]byte("!"), []byte("!")}, - {[]byte("!!"), []byte("tag:yaml.org,2002:")}, -} - -// Parse directives. -func yaml_parser_process_directives(parser *yaml_parser_t, - version_directive_ref **yaml_version_directive_t, - tag_directives_ref *[]yaml_tag_directive_t) bool { - - var version_directive *yaml_version_directive_t - var tag_directives []yaml_tag_directive_t - - token := peek_token(parser) - if token == nil { - return false - } - - for token.typ == yaml_VERSION_DIRECTIVE_TOKEN || token.typ == yaml_TAG_DIRECTIVE_TOKEN { - if token.typ == yaml_VERSION_DIRECTIVE_TOKEN { - if version_directive != nil { - yaml_parser_set_parser_error(parser, - "found duplicate %YAML directive", token.start_mark) - return false - } - if token.major != 1 || token.minor != 1 { - yaml_parser_set_parser_error(parser, - "found incompatible YAML document", token.start_mark) - return false - } - version_directive = &yaml_version_directive_t{ - major: token.major, - minor: token.minor, - } - } else if token.typ == yaml_TAG_DIRECTIVE_TOKEN { - value := yaml_tag_directive_t{ - handle: token.value, - prefix: token.prefix, - } - if !yaml_parser_append_tag_directive(parser, value, false, token.start_mark) { - return false - } - tag_directives = append(tag_directives, value) - } - - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - - for i := range default_tag_directives { - if !yaml_parser_append_tag_directive(parser, default_tag_directives[i], true, token.start_mark) { - return false - } - } - - if version_directive_ref != nil { - *version_directive_ref = version_directive - } - if tag_directives_ref != nil { - *tag_directives_ref = tag_directives - } - return true -} - -// Append a tag directive to the directives stack. -func yaml_parser_append_tag_directive(parser *yaml_parser_t, value yaml_tag_directive_t, allow_duplicates bool, mark yaml_mark_t) bool { - for i := range parser.tag_directives { - if bytes.Equal(value.handle, parser.tag_directives[i].handle) { - if allow_duplicates { - return true - } - return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark) - } - } - - // [Go] I suspect the copy is unnecessary. This was likely done - // because there was no way to track ownership of the data. - value_copy := yaml_tag_directive_t{ - handle: make([]byte, len(value.handle)), - prefix: make([]byte, len(value.prefix)), - } - copy(value_copy.handle, value.handle) - copy(value_copy.prefix, value.prefix) - parser.tag_directives = append(parser.tag_directives, value_copy) - return true -} diff --git a/vendor/gopkg.in/yaml.v2/readerc.go b/vendor/gopkg.in/yaml.v2/readerc.go index 7c1f5fac..e69de29b 100644 --- a/vendor/gopkg.in/yaml.v2/readerc.go +++ b/vendor/gopkg.in/yaml.v2/readerc.go @@ -1,412 +0,0 @@ -package yaml - -import ( - "io" -) - -// Set the reader error and return 0. -func yaml_parser_set_reader_error(parser *yaml_parser_t, problem string, offset int, value int) bool { - parser.error = yaml_READER_ERROR - parser.problem = problem - parser.problem_offset = offset - parser.problem_value = value - return false -} - -// Byte order marks. 
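The reader code that follows detects the stream encoding from an optional leading byte-order mark and falls back to UTF-8 when none is present. A standalone sketch of the same check (detectEncoding is an illustrative helper written for this note, not part of the package):

package main

import (
	"bytes"
	"fmt"
)

// detectEncoding mirrors the BOM test done by the deleted
// yaml_parser_determine_encoding: inspect the first bytes and
// assume UTF-8 when no byte-order mark is found.
func detectEncoding(b []byte) string {
	switch {
	case bytes.HasPrefix(b, []byte("\xff\xfe")):
		return "UTF-16LE"
	case bytes.HasPrefix(b, []byte("\xfe\xff")):
		return "UTF-16BE"
	case bytes.HasPrefix(b, []byte("\xef\xbb\xbf")):
		return "UTF-8 (with BOM)"
	default:
		return "UTF-8 (assumed)"
	}
}

func main() {
	fmt.Println(detectEncoding([]byte("\xef\xbb\xbfkey: value")))
	fmt.Println(detectEncoding([]byte("key: value")))
}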
-const ( - bom_UTF8 = "\xef\xbb\xbf" - bom_UTF16LE = "\xff\xfe" - bom_UTF16BE = "\xfe\xff" -) - -// Determine the input stream encoding by checking the BOM symbol. If no BOM is -// found, the UTF-8 encoding is assumed. Return 1 on success, 0 on failure. -func yaml_parser_determine_encoding(parser *yaml_parser_t) bool { - // Ensure that we had enough bytes in the raw buffer. - for !parser.eof && len(parser.raw_buffer)-parser.raw_buffer_pos < 3 { - if !yaml_parser_update_raw_buffer(parser) { - return false - } - } - - // Determine the encoding. - buf := parser.raw_buffer - pos := parser.raw_buffer_pos - avail := len(buf) - pos - if avail >= 2 && buf[pos] == bom_UTF16LE[0] && buf[pos+1] == bom_UTF16LE[1] { - parser.encoding = yaml_UTF16LE_ENCODING - parser.raw_buffer_pos += 2 - parser.offset += 2 - } else if avail >= 2 && buf[pos] == bom_UTF16BE[0] && buf[pos+1] == bom_UTF16BE[1] { - parser.encoding = yaml_UTF16BE_ENCODING - parser.raw_buffer_pos += 2 - parser.offset += 2 - } else if avail >= 3 && buf[pos] == bom_UTF8[0] && buf[pos+1] == bom_UTF8[1] && buf[pos+2] == bom_UTF8[2] { - parser.encoding = yaml_UTF8_ENCODING - parser.raw_buffer_pos += 3 - parser.offset += 3 - } else { - parser.encoding = yaml_UTF8_ENCODING - } - return true -} - -// Update the raw buffer. -func yaml_parser_update_raw_buffer(parser *yaml_parser_t) bool { - size_read := 0 - - // Return if the raw buffer is full. - if parser.raw_buffer_pos == 0 && len(parser.raw_buffer) == cap(parser.raw_buffer) { - return true - } - - // Return on EOF. - if parser.eof { - return true - } - - // Move the remaining bytes in the raw buffer to the beginning. - if parser.raw_buffer_pos > 0 && parser.raw_buffer_pos < len(parser.raw_buffer) { - copy(parser.raw_buffer, parser.raw_buffer[parser.raw_buffer_pos:]) - } - parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)-parser.raw_buffer_pos] - parser.raw_buffer_pos = 0 - - // Call the read handler to fill the buffer. - size_read, err := parser.read_handler(parser, parser.raw_buffer[len(parser.raw_buffer):cap(parser.raw_buffer)]) - parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)+size_read] - if err == io.EOF { - parser.eof = true - } else if err != nil { - return yaml_parser_set_reader_error(parser, "input error: "+err.Error(), parser.offset, -1) - } - return true -} - -// Ensure that the buffer contains at least `length` characters. -// Return true on success, false on failure. -// -// The length is supposed to be significantly less that the buffer size. -func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool { - if parser.read_handler == nil { - panic("read handler must be set") - } - - // [Go] This function was changed to guarantee the requested length size at EOF. - // The fact we need to do this is pretty awful, but the description above implies - // for that to be the case, and there are tests - - // If the EOF flag is set and the raw buffer is empty, do nothing. - if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) { - // [Go] ACTUALLY! Read the documentation of this function above. - // This is just broken. To return true, we need to have the - // given length in the buffer. Not doing that means every single - // check that calls this function to make sure the buffer has a - // given length is Go) panicking; or C) accessing invalid memory. - //return true - } - - // Return if the buffer contains enough characters. - if parser.unread >= length { - return true - } - - // Determine the input encoding if it is not known yet. 
- if parser.encoding == yaml_ANY_ENCODING { - if !yaml_parser_determine_encoding(parser) { - return false - } - } - - // Move the unread characters to the beginning of the buffer. - buffer_len := len(parser.buffer) - if parser.buffer_pos > 0 && parser.buffer_pos < buffer_len { - copy(parser.buffer, parser.buffer[parser.buffer_pos:]) - buffer_len -= parser.buffer_pos - parser.buffer_pos = 0 - } else if parser.buffer_pos == buffer_len { - buffer_len = 0 - parser.buffer_pos = 0 - } - - // Open the whole buffer for writing, and cut it before returning. - parser.buffer = parser.buffer[:cap(parser.buffer)] - - // Fill the buffer until it has enough characters. - first := true - for parser.unread < length { - - // Fill the raw buffer if necessary. - if !first || parser.raw_buffer_pos == len(parser.raw_buffer) { - if !yaml_parser_update_raw_buffer(parser) { - parser.buffer = parser.buffer[:buffer_len] - return false - } - } - first = false - - // Decode the raw buffer. - inner: - for parser.raw_buffer_pos != len(parser.raw_buffer) { - var value rune - var width int - - raw_unread := len(parser.raw_buffer) - parser.raw_buffer_pos - - // Decode the next character. - switch parser.encoding { - case yaml_UTF8_ENCODING: - // Decode a UTF-8 character. Check RFC 3629 - // (http://www.ietf.org/rfc/rfc3629.txt) for more details. - // - // The following table (taken from the RFC) is used for - // decoding. - // - // Char. number range | UTF-8 octet sequence - // (hexadecimal) | (binary) - // --------------------+------------------------------------ - // 0000 0000-0000 007F | 0xxxxxxx - // 0000 0080-0000 07FF | 110xxxxx 10xxxxxx - // 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx - // 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx - // - // Additionally, the characters in the range 0xD800-0xDFFF - // are prohibited as they are reserved for use with UTF-16 - // surrogate pairs. - - // Determine the length of the UTF-8 sequence. - octet := parser.raw_buffer[parser.raw_buffer_pos] - switch { - case octet&0x80 == 0x00: - width = 1 - case octet&0xE0 == 0xC0: - width = 2 - case octet&0xF0 == 0xE0: - width = 3 - case octet&0xF8 == 0xF0: - width = 4 - default: - // The leading octet is invalid. - return yaml_parser_set_reader_error(parser, - "invalid leading UTF-8 octet", - parser.offset, int(octet)) - } - - // Check if the raw buffer contains an incomplete character. - if width > raw_unread { - if parser.eof { - return yaml_parser_set_reader_error(parser, - "incomplete UTF-8 octet sequence", - parser.offset, -1) - } - break inner - } - - // Decode the leading octet. - switch { - case octet&0x80 == 0x00: - value = rune(octet & 0x7F) - case octet&0xE0 == 0xC0: - value = rune(octet & 0x1F) - case octet&0xF0 == 0xE0: - value = rune(octet & 0x0F) - case octet&0xF8 == 0xF0: - value = rune(octet & 0x07) - default: - value = 0 - } - - // Check and decode the trailing octets. - for k := 1; k < width; k++ { - octet = parser.raw_buffer[parser.raw_buffer_pos+k] - - // Check if the octet is valid. - if (octet & 0xC0) != 0x80 { - return yaml_parser_set_reader_error(parser, - "invalid trailing UTF-8 octet", - parser.offset+k, int(octet)) - } - - // Decode the octet. - value = (value << 6) + rune(octet&0x3F) - } - - // Check the length of the sequence against the value. 
- switch { - case width == 1: - case width == 2 && value >= 0x80: - case width == 3 && value >= 0x800: - case width == 4 && value >= 0x10000: - default: - return yaml_parser_set_reader_error(parser, - "invalid length of a UTF-8 sequence", - parser.offset, -1) - } - - // Check the range of the value. - if value >= 0xD800 && value <= 0xDFFF || value > 0x10FFFF { - return yaml_parser_set_reader_error(parser, - "invalid Unicode character", - parser.offset, int(value)) - } - - case yaml_UTF16LE_ENCODING, yaml_UTF16BE_ENCODING: - var low, high int - if parser.encoding == yaml_UTF16LE_ENCODING { - low, high = 0, 1 - } else { - low, high = 1, 0 - } - - // The UTF-16 encoding is not as simple as one might - // naively think. Check RFC 2781 - // (http://www.ietf.org/rfc/rfc2781.txt). - // - // Normally, two subsequent bytes describe a Unicode - // character. However a special technique (called a - // surrogate pair) is used for specifying character - // values larger than 0xFFFF. - // - // A surrogate pair consists of two pseudo-characters: - // high surrogate area (0xD800-0xDBFF) - // low surrogate area (0xDC00-0xDFFF) - // - // The following formulas are used for decoding - // and encoding characters using surrogate pairs: - // - // U = U' + 0x10000 (0x01 00 00 <= U <= 0x10 FF FF) - // U' = yyyyyyyyyyxxxxxxxxxx (0 <= U' <= 0x0F FF FF) - // W1 = 110110yyyyyyyyyy - // W2 = 110111xxxxxxxxxx - // - // where U is the character value, W1 is the high surrogate - // area, W2 is the low surrogate area. - - // Check for incomplete UTF-16 character. - if raw_unread < 2 { - if parser.eof { - return yaml_parser_set_reader_error(parser, - "incomplete UTF-16 character", - parser.offset, -1) - } - break inner - } - - // Get the character. - value = rune(parser.raw_buffer[parser.raw_buffer_pos+low]) + - (rune(parser.raw_buffer[parser.raw_buffer_pos+high]) << 8) - - // Check for unexpected low surrogate area. - if value&0xFC00 == 0xDC00 { - return yaml_parser_set_reader_error(parser, - "unexpected low surrogate area", - parser.offset, int(value)) - } - - // Check for a high surrogate area. - if value&0xFC00 == 0xD800 { - width = 4 - - // Check for incomplete surrogate pair. - if raw_unread < 4 { - if parser.eof { - return yaml_parser_set_reader_error(parser, - "incomplete UTF-16 surrogate pair", - parser.offset, -1) - } - break inner - } - - // Get the next character. - value2 := rune(parser.raw_buffer[parser.raw_buffer_pos+low+2]) + - (rune(parser.raw_buffer[parser.raw_buffer_pos+high+2]) << 8) - - // Check for a low surrogate area. - if value2&0xFC00 != 0xDC00 { - return yaml_parser_set_reader_error(parser, - "expected low surrogate area", - parser.offset+2, int(value2)) - } - - // Generate the value of the surrogate pair. - value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF) - } else { - width = 2 - } - - default: - panic("impossible") - } - - // Check if the character is in the allowed range: - // #x9 | #xA | #xD | [#x20-#x7E] (8 bit) - // | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD] (16 bit) - // | [#x10000-#x10FFFF] (32 bit) - switch { - case value == 0x09: - case value == 0x0A: - case value == 0x0D: - case value >= 0x20 && value <= 0x7E: - case value == 0x85: - case value >= 0xA0 && value <= 0xD7FF: - case value >= 0xE000 && value <= 0xFFFD: - case value >= 0x10000 && value <= 0x10FFFF: - default: - return yaml_parser_set_reader_error(parser, - "control characters are not allowed", - parser.offset, int(value)) - } - - // Move the raw pointers. 
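// A standalone sketch (not the parser's code) of the surrogate-pair formula in
// the comment above: U = 0x10000 + (W1 & 0x3FF)<<10 + (W2 & 0x3FF). The result
// is cross-checked against unicode/utf16; combineSurrogates is an invented name.
package main

import (
	"fmt"
	"unicode/utf16"
)

// combineSurrogates maps a high surrogate (0xD800-0xDBFF) and a low surrogate
// (0xDC00-0xDFFF) back to the code point they encode together.
func combineSurrogates(hi, lo uint16) rune {
	return 0x10000 + (rune(hi&0x3FF) << 10) + rune(lo&0x3FF)
}

func main() {
	// U+1F600 is encoded in UTF-16 as the pair D83D DE00.
	fmt.Printf("%#x\n", combineSurrogates(0xD83D, 0xDE00)) // 0x1f600
	fmt.Printf("%#x\n", utf16.DecodeRune(0xD83D, 0xDE00))  // 0x1f600
}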
- parser.raw_buffer_pos += width - parser.offset += width - - // Finally put the character into the buffer. - if value <= 0x7F { - // 0000 0000-0000 007F . 0xxxxxxx - parser.buffer[buffer_len+0] = byte(value) - buffer_len += 1 - } else if value <= 0x7FF { - // 0000 0080-0000 07FF . 110xxxxx 10xxxxxx - parser.buffer[buffer_len+0] = byte(0xC0 + (value >> 6)) - parser.buffer[buffer_len+1] = byte(0x80 + (value & 0x3F)) - buffer_len += 2 - } else if value <= 0xFFFF { - // 0000 0800-0000 FFFF . 1110xxxx 10xxxxxx 10xxxxxx - parser.buffer[buffer_len+0] = byte(0xE0 + (value >> 12)) - parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 6) & 0x3F)) - parser.buffer[buffer_len+2] = byte(0x80 + (value & 0x3F)) - buffer_len += 3 - } else { - // 0001 0000-0010 FFFF . 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx - parser.buffer[buffer_len+0] = byte(0xF0 + (value >> 18)) - parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 12) & 0x3F)) - parser.buffer[buffer_len+2] = byte(0x80 + ((value >> 6) & 0x3F)) - parser.buffer[buffer_len+3] = byte(0x80 + (value & 0x3F)) - buffer_len += 4 - } - - parser.unread++ - } - - // On EOF, put NUL into the buffer and return. - if parser.eof { - parser.buffer[buffer_len] = 0 - buffer_len++ - parser.unread++ - break - } - } - // [Go] Read the documentation of this function above. To return true, - // we need to have the given length in the buffer. Not doing that means - // every single check that calls this function to make sure the buffer - // has a given length is Go) panicking; or C) accessing invalid memory. - // This happens here due to the EOF above breaking early. - for buffer_len < length { - parser.buffer[buffer_len] = 0 - buffer_len++ - } - parser.buffer = parser.buffer[:buffer_len] - return true -} diff --git a/vendor/gopkg.in/yaml.v2/resolve.go b/vendor/gopkg.in/yaml.v2/resolve.go index 4120e0c9..e69de29b 100644 --- a/vendor/gopkg.in/yaml.v2/resolve.go +++ b/vendor/gopkg.in/yaml.v2/resolve.go @@ -1,258 +0,0 @@ -package yaml - -import ( - "encoding/base64" - "math" - "regexp" - "strconv" - "strings" - "time" -) - -type resolveMapItem struct { - value interface{} - tag string -} - -var resolveTable = make([]byte, 256) -var resolveMap = make(map[string]resolveMapItem) - -func init() { - t := resolveTable - t[int('+')] = 'S' // Sign - t[int('-')] = 'S' - for _, c := range "0123456789" { - t[int(c)] = 'D' // Digit - } - for _, c := range "yYnNtTfFoO~" { - t[int(c)] = 'M' // In map - } - t[int('.')] = '.' 
// Float (potentially in map) - - var resolveMapList = []struct { - v interface{} - tag string - l []string - }{ - {true, yaml_BOOL_TAG, []string{"y", "Y", "yes", "Yes", "YES"}}, - {true, yaml_BOOL_TAG, []string{"true", "True", "TRUE"}}, - {true, yaml_BOOL_TAG, []string{"on", "On", "ON"}}, - {false, yaml_BOOL_TAG, []string{"n", "N", "no", "No", "NO"}}, - {false, yaml_BOOL_TAG, []string{"false", "False", "FALSE"}}, - {false, yaml_BOOL_TAG, []string{"off", "Off", "OFF"}}, - {nil, yaml_NULL_TAG, []string{"", "~", "null", "Null", "NULL"}}, - {math.NaN(), yaml_FLOAT_TAG, []string{".nan", ".NaN", ".NAN"}}, - {math.Inf(+1), yaml_FLOAT_TAG, []string{".inf", ".Inf", ".INF"}}, - {math.Inf(+1), yaml_FLOAT_TAG, []string{"+.inf", "+.Inf", "+.INF"}}, - {math.Inf(-1), yaml_FLOAT_TAG, []string{"-.inf", "-.Inf", "-.INF"}}, - {"<<", yaml_MERGE_TAG, []string{"<<"}}, - } - - m := resolveMap - for _, item := range resolveMapList { - for _, s := range item.l { - m[s] = resolveMapItem{item.v, item.tag} - } - } -} - -const longTagPrefix = "tag:yaml.org,2002:" - -func shortTag(tag string) string { - // TODO This can easily be made faster and produce less garbage. - if strings.HasPrefix(tag, longTagPrefix) { - return "!!" + tag[len(longTagPrefix):] - } - return tag -} - -func longTag(tag string) string { - if strings.HasPrefix(tag, "!!") { - return longTagPrefix + tag[2:] - } - return tag -} - -func resolvableTag(tag string) bool { - switch tag { - case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG, yaml_TIMESTAMP_TAG: - return true - } - return false -} - -var yamlStyleFloat = regexp.MustCompile(`^[-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?$`) - -func resolve(tag string, in string) (rtag string, out interface{}) { - if !resolvableTag(tag) { - return tag, in - } - - defer func() { - switch tag { - case "", rtag, yaml_STR_TAG, yaml_BINARY_TAG: - return - case yaml_FLOAT_TAG: - if rtag == yaml_INT_TAG { - switch v := out.(type) { - case int64: - rtag = yaml_FLOAT_TAG - out = float64(v) - return - case int: - rtag = yaml_FLOAT_TAG - out = float64(v) - return - } - } - } - failf("cannot decode %s `%s` as a %s", shortTag(rtag), in, shortTag(tag)) - }() - - // Any data is accepted as a !!str or !!binary. - // Otherwise, the prefix is enough of a hint about what it might be. - hint := byte('N') - if in != "" { - hint = resolveTable[in[0]] - } - if hint != 0 && tag != yaml_STR_TAG && tag != yaml_BINARY_TAG { - // Handle things we can lookup in a map. - if item, ok := resolveMap[in]; ok { - return item.tag, item.value - } - - // Base 60 floats are a bad idea, were dropped in YAML 1.2, and - // are purposefully unsupported here. They're still quoted on - // the way out for compatibility with other parser, though. - - switch hint { - case 'M': - // We've already checked the map above. - - case '.': - // Not in the map, so maybe a normal float. - floatv, err := strconv.ParseFloat(in, 64) - if err == nil { - return yaml_FLOAT_TAG, floatv - } - - case 'D', 'S': - // Int, float, or timestamp. - // Only try values as a timestamp if the value is unquoted or there's an explicit - // !!timestamp tag. 
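// A standalone sketch (not resolve.go itself) of the integer branch taken for
// 'D'/'S' hinted scalars below: underscores are stripped and strconv.ParseInt
// with base 0 handles the decimal, octal ("0...") and hex ("0x...") prefixes.
// resolvePlainInt is an invented helper name.
package main

import (
	"fmt"
	"strconv"
	"strings"
)

func resolvePlainInt(in string) (int64, bool) {
	plain := strings.Replace(in, "_", "", -1)
	if v, err := strconv.ParseInt(plain, 0, 64); err == nil {
		return v, true
	}
	return 0, false
}

func main() {
	fmt.Println(resolvePlainInt("1_000")) // 1000 true
	fmt.Println(resolvePlainInt("0x1A"))  // 26 true
	fmt.Println(resolvePlainInt("12.5"))  // 0 false (left to the float path)
}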
- if tag == "" || tag == yaml_TIMESTAMP_TAG { - t, ok := parseTimestamp(in) - if ok { - return yaml_TIMESTAMP_TAG, t - } - } - - plain := strings.Replace(in, "_", "", -1) - intv, err := strconv.ParseInt(plain, 0, 64) - if err == nil { - if intv == int64(int(intv)) { - return yaml_INT_TAG, int(intv) - } else { - return yaml_INT_TAG, intv - } - } - uintv, err := strconv.ParseUint(plain, 0, 64) - if err == nil { - return yaml_INT_TAG, uintv - } - if yamlStyleFloat.MatchString(plain) { - floatv, err := strconv.ParseFloat(plain, 64) - if err == nil { - return yaml_FLOAT_TAG, floatv - } - } - if strings.HasPrefix(plain, "0b") { - intv, err := strconv.ParseInt(plain[2:], 2, 64) - if err == nil { - if intv == int64(int(intv)) { - return yaml_INT_TAG, int(intv) - } else { - return yaml_INT_TAG, intv - } - } - uintv, err := strconv.ParseUint(plain[2:], 2, 64) - if err == nil { - return yaml_INT_TAG, uintv - } - } else if strings.HasPrefix(plain, "-0b") { - intv, err := strconv.ParseInt("-" + plain[3:], 2, 64) - if err == nil { - if true || intv == int64(int(intv)) { - return yaml_INT_TAG, int(intv) - } else { - return yaml_INT_TAG, intv - } - } - } - default: - panic("resolveTable item not yet handled: " + string(rune(hint)) + " (with " + in + ")") - } - } - return yaml_STR_TAG, in -} - -// encodeBase64 encodes s as base64 that is broken up into multiple lines -// as appropriate for the resulting length. -func encodeBase64(s string) string { - const lineLen = 70 - encLen := base64.StdEncoding.EncodedLen(len(s)) - lines := encLen/lineLen + 1 - buf := make([]byte, encLen*2+lines) - in := buf[0:encLen] - out := buf[encLen:] - base64.StdEncoding.Encode(in, []byte(s)) - k := 0 - for i := 0; i < len(in); i += lineLen { - j := i + lineLen - if j > len(in) { - j = len(in) - } - k += copy(out[k:], in[i:j]) - if lines > 1 { - out[k] = '\n' - k++ - } - } - return string(out[:k]) -} - -// This is a subset of the formats allowed by the regular expression -// defined at http://yaml.org/type/timestamp.html. -var allowedTimestampFormats = []string{ - "2006-1-2T15:4:5.999999999Z07:00", // RCF3339Nano with short date fields. - "2006-1-2t15:4:5.999999999Z07:00", // RFC3339Nano with short date fields and lower-case "t". - "2006-1-2 15:4:5.999999999", // space separated with no time zone - "2006-1-2", // date only - // Notable exception: time.Parse cannot handle: "2001-12-14 21:59:43.10 -5" - // from the set of examples. -} - -// parseTimestamp parses s as a timestamp string and -// returns the timestamp and reports whether it succeeded. -// Timestamp formats are defined at http://yaml.org/type/timestamp.html -func parseTimestamp(s string) (time.Time, bool) { - // TODO write code to check all the formats supported by - // http://yaml.org/type/timestamp.html instead of using time.Parse. - - // Quick check: all date formats start with YYYY-. 
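// A standalone sketch (not resolve.go itself) of the parseTimestamp approach
// used in this file: the scalar is tried against a short list of time.Parse
// layouts and the first match wins. tryTimestamp and this particular layout
// list are illustrative only.
package main

import (
	"fmt"
	"time"
)

var layouts = []string{
	"2006-1-2T15:4:5.999999999Z07:00", // RFC3339Nano with short date fields
	"2006-1-2 15:4:5.999999999",       // space separated, no time zone
	"2006-1-2",                        // date only
}

func tryTimestamp(s string) (time.Time, bool) {
	for _, layout := range layouts {
		if t, err := time.Parse(layout, s); err == nil {
			return t, true
		}
	}
	return time.Time{}, false
}

func main() {
	t, ok := tryTimestamp("2015-02-24T18:19:39Z")
	fmt.Println(ok, t.Year()) // true 2015
}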
- i := 0 - for ; i < len(s); i++ { - if c := s[i]; c < '0' || c > '9' { - break - } - } - if i != 4 || i == len(s) || s[i] != '-' { - return time.Time{}, false - } - for _, format := range allowedTimestampFormats { - if t, err := time.Parse(format, s); err == nil { - return t, true - } - } - return time.Time{}, false -} diff --git a/vendor/gopkg.in/yaml.v2/scannerc.go b/vendor/gopkg.in/yaml.v2/scannerc.go index 0b9bb603..e69de29b 100644 --- a/vendor/gopkg.in/yaml.v2/scannerc.go +++ b/vendor/gopkg.in/yaml.v2/scannerc.go @@ -1,2711 +0,0 @@ -package yaml - -import ( - "bytes" - "fmt" -) - -// Introduction -// ************ -// -// The following notes assume that you are familiar with the YAML specification -// (http://yaml.org/spec/1.2/spec.html). We mostly follow it, although in -// some cases we are less restrictive that it requires. -// -// The process of transforming a YAML stream into a sequence of events is -// divided on two steps: Scanning and Parsing. -// -// The Scanner transforms the input stream into a sequence of tokens, while the -// parser transform the sequence of tokens produced by the Scanner into a -// sequence of parsing events. -// -// The Scanner is rather clever and complicated. The Parser, on the contrary, -// is a straightforward implementation of a recursive-descendant parser (or, -// LL(1) parser, as it is usually called). -// -// Actually there are two issues of Scanning that might be called "clever", the -// rest is quite straightforward. The issues are "block collection start" and -// "simple keys". Both issues are explained below in details. -// -// Here the Scanning step is explained and implemented. We start with the list -// of all the tokens produced by the Scanner together with short descriptions. -// -// Now, tokens: -// -// STREAM-START(encoding) # The stream start. -// STREAM-END # The stream end. -// VERSION-DIRECTIVE(major,minor) # The '%YAML' directive. -// TAG-DIRECTIVE(handle,prefix) # The '%TAG' directive. -// DOCUMENT-START # '---' -// DOCUMENT-END # '...' -// BLOCK-SEQUENCE-START # Indentation increase denoting a block -// BLOCK-MAPPING-START # sequence or a block mapping. -// BLOCK-END # Indentation decrease. -// FLOW-SEQUENCE-START # '[' -// FLOW-SEQUENCE-END # ']' -// BLOCK-SEQUENCE-START # '{' -// BLOCK-SEQUENCE-END # '}' -// BLOCK-ENTRY # '-' -// FLOW-ENTRY # ',' -// KEY # '?' or nothing (simple keys). -// VALUE # ':' -// ALIAS(anchor) # '*anchor' -// ANCHOR(anchor) # '&anchor' -// TAG(handle,suffix) # '!handle!suffix' -// SCALAR(value,style) # A scalar. -// -// The following two tokens are "virtual" tokens denoting the beginning and the -// end of the stream: -// -// STREAM-START(encoding) -// STREAM-END -// -// We pass the information about the input stream encoding with the -// STREAM-START token. -// -// The next two tokens are responsible for tags: -// -// VERSION-DIRECTIVE(major,minor) -// TAG-DIRECTIVE(handle,prefix) -// -// Example: -// -// %YAML 1.1 -// %TAG ! !foo -// %TAG !yaml! tag:yaml.org,2002: -// --- -// -// The correspoding sequence of tokens: -// -// STREAM-START(utf-8) -// VERSION-DIRECTIVE(1,1) -// TAG-DIRECTIVE("!","!foo") -// TAG-DIRECTIVE("!yaml","tag:yaml.org,2002:") -// DOCUMENT-START -// STREAM-END -// -// Note that the VERSION-DIRECTIVE and TAG-DIRECTIVE tokens occupy a whole -// line. -// -// The document start and end indicators are represented by: -// -// DOCUMENT-START -// DOCUMENT-END -// -// Note that if a YAML stream contains an implicit document (without '---' -// and '...' 
indicators), no DOCUMENT-START and DOCUMENT-END tokens will be -// produced. -// -// In the following examples, we present whole documents together with the -// produced tokens. -// -// 1. An implicit document: -// -// 'a scalar' -// -// Tokens: -// -// STREAM-START(utf-8) -// SCALAR("a scalar",single-quoted) -// STREAM-END -// -// 2. An explicit document: -// -// --- -// 'a scalar' -// ... -// -// Tokens: -// -// STREAM-START(utf-8) -// DOCUMENT-START -// SCALAR("a scalar",single-quoted) -// DOCUMENT-END -// STREAM-END -// -// 3. Several documents in a stream: -// -// 'a scalar' -// --- -// 'another scalar' -// --- -// 'yet another scalar' -// -// Tokens: -// -// STREAM-START(utf-8) -// SCALAR("a scalar",single-quoted) -// DOCUMENT-START -// SCALAR("another scalar",single-quoted) -// DOCUMENT-START -// SCALAR("yet another scalar",single-quoted) -// STREAM-END -// -// We have already introduced the SCALAR token above. The following tokens are -// used to describe aliases, anchors, tag, and scalars: -// -// ALIAS(anchor) -// ANCHOR(anchor) -// TAG(handle,suffix) -// SCALAR(value,style) -// -// The following series of examples illustrate the usage of these tokens: -// -// 1. A recursive sequence: -// -// &A [ *A ] -// -// Tokens: -// -// STREAM-START(utf-8) -// ANCHOR("A") -// FLOW-SEQUENCE-START -// ALIAS("A") -// FLOW-SEQUENCE-END -// STREAM-END -// -// 2. A tagged scalar: -// -// !!float "3.14" # A good approximation. -// -// Tokens: -// -// STREAM-START(utf-8) -// TAG("!!","float") -// SCALAR("3.14",double-quoted) -// STREAM-END -// -// 3. Various scalar styles: -// -// --- # Implicit empty plain scalars do not produce tokens. -// --- a plain scalar -// --- 'a single-quoted scalar' -// --- "a double-quoted scalar" -// --- |- -// a literal scalar -// --- >- -// a folded -// scalar -// -// Tokens: -// -// STREAM-START(utf-8) -// DOCUMENT-START -// DOCUMENT-START -// SCALAR("a plain scalar",plain) -// DOCUMENT-START -// SCALAR("a single-quoted scalar",single-quoted) -// DOCUMENT-START -// SCALAR("a double-quoted scalar",double-quoted) -// DOCUMENT-START -// SCALAR("a literal scalar",literal) -// DOCUMENT-START -// SCALAR("a folded scalar",folded) -// STREAM-END -// -// Now it's time to review collection-related tokens. We will start with -// flow collections: -// -// FLOW-SEQUENCE-START -// FLOW-SEQUENCE-END -// FLOW-MAPPING-START -// FLOW-MAPPING-END -// FLOW-ENTRY -// KEY -// VALUE -// -// The tokens FLOW-SEQUENCE-START, FLOW-SEQUENCE-END, FLOW-MAPPING-START, and -// FLOW-MAPPING-END represent the indicators '[', ']', '{', and '}' -// correspondingly. FLOW-ENTRY represent the ',' indicator. Finally the -// indicators '?' and ':', which are used for denoting mapping keys and values, -// are represented by the KEY and VALUE tokens. -// -// The following examples show flow collections: -// -// 1. A flow sequence: -// -// [item 1, item 2, item 3] -// -// Tokens: -// -// STREAM-START(utf-8) -// FLOW-SEQUENCE-START -// SCALAR("item 1",plain) -// FLOW-ENTRY -// SCALAR("item 2",plain) -// FLOW-ENTRY -// SCALAR("item 3",plain) -// FLOW-SEQUENCE-END -// STREAM-END -// -// 2. A flow mapping: -// -// { -// a simple key: a value, # Note that the KEY token is produced. -// ? 
a complex key: another value, -// } -// -// Tokens: -// -// STREAM-START(utf-8) -// FLOW-MAPPING-START -// KEY -// SCALAR("a simple key",plain) -// VALUE -// SCALAR("a value",plain) -// FLOW-ENTRY -// KEY -// SCALAR("a complex key",plain) -// VALUE -// SCALAR("another value",plain) -// FLOW-ENTRY -// FLOW-MAPPING-END -// STREAM-END -// -// A simple key is a key which is not denoted by the '?' indicator. Note that -// the Scanner still produce the KEY token whenever it encounters a simple key. -// -// For scanning block collections, the following tokens are used (note that we -// repeat KEY and VALUE here): -// -// BLOCK-SEQUENCE-START -// BLOCK-MAPPING-START -// BLOCK-END -// BLOCK-ENTRY -// KEY -// VALUE -// -// The tokens BLOCK-SEQUENCE-START and BLOCK-MAPPING-START denote indentation -// increase that precedes a block collection (cf. the INDENT token in Python). -// The token BLOCK-END denote indentation decrease that ends a block collection -// (cf. the DEDENT token in Python). However YAML has some syntax pecularities -// that makes detections of these tokens more complex. -// -// The tokens BLOCK-ENTRY, KEY, and VALUE are used to represent the indicators -// '-', '?', and ':' correspondingly. -// -// The following examples show how the tokens BLOCK-SEQUENCE-START, -// BLOCK-MAPPING-START, and BLOCK-END are emitted by the Scanner: -// -// 1. Block sequences: -// -// - item 1 -// - item 2 -// - -// - item 3.1 -// - item 3.2 -// - -// key 1: value 1 -// key 2: value 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-ENTRY -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 3.1",plain) -// BLOCK-ENTRY -// SCALAR("item 3.2",plain) -// BLOCK-END -// BLOCK-ENTRY -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// 2. Block mappings: -// -// a simple key: a value # The KEY token is produced here. -// ? a complex key -// : another value -// a mapping: -// key 1: value 1 -// key 2: value 2 -// a sequence: -// - item 1 -// - item 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-MAPPING-START -// KEY -// SCALAR("a simple key",plain) -// VALUE -// SCALAR("a value",plain) -// KEY -// SCALAR("a complex key",plain) -// VALUE -// SCALAR("another value",plain) -// KEY -// SCALAR("a mapping",plain) -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// KEY -// SCALAR("a sequence",plain) -// VALUE -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// YAML does not always require to start a new block collection from a new -// line. If the current line contains only '-', '?', and ':' indicators, a new -// block collection may start at the current line. The following examples -// illustrate this case: -// -// 1. Collections in a sequence: -// -// - - item 1 -// - item 2 -// - key 1: value 1 -// key 2: value 2 -// - ? 
complex key -// : complex value -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// BLOCK-ENTRY -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// BLOCK-ENTRY -// BLOCK-MAPPING-START -// KEY -// SCALAR("complex key") -// VALUE -// SCALAR("complex value") -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// 2. Collections in a mapping: -// -// ? a sequence -// : - item 1 -// - item 2 -// ? a mapping -// : key 1: value 1 -// key 2: value 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-MAPPING-START -// KEY -// SCALAR("a sequence",plain) -// VALUE -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// KEY -// SCALAR("a mapping",plain) -// VALUE -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// YAML also permits non-indented sequences if they are included into a block -// mapping. In this case, the token BLOCK-SEQUENCE-START is not produced: -// -// key: -// - item 1 # BLOCK-SEQUENCE-START is NOT produced here. -// - item 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-MAPPING-START -// KEY -// SCALAR("key",plain) -// VALUE -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// - -// Ensure that the buffer contains the required number of characters. -// Return true on success, false on failure (reader error or memory error). -func cache(parser *yaml_parser_t, length int) bool { - // [Go] This was inlined: !cache(A, B) -> unread < B && !update(A, B) - return parser.unread >= length || yaml_parser_update_buffer(parser, length) -} - -// Advance the buffer pointer. -func skip(parser *yaml_parser_t) { - parser.mark.index++ - parser.mark.column++ - parser.unread-- - parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) -} - -func skip_line(parser *yaml_parser_t) { - if is_crlf(parser.buffer, parser.buffer_pos) { - parser.mark.index += 2 - parser.mark.column = 0 - parser.mark.line++ - parser.unread -= 2 - parser.buffer_pos += 2 - } else if is_break(parser.buffer, parser.buffer_pos) { - parser.mark.index++ - parser.mark.column = 0 - parser.mark.line++ - parser.unread-- - parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) - } -} - -// Copy a character to a string buffer and advance pointers. -func read(parser *yaml_parser_t, s []byte) []byte { - w := width(parser.buffer[parser.buffer_pos]) - if w == 0 { - panic("invalid character sequence") - } - if len(s) == 0 { - s = make([]byte, 0, 32) - } - if w == 1 && len(s)+w <= cap(s) { - s = s[:len(s)+1] - s[len(s)-1] = parser.buffer[parser.buffer_pos] - parser.buffer_pos++ - } else { - s = append(s, parser.buffer[parser.buffer_pos:parser.buffer_pos+w]...) - parser.buffer_pos += w - } - parser.mark.index++ - parser.mark.column++ - parser.unread-- - return s -} - -// Copy a line break character to a string buffer and advance pointers. -func read_line(parser *yaml_parser_t, s []byte) []byte { - buf := parser.buffer - pos := parser.buffer_pos - switch { - case buf[pos] == '\r' && buf[pos+1] == '\n': - // CR LF . 
LF - s = append(s, '\n') - parser.buffer_pos += 2 - parser.mark.index++ - parser.unread-- - case buf[pos] == '\r' || buf[pos] == '\n': - // CR|LF . LF - s = append(s, '\n') - parser.buffer_pos += 1 - case buf[pos] == '\xC2' && buf[pos+1] == '\x85': - // NEL . LF - s = append(s, '\n') - parser.buffer_pos += 2 - case buf[pos] == '\xE2' && buf[pos+1] == '\x80' && (buf[pos+2] == '\xA8' || buf[pos+2] == '\xA9'): - // LS|PS . LS|PS - s = append(s, buf[parser.buffer_pos:pos+3]...) - parser.buffer_pos += 3 - default: - return s - } - parser.mark.index++ - parser.mark.column = 0 - parser.mark.line++ - parser.unread-- - return s -} - -// Get the next token. -func yaml_parser_scan(parser *yaml_parser_t, token *yaml_token_t) bool { - // Erase the token object. - *token = yaml_token_t{} // [Go] Is this necessary? - - // No tokens after STREAM-END or error. - if parser.stream_end_produced || parser.error != yaml_NO_ERROR { - return true - } - - // Ensure that the tokens queue contains enough tokens. - if !parser.token_available { - if !yaml_parser_fetch_more_tokens(parser) { - return false - } - } - - // Fetch the next token from the queue. - *token = parser.tokens[parser.tokens_head] - parser.tokens_head++ - parser.tokens_parsed++ - parser.token_available = false - - if token.typ == yaml_STREAM_END_TOKEN { - parser.stream_end_produced = true - } - return true -} - -// Set the scanner error and return false. -func yaml_parser_set_scanner_error(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string) bool { - parser.error = yaml_SCANNER_ERROR - parser.context = context - parser.context_mark = context_mark - parser.problem = problem - parser.problem_mark = parser.mark - return false -} - -func yaml_parser_set_scanner_tag_error(parser *yaml_parser_t, directive bool, context_mark yaml_mark_t, problem string) bool { - context := "while parsing a tag" - if directive { - context = "while parsing a %TAG directive" - } - return yaml_parser_set_scanner_error(parser, context, context_mark, problem) -} - -func trace(args ...interface{}) func() { - pargs := append([]interface{}{"+++"}, args...) - fmt.Println(pargs...) - pargs = append([]interface{}{"---"}, args...) - return func() { fmt.Println(pargs...) } -} - -// Ensure that the tokens queue contains at least one token which can be -// returned to the Parser. -func yaml_parser_fetch_more_tokens(parser *yaml_parser_t) bool { - // While we need more tokens to fetch, do it. - for { - if parser.tokens_head != len(parser.tokens) { - // If queue is non-empty, check if any potential simple key may - // occupy the head position. - head_tok_idx, ok := parser.simple_keys_by_tok[parser.tokens_parsed] - if !ok { - break - } else if valid, ok := yaml_simple_key_is_valid(parser, &parser.simple_keys[head_tok_idx]); !ok { - return false - } else if !valid { - break - } - } - // Fetch the next token. - if !yaml_parser_fetch_next_token(parser) { - return false - } - } - - parser.token_available = true - return true -} - -// The dispatcher for token fetchers. -func yaml_parser_fetch_next_token(parser *yaml_parser_t) bool { - // Ensure that the buffer is initialized. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // Check if we just started scanning. Fetch STREAM-START then. - if !parser.stream_start_produced { - return yaml_parser_fetch_stream_start(parser) - } - - // Eat whitespaces and comments until we reach the next token. 
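// A standalone sketch (not the scanner itself) of the dispatch idea used by
// the fetcher below: peek at the next character and pick a token kind from it;
// only a handful of characters can start anything other than a plain scalar.
// The classify helper and its return strings are invented.
package main

import "fmt"

func classify(c byte) string {
	switch c {
	case '[':
		return "FLOW-SEQUENCE-START"
	case ']':
		return "FLOW-SEQUENCE-END"
	case '{':
		return "FLOW-MAPPING-START"
	case '}':
		return "FLOW-MAPPING-END"
	case ',':
		return "FLOW-ENTRY"
	case '*':
		return "ALIAS"
	case '&':
		return "ANCHOR"
	case '!':
		return "TAG"
	case '\'', '"':
		return "SCALAR (flow, quoted)"
	case '|', '>':
		return "SCALAR (block)"
	default:
		return "SCALAR (plain) or context-dependent"
	}
}

func main() {
	fmt.Println(classify('['), "/", classify('&'), "/", classify('x'))
}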
- if !yaml_parser_scan_to_next_token(parser) { - return false - } - - // Check the indentation level against the current column. - if !yaml_parser_unroll_indent(parser, parser.mark.column) { - return false - } - - // Ensure that the buffer contains at least 4 characters. 4 is the length - // of the longest indicators ('--- ' and '... '). - if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { - return false - } - - // Is it the end of the stream? - if is_z(parser.buffer, parser.buffer_pos) { - return yaml_parser_fetch_stream_end(parser) - } - - // Is it a directive? - if parser.mark.column == 0 && parser.buffer[parser.buffer_pos] == '%' { - return yaml_parser_fetch_directive(parser) - } - - buf := parser.buffer - pos := parser.buffer_pos - - // Is it the document start indicator? - if parser.mark.column == 0 && buf[pos] == '-' && buf[pos+1] == '-' && buf[pos+2] == '-' && is_blankz(buf, pos+3) { - return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_START_TOKEN) - } - - // Is it the document end indicator? - if parser.mark.column == 0 && buf[pos] == '.' && buf[pos+1] == '.' && buf[pos+2] == '.' && is_blankz(buf, pos+3) { - return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_END_TOKEN) - } - - // Is it the flow sequence start indicator? - if buf[pos] == '[' { - return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_SEQUENCE_START_TOKEN) - } - - // Is it the flow mapping start indicator? - if parser.buffer[parser.buffer_pos] == '{' { - return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_MAPPING_START_TOKEN) - } - - // Is it the flow sequence end indicator? - if parser.buffer[parser.buffer_pos] == ']' { - return yaml_parser_fetch_flow_collection_end(parser, - yaml_FLOW_SEQUENCE_END_TOKEN) - } - - // Is it the flow mapping end indicator? - if parser.buffer[parser.buffer_pos] == '}' { - return yaml_parser_fetch_flow_collection_end(parser, - yaml_FLOW_MAPPING_END_TOKEN) - } - - // Is it the flow entry indicator? - if parser.buffer[parser.buffer_pos] == ',' { - return yaml_parser_fetch_flow_entry(parser) - } - - // Is it the block entry indicator? - if parser.buffer[parser.buffer_pos] == '-' && is_blankz(parser.buffer, parser.buffer_pos+1) { - return yaml_parser_fetch_block_entry(parser) - } - - // Is it the key indicator? - if parser.buffer[parser.buffer_pos] == '?' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) { - return yaml_parser_fetch_key(parser) - } - - // Is it the value indicator? - if parser.buffer[parser.buffer_pos] == ':' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) { - return yaml_parser_fetch_value(parser) - } - - // Is it an alias? - if parser.buffer[parser.buffer_pos] == '*' { - return yaml_parser_fetch_anchor(parser, yaml_ALIAS_TOKEN) - } - - // Is it an anchor? - if parser.buffer[parser.buffer_pos] == '&' { - return yaml_parser_fetch_anchor(parser, yaml_ANCHOR_TOKEN) - } - - // Is it a tag? - if parser.buffer[parser.buffer_pos] == '!' { - return yaml_parser_fetch_tag(parser) - } - - // Is it a literal scalar? - if parser.buffer[parser.buffer_pos] == '|' && parser.flow_level == 0 { - return yaml_parser_fetch_block_scalar(parser, true) - } - - // Is it a folded scalar? - if parser.buffer[parser.buffer_pos] == '>' && parser.flow_level == 0 { - return yaml_parser_fetch_block_scalar(parser, false) - } - - // Is it a single-quoted scalar? 
- if parser.buffer[parser.buffer_pos] == '\'' { - return yaml_parser_fetch_flow_scalar(parser, true) - } - - // Is it a double-quoted scalar? - if parser.buffer[parser.buffer_pos] == '"' { - return yaml_parser_fetch_flow_scalar(parser, false) - } - - // Is it a plain scalar? - // - // A plain scalar may start with any non-blank characters except - // - // '-', '?', ':', ',', '[', ']', '{', '}', - // '#', '&', '*', '!', '|', '>', '\'', '\"', - // '%', '@', '`'. - // - // In the block context (and, for the '-' indicator, in the flow context - // too), it may also start with the characters - // - // '-', '?', ':' - // - // if it is followed by a non-space character. - // - // The last rule is more restrictive than the specification requires. - // [Go] Make this logic more reasonable. - //switch parser.buffer[parser.buffer_pos] { - //case '-', '?', ':', ',', '?', '-', ',', ':', ']', '[', '}', '{', '&', '#', '!', '*', '>', '|', '"', '\'', '@', '%', '-', '`': - //} - if !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '-' || - parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':' || - parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '[' || - parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' || - parser.buffer[parser.buffer_pos] == '}' || parser.buffer[parser.buffer_pos] == '#' || - parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '*' || - parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '|' || - parser.buffer[parser.buffer_pos] == '>' || parser.buffer[parser.buffer_pos] == '\'' || - parser.buffer[parser.buffer_pos] == '"' || parser.buffer[parser.buffer_pos] == '%' || - parser.buffer[parser.buffer_pos] == '@' || parser.buffer[parser.buffer_pos] == '`') || - (parser.buffer[parser.buffer_pos] == '-' && !is_blank(parser.buffer, parser.buffer_pos+1)) || - (parser.flow_level == 0 && - (parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':') && - !is_blankz(parser.buffer, parser.buffer_pos+1)) { - return yaml_parser_fetch_plain_scalar(parser) - } - - // If we don't determine the token type so far, it is an error. - return yaml_parser_set_scanner_error(parser, - "while scanning for the next token", parser.mark, - "found character that cannot start any token") -} - -func yaml_simple_key_is_valid(parser *yaml_parser_t, simple_key *yaml_simple_key_t) (valid, ok bool) { - if !simple_key.possible { - return false, true - } - - // The 1.2 specification says: - // - // "If the ? indicator is omitted, parsing needs to see past the - // implicit key to recognize it as such. To limit the amount of - // lookahead required, the “:” indicator must appear at most 1024 - // Unicode characters beyond the start of the key. In addition, the key - // is restricted to a single line." - // - if simple_key.mark.line < parser.mark.line || simple_key.mark.index+1024 < parser.mark.index { - // Check if the potential simple key to be removed is required. - if simple_key.required { - return false, yaml_parser_set_scanner_error(parser, - "while scanning a simple key", simple_key.mark, - "could not find expected ':'") - } - simple_key.possible = false - return false, true - } - return true, true -} - -// Check if a simple key may start at the current position and add it if -// needed. 
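// A standalone sketch (not the scanner itself) of the simple-key rule quoted
// above: a pending simple key stays valid only while the ':' can still appear
// on the same line and within 1024 characters of where the key started. The
// mark and simpleKey types here are invented.
package main

import "fmt"

type mark struct{ index, line int }

type simpleKey struct {
	possible bool
	start    mark
}

func stillValid(k simpleKey, cur mark) bool {
	return k.possible && k.start.line == cur.line && cur.index <= k.start.index+1024
}

func main() {
	k := simpleKey{possible: true, start: mark{index: 10, line: 3}}
	fmt.Println(stillValid(k, mark{index: 500, line: 3}))  // true
	fmt.Println(stillValid(k, mark{index: 2000, line: 3})) // false: ':' too far away
	fmt.Println(stillValid(k, mark{index: 20, line: 4}))   // false: key may not span lines
}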
-func yaml_parser_save_simple_key(parser *yaml_parser_t) bool { - // A simple key is required at the current position if the scanner is in - // the block context and the current column coincides with the indentation - // level. - - required := parser.flow_level == 0 && parser.indent == parser.mark.column - - // - // If the current position may start a simple key, save it. - // - if parser.simple_key_allowed { - simple_key := yaml_simple_key_t{ - possible: true, - required: required, - token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head), - mark: parser.mark, - } - - if !yaml_parser_remove_simple_key(parser) { - return false - } - parser.simple_keys[len(parser.simple_keys)-1] = simple_key - parser.simple_keys_by_tok[simple_key.token_number] = len(parser.simple_keys) - 1 - } - return true -} - -// Remove a potential simple key at the current flow level. -func yaml_parser_remove_simple_key(parser *yaml_parser_t) bool { - i := len(parser.simple_keys) - 1 - if parser.simple_keys[i].possible { - // If the key is required, it is an error. - if parser.simple_keys[i].required { - return yaml_parser_set_scanner_error(parser, - "while scanning a simple key", parser.simple_keys[i].mark, - "could not find expected ':'") - } - // Remove the key from the stack. - parser.simple_keys[i].possible = false - delete(parser.simple_keys_by_tok, parser.simple_keys[i].token_number) - } - return true -} - -// max_flow_level limits the flow_level -const max_flow_level = 10000 - -// Increase the flow level and resize the simple key list if needed. -func yaml_parser_increase_flow_level(parser *yaml_parser_t) bool { - // Reset the simple key on the next level. - parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{ - possible: false, - required: false, - token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head), - mark: parser.mark, - }) - - // Increase the flow level. - parser.flow_level++ - if parser.flow_level > max_flow_level { - return yaml_parser_set_scanner_error(parser, - "while increasing flow level", parser.simple_keys[len(parser.simple_keys)-1].mark, - fmt.Sprintf("exceeded max depth of %d", max_flow_level)) - } - return true -} - -// Decrease the flow level. -func yaml_parser_decrease_flow_level(parser *yaml_parser_t) bool { - if parser.flow_level > 0 { - parser.flow_level-- - last := len(parser.simple_keys) - 1 - delete(parser.simple_keys_by_tok, parser.simple_keys[last].token_number) - parser.simple_keys = parser.simple_keys[:last] - } - return true -} - -// max_indents limits the indents stack size -const max_indents = 10000 - -// Push the current indentation level to the stack and set the new level -// the current column is greater than the indentation level. In this case, -// append or insert the specified token into the token queue. -func yaml_parser_roll_indent(parser *yaml_parser_t, column, number int, typ yaml_token_type_t, mark yaml_mark_t) bool { - // In the flow context, do nothing. - if parser.flow_level > 0 { - return true - } - - if parser.indent < column { - // Push the current indentation level to the stack and set the new - // indentation level. - parser.indents = append(parser.indents, parser.indent) - parser.indent = column - if len(parser.indents) > max_indents { - return yaml_parser_set_scanner_error(parser, - "while increasing indent level", parser.simple_keys[len(parser.simple_keys)-1].mark, - fmt.Sprintf("exceeded max depth of %d", max_indents)) - } - - // Create a token and insert it into the queue. 
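// A standalone sketch (not the scanner itself) of the indentation stack that
// yaml_parser_roll_indent and yaml_parser_unroll_indent maintain: a larger
// column pushes the previous indent (one BLOCK-*-START), a smaller column pops
// levels (one BLOCK-END per pop). The indenter type is invented.
package main

import "fmt"

type indenter struct {
	indent  int
	indents []int
}

func (p *indenter) roll(column int) bool {
	if p.indent < column {
		p.indents = append(p.indents, p.indent)
		p.indent = column
		return true // a BLOCK-SEQUENCE-START or BLOCK-MAPPING-START would be emitted here
	}
	return false
}

func (p *indenter) unroll(column int) int {
	ends := 0
	for p.indent > column {
		p.indent = p.indents[len(p.indents)-1]
		p.indents = p.indents[:len(p.indents)-1]
		ends++ // one BLOCK-END per popped level
	}
	return ends
}

func main() {
	p := &indenter{indent: -1}
	p.roll(0) // e.g. a top-level block mapping
	p.roll(2) // e.g. a nested block sequence
	fmt.Println(p.unroll(-1)) // 2 BLOCK-END tokens when the document ends
}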
- token := yaml_token_t{ - typ: typ, - start_mark: mark, - end_mark: mark, - } - if number > -1 { - number -= parser.tokens_parsed - } - yaml_insert_token(parser, number, &token) - } - return true -} - -// Pop indentation levels from the indents stack until the current level -// becomes less or equal to the column. For each indentation level, append -// the BLOCK-END token. -func yaml_parser_unroll_indent(parser *yaml_parser_t, column int) bool { - // In the flow context, do nothing. - if parser.flow_level > 0 { - return true - } - - // Loop through the indentation levels in the stack. - for parser.indent > column { - // Create a token and append it to the queue. - token := yaml_token_t{ - typ: yaml_BLOCK_END_TOKEN, - start_mark: parser.mark, - end_mark: parser.mark, - } - yaml_insert_token(parser, -1, &token) - - // Pop the indentation level. - parser.indent = parser.indents[len(parser.indents)-1] - parser.indents = parser.indents[:len(parser.indents)-1] - } - return true -} - -// Initialize the scanner and produce the STREAM-START token. -func yaml_parser_fetch_stream_start(parser *yaml_parser_t) bool { - - // Set the initial indentation. - parser.indent = -1 - - // Initialize the simple key stack. - parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{}) - - parser.simple_keys_by_tok = make(map[int]int) - - // A simple key is allowed at the beginning of the stream. - parser.simple_key_allowed = true - - // We have started. - parser.stream_start_produced = true - - // Create the STREAM-START token and append it to the queue. - token := yaml_token_t{ - typ: yaml_STREAM_START_TOKEN, - start_mark: parser.mark, - end_mark: parser.mark, - encoding: parser.encoding, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the STREAM-END token and shut down the scanner. -func yaml_parser_fetch_stream_end(parser *yaml_parser_t) bool { - - // Force new line. - if parser.mark.column != 0 { - parser.mark.column = 0 - parser.mark.line++ - } - - // Reset the indentation level. - if !yaml_parser_unroll_indent(parser, -1) { - return false - } - - // Reset simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - parser.simple_key_allowed = false - - // Create the STREAM-END token and append it to the queue. - token := yaml_token_t{ - typ: yaml_STREAM_END_TOKEN, - start_mark: parser.mark, - end_mark: parser.mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce a VERSION-DIRECTIVE or TAG-DIRECTIVE token. -func yaml_parser_fetch_directive(parser *yaml_parser_t) bool { - // Reset the indentation level. - if !yaml_parser_unroll_indent(parser, -1) { - return false - } - - // Reset simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - parser.simple_key_allowed = false - - // Create the YAML-DIRECTIVE or TAG-DIRECTIVE token. - token := yaml_token_t{} - if !yaml_parser_scan_directive(parser, &token) { - return false - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the DOCUMENT-START or DOCUMENT-END token. -func yaml_parser_fetch_document_indicator(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // Reset the indentation level. - if !yaml_parser_unroll_indent(parser, -1) { - return false - } - - // Reset simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - parser.simple_key_allowed = false - - // Consume the token. 
- start_mark := parser.mark - - skip(parser) - skip(parser) - skip(parser) - - end_mark := parser.mark - - // Create the DOCUMENT-START or DOCUMENT-END token. - token := yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the FLOW-SEQUENCE-START or FLOW-MAPPING-START token. -func yaml_parser_fetch_flow_collection_start(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // The indicators '[' and '{' may start a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // Increase the flow level. - if !yaml_parser_increase_flow_level(parser) { - return false - } - - // A simple key may follow the indicators '[' and '{'. - parser.simple_key_allowed = true - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the FLOW-SEQUENCE-START of FLOW-MAPPING-START token. - token := yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the FLOW-SEQUENCE-END or FLOW-MAPPING-END token. -func yaml_parser_fetch_flow_collection_end(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // Reset any potential simple key on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Decrease the flow level. - if !yaml_parser_decrease_flow_level(parser) { - return false - } - - // No simple keys after the indicators ']' and '}'. - parser.simple_key_allowed = false - - // Consume the token. - - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the FLOW-SEQUENCE-END of FLOW-MAPPING-END token. - token := yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the FLOW-ENTRY token. -func yaml_parser_fetch_flow_entry(parser *yaml_parser_t) bool { - // Reset any potential simple keys on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Simple keys are allowed after ','. - parser.simple_key_allowed = true - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the FLOW-ENTRY token and append it to the queue. - token := yaml_token_t{ - typ: yaml_FLOW_ENTRY_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the BLOCK-ENTRY token. -func yaml_parser_fetch_block_entry(parser *yaml_parser_t) bool { - // Check if the scanner is in the block context. - if parser.flow_level == 0 { - // Check if we are allowed to start a new entry. - if !parser.simple_key_allowed { - return yaml_parser_set_scanner_error(parser, "", parser.mark, - "block sequence entries are not allowed in this context") - } - // Add the BLOCK-SEQUENCE-START token if needed. - if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_SEQUENCE_START_TOKEN, parser.mark) { - return false - } - } else { - // It is an error for the '-' indicator to occur in the flow context, - // but we let the Parser detect and report about it because the Parser - // is able to point to the context. - } - - // Reset any potential simple keys on the current flow level. 
- if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Simple keys are allowed after '-'. - parser.simple_key_allowed = true - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the BLOCK-ENTRY token and append it to the queue. - token := yaml_token_t{ - typ: yaml_BLOCK_ENTRY_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the KEY token. -func yaml_parser_fetch_key(parser *yaml_parser_t) bool { - - // In the block context, additional checks are required. - if parser.flow_level == 0 { - // Check if we are allowed to start a new key (not nessesary simple). - if !parser.simple_key_allowed { - return yaml_parser_set_scanner_error(parser, "", parser.mark, - "mapping keys are not allowed in this context") - } - // Add the BLOCK-MAPPING-START token if needed. - if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) { - return false - } - } - - // Reset any potential simple keys on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Simple keys are allowed after '?' in the block context. - parser.simple_key_allowed = parser.flow_level == 0 - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the KEY token and append it to the queue. - token := yaml_token_t{ - typ: yaml_KEY_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the VALUE token. -func yaml_parser_fetch_value(parser *yaml_parser_t) bool { - - simple_key := &parser.simple_keys[len(parser.simple_keys)-1] - - // Have we found a simple key? - if valid, ok := yaml_simple_key_is_valid(parser, simple_key); !ok { - return false - - } else if valid { - - // Create the KEY token and insert it into the queue. - token := yaml_token_t{ - typ: yaml_KEY_TOKEN, - start_mark: simple_key.mark, - end_mark: simple_key.mark, - } - yaml_insert_token(parser, simple_key.token_number-parser.tokens_parsed, &token) - - // In the block context, we may need to add the BLOCK-MAPPING-START token. - if !yaml_parser_roll_indent(parser, simple_key.mark.column, - simple_key.token_number, - yaml_BLOCK_MAPPING_START_TOKEN, simple_key.mark) { - return false - } - - // Remove the simple key. - simple_key.possible = false - delete(parser.simple_keys_by_tok, simple_key.token_number) - - // A simple key cannot follow another simple key. - parser.simple_key_allowed = false - - } else { - // The ':' indicator follows a complex key. - - // In the block context, extra checks are required. - if parser.flow_level == 0 { - - // Check if we are allowed to start a complex value. - if !parser.simple_key_allowed { - return yaml_parser_set_scanner_error(parser, "", parser.mark, - "mapping values are not allowed in this context") - } - - // Add the BLOCK-MAPPING-START token if needed. - if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) { - return false - } - } - - // Simple keys after ':' are allowed in the block context. - parser.simple_key_allowed = parser.flow_level == 0 - } - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the VALUE token and append it to the queue. 
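// A standalone sketch (not the scanner itself) of the retroactive KEY insertion
// performed above: once a ':' proves the earlier text was a simple key, a KEY
// token is spliced into the pending queue at the position remembered for that
// key. The string-based queue and insertAt helper are invented.
package main

import "fmt"

func insertAt(queue []string, pos int, tok string) []string {
	queue = append(queue, "")
	copy(queue[pos+1:], queue[pos:])
	queue[pos] = tok
	return queue
}

func main() {
	// Scanning "a: 1": SCALAR("a") is already queued when the ':' is seen.
	queue := []string{`SCALAR("a")`}
	queue = insertAt(queue, 0, "KEY") // KEY is inserted in front of the scalar
	queue = append(queue, "VALUE", `SCALAR("1")`)
	fmt.Println(queue) // [KEY SCALAR("a") VALUE SCALAR("1")]
}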
- token := yaml_token_t{ - typ: yaml_VALUE_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the ALIAS or ANCHOR token. -func yaml_parser_fetch_anchor(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // An anchor or an alias could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow an anchor or an alias. - parser.simple_key_allowed = false - - // Create the ALIAS or ANCHOR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_anchor(parser, &token, typ) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the TAG token. -func yaml_parser_fetch_tag(parser *yaml_parser_t) bool { - // A tag could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow a tag. - parser.simple_key_allowed = false - - // Create the TAG token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_tag(parser, &token) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the SCALAR(...,literal) or SCALAR(...,folded) tokens. -func yaml_parser_fetch_block_scalar(parser *yaml_parser_t, literal bool) bool { - // Remove any potential simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // A simple key may follow a block scalar. - parser.simple_key_allowed = true - - // Create the SCALAR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_block_scalar(parser, &token, literal) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the SCALAR(...,single-quoted) or SCALAR(...,double-quoted) tokens. -func yaml_parser_fetch_flow_scalar(parser *yaml_parser_t, single bool) bool { - // A plain scalar could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow a flow scalar. - parser.simple_key_allowed = false - - // Create the SCALAR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_flow_scalar(parser, &token, single) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the SCALAR(...,plain) token. -func yaml_parser_fetch_plain_scalar(parser *yaml_parser_t) bool { - // A plain scalar could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow a flow scalar. - parser.simple_key_allowed = false - - // Create the SCALAR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_plain_scalar(parser, &token) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Eat whitespaces and comments until the next token is found. -func yaml_parser_scan_to_next_token(parser *yaml_parser_t) bool { - - // Until the next token is not found. - for { - // Allow the BOM mark to start a line. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if parser.mark.column == 0 && is_bom(parser.buffer, parser.buffer_pos) { - skip(parser) - } - - // Eat whitespaces. - // Tabs are allowed: - // - in the flow context - // - in the block context, but not at the beginning of the line or - // after '-', '?', or ':' (complex value). 
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for parser.buffer[parser.buffer_pos] == ' ' || ((parser.flow_level > 0 || !parser.simple_key_allowed) && parser.buffer[parser.buffer_pos] == '\t') { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Eat a comment until a line break. - if parser.buffer[parser.buffer_pos] == '#' { - for !is_breakz(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - } - - // If it is a line break, eat it. - if is_break(parser.buffer, parser.buffer_pos) { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - skip_line(parser) - - // In the block context, a new line may start a simple key. - if parser.flow_level == 0 { - parser.simple_key_allowed = true - } - } else { - break // We have found a token. - } - } - - return true -} - -// Scan a YAML-DIRECTIVE or TAG-DIRECTIVE token. -// -// Scope: -// %YAML 1.1 # a comment \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -// %TAG !yaml! tag:yaml.org,2002: \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -// -func yaml_parser_scan_directive(parser *yaml_parser_t, token *yaml_token_t) bool { - // Eat '%'. - start_mark := parser.mark - skip(parser) - - // Scan the directive name. - var name []byte - if !yaml_parser_scan_directive_name(parser, start_mark, &name) { - return false - } - - // Is it a YAML directive? - if bytes.Equal(name, []byte("YAML")) { - // Scan the VERSION directive value. - var major, minor int8 - if !yaml_parser_scan_version_directive_value(parser, start_mark, &major, &minor) { - return false - } - end_mark := parser.mark - - // Create a VERSION-DIRECTIVE token. - *token = yaml_token_t{ - typ: yaml_VERSION_DIRECTIVE_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - major: major, - minor: minor, - } - - // Is it a TAG directive? - } else if bytes.Equal(name, []byte("TAG")) { - // Scan the TAG directive value. - var handle, prefix []byte - if !yaml_parser_scan_tag_directive_value(parser, start_mark, &handle, &prefix) { - return false - } - end_mark := parser.mark - - // Create a TAG-DIRECTIVE token. - *token = yaml_token_t{ - typ: yaml_TAG_DIRECTIVE_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: handle, - prefix: prefix, - } - - // Unknown directive. - } else { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "found unknown directive name") - return false - } - - // Eat the rest of the line including any comments. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - if parser.buffer[parser.buffer_pos] == '#' { - for !is_breakz(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - } - - // Check if we are at the end of the line. - if !is_breakz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "did not find expected comment or line break") - return false - } - - // Eat a line break. - if is_break(parser.buffer, parser.buffer_pos) { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - skip_line(parser) - } - - return true -} - -// Scan the directive name. 
-// -// Scope: -// %YAML 1.1 # a comment \n -// ^^^^ -// %TAG !yaml! tag:yaml.org,2002: \n -// ^^^ -// -func yaml_parser_scan_directive_name(parser *yaml_parser_t, start_mark yaml_mark_t, name *[]byte) bool { - // Consume the directive name. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - var s []byte - for is_alpha(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if the name is empty. - if len(s) == 0 { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "could not find expected directive name") - return false - } - - // Check for an blank character after the name. - if !is_blankz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "found unexpected non-alphabetical character") - return false - } - *name = s - return true -} - -// Scan the value of VERSION-DIRECTIVE. -// -// Scope: -// %YAML 1.1 # a comment \n -// ^^^^^^ -func yaml_parser_scan_version_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, major, minor *int8) bool { - // Eat whitespaces. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Consume the major version number. - if !yaml_parser_scan_version_directive_number(parser, start_mark, major) { - return false - } - - // Eat '.'. - if parser.buffer[parser.buffer_pos] != '.' { - return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", - start_mark, "did not find expected digit or '.' character") - } - - skip(parser) - - // Consume the minor version number. - if !yaml_parser_scan_version_directive_number(parser, start_mark, minor) { - return false - } - return true -} - -const max_number_length = 2 - -// Scan the version number of VERSION-DIRECTIVE. -// -// Scope: -// %YAML 1.1 # a comment \n -// ^ -// %YAML 1.1 # a comment \n -// ^ -func yaml_parser_scan_version_directive_number(parser *yaml_parser_t, start_mark yaml_mark_t, number *int8) bool { - - // Repeat while the next character is digit. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - var value, length int8 - for is_digit(parser.buffer, parser.buffer_pos) { - // Check if the number is too long. - length++ - if length > max_number_length { - return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", - start_mark, "found extremely long version number") - } - value = value*10 + int8(as_digit(parser.buffer, parser.buffer_pos)) - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if the number was present. - if length == 0 { - return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", - start_mark, "did not find expected version number") - } - *number = value - return true -} - -// Scan the value of a TAG-DIRECTIVE token. -// -// Scope: -// %TAG !yaml! tag:yaml.org,2002: \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -// -func yaml_parser_scan_tag_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, handle, prefix *[]byte) bool { - var handle_value, prefix_value []byte - - // Eat whitespaces. 
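// A standalone sketch (not the scanner itself) of the %YAML version scanning
// above: two small decimal numbers separated by '.', each limited to two
// digits (max_number_length). parseVersion is an invented helper.
package main

import (
	"fmt"
	"strconv"
	"strings"
)

func parseVersion(s string) (major, minor int8, err error) {
	parts := strings.SplitN(s, ".", 2)
	if len(parts) != 2 || len(parts[0]) == 0 || len(parts[0]) > 2 || len(parts[1]) == 0 || len(parts[1]) > 2 {
		return 0, 0, fmt.Errorf("malformed version %q", s)
	}
	ma, err := strconv.Atoi(parts[0])
	if err != nil {
		return 0, 0, err
	}
	mi, err := strconv.Atoi(parts[1])
	if err != nil {
		return 0, 0, err
	}
	return int8(ma), int8(mi), nil
}

func main() {
	fmt.Println(parseVersion("1.1")) // 1 1 <nil>
}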
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Scan a handle. - if !yaml_parser_scan_tag_handle(parser, true, start_mark, &handle_value) { - return false - } - - // Expect a whitespace. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if !is_blank(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", - start_mark, "did not find expected whitespace") - return false - } - - // Eat whitespaces. - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Scan a prefix. - if !yaml_parser_scan_tag_uri(parser, true, nil, start_mark, &prefix_value) { - return false - } - - // Expect a whitespace or line break. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if !is_blankz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", - start_mark, "did not find expected whitespace or line break") - return false - } - - *handle = handle_value - *prefix = prefix_value - return true -} - -func yaml_parser_scan_anchor(parser *yaml_parser_t, token *yaml_token_t, typ yaml_token_type_t) bool { - var s []byte - - // Eat the indicator character. - start_mark := parser.mark - skip(parser) - - // Consume the value. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_alpha(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - end_mark := parser.mark - - /* - * Check if length of the anchor is greater than 0 and it is followed by - * a whitespace character or one of the indicators: - * - * '?', ':', ',', ']', '}', '%', '@', '`'. - */ - - if len(s) == 0 || - !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '?' || - parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == ',' || - parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '}' || - parser.buffer[parser.buffer_pos] == '%' || parser.buffer[parser.buffer_pos] == '@' || - parser.buffer[parser.buffer_pos] == '`') { - context := "while scanning an alias" - if typ == yaml_ANCHOR_TOKEN { - context = "while scanning an anchor" - } - yaml_parser_set_scanner_error(parser, context, start_mark, - "did not find expected alphabetic or numeric character") - return false - } - - // Create a token. - *token = yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - value: s, - } - - return true -} - -/* - * Scan a TAG token. - */ - -func yaml_parser_scan_tag(parser *yaml_parser_t, token *yaml_token_t) bool { - var handle, suffix []byte - - start_mark := parser.mark - - // Check if the tag is in the canonical form. - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - if parser.buffer[parser.buffer_pos+1] == '<' { - // Keep the handle as '' - - // Eat '!<' - skip(parser) - skip(parser) - - // Consume the tag value. - if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) { - return false - } - - // Check for '>' and eat it. 
- if parser.buffer[parser.buffer_pos] != '>' { - yaml_parser_set_scanner_error(parser, "while scanning a tag", - start_mark, "did not find the expected '>'") - return false - } - - skip(parser) - } else { - // The tag has either the '!suffix' or the '!handle!suffix' form. - - // First, try to scan a handle. - if !yaml_parser_scan_tag_handle(parser, false, start_mark, &handle) { - return false - } - - // Check if it is, indeed, handle. - if handle[0] == '!' && len(handle) > 1 && handle[len(handle)-1] == '!' { - // Scan the suffix now. - if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) { - return false - } - } else { - // It wasn't a handle after all. Scan the rest of the tag. - if !yaml_parser_scan_tag_uri(parser, false, handle, start_mark, &suffix) { - return false - } - - // Set the handle to '!'. - handle = []byte{'!'} - - // A special case: the '!' tag. Set the handle to '' and the - // suffix to '!'. - if len(suffix) == 0 { - handle, suffix = suffix, handle - } - } - } - - // Check the character which ends the tag. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if !is_blankz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a tag", - start_mark, "did not find expected whitespace or line break") - return false - } - - end_mark := parser.mark - - // Create a token. - *token = yaml_token_t{ - typ: yaml_TAG_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: handle, - suffix: suffix, - } - return true -} - -// Scan a tag handle. -func yaml_parser_scan_tag_handle(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, handle *[]byte) bool { - // Check the initial '!' character. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if parser.buffer[parser.buffer_pos] != '!' { - yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find expected '!'") - return false - } - - var s []byte - - // Copy the '!' character. - s = read(parser, s) - - // Copy all subsequent alphabetical and numerical characters. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for is_alpha(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if the trailing character is '!' and copy it. - if parser.buffer[parser.buffer_pos] == '!' { - s = read(parser, s) - } else { - // It's either the '!' tag or not really a tag handle. If it's a %TAG - // directive, it's an error. If it's a tag token, it must be a part of URI. - if directive && string(s) != "!" { - yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find expected '!'") - return false - } - } - - *handle = s - return true -} - -// Scan a tag. -func yaml_parser_scan_tag_uri(parser *yaml_parser_t, directive bool, head []byte, start_mark yaml_mark_t, uri *[]byte) bool { - //size_t length = head ? strlen((char *)head) : 0 - var s []byte - hasTag := len(head) > 0 - - // Copy the head if needed. - // - // Note that we don't copy the leading '!' character. - if len(head) > 1 { - s = append(s, head[1:]...) - } - - // Scan the tag. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // The set of characters that may appear in URI is as follows: - // - // '0'-'9', 'A'-'Z', 'a'-'z', '_', '-', ';', '/', '?', ':', '@', '&', - // '=', '+', '$', ',', '.', '!', '~', '*', '\'', '(', ')', '[', ']', - // '%'. 
- // [Go] Convert this into more reasonable logic. - for is_alpha(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == ';' || - parser.buffer[parser.buffer_pos] == '/' || parser.buffer[parser.buffer_pos] == '?' || - parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == '@' || - parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '=' || - parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '$' || - parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '.' || - parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '~' || - parser.buffer[parser.buffer_pos] == '*' || parser.buffer[parser.buffer_pos] == '\'' || - parser.buffer[parser.buffer_pos] == '(' || parser.buffer[parser.buffer_pos] == ')' || - parser.buffer[parser.buffer_pos] == '[' || parser.buffer[parser.buffer_pos] == ']' || - parser.buffer[parser.buffer_pos] == '%' { - // Check if it is a URI-escape sequence. - if parser.buffer[parser.buffer_pos] == '%' { - if !yaml_parser_scan_uri_escapes(parser, directive, start_mark, &s) { - return false - } - } else { - s = read(parser, s) - } - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - hasTag = true - } - - if !hasTag { - yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find expected tag URI") - return false - } - *uri = s - return true -} - -// Decode an URI-escape sequence corresponding to a single UTF-8 character. -func yaml_parser_scan_uri_escapes(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, s *[]byte) bool { - - // Decode the required number of characters. - w := 1024 - for w > 0 { - // Check for a URI-escaped octet. - if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) { - return false - } - - if !(parser.buffer[parser.buffer_pos] == '%' && - is_hex(parser.buffer, parser.buffer_pos+1) && - is_hex(parser.buffer, parser.buffer_pos+2)) { - return yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find URI escaped octet") - } - - // Get the octet. - octet := byte((as_hex(parser.buffer, parser.buffer_pos+1) << 4) + as_hex(parser.buffer, parser.buffer_pos+2)) - - // If it is the leading octet, determine the length of the UTF-8 sequence. - if w == 1024 { - w = width(octet) - if w == 0 { - return yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "found an incorrect leading UTF-8 octet") - } - } else { - // Check if the trailing octet is correct. - if octet&0xC0 != 0x80 { - return yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "found an incorrect trailing UTF-8 octet") - } - } - - // Copy the octet and move the pointers. - *s = append(*s, octet) - skip(parser) - skip(parser) - skip(parser) - w-- - } - return true -} - -// Scan a block scalar. -func yaml_parser_scan_block_scalar(parser *yaml_parser_t, token *yaml_token_t, literal bool) bool { - // Eat the indicator '|' or '>'. - start_mark := parser.mark - skip(parser) - - // Scan the additional block scalar indicators. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // Check for a chomping indicator. - var chomping, increment int - if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { - // Set the chomping method and eat the indicator. 
- if parser.buffer[parser.buffer_pos] == '+' { - chomping = +1 - } else { - chomping = -1 - } - skip(parser) - - // Check for an indentation indicator. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if is_digit(parser.buffer, parser.buffer_pos) { - // Check that the indentation is greater than 0. - if parser.buffer[parser.buffer_pos] == '0' { - yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "found an indentation indicator equal to 0") - return false - } - - // Get the indentation level and eat the indicator. - increment = as_digit(parser.buffer, parser.buffer_pos) - skip(parser) - } - - } else if is_digit(parser.buffer, parser.buffer_pos) { - // Do the same as above, but in the opposite order. - - if parser.buffer[parser.buffer_pos] == '0' { - yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "found an indentation indicator equal to 0") - return false - } - increment = as_digit(parser.buffer, parser.buffer_pos) - skip(parser) - - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { - if parser.buffer[parser.buffer_pos] == '+' { - chomping = +1 - } else { - chomping = -1 - } - skip(parser) - } - } - - // Eat whitespaces and comments to the end of the line. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - if parser.buffer[parser.buffer_pos] == '#' { - for !is_breakz(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - } - - // Check if we are at the end of the line. - if !is_breakz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "did not find expected comment or line break") - return false - } - - // Eat a line break. - if is_break(parser.buffer, parser.buffer_pos) { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - skip_line(parser) - } - - end_mark := parser.mark - - // Set the indentation level if it was specified. - var indent int - if increment > 0 { - if parser.indent >= 0 { - indent = parser.indent + increment - } else { - indent = increment - } - } - - // Scan the leading line breaks and determine the indentation level if needed. - var s, leading_break, trailing_breaks []byte - if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) { - return false - } - - // Scan the block scalar content. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - var leading_blank, trailing_blank bool - for parser.mark.column == indent && !is_z(parser.buffer, parser.buffer_pos) { - // We are at the beginning of a non-empty line. - - // Is it a trailing whitespace? - trailing_blank = is_blank(parser.buffer, parser.buffer_pos) - - // Check if we need to fold the leading line break. - if !literal && !leading_blank && !trailing_blank && len(leading_break) > 0 && leading_break[0] == '\n' { - // Do we need to join the lines by space? - if len(trailing_breaks) == 0 { - s = append(s, ' ') - } - } else { - s = append(s, leading_break...) - } - leading_break = leading_break[:0] - - // Append the remaining line breaks. 
- s = append(s, trailing_breaks...) - trailing_breaks = trailing_breaks[:0] - - // Is it a leading whitespace? - leading_blank = is_blank(parser.buffer, parser.buffer_pos) - - // Consume the current line. - for !is_breakz(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Consume the line break. - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - leading_break = read_line(parser, leading_break) - - // Eat the following indentation spaces and line breaks. - if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) { - return false - } - } - - // Chomp the tail. - if chomping != -1 { - s = append(s, leading_break...) - } - if chomping == 1 { - s = append(s, trailing_breaks...) - } - - // Create a token. - *token = yaml_token_t{ - typ: yaml_SCALAR_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: s, - style: yaml_LITERAL_SCALAR_STYLE, - } - if !literal { - token.style = yaml_FOLDED_SCALAR_STYLE - } - return true -} - -// Scan indentation spaces and line breaks for a block scalar. Determine the -// indentation level if needed. -func yaml_parser_scan_block_scalar_breaks(parser *yaml_parser_t, indent *int, breaks *[]byte, start_mark yaml_mark_t, end_mark *yaml_mark_t) bool { - *end_mark = parser.mark - - // Eat the indentation spaces and line breaks. - max_indent := 0 - for { - // Eat the indentation spaces. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for (*indent == 0 || parser.mark.column < *indent) && is_space(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - if parser.mark.column > max_indent { - max_indent = parser.mark.column - } - - // Check for a tab character messing the indentation. - if (*indent == 0 || parser.mark.column < *indent) && is_tab(parser.buffer, parser.buffer_pos) { - return yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "found a tab character where an indentation space is expected") - } - - // Have we found a non-empty line? - if !is_break(parser.buffer, parser.buffer_pos) { - break - } - - // Consume the line break. - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - // [Go] Should really be returning breaks instead. - *breaks = read_line(parser, *breaks) - *end_mark = parser.mark - } - - // Determine the indentation level if needed. - if *indent == 0 { - *indent = max_indent - if *indent < parser.indent+1 { - *indent = parser.indent + 1 - } - if *indent < 1 { - *indent = 1 - } - } - return true -} - -// Scan a quoted scalar. -func yaml_parser_scan_flow_scalar(parser *yaml_parser_t, token *yaml_token_t, single bool) bool { - // Eat the left quote. - start_mark := parser.mark - skip(parser) - - // Consume the content of the quoted scalar. - var s, leading_break, trailing_breaks, whitespaces []byte - for { - // Check that there are no document indicators at the beginning of the line. - if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { - return false - } - - if parser.mark.column == 0 && - ((parser.buffer[parser.buffer_pos+0] == '-' && - parser.buffer[parser.buffer_pos+1] == '-' && - parser.buffer[parser.buffer_pos+2] == '-') || - (parser.buffer[parser.buffer_pos+0] == '.' && - parser.buffer[parser.buffer_pos+1] == '.' 
&& - parser.buffer[parser.buffer_pos+2] == '.')) && - is_blankz(parser.buffer, parser.buffer_pos+3) { - yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", - start_mark, "found unexpected document indicator") - return false - } - - // Check for EOF. - if is_z(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", - start_mark, "found unexpected end of stream") - return false - } - - // Consume non-blank characters. - leading_blanks := false - for !is_blankz(parser.buffer, parser.buffer_pos) { - if single && parser.buffer[parser.buffer_pos] == '\'' && parser.buffer[parser.buffer_pos+1] == '\'' { - // Is is an escaped single quote. - s = append(s, '\'') - skip(parser) - skip(parser) - - } else if single && parser.buffer[parser.buffer_pos] == '\'' { - // It is a right single quote. - break - } else if !single && parser.buffer[parser.buffer_pos] == '"' { - // It is a right double quote. - break - - } else if !single && parser.buffer[parser.buffer_pos] == '\\' && is_break(parser.buffer, parser.buffer_pos+1) { - // It is an escaped line break. - if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) { - return false - } - skip(parser) - skip_line(parser) - leading_blanks = true - break - - } else if !single && parser.buffer[parser.buffer_pos] == '\\' { - // It is an escape sequence. - code_length := 0 - - // Check the escape character. - switch parser.buffer[parser.buffer_pos+1] { - case '0': - s = append(s, 0) - case 'a': - s = append(s, '\x07') - case 'b': - s = append(s, '\x08') - case 't', '\t': - s = append(s, '\x09') - case 'n': - s = append(s, '\x0A') - case 'v': - s = append(s, '\x0B') - case 'f': - s = append(s, '\x0C') - case 'r': - s = append(s, '\x0D') - case 'e': - s = append(s, '\x1B') - case ' ': - s = append(s, '\x20') - case '"': - s = append(s, '"') - case '\'': - s = append(s, '\'') - case '\\': - s = append(s, '\\') - case 'N': // NEL (#x85) - s = append(s, '\xC2') - s = append(s, '\x85') - case '_': // #xA0 - s = append(s, '\xC2') - s = append(s, '\xA0') - case 'L': // LS (#x2028) - s = append(s, '\xE2') - s = append(s, '\x80') - s = append(s, '\xA8') - case 'P': // PS (#x2029) - s = append(s, '\xE2') - s = append(s, '\x80') - s = append(s, '\xA9') - case 'x': - code_length = 2 - case 'u': - code_length = 4 - case 'U': - code_length = 8 - default: - yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", - start_mark, "found unknown escape character") - return false - } - - skip(parser) - skip(parser) - - // Consume an arbitrary escape code. - if code_length > 0 { - var value int - - // Scan the character value. - if parser.unread < code_length && !yaml_parser_update_buffer(parser, code_length) { - return false - } - for k := 0; k < code_length; k++ { - if !is_hex(parser.buffer, parser.buffer_pos+k) { - yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", - start_mark, "did not find expected hexdecimal number") - return false - } - value = (value << 4) + as_hex(parser.buffer, parser.buffer_pos+k) - } - - // Check the value and write the character. 
- if (value >= 0xD800 && value <= 0xDFFF) || value > 0x10FFFF { - yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", - start_mark, "found invalid Unicode character escape code") - return false - } - if value <= 0x7F { - s = append(s, byte(value)) - } else if value <= 0x7FF { - s = append(s, byte(0xC0+(value>>6))) - s = append(s, byte(0x80+(value&0x3F))) - } else if value <= 0xFFFF { - s = append(s, byte(0xE0+(value>>12))) - s = append(s, byte(0x80+((value>>6)&0x3F))) - s = append(s, byte(0x80+(value&0x3F))) - } else { - s = append(s, byte(0xF0+(value>>18))) - s = append(s, byte(0x80+((value>>12)&0x3F))) - s = append(s, byte(0x80+((value>>6)&0x3F))) - s = append(s, byte(0x80+(value&0x3F))) - } - - // Advance the pointer. - for k := 0; k < code_length; k++ { - skip(parser) - } - } - } else { - // It is a non-escaped non-blank character. - s = read(parser, s) - } - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - } - - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // Check if we are at the end of the scalar. - if single { - if parser.buffer[parser.buffer_pos] == '\'' { - break - } - } else { - if parser.buffer[parser.buffer_pos] == '"' { - break - } - } - - // Consume blank characters. - for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) { - if is_blank(parser.buffer, parser.buffer_pos) { - // Consume a space or a tab character. - if !leading_blanks { - whitespaces = read(parser, whitespaces) - } else { - skip(parser) - } - } else { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - // Check if it is a first line break. - if !leading_blanks { - whitespaces = whitespaces[:0] - leading_break = read_line(parser, leading_break) - leading_blanks = true - } else { - trailing_breaks = read_line(parser, trailing_breaks) - } - } - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Join the whitespaces or fold line breaks. - if leading_blanks { - // Do we need to fold line breaks? - if len(leading_break) > 0 && leading_break[0] == '\n' { - if len(trailing_breaks) == 0 { - s = append(s, ' ') - } else { - s = append(s, trailing_breaks...) - } - } else { - s = append(s, leading_break...) - s = append(s, trailing_breaks...) - } - trailing_breaks = trailing_breaks[:0] - leading_break = leading_break[:0] - } else { - s = append(s, whitespaces...) - whitespaces = whitespaces[:0] - } - } - - // Eat the right quote. - skip(parser) - end_mark := parser.mark - - // Create a token. - *token = yaml_token_t{ - typ: yaml_SCALAR_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: s, - style: yaml_SINGLE_QUOTED_SCALAR_STYLE, - } - if !single { - token.style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - return true -} - -// Scan a plain scalar. -func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) bool { - - var s, leading_break, trailing_breaks, whitespaces []byte - var leading_blanks bool - var indent = parser.indent + 1 - - start_mark := parser.mark - end_mark := parser.mark - - // Consume the content of the plain scalar. - for { - // Check for a document indicator. - if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { - return false - } - if parser.mark.column == 0 && - ((parser.buffer[parser.buffer_pos+0] == '-' && - parser.buffer[parser.buffer_pos+1] == '-' && - parser.buffer[parser.buffer_pos+2] == '-') || - (parser.buffer[parser.buffer_pos+0] == '.' 
&& - parser.buffer[parser.buffer_pos+1] == '.' && - parser.buffer[parser.buffer_pos+2] == '.')) && - is_blankz(parser.buffer, parser.buffer_pos+3) { - break - } - - // Check for a comment. - if parser.buffer[parser.buffer_pos] == '#' { - break - } - - // Consume non-blank characters. - for !is_blankz(parser.buffer, parser.buffer_pos) { - - // Check for indicators that may end a plain scalar. - if (parser.buffer[parser.buffer_pos] == ':' && is_blankz(parser.buffer, parser.buffer_pos+1)) || - (parser.flow_level > 0 && - (parser.buffer[parser.buffer_pos] == ',' || - parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == '[' || - parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' || - parser.buffer[parser.buffer_pos] == '}')) { - break - } - - // Check if we need to join whitespaces and breaks. - if leading_blanks || len(whitespaces) > 0 { - if leading_blanks { - // Do we need to fold line breaks? - if leading_break[0] == '\n' { - if len(trailing_breaks) == 0 { - s = append(s, ' ') - } else { - s = append(s, trailing_breaks...) - } - } else { - s = append(s, leading_break...) - s = append(s, trailing_breaks...) - } - trailing_breaks = trailing_breaks[:0] - leading_break = leading_break[:0] - leading_blanks = false - } else { - s = append(s, whitespaces...) - whitespaces = whitespaces[:0] - } - } - - // Copy the character. - s = read(parser, s) - - end_mark = parser.mark - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - } - - // Is it the end? - if !(is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos)) { - break - } - - // Consume blank characters. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) { - if is_blank(parser.buffer, parser.buffer_pos) { - - // Check for tab characters that abuse indentation. - if leading_blanks && parser.mark.column < indent && is_tab(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a plain scalar", - start_mark, "found a tab character that violates indentation") - return false - } - - // Consume a space or a tab character. - if !leading_blanks { - whitespaces = read(parser, whitespaces) - } else { - skip(parser) - } - } else { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - // Check if it is a first line break. - if !leading_blanks { - whitespaces = whitespaces[:0] - leading_break = read_line(parser, leading_break) - leading_blanks = true - } else { - trailing_breaks = read_line(parser, trailing_breaks) - } - } - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check indentation level. - if parser.flow_level == 0 && parser.mark.column < indent { - break - } - } - - // Create a token. - *token = yaml_token_t{ - typ: yaml_SCALAR_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: s, - style: yaml_PLAIN_SCALAR_STYLE, - } - - // Note that we change the 'simple_key_allowed' flag. - if leading_blanks { - parser.simple_key_allowed = true - } - return true -} diff --git a/vendor/gopkg.in/yaml.v2/writerc.go b/vendor/gopkg.in/yaml.v2/writerc.go index a2dde608..e69de29b 100644 --- a/vendor/gopkg.in/yaml.v2/writerc.go +++ b/vendor/gopkg.in/yaml.v2/writerc.go @@ -1,26 +0,0 @@ -package yaml - -// Set the writer error and return false. 
-func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool { - emitter.error = yaml_WRITER_ERROR - emitter.problem = problem - return false -} - -// Flush the output buffer. -func yaml_emitter_flush(emitter *yaml_emitter_t) bool { - if emitter.write_handler == nil { - panic("write handler not set") - } - - // Check if the buffer is empty. - if emitter.buffer_pos == 0 { - return true - } - - if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil { - return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error()) - } - emitter.buffer_pos = 0 - return true -} diff --git a/vendor/gopkg.in/yaml.v2/yaml.go b/vendor/gopkg.in/yaml.v2/yaml.go index 30813884..e69de29b 100644 --- a/vendor/gopkg.in/yaml.v2/yaml.go +++ b/vendor/gopkg.in/yaml.v2/yaml.go @@ -1,478 +0,0 @@ -// Package yaml implements YAML support for the Go language. -// -// Source code and other details for the project are available at GitHub: -// -// https://github.com/go-yaml/yaml -// -package yaml - -import ( - "errors" - "fmt" - "io" - "reflect" - "strings" - "sync" -) - -// MapSlice encodes and decodes as a YAML map. -// The order of keys is preserved when encoding and decoding. -type MapSlice []MapItem - -// MapItem is an item in a MapSlice. -type MapItem struct { - Key, Value interface{} -} - -// The Unmarshaler interface may be implemented by types to customize their -// behavior when being unmarshaled from a YAML document. The UnmarshalYAML -// method receives a function that may be called to unmarshal the original -// YAML value into a field or variable. It is safe to call the unmarshal -// function parameter more than once if necessary. -type Unmarshaler interface { - UnmarshalYAML(unmarshal func(interface{}) error) error -} - -// The Marshaler interface may be implemented by types to customize their -// behavior when being marshaled into a YAML document. The returned value -// is marshaled in place of the original value implementing Marshaler. -// -// If an error is returned by MarshalYAML, the marshaling procedure stops -// and returns with the provided error. -type Marshaler interface { - MarshalYAML() (interface{}, error) -} - -// Unmarshal decodes the first document found within the in byte slice -// and assigns decoded values into the out value. -// -// Maps and pointers (to a struct, string, int, etc) are accepted as out -// values. If an internal pointer within a struct is not initialized, -// the yaml package will initialize it if necessary for unmarshalling -// the provided data. The out parameter must not be nil. -// -// The type of the decoded values should be compatible with the respective -// values in out. If one or more values cannot be decoded due to a type -// mismatches, decoding continues partially until the end of the YAML -// content, and a *yaml.TypeError is returned with details for all -// missed values. -// -// Struct fields are only unmarshalled if they are exported (have an -// upper case first letter), and are unmarshalled using the field name -// lowercased as the default key. Custom keys may be defined via the -// "yaml" name in the field tag: the content preceding the first comma -// is used as the key, and the following comma-separated options are -// used to tweak the marshalling process (see Marshal). -// Conflicting names result in a runtime error. 
-// -// For example: -// -// type T struct { -// F int `yaml:"a,omitempty"` -// B int -// } -// var t T -// yaml.Unmarshal([]byte("a: 1\nb: 2"), &t) -// -// See the documentation of Marshal for the format of tags and a list of -// supported tag options. -// -func Unmarshal(in []byte, out interface{}) (err error) { - return unmarshal(in, out, false) -} - -// UnmarshalStrict is like Unmarshal except that any fields that are found -// in the data that do not have corresponding struct members, or mapping -// keys that are duplicates, will result in -// an error. -func UnmarshalStrict(in []byte, out interface{}) (err error) { - return unmarshal(in, out, true) -} - -// A Decoder reads and decodes YAML values from an input stream. -type Decoder struct { - strict bool - parser *parser -} - -// NewDecoder returns a new decoder that reads from r. -// -// The decoder introduces its own buffering and may read -// data from r beyond the YAML values requested. -func NewDecoder(r io.Reader) *Decoder { - return &Decoder{ - parser: newParserFromReader(r), - } -} - -// SetStrict sets whether strict decoding behaviour is enabled when -// decoding items in the data (see UnmarshalStrict). By default, decoding is not strict. -func (dec *Decoder) SetStrict(strict bool) { - dec.strict = strict -} - -// Decode reads the next YAML-encoded value from its input -// and stores it in the value pointed to by v. -// -// See the documentation for Unmarshal for details about the -// conversion of YAML into a Go value. -func (dec *Decoder) Decode(v interface{}) (err error) { - d := newDecoder(dec.strict) - defer handleErr(&err) - node := dec.parser.parse() - if node == nil { - return io.EOF - } - out := reflect.ValueOf(v) - if out.Kind() == reflect.Ptr && !out.IsNil() { - out = out.Elem() - } - d.unmarshal(node, out) - if len(d.terrors) > 0 { - return &TypeError{d.terrors} - } - return nil -} - -func unmarshal(in []byte, out interface{}, strict bool) (err error) { - defer handleErr(&err) - d := newDecoder(strict) - p := newParser(in) - defer p.destroy() - node := p.parse() - if node != nil { - v := reflect.ValueOf(out) - if v.Kind() == reflect.Ptr && !v.IsNil() { - v = v.Elem() - } - d.unmarshal(node, v) - } - if len(d.terrors) > 0 { - return &TypeError{d.terrors} - } - return nil -} - -// Marshal serializes the value provided into a YAML document. The structure -// of the generated document will reflect the structure of the value itself. -// Maps and pointers (to struct, string, int, etc) are accepted as the in value. -// -// Struct fields are only marshalled if they are exported (have an upper case -// first letter), and are marshalled using the field name lowercased as the -// default key. Custom keys may be defined via the "yaml" name in the field -// tag: the content preceding the first comma is used as the key, and the -// following comma-separated options are used to tweak the marshalling process. -// Conflicting names result in a runtime error. -// -// The field tag format accepted is: -// -// `(...) yaml:"[][,[,]]" (...)` -// -// The following flags are currently supported: -// -// omitempty Only include the field if it's not set to the zero -// value for the type or to empty slices or maps. -// Zero valued structs will be omitted if all their public -// fields are zero, unless they implement an IsZero -// method (see the IsZeroer interface type), in which -// case the field will be excluded if IsZero returns true. -// -// flow Marshal using a flow style (useful for structs, -// sequences and maps). 
-// -// inline Inline the field, which must be a struct or a map, -// causing all of its fields or keys to be processed as if -// they were part of the outer struct. For maps, keys must -// not conflict with the yaml keys of other struct fields. -// -// In addition, if the key is "-", the field is ignored. -// -// For example: -// -// type T struct { -// F int `yaml:"a,omitempty"` -// B int -// } -// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n" -// yaml.Marshal(&T{F: 1}} // Returns "a: 1\nb: 0\n" -// -func Marshal(in interface{}) (out []byte, err error) { - defer handleErr(&err) - e := newEncoder() - defer e.destroy() - e.marshalDoc("", reflect.ValueOf(in)) - e.finish() - out = e.out - return -} - -// An Encoder writes YAML values to an output stream. -type Encoder struct { - encoder *encoder -} - -// NewEncoder returns a new encoder that writes to w. -// The Encoder should be closed after use to flush all data -// to w. -func NewEncoder(w io.Writer) *Encoder { - return &Encoder{ - encoder: newEncoderWithWriter(w), - } -} - -// Encode writes the YAML encoding of v to the stream. -// If multiple items are encoded to the stream, the -// second and subsequent document will be preceded -// with a "---" document separator, but the first will not. -// -// See the documentation for Marshal for details about the conversion of Go -// values to YAML. -func (e *Encoder) Encode(v interface{}) (err error) { - defer handleErr(&err) - e.encoder.marshalDoc("", reflect.ValueOf(v)) - return nil -} - -// Close closes the encoder by writing any remaining data. -// It does not write a stream terminating string "...". -func (e *Encoder) Close() (err error) { - defer handleErr(&err) - e.encoder.finish() - return nil -} - -func handleErr(err *error) { - if v := recover(); v != nil { - if e, ok := v.(yamlError); ok { - *err = e.err - } else { - panic(v) - } - } -} - -type yamlError struct { - err error -} - -func fail(err error) { - panic(yamlError{err}) -} - -func failf(format string, args ...interface{}) { - panic(yamlError{fmt.Errorf("yaml: "+format, args...)}) -} - -// A TypeError is returned by Unmarshal when one or more fields in -// the YAML document cannot be properly decoded into the requested -// types. When this error is returned, the value is still -// unmarshaled partially. -type TypeError struct { - Errors []string -} - -func (e *TypeError) Error() string { - return fmt.Sprintf("yaml: unmarshal errors:\n %s", strings.Join(e.Errors, "\n ")) -} - -// -------------------------------------------------------------------------- -// Maintain a mapping of keys to structure field indexes - -// The code in this section was copied from mgo/bson. - -// structInfo holds details for the serialization of fields of -// a given struct. -type structInfo struct { - FieldsMap map[string]fieldInfo - FieldsList []fieldInfo - - // InlineMap is the number of the field in the struct that - // contains an ,inline map, or -1 if there's none. - InlineMap int -} - -type fieldInfo struct { - Key string - Num int - OmitEmpty bool - Flow bool - // Id holds the unique field identifier, so we can cheaply - // check for field duplicates without maintaining an extra map. - Id int - - // Inline holds the field index if the field is part of an inlined struct. 
- Inline []int -} - -var structMap = make(map[reflect.Type]*structInfo) -var fieldMapMutex sync.RWMutex - -func getStructInfo(st reflect.Type) (*structInfo, error) { - fieldMapMutex.RLock() - sinfo, found := structMap[st] - fieldMapMutex.RUnlock() - if found { - return sinfo, nil - } - - n := st.NumField() - fieldsMap := make(map[string]fieldInfo) - fieldsList := make([]fieldInfo, 0, n) - inlineMap := -1 - for i := 0; i != n; i++ { - field := st.Field(i) - if field.PkgPath != "" && !field.Anonymous { - continue // Private field - } - - info := fieldInfo{Num: i} - - tag := field.Tag.Get("yaml") - if tag == "" && strings.Index(string(field.Tag), ":") < 0 { - tag = string(field.Tag) - } - if tag == "-" { - continue - } - - inline := false - fields := strings.Split(tag, ",") - if len(fields) > 1 { - for _, flag := range fields[1:] { - switch flag { - case "omitempty": - info.OmitEmpty = true - case "flow": - info.Flow = true - case "inline": - inline = true - default: - return nil, errors.New(fmt.Sprintf("Unsupported flag %q in tag %q of type %s", flag, tag, st)) - } - } - tag = fields[0] - } - - if inline { - switch field.Type.Kind() { - case reflect.Map: - if inlineMap >= 0 { - return nil, errors.New("Multiple ,inline maps in struct " + st.String()) - } - if field.Type.Key() != reflect.TypeOf("") { - return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String()) - } - inlineMap = info.Num - case reflect.Struct: - sinfo, err := getStructInfo(field.Type) - if err != nil { - return nil, err - } - for _, finfo := range sinfo.FieldsList { - if _, found := fieldsMap[finfo.Key]; found { - msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String() - return nil, errors.New(msg) - } - if finfo.Inline == nil { - finfo.Inline = []int{i, finfo.Num} - } else { - finfo.Inline = append([]int{i}, finfo.Inline...) - } - finfo.Id = len(fieldsList) - fieldsMap[finfo.Key] = finfo - fieldsList = append(fieldsList, finfo) - } - default: - //return nil, errors.New("Option ,inline needs a struct value or map field") - return nil, errors.New("Option ,inline needs a struct value field") - } - continue - } - - if tag != "" { - info.Key = tag - } else { - info.Key = strings.ToLower(field.Name) - } - - if _, found = fieldsMap[info.Key]; found { - msg := "Duplicated key '" + info.Key + "' in struct " + st.String() - return nil, errors.New(msg) - } - - info.Id = len(fieldsList) - fieldsList = append(fieldsList, info) - fieldsMap[info.Key] = info - } - - sinfo = &structInfo{ - FieldsMap: fieldsMap, - FieldsList: fieldsList, - InlineMap: inlineMap, - } - - fieldMapMutex.Lock() - structMap[st] = sinfo - fieldMapMutex.Unlock() - return sinfo, nil -} - -// IsZeroer is used to check whether an object is zero to -// determine whether it should be omitted when marshaling -// with the omitempty flag. One notable implementation -// is time.Time. 
-type IsZeroer interface { - IsZero() bool -} - -func isZero(v reflect.Value) bool { - kind := v.Kind() - if z, ok := v.Interface().(IsZeroer); ok { - if (kind == reflect.Ptr || kind == reflect.Interface) && v.IsNil() { - return true - } - return z.IsZero() - } - switch kind { - case reflect.String: - return len(v.String()) == 0 - case reflect.Interface, reflect.Ptr: - return v.IsNil() - case reflect.Slice: - return v.Len() == 0 - case reflect.Map: - return v.Len() == 0 - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return v.Int() == 0 - case reflect.Float32, reflect.Float64: - return v.Float() == 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return v.Uint() == 0 - case reflect.Bool: - return !v.Bool() - case reflect.Struct: - vt := v.Type() - for i := v.NumField() - 1; i >= 0; i-- { - if vt.Field(i).PkgPath != "" { - continue // Private field - } - if !isZero(v.Field(i)) { - return false - } - } - return true - } - return false -} - -// FutureLineWrap globally disables line wrapping when encoding long strings. -// This is a temporary and thus deprecated method introduced to faciliate -// migration towards v3, which offers more control of line lengths on -// individual encodings, and has a default matching the behavior introduced -// by this function. -// -// The default formatting of v2 was erroneously changed in v2.3.0 and reverted -// in v2.4.0, at which point this function was introduced to help migration. -func FutureLineWrap() { - disableLineWrapping = true -} diff --git a/vendor/gopkg.in/yaml.v2/yamlh.go b/vendor/gopkg.in/yaml.v2/yamlh.go index f6a9c8e3..e69de29b 100644 --- a/vendor/gopkg.in/yaml.v2/yamlh.go +++ b/vendor/gopkg.in/yaml.v2/yamlh.go @@ -1,739 +0,0 @@ -package yaml - -import ( - "fmt" - "io" -) - -// The version directive data. -type yaml_version_directive_t struct { - major int8 // The major version number. - minor int8 // The minor version number. -} - -// The tag directive data. -type yaml_tag_directive_t struct { - handle []byte // The tag handle. - prefix []byte // The tag prefix. -} - -type yaml_encoding_t int - -// The stream encoding. -const ( - // Let the parser choose the encoding. - yaml_ANY_ENCODING yaml_encoding_t = iota - - yaml_UTF8_ENCODING // The default UTF-8 encoding. - yaml_UTF16LE_ENCODING // The UTF-16-LE encoding with BOM. - yaml_UTF16BE_ENCODING // The UTF-16-BE encoding with BOM. -) - -type yaml_break_t int - -// Line break types. -const ( - // Let the parser choose the break type. - yaml_ANY_BREAK yaml_break_t = iota - - yaml_CR_BREAK // Use CR for line breaks (Mac style). - yaml_LN_BREAK // Use LN for line breaks (Unix style). - yaml_CRLN_BREAK // Use CR LN for line breaks (DOS style). -) - -type yaml_error_type_t int - -// Many bad things could happen with the parser and emitter. -const ( - // No error is produced. - yaml_NO_ERROR yaml_error_type_t = iota - - yaml_MEMORY_ERROR // Cannot allocate or reallocate a block of memory. - yaml_READER_ERROR // Cannot read or decode the input stream. - yaml_SCANNER_ERROR // Cannot scan the input stream. - yaml_PARSER_ERROR // Cannot parse the input stream. - yaml_COMPOSER_ERROR // Cannot compose a YAML document. - yaml_WRITER_ERROR // Cannot write to the output stream. - yaml_EMITTER_ERROR // Cannot emit a YAML stream. -) - -// The pointer position. -type yaml_mark_t struct { - index int // The position index. - line int // The position line. - column int // The position column. 
-} - -// Node Styles - -type yaml_style_t int8 - -type yaml_scalar_style_t yaml_style_t - -// Scalar styles. -const ( - // Let the emitter choose the style. - yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = iota - - yaml_PLAIN_SCALAR_STYLE // The plain scalar style. - yaml_SINGLE_QUOTED_SCALAR_STYLE // The single-quoted scalar style. - yaml_DOUBLE_QUOTED_SCALAR_STYLE // The double-quoted scalar style. - yaml_LITERAL_SCALAR_STYLE // The literal scalar style. - yaml_FOLDED_SCALAR_STYLE // The folded scalar style. -) - -type yaml_sequence_style_t yaml_style_t - -// Sequence styles. -const ( - // Let the emitter choose the style. - yaml_ANY_SEQUENCE_STYLE yaml_sequence_style_t = iota - - yaml_BLOCK_SEQUENCE_STYLE // The block sequence style. - yaml_FLOW_SEQUENCE_STYLE // The flow sequence style. -) - -type yaml_mapping_style_t yaml_style_t - -// Mapping styles. -const ( - // Let the emitter choose the style. - yaml_ANY_MAPPING_STYLE yaml_mapping_style_t = iota - - yaml_BLOCK_MAPPING_STYLE // The block mapping style. - yaml_FLOW_MAPPING_STYLE // The flow mapping style. -) - -// Tokens - -type yaml_token_type_t int - -// Token types. -const ( - // An empty token. - yaml_NO_TOKEN yaml_token_type_t = iota - - yaml_STREAM_START_TOKEN // A STREAM-START token. - yaml_STREAM_END_TOKEN // A STREAM-END token. - - yaml_VERSION_DIRECTIVE_TOKEN // A VERSION-DIRECTIVE token. - yaml_TAG_DIRECTIVE_TOKEN // A TAG-DIRECTIVE token. - yaml_DOCUMENT_START_TOKEN // A DOCUMENT-START token. - yaml_DOCUMENT_END_TOKEN // A DOCUMENT-END token. - - yaml_BLOCK_SEQUENCE_START_TOKEN // A BLOCK-SEQUENCE-START token. - yaml_BLOCK_MAPPING_START_TOKEN // A BLOCK-SEQUENCE-END token. - yaml_BLOCK_END_TOKEN // A BLOCK-END token. - - yaml_FLOW_SEQUENCE_START_TOKEN // A FLOW-SEQUENCE-START token. - yaml_FLOW_SEQUENCE_END_TOKEN // A FLOW-SEQUENCE-END token. - yaml_FLOW_MAPPING_START_TOKEN // A FLOW-MAPPING-START token. - yaml_FLOW_MAPPING_END_TOKEN // A FLOW-MAPPING-END token. - - yaml_BLOCK_ENTRY_TOKEN // A BLOCK-ENTRY token. - yaml_FLOW_ENTRY_TOKEN // A FLOW-ENTRY token. - yaml_KEY_TOKEN // A KEY token. - yaml_VALUE_TOKEN // A VALUE token. - - yaml_ALIAS_TOKEN // An ALIAS token. - yaml_ANCHOR_TOKEN // An ANCHOR token. - yaml_TAG_TOKEN // A TAG token. - yaml_SCALAR_TOKEN // A SCALAR token. 
-) - -func (tt yaml_token_type_t) String() string { - switch tt { - case yaml_NO_TOKEN: - return "yaml_NO_TOKEN" - case yaml_STREAM_START_TOKEN: - return "yaml_STREAM_START_TOKEN" - case yaml_STREAM_END_TOKEN: - return "yaml_STREAM_END_TOKEN" - case yaml_VERSION_DIRECTIVE_TOKEN: - return "yaml_VERSION_DIRECTIVE_TOKEN" - case yaml_TAG_DIRECTIVE_TOKEN: - return "yaml_TAG_DIRECTIVE_TOKEN" - case yaml_DOCUMENT_START_TOKEN: - return "yaml_DOCUMENT_START_TOKEN" - case yaml_DOCUMENT_END_TOKEN: - return "yaml_DOCUMENT_END_TOKEN" - case yaml_BLOCK_SEQUENCE_START_TOKEN: - return "yaml_BLOCK_SEQUENCE_START_TOKEN" - case yaml_BLOCK_MAPPING_START_TOKEN: - return "yaml_BLOCK_MAPPING_START_TOKEN" - case yaml_BLOCK_END_TOKEN: - return "yaml_BLOCK_END_TOKEN" - case yaml_FLOW_SEQUENCE_START_TOKEN: - return "yaml_FLOW_SEQUENCE_START_TOKEN" - case yaml_FLOW_SEQUENCE_END_TOKEN: - return "yaml_FLOW_SEQUENCE_END_TOKEN" - case yaml_FLOW_MAPPING_START_TOKEN: - return "yaml_FLOW_MAPPING_START_TOKEN" - case yaml_FLOW_MAPPING_END_TOKEN: - return "yaml_FLOW_MAPPING_END_TOKEN" - case yaml_BLOCK_ENTRY_TOKEN: - return "yaml_BLOCK_ENTRY_TOKEN" - case yaml_FLOW_ENTRY_TOKEN: - return "yaml_FLOW_ENTRY_TOKEN" - case yaml_KEY_TOKEN: - return "yaml_KEY_TOKEN" - case yaml_VALUE_TOKEN: - return "yaml_VALUE_TOKEN" - case yaml_ALIAS_TOKEN: - return "yaml_ALIAS_TOKEN" - case yaml_ANCHOR_TOKEN: - return "yaml_ANCHOR_TOKEN" - case yaml_TAG_TOKEN: - return "yaml_TAG_TOKEN" - case yaml_SCALAR_TOKEN: - return "yaml_SCALAR_TOKEN" - } - return "" -} - -// The token structure. -type yaml_token_t struct { - // The token type. - typ yaml_token_type_t - - // The start/end of the token. - start_mark, end_mark yaml_mark_t - - // The stream encoding (for yaml_STREAM_START_TOKEN). - encoding yaml_encoding_t - - // The alias/anchor/scalar value or tag/tag directive handle - // (for yaml_ALIAS_TOKEN, yaml_ANCHOR_TOKEN, yaml_SCALAR_TOKEN, yaml_TAG_TOKEN, yaml_TAG_DIRECTIVE_TOKEN). - value []byte - - // The tag suffix (for yaml_TAG_TOKEN). - suffix []byte - - // The tag directive prefix (for yaml_TAG_DIRECTIVE_TOKEN). - prefix []byte - - // The scalar style (for yaml_SCALAR_TOKEN). - style yaml_scalar_style_t - - // The version directive major/minor (for yaml_VERSION_DIRECTIVE_TOKEN). - major, minor int8 -} - -// Events - -type yaml_event_type_t int8 - -// Event types. -const ( - // An empty event. - yaml_NO_EVENT yaml_event_type_t = iota - - yaml_STREAM_START_EVENT // A STREAM-START event. - yaml_STREAM_END_EVENT // A STREAM-END event. - yaml_DOCUMENT_START_EVENT // A DOCUMENT-START event. - yaml_DOCUMENT_END_EVENT // A DOCUMENT-END event. - yaml_ALIAS_EVENT // An ALIAS event. - yaml_SCALAR_EVENT // A SCALAR event. - yaml_SEQUENCE_START_EVENT // A SEQUENCE-START event. - yaml_SEQUENCE_END_EVENT // A SEQUENCE-END event. - yaml_MAPPING_START_EVENT // A MAPPING-START event. - yaml_MAPPING_END_EVENT // A MAPPING-END event. 
-) - -var eventStrings = []string{ - yaml_NO_EVENT: "none", - yaml_STREAM_START_EVENT: "stream start", - yaml_STREAM_END_EVENT: "stream end", - yaml_DOCUMENT_START_EVENT: "document start", - yaml_DOCUMENT_END_EVENT: "document end", - yaml_ALIAS_EVENT: "alias", - yaml_SCALAR_EVENT: "scalar", - yaml_SEQUENCE_START_EVENT: "sequence start", - yaml_SEQUENCE_END_EVENT: "sequence end", - yaml_MAPPING_START_EVENT: "mapping start", - yaml_MAPPING_END_EVENT: "mapping end", -} - -func (e yaml_event_type_t) String() string { - if e < 0 || int(e) >= len(eventStrings) { - return fmt.Sprintf("unknown event %d", e) - } - return eventStrings[e] -} - -// The event structure. -type yaml_event_t struct { - - // The event type. - typ yaml_event_type_t - - // The start and end of the event. - start_mark, end_mark yaml_mark_t - - // The document encoding (for yaml_STREAM_START_EVENT). - encoding yaml_encoding_t - - // The version directive (for yaml_DOCUMENT_START_EVENT). - version_directive *yaml_version_directive_t - - // The list of tag directives (for yaml_DOCUMENT_START_EVENT). - tag_directives []yaml_tag_directive_t - - // The anchor (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_ALIAS_EVENT). - anchor []byte - - // The tag (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). - tag []byte - - // The scalar value (for yaml_SCALAR_EVENT). - value []byte - - // Is the document start/end indicator implicit, or the tag optional? - // (for yaml_DOCUMENT_START_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_SCALAR_EVENT). - implicit bool - - // Is the tag optional for any non-plain style? (for yaml_SCALAR_EVENT). - quoted_implicit bool - - // The style (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). - style yaml_style_t -} - -func (e *yaml_event_t) scalar_style() yaml_scalar_style_t { return yaml_scalar_style_t(e.style) } -func (e *yaml_event_t) sequence_style() yaml_sequence_style_t { return yaml_sequence_style_t(e.style) } -func (e *yaml_event_t) mapping_style() yaml_mapping_style_t { return yaml_mapping_style_t(e.style) } - -// Nodes - -const ( - yaml_NULL_TAG = "tag:yaml.org,2002:null" // The tag !!null with the only possible value: null. - yaml_BOOL_TAG = "tag:yaml.org,2002:bool" // The tag !!bool with the values: true and false. - yaml_STR_TAG = "tag:yaml.org,2002:str" // The tag !!str for string values. - yaml_INT_TAG = "tag:yaml.org,2002:int" // The tag !!int for integer values. - yaml_FLOAT_TAG = "tag:yaml.org,2002:float" // The tag !!float for float values. - yaml_TIMESTAMP_TAG = "tag:yaml.org,2002:timestamp" // The tag !!timestamp for date and time values. - - yaml_SEQ_TAG = "tag:yaml.org,2002:seq" // The tag !!seq is used to denote sequences. - yaml_MAP_TAG = "tag:yaml.org,2002:map" // The tag !!map is used to denote mapping. - - // Not in original libyaml. - yaml_BINARY_TAG = "tag:yaml.org,2002:binary" - yaml_MERGE_TAG = "tag:yaml.org,2002:merge" - - yaml_DEFAULT_SCALAR_TAG = yaml_STR_TAG // The default scalar tag is !!str. - yaml_DEFAULT_SEQUENCE_TAG = yaml_SEQ_TAG // The default sequence tag is !!seq. - yaml_DEFAULT_MAPPING_TAG = yaml_MAP_TAG // The default mapping tag is !!map. -) - -type yaml_node_type_t int - -// Node types. -const ( - // An empty node. - yaml_NO_NODE yaml_node_type_t = iota - - yaml_SCALAR_NODE // A scalar node. - yaml_SEQUENCE_NODE // A sequence node. - yaml_MAPPING_NODE // A mapping node. -) - -// An element of a sequence node. 
-type yaml_node_item_t int - -// An element of a mapping node. -type yaml_node_pair_t struct { - key int // The key of the element. - value int // The value of the element. -} - -// The node structure. -type yaml_node_t struct { - typ yaml_node_type_t // The node type. - tag []byte // The node tag. - - // The node data. - - // The scalar parameters (for yaml_SCALAR_NODE). - scalar struct { - value []byte // The scalar value. - length int // The length of the scalar value. - style yaml_scalar_style_t // The scalar style. - } - - // The sequence parameters (for YAML_SEQUENCE_NODE). - sequence struct { - items_data []yaml_node_item_t // The stack of sequence items. - style yaml_sequence_style_t // The sequence style. - } - - // The mapping parameters (for yaml_MAPPING_NODE). - mapping struct { - pairs_data []yaml_node_pair_t // The stack of mapping pairs (key, value). - pairs_start *yaml_node_pair_t // The beginning of the stack. - pairs_end *yaml_node_pair_t // The end of the stack. - pairs_top *yaml_node_pair_t // The top of the stack. - style yaml_mapping_style_t // The mapping style. - } - - start_mark yaml_mark_t // The beginning of the node. - end_mark yaml_mark_t // The end of the node. - -} - -// The document structure. -type yaml_document_t struct { - - // The document nodes. - nodes []yaml_node_t - - // The version directive. - version_directive *yaml_version_directive_t - - // The list of tag directives. - tag_directives_data []yaml_tag_directive_t - tag_directives_start int // The beginning of the tag directives list. - tag_directives_end int // The end of the tag directives list. - - start_implicit int // Is the document start indicator implicit? - end_implicit int // Is the document end indicator implicit? - - // The start/end of the document. - start_mark, end_mark yaml_mark_t -} - -// The prototype of a read handler. -// -// The read handler is called when the parser needs to read more bytes from the -// source. The handler should write not more than size bytes to the buffer. -// The number of written bytes should be set to the size_read variable. -// -// [in,out] data A pointer to an application data specified by -// yaml_parser_set_input(). -// [out] buffer The buffer to write the data from the source. -// [in] size The size of the buffer. -// [out] size_read The actual number of bytes read from the source. -// -// On success, the handler should return 1. If the handler failed, -// the returned value should be 0. On EOF, the handler should set the -// size_read to 0 and return 1. -type yaml_read_handler_t func(parser *yaml_parser_t, buffer []byte) (n int, err error) - -// This structure holds information about a potential simple key. -type yaml_simple_key_t struct { - possible bool // Is a simple key possible? - required bool // Is a simple key required? - token_number int // The number of the token. - mark yaml_mark_t // The position mark. -} - -// The states of the parser. -type yaml_parser_state_t int - -const ( - yaml_PARSE_STREAM_START_STATE yaml_parser_state_t = iota - - yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE // Expect the beginning of an implicit document. - yaml_PARSE_DOCUMENT_START_STATE // Expect DOCUMENT-START. - yaml_PARSE_DOCUMENT_CONTENT_STATE // Expect the content of a document. - yaml_PARSE_DOCUMENT_END_STATE // Expect DOCUMENT-END. - yaml_PARSE_BLOCK_NODE_STATE // Expect a block node. - yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE // Expect a block node or indentless sequence. - yaml_PARSE_FLOW_NODE_STATE // Expect a flow node. 
- yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a block sequence. - yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE // Expect an entry of a block sequence. - yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE // Expect an entry of an indentless sequence. - yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping. - yaml_PARSE_BLOCK_MAPPING_KEY_STATE // Expect a block mapping key. - yaml_PARSE_BLOCK_MAPPING_VALUE_STATE // Expect a block mapping value. - yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a flow sequence. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE // Expect an entry of a flow sequence. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE // Expect a key of an ordered mapping. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE // Expect a value of an ordered mapping. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE // Expect the and of an ordered mapping entry. - yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping. - yaml_PARSE_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping. - yaml_PARSE_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping. - yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE // Expect an empty value of a flow mapping. - yaml_PARSE_END_STATE // Expect nothing. -) - -func (ps yaml_parser_state_t) String() string { - switch ps { - case yaml_PARSE_STREAM_START_STATE: - return "yaml_PARSE_STREAM_START_STATE" - case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE: - return "yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE" - case yaml_PARSE_DOCUMENT_START_STATE: - return "yaml_PARSE_DOCUMENT_START_STATE" - case yaml_PARSE_DOCUMENT_CONTENT_STATE: - return "yaml_PARSE_DOCUMENT_CONTENT_STATE" - case yaml_PARSE_DOCUMENT_END_STATE: - return "yaml_PARSE_DOCUMENT_END_STATE" - case yaml_PARSE_BLOCK_NODE_STATE: - return "yaml_PARSE_BLOCK_NODE_STATE" - case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: - return "yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE" - case yaml_PARSE_FLOW_NODE_STATE: - return "yaml_PARSE_FLOW_NODE_STATE" - case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: - return "yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE" - case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: - return "yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE" - case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: - return "yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE" - case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: - return "yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE" - case yaml_PARSE_BLOCK_MAPPING_KEY_STATE: - return "yaml_PARSE_BLOCK_MAPPING_KEY_STATE" - case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE: - return "yaml_PARSE_BLOCK_MAPPING_VALUE_STATE" - case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE" - case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: - return "yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE" - case yaml_PARSE_FLOW_MAPPING_KEY_STATE: - return "yaml_PARSE_FLOW_MAPPING_KEY_STATE" - case yaml_PARSE_FLOW_MAPPING_VALUE_STATE: - return "yaml_PARSE_FLOW_MAPPING_VALUE_STATE" - case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: - return 
"yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE" - case yaml_PARSE_END_STATE: - return "yaml_PARSE_END_STATE" - } - return "" -} - -// This structure holds aliases data. -type yaml_alias_data_t struct { - anchor []byte // The anchor. - index int // The node id. - mark yaml_mark_t // The anchor mark. -} - -// The parser structure. -// -// All members are internal. Manage the structure using the -// yaml_parser_ family of functions. -type yaml_parser_t struct { - - // Error handling - - error yaml_error_type_t // Error type. - - problem string // Error description. - - // The byte about which the problem occurred. - problem_offset int - problem_value int - problem_mark yaml_mark_t - - // The error context. - context string - context_mark yaml_mark_t - - // Reader stuff - - read_handler yaml_read_handler_t // Read handler. - - input_reader io.Reader // File input data. - input []byte // String input data. - input_pos int - - eof bool // EOF flag - - buffer []byte // The working buffer. - buffer_pos int // The current position of the buffer. - - unread int // The number of unread characters in the buffer. - - raw_buffer []byte // The raw buffer. - raw_buffer_pos int // The current position of the buffer. - - encoding yaml_encoding_t // The input encoding. - - offset int // The offset of the current position (in bytes). - mark yaml_mark_t // The mark of the current position. - - // Scanner stuff - - stream_start_produced bool // Have we started to scan the input stream? - stream_end_produced bool // Have we reached the end of the input stream? - - flow_level int // The number of unclosed '[' and '{' indicators. - - tokens []yaml_token_t // The tokens queue. - tokens_head int // The head of the tokens queue. - tokens_parsed int // The number of tokens fetched from the queue. - token_available bool // Does the tokens queue contain a token ready for dequeueing. - - indent int // The current indentation level. - indents []int // The indentation levels stack. - - simple_key_allowed bool // May a simple key occur at the current position? - simple_keys []yaml_simple_key_t // The stack of simple keys. - simple_keys_by_tok map[int]int // possible simple_key indexes indexed by token_number - - // Parser stuff - - state yaml_parser_state_t // The current parser state. - states []yaml_parser_state_t // The parser states stack. - marks []yaml_mark_t // The stack of marks. - tag_directives []yaml_tag_directive_t // The list of TAG directives. - - // Dumper stuff - - aliases []yaml_alias_data_t // The alias data. - - document *yaml_document_t // The currently parsed document. -} - -// Emitter Definitions - -// The prototype of a write handler. -// -// The write handler is called when the emitter needs to flush the accumulated -// characters to the output. The handler should write @a size bytes of the -// @a buffer to the output. -// -// @param[in,out] data A pointer to an application data specified by -// yaml_emitter_set_output(). -// @param[in] buffer The buffer with bytes to be written. -// @param[in] size The size of the buffer. -// -// @returns On success, the handler should return @c 1. If the handler failed, -// the returned value should be @c 0. -// -type yaml_write_handler_t func(emitter *yaml_emitter_t, buffer []byte) error - -type yaml_emitter_state_t int - -// The emitter states. -const ( - // Expect STREAM-START. - yaml_EMIT_STREAM_START_STATE yaml_emitter_state_t = iota - - yaml_EMIT_FIRST_DOCUMENT_START_STATE // Expect the first DOCUMENT-START or STREAM-END. 
- yaml_EMIT_DOCUMENT_START_STATE // Expect DOCUMENT-START or STREAM-END. - yaml_EMIT_DOCUMENT_CONTENT_STATE // Expect the content of a document. - yaml_EMIT_DOCUMENT_END_STATE // Expect DOCUMENT-END. - yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a flow sequence. - yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE // Expect an item of a flow sequence. - yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping. - yaml_EMIT_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping. - yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a flow mapping. - yaml_EMIT_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping. - yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a block sequence. - yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE // Expect an item of a block sequence. - yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping. - yaml_EMIT_BLOCK_MAPPING_KEY_STATE // Expect the key of a block mapping. - yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a block mapping. - yaml_EMIT_BLOCK_MAPPING_VALUE_STATE // Expect a value of a block mapping. - yaml_EMIT_END_STATE // Expect nothing. -) - -// The emitter structure. -// -// All members are internal. Manage the structure using the @c yaml_emitter_ -// family of functions. -type yaml_emitter_t struct { - - // Error handling - - error yaml_error_type_t // Error type. - problem string // Error description. - - // Writer stuff - - write_handler yaml_write_handler_t // Write handler. - - output_buffer *[]byte // String output data. - output_writer io.Writer // File output data. - - buffer []byte // The working buffer. - buffer_pos int // The current position of the buffer. - - raw_buffer []byte // The raw buffer. - raw_buffer_pos int // The current position of the buffer. - - encoding yaml_encoding_t // The stream encoding. - - // Emitter stuff - - canonical bool // If the output is in the canonical style? - best_indent int // The number of indentation spaces. - best_width int // The preferred width of the output lines. - unicode bool // Allow unescaped non-ASCII characters? - line_break yaml_break_t // The preferred line break. - - state yaml_emitter_state_t // The current emitter state. - states []yaml_emitter_state_t // The stack of states. - - events []yaml_event_t // The event queue. - events_head int // The head of the event queue. - - indents []int // The stack of indentation levels. - - tag_directives []yaml_tag_directive_t // The list of tag directives. - - indent int // The current indentation level. - - flow_level int // The current flow level. - - root_context bool // Is it the document root context? - sequence_context bool // Is it a sequence context? - mapping_context bool // Is it a mapping context? - simple_key_context bool // Is it a simple mapping key context? - - line int // The current line. - column int // The current column. - whitespace bool // If the last character was a whitespace? - indention bool // If the last character was an indentation character (' ', '-', '?', ':')? - open_ended bool // If an explicit document end is required? - - // Anchor analysis. - anchor_data struct { - anchor []byte // The anchor value. - alias bool // Is it an alias? - } - - // Tag analysis. - tag_data struct { - handle []byte // The tag handle. - suffix []byte // The tag suffix. - } - - // Scalar analysis. - scalar_data struct { - value []byte // The scalar value. 
- multiline bool // Does the scalar contain line breaks? - flow_plain_allowed bool // Can the scalar be expessed in the flow plain style? - block_plain_allowed bool // Can the scalar be expressed in the block plain style? - single_quoted_allowed bool // Can the scalar be expressed in the single quoted style? - block_allowed bool // Can the scalar be expressed in the literal or folded styles? - style yaml_scalar_style_t // The output style. - } - - // Dumper stuff - - opened bool // If the stream was already opened? - closed bool // If the stream was already closed? - - // The information associated with the document nodes. - anchors *struct { - references int // The number of references. - anchor int // The anchor id. - serialized bool // If the node has been emitted? - } - - last_anchor_id int // The last assigned anchor id. - - document *yaml_document_t // The currently emitted document. -} diff --git a/vendor/gopkg.in/yaml.v2/yamlprivateh.go b/vendor/gopkg.in/yaml.v2/yamlprivateh.go index 8110ce3c..e69de29b 100644 --- a/vendor/gopkg.in/yaml.v2/yamlprivateh.go +++ b/vendor/gopkg.in/yaml.v2/yamlprivateh.go @@ -1,173 +0,0 @@ -package yaml - -const ( - // The size of the input raw buffer. - input_raw_buffer_size = 512 - - // The size of the input buffer. - // It should be possible to decode the whole raw buffer. - input_buffer_size = input_raw_buffer_size * 3 - - // The size of the output buffer. - output_buffer_size = 128 - - // The size of the output raw buffer. - // It should be possible to encode the whole output buffer. - output_raw_buffer_size = (output_buffer_size*2 + 2) - - // The size of other stacks and queues. - initial_stack_size = 16 - initial_queue_size = 16 - initial_string_size = 16 -) - -// Check if the character at the specified position is an alphabetical -// character, a digit, '_', or '-'. -func is_alpha(b []byte, i int) bool { - return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'Z' || b[i] >= 'a' && b[i] <= 'z' || b[i] == '_' || b[i] == '-' -} - -// Check if the character at the specified position is a digit. -func is_digit(b []byte, i int) bool { - return b[i] >= '0' && b[i] <= '9' -} - -// Get the value of a digit. -func as_digit(b []byte, i int) int { - return int(b[i]) - '0' -} - -// Check if the character at the specified position is a hex-digit. -func is_hex(b []byte, i int) bool { - return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'F' || b[i] >= 'a' && b[i] <= 'f' -} - -// Get the value of a hex-digit. -func as_hex(b []byte, i int) int { - bi := b[i] - if bi >= 'A' && bi <= 'F' { - return int(bi) - 'A' + 10 - } - if bi >= 'a' && bi <= 'f' { - return int(bi) - 'a' + 10 - } - return int(bi) - '0' -} - -// Check if the character is ASCII. -func is_ascii(b []byte, i int) bool { - return b[i] <= 0x7F -} - -// Check if the character at the start of the buffer can be printed unescaped. -func is_printable(b []byte, i int) bool { - return ((b[i] == 0x0A) || // . == #x0A - (b[i] >= 0x20 && b[i] <= 0x7E) || // #x20 <= . <= #x7E - (b[i] == 0xC2 && b[i+1] >= 0xA0) || // #0xA0 <= . <= #xD7FF - (b[i] > 0xC2 && b[i] < 0xED) || - (b[i] == 0xED && b[i+1] < 0xA0) || - (b[i] == 0xEE) || - (b[i] == 0xEF && // #xE000 <= . <= #xFFFD - !(b[i+1] == 0xBB && b[i+2] == 0xBF) && // && . != #xFEFF - !(b[i+1] == 0xBF && (b[i+2] == 0xBE || b[i+2] == 0xBF)))) -} - -// Check if the character at the specified position is NUL. -func is_z(b []byte, i int) bool { - return b[i] == 0x00 -} - -// Check if the beginning of the buffer is a BOM. 
-func is_bom(b []byte, i int) bool { - return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF -} - -// Check if the character at the specified position is space. -func is_space(b []byte, i int) bool { - return b[i] == ' ' -} - -// Check if the character at the specified position is tab. -func is_tab(b []byte, i int) bool { - return b[i] == '\t' -} - -// Check if the character at the specified position is blank (space or tab). -func is_blank(b []byte, i int) bool { - //return is_space(b, i) || is_tab(b, i) - return b[i] == ' ' || b[i] == '\t' -} - -// Check if the character at the specified position is a line break. -func is_break(b []byte, i int) bool { - return (b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9) // PS (#x2029) -} - -func is_crlf(b []byte, i int) bool { - return b[i] == '\r' && b[i+1] == '\n' -} - -// Check if the character is a line break or NUL. -func is_breakz(b []byte, i int) bool { - //return is_break(b, i) || is_z(b, i) - return ( // is_break: - b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) - // is_z: - b[i] == 0) -} - -// Check if the character is a line break, space, or NUL. -func is_spacez(b []byte, i int) bool { - //return is_space(b, i) || is_breakz(b, i) - return ( // is_space: - b[i] == ' ' || - // is_breakz: - b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) - b[i] == 0) -} - -// Check if the character is a line break, space, tab, or NUL. -func is_blankz(b []byte, i int) bool { - //return is_blank(b, i) || is_breakz(b, i) - return ( // is_blank: - b[i] == ' ' || b[i] == '\t' || - // is_breakz: - b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) - b[i] == 0) -} - -// Determine the width of the character. -func width(b byte) int { - // Don't replace these by a switch without first - // confirming that it is being inlined. - if b&0x80 == 0x00 { - return 1 - } - if b&0xE0 == 0xC0 { - return 2 - } - if b&0xF0 == 0xE0 { - return 3 - } - if b&0xF8 == 0xF0 { - return 4 - } - return 0 - -} diff --git a/vendor/gorm.io/datatypes/License b/vendor/gorm.io/datatypes/License index 037e1653..e69de29b 100644 --- a/vendor/gorm.io/datatypes/License +++ b/vendor/gorm.io/datatypes/License @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2013-NOW Jinzhu - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/gorm.io/datatypes/README.md b/vendor/gorm.io/datatypes/README.md index 0760e4af..e69de29b 100644 --- a/vendor/gorm.io/datatypes/README.md +++ b/vendor/gorm.io/datatypes/README.md @@ -1,94 +0,0 @@ -# GORM Data Types - -## JSON - -sqlite, mysql, postgres supported - -```go -import "gorm.io/datatypes" - -type UserWithJSON struct { - gorm.Model - Name string - Attributes datatypes.JSON -} - -DB.Create(&User{ - Name: "json-1", - Attributes: datatypes.JSON([]byte(`{"name": "jinzhu", "age": 18, "tags": ["tag1", "tag2"], "orgs": {"orga": "orga"}}`)), -} - -// Check JSON has keys -datatypes.JSONQuery("attributes").HasKey(value, keys...) - -db.Find(&user, datatypes.JSONQuery("attributes").HasKey("role")) -db.Find(&user, datatypes.JSONQuery("attributes").HasKey("orgs", "orga")) -// MySQL -// SELECT * FROM `users` WHERE JSON_EXTRACT(`attributes`, '$.role') IS NOT NULL -// SELECT * FROM `users` WHERE JSON_EXTRACT(`attributes`, '$.orgs.orga') IS NOT NULL - -// PostgreSQL -// SELECT * FROM "user" WHERE "attributes"::jsonb ? 'role' -// SELECT * FROM "user" WHERE "attributes"::jsonb -> 'orgs' ? 'orga' - - -// Check JSON extract value from keys equal to value -datatypes.JSONQuery("attributes").Equals(value, keys...) - -DB.First(&user, datatypes.JSONQuery("attributes").Equals("jinzhu", "name")) -DB.First(&user, datatypes.JSONQuery("attributes").Equals("orgb", "orgs", "orgb")) -// MySQL -// SELECT * FROM `user` WHERE JSON_EXTRACT(`attributes`, '$.name') = "jinzhu" -// SELECT * FROM `user` WHERE JSON_EXTRACT(`attributes`, '$.orgs.orgb') = "orgb" - -// PostgreSQL -// SELECT * FROM "user" WHERE json_extract_path_text("attributes"::json,'name') = 'jinzhu' -// SELECT * FROM "user" WHERE json_extract_path_text("attributes"::json,'orgs','orgb') = 'orgb' -``` - -NOTE: SQlite need to build with `json1` tag, e.g: `go build --tags json1`, refer https://github.com/mattn/go-sqlite3#usage - -## Date - -```go -import "gorm.io/datatypes" - -type UserWithDate struct { - gorm.Model - Name string - Date datatypes.Date -} - -user := UserWithDate{Name: "jinzhu", Date: datatypes.Date(time.Now())} -DB.Create(&user) -// INSERT INTO `user_with_dates` (`name`,`date`) VALUES ("jinzhu","2020-07-17 00:00:00") - -DB.First(&result, "name = ? AND date = ?", "jinzhu", datatypes.Date(curTime)) -// SELECT * FROM user_with_dates WHERE name = "jinzhu" AND date = "2020-07-17 00:00:00" ORDER BY `user_with_dates`.`id` LIMIT 1 -``` - -## Time - -MySQL, PostgreSQL, SQLite, SQLServer are supported. - -Time with nanoseconds is supported for some databases which support for time with fractional second scale. - -```go -import "gorm.io/datatypes" - -type UserWithTime struct { - gorm.Model - Name string - Time datatypes.Time -} - -user := UserWithTime{Name: "jinzhu", Time: datatypes.NewTime(1, 2, 3, 0)} -DB.Create(&user) -// INSERT INTO `user_with_times` (`name`,`time`) VALUES ("jinzhu","01:02:03") - -DB.First(&result, "name = ? 
AND time = ?", "jinzhu", datatypes.NewTime(1, 2, 3, 0)) -// SELECT * FROM user_with_times WHERE name = "jinzhu" AND time = "01:02:03" ORDER BY `user_with_times`.`id` LIMIT 1 -``` - -NOTE: If the current using database is SQLite, the field column type is defined as `TEXT` type -when GORM AutoMigrate because SQLite doesn't have time type. diff --git a/vendor/gorm.io/datatypes/json.go b/vendor/gorm.io/datatypes/json.go index d78d6469..e69de29b 100644 --- a/vendor/gorm.io/datatypes/json.go +++ b/vendor/gorm.io/datatypes/json.go @@ -1,191 +0,0 @@ -package datatypes - -import ( - "context" - "database/sql/driver" - "encoding/json" - "errors" - "fmt" - "strings" - - "gorm.io/driver/mysql" - "gorm.io/gorm" - "gorm.io/gorm/clause" - "gorm.io/gorm/schema" -) - -// JSON defined JSON data type, need to implements driver.Valuer, sql.Scanner interface -type JSON json.RawMessage - -// Value return json value, implement driver.Valuer interface -func (j JSON) Value() (driver.Value, error) { - if len(j) == 0 { - return nil, nil - } - bytes, err := json.RawMessage(j).MarshalJSON() - return string(bytes), err -} - -// Scan scan value into Jsonb, implements sql.Scanner interface -func (j *JSON) Scan(value interface{}) error { - if value == nil { - *j = JSON("null") - return nil - } - var bytes []byte - switch v := value.(type) { - case []byte: - bytes = v - case string: - bytes = []byte(v) - default: - return errors.New(fmt.Sprint("Failed to unmarshal JSONB value:", value)) - } - - result := json.RawMessage{} - err := json.Unmarshal(bytes, &result) - *j = JSON(result) - return err -} - -// MarshalJSON to output non base64 encoded []byte -func (j JSON) MarshalJSON() ([]byte, error) { - return json.RawMessage(j).MarshalJSON() -} - -// UnmarshalJSON to deserialize []byte -func (j *JSON) UnmarshalJSON(b []byte) error { - result := json.RawMessage{} - err := result.UnmarshalJSON(b) - *j = JSON(result) - return err -} - -func (j JSON) String() string { - return string(j) -} - -// GormDataType gorm common data type -func (JSON) GormDataType() string { - return "json" -} - -// GormDBDataType gorm db data type -func (JSON) GormDBDataType(db *gorm.DB, field *schema.Field) string { - switch db.Dialector.Name() { - case "sqlite": - return "JSON" - case "mysql": - return "JSON" - case "postgres": - return "JSONB" - } - return "" -} - -func (js JSON) GormValue(ctx context.Context, db *gorm.DB) clause.Expr { - if len(js) == 0 { - return gorm.Expr("NULL") - } - - data, _ := js.MarshalJSON() - - switch db.Dialector.Name() { - case "mysql": - if v, ok := db.Dialector.(*mysql.Dialector); ok && !strings.Contains(v.ServerVersion, "MariaDB") { - return gorm.Expr("CAST(? 
AS JSON)", string(data)) - } - } - - return gorm.Expr("?", string(data)) -} - -// JSONQueryExpression json query expression, implements clause.Expression interface to use as querier -type JSONQueryExpression struct { - column string - keys []string - hasKeys bool - equals bool - equalsValue interface{} -} - -// JSONQuery query column as json -func JSONQuery(column string) *JSONQueryExpression { - return &JSONQueryExpression{column: column} -} - -// HasKey returns clause.Expression -func (jsonQuery *JSONQueryExpression) HasKey(keys ...string) *JSONQueryExpression { - jsonQuery.keys = keys - jsonQuery.hasKeys = true - return jsonQuery -} - -// Keys returns clause.Expression -func (jsonQuery *JSONQueryExpression) Equals(value interface{}, keys ...string) *JSONQueryExpression { - jsonQuery.keys = keys - jsonQuery.equals = true - jsonQuery.equalsValue = value - return jsonQuery -} - -// Build implements clause.Expression -func (jsonQuery *JSONQueryExpression) Build(builder clause.Builder) { - if stmt, ok := builder.(*gorm.Statement); ok { - switch stmt.Dialector.Name() { - case "mysql", "sqlite": - switch { - case jsonQuery.hasKeys: - if len(jsonQuery.keys) > 0 { - builder.WriteString("JSON_EXTRACT(" + stmt.Quote(jsonQuery.column) + ",") - builder.AddVar(stmt, "$."+strings.Join(jsonQuery.keys, ".")) - builder.WriteString(") IS NOT NULL") - } - case jsonQuery.equals: - if len(jsonQuery.keys) > 0 { - builder.WriteString("JSON_EXTRACT(" + stmt.Quote(jsonQuery.column) + ",") - builder.AddVar(stmt, "$."+strings.Join(jsonQuery.keys, ".")) - builder.WriteString(") = ") - if _, ok := jsonQuery.equalsValue.(bool); ok { - builder.WriteString(fmt.Sprint(jsonQuery.equalsValue)) - } else { - stmt.AddVar(builder, jsonQuery.equalsValue) - } - } - } - case "postgres": - switch { - case jsonQuery.hasKeys: - if len(jsonQuery.keys) > 0 { - stmt.WriteQuoted(jsonQuery.column) - stmt.WriteString("::jsonb") - for _, key := range jsonQuery.keys[0 : len(jsonQuery.keys)-1] { - stmt.WriteString(" -> ") - stmt.AddVar(builder, key) - } - - stmt.WriteString(" ? 
") - stmt.AddVar(builder, jsonQuery.keys[len(jsonQuery.keys)-1]) - } - case jsonQuery.equals: - if len(jsonQuery.keys) > 0 { - builder.WriteString(fmt.Sprintf("json_extract_path_text(%v::json,", stmt.Quote(jsonQuery.column))) - - for idx, key := range jsonQuery.keys { - if idx > 0 { - builder.WriteByte(',') - } - stmt.AddVar(builder, key) - } - builder.WriteString(") = ") - - if _, ok := jsonQuery.equalsValue.(string); ok { - stmt.AddVar(builder, jsonQuery.equalsValue) - } else { - stmt.AddVar(builder, fmt.Sprint(jsonQuery.equalsValue)) - } - } - } - } - } -} diff --git a/vendor/gorm.io/datatypes/test_all.sh b/vendor/gorm.io/datatypes/test_all.sh deleted file mode 100644 index 4bf5a166..00000000 --- a/vendor/gorm.io/datatypes/test_all.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -e - -dialects=("postgres" "postgres_simple" "mysql" "mssql" "sqlite") - -for dialect in "${dialects[@]}" ; do - if [ "$GORM_DIALECT" = "" ] || [ "$GORM_DIALECT" = "${dialect}" ] - then - GORM_DIALECT=${dialect} go test --tags "json1" - fi -done diff --git a/vendor/gorm.io/driver/mysql/License b/vendor/gorm.io/driver/mysql/License index 037e1653..e69de29b 100644 --- a/vendor/gorm.io/driver/mysql/License +++ b/vendor/gorm.io/driver/mysql/License @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2013-NOW Jinzhu - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/vendor/gorm.io/driver/mysql/README.md b/vendor/gorm.io/driver/mysql/README.md index b8f7a6c9..e69de29b 100644 --- a/vendor/gorm.io/driver/mysql/README.md +++ b/vendor/gorm.io/driver/mysql/README.md @@ -1,51 +0,0 @@ -# GORM MySQL Driver - -## Quick Start - -```go -import ( - "gorm.io/driver/mysql" - "gorm.io/gorm" -) - -// https://github.com/go-sql-driver/mysql -dsn := "gorm:gorm@tcp(localhost:9910)/gorm?charset=utf8&parseTime=True&loc=Local" -db, err := gorm.Open(mysql.Open(dsn), &gorm.Config{}) -``` - -## Configuration - -```go -import ( - "gorm.io/driver/mysql" - "gorm.io/gorm" -) - -var datetimePrecision = 2 - -db, err := gorm.Open(mysql.New(mysql.Config{ - DSN: "gorm:gorm@tcp(localhost:9910)/gorm?charset=utf8&parseTime=True&loc=Local", // data source name, refer https://github.com/go-sql-driver/mysql#dsn-data-source-name - DefaultStringSize: 256, // add default size for string fields, by default, will use db type `longtext` for fields without size, not a primary key, no index defined and don't have default values - DisableDatetimePrecision: true, // disable datetime precision support, which not supported before MySQL 5.6 - DefaultDatetimePrecision: &datetimePrecision, // default datetime precision - DontSupportRenameIndex: true, // drop & create index when rename index, rename index not supported before MySQL 5.7, MariaDB - DontSupportRenameColumn: true, // use change when rename column, rename rename not supported before MySQL 8, MariaDB - SkipInitializeWithVersion: false, // smart configure based on used version -}), &gorm.Config{}) -``` - -## Customized Driver - -```go -import ( - _ "example.com/my_mysql_driver" - "gorm.io/gorm" -) - -db, err := gorm.Open(mysql.New(mysql.Config{ - DriverName: "my_mysql_driver_name", - DSN: "gorm:gorm@tcp(localhost:9910)/gorm?charset=utf8&parseTime=True&loc=Local", // data source name, refer https://github.com/go-sql-driver/mysql#dsn-data-source-name -}) -``` - -Checkout [https://gorm.io](https://gorm.io) for details. diff --git a/vendor/gorm.io/driver/mysql/migrator.go b/vendor/gorm.io/driver/mysql/migrator.go index dd2b55a4..e69de29b 100644 --- a/vendor/gorm.io/driver/mysql/migrator.go +++ b/vendor/gorm.io/driver/mysql/migrator.go @@ -1,246 +0,0 @@ -package mysql - -import ( - "database/sql" - "fmt" - "strings" - - "gorm.io/gorm" - "gorm.io/gorm/clause" - "gorm.io/gorm/migrator" - "gorm.io/gorm/schema" -) - -type Migrator struct { - migrator.Migrator - Dialector -} - -func (m Migrator) FullDataTypeOf(field *schema.Field) clause.Expr { - expr := m.Migrator.FullDataTypeOf(field) - - if value, ok := field.TagSettings["COMMENT"]; ok { - expr.SQL += " COMMENT " + m.Dialector.Explain("?", value) - } - - return expr -} - -func (m Migrator) AlterColumn(value interface{}, field string) error { - return m.RunWithValue(value, func(stmt *gorm.Statement) error { - if field := stmt.Schema.LookUpField(field); field != nil { - return m.DB.Exec( - "ALTER TABLE ? MODIFY COLUMN ? 
?", - clause.Table{Name: stmt.Table}, clause.Column{Name: field.DBName}, m.FullDataTypeOf(field), - ).Error - } - return fmt.Errorf("failed to look up field with name: %s", field) - }) -} - -func (m Migrator) RenameColumn(value interface{}, oldName, newName string) error { - return m.RunWithValue(value, func(stmt *gorm.Statement) error { - if !m.Dialector.DontSupportRenameColumn { - return m.Migrator.RenameColumn(value, oldName, newName) - } - - var field *schema.Field - if f := stmt.Schema.LookUpField(oldName); f != nil { - oldName = f.DBName - field = f - } - - if f := stmt.Schema.LookUpField(newName); f != nil { - newName = f.DBName - field = f - } - - if field != nil { - return m.DB.Exec( - "ALTER TABLE ? CHANGE ? ? ?", - clause.Table{Name: stmt.Table}, clause.Column{Name: oldName}, - clause.Column{Name: newName}, m.FullDataTypeOf(field), - ).Error - } - - return fmt.Errorf("failed to look up field with name: %s", newName) - }) -} - -func (m Migrator) RenameIndex(value interface{}, oldName, newName string) error { - if !m.Dialector.DontSupportRenameIndex { - return m.RunWithValue(value, func(stmt *gorm.Statement) error { - return m.DB.Exec( - "ALTER TABLE ? RENAME INDEX ? TO ?", - clause.Table{Name: stmt.Table}, clause.Column{Name: oldName}, clause.Column{Name: newName}, - ).Error - }) - } - - return m.RunWithValue(value, func(stmt *gorm.Statement) error { - err := m.DropIndex(value, oldName) - if err != nil { - return err - } - - if idx := stmt.Schema.LookIndex(newName); idx == nil { - if idx = stmt.Schema.LookIndex(oldName); idx != nil { - opts := m.BuildIndexOptions(idx.Fields, stmt) - values := []interface{}{clause.Column{Name: newName}, clause.Table{Name: stmt.Table}, opts} - - createIndexSQL := "CREATE " - if idx.Class != "" { - createIndexSQL += idx.Class + " " - } - createIndexSQL += "INDEX ? ON ??" - - if idx.Type != "" { - createIndexSQL += " USING " + idx.Type - } - - return m.DB.Exec(createIndexSQL, values...).Error - } - } - - return m.CreateIndex(value, newName) - }) - -} - -func (m Migrator) DropTable(values ...interface{}) error { - values = m.ReorderModels(values, false) - return m.DB.Connection(func(tx *gorm.DB) error { - tx.Exec("SET FOREIGN_KEY_CHECKS = 0;") - for i := len(values) - 1; i >= 0; i-- { - if err := m.RunWithValue(values[i], func(stmt *gorm.Statement) error { - return tx.Exec("DROP TABLE IF EXISTS ? CASCADE", clause.Table{Name: stmt.Table}).Error - }); err != nil { - return err - } - } - return tx.Exec("SET FOREIGN_KEY_CHECKS = 1;").Error - }) -} - -func (m Migrator) DropConstraint(value interface{}, name string) error { - return m.RunWithValue(value, func(stmt *gorm.Statement) error { - constraint, chk, table := m.GuessConstraintAndTable(stmt, name) - if chk != nil { - return m.DB.Exec("ALTER TABLE ? DROP CHECK ?", clause.Table{Name: stmt.Table}, clause.Column{Name: chk.Name}).Error - } - if constraint != nil { - name = constraint.Name - } - - return m.DB.Exec( - "ALTER TABLE ? 
DROP FOREIGN KEY ?", clause.Table{Name: table}, clause.Column{Name: name}, - ).Error - }) -} - -// ColumnTypes column types return columnTypes,error -func (m Migrator) ColumnTypes(value interface{}) ([]gorm.ColumnType, error) { - columnTypes := make([]gorm.ColumnType, 0) - err := m.RunWithValue(value, func(stmt *gorm.Statement) error { - var ( - currentDatabase = m.DB.Migrator().CurrentDatabase() - columnTypeSQL = "SELECT column_name, column_default, is_nullable = 'YES', data_type, character_maximum_length, column_type, column_key, extra, column_comment, numeric_precision, numeric_scale " - rows, err = m.DB.Session(&gorm.Session{}).Table(stmt.Table).Limit(1).Rows() - ) - - if err != nil { - return err - } - - rawColumnTypes, err := rows.ColumnTypes() - - if err := rows.Close(); err != nil { - return err - } - - if !m.DisableDatetimePrecision { - columnTypeSQL += ", datetime_precision " - } - columnTypeSQL += "FROM information_schema.columns WHERE table_schema = ? AND table_name = ? ORDER BY ORDINAL_POSITION" - - columns, rowErr := m.DB.Raw(columnTypeSQL, currentDatabase, stmt.Table).Rows() - if rowErr != nil { - return rowErr - } - - defer columns.Close() - - for columns.Next() { - var ( - column migrator.ColumnType - datetimePrecision sql.NullInt64 - extraValue sql.NullString - columnKey sql.NullString - values = []interface{}{ - &column.NameValue, &column.DefaultValueValue, &column.NullableValue, &column.DataTypeValue, &column.LengthValue, &column.ColumnTypeValue, &columnKey, &extraValue, &column.CommentValue, &column.DecimalSizeValue, &column.ScaleValue, - } - ) - - if !m.DisableDatetimePrecision { - values = append(values, &datetimePrecision) - } - - if scanErr := columns.Scan(values...); scanErr != nil { - return scanErr - } - - column.PrimaryKeyValue = sql.NullBool{Bool: false, Valid: true} - column.UniqueValue = sql.NullBool{Bool: false, Valid: true} - switch columnKey.String { - case "PRI": - column.PrimaryKeyValue = sql.NullBool{Bool: true, Valid: true} - case "UNI": - column.UniqueValue = sql.NullBool{Bool: true, Valid: true} - } - - if strings.Contains(extraValue.String, "auto_increment") { - column.AutoIncrementValue = sql.NullBool{Bool: true, Valid: true} - } - - column.DefaultValueValue.String = strings.Trim(column.DefaultValueValue.String, "'") - if m.Dialector.DontSupportNullAsDefaultValue { - // rewrite mariadb default value like other version - if column.DefaultValueValue.Valid && column.DefaultValueValue.String == "NULL" { - column.DefaultValueValue.Valid = false - column.DefaultValueValue.String = "" - } - } - - if datetimePrecision.Valid { - column.DecimalSizeValue = datetimePrecision - } - - for _, c := range rawColumnTypes { - if c.Name() == column.NameValue.String { - column.SQLColumnType = c - break - } - } - - columnTypes = append(columnTypes, column) - } - - return nil - }) - - return columnTypes, err -} - -func (m Migrator) CurrentDatabase() (name string) { - baseName := m.Migrator.CurrentDatabase() - m.DB.Raw( - "SELECT SCHEMA_NAME from Information_schema.SCHEMATA where SCHEMA_NAME LIKE ? ORDER BY SCHEMA_NAME=? DESC limit 1", - baseName+"%", baseName).Scan(&name) - return -} - -func (m Migrator) GetTables() (tableList []string, err error) { - err = m.DB.Raw("SELECT TABLE_NAME FROM information_schema.tables where TABLE_SCHEMA=?", m.CurrentDatabase()). 
- Scan(&tableList).Error - return -} diff --git a/vendor/gorm.io/driver/mysql/mysql.go b/vendor/gorm.io/driver/mysql/mysql.go index bc8011a0..e69de29b 100644 --- a/vendor/gorm.io/driver/mysql/mysql.go +++ b/vendor/gorm.io/driver/mysql/mysql.go @@ -1,407 +0,0 @@ -package mysql - -import ( - "context" - "database/sql" - "fmt" - "math" - "strings" - "time" - - _ "github.com/go-sql-driver/mysql" - "gorm.io/gorm" - "gorm.io/gorm/callbacks" - "gorm.io/gorm/clause" - "gorm.io/gorm/logger" - "gorm.io/gorm/migrator" - "gorm.io/gorm/schema" -) - -type Config struct { - DriverName string - ServerVersion string - DSN string - Conn gorm.ConnPool - SkipInitializeWithVersion bool - DefaultStringSize uint - DefaultDatetimePrecision *int - DisableDatetimePrecision bool - DontSupportRenameIndex bool - DontSupportRenameColumn bool - DontSupportForShareClause bool - DontSupportNullAsDefaultValue bool -} - -type Dialector struct { - *Config -} - -var ( - // CreateClauses create clauses - CreateClauses = []string{"INSERT", "VALUES", "ON CONFLICT"} - // QueryClauses query clauses - QueryClauses = []string{} - // UpdateClauses update clauses - UpdateClauses = []string{"UPDATE", "SET", "WHERE", "ORDER BY", "LIMIT"} - // DeleteClauses delete clauses - DeleteClauses = []string{"DELETE", "FROM", "WHERE", "ORDER BY", "LIMIT"} - - defaultDatetimePrecision = 3 -) - -func Open(dsn string) gorm.Dialector { - return &Dialector{Config: &Config{DSN: dsn}} -} - -func New(config Config) gorm.Dialector { - return &Dialector{Config: &config} -} - -func (dialector Dialector) Name() string { - return "mysql" -} - -// NowFunc return now func -func (dialector Dialector) NowFunc(n int) func() time.Time { - return func() time.Time { - round := time.Second / time.Duration(math.Pow10(n)) - return time.Now().Round(round) - } -} - -func (dialector Dialector) Apply(config *gorm.Config) error { - if config.NowFunc == nil { - if dialector.DefaultDatetimePrecision == nil { - dialector.DefaultDatetimePrecision = &defaultDatetimePrecision - } - - // while maintaining the readability of the code, separate the business logic from - // the general part and leave it to the function to do it here. 
- config.NowFunc = dialector.NowFunc(*dialector.DefaultDatetimePrecision) - } - - return nil -} - -func (dialector Dialector) Initialize(db *gorm.DB) (err error) { - ctx := context.Background() - - // register callbacks - callbacks.RegisterDefaultCallbacks(db, &callbacks.Config{ - CreateClauses: CreateClauses, - QueryClauses: QueryClauses, - UpdateClauses: UpdateClauses, - DeleteClauses: DeleteClauses, - }) - - if dialector.DriverName == "" { - dialector.DriverName = "mysql" - } - - if dialector.DefaultDatetimePrecision == nil { - dialector.DefaultDatetimePrecision = &defaultDatetimePrecision - } - - if dialector.Conn != nil { - db.ConnPool = dialector.Conn - } else { - db.ConnPool, err = sql.Open(dialector.DriverName, dialector.DSN) - if err != nil { - return err - } - } - - if !dialector.Config.SkipInitializeWithVersion { - err = db.ConnPool.QueryRowContext(ctx, "SELECT VERSION()").Scan(&dialector.ServerVersion) - if err != nil { - return err - } - - if strings.Contains(dialector.ServerVersion, "MariaDB") { - dialector.Config.DontSupportRenameIndex = true - dialector.Config.DontSupportRenameColumn = true - dialector.Config.DontSupportForShareClause = true - dialector.Config.DontSupportNullAsDefaultValue = true - } else if strings.HasPrefix(dialector.ServerVersion, "5.6.") { - dialector.Config.DontSupportRenameIndex = true - dialector.Config.DontSupportRenameColumn = true - dialector.Config.DontSupportForShareClause = true - } else if strings.HasPrefix(dialector.ServerVersion, "5.7.") { - dialector.Config.DontSupportRenameColumn = true - dialector.Config.DontSupportForShareClause = true - } else if strings.HasPrefix(dialector.ServerVersion, "5.") { - dialector.Config.DisableDatetimePrecision = true - dialector.Config.DontSupportRenameIndex = true - dialector.Config.DontSupportRenameColumn = true - dialector.Config.DontSupportForShareClause = true - } - } - - for k, v := range dialector.ClauseBuilders() { - db.ClauseBuilders[k] = v - } - return -} - -const ( - // ClauseOnConflict for clause.ClauseBuilder ON CONFLICT key - ClauseOnConflict = "ON CONFLICT" - // ClauseValues for clause.ClauseBuilder VALUES key - ClauseValues = "VALUES" - // ClauseValues for clause.ClauseBuilder FOR key - ClauseFor = "FOR" -) - -func (dialector Dialector) ClauseBuilders() map[string]clause.ClauseBuilder { - clauseBuilders := map[string]clause.ClauseBuilder{ - ClauseOnConflict: func(c clause.Clause, builder clause.Builder) { - onConflict, ok := c.Expression.(clause.OnConflict) - if !ok { - c.Build(builder) - return - } - - builder.WriteString("ON DUPLICATE KEY UPDATE ") - if len(onConflict.DoUpdates) == 0 { - if s := builder.(*gorm.Statement).Schema; s != nil { - var column clause.Column - onConflict.DoNothing = false - - if s.PrioritizedPrimaryField != nil { - column = clause.Column{Name: s.PrioritizedPrimaryField.DBName} - } else if len(s.DBNames) > 0 { - column = clause.Column{Name: s.DBNames[0]} - } - - if column.Name != "" { - onConflict.DoUpdates = []clause.Assignment{{Column: column, Value: column}} - } - } - } - - for idx, assignment := range onConflict.DoUpdates { - if idx > 0 { - builder.WriteByte(',') - } - - builder.WriteQuoted(assignment.Column) - builder.WriteByte('=') - if column, ok := assignment.Value.(clause.Column); ok && column.Table == "excluded" { - column.Table = "" - builder.WriteString("VALUES(") - builder.WriteQuoted(column) - builder.WriteByte(')') - } else { - builder.AddVar(builder, assignment.Value) - } - } - }, - ClauseValues: func(c clause.Clause, builder clause.Builder) { - if 
values, ok := c.Expression.(clause.Values); ok && len(values.Columns) == 0 { - builder.WriteString("VALUES()") - return - } - c.Build(builder) - }, - } - - if dialector.Config.DontSupportForShareClause { - clauseBuilders[ClauseFor] = func(c clause.Clause, builder clause.Builder) { - if values, ok := c.Expression.(clause.Locking); ok && strings.EqualFold(values.Strength, "SHARE") { - builder.WriteString("LOCK IN SHARE MODE") - return - } - c.Build(builder) - } - } - - return clauseBuilders -} - -func (dialector Dialector) DefaultValueOf(field *schema.Field) clause.Expression { - return clause.Expr{SQL: "DEFAULT"} -} - -func (dialector Dialector) Migrator(db *gorm.DB) gorm.Migrator { - return Migrator{ - Migrator: migrator.Migrator{ - Config: migrator.Config{ - DB: db, - Dialector: dialector, - }, - }, - Dialector: dialector, - } -} - -func (dialector Dialector) BindVarTo(writer clause.Writer, stmt *gorm.Statement, v interface{}) { - writer.WriteByte('?') -} - -func (dialector Dialector) QuoteTo(writer clause.Writer, str string) { - var ( - underQuoted, selfQuoted bool - continuousBacktick int8 - shiftDelimiter int8 - ) - - for _, v := range []byte(str) { - switch v { - case '`': - continuousBacktick++ - if continuousBacktick == 2 { - writer.WriteString("``") - continuousBacktick = 0 - } - case '.': - if continuousBacktick > 0 || !selfQuoted { - shiftDelimiter = 0 - underQuoted = false - continuousBacktick = 0 - writer.WriteByte('`') - } - writer.WriteByte(v) - continue - default: - if shiftDelimiter-continuousBacktick <= 0 && !underQuoted { - writer.WriteByte('`') - underQuoted = true - if selfQuoted = continuousBacktick > 0; selfQuoted { - continuousBacktick -= 1 - } - } - - for ; continuousBacktick > 0; continuousBacktick -= 1 { - writer.WriteString("``") - } - - writer.WriteByte(v) - } - shiftDelimiter++ - } - - if continuousBacktick > 0 && !selfQuoted { - writer.WriteString("``") - } - writer.WriteByte('`') -} - -func (dialector Dialector) Explain(sql string, vars ...interface{}) string { - return logger.ExplainSQL(sql, nil, `'`, vars...) 
-} - -func (dialector Dialector) DataTypeOf(field *schema.Field) string { - switch field.DataType { - case schema.Bool: - return "boolean" - case schema.Int, schema.Uint: - return dialector.getSchemaIntAndUnitType(field) - case schema.Float: - return dialector.getSchemaFloatType(field) - case schema.String: - return dialector.getSchemaStringType(field) - case schema.Time: - return dialector.getSchemaTimeType(field) - case schema.Bytes: - return dialector.getSchemaBytesType(field) - } - - return string(field.DataType) -} - -func (dialector Dialector) getSchemaFloatType(field *schema.Field) string { - if field.Precision > 0 { - return fmt.Sprintf("decimal(%d, %d)", field.Precision, field.Scale) - } - - if field.Size <= 32 { - return "float" - } - - return "double" -} - -func (dialector Dialector) getSchemaStringType(field *schema.Field) string { - size := field.Size - if size == 0 { - if dialector.DefaultStringSize > 0 { - size = int(dialector.DefaultStringSize) - } else { - hasIndex := field.TagSettings["INDEX"] != "" || field.TagSettings["UNIQUE"] != "" - // TEXT, GEOMETRY or JSON column can't have a default value - if field.PrimaryKey || field.HasDefaultValue || hasIndex { - size = 191 // utf8mb4 - } - } - } - - if size >= 65536 && size <= int(math.Pow(2, 24)) { - return "mediumtext" - } - - if size > int(math.Pow(2, 24)) || size <= 0 { - return "longtext" - } - - return fmt.Sprintf("varchar(%d)", size) -} - -func (dialector Dialector) getSchemaTimeType(field *schema.Field) string { - precision := "" - if !dialector.DisableDatetimePrecision && field.Precision == 0 { - field.Precision = *dialector.DefaultDatetimePrecision - } - - if field.Precision > 0 { - precision = fmt.Sprintf("(%d)", field.Precision) - } - - if field.NotNull || field.PrimaryKey { - return "datetime" + precision - } - return "datetime" + precision + " NULL" -} - -func (dialector Dialector) getSchemaBytesType(field *schema.Field) string { - if field.Size > 0 && field.Size < 65536 { - return fmt.Sprintf("varbinary(%d)", field.Size) - } - - if field.Size >= 65536 && field.Size <= int(math.Pow(2, 24)) { - return "mediumblob" - } - - return "longblob" -} - -func (dialector Dialector) getSchemaIntAndUnitType(field *schema.Field) string { - sqlType := "bigint" - switch { - case field.Size <= 8: - sqlType = "tinyint" - case field.Size <= 16: - sqlType = "smallint" - case field.Size <= 24: - sqlType = "mediumint" - case field.Size <= 32: - sqlType = "int" - } - - if field.DataType == schema.Uint { - sqlType += " unsigned" - } - - if field.AutoIncrement { - sqlType += " AUTO_INCREMENT" - } - - return sqlType -} - -func (dialector Dialector) SavePoint(tx *gorm.DB, name string) error { - return tx.Exec("SAVEPOINT " + name).Error -} - -func (dialector Dialector) RollbackTo(tx *gorm.DB, name string) error { - return tx.Exec("ROLLBACK TO SAVEPOINT " + name).Error -} diff --git a/vendor/gorm.io/driver/postgres/License b/vendor/gorm.io/driver/postgres/License index 037e1653..e69de29b 100644 --- a/vendor/gorm.io/driver/postgres/License +++ b/vendor/gorm.io/driver/postgres/License @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2013-NOW Jinzhu - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished 
to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/gorm.io/driver/postgres/README.md b/vendor/gorm.io/driver/postgres/README.md index 01ba443e..e69de29b 100644 --- a/vendor/gorm.io/driver/postgres/README.md +++ b/vendor/gorm.io/driver/postgres/README.md @@ -1,31 +0,0 @@ -# GORM PostgreSQL Driver - -## Quick Start - -```go -import ( - "gorm.io/driver/postgres" - "gorm.io/gorm" -) - -// https://github.com/jackc/pgx -dsn := "host=localhost user=gorm password=gorm dbname=gorm port=9920 sslmode=disable TimeZone=Asia/Shanghai" -db, err := gorm.Open(postgres.Open(dsn), &gorm.Config{}) -``` - -## Configuration - -```go -import ( - "gorm.io/driver/postgres" - "gorm.io/gorm" -) - -db, err := gorm.Open(postgres.New(postgres.Config{ - DSN: "host=localhost user=gorm password=gorm dbname=gorm port=9920 sslmode=disable TimeZone=Asia/Shanghai", // data source name, refer https://github.com/jackc/pgx - PreferSimpleProtocol: true, // disables implicit prepared statement usage. By default pgx automatically uses the extended protocol -}), &gorm.Config{}) -``` - - -Checkout [https://gorm.io](https://gorm.io) for details. diff --git a/vendor/gorm.io/driver/postgres/migrator.go b/vendor/gorm.io/driver/postgres/migrator.go index ca918025..e69de29b 100644 --- a/vendor/gorm.io/driver/postgres/migrator.go +++ b/vendor/gorm.io/driver/postgres/migrator.go @@ -1,588 +0,0 @@ -package postgres - -import ( - "database/sql" - "fmt" - "regexp" - "strings" - - "github.com/jackc/pgx/v4" - "gorm.io/gorm" - "gorm.io/gorm/clause" - "gorm.io/gorm/migrator" - "gorm.io/gorm/schema" -) - -type Migrator struct { - migrator.Migrator -} - -func (m Migrator) CurrentDatabase() (name string) { - m.DB.Raw("SELECT CURRENT_DATABASE()").Scan(&name) - return -} - -func (m Migrator) BuildIndexOptions(opts []schema.IndexOption, stmt *gorm.Statement) (results []interface{}) { - for _, opt := range opts { - str := stmt.Quote(opt.DBName) - if opt.Expression != "" { - str = opt.Expression - } - - if opt.Collate != "" { - str += " COLLATE " + opt.Collate - } - - if opt.Sort != "" { - str += " " + opt.Sort - } - results = append(results, clause.Expr{SQL: str}) - } - return -} - -func (m Migrator) HasIndex(value interface{}, name string) bool { - var count int64 - m.RunWithValue(value, func(stmt *gorm.Statement) error { - if idx := stmt.Schema.LookIndex(name); idx != nil { - name = idx.Name - } - currentSchema, curTable := m.CurrentSchema(stmt, stmt.Table) - return m.DB.Raw( - "SELECT count(*) FROM pg_indexes WHERE tablename = ? AND indexname = ? 
AND schemaname = ?", curTable, name, currentSchema, - ).Scan(&count).Error - }) - - return count > 0 -} - -func (m Migrator) CreateIndex(value interface{}, name string) error { - return m.RunWithValue(value, func(stmt *gorm.Statement) error { - if idx := stmt.Schema.LookIndex(name); idx != nil { - opts := m.BuildIndexOptions(idx.Fields, stmt) - values := []interface{}{clause.Column{Name: idx.Name}, m.CurrentTable(stmt), opts} - - createIndexSQL := "CREATE " - if idx.Class != "" { - createIndexSQL += idx.Class + " " - } - createIndexSQL += "INDEX " - - if strings.TrimSpace(strings.ToUpper(idx.Option)) == "CONCURRENTLY" { - createIndexSQL += "CONCURRENTLY " - } - - createIndexSQL += "IF NOT EXISTS ? ON ?" - - if idx.Type != "" { - createIndexSQL += " USING " + idx.Type + "(?)" - } else { - createIndexSQL += " ?" - } - - if idx.Where != "" { - createIndexSQL += " WHERE " + idx.Where - } - - return m.DB.Exec(createIndexSQL, values...).Error - } - - return fmt.Errorf("failed to create index with name %v", name) - }) -} - -func (m Migrator) RenameIndex(value interface{}, oldName, newName string) error { - return m.RunWithValue(value, func(stmt *gorm.Statement) error { - return m.DB.Exec( - "ALTER INDEX ? RENAME TO ?", - clause.Column{Name: oldName}, clause.Column{Name: newName}, - ).Error - }) -} - -func (m Migrator) DropIndex(value interface{}, name string) error { - return m.RunWithValue(value, func(stmt *gorm.Statement) error { - if idx := stmt.Schema.LookIndex(name); idx != nil { - name = idx.Name - } - - return m.DB.Exec("DROP INDEX ?", clause.Column{Name: name}).Error - }) -} - -func (m Migrator) GetTables() (tableList []string, err error) { - currentSchema, _ := m.CurrentSchema(m.DB.Statement, "") - return tableList, m.DB.Raw("SELECT table_name FROM information_schema.tables WHERE table_schema = ? AND table_type = ?", currentSchema, "BASE TABLE").Scan(&tableList).Error -} - -func (m Migrator) CreateTable(values ...interface{}) (err error) { - if err = m.Migrator.CreateTable(values...); err != nil { - return - } - for _, value := range m.ReorderModels(values, false) { - if err = m.RunWithValue(value, func(stmt *gorm.Statement) error { - for _, field := range stmt.Schema.FieldsByDBName { - if field.Comment != "" { - if err := m.DB.Exec( - "COMMENT ON COLUMN ?.? IS ?", - m.CurrentTable(stmt), clause.Column{Name: field.DBName}, gorm.Expr(m.Migrator.Dialector.Explain("$1", field.Comment)), - ).Error; err != nil { - return err - } - } - } - return nil - }); err != nil { - return - } - } - return -} - -func (m Migrator) HasTable(value interface{}) bool { - var count int64 - m.RunWithValue(value, func(stmt *gorm.Statement) error { - currentSchema, curTable := m.CurrentSchema(stmt, stmt.Table) - return m.DB.Raw("SELECT count(*) FROM information_schema.tables WHERE table_schema = ? AND table_name = ? AND table_type = ?", currentSchema, curTable, "BASE TABLE").Scan(&count).Error - }) - return count > 0 -} - -func (m Migrator) DropTable(values ...interface{}) error { - values = m.ReorderModels(values, false) - tx := m.DB.Session(&gorm.Session{}) - for i := len(values) - 1; i >= 0; i-- { - if err := m.RunWithValue(values[i], func(stmt *gorm.Statement) error { - return tx.Exec("DROP TABLE IF EXISTS ? 
CASCADE", m.CurrentTable(stmt)).Error - }); err != nil { - return err - } - } - return nil -} - -func (m Migrator) AddColumn(value interface{}, field string) error { - if err := m.Migrator.AddColumn(value, field); err != nil { - return err - } - return m.RunWithValue(value, func(stmt *gorm.Statement) error { - if field := stmt.Schema.LookUpField(field); field != nil { - if field.Comment != "" { - if err := m.DB.Exec( - "COMMENT ON COLUMN ?.? IS ?", - m.CurrentTable(stmt), clause.Column{Name: field.DBName}, gorm.Expr(m.Migrator.Dialector.Explain("$1", field.Comment)), - ).Error; err != nil { - return err - } - } - } - return nil - }) -} - -func (m Migrator) HasColumn(value interface{}, field string) bool { - var count int64 - m.RunWithValue(value, func(stmt *gorm.Statement) error { - name := field - if stmt.Schema != nil { - if field := stmt.Schema.LookUpField(field); field != nil { - name = field.DBName - } - } - - currentSchema, curTable := m.CurrentSchema(stmt, stmt.Table) - return m.DB.Raw( - "SELECT count(*) FROM INFORMATION_SCHEMA.columns WHERE table_schema = ? AND table_name = ? AND column_name = ?", - currentSchema, curTable, name, - ).Scan(&count).Error - }) - - return count > 0 -} - -func (m Migrator) MigrateColumn(value interface{}, field *schema.Field, columnType gorm.ColumnType) error { - // skip primary field - if !field.PrimaryKey { - if err := m.Migrator.MigrateColumn(value, field, columnType); err != nil { - return err - } - } - - return m.RunWithValue(value, func(stmt *gorm.Statement) error { - var description string - currentSchema, curTable := m.CurrentSchema(stmt, stmt.Table) - values := []interface{}{currentSchema, curTable, field.DBName, stmt.Table, currentSchema} - checkSQL := "SELECT description FROM pg_catalog.pg_description " - checkSQL += "WHERE objsubid = (SELECT ordinal_position FROM information_schema.columns WHERE table_schema = ? AND table_name = ? AND column_name = ?) " - checkSQL += "AND objoid = (SELECT oid FROM pg_catalog.pg_class WHERE relname = ? AND relnamespace = " - checkSQL += "(SELECT oid FROM pg_catalog.pg_namespace WHERE nspname = ?))" - m.DB.Raw(checkSQL, values...).Scan(&description) - comment := field.Comment - if comment != "" { - comment = comment[1 : len(comment)-1] - } - if field.Comment != "" && comment != description { - if err := m.DB.Exec( - "COMMENT ON COLUMN ?.? 
IS ?", - m.CurrentTable(stmt), clause.Column{Name: field.DBName}, gorm.Expr(m.Migrator.Dialector.Explain("$1", field.Comment)), - ).Error; err != nil { - return err - } - } - return nil - }) -} - -// AlterColumn alter value's `field` column' type based on schema definition -func (m Migrator) AlterColumn(value interface{}, field string) error { - return m.RunWithValue(value, func(stmt *gorm.Statement) error { - if field := stmt.Schema.LookUpField(field); field != nil { - var ( - columnTypes, _ = m.DB.Migrator().ColumnTypes(value) - fieldColumnType *migrator.ColumnType - ) - for _, columnType := range columnTypes { - if columnType.Name() == field.DBName { - fieldColumnType, _ = columnType.(*migrator.ColumnType) - } - } - - fileType := clause.Expr{SQL: m.DataTypeOf(field)} - if fieldColumnType.DatabaseTypeName() != fileType.SQL { - filedColumnAutoIncrement, _ := fieldColumnType.AutoIncrement() - if field.AutoIncrement && filedColumnAutoIncrement { // update - serialDatabaseType, _ := getSerialDatabaseType(fileType.SQL) - if t, _ := fieldColumnType.ColumnType(); t != serialDatabaseType { - if err := m.UpdateSequence(m.DB, stmt, field, serialDatabaseType); err != nil { - return err - } - } - } else if field.AutoIncrement && !filedColumnAutoIncrement { // create - serialDatabaseType, _ := getSerialDatabaseType(fileType.SQL) - if err := m.CreateSequence(m.DB, stmt, field, serialDatabaseType); err != nil { - return err - } - } else if !field.AutoIncrement && filedColumnAutoIncrement { // delete - if err := m.DeleteSequence(m.DB, stmt, field, fileType); err != nil { - return err - } - } else { - if err := m.DB.Exec("ALTER TABLE ? ALTER COLUMN ? TYPE ?", m.CurrentTable(stmt), clause.Column{Name: field.DBName}, fileType).Error; err != nil { - return err - } - } - } - - if null, _ := fieldColumnType.Nullable(); null == field.NotNull { - if field.NotNull { - if err := m.DB.Exec("ALTER TABLE ? ALTER COLUMN ? SET NOT NULL", m.CurrentTable(stmt), clause.Column{Name: field.DBName}).Error; err != nil { - return err - } - } else { - if err := m.DB.Exec("ALTER TABLE ? ALTER COLUMN ? DROP NOT NULL", m.CurrentTable(stmt), clause.Column{Name: field.DBName}).Error; err != nil { - return err - } - } - } - - if uniq, _ := fieldColumnType.Unique(); uniq != field.Unique { - idxName := clause.Column{Name: m.DB.Config.NamingStrategy.IndexName(stmt.Table, field.DBName)} - if err := m.DB.Exec("ALTER TABLE ? ADD CONSTRAINT ? UNIQUE(?)", m.CurrentTable(stmt), idxName, clause.Column{Name: field.DBName}).Error; err != nil { - return err - } - } - - if v, _ := fieldColumnType.DefaultValue(); v != field.DefaultValue { - if field.HasDefaultValue && (field.DefaultValueInterface != nil || field.DefaultValue != "") { - if field.DefaultValueInterface != nil { - defaultStmt := &gorm.Statement{Vars: []interface{}{field.DefaultValueInterface}} - m.Dialector.BindVarTo(defaultStmt, defaultStmt, field.DefaultValueInterface) - if err := m.DB.Exec("ALTER TABLE ? ALTER COLUMN ? SET DEFAULT ?", m.CurrentTable(stmt), clause.Column{Name: field.DBName}, clause.Expr{SQL: m.Dialector.Explain(defaultStmt.SQL.String(), field.DefaultValueInterface)}).Error; err != nil { - return err - } - } else if field.DefaultValue != "(-)" { - if err := m.DB.Exec("ALTER TABLE ? ALTER COLUMN ? SET DEFAULT ?", m.CurrentTable(stmt), clause.Column{Name: field.DBName}, clause.Expr{SQL: field.DefaultValue}).Error; err != nil { - return err - } - } else { - if err := m.DB.Exec("ALTER TABLE ? ALTER COLUMN ? 
DROP DEFAULT", m.CurrentTable(stmt), clause.Column{Name: field.DBName}, clause.Expr{SQL: field.DefaultValue}).Error; err != nil { - return err - } - } - } - } - return nil - } - return fmt.Errorf("failed to look up field with name: %s", field) - }) -} - -func (m Migrator) HasConstraint(value interface{}, name string) bool { - var count int64 - m.RunWithValue(value, func(stmt *gorm.Statement) error { - constraint, chk, table := m.GuessConstraintAndTable(stmt, name) - currentSchema, curTable := m.CurrentSchema(stmt, table) - if constraint != nil { - name = constraint.Name - } else if chk != nil { - name = chk.Name - } - - return m.DB.Raw( - "SELECT count(*) FROM INFORMATION_SCHEMA.table_constraints WHERE table_schema = ? AND table_name = ? AND constraint_name = ?", - currentSchema, curTable, name, - ).Scan(&count).Error - }) - - return count > 0 -} - -func (m Migrator) ColumnTypes(value interface{}) (columnTypes []gorm.ColumnType, err error) { - columnTypes = make([]gorm.ColumnType, 0) - err = m.RunWithValue(value, func(stmt *gorm.Statement) error { - var ( - currentDatabase = m.DB.Migrator().CurrentDatabase() - currentSchema, table = m.CurrentSchema(stmt, stmt.Table) - columns, err = m.DB.Raw( - "SELECT c.column_name, c.is_nullable = 'YES', c.udt_name, c.character_maximum_length, c.numeric_precision, c.numeric_precision_radix, c.numeric_scale, c.datetime_precision, 8 * typlen, c.column_default, pd.description FROM information_schema.columns AS c JOIN pg_type AS pgt ON c.udt_name = pgt.typname LEFT JOIN pg_catalog.pg_description as pd ON pd.objsubid = c.ordinal_position AND pd.objoid = (SELECT oid FROM pg_catalog.pg_class WHERE relname = c.table_name AND relnamespace = (SELECT oid FROM pg_catalog.pg_namespace WHERE nspname = c.table_schema)) where table_catalog = ? AND table_schema = ? 
AND table_name = ?", - currentDatabase, currentSchema, table).Rows() - ) - - if err != nil { - return err - } - - for columns.Next() { - var ( - column = &migrator.ColumnType{ - PrimaryKeyValue: sql.NullBool{Valid: true}, - UniqueValue: sql.NullBool{Valid: true}, - } - datetimePrecision sql.NullInt64 - radixValue sql.NullInt64 - typeLenValue sql.NullInt64 - ) - - err = columns.Scan( - &column.NameValue, &column.NullableValue, &column.DataTypeValue, &column.LengthValue, &column.DecimalSizeValue, - &radixValue, &column.ScaleValue, &datetimePrecision, &typeLenValue, &column.DefaultValueValue, &column.CommentValue, - ) - if err != nil { - return err - } - - if typeLenValue.Valid && typeLenValue.Int64 > 0 { - column.LengthValue = typeLenValue - } - - if strings.HasPrefix(column.DefaultValueValue.String, "nextval('") && strings.HasSuffix(column.DefaultValueValue.String, "seq'::regclass)") { - column.AutoIncrementValue = sql.NullBool{Bool: true, Valid: true} - column.DefaultValueValue = sql.NullString{} - } - - if column.DefaultValueValue.Valid { - column.DefaultValueValue.String = regexp.MustCompile(`'(.*)'::[\w]+$`).ReplaceAllString(column.DefaultValueValue.String, "$1") - } - - if datetimePrecision.Valid { - column.DecimalSizeValue = datetimePrecision - } - - columnTypes = append(columnTypes, column) - } - columns.Close() - - // assign sql column type - { - rows, rowsErr := m.GetRows(currentSchema, table) - if rowsErr != nil { - return rowsErr - } - rawColumnTypes, err := rows.ColumnTypes() - if err != nil { - return err - } - for _, columnType := range columnTypes { - for _, c := range rawColumnTypes { - if c.Name() == columnType.Name() { - columnType.(*migrator.ColumnType).SQLColumnType = c - break - } - } - } - rows.Close() - } - - // check primary, unique field - { - columnTypeRows, err := m.DB.Raw("SELECT c.column_name, constraint_type FROM information_schema.table_constraints tc JOIN information_schema.constraint_column_usage AS ccu USING (constraint_schema, constraint_name) JOIN information_schema.columns AS c ON c.table_schema = tc.constraint_schema AND tc.table_name = c.table_name AND ccu.column_name = c.column_name WHERE constraint_type IN ('PRIMARY KEY', 'UNIQUE') AND c.table_catalog = ? AND c.table_schema = ? AND c.table_name = ?", currentDatabase, currentSchema, table).Rows() - if err != nil { - return err - } - - for columnTypeRows.Next() { - var name, columnType string - columnTypeRows.Scan(&name, &columnType) - for _, c := range columnTypes { - mc := c.(*migrator.ColumnType) - if mc.NameValue.String == name { - switch columnType { - case "PRIMARY KEY": - mc.PrimaryKeyValue = sql.NullBool{Bool: true, Valid: true} - case "UNIQUE": - mc.UniqueValue = sql.NullBool{Bool: true, Valid: true} - } - break - } - } - } - columnTypeRows.Close() - } - - // check column type - { - dataTypeRows, err := m.DB.Raw(`SELECT a.attname as column_name, format_type(a.atttypid, a.atttypmod) AS data_type - FROM pg_attribute a JOIN pg_class b ON a.attrelid = b.relfilenode AND relnamespace = (SELECT oid FROM pg_catalog.pg_namespace WHERE nspname = ?) 
- WHERE a.attnum > 0 -- hide internal columns - AND NOT a.attisdropped -- hide deleted columns - AND b.relname = ?`, currentSchema, table).Rows() - if err != nil { - return err - } - - for dataTypeRows.Next() { - var name, dataType string - dataTypeRows.Scan(&name, &dataType) - for _, c := range columnTypes { - mc := c.(*migrator.ColumnType) - if mc.NameValue.String == name { - mc.ColumnTypeValue = sql.NullString{String: dataType, Valid: true} - break - } - } - } - dataTypeRows.Close() - } - - return err - }) - return -} - -func (m Migrator) GetRows(currentSchema interface{}, table interface{}) (*sql.Rows, error) { - name := table.(string) - if _, ok := currentSchema.(string); ok { - name = fmt.Sprintf("%v.%v", currentSchema, table) - } - - return m.DB.Session(&gorm.Session{}).Table(name).Limit(1).Scopes(func(d *gorm.DB) *gorm.DB { - // use simple protocol - if !m.DB.PrepareStmt { - d.Statement.Vars = append(d.Statement.Vars, pgx.QuerySimpleProtocol(true)) - } - return d - }).Rows() -} - -func (m Migrator) CurrentSchema(stmt *gorm.Statement, table string) (interface{}, interface{}) { - if strings.Contains(table, ".") { - if tables := strings.Split(table, `.`); len(tables) == 2 { - return tables[0], tables[1] - } - } - - if stmt.TableExpr != nil { - if tables := strings.Split(stmt.TableExpr.SQL, `"."`); len(tables) == 2 { - return strings.TrimPrefix(tables[0], `"`), table - } - } - return clause.Expr{SQL: "CURRENT_SCHEMA()"}, table -} - -func (m Migrator) CreateSequence(tx *gorm.DB, stmt *gorm.Statement, field *schema.Field, - serialDatabaseType string) (err error) { - - _, table := m.CurrentSchema(stmt, stmt.Table) - tableName := table.(string) - - sequenceName := strings.Join([]string{tableName, field.DBName, "seq"}, "_") - if err = tx.Exec(`CREATE SEQUENCE IF NOT EXISTS ? AS ?`, clause.Expr{SQL: sequenceName}, - clause.Expr{SQL: serialDatabaseType}).Error; err != nil { - return err - } - - if err := tx.Exec("ALTER TABLE ? ALTER COLUMN ? SET DEFAULT nextval('?')", - clause.Expr{SQL: tableName}, clause.Expr{SQL: field.DBName}, clause.Expr{SQL: sequenceName}).Error; err != nil { - return err - } - - if err := tx.Exec("ALTER SEQUENCE ? OWNED BY ?.?", - clause.Expr{SQL: sequenceName}, clause.Expr{SQL: tableName}, clause.Expr{SQL: field.DBName}).Error; err != nil { - return err - } - return -} - -func (m Migrator) UpdateSequence(tx *gorm.DB, stmt *gorm.Statement, field *schema.Field, - serialDatabaseType string) (err error) { - - sequenceName, err := m.getColumnSequenceName(tx, stmt, field) - if err != nil { - return err - } - - if err = tx.Exec(`ALTER SEQUENCE IF EXISTS ? AS ?`, clause.Expr{SQL: sequenceName}, clause.Expr{SQL: serialDatabaseType}).Error; err != nil { - return err - } - - if err := tx.Exec("ALTER TABLE ? ALTER COLUMN ? TYPE ?", - m.CurrentTable(stmt), clause.Expr{SQL: field.DBName}, clause.Expr{SQL: serialDatabaseType}).Error; err != nil { - return err - } - return -} - -func (m Migrator) DeleteSequence(tx *gorm.DB, stmt *gorm.Statement, field *schema.Field, - fileType clause.Expr) (err error) { - - sequenceName, err := m.getColumnSequenceName(tx, stmt, field) - if err != nil { - return err - } - - if err := tx.Exec("ALTER TABLE ? ALTER COLUMN ? TYPE ?", m.CurrentTable(stmt), clause.Column{Name: field.DBName}, fileType).Error; err != nil { - return err - } - - if err := tx.Exec("ALTER TABLE ? ALTER COLUMN ? 
DROP DEFAULT", - m.CurrentTable(stmt), clause.Expr{SQL: field.DBName}).Error; err != nil { - return err - } - - if err = tx.Exec(`DROP SEQUENCE IF EXISTS ?`, clause.Expr{SQL: sequenceName}).Error; err != nil { - return err - } - - return -} - -func (m Migrator) getColumnSequenceName(tx *gorm.DB, stmt *gorm.Statement, field *schema.Field) ( - sequenceName string, err error) { - _, table := m.CurrentSchema(stmt, stmt.Table) - - // DefaultValueValue is reset by ColumnTypes, search again. - var columnDefault string - err = tx.Raw( - `SELECT column_default FROM information_schema.columns WHERE table_name = ? AND column_name = ?`, - table, field.DBName).Scan(&columnDefault).Error - - if err != nil { - return - } - - sequenceName = strings.TrimSuffix( - strings.TrimPrefix(columnDefault, `nextval('`), - `'::regclass)`, - ) - return -} diff --git a/vendor/gorm.io/driver/postgres/postgres.go b/vendor/gorm.io/driver/postgres/postgres.go index c0fdea06..e69de29b 100644 --- a/vendor/gorm.io/driver/postgres/postgres.go +++ b/vendor/gorm.io/driver/postgres/postgres.go @@ -1,221 +0,0 @@ -package postgres - -import ( - "database/sql" - "fmt" - "regexp" - "strconv" - - "github.com/jackc/pgx/v4" - "github.com/jackc/pgx/v4/stdlib" - "gorm.io/gorm" - "gorm.io/gorm/callbacks" - "gorm.io/gorm/clause" - "gorm.io/gorm/logger" - "gorm.io/gorm/migrator" - "gorm.io/gorm/schema" -) - -type Dialector struct { - *Config -} - -type Config struct { - DriverName string - DSN string - PreferSimpleProtocol bool - WithoutReturning bool - Conn gorm.ConnPool -} - -func Open(dsn string) gorm.Dialector { - return &Dialector{&Config{DSN: dsn}} -} - -func New(config Config) gorm.Dialector { - return &Dialector{Config: &config} -} - -func (dialector Dialector) Name() string { - return "postgres" -} - -var timeZoneMatcher = regexp.MustCompile("(time_zone|TimeZone)=(.*?)($|&| )") - -func (dialector Dialector) Initialize(db *gorm.DB) (err error) { - // register callbacks - callbacks.RegisterDefaultCallbacks(db, &callbacks.Config{ - CreateClauses: []string{"INSERT", "VALUES", "ON CONFLICT", "RETURNING"}, - UpdateClauses: []string{"UPDATE", "SET", "WHERE", "RETURNING"}, - DeleteClauses: []string{"DELETE", "FROM", "WHERE", "RETURNING"}, - }) - - if dialector.Conn != nil { - db.ConnPool = dialector.Conn - } else if dialector.DriverName != "" { - db.ConnPool, err = sql.Open(dialector.DriverName, dialector.Config.DSN) - } else { - var config *pgx.ConnConfig - - config, err = pgx.ParseConfig(dialector.Config.DSN) - if err != nil { - return - } - if dialector.Config.PreferSimpleProtocol { - config.PreferSimpleProtocol = true - } - result := timeZoneMatcher.FindStringSubmatch(dialector.Config.DSN) - if len(result) > 2 { - config.RuntimeParams["timezone"] = result[2] - } - db.ConnPool = stdlib.OpenDB(*config) - } - return -} - -func (dialector Dialector) Migrator(db *gorm.DB) gorm.Migrator { - return Migrator{migrator.Migrator{Config: migrator.Config{ - DB: db, - Dialector: dialector, - CreateIndexAfterCreateTable: true, - }}} -} - -func (dialector Dialector) DefaultValueOf(field *schema.Field) clause.Expression { - return clause.Expr{SQL: "DEFAULT"} -} - -func (dialector Dialector) BindVarTo(writer clause.Writer, stmt *gorm.Statement, v interface{}) { - writer.WriteByte('$') - writer.WriteString(strconv.Itoa(len(stmt.Vars))) -} - -func (dialector Dialector) QuoteTo(writer clause.Writer, str string) { - var ( - underQuoted, selfQuoted bool - continuousBacktick int8 - shiftDelimiter int8 - ) - - for _, v := range []byte(str) { - switch v { - case 
'"': - continuousBacktick++ - if continuousBacktick == 2 { - writer.WriteString(`""`) - continuousBacktick = 0 - } - case '.': - if continuousBacktick > 0 || !selfQuoted { - shiftDelimiter = 0 - underQuoted = false - continuousBacktick = 0 - writer.WriteByte('"') - } - writer.WriteByte(v) - continue - default: - if shiftDelimiter-continuousBacktick <= 0 && !underQuoted { - writer.WriteByte('"') - underQuoted = true - if selfQuoted = continuousBacktick > 0; selfQuoted { - continuousBacktick -= 1 - } - } - - for ; continuousBacktick > 0; continuousBacktick -= 1 { - writer.WriteString(`""`) - } - - writer.WriteByte(v) - } - shiftDelimiter++ - } - - if continuousBacktick > 0 && !selfQuoted { - writer.WriteString(`""`) - } - writer.WriteByte('"') -} - -var numericPlaceholder = regexp.MustCompile(`\$(\d+)`) - -func (dialector Dialector) Explain(sql string, vars ...interface{}) string { - return logger.ExplainSQL(sql, numericPlaceholder, `'`, vars...) -} - -func (dialector Dialector) DataTypeOf(field *schema.Field) string { - switch field.DataType { - case schema.Bool: - return "boolean" - case schema.Int, schema.Uint: - size := field.Size - if field.DataType == schema.Uint { - size++ - } - if field.AutoIncrement { - switch { - case size <= 16: - return "smallserial" - case size <= 32: - return "serial" - default: - return "bigserial" - } - } else { - switch { - case size <= 16: - return "smallint" - case size <= 32: - return "integer" - default: - return "bigint" - } - } - case schema.Float: - if field.Precision > 0 { - if field.Scale > 0 { - return fmt.Sprintf("numeric(%d, %d)", field.Precision, field.Scale) - } - return fmt.Sprintf("numeric(%d)", field.Precision) - } - return "decimal" - case schema.String: - if field.Size > 0 { - return fmt.Sprintf("varchar(%d)", field.Size) - } - return "text" - case schema.Time: - if field.Precision > 0 { - return fmt.Sprintf("timestamptz(%d)", field.Precision) - } - return "timestamptz" - case schema.Bytes: - return "bytea" - } - - return string(field.DataType) -} - -func (dialectopr Dialector) SavePoint(tx *gorm.DB, name string) error { - tx.Exec("SAVEPOINT " + name) - return nil -} - -func (dialectopr Dialector) RollbackTo(tx *gorm.DB, name string) error { - tx.Exec("ROLLBACK TO SAVEPOINT " + name) - return nil -} - -func getSerialDatabaseType(s string) (dbType string, ok bool) { - switch s { - case "smallserial": - return "smallint", true - case "serial": - return "integer", true - case "bigserial": - return "bigint", true - default: - return "", false - } -} diff --git a/vendor/gorm.io/gorm/.gitignore b/vendor/gorm.io/gorm/.gitignore index 45505cc9..e69de29b 100644 --- a/vendor/gorm.io/gorm/.gitignore +++ b/vendor/gorm.io/gorm/.gitignore @@ -1,6 +0,0 @@ -TODO* -documents -coverage.txt -_book -.idea -vendor \ No newline at end of file diff --git a/vendor/gorm.io/gorm/.golangci.yml b/vendor/gorm.io/gorm/.golangci.yml index 16903ed6..e69de29b 100644 --- a/vendor/gorm.io/gorm/.golangci.yml +++ b/vendor/gorm.io/gorm/.golangci.yml @@ -1,11 +0,0 @@ -linters: - enable: - - cyclop - - exportloopref - - gocritic - - gosec - - ineffassign - - misspell - - prealloc - - unconvert - - unparam diff --git a/vendor/gorm.io/gorm/License b/vendor/gorm.io/gorm/License index 037e1653..e69de29b 100644 --- a/vendor/gorm.io/gorm/License +++ b/vendor/gorm.io/gorm/License @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2013-NOW Jinzhu - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files 
(the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/gorm.io/gorm/README.md b/vendor/gorm.io/gorm/README.md index 312a3a59..e69de29b 100644 --- a/vendor/gorm.io/gorm/README.md +++ b/vendor/gorm.io/gorm/README.md @@ -1,43 +0,0 @@ -# GORM - -The fantastic ORM library for Golang, aims to be developer friendly. - -[![go report card](https://goreportcard.com/badge/github.com/go-gorm/gorm "go report card")](https://goreportcard.com/report/github.com/go-gorm/gorm) -[![test status](https://github.com/go-gorm/gorm/workflows/tests/badge.svg?branch=master "test status")](https://github.com/go-gorm/gorm/actions) -[![Join the chat at https://gitter.im/jinzhu/gorm](https://img.shields.io/gitter/room/jinzhu/gorm.svg)](https://gitter.im/jinzhu/gorm?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -[![Open Collective Backer](https://opencollective.com/gorm/tiers/backer/badge.svg?label=backer&color=brightgreen "Open Collective Backer")](https://opencollective.com/gorm) -[![Open Collective Sponsor](https://opencollective.com/gorm/tiers/sponsor/badge.svg?label=sponsor&color=brightgreen "Open Collective Sponsor")](https://opencollective.com/gorm) -[![MIT license](https://img.shields.io/badge/license-MIT-brightgreen.svg)](https://opensource.org/licenses/MIT) -[![Go.Dev reference](https://img.shields.io/badge/go.dev-reference-blue?logo=go&logoColor=white)](https://pkg.go.dev/gorm.io/gorm?tab=doc) - -## Overview - -* Full-Featured ORM -* Associations (Has One, Has Many, Belongs To, Many To Many, Polymorphism, Single-table inheritance) -* Hooks (Before/After Create/Save/Update/Delete/Find) -* Eager loading with `Preload`, `Joins` -* Transactions, Nested Transactions, Save Point, RollbackTo to Saved Point -* Context, Prepared Statement Mode, DryRun Mode -* Batch Insert, FindInBatches, Find To Map -* SQL Builder, Upsert, Locking, Optimizer/Index/Comment Hints, NamedArg, Search/Update/Create with SQL Expr -* Composite Primary Key -* Auto Migrations -* Logger -* Extendable, flexible plugin API: Database Resolver (Multiple Databases, Read/Write Splitting) / Prometheus… -* Every feature comes with tests -* Developer Friendly - -## Getting Started - -* GORM Guides [https://gorm.io](https://gorm.io) -* GORM Gen [gorm/gen](https://github.com/go-gorm/gen#gormgen) - -## Contributing - -[You can help to deliver a better GORM, check out things you can do](https://gorm.io/contribute.html) - -## License - -© Jinzhu, 2013~time.Now - -Released under the [MIT License](https://github.com/go-gorm/gorm/blob/master/License) diff --git a/vendor/gorm.io/gorm/association.go b/vendor/gorm.io/gorm/association.go index 35e10ddd..e69de29b 100644 --- 
a/vendor/gorm.io/gorm/association.go +++ b/vendor/gorm.io/gorm/association.go @@ -1,522 +0,0 @@ -package gorm - -import ( - "fmt" - "reflect" - "strings" - - "gorm.io/gorm/clause" - "gorm.io/gorm/schema" - "gorm.io/gorm/utils" -) - -// Association Mode contains some helper methods to handle relationship things easily. -type Association struct { - DB *DB - Relationship *schema.Relationship - Error error -} - -func (db *DB) Association(column string) *Association { - association := &Association{DB: db} - table := db.Statement.Table - - if err := db.Statement.Parse(db.Statement.Model); err == nil { - db.Statement.Table = table - association.Relationship = db.Statement.Schema.Relationships.Relations[column] - - if association.Relationship == nil { - association.Error = fmt.Errorf("%w: %s", ErrUnsupportedRelation, column) - } - - db.Statement.ReflectValue = reflect.ValueOf(db.Statement.Model) - for db.Statement.ReflectValue.Kind() == reflect.Ptr { - db.Statement.ReflectValue = db.Statement.ReflectValue.Elem() - } - } else { - association.Error = err - } - - return association -} - -func (association *Association) Find(out interface{}, conds ...interface{}) error { - if association.Error == nil { - association.Error = association.buildCondition().Find(out, conds...).Error - } - return association.Error -} - -func (association *Association) Append(values ...interface{}) error { - if association.Error == nil { - switch association.Relationship.Type { - case schema.HasOne, schema.BelongsTo: - if len(values) > 0 { - association.Error = association.Replace(values...) - } - default: - association.saveAssociation( /*clear*/ false, values...) - } - } - - return association.Error -} - -func (association *Association) Replace(values ...interface{}) error { - if association.Error == nil { - // save associations - if association.saveAssociation( /*clear*/ true, values...); association.Error != nil { - return association.Error - } - - // set old associations's foreign key to null - reflectValue := association.DB.Statement.ReflectValue - rel := association.Relationship - switch rel.Type { - case schema.BelongsTo: - if len(values) == 0 { - updateMap := map[string]interface{}{} - switch reflectValue.Kind() { - case reflect.Slice, reflect.Array: - for i := 0; i < reflectValue.Len(); i++ { - association.Error = rel.Field.Set(association.DB.Statement.Context, reflectValue.Index(i), reflect.Zero(rel.Field.FieldType).Interface()) - } - case reflect.Struct: - association.Error = rel.Field.Set(association.DB.Statement.Context, reflectValue, reflect.Zero(rel.Field.FieldType).Interface()) - } - - for _, ref := range rel.References { - updateMap[ref.ForeignKey.DBName] = nil - } - - association.Error = association.DB.UpdateColumns(updateMap).Error - } - case schema.HasOne, schema.HasMany: - var ( - primaryFields []*schema.Field - foreignKeys []string - updateMap = map[string]interface{}{} - relValues = schema.GetRelationsValues(association.DB.Statement.Context, reflectValue, []*schema.Relationship{rel}) - modelValue = reflect.New(rel.FieldSchema.ModelType).Interface() - tx = association.DB.Model(modelValue) - ) - - if _, rvs := schema.GetIdentityFieldValuesMap(association.DB.Statement.Context, relValues, rel.FieldSchema.PrimaryFields); len(rvs) > 0 { - if column, values := schema.ToQueryValues(rel.FieldSchema.Table, rel.FieldSchema.PrimaryFieldDBNames, rvs); len(values) > 0 { - tx.Not(clause.IN{Column: column, Values: values}) - } - } - - for _, ref := range rel.References { - if ref.OwnPrimaryKey { - primaryFields = 
append(primaryFields, ref.PrimaryKey) - foreignKeys = append(foreignKeys, ref.ForeignKey.DBName) - updateMap[ref.ForeignKey.DBName] = nil - } else if ref.PrimaryValue != "" { - tx.Where(clause.Eq{Column: ref.ForeignKey.DBName, Value: ref.PrimaryValue}) - } - } - - if _, pvs := schema.GetIdentityFieldValuesMap(association.DB.Statement.Context, reflectValue, primaryFields); len(pvs) > 0 { - column, values := schema.ToQueryValues(rel.FieldSchema.Table, foreignKeys, pvs) - association.Error = tx.Where(clause.IN{Column: column, Values: values}).UpdateColumns(updateMap).Error - } - case schema.Many2Many: - var ( - primaryFields, relPrimaryFields []*schema.Field - joinPrimaryKeys, joinRelPrimaryKeys []string - modelValue = reflect.New(rel.JoinTable.ModelType).Interface() - tx = association.DB.Model(modelValue) - ) - - for _, ref := range rel.References { - if ref.PrimaryValue == "" { - if ref.OwnPrimaryKey { - primaryFields = append(primaryFields, ref.PrimaryKey) - joinPrimaryKeys = append(joinPrimaryKeys, ref.ForeignKey.DBName) - } else { - relPrimaryFields = append(relPrimaryFields, ref.PrimaryKey) - joinRelPrimaryKeys = append(joinRelPrimaryKeys, ref.ForeignKey.DBName) - } - } else { - tx.Clauses(clause.Eq{Column: ref.ForeignKey.DBName, Value: ref.PrimaryValue}) - } - } - - _, pvs := schema.GetIdentityFieldValuesMap(association.DB.Statement.Context, reflectValue, primaryFields) - if column, values := schema.ToQueryValues(rel.JoinTable.Table, joinPrimaryKeys, pvs); len(values) > 0 { - tx.Where(clause.IN{Column: column, Values: values}) - } else { - return ErrPrimaryKeyRequired - } - - _, rvs := schema.GetIdentityFieldValuesMapFromValues(association.DB.Statement.Context, values, relPrimaryFields) - if relColumn, relValues := schema.ToQueryValues(rel.JoinTable.Table, joinRelPrimaryKeys, rvs); len(relValues) > 0 { - tx.Where(clause.Not(clause.IN{Column: relColumn, Values: relValues})) - } - - association.Error = tx.Delete(modelValue).Error - } - } - return association.Error -} - -func (association *Association) Delete(values ...interface{}) error { - if association.Error == nil { - var ( - reflectValue = association.DB.Statement.ReflectValue - rel = association.Relationship - primaryFields []*schema.Field - foreignKeys []string - updateAttrs = map[string]interface{}{} - conds []clause.Expression - ) - - for _, ref := range rel.References { - if ref.PrimaryValue == "" { - primaryFields = append(primaryFields, ref.PrimaryKey) - foreignKeys = append(foreignKeys, ref.ForeignKey.DBName) - updateAttrs[ref.ForeignKey.DBName] = nil - } else { - conds = append(conds, clause.Eq{Column: ref.ForeignKey.DBName, Value: ref.PrimaryValue}) - } - } - - switch rel.Type { - case schema.BelongsTo: - tx := association.DB.Model(reflect.New(rel.Schema.ModelType).Interface()) - - _, pvs := schema.GetIdentityFieldValuesMap(association.DB.Statement.Context, reflectValue, rel.Schema.PrimaryFields) - if pcolumn, pvalues := schema.ToQueryValues(rel.Schema.Table, rel.Schema.PrimaryFieldDBNames, pvs); len(pvalues) > 0 { - conds = append(conds, clause.IN{Column: pcolumn, Values: pvalues}) - } else { - return ErrPrimaryKeyRequired - } - - _, rvs := schema.GetIdentityFieldValuesMapFromValues(association.DB.Statement.Context, values, primaryFields) - relColumn, relValues := schema.ToQueryValues(rel.Schema.Table, foreignKeys, rvs) - conds = append(conds, clause.IN{Column: relColumn, Values: relValues}) - - association.Error = tx.Clauses(conds...).UpdateColumns(updateAttrs).Error - case schema.HasOne, schema.HasMany: - tx := 
association.DB.Model(reflect.New(rel.FieldSchema.ModelType).Interface()) - - _, pvs := schema.GetIdentityFieldValuesMap(association.DB.Statement.Context, reflectValue, primaryFields) - if pcolumn, pvalues := schema.ToQueryValues(rel.FieldSchema.Table, foreignKeys, pvs); len(pvalues) > 0 { - conds = append(conds, clause.IN{Column: pcolumn, Values: pvalues}) - } else { - return ErrPrimaryKeyRequired - } - - _, rvs := schema.GetIdentityFieldValuesMapFromValues(association.DB.Statement.Context, values, rel.FieldSchema.PrimaryFields) - relColumn, relValues := schema.ToQueryValues(rel.FieldSchema.Table, rel.FieldSchema.PrimaryFieldDBNames, rvs) - conds = append(conds, clause.IN{Column: relColumn, Values: relValues}) - - association.Error = tx.Clauses(conds...).UpdateColumns(updateAttrs).Error - case schema.Many2Many: - var ( - primaryFields, relPrimaryFields []*schema.Field - joinPrimaryKeys, joinRelPrimaryKeys []string - joinValue = reflect.New(rel.JoinTable.ModelType).Interface() - ) - - for _, ref := range rel.References { - if ref.PrimaryValue == "" { - if ref.OwnPrimaryKey { - primaryFields = append(primaryFields, ref.PrimaryKey) - joinPrimaryKeys = append(joinPrimaryKeys, ref.ForeignKey.DBName) - } else { - relPrimaryFields = append(relPrimaryFields, ref.PrimaryKey) - joinRelPrimaryKeys = append(joinRelPrimaryKeys, ref.ForeignKey.DBName) - } - } else { - conds = append(conds, clause.Eq{Column: ref.ForeignKey.DBName, Value: ref.PrimaryValue}) - } - } - - _, pvs := schema.GetIdentityFieldValuesMap(association.DB.Statement.Context, reflectValue, primaryFields) - if pcolumn, pvalues := schema.ToQueryValues(rel.JoinTable.Table, joinPrimaryKeys, pvs); len(pvalues) > 0 { - conds = append(conds, clause.IN{Column: pcolumn, Values: pvalues}) - } else { - return ErrPrimaryKeyRequired - } - - _, rvs := schema.GetIdentityFieldValuesMapFromValues(association.DB.Statement.Context, values, relPrimaryFields) - relColumn, relValues := schema.ToQueryValues(rel.JoinTable.Table, joinRelPrimaryKeys, rvs) - conds = append(conds, clause.IN{Column: relColumn, Values: relValues}) - - association.Error = association.DB.Where(clause.Where{Exprs: conds}).Model(nil).Delete(joinValue).Error - } - - if association.Error == nil { - // clean up deleted values's foreign key - relValuesMap, _ := schema.GetIdentityFieldValuesMapFromValues(association.DB.Statement.Context, values, rel.FieldSchema.PrimaryFields) - - cleanUpDeletedRelations := func(data reflect.Value) { - if _, zero := rel.Field.ValueOf(association.DB.Statement.Context, data); !zero { - fieldValue := reflect.Indirect(rel.Field.ReflectValueOf(association.DB.Statement.Context, data)) - primaryValues := make([]interface{}, len(rel.FieldSchema.PrimaryFields)) - - switch fieldValue.Kind() { - case reflect.Slice, reflect.Array: - validFieldValues := reflect.Zero(rel.Field.IndirectFieldType) - for i := 0; i < fieldValue.Len(); i++ { - for idx, field := range rel.FieldSchema.PrimaryFields { - primaryValues[idx], _ = field.ValueOf(association.DB.Statement.Context, fieldValue.Index(i)) - } - - if _, ok := relValuesMap[utils.ToStringKey(primaryValues...)]; !ok { - validFieldValues = reflect.Append(validFieldValues, fieldValue.Index(i)) - } - } - - association.Error = rel.Field.Set(association.DB.Statement.Context, data, validFieldValues.Interface()) - case reflect.Struct: - for idx, field := range rel.FieldSchema.PrimaryFields { - primaryValues[idx], _ = field.ValueOf(association.DB.Statement.Context, fieldValue) - } - - if _, ok := 
relValuesMap[utils.ToStringKey(primaryValues...)]; ok { - if association.Error = rel.Field.Set(association.DB.Statement.Context, data, reflect.Zero(rel.FieldSchema.ModelType).Interface()); association.Error != nil { - break - } - - if rel.JoinTable == nil { - for _, ref := range rel.References { - if ref.OwnPrimaryKey || ref.PrimaryValue != "" { - association.Error = ref.ForeignKey.Set(association.DB.Statement.Context, fieldValue, reflect.Zero(ref.ForeignKey.FieldType).Interface()) - } else { - association.Error = ref.ForeignKey.Set(association.DB.Statement.Context, data, reflect.Zero(ref.ForeignKey.FieldType).Interface()) - } - } - } - } - } - } - } - - switch reflectValue.Kind() { - case reflect.Slice, reflect.Array: - for i := 0; i < reflectValue.Len(); i++ { - cleanUpDeletedRelations(reflect.Indirect(reflectValue.Index(i))) - } - case reflect.Struct: - cleanUpDeletedRelations(reflectValue) - } - } - } - - return association.Error -} - -func (association *Association) Clear() error { - return association.Replace() -} - -func (association *Association) Count() (count int64) { - if association.Error == nil { - association.Error = association.buildCondition().Count(&count).Error - } - return -} - -type assignBack struct { - Source reflect.Value - Index int - Dest reflect.Value -} - -func (association *Association) saveAssociation(clear bool, values ...interface{}) { - var ( - reflectValue = association.DB.Statement.ReflectValue - assignBacks []assignBack // assign association values back to arguments after save - ) - - appendToRelations := func(source, rv reflect.Value, clear bool) { - switch association.Relationship.Type { - case schema.HasOne, schema.BelongsTo: - switch rv.Kind() { - case reflect.Slice, reflect.Array: - if rv.Len() > 0 { - association.Error = association.Relationship.Field.Set(association.DB.Statement.Context, source, rv.Index(0).Addr().Interface()) - - if association.Relationship.Field.FieldType.Kind() == reflect.Struct { - assignBacks = append(assignBacks, assignBack{Source: source, Dest: rv.Index(0)}) - } - } - case reflect.Struct: - association.Error = association.Relationship.Field.Set(association.DB.Statement.Context, source, rv.Addr().Interface()) - - if association.Relationship.Field.FieldType.Kind() == reflect.Struct { - assignBacks = append(assignBacks, assignBack{Source: source, Dest: rv}) - } - } - case schema.HasMany, schema.Many2Many: - elemType := association.Relationship.Field.IndirectFieldType.Elem() - fieldValue := reflect.Indirect(association.Relationship.Field.ReflectValueOf(association.DB.Statement.Context, source)) - if clear { - fieldValue = reflect.New(association.Relationship.Field.IndirectFieldType).Elem() - } - - appendToFieldValues := func(ev reflect.Value) { - if ev.Type().AssignableTo(elemType) { - fieldValue = reflect.Append(fieldValue, ev) - } else if ev.Type().Elem().AssignableTo(elemType) { - fieldValue = reflect.Append(fieldValue, ev.Elem()) - } else { - association.Error = fmt.Errorf("unsupported data type: %v for relation %s", ev.Type(), association.Relationship.Name) - } - - if elemType.Kind() == reflect.Struct { - assignBacks = append(assignBacks, assignBack{Source: source, Dest: ev, Index: fieldValue.Len()}) - } - } - - switch rv.Kind() { - case reflect.Slice, reflect.Array: - for i := 0; i < rv.Len(); i++ { - appendToFieldValues(reflect.Indirect(rv.Index(i)).Addr()) - } - case reflect.Struct: - appendToFieldValues(rv.Addr()) - } - - if association.Error == nil { - association.Error = 
association.Relationship.Field.Set(association.DB.Statement.Context, source, fieldValue.Interface()) - } - } - } - - selectedSaveColumns := []string{association.Relationship.Name} - omitColumns := []string{} - selectColumns, _ := association.DB.Statement.SelectAndOmitColumns(true, false) - for name, ok := range selectColumns { - columnName := "" - if strings.HasPrefix(name, association.Relationship.Name) { - if columnName = strings.TrimPrefix(name, association.Relationship.Name); columnName == ".*" { - columnName = name - } - } else if strings.HasPrefix(name, clause.Associations) { - columnName = name - } - - if columnName != "" { - if ok { - selectedSaveColumns = append(selectedSaveColumns, columnName) - } else { - omitColumns = append(omitColumns, columnName) - } - } - } - - for _, ref := range association.Relationship.References { - if !ref.OwnPrimaryKey { - selectedSaveColumns = append(selectedSaveColumns, ref.ForeignKey.Name) - } - } - - associationDB := association.DB.Session(&Session{}).Model(nil) - if !association.DB.FullSaveAssociations { - associationDB.Select(selectedSaveColumns) - } - if len(omitColumns) > 0 { - associationDB.Omit(omitColumns...) - } - associationDB = associationDB.Session(&Session{}) - - switch reflectValue.Kind() { - case reflect.Slice, reflect.Array: - if len(values) != reflectValue.Len() { - // clear old data - if clear && len(values) == 0 { - for i := 0; i < reflectValue.Len(); i++ { - if err := association.Relationship.Field.Set(association.DB.Statement.Context, reflectValue.Index(i), reflect.New(association.Relationship.Field.IndirectFieldType).Interface()); err != nil { - association.Error = err - break - } - - if association.Relationship.JoinTable == nil { - for _, ref := range association.Relationship.References { - if !ref.OwnPrimaryKey && ref.PrimaryValue == "" { - if err := ref.ForeignKey.Set(association.DB.Statement.Context, reflectValue.Index(i), reflect.Zero(ref.ForeignKey.FieldType).Interface()); err != nil { - association.Error = err - break - } - } - } - } - } - break - } - - association.Error = ErrInvalidValueOfLength - return - } - - for i := 0; i < reflectValue.Len(); i++ { - appendToRelations(reflectValue.Index(i), reflect.Indirect(reflect.ValueOf(values[i])), clear) - - // TODO support save slice data, sql with case? 
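As a reference point for the association internals in the removed vendored gorm sources above, a minimal sketch of the public Association API they implement; the User/Language models, table name, and DSN below are illustrative assumptions and not part of this library:

package main

import (
	"gorm.io/driver/postgres"
	"gorm.io/gorm"
)

// Illustrative models only.
type Language struct {
	ID   uint
	Name string
}

type User struct {
	ID        uint
	Name      string
	Languages []Language `gorm:"many2many:user_languages"`
}

func main() {
	// Placeholder DSN.
	db, err := gorm.Open(postgres.Open("host=localhost user=postgres dbname=demo sslmode=disable"), &gorm.Config{})
	if err != nil {
		panic(err)
	}
	if err := db.AutoMigrate(&User{}, &Language{}); err != nil {
		panic(err)
	}

	user := User{Name: "demo"}
	db.Create(&user)

	// Association mode: Append adds join rows, Replace swaps the full set,
	// Count reads the current size.
	_ = db.Model(&user).Association("Languages").Append(&Language{Name: "Go"})
	_ = db.Model(&user).Association("Languages").Replace(&Language{Name: "SQL"})
	total := db.Model(&user).Association("Languages").Count()
	_ = total
}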
- association.Error = associationDB.Updates(reflectValue.Index(i).Addr().Interface()).Error - } - case reflect.Struct: - // clear old data - if clear && len(values) == 0 { - association.Error = association.Relationship.Field.Set(association.DB.Statement.Context, reflectValue, reflect.New(association.Relationship.Field.IndirectFieldType).Interface()) - - if association.Relationship.JoinTable == nil && association.Error == nil { - for _, ref := range association.Relationship.References { - if !ref.OwnPrimaryKey && ref.PrimaryValue == "" { - association.Error = ref.ForeignKey.Set(association.DB.Statement.Context, reflectValue, reflect.Zero(ref.ForeignKey.FieldType).Interface()) - } - } - } - } - - for idx, value := range values { - rv := reflect.Indirect(reflect.ValueOf(value)) - appendToRelations(reflectValue, rv, clear && idx == 0) - } - - if len(values) > 0 { - association.Error = associationDB.Updates(reflectValue.Addr().Interface()).Error - } - } - - for _, assignBack := range assignBacks { - fieldValue := reflect.Indirect(association.Relationship.Field.ReflectValueOf(association.DB.Statement.Context, assignBack.Source)) - if assignBack.Index > 0 { - reflect.Indirect(assignBack.Dest).Set(fieldValue.Index(assignBack.Index - 1)) - } else { - reflect.Indirect(assignBack.Dest).Set(fieldValue) - } - } -} - -func (association *Association) buildCondition() *DB { - var ( - queryConds = association.Relationship.ToQueryConditions(association.DB.Statement.Context, association.DB.Statement.ReflectValue) - modelValue = reflect.New(association.Relationship.FieldSchema.ModelType).Interface() - tx = association.DB.Model(modelValue) - ) - - if association.Relationship.JoinTable != nil { - if !tx.Statement.Unscoped && len(association.Relationship.JoinTable.QueryClauses) > 0 { - joinStmt := Statement{DB: tx, Context: tx.Statement.Context, Schema: association.Relationship.JoinTable, Table: association.Relationship.JoinTable.Table, Clauses: map[string]clause.Clause{}} - for _, queryClause := range association.Relationship.JoinTable.QueryClauses { - joinStmt.AddClause(queryClause) - } - joinStmt.Build("WHERE") - tx.Clauses(clause.Expr{SQL: strings.Replace(joinStmt.SQL.String(), "WHERE ", "", 1), Vars: joinStmt.Vars}) - } - - tx = tx.Session(&Session{QueryFields: true}).Clauses(clause.From{Joins: []clause.Join{{ - Table: clause.Table{Name: association.Relationship.JoinTable.Table}, - ON: clause.Where{Exprs: queryConds}, - }}}) - } else { - tx.Clauses(clause.Where{Exprs: queryConds}) - } - - return tx -} diff --git a/vendor/gorm.io/gorm/callbacks/associations.go b/vendor/gorm.io/gorm/callbacks/associations.go index fd3141cf..e69de29b 100644 --- a/vendor/gorm.io/gorm/callbacks/associations.go +++ b/vendor/gorm.io/gorm/callbacks/associations.go @@ -1,419 +0,0 @@ -package callbacks - -import ( - "reflect" - "strings" - - "gorm.io/gorm" - "gorm.io/gorm/clause" - "gorm.io/gorm/schema" - "gorm.io/gorm/utils" -) - -func SaveBeforeAssociations(create bool) func(db *gorm.DB) { - return func(db *gorm.DB) { - if db.Error == nil && db.Statement.Schema != nil { - selectColumns, restricted := db.Statement.SelectAndOmitColumns(create, !create) - - // Save Belongs To associations - for _, rel := range db.Statement.Schema.Relationships.BelongsTo { - if v, ok := selectColumns[rel.Name]; (ok && !v) || (!ok && restricted) { - continue - } - - setupReferences := func(obj reflect.Value, elem reflect.Value) { - for _, ref := range rel.References { - if !ref.OwnPrimaryKey { - pv, _ := ref.PrimaryKey.ValueOf(db.Statement.Context, 
elem) - db.AddError(ref.ForeignKey.Set(db.Statement.Context, obj, pv)) - - if dest, ok := db.Statement.Dest.(map[string]interface{}); ok { - dest[ref.ForeignKey.DBName] = pv - if _, ok := dest[rel.Name]; ok { - dest[rel.Name] = elem.Interface() - } - } - } - } - } - - switch db.Statement.ReflectValue.Kind() { - case reflect.Slice, reflect.Array: - var ( - rValLen = db.Statement.ReflectValue.Len() - objs = make([]reflect.Value, 0, rValLen) - fieldType = rel.Field.FieldType - isPtr = fieldType.Kind() == reflect.Ptr - ) - - if !isPtr { - fieldType = reflect.PtrTo(fieldType) - } - - elems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10) - for i := 0; i < rValLen; i++ { - obj := db.Statement.ReflectValue.Index(i) - if reflect.Indirect(obj).Kind() != reflect.Struct { - break - } - - if _, zero := rel.Field.ValueOf(db.Statement.Context, obj); !zero { // check belongs to relation value - rv := rel.Field.ReflectValueOf(db.Statement.Context, obj) // relation reflect value - objs = append(objs, obj) - if isPtr { - elems = reflect.Append(elems, rv) - } else { - elems = reflect.Append(elems, rv.Addr()) - } - } - } - - if elems.Len() > 0 { - if saveAssociations(db, rel, elems, selectColumns, restricted, nil) == nil { - for i := 0; i < elems.Len(); i++ { - setupReferences(objs[i], elems.Index(i)) - } - } - } - case reflect.Struct: - if _, zero := rel.Field.ValueOf(db.Statement.Context, db.Statement.ReflectValue); !zero { - rv := rel.Field.ReflectValueOf(db.Statement.Context, db.Statement.ReflectValue) // relation reflect value - if rv.Kind() != reflect.Ptr { - rv = rv.Addr() - } - - if saveAssociations(db, rel, rv, selectColumns, restricted, nil) == nil { - setupReferences(db.Statement.ReflectValue, rv) - } - } - } - } - } - } -} - -func SaveAfterAssociations(create bool) func(db *gorm.DB) { - return func(db *gorm.DB) { - if db.Error == nil && db.Statement.Schema != nil { - selectColumns, restricted := db.Statement.SelectAndOmitColumns(create, !create) - - // Save Has One associations - for _, rel := range db.Statement.Schema.Relationships.HasOne { - if v, ok := selectColumns[rel.Name]; (ok && !v) || (!ok && restricted) { - continue - } - - switch db.Statement.ReflectValue.Kind() { - case reflect.Slice, reflect.Array: - var ( - fieldType = rel.Field.FieldType - isPtr = fieldType.Kind() == reflect.Ptr - ) - - if !isPtr { - fieldType = reflect.PtrTo(fieldType) - } - - elems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10) - - for i := 0; i < db.Statement.ReflectValue.Len(); i++ { - obj := db.Statement.ReflectValue.Index(i) - - if reflect.Indirect(obj).Kind() == reflect.Struct { - if _, zero := rel.Field.ValueOf(db.Statement.Context, obj); !zero { - rv := rel.Field.ReflectValueOf(db.Statement.Context, obj) - if rv.Kind() != reflect.Ptr { - rv = rv.Addr() - } - - for _, ref := range rel.References { - if ref.OwnPrimaryKey { - fv, _ := ref.PrimaryKey.ValueOf(db.Statement.Context, obj) - db.AddError(ref.ForeignKey.Set(db.Statement.Context, rv, fv)) - } else if ref.PrimaryValue != "" { - db.AddError(ref.ForeignKey.Set(db.Statement.Context, rv, ref.PrimaryValue)) - } - } - - elems = reflect.Append(elems, rv) - } - } - } - - if elems.Len() > 0 { - assignmentColumns := make([]string, 0, len(rel.References)) - for _, ref := range rel.References { - assignmentColumns = append(assignmentColumns, ref.ForeignKey.DBName) - } - - saveAssociations(db, rel, elems, selectColumns, restricted, assignmentColumns) - } - case reflect.Struct: - if _, zero := rel.Field.ValueOf(db.Statement.Context, 
db.Statement.ReflectValue); !zero { - f := rel.Field.ReflectValueOf(db.Statement.Context, db.Statement.ReflectValue) - if f.Kind() != reflect.Ptr { - f = f.Addr() - } - - assignmentColumns := make([]string, 0, len(rel.References)) - for _, ref := range rel.References { - if ref.OwnPrimaryKey { - fv, _ := ref.PrimaryKey.ValueOf(db.Statement.Context, db.Statement.ReflectValue) - db.AddError(ref.ForeignKey.Set(db.Statement.Context, f, fv)) - } else if ref.PrimaryValue != "" { - db.AddError(ref.ForeignKey.Set(db.Statement.Context, f, ref.PrimaryValue)) - } - assignmentColumns = append(assignmentColumns, ref.ForeignKey.DBName) - } - - saveAssociations(db, rel, f, selectColumns, restricted, assignmentColumns) - } - } - } - - // Save Has Many associations - for _, rel := range db.Statement.Schema.Relationships.HasMany { - if v, ok := selectColumns[rel.Name]; (ok && !v) || (!ok && restricted) { - continue - } - - fieldType := rel.Field.IndirectFieldType.Elem() - isPtr := fieldType.Kind() == reflect.Ptr - if !isPtr { - fieldType = reflect.PtrTo(fieldType) - } - elems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10) - identityMap := map[string]bool{} - appendToElems := func(v reflect.Value) { - if _, zero := rel.Field.ValueOf(db.Statement.Context, v); !zero { - f := reflect.Indirect(rel.Field.ReflectValueOf(db.Statement.Context, v)) - - for i := 0; i < f.Len(); i++ { - elem := f.Index(i) - for _, ref := range rel.References { - if ref.OwnPrimaryKey { - pv, _ := ref.PrimaryKey.ValueOf(db.Statement.Context, v) - db.AddError(ref.ForeignKey.Set(db.Statement.Context, elem, pv)) - } else if ref.PrimaryValue != "" { - db.AddError(ref.ForeignKey.Set(db.Statement.Context, elem, ref.PrimaryValue)) - } - } - - relPrimaryValues := make([]interface{}, 0, len(rel.FieldSchema.PrimaryFields)) - for _, pf := range rel.FieldSchema.PrimaryFields { - if pfv, ok := pf.ValueOf(db.Statement.Context, elem); !ok { - relPrimaryValues = append(relPrimaryValues, pfv) - } - } - - cacheKey := utils.ToStringKey(relPrimaryValues) - if len(relPrimaryValues) != len(rel.FieldSchema.PrimaryFields) || !identityMap[cacheKey] { - identityMap[cacheKey] = true - if isPtr { - elems = reflect.Append(elems, elem) - } else { - elems = reflect.Append(elems, elem.Addr()) - } - } - } - } - } - - switch db.Statement.ReflectValue.Kind() { - case reflect.Slice, reflect.Array: - for i := 0; i < db.Statement.ReflectValue.Len(); i++ { - obj := db.Statement.ReflectValue.Index(i) - if reflect.Indirect(obj).Kind() == reflect.Struct { - appendToElems(obj) - } - } - case reflect.Struct: - appendToElems(db.Statement.ReflectValue) - } - - if elems.Len() > 0 { - assignmentColumns := make([]string, 0, len(rel.References)) - for _, ref := range rel.References { - assignmentColumns = append(assignmentColumns, ref.ForeignKey.DBName) - } - - saveAssociations(db, rel, elems, selectColumns, restricted, assignmentColumns) - } - } - - // Save Many2Many associations - for _, rel := range db.Statement.Schema.Relationships.Many2Many { - if v, ok := selectColumns[rel.Name]; (ok && !v) || (!ok && restricted) { - continue - } - - fieldType := rel.Field.IndirectFieldType.Elem() - isPtr := fieldType.Kind() == reflect.Ptr - if !isPtr { - fieldType = reflect.PtrTo(fieldType) - } - elems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10) - joins := reflect.MakeSlice(reflect.SliceOf(reflect.PtrTo(rel.JoinTable.ModelType)), 0, 10) - objs := []reflect.Value{} - - appendToJoins := func(obj reflect.Value, elem reflect.Value) { - joinValue := 
reflect.New(rel.JoinTable.ModelType) - for _, ref := range rel.References { - if ref.OwnPrimaryKey { - fv, _ := ref.PrimaryKey.ValueOf(db.Statement.Context, obj) - db.AddError(ref.ForeignKey.Set(db.Statement.Context, joinValue, fv)) - } else if ref.PrimaryValue != "" { - db.AddError(ref.ForeignKey.Set(db.Statement.Context, joinValue, ref.PrimaryValue)) - } else { - fv, _ := ref.PrimaryKey.ValueOf(db.Statement.Context, elem) - db.AddError(ref.ForeignKey.Set(db.Statement.Context, joinValue, fv)) - } - } - joins = reflect.Append(joins, joinValue) - } - - appendToElems := func(v reflect.Value) { - if _, zero := rel.Field.ValueOf(db.Statement.Context, v); !zero { - f := reflect.Indirect(rel.Field.ReflectValueOf(db.Statement.Context, v)) - - for i := 0; i < f.Len(); i++ { - elem := f.Index(i) - - objs = append(objs, v) - if isPtr { - elems = reflect.Append(elems, elem) - } else { - elems = reflect.Append(elems, elem.Addr()) - } - } - } - } - - switch db.Statement.ReflectValue.Kind() { - case reflect.Slice, reflect.Array: - for i := 0; i < db.Statement.ReflectValue.Len(); i++ { - obj := db.Statement.ReflectValue.Index(i) - if reflect.Indirect(obj).Kind() == reflect.Struct { - appendToElems(obj) - } - } - case reflect.Struct: - appendToElems(db.Statement.ReflectValue) - } - - // optimize elems of reflect value length - if elemLen := elems.Len(); elemLen > 0 { - if v, ok := selectColumns[rel.Name+".*"]; !ok || v { - saveAssociations(db, rel, elems, selectColumns, restricted, nil) - } - - for i := 0; i < elemLen; i++ { - appendToJoins(objs[i], elems.Index(i)) - } - } - - if joins.Len() > 0 { - db.AddError(db.Session(&gorm.Session{NewDB: true}).Clauses(clause.OnConflict{DoNothing: true}).Session(&gorm.Session{ - SkipHooks: db.Statement.SkipHooks, - DisableNestedTransaction: true, - }).Create(joins.Interface()).Error) - } - } - } - } -} - -func onConflictOption(stmt *gorm.Statement, s *schema.Schema, defaultUpdatingColumns []string) (onConflict clause.OnConflict) { - if len(defaultUpdatingColumns) > 0 || stmt.DB.FullSaveAssociations { - onConflict.Columns = make([]clause.Column, 0, len(s.PrimaryFieldDBNames)) - for _, dbName := range s.PrimaryFieldDBNames { - onConflict.Columns = append(onConflict.Columns, clause.Column{Name: dbName}) - } - - onConflict.UpdateAll = stmt.DB.FullSaveAssociations - if !onConflict.UpdateAll { - onConflict.DoUpdates = clause.AssignmentColumns(defaultUpdatingColumns) - } - } else { - onConflict.DoNothing = true - } - - return -} - -func saveAssociations(db *gorm.DB, rel *schema.Relationship, rValues reflect.Value, selectColumns map[string]bool, restricted bool, defaultUpdatingColumns []string) error { - // stop save association loop - if checkAssociationsSaved(db, rValues) { - return nil - } - - var ( - selects, omits []string - onConflict = onConflictOption(db.Statement, rel.FieldSchema, defaultUpdatingColumns) - refName = rel.Name + "." 
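The saveAssociations logic here honors the statement's Select/Omit settings; a minimal sketch of how that surfaces on the caller's side, reusing the illustrative db/User from the previous sketch (clause is "gorm.io/gorm/clause"):

// Sketch only.
func createUserOnly(db *gorm.DB, user *User) error {
	// clause.Associations omits every association upsert,
	// so only the users row is written.
	return db.Omit(clause.Associations).Create(user).Error
}

func createUserSkippingLanguages(db *gorm.DB, user *User) error {
	// Omitting a single relation name skips just that association.
	return db.Omit("Languages").Create(user).Error
}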
- values = rValues.Interface() - ) - - for name, ok := range selectColumns { - columnName := "" - if strings.HasPrefix(name, refName) { - columnName = strings.TrimPrefix(name, refName) - } - - if columnName != "" { - if ok { - selects = append(selects, columnName) - } else { - omits = append(omits, columnName) - } - } - } - - tx := db.Session(&gorm.Session{NewDB: true}).Clauses(onConflict).Session(&gorm.Session{ - FullSaveAssociations: db.FullSaveAssociations, - SkipHooks: db.Statement.SkipHooks, - DisableNestedTransaction: true, - }) - - db.Statement.Settings.Range(func(k, v interface{}) bool { - tx.Statement.Settings.Store(k, v) - return true - }) - - if tx.Statement.FullSaveAssociations { - tx = tx.Set("gorm:update_track_time", true) - } - - if len(selects) > 0 { - tx = tx.Select(selects) - } else if restricted && len(omits) == 0 { - tx = tx.Omit(clause.Associations) - } - - if len(omits) > 0 { - tx = tx.Omit(omits...) - } - - return db.AddError(tx.Create(values).Error) -} - -// check association values has been saved -// if values kind is Struct, check it has been saved -// if values kind is Slice/Array, check all items have been saved -var visitMapStoreKey = "gorm:saved_association_map" - -func checkAssociationsSaved(db *gorm.DB, values reflect.Value) bool { - if visit, ok := db.Get(visitMapStoreKey); ok { - if v, ok := visit.(*visitMap); ok { - if loadOrStoreVisitMap(v, values) { - return true - } - } - } else { - vistMap := make(visitMap) - loadOrStoreVisitMap(&vistMap, values) - db.Set(visitMapStoreKey, &vistMap) - } - - return false -} diff --git a/vendor/gorm.io/gorm/callbacks/create.go b/vendor/gorm.io/gorm/callbacks/create.go index 0fe1dc93..e69de29b 100644 --- a/vendor/gorm.io/gorm/callbacks/create.go +++ b/vendor/gorm.io/gorm/callbacks/create.go @@ -1,343 +0,0 @@ -package callbacks - -import ( - "fmt" - "reflect" - - "gorm.io/gorm" - "gorm.io/gorm/clause" - "gorm.io/gorm/schema" - "gorm.io/gorm/utils" -) - -// BeforeCreate before create hooks -func BeforeCreate(db *gorm.DB) { - if db.Error == nil && db.Statement.Schema != nil && !db.Statement.SkipHooks && (db.Statement.Schema.BeforeSave || db.Statement.Schema.BeforeCreate) { - callMethod(db, func(value interface{}, tx *gorm.DB) (called bool) { - if db.Statement.Schema.BeforeSave { - if i, ok := value.(BeforeSaveInterface); ok { - called = true - db.AddError(i.BeforeSave(tx)) - } - } - - if db.Statement.Schema.BeforeCreate { - if i, ok := value.(BeforeCreateInterface); ok { - called = true - db.AddError(i.BeforeCreate(tx)) - } - } - return called - }) - } -} - -// Create create hook -func Create(config *Config) func(db *gorm.DB) { - supportReturning := utils.Contains(config.CreateClauses, "RETURNING") - - return func(db *gorm.DB) { - if db.Error != nil { - return - } - - if db.Statement.Schema != nil { - if !db.Statement.Unscoped { - for _, c := range db.Statement.Schema.CreateClauses { - db.Statement.AddClause(c) - } - } - - if supportReturning && len(db.Statement.Schema.FieldsWithDefaultDBValue) > 0 { - if _, ok := db.Statement.Clauses["RETURNING"]; !ok { - fromColumns := make([]clause.Column, 0, len(db.Statement.Schema.FieldsWithDefaultDBValue)) - for _, field := range db.Statement.Schema.FieldsWithDefaultDBValue { - fromColumns = append(fromColumns, clause.Column{Name: field.DBName}) - } - db.Statement.AddClause(clause.Returning{Columns: fromColumns}) - } - } - } - - if db.Statement.SQL.Len() == 0 { - db.Statement.SQL.Grow(180) - db.Statement.AddClauseIfNotExists(clause.Insert{}) - 
db.Statement.AddClause(ConvertToCreateValues(db.Statement)) - - db.Statement.Build(db.Statement.BuildClauses...) - } - - isDryRun := !db.DryRun && db.Error == nil - if !isDryRun { - return - } - - ok, mode := hasReturning(db, supportReturning) - if ok { - if c, ok := db.Statement.Clauses["ON CONFLICT"]; ok { - if onConflict, _ := c.Expression.(clause.OnConflict); onConflict.DoNothing { - mode |= gorm.ScanOnConflictDoNothing - } - } - - rows, err := db.Statement.ConnPool.QueryContext( - db.Statement.Context, db.Statement.SQL.String(), db.Statement.Vars..., - ) - if db.AddError(err) == nil { - defer func() { - db.AddError(rows.Close()) - }() - gorm.Scan(rows, db, mode) - } - - return - } - - result, err := db.Statement.ConnPool.ExecContext( - db.Statement.Context, db.Statement.SQL.String(), db.Statement.Vars..., - ) - if err != nil { - db.AddError(err) - return - } - - db.RowsAffected, _ = result.RowsAffected() - if db.RowsAffected != 0 && db.Statement.Schema != nil && - db.Statement.Schema.PrioritizedPrimaryField != nil && - db.Statement.Schema.PrioritizedPrimaryField.HasDefaultValue { - insertID, err := result.LastInsertId() - insertOk := err == nil && insertID > 0 - if !insertOk { - db.AddError(err) - return - } - - switch db.Statement.ReflectValue.Kind() { - case reflect.Slice, reflect.Array: - if config.LastInsertIDReversed { - for i := db.Statement.ReflectValue.Len() - 1; i >= 0; i-- { - rv := db.Statement.ReflectValue.Index(i) - if reflect.Indirect(rv).Kind() != reflect.Struct { - break - } - - _, isZero := db.Statement.Schema.PrioritizedPrimaryField.ValueOf(db.Statement.Context, rv) - if isZero { - db.AddError(db.Statement.Schema.PrioritizedPrimaryField.Set(db.Statement.Context, rv, insertID)) - insertID -= db.Statement.Schema.PrioritizedPrimaryField.AutoIncrementIncrement - } - } - } else { - for i := 0; i < db.Statement.ReflectValue.Len(); i++ { - rv := db.Statement.ReflectValue.Index(i) - if reflect.Indirect(rv).Kind() != reflect.Struct { - break - } - - if _, isZero := db.Statement.Schema.PrioritizedPrimaryField.ValueOf(db.Statement.Context, rv); isZero { - db.AddError(db.Statement.Schema.PrioritizedPrimaryField.Set(db.Statement.Context, rv, insertID)) - insertID += db.Statement.Schema.PrioritizedPrimaryField.AutoIncrementIncrement - } - } - } - case reflect.Struct: - _, isZero := db.Statement.Schema.PrioritizedPrimaryField.ValueOf(db.Statement.Context, db.Statement.ReflectValue) - if isZero { - db.AddError(db.Statement.Schema.PrioritizedPrimaryField.Set(db.Statement.Context, db.Statement.ReflectValue, insertID)) - } - } - } - } -} - -// AfterCreate after create hooks -func AfterCreate(db *gorm.DB) { - if db.Error == nil && db.Statement.Schema != nil && !db.Statement.SkipHooks && (db.Statement.Schema.AfterSave || db.Statement.Schema.AfterCreate) { - callMethod(db, func(value interface{}, tx *gorm.DB) (called bool) { - if db.Statement.Schema.AfterCreate { - if i, ok := value.(AfterCreateInterface); ok { - called = true - db.AddError(i.AfterCreate(tx)) - } - } - - if db.Statement.Schema.AfterSave { - if i, ok := value.(AfterSaveInterface); ok { - called = true - db.AddError(i.AfterSave(tx)) - } - } - return called - }) - } -} - -// ConvertToCreateValues convert to create values -func ConvertToCreateValues(stmt *gorm.Statement) (values clause.Values) { - curTime := stmt.DB.NowFunc() - - switch value := stmt.Dest.(type) { - case map[string]interface{}: - values = ConvertMapToValuesForCreate(stmt, value) - case *map[string]interface{}: - values = ConvertMapToValuesForCreate(stmt, 
*value) - case []map[string]interface{}: - values = ConvertSliceOfMapToValuesForCreate(stmt, value) - case *[]map[string]interface{}: - values = ConvertSliceOfMapToValuesForCreate(stmt, *value) - default: - var ( - selectColumns, restricted = stmt.SelectAndOmitColumns(true, false) - _, updateTrackTime = stmt.Get("gorm:update_track_time") - isZero bool - ) - stmt.Settings.Delete("gorm:update_track_time") - - values = clause.Values{Columns: make([]clause.Column, 0, len(stmt.Schema.DBNames))} - - for _, db := range stmt.Schema.DBNames { - if field := stmt.Schema.FieldsByDBName[db]; !field.HasDefaultValue || field.DefaultValueInterface != nil { - if v, ok := selectColumns[db]; (ok && v) || (!ok && (!restricted || field.AutoCreateTime > 0 || field.AutoUpdateTime > 0)) { - values.Columns = append(values.Columns, clause.Column{Name: db}) - } - } - } - - switch stmt.ReflectValue.Kind() { - case reflect.Slice, reflect.Array: - rValLen := stmt.ReflectValue.Len() - if rValLen == 0 { - stmt.AddError(gorm.ErrEmptySlice) - return - } - - stmt.SQL.Grow(rValLen * 18) - stmt.Vars = make([]interface{}, 0, rValLen*len(values.Columns)) - values.Values = make([][]interface{}, rValLen) - - defaultValueFieldsHavingValue := map[*schema.Field][]interface{}{} - for i := 0; i < rValLen; i++ { - rv := reflect.Indirect(stmt.ReflectValue.Index(i)) - if !rv.IsValid() { - stmt.AddError(fmt.Errorf("slice data #%v is invalid: %w", i, gorm.ErrInvalidData)) - return - } - - values.Values[i] = make([]interface{}, len(values.Columns)) - for idx, column := range values.Columns { - field := stmt.Schema.FieldsByDBName[column.Name] - if values.Values[i][idx], isZero = field.ValueOf(stmt.Context, rv); isZero { - if field.DefaultValueInterface != nil { - values.Values[i][idx] = field.DefaultValueInterface - stmt.AddError(field.Set(stmt.Context, rv, field.DefaultValueInterface)) - } else if field.AutoCreateTime > 0 || field.AutoUpdateTime > 0 { - stmt.AddError(field.Set(stmt.Context, rv, curTime)) - values.Values[i][idx], _ = field.ValueOf(stmt.Context, rv) - } - } else if field.AutoUpdateTime > 0 && updateTrackTime { - stmt.AddError(field.Set(stmt.Context, rv, curTime)) - values.Values[i][idx], _ = field.ValueOf(stmt.Context, rv) - } - } - - for _, field := range stmt.Schema.FieldsWithDefaultDBValue { - if v, ok := selectColumns[field.DBName]; (ok && v) || (!ok && !restricted) { - if rvOfvalue, isZero := field.ValueOf(stmt.Context, rv); !isZero { - if len(defaultValueFieldsHavingValue[field]) == 0 { - defaultValueFieldsHavingValue[field] = make([]interface{}, rValLen) - } - defaultValueFieldsHavingValue[field][i] = rvOfvalue - } - } - } - } - - for field, vs := range defaultValueFieldsHavingValue { - values.Columns = append(values.Columns, clause.Column{Name: field.DBName}) - for idx := range values.Values { - if vs[idx] == nil { - values.Values[idx] = append(values.Values[idx], stmt.Dialector.DefaultValueOf(field)) - } else { - values.Values[idx] = append(values.Values[idx], vs[idx]) - } - } - } - case reflect.Struct: - values.Values = [][]interface{}{make([]interface{}, len(values.Columns))} - for idx, column := range values.Columns { - field := stmt.Schema.FieldsByDBName[column.Name] - if values.Values[0][idx], isZero = field.ValueOf(stmt.Context, stmt.ReflectValue); isZero { - if field.DefaultValueInterface != nil { - values.Values[0][idx] = field.DefaultValueInterface - stmt.AddError(field.Set(stmt.Context, stmt.ReflectValue, field.DefaultValueInterface)) - } else if field.AutoCreateTime > 0 || field.AutoUpdateTime > 0 { - 
stmt.AddError(field.Set(stmt.Context, stmt.ReflectValue, curTime)) - values.Values[0][idx], _ = field.ValueOf(stmt.Context, stmt.ReflectValue) - } - } else if field.AutoUpdateTime > 0 && updateTrackTime { - stmt.AddError(field.Set(stmt.Context, stmt.ReflectValue, curTime)) - values.Values[0][idx], _ = field.ValueOf(stmt.Context, stmt.ReflectValue) - } - } - - for _, field := range stmt.Schema.FieldsWithDefaultDBValue { - if v, ok := selectColumns[field.DBName]; (ok && v) || (!ok && !restricted) { - if rvOfvalue, isZero := field.ValueOf(stmt.Context, stmt.ReflectValue); !isZero { - values.Columns = append(values.Columns, clause.Column{Name: field.DBName}) - values.Values[0] = append(values.Values[0], rvOfvalue) - } - } - } - default: - stmt.AddError(gorm.ErrInvalidData) - } - } - - if c, ok := stmt.Clauses["ON CONFLICT"]; ok { - if onConflict, _ := c.Expression.(clause.OnConflict); onConflict.UpdateAll { - if stmt.Schema != nil && len(values.Columns) >= 1 { - selectColumns, restricted := stmt.SelectAndOmitColumns(true, true) - - columns := make([]string, 0, len(values.Columns)-1) - for _, column := range values.Columns { - if field := stmt.Schema.LookUpField(column.Name); field != nil { - if v, ok := selectColumns[field.DBName]; (ok && v) || (!ok && !restricted) { - if !field.PrimaryKey && (!field.HasDefaultValue || field.DefaultValueInterface != nil) && field.AutoCreateTime == 0 { - if field.AutoUpdateTime > 0 { - assignment := clause.Assignment{Column: clause.Column{Name: field.DBName}, Value: curTime} - switch field.AutoUpdateTime { - case schema.UnixNanosecond: - assignment.Value = curTime.UnixNano() - case schema.UnixMillisecond: - assignment.Value = curTime.UnixNano() / 1e6 - case schema.UnixSecond: - assignment.Value = curTime.Unix() - } - - onConflict.DoUpdates = append(onConflict.DoUpdates, assignment) - } else { - columns = append(columns, column.Name) - } - } - } - } - } - - onConflict.DoUpdates = append(onConflict.DoUpdates, clause.AssignmentColumns(columns)...) 
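For context, the UpdateAll/DoNothing handling deleted above is what the public clause.OnConflict API feeds into. A minimal sketch of how callers exercise it, assuming a hypothetical User model (not part of this diff) and a dialect that supports ON CONFLICT:

package example

import (
	"gorm.io/gorm"
	"gorm.io/gorm/clause"
)

type User struct {
	ID    uint   `gorm:"primaryKey"`
	Email string `gorm:"uniqueIndex"`
	Name  string
}

// upsertUser relies on the create callback shown above: UpdateAll is expanded into
// DoUpdates assignments for every updatable, non-primary column, and when DoUpdates
// ends up empty the clause degrades to DO NOTHING with the primary key as the
// default conflict target.
func upsertUser(db *gorm.DB, u *User) error {
	return db.Clauses(clause.OnConflict{UpdateAll: true}).Create(u).Error
}

// insertIfAbsent skips rows that already exist instead of updating them.
func insertIfAbsent(db *gorm.DB, u *User) error {
	return db.Clauses(clause.OnConflict{
		Columns:   []clause.Column{{Name: "id"}}, // optional; primary keys are used when omitted
		DoNothing: true,
	}).Create(u).Error
}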
- if len(onConflict.DoUpdates) == 0 { - onConflict.DoNothing = true - } - - // use primary fields as default OnConflict columns - if len(onConflict.Columns) == 0 { - for _, field := range stmt.Schema.PrimaryFields { - onConflict.Columns = append(onConflict.Columns, clause.Column{Name: field.DBName}) - } - } - stmt.AddClause(onConflict) - } - } - } - - return values -} diff --git a/vendor/gorm.io/gorm/callbacks/delete.go b/vendor/gorm.io/gorm/callbacks/delete.go index 84f446a3..e69de29b 100644 --- a/vendor/gorm.io/gorm/callbacks/delete.go +++ b/vendor/gorm.io/gorm/callbacks/delete.go @@ -1,185 +0,0 @@ -package callbacks - -import ( - "reflect" - "strings" - - "gorm.io/gorm" - "gorm.io/gorm/clause" - "gorm.io/gorm/schema" - "gorm.io/gorm/utils" -) - -func BeforeDelete(db *gorm.DB) { - if db.Error == nil && db.Statement.Schema != nil && !db.Statement.SkipHooks && db.Statement.Schema.BeforeDelete { - callMethod(db, func(value interface{}, tx *gorm.DB) bool { - if i, ok := value.(BeforeDeleteInterface); ok { - db.AddError(i.BeforeDelete(tx)) - return true - } - - return false - }) - } -} - -func DeleteBeforeAssociations(db *gorm.DB) { - if db.Error == nil && db.Statement.Schema != nil { - selectColumns, restricted := db.Statement.SelectAndOmitColumns(true, false) - if !restricted { - return - } - - for column, v := range selectColumns { - if !v { - continue - } - - rel, ok := db.Statement.Schema.Relationships.Relations[column] - if !ok { - continue - } - - switch rel.Type { - case schema.HasOne, schema.HasMany: - queryConds := rel.ToQueryConditions(db.Statement.Context, db.Statement.ReflectValue) - modelValue := reflect.New(rel.FieldSchema.ModelType).Interface() - tx := db.Session(&gorm.Session{NewDB: true}).Model(modelValue) - withoutConditions := false - if db.Statement.Unscoped { - tx = tx.Unscoped() - } - - if len(db.Statement.Selects) > 0 { - selects := make([]string, 0, len(db.Statement.Selects)) - for _, s := range db.Statement.Selects { - if s == clause.Associations { - selects = append(selects, s) - } else if columnPrefix := column + "."; strings.HasPrefix(s, columnPrefix) { - selects = append(selects, strings.TrimPrefix(s, columnPrefix)) - } - } - - if len(selects) > 0 { - tx = tx.Select(selects) - } - } - - for _, cond := range queryConds { - if c, ok := cond.(clause.IN); ok && len(c.Values) == 0 { - withoutConditions = true - break - } - } - - if !withoutConditions && db.AddError(tx.Clauses(clause.Where{Exprs: queryConds}).Delete(modelValue).Error) != nil { - return - } - case schema.Many2Many: - var ( - queryConds = make([]clause.Expression, 0, len(rel.References)) - foreignFields = make([]*schema.Field, 0, len(rel.References)) - relForeignKeys = make([]string, 0, len(rel.References)) - modelValue = reflect.New(rel.JoinTable.ModelType).Interface() - table = rel.JoinTable.Table - tx = db.Session(&gorm.Session{NewDB: true}).Model(modelValue).Table(table) - ) - - for _, ref := range rel.References { - if ref.OwnPrimaryKey { - foreignFields = append(foreignFields, ref.PrimaryKey) - relForeignKeys = append(relForeignKeys, ref.ForeignKey.DBName) - } else if ref.PrimaryValue != "" { - queryConds = append(queryConds, clause.Eq{ - Column: clause.Column{Table: rel.JoinTable.Table, Name: ref.ForeignKey.DBName}, - Value: ref.PrimaryValue, - }) - } - } - - _, foreignValues := schema.GetIdentityFieldValuesMap(db.Statement.Context, db.Statement.ReflectValue, foreignFields) - column, values := schema.ToQueryValues(table, relForeignKeys, foreignValues) - queryConds = append(queryConds, 
clause.IN{Column: column, Values: values}) - - if db.AddError(tx.Clauses(clause.Where{Exprs: queryConds}).Delete(modelValue).Error) != nil { - return - } - } - } - - } -} - -func Delete(config *Config) func(db *gorm.DB) { - supportReturning := utils.Contains(config.DeleteClauses, "RETURNING") - - return func(db *gorm.DB) { - if db.Error != nil { - return - } - - if db.Statement.Schema != nil { - for _, c := range db.Statement.Schema.DeleteClauses { - db.Statement.AddClause(c) - } - } - - if db.Statement.SQL.Len() == 0 { - db.Statement.SQL.Grow(100) - db.Statement.AddClauseIfNotExists(clause.Delete{}) - - if db.Statement.Schema != nil { - _, queryValues := schema.GetIdentityFieldValuesMap(db.Statement.Context, db.Statement.ReflectValue, db.Statement.Schema.PrimaryFields) - column, values := schema.ToQueryValues(db.Statement.Table, db.Statement.Schema.PrimaryFieldDBNames, queryValues) - - if len(values) > 0 { - db.Statement.AddClause(clause.Where{Exprs: []clause.Expression{clause.IN{Column: column, Values: values}}}) - } - - if db.Statement.ReflectValue.CanAddr() && db.Statement.Dest != db.Statement.Model && db.Statement.Model != nil { - _, queryValues = schema.GetIdentityFieldValuesMap(db.Statement.Context, reflect.ValueOf(db.Statement.Model), db.Statement.Schema.PrimaryFields) - column, values = schema.ToQueryValues(db.Statement.Table, db.Statement.Schema.PrimaryFieldDBNames, queryValues) - - if len(values) > 0 { - db.Statement.AddClause(clause.Where{Exprs: []clause.Expression{clause.IN{Column: column, Values: values}}}) - } - } - } - - db.Statement.AddClauseIfNotExists(clause.From{}) - - db.Statement.Build(db.Statement.BuildClauses...) - } - - checkMissingWhereConditions(db) - - if !db.DryRun && db.Error == nil { - ok, mode := hasReturning(db, supportReturning) - if !ok { - result, err := db.Statement.ConnPool.ExecContext(db.Statement.Context, db.Statement.SQL.String(), db.Statement.Vars...) 
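For reference, the WHERE construction and the DeleteBeforeAssociations logic deleted above correspond to the following public calls; the User/Order models are hypothetical:

package example

import (
	"gorm.io/gorm"
	"gorm.io/gorm/clause"
)

type Order struct {
	ID     uint
	UserID uint
}

type User struct {
	ID     uint
	Orders []Order
}

// deleteUserWithOrders triggers DeleteBeforeAssociations: the selected has-many
// relation is deleted first, then the owner row.
func deleteUserWithOrders(db *gorm.DB, id uint) error {
	return db.Select("Orders").Delete(&User{ID: id}).Error
}

// deleteUserWithAllAssociations removes every association declared on the model.
func deleteUserWithAllAssociations(db *gorm.DB, id uint) error {
	return db.Select(clause.Associations).Delete(&User{ID: id}).Error
}

// deleteUsersByID shows the primary-key handling in the Delete callback: the passed
// keys become a WHERE id IN (...) condition.
func deleteUsersByID(db *gorm.DB, ids []uint) error {
	return db.Delete(&User{}, ids).Error
}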
- if db.AddError(err) == nil { - db.RowsAffected, _ = result.RowsAffected() - } - - return - } - - if rows, err := db.Statement.ConnPool.QueryContext(db.Statement.Context, db.Statement.SQL.String(), db.Statement.Vars...); db.AddError(err) == nil { - gorm.Scan(rows, db, mode) - db.AddError(rows.Close()) - } - } - } -} - -func AfterDelete(db *gorm.DB) { - if db.Error == nil && db.Statement.Schema != nil && !db.Statement.SkipHooks && db.Statement.Schema.AfterDelete { - callMethod(db, func(value interface{}, tx *gorm.DB) bool { - if i, ok := value.(AfterDeleteInterface); ok { - db.AddError(i.AfterDelete(tx)) - return true - } - return false - }) - } -} diff --git a/vendor/gorm.io/gorm/callbacks/preload.go b/vendor/gorm.io/gorm/callbacks/preload.go index ea2570ba..e69de29b 100644 --- a/vendor/gorm.io/gorm/callbacks/preload.go +++ b/vendor/gorm.io/gorm/callbacks/preload.go @@ -1,173 +0,0 @@ -package callbacks - -import ( - "fmt" - "reflect" - - "gorm.io/gorm" - "gorm.io/gorm/clause" - "gorm.io/gorm/schema" - "gorm.io/gorm/utils" -) - -func preload(tx *gorm.DB, rel *schema.Relationship, conds []interface{}, preloads map[string][]interface{}) error { - var ( - reflectValue = tx.Statement.ReflectValue - relForeignKeys []string - relForeignFields []*schema.Field - foreignFields []*schema.Field - foreignValues [][]interface{} - identityMap = map[string][]reflect.Value{} - inlineConds []interface{} - ) - - if rel.JoinTable != nil { - var ( - joinForeignFields = make([]*schema.Field, 0, len(rel.References)) - joinRelForeignFields = make([]*schema.Field, 0, len(rel.References)) - joinForeignKeys = make([]string, 0, len(rel.References)) - ) - - for _, ref := range rel.References { - if ref.OwnPrimaryKey { - joinForeignKeys = append(joinForeignKeys, ref.ForeignKey.DBName) - joinForeignFields = append(joinForeignFields, ref.ForeignKey) - foreignFields = append(foreignFields, ref.PrimaryKey) - } else if ref.PrimaryValue != "" { - tx = tx.Where(clause.Eq{Column: ref.ForeignKey.DBName, Value: ref.PrimaryValue}) - } else { - joinRelForeignFields = append(joinRelForeignFields, ref.ForeignKey) - relForeignKeys = append(relForeignKeys, ref.PrimaryKey.DBName) - relForeignFields = append(relForeignFields, ref.PrimaryKey) - } - } - - joinIdentityMap, joinForeignValues := schema.GetIdentityFieldValuesMap(tx.Statement.Context, reflectValue, foreignFields) - if len(joinForeignValues) == 0 { - return nil - } - - joinResults := rel.JoinTable.MakeSlice().Elem() - column, values := schema.ToQueryValues(clause.CurrentTable, joinForeignKeys, joinForeignValues) - if err := tx.Where(clause.IN{Column: column, Values: values}).Find(joinResults.Addr().Interface()).Error; err != nil { - return err - } - - // convert join identity map to relation identity map - fieldValues := make([]interface{}, len(joinForeignFields)) - joinFieldValues := make([]interface{}, len(joinRelForeignFields)) - for i := 0; i < joinResults.Len(); i++ { - joinIndexValue := joinResults.Index(i) - for idx, field := range joinForeignFields { - fieldValues[idx], _ = field.ValueOf(tx.Statement.Context, joinIndexValue) - } - - for idx, field := range joinRelForeignFields { - joinFieldValues[idx], _ = field.ValueOf(tx.Statement.Context, joinIndexValue) - } - - if results, ok := joinIdentityMap[utils.ToStringKey(fieldValues...)]; ok { - joinKey := utils.ToStringKey(joinFieldValues...) - identityMap[joinKey] = append(identityMap[joinKey], results...) 
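The hasReturning branch deleted above is reachable from user code via clause.Returning. A sketch assuming a dialect whose delete clauses include RETURNING (for example the Postgres driver) and a hypothetical User model:

package example

import (
	"gorm.io/gorm"
	"gorm.io/gorm/clause"
)

type User struct {
	ID   uint
	Name string
	Role string
}

// deleteAdminsReturning deletes matching rows and scans the deleted rows back into
// users through the QueryContext/gorm.Scan path shown above. On dialects without
// RETURNING support gorm falls back to a plain DELETE and only RowsAffected is set.
func deleteAdminsReturning(db *gorm.DB) ([]User, error) {
	var users []User
	err := db.Clauses(clause.Returning{}).Where("role = ?", "admin").Delete(&users).Error
	return users, err
}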
- } - } - - _, foreignValues = schema.GetIdentityFieldValuesMap(tx.Statement.Context, joinResults, joinRelForeignFields) - } else { - for _, ref := range rel.References { - if ref.OwnPrimaryKey { - relForeignKeys = append(relForeignKeys, ref.ForeignKey.DBName) - relForeignFields = append(relForeignFields, ref.ForeignKey) - foreignFields = append(foreignFields, ref.PrimaryKey) - } else if ref.PrimaryValue != "" { - tx = tx.Where(clause.Eq{Column: ref.ForeignKey.DBName, Value: ref.PrimaryValue}) - } else { - relForeignKeys = append(relForeignKeys, ref.PrimaryKey.DBName) - relForeignFields = append(relForeignFields, ref.PrimaryKey) - foreignFields = append(foreignFields, ref.ForeignKey) - } - } - - identityMap, foreignValues = schema.GetIdentityFieldValuesMap(tx.Statement.Context, reflectValue, foreignFields) - if len(foreignValues) == 0 { - return nil - } - } - - // nested preload - for p, pvs := range preloads { - tx = tx.Preload(p, pvs...) - } - - reflectResults := rel.FieldSchema.MakeSlice().Elem() - column, values := schema.ToQueryValues(clause.CurrentTable, relForeignKeys, foreignValues) - - if len(values) != 0 { - for _, cond := range conds { - if fc, ok := cond.(func(*gorm.DB) *gorm.DB); ok { - tx = fc(tx) - } else { - inlineConds = append(inlineConds, cond) - } - } - - if err := tx.Where(clause.IN{Column: column, Values: values}).Find(reflectResults.Addr().Interface(), inlineConds...).Error; err != nil { - return err - } - } - - fieldValues := make([]interface{}, len(relForeignFields)) - - // clean up old values before preloading - switch reflectValue.Kind() { - case reflect.Struct: - switch rel.Type { - case schema.HasMany, schema.Many2Many: - tx.AddError(rel.Field.Set(tx.Statement.Context, reflectValue, reflect.MakeSlice(rel.Field.IndirectFieldType, 0, 10).Interface())) - default: - tx.AddError(rel.Field.Set(tx.Statement.Context, reflectValue, reflect.New(rel.Field.FieldType).Interface())) - } - case reflect.Slice, reflect.Array: - for i := 0; i < reflectValue.Len(); i++ { - switch rel.Type { - case schema.HasMany, schema.Many2Many: - tx.AddError(rel.Field.Set(tx.Statement.Context, reflectValue.Index(i), reflect.MakeSlice(rel.Field.IndirectFieldType, 0, 10).Interface())) - default: - tx.AddError(rel.Field.Set(tx.Statement.Context, reflectValue.Index(i), reflect.New(rel.Field.FieldType).Interface())) - } - } - } - - for i := 0; i < reflectResults.Len(); i++ { - elem := reflectResults.Index(i) - for idx, field := range relForeignFields { - fieldValues[idx], _ = field.ValueOf(tx.Statement.Context, elem) - } - - datas, ok := identityMap[utils.ToStringKey(fieldValues...)] - if !ok { - return fmt.Errorf("failed to assign association %#v, make sure foreign fields exists", elem.Interface()) - } - - for _, data := range datas { - reflectFieldValue := rel.Field.ReflectValueOf(tx.Statement.Context, data) - if reflectFieldValue.Kind() == reflect.Ptr && reflectFieldValue.IsNil() { - reflectFieldValue.Set(reflect.New(rel.Field.FieldType.Elem())) - } - - reflectFieldValue = reflect.Indirect(reflectFieldValue) - switch reflectFieldValue.Kind() { - case reflect.Struct: - tx.AddError(rel.Field.Set(tx.Statement.Context, data, elem.Interface())) - case reflect.Slice, reflect.Array: - if reflectFieldValue.Type().Elem().Kind() == reflect.Ptr { - tx.AddError(rel.Field.Set(tx.Statement.Context, data, reflect.Append(reflectFieldValue, elem).Interface())) - } else { - tx.AddError(rel.Field.Set(tx.Statement.Context, data, reflect.Append(reflectFieldValue, elem.Elem()).Interface())) - } - } - } - } - - return 
tx.Error -} diff --git a/vendor/gorm.io/gorm/callbacks/update.go b/vendor/gorm.io/gorm/callbacks/update.go index 01f40509..e69de29b 100644 --- a/vendor/gorm.io/gorm/callbacks/update.go +++ b/vendor/gorm.io/gorm/callbacks/update.go @@ -1,291 +0,0 @@ -package callbacks - -import ( - "reflect" - "sort" - - "gorm.io/gorm" - "gorm.io/gorm/clause" - "gorm.io/gorm/schema" - "gorm.io/gorm/utils" -) - -func SetupUpdateReflectValue(db *gorm.DB) { - if db.Error == nil && db.Statement.Schema != nil { - if !db.Statement.ReflectValue.CanAddr() || db.Statement.Model != db.Statement.Dest { - db.Statement.ReflectValue = reflect.ValueOf(db.Statement.Model) - for db.Statement.ReflectValue.Kind() == reflect.Ptr { - db.Statement.ReflectValue = db.Statement.ReflectValue.Elem() - } - - if dest, ok := db.Statement.Dest.(map[string]interface{}); ok { - for _, rel := range db.Statement.Schema.Relationships.BelongsTo { - if _, ok := dest[rel.Name]; ok { - db.AddError(rel.Field.Set(db.Statement.Context, db.Statement.ReflectValue, dest[rel.Name])) - } - } - } - } - } -} - -// BeforeUpdate before update hooks -func BeforeUpdate(db *gorm.DB) { - if db.Error == nil && db.Statement.Schema != nil && !db.Statement.SkipHooks && (db.Statement.Schema.BeforeSave || db.Statement.Schema.BeforeUpdate) { - callMethod(db, func(value interface{}, tx *gorm.DB) (called bool) { - if db.Statement.Schema.BeforeSave { - if i, ok := value.(BeforeSaveInterface); ok { - called = true - db.AddError(i.BeforeSave(tx)) - } - } - - if db.Statement.Schema.BeforeUpdate { - if i, ok := value.(BeforeUpdateInterface); ok { - called = true - db.AddError(i.BeforeUpdate(tx)) - } - } - - return called - }) - } -} - -// Update update hook -func Update(config *Config) func(db *gorm.DB) { - supportReturning := utils.Contains(config.UpdateClauses, "RETURNING") - - return func(db *gorm.DB) { - if db.Error != nil { - return - } - - if db.Statement.Schema != nil { - for _, c := range db.Statement.Schema.UpdateClauses { - db.Statement.AddClause(c) - } - } - - if db.Statement.SQL.Len() == 0 { - db.Statement.SQL.Grow(180) - db.Statement.AddClauseIfNotExists(clause.Update{}) - if set := ConvertToAssignments(db.Statement); len(set) != 0 { - db.Statement.AddClause(set) - } else if _, ok := db.Statement.Clauses["SET"]; !ok { - return - } - - db.Statement.Build(db.Statement.BuildClauses...) - } - - checkMissingWhereConditions(db) - - if !db.DryRun && db.Error == nil { - if ok, mode := hasReturning(db, supportReturning); ok { - if rows, err := db.Statement.ConnPool.QueryContext(db.Statement.Context, db.Statement.SQL.String(), db.Statement.Vars...); db.AddError(err) == nil { - dest := db.Statement.Dest - db.Statement.Dest = db.Statement.ReflectValue.Addr().Interface() - gorm.Scan(rows, db, mode) - db.Statement.Dest = dest - db.AddError(rows.Close()) - } - } else { - result, err := db.Statement.ConnPool.ExecContext(db.Statement.Context, db.Statement.SQL.String(), db.Statement.Vars...) 
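The preload callback that ends above and the update callback that begins in the same hunk surface through Preload and Updates respectively. A sketch with hypothetical User/Order/Item models:

package example

import "gorm.io/gorm"

type Item struct {
	ID      uint
	OrderID uint
	SKU     string
}

type Order struct {
	ID     uint
	UserID uint
	State  string
	Items  []Item
}

type User struct {
	ID     uint
	Name   string
	Orders []Order
}

// loadUsersWithPaidOrders applies a condition to the preloaded relation and nests a
// second preload; both go through the identity-map matching removed above.
func loadUsersWithPaidOrders(db *gorm.DB) ([]User, error) {
	var users []User
	err := db.
		Preload("Orders", "state = ?", "paid").
		Preload("Orders.Items").
		Find(&users).Error
	return users, err
}

// renameUser uses a map so that zero values are still written; struct-based Updates
// skip zero-value fields when the assignments are built.
func renameUser(db *gorm.DB, id uint, name string) error {
	return db.Model(&User{ID: id}).Updates(map[string]interface{}{"name": name}).Error
}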
- - if db.AddError(err) == nil { - db.RowsAffected, _ = result.RowsAffected() - } - } - } - } -} - -// AfterUpdate after update hooks -func AfterUpdate(db *gorm.DB) { - if db.Error == nil && db.Statement.Schema != nil && !db.Statement.SkipHooks && (db.Statement.Schema.AfterSave || db.Statement.Schema.AfterUpdate) { - callMethod(db, func(value interface{}, tx *gorm.DB) (called bool) { - if db.Statement.Schema.AfterUpdate { - if i, ok := value.(AfterUpdateInterface); ok { - called = true - db.AddError(i.AfterUpdate(tx)) - } - } - - if db.Statement.Schema.AfterSave { - if i, ok := value.(AfterSaveInterface); ok { - called = true - db.AddError(i.AfterSave(tx)) - } - } - - return called - }) - } -} - -// ConvertToAssignments convert to update assignments -func ConvertToAssignments(stmt *gorm.Statement) (set clause.Set) { - var ( - selectColumns, restricted = stmt.SelectAndOmitColumns(false, true) - assignValue func(field *schema.Field, value interface{}) - ) - - switch stmt.ReflectValue.Kind() { - case reflect.Slice, reflect.Array: - assignValue = func(field *schema.Field, value interface{}) { - for i := 0; i < stmt.ReflectValue.Len(); i++ { - field.Set(stmt.Context, stmt.ReflectValue.Index(i), value) - } - } - case reflect.Struct: - assignValue = func(field *schema.Field, value interface{}) { - if stmt.ReflectValue.CanAddr() { - field.Set(stmt.Context, stmt.ReflectValue, value) - } - } - default: - assignValue = func(field *schema.Field, value interface{}) { - } - } - - updatingValue := reflect.ValueOf(stmt.Dest) - for updatingValue.Kind() == reflect.Ptr { - updatingValue = updatingValue.Elem() - } - - if !updatingValue.CanAddr() || stmt.Dest != stmt.Model { - switch stmt.ReflectValue.Kind() { - case reflect.Slice, reflect.Array: - if size := stmt.ReflectValue.Len(); size > 0 { - var primaryKeyExprs []clause.Expression - for i := 0; i < size; i++ { - exprs := make([]clause.Expression, len(stmt.Schema.PrimaryFields)) - var notZero bool - for idx, field := range stmt.Schema.PrimaryFields { - value, isZero := field.ValueOf(stmt.Context, stmt.ReflectValue.Index(i)) - exprs[idx] = clause.Eq{Column: field.DBName, Value: value} - notZero = notZero || !isZero - } - if notZero { - primaryKeyExprs = append(primaryKeyExprs, clause.And(exprs...)) - } - } - - stmt.AddClause(clause.Where{Exprs: []clause.Expression{clause.Or(primaryKeyExprs...)}}) - } - case reflect.Struct: - for _, field := range stmt.Schema.PrimaryFields { - if value, isZero := field.ValueOf(stmt.Context, stmt.ReflectValue); !isZero { - stmt.AddClause(clause.Where{Exprs: []clause.Expression{clause.Eq{Column: field.DBName, Value: value}}}) - } - } - } - } - - switch value := updatingValue.Interface().(type) { - case map[string]interface{}: - set = make([]clause.Assignment, 0, len(value)) - - keys := make([]string, 0, len(value)) - for k := range value { - keys = append(keys, k) - } - sort.Strings(keys) - - for _, k := range keys { - kv := value[k] - if _, ok := kv.(*gorm.DB); ok { - kv = []interface{}{kv} - } - - if stmt.Schema != nil { - if field := stmt.Schema.LookUpField(k); field != nil { - if field.DBName != "" { - if v, ok := selectColumns[field.DBName]; (ok && v) || (!ok && !restricted) { - set = append(set, clause.Assignment{Column: clause.Column{Name: field.DBName}, Value: kv}) - assignValue(field, value[k]) - } - } else if v, ok := selectColumns[field.Name]; (ok && v) || (!ok && !restricted) { - assignValue(field, value[k]) - } - continue - } - } - - if v, ok := selectColumns[k]; (ok && v) || (!ok && !restricted) { - set = 
append(set, clause.Assignment{Column: clause.Column{Name: k}, Value: kv}) - } - } - - if !stmt.SkipHooks && stmt.Schema != nil { - for _, dbName := range stmt.Schema.DBNames { - field := stmt.Schema.LookUpField(dbName) - if field.AutoUpdateTime > 0 && value[field.Name] == nil && value[field.DBName] == nil { - if v, ok := selectColumns[field.DBName]; (ok && v) || !ok { - now := stmt.DB.NowFunc() - assignValue(field, now) - - if field.AutoUpdateTime == schema.UnixNanosecond { - set = append(set, clause.Assignment{Column: clause.Column{Name: field.DBName}, Value: now.UnixNano()}) - } else if field.AutoUpdateTime == schema.UnixMillisecond { - set = append(set, clause.Assignment{Column: clause.Column{Name: field.DBName}, Value: now.UnixNano() / 1e6}) - } else if field.AutoUpdateTime == schema.UnixSecond { - set = append(set, clause.Assignment{Column: clause.Column{Name: field.DBName}, Value: now.Unix()}) - } else { - set = append(set, clause.Assignment{Column: clause.Column{Name: field.DBName}, Value: now}) - } - } - } - } - } - default: - updatingSchema := stmt.Schema - if !updatingValue.CanAddr() || stmt.Dest != stmt.Model { - // different schema - updatingStmt := &gorm.Statement{DB: stmt.DB} - if err := updatingStmt.Parse(stmt.Dest); err == nil { - updatingSchema = updatingStmt.Schema - } - } - - switch updatingValue.Kind() { - case reflect.Struct: - set = make([]clause.Assignment, 0, len(stmt.Schema.FieldsByDBName)) - for _, dbName := range stmt.Schema.DBNames { - if field := updatingSchema.LookUpField(dbName); field != nil { - if !field.PrimaryKey || !updatingValue.CanAddr() || stmt.Dest != stmt.Model { - if v, ok := selectColumns[field.DBName]; (ok && v) || (!ok && (!restricted || (!stmt.SkipHooks && field.AutoUpdateTime > 0))) { - value, isZero := field.ValueOf(stmt.Context, updatingValue) - if !stmt.SkipHooks && field.AutoUpdateTime > 0 { - if field.AutoUpdateTime == schema.UnixNanosecond { - value = stmt.DB.NowFunc().UnixNano() - } else if field.AutoUpdateTime == schema.UnixMillisecond { - value = stmt.DB.NowFunc().UnixNano() / 1e6 - } else if field.AutoUpdateTime == schema.UnixSecond { - value = stmt.DB.NowFunc().Unix() - } else { - value = stmt.DB.NowFunc() - } - isZero = false - } - - if (ok || !isZero) && field.Updatable { - set = append(set, clause.Assignment{Column: clause.Column{Name: field.DBName}, Value: value}) - assignValue(field, value) - } - } - } else { - if value, isZero := field.ValueOf(stmt.Context, updatingValue); !isZero { - stmt.AddClause(clause.Where{Exprs: []clause.Expression{clause.Eq{Column: field.DBName, Value: value}}}) - } - } - } - } - default: - stmt.AddError(gorm.ErrInvalidData) - } - } - - return -} diff --git a/vendor/gorm.io/gorm/logger/logger.go b/vendor/gorm.io/gorm/logger/logger.go index 2ffd28d5..e69de29b 100644 --- a/vendor/gorm.io/gorm/logger/logger.go +++ b/vendor/gorm.io/gorm/logger/logger.go @@ -1,202 +0,0 @@ -package logger - -import ( - "context" - "errors" - "fmt" - "io/ioutil" - "log" - "os" - "time" - - "gorm.io/gorm/utils" -) - -// ErrRecordNotFound record not found error -var ErrRecordNotFound = errors.New("record not found") - -// Colors -const ( - Reset = "\033[0m" - Red = "\033[31m" - Green = "\033[32m" - Yellow = "\033[33m" - Blue = "\033[34m" - Magenta = "\033[35m" - Cyan = "\033[36m" - White = "\033[37m" - BlueBold = "\033[34;1m" - MagentaBold = "\033[35;1m" - RedBold = "\033[31;1m" - YellowBold = "\033[33;1m" -) - -// LogLevel log level -type LogLevel int - -const ( - // Silent silent log level - Silent LogLevel = iota + 1 - // 
Error error log level - Error - // Warn warn log level - Warn - // Info info log level - Info -) - -// Writer log writer interface -type Writer interface { - Printf(string, ...interface{}) -} - -// Config logger config -type Config struct { - SlowThreshold time.Duration - Colorful bool - IgnoreRecordNotFoundError bool - LogLevel LogLevel -} - -// Interface logger interface -type Interface interface { - LogMode(LogLevel) Interface - Info(context.Context, string, ...interface{}) - Warn(context.Context, string, ...interface{}) - Error(context.Context, string, ...interface{}) - Trace(ctx context.Context, begin time.Time, fc func() (sql string, rowsAffected int64), err error) -} - -var ( - // Discard Discard logger will print any log to ioutil.Discard - Discard = New(log.New(ioutil.Discard, "", log.LstdFlags), Config{}) - // Default Default logger - Default = New(log.New(os.Stdout, "\r\n", log.LstdFlags), Config{ - SlowThreshold: 200 * time.Millisecond, - LogLevel: Warn, - IgnoreRecordNotFoundError: false, - Colorful: true, - }) - // Recorder Recorder logger records running SQL into a recorder instance - Recorder = traceRecorder{Interface: Default, BeginAt: time.Now()} -) - -// New initialize logger -func New(writer Writer, config Config) Interface { - var ( - infoStr = "%s\n[info] " - warnStr = "%s\n[warn] " - errStr = "%s\n[error] " - traceStr = "%s\n[%.3fms] [rows:%v] %s" - traceWarnStr = "%s %s\n[%.3fms] [rows:%v] %s" - traceErrStr = "%s %s\n[%.3fms] [rows:%v] %s" - ) - - if config.Colorful { - infoStr = Green + "%s\n" + Reset + Green + "[info] " + Reset - warnStr = BlueBold + "%s\n" + Reset + Magenta + "[warn] " + Reset - errStr = Magenta + "%s\n" + Reset + Red + "[error] " + Reset - traceStr = Green + "%s\n" + Reset + Yellow + "[%.3fms] " + BlueBold + "[rows:%v]" + Reset + " %s" - traceWarnStr = Green + "%s " + Yellow + "%s\n" + Reset + RedBold + "[%.3fms] " + Yellow + "[rows:%v]" + Magenta + " %s" + Reset - traceErrStr = RedBold + "%s " + MagentaBold + "%s\n" + Reset + Yellow + "[%.3fms] " + BlueBold + "[rows:%v]" + Reset + " %s" - } - - return &logger{ - Writer: writer, - Config: config, - infoStr: infoStr, - warnStr: warnStr, - errStr: errStr, - traceStr: traceStr, - traceWarnStr: traceWarnStr, - traceErrStr: traceErrStr, - } -} - -type logger struct { - Writer - Config - infoStr, warnStr, errStr string - traceStr, traceErrStr, traceWarnStr string -} - -// LogMode log mode -func (l *logger) LogMode(level LogLevel) Interface { - newlogger := *l - newlogger.LogLevel = level - return &newlogger -} - -// Info print info -func (l logger) Info(ctx context.Context, msg string, data ...interface{}) { - if l.LogLevel >= Info { - l.Printf(l.infoStr+msg, append([]interface{}{utils.FileWithLineNum()}, data...)...) - } -} - -// Warn print warn messages -func (l logger) Warn(ctx context.Context, msg string, data ...interface{}) { - if l.LogLevel >= Warn { - l.Printf(l.warnStr+msg, append([]interface{}{utils.FileWithLineNum()}, data...)...) - } -} - -// Error print error messages -func (l logger) Error(ctx context.Context, msg string, data ...interface{}) { - if l.LogLevel >= Error { - l.Printf(l.errStr+msg, append([]interface{}{utils.FileWithLineNum()}, data...)...) 
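The logger configuration deleted above is normally supplied when opening a session. A sketch assuming the sqlite driver and a hypothetical "demo.db" file:

package example

import (
	"log"
	"os"
	"time"

	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
	"gorm.io/gorm/logger"
)

// openWithVerboseLogger mirrors the Default logger above but raises the level to
// Info (print every statement), keeps the 200ms slow-query threshold, and silences
// record-not-found traces.
func openWithVerboseLogger() (*gorm.DB, error) {
	verbose := logger.New(
		log.New(os.Stdout, "\r\n", log.LstdFlags),
		logger.Config{
			SlowThreshold:             200 * time.Millisecond,
			LogLevel:                  logger.Info,
			IgnoreRecordNotFoundError: true,
			Colorful:                  true,
		},
	)
	return gorm.Open(sqlite.Open("demo.db"), &gorm.Config{Logger: verbose})
}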
- } -} - -// Trace print sql message -func (l logger) Trace(ctx context.Context, begin time.Time, fc func() (string, int64), err error) { - if l.LogLevel <= Silent { - return - } - - elapsed := time.Since(begin) - switch { - case err != nil && l.LogLevel >= Error && (!errors.Is(err, ErrRecordNotFound) || !l.IgnoreRecordNotFoundError): - sql, rows := fc() - if rows == -1 { - l.Printf(l.traceErrStr, utils.FileWithLineNum(), err, float64(elapsed.Nanoseconds())/1e6, "-", sql) - } else { - l.Printf(l.traceErrStr, utils.FileWithLineNum(), err, float64(elapsed.Nanoseconds())/1e6, rows, sql) - } - case elapsed > l.SlowThreshold && l.SlowThreshold != 0 && l.LogLevel >= Warn: - sql, rows := fc() - slowLog := fmt.Sprintf("SLOW SQL >= %v", l.SlowThreshold) - if rows == -1 { - l.Printf(l.traceWarnStr, utils.FileWithLineNum(), slowLog, float64(elapsed.Nanoseconds())/1e6, "-", sql) - } else { - l.Printf(l.traceWarnStr, utils.FileWithLineNum(), slowLog, float64(elapsed.Nanoseconds())/1e6, rows, sql) - } - case l.LogLevel == Info: - sql, rows := fc() - if rows == -1 { - l.Printf(l.traceStr, utils.FileWithLineNum(), float64(elapsed.Nanoseconds())/1e6, "-", sql) - } else { - l.Printf(l.traceStr, utils.FileWithLineNum(), float64(elapsed.Nanoseconds())/1e6, rows, sql) - } - } -} - -type traceRecorder struct { - Interface - BeginAt time.Time - SQL string - RowsAffected int64 - Err error -} - -// New new trace recorder -func (l traceRecorder) New() *traceRecorder { - return &traceRecorder{Interface: l.Interface, BeginAt: time.Now()} -} - -// Trace implement logger interface -func (l *traceRecorder) Trace(ctx context.Context, begin time.Time, fc func() (string, int64), err error) { - l.BeginAt = begin - l.SQL, l.RowsAffected = fc() - l.Err = err -} diff --git a/vendor/gorm.io/gorm/logger/sql.go b/vendor/gorm.io/gorm/logger/sql.go index c8b194c3..e69de29b 100644 --- a/vendor/gorm.io/gorm/logger/sql.go +++ b/vendor/gorm.io/gorm/logger/sql.go @@ -1,147 +0,0 @@ -package logger - -import ( - "database/sql/driver" - "fmt" - "reflect" - "regexp" - "strconv" - "strings" - "time" - "unicode" - - "gorm.io/gorm/utils" -) - -const ( - tmFmtWithMS = "2006-01-02 15:04:05.999" - tmFmtZero = "0000-00-00 00:00:00" - nullStr = "NULL" -) - -func isPrintable(s string) bool { - for _, r := range s { - if !unicode.IsPrint(r) { - return false - } - } - return true -} - -var convertibleTypes = []reflect.Type{reflect.TypeOf(time.Time{}), reflect.TypeOf(false), reflect.TypeOf([]byte{})} - -// ExplainSQL generate SQL string with given parameters, the generated SQL is expected to be used in logger, execute it might introduce a SQL injection vulnerability -func ExplainSQL(sql string, numericPlaceholder *regexp.Regexp, escaper string, avars ...interface{}) string { - var ( - convertParams func(interface{}, int) - vars = make([]string, len(avars)) - ) - - convertParams = func(v interface{}, idx int) { - switch v := v.(type) { - case bool: - vars[idx] = strconv.FormatBool(v) - case time.Time: - if v.IsZero() { - vars[idx] = escaper + tmFmtZero + escaper - } else { - vars[idx] = escaper + v.Format(tmFmtWithMS) + escaper - } - case *time.Time: - if v != nil { - if v.IsZero() { - vars[idx] = escaper + tmFmtZero + escaper - } else { - vars[idx] = escaper + v.Format(tmFmtWithMS) + escaper - } - } else { - vars[idx] = nullStr - } - case driver.Valuer: - reflectValue := reflect.ValueOf(v) - if v != nil && reflectValue.IsValid() && ((reflectValue.Kind() == reflect.Ptr && !reflectValue.IsNil()) || reflectValue.Kind() != reflect.Ptr) { - r, _ := 
v.Value() - convertParams(r, idx) - } else { - vars[idx] = nullStr - } - case fmt.Stringer: - reflectValue := reflect.ValueOf(v) - switch reflectValue.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - vars[idx] = fmt.Sprintf("%d", reflectValue.Interface()) - case reflect.Float32, reflect.Float64: - vars[idx] = fmt.Sprintf("%.6f", reflectValue.Interface()) - case reflect.Bool: - vars[idx] = fmt.Sprintf("%t", reflectValue.Interface()) - case reflect.String: - vars[idx] = escaper + strings.ReplaceAll(fmt.Sprintf("%v", v), escaper, "\\"+escaper) + escaper - default: - if v != nil && reflectValue.IsValid() && ((reflectValue.Kind() == reflect.Ptr && !reflectValue.IsNil()) || reflectValue.Kind() != reflect.Ptr) { - vars[idx] = escaper + strings.ReplaceAll(fmt.Sprintf("%v", v), escaper, "\\"+escaper) + escaper - } else { - vars[idx] = nullStr - } - } - case []byte: - if s := string(v); isPrintable(s) { - vars[idx] = escaper + strings.ReplaceAll(s, escaper, "\\"+escaper) + escaper - } else { - vars[idx] = escaper + "" + escaper - } - case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64: - vars[idx] = utils.ToString(v) - case float64, float32: - vars[idx] = fmt.Sprintf("%.6f", v) - case string: - vars[idx] = escaper + strings.ReplaceAll(v, escaper, "\\"+escaper) + escaper - default: - rv := reflect.ValueOf(v) - if v == nil || !rv.IsValid() || rv.Kind() == reflect.Ptr && rv.IsNil() { - vars[idx] = nullStr - } else if valuer, ok := v.(driver.Valuer); ok { - v, _ = valuer.Value() - convertParams(v, idx) - } else if rv.Kind() == reflect.Ptr && !rv.IsZero() { - convertParams(reflect.Indirect(rv).Interface(), idx) - } else { - for _, t := range convertibleTypes { - if rv.Type().ConvertibleTo(t) { - convertParams(rv.Convert(t).Interface(), idx) - return - } - } - vars[idx] = escaper + strings.ReplaceAll(fmt.Sprint(v), escaper, "\\"+escaper) + escaper - } - } - } - - for idx, v := range avars { - convertParams(v, idx) - } - - if numericPlaceholder == nil { - var idx int - var newSQL strings.Builder - - for _, v := range []byte(sql) { - if v == '?' 
{ - if len(vars) > idx { - newSQL.WriteString(vars[idx]) - idx++ - continue - } - } - newSQL.WriteByte(v) - } - - sql = newSQL.String() - } else { - sql = numericPlaceholder.ReplaceAllString(sql, "$$$1$$") - for idx, v := range vars { - sql = strings.Replace(sql, "$"+strconv.Itoa(idx+1)+"$", v, 1) - } - } - - return sql -} diff --git a/vendor/gorm.io/gorm/prepare_stmt.go b/vendor/gorm.io/gorm/prepare_stmt.go index b062b0d6..e69de29b 100644 --- a/vendor/gorm.io/gorm/prepare_stmt.go +++ b/vendor/gorm.io/gorm/prepare_stmt.go @@ -1,172 +0,0 @@ -package gorm - -import ( - "context" - "database/sql" - "sync" -) - -type Stmt struct { - *sql.Stmt - Transaction bool -} - -type PreparedStmtDB struct { - Stmts map[string]Stmt - PreparedSQL []string - Mux *sync.RWMutex - ConnPool -} - -func (db *PreparedStmtDB) GetDBConn() (*sql.DB, error) { - if dbConnector, ok := db.ConnPool.(GetDBConnector); ok && dbConnector != nil { - return dbConnector.GetDBConn() - } - - if sqldb, ok := db.ConnPool.(*sql.DB); ok { - return sqldb, nil - } - - return nil, ErrInvalidDB -} - -func (db *PreparedStmtDB) Close() { - db.Mux.Lock() - defer db.Mux.Unlock() - - for _, query := range db.PreparedSQL { - if stmt, ok := db.Stmts[query]; ok { - delete(db.Stmts, query) - go stmt.Close() - } - } -} - -func (db *PreparedStmtDB) prepare(ctx context.Context, conn ConnPool, isTransaction bool, query string) (Stmt, error) { - db.Mux.RLock() - if stmt, ok := db.Stmts[query]; ok && (!stmt.Transaction || isTransaction) { - db.Mux.RUnlock() - return stmt, nil - } - db.Mux.RUnlock() - - db.Mux.Lock() - defer db.Mux.Unlock() - - // double check - if stmt, ok := db.Stmts[query]; ok && (!stmt.Transaction || isTransaction) { - return stmt, nil - } else if ok { - go stmt.Close() - } - - stmt, err := conn.PrepareContext(ctx, query) - if err == nil { - db.Stmts[query] = Stmt{Stmt: stmt, Transaction: isTransaction} - db.PreparedSQL = append(db.PreparedSQL, query) - } - - return db.Stmts[query], err -} - -func (db *PreparedStmtDB) BeginTx(ctx context.Context, opt *sql.TxOptions) (ConnPool, error) { - if beginner, ok := db.ConnPool.(TxBeginner); ok { - tx, err := beginner.BeginTx(ctx, opt) - return &PreparedStmtTX{PreparedStmtDB: db, Tx: tx}, err - } - return nil, ErrInvalidTransaction -} - -func (db *PreparedStmtDB) ExecContext(ctx context.Context, query string, args ...interface{}) (result sql.Result, err error) { - stmt, err := db.prepare(ctx, db.ConnPool, false, query) - if err == nil { - result, err = stmt.ExecContext(ctx, args...) - if err != nil { - db.Mux.Lock() - defer db.Mux.Unlock() - go stmt.Close() - delete(db.Stmts, query) - } - } - return result, err -} - -func (db *PreparedStmtDB) QueryContext(ctx context.Context, query string, args ...interface{}) (rows *sql.Rows, err error) { - stmt, err := db.prepare(ctx, db.ConnPool, false, query) - if err == nil { - rows, err = stmt.QueryContext(ctx, args...) - if err != nil { - db.Mux.Lock() - defer db.Mux.Unlock() - - go stmt.Close() - delete(db.Stmts, query) - } - } - return rows, err -} - -func (db *PreparedStmtDB) QueryRowContext(ctx context.Context, query string, args ...interface{}) *sql.Row { - stmt, err := db.prepare(ctx, db.ConnPool, false, query) - if err == nil { - return stmt.QueryRowContext(ctx, args...) 
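Two public entry points correspond to the code deleted above: Dialector.Explain drives the ExplainSQL interpolation in the bundled dialects, and PrepareStmt: true enables the statement cache in PreparedStmtDB. A sketch assuming the sqlite driver and a hypothetical User model:

package example

import (
	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

type User struct {
	ID   uint
	Name string
}

// openWithStmtCache turns on the prepared-statement cache shown above; subsequent
// identical queries reuse the cached *sql.Stmt instead of re-preparing.
func openWithStmtCache() (*gorm.DB, error) {
	return gorm.Open(sqlite.Open("demo.db"), &gorm.Config{PrepareStmt: true})
}

// renderSQL builds a statement without executing it (DryRun) and interpolates the
// collected vars. The result is intended for logging only and must not be executed.
func renderSQL(db *gorm.DB) string {
	stmt := db.Session(&gorm.Session{DryRun: true}).Where("name = ?", "jinzhu").Find(&User{}).Statement
	return db.Dialector.Explain(stmt.SQL.String(), stmt.Vars...)
}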
- } - return &sql.Row{} -} - -type PreparedStmtTX struct { - Tx - PreparedStmtDB *PreparedStmtDB -} - -func (tx *PreparedStmtTX) Commit() error { - if tx.Tx != nil { - return tx.Tx.Commit() - } - return ErrInvalidTransaction -} - -func (tx *PreparedStmtTX) Rollback() error { - if tx.Tx != nil { - return tx.Tx.Rollback() - } - return ErrInvalidTransaction -} - -func (tx *PreparedStmtTX) ExecContext(ctx context.Context, query string, args ...interface{}) (result sql.Result, err error) { - stmt, err := tx.PreparedStmtDB.prepare(ctx, tx.Tx, true, query) - if err == nil { - result, err = tx.Tx.StmtContext(ctx, stmt.Stmt).ExecContext(ctx, args...) - if err != nil { - tx.PreparedStmtDB.Mux.Lock() - defer tx.PreparedStmtDB.Mux.Unlock() - - go stmt.Close() - delete(tx.PreparedStmtDB.Stmts, query) - } - } - return result, err -} - -func (tx *PreparedStmtTX) QueryContext(ctx context.Context, query string, args ...interface{}) (rows *sql.Rows, err error) { - stmt, err := tx.PreparedStmtDB.prepare(ctx, tx.Tx, true, query) - if err == nil { - rows, err = tx.Tx.StmtContext(ctx, stmt.Stmt).QueryContext(ctx, args...) - if err != nil { - tx.PreparedStmtDB.Mux.Lock() - defer tx.PreparedStmtDB.Mux.Unlock() - - go stmt.Close() - delete(tx.PreparedStmtDB.Stmts, query) - } - } - return rows, err -} - -func (tx *PreparedStmtTX) QueryRowContext(ctx context.Context, query string, args ...interface{}) *sql.Row { - stmt, err := tx.PreparedStmtDB.prepare(ctx, tx.Tx, true, query) - if err == nil { - return tx.Tx.StmtContext(ctx, stmt.Stmt).QueryRowContext(ctx, args...) - } - return &sql.Row{} -} diff --git a/vendor/gorm.io/gorm/schema/field.go b/vendor/gorm.io/gorm/schema/field.go index d6df6596..e69de29b 100644 --- a/vendor/gorm.io/gorm/schema/field.go +++ b/vendor/gorm.io/gorm/schema/field.go @@ -1,952 +0,0 @@ -package schema - -import ( - "context" - "database/sql" - "database/sql/driver" - "fmt" - "reflect" - "strconv" - "strings" - "sync" - "time" - - "github.com/jinzhu/now" - "gorm.io/gorm/clause" - "gorm.io/gorm/utils" -) - -// special types' reflect type -var ( - TimeReflectType = reflect.TypeOf(time.Time{}) - TimePtrReflectType = reflect.TypeOf(&time.Time{}) - ByteReflectType = reflect.TypeOf(uint8(0)) -) - -type ( - // DataType GORM data type - DataType string - // TimeType GORM time type - TimeType int64 -) - -// GORM time types -const ( - UnixTime TimeType = 1 - UnixSecond TimeType = 2 - UnixMillisecond TimeType = 3 - UnixNanosecond TimeType = 4 -) - -// GORM fields types -const ( - Bool DataType = "bool" - Int DataType = "int" - Uint DataType = "uint" - Float DataType = "float" - String DataType = "string" - Time DataType = "time" - Bytes DataType = "bytes" -) - -// Field is the representation of model schema's field -type Field struct { - Name string - DBName string - BindNames []string - DataType DataType - GORMDataType DataType - PrimaryKey bool - AutoIncrement bool - AutoIncrementIncrement int64 - Creatable bool - Updatable bool - Readable bool - AutoCreateTime TimeType - AutoUpdateTime TimeType - HasDefaultValue bool - DefaultValue string - DefaultValueInterface interface{} - NotNull bool - Unique bool - Comment string - Size int - Precision int - Scale int - IgnoreMigration bool - FieldType reflect.Type - IndirectFieldType reflect.Type - StructField reflect.StructField - Tag reflect.StructTag - TagSettings map[string]string - Schema *Schema - EmbeddedSchema *Schema - OwnerSchema *Schema - ReflectValueOf func(context.Context, reflect.Value) reflect.Value - ValueOf func(context.Context, reflect.Value) 
(value interface{}, zero bool) - Set func(context.Context, reflect.Value, interface{}) error - Serializer SerializerInterface - NewValuePool FieldNewValuePool -} - -// ParseField parses reflect.StructField to Field -func (schema *Schema) ParseField(fieldStruct reflect.StructField) *Field { - var ( - err error - tagSetting = ParseTagSetting(fieldStruct.Tag.Get("gorm"), ";") - ) - - field := &Field{ - Name: fieldStruct.Name, - DBName: tagSetting["COLUMN"], - BindNames: []string{fieldStruct.Name}, - FieldType: fieldStruct.Type, - IndirectFieldType: fieldStruct.Type, - StructField: fieldStruct, - Tag: fieldStruct.Tag, - TagSettings: tagSetting, - Schema: schema, - Creatable: true, - Updatable: true, - Readable: true, - PrimaryKey: utils.CheckTruth(tagSetting["PRIMARYKEY"], tagSetting["PRIMARY_KEY"]), - AutoIncrement: utils.CheckTruth(tagSetting["AUTOINCREMENT"]), - HasDefaultValue: utils.CheckTruth(tagSetting["AUTOINCREMENT"]), - NotNull: utils.CheckTruth(tagSetting["NOT NULL"], tagSetting["NOTNULL"]), - Unique: utils.CheckTruth(tagSetting["UNIQUE"]), - Comment: tagSetting["COMMENT"], - AutoIncrementIncrement: 1, - } - - for field.IndirectFieldType.Kind() == reflect.Ptr { - field.IndirectFieldType = field.IndirectFieldType.Elem() - } - - fieldValue := reflect.New(field.IndirectFieldType) - // if field is valuer, used its value or first field as data type - valuer, isValuer := fieldValue.Interface().(driver.Valuer) - if isValuer { - if _, ok := fieldValue.Interface().(GormDataTypeInterface); !ok { - if v, err := valuer.Value(); reflect.ValueOf(v).IsValid() && err == nil { - fieldValue = reflect.ValueOf(v) - } - - // Use the field struct's first field type as data type, e.g: use `string` for sql.NullString - var getRealFieldValue func(reflect.Value) - getRealFieldValue = func(v reflect.Value) { - var ( - rv = reflect.Indirect(v) - rvType = rv.Type() - ) - - if rv.Kind() == reflect.Struct && !rvType.ConvertibleTo(TimeReflectType) { - for i := 0; i < rvType.NumField(); i++ { - for key, value := range ParseTagSetting(rvType.Field(i).Tag.Get("gorm"), ";") { - if _, ok := field.TagSettings[key]; !ok { - field.TagSettings[key] = value - } - } - } - - for i := 0; i < rvType.NumField(); i++ { - newFieldType := rvType.Field(i).Type - for newFieldType.Kind() == reflect.Ptr { - newFieldType = newFieldType.Elem() - } - - fieldValue = reflect.New(newFieldType) - if rvType != reflect.Indirect(fieldValue).Type() { - getRealFieldValue(fieldValue) - } - - if fieldValue.IsValid() { - return - } - } - } - } - - getRealFieldValue(fieldValue) - } - } - - if v, isSerializer := fieldValue.Interface().(SerializerInterface); isSerializer { - field.DataType = String - field.Serializer = v - } else { - var serializerName = field.TagSettings["JSON"] - if serializerName == "" { - serializerName = field.TagSettings["SERIALIZER"] - } - if serializerName != "" { - if serializer, ok := GetSerializer(serializerName); ok { - // Set default data type to string for serializer - field.DataType = String - field.Serializer = serializer - } else { - schema.err = fmt.Errorf("invalid serializer type %v", serializerName) - } - } - } - - if num, ok := field.TagSettings["AUTOINCREMENTINCREMENT"]; ok { - field.AutoIncrementIncrement, _ = strconv.ParseInt(num, 10, 64) - } - - if v, ok := field.TagSettings["DEFAULT"]; ok { - field.HasDefaultValue = true - field.DefaultValue = v - } - - if num, ok := field.TagSettings["SIZE"]; ok { - if field.Size, err = strconv.Atoi(num); err != nil { - field.Size = -1 - } - } - - if p, ok := 
field.TagSettings["PRECISION"]; ok { - field.Precision, _ = strconv.Atoi(p) - } - - if s, ok := field.TagSettings["SCALE"]; ok { - field.Scale, _ = strconv.Atoi(s) - } - - // default value is function or null or blank (primary keys) - field.DefaultValue = strings.TrimSpace(field.DefaultValue) - skipParseDefaultValue := strings.Contains(field.DefaultValue, "(") && - strings.Contains(field.DefaultValue, ")") || strings.ToLower(field.DefaultValue) == "null" || field.DefaultValue == "" - switch reflect.Indirect(fieldValue).Kind() { - case reflect.Bool: - field.DataType = Bool - if field.HasDefaultValue && !skipParseDefaultValue { - if field.DefaultValueInterface, err = strconv.ParseBool(field.DefaultValue); err != nil { - schema.err = fmt.Errorf("failed to parse %s as default value for bool, got error: %v", field.DefaultValue, err) - } - } - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - field.DataType = Int - if field.HasDefaultValue && !skipParseDefaultValue { - if field.DefaultValueInterface, err = strconv.ParseInt(field.DefaultValue, 0, 64); err != nil { - schema.err = fmt.Errorf("failed to parse %s as default value for int, got error: %v", field.DefaultValue, err) - } - } - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - field.DataType = Uint - if field.HasDefaultValue && !skipParseDefaultValue { - if field.DefaultValueInterface, err = strconv.ParseUint(field.DefaultValue, 0, 64); err != nil { - schema.err = fmt.Errorf("failed to parse %s as default value for uint, got error: %v", field.DefaultValue, err) - } - } - case reflect.Float32, reflect.Float64: - field.DataType = Float - if field.HasDefaultValue && !skipParseDefaultValue { - if field.DefaultValueInterface, err = strconv.ParseFloat(field.DefaultValue, 64); err != nil { - schema.err = fmt.Errorf("failed to parse %s as default value for float, got error: %v", field.DefaultValue, err) - } - } - case reflect.String: - field.DataType = String - if field.HasDefaultValue && !skipParseDefaultValue { - field.DefaultValue = strings.Trim(field.DefaultValue, "'") - field.DefaultValue = strings.Trim(field.DefaultValue, `"`) - field.DefaultValueInterface = field.DefaultValue - } - case reflect.Struct: - if _, ok := fieldValue.Interface().(*time.Time); ok { - field.DataType = Time - } else if fieldValue.Type().ConvertibleTo(TimeReflectType) { - field.DataType = Time - } else if fieldValue.Type().ConvertibleTo(TimePtrReflectType) { - field.DataType = Time - } - if field.HasDefaultValue && !skipParseDefaultValue && field.DataType == Time { - if t, err := now.Parse(field.DefaultValue); err == nil { - field.DefaultValueInterface = t - } - } - case reflect.Array, reflect.Slice: - if reflect.Indirect(fieldValue).Type().Elem() == ByteReflectType && field.DataType == "" { - field.DataType = Bytes - } - } - - if dataTyper, ok := fieldValue.Interface().(GormDataTypeInterface); ok { - field.DataType = DataType(dataTyper.GormDataType()) - } - - if v, ok := field.TagSettings["AUTOCREATETIME"]; (ok && utils.CheckTruth(v)) || (!ok && field.Name == "CreatedAt" && (field.DataType == Time || field.DataType == Int || field.DataType == Uint)) { - if field.DataType == Time { - field.AutoCreateTime = UnixTime - } else if strings.ToUpper(v) == "NANO" { - field.AutoCreateTime = UnixNanosecond - } else if strings.ToUpper(v) == "MILLI" { - field.AutoCreateTime = UnixMillisecond - } else { - field.AutoCreateTime = UnixSecond - } - } - - if v, ok := field.TagSettings["AUTOUPDATETIME"]; (ok && 
utils.CheckTruth(v)) || (!ok && field.Name == "UpdatedAt" && (field.DataType == Time || field.DataType == Int || field.DataType == Uint)) { - if field.DataType == Time { - field.AutoUpdateTime = UnixTime - } else if strings.ToUpper(v) == "NANO" { - field.AutoUpdateTime = UnixNanosecond - } else if strings.ToUpper(v) == "MILLI" { - field.AutoUpdateTime = UnixMillisecond - } else { - field.AutoUpdateTime = UnixSecond - } - } - - if field.GORMDataType == "" { - field.GORMDataType = field.DataType - } - - if val, ok := field.TagSettings["TYPE"]; ok { - switch DataType(strings.ToLower(val)) { - case Bool, Int, Uint, Float, String, Time, Bytes: - field.DataType = DataType(strings.ToLower(val)) - default: - field.DataType = DataType(val) - } - } - - if field.Size == 0 { - switch reflect.Indirect(fieldValue).Kind() { - case reflect.Int, reflect.Int64, reflect.Uint, reflect.Uint64, reflect.Float64: - field.Size = 64 - case reflect.Int8, reflect.Uint8: - field.Size = 8 - case reflect.Int16, reflect.Uint16: - field.Size = 16 - case reflect.Int32, reflect.Uint32, reflect.Float32: - field.Size = 32 - } - } - - // setup permission - if val, ok := field.TagSettings["-"]; ok { - val = strings.ToLower(strings.TrimSpace(val)) - switch val { - case "-": - field.Creatable = false - field.Updatable = false - field.Readable = false - field.DataType = "" - case "all": - field.Creatable = false - field.Updatable = false - field.Readable = false - field.DataType = "" - field.IgnoreMigration = true - case "migration": - field.IgnoreMigration = true - } - } - - if v, ok := field.TagSettings["->"]; ok { - field.Creatable = false - field.Updatable = false - if strings.ToLower(v) == "false" { - field.Readable = false - } else { - field.Readable = true - } - } - - if v, ok := field.TagSettings["<-"]; ok { - field.Creatable = true - field.Updatable = true - - if v != "<-" { - if !strings.Contains(v, "create") { - field.Creatable = false - } - - if !strings.Contains(v, "update") { - field.Updatable = false - } - } - } - - // Normal anonymous field or having `EMBEDDED` tag - if _, ok := field.TagSettings["EMBEDDED"]; ok || (field.GORMDataType != Time && field.GORMDataType != Bytes && !isValuer && - fieldStruct.Anonymous && (field.Creatable || field.Updatable || field.Readable)) { - kind := reflect.Indirect(fieldValue).Kind() - switch kind { - case reflect.Struct: - var err error - field.Creatable = false - field.Updatable = false - field.Readable = false - - cacheStore := &sync.Map{} - cacheStore.Store(embeddedCacheKey, true) - if field.EmbeddedSchema, err = getOrParse(fieldValue.Interface(), cacheStore, embeddedNamer{Table: schema.Table, Namer: schema.namer}); err != nil { - schema.err = err - } - - for _, ef := range field.EmbeddedSchema.Fields { - ef.Schema = schema - ef.OwnerSchema = field.EmbeddedSchema - ef.BindNames = append([]string{fieldStruct.Name}, ef.BindNames...) - // index is negative means is pointer - if field.FieldType.Kind() == reflect.Struct { - ef.StructField.Index = append([]int{fieldStruct.Index[0]}, ef.StructField.Index...) - } else { - ef.StructField.Index = append([]int{-fieldStruct.Index[0] - 1}, ef.StructField.Index...) 
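The tag parsing deleted above covers, among others, the settings shown in this hypothetical model (names and columns are illustrative only):

package example

import "time"

// Product exercises the tag settings handled by ParseField above: column naming,
// size/precision/scale, defaults, read/write permissions and time tracking.
type Product struct {
	ID        uint      `gorm:"primaryKey;autoIncrement"`
	Code      string    `gorm:"column:code;size:64;uniqueIndex;not null"`
	Price     float64   `gorm:"precision:10;scale:2;default:0"`
	Internal  string    `gorm:"-"`                    // ignored on read and write
	CreatedBy string    `gorm:"<-:create"`            // writable on create only
	Checksum  string    `gorm:"->"`                   // read-only
	CreatedAt time.Time `gorm:"autoCreateTime"`
	UpdatedAt int64     `gorm:"autoUpdateTime:milli"` // stored as Unix milliseconds
}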
- } - - if prefix, ok := field.TagSettings["EMBEDDEDPREFIX"]; ok && ef.DBName != "" { - ef.DBName = prefix + ef.DBName - } - - if ef.PrimaryKey { - if val, ok := ef.TagSettings["PRIMARYKEY"]; ok && utils.CheckTruth(val) { - ef.PrimaryKey = true - } else if val, ok := ef.TagSettings["PRIMARY_KEY"]; ok && utils.CheckTruth(val) { - ef.PrimaryKey = true - } else { - ef.PrimaryKey = false - - if val, ok := ef.TagSettings["AUTOINCREMENT"]; !ok || !utils.CheckTruth(val) { - ef.AutoIncrement = false - } - - if ef.DefaultValue == "" { - ef.HasDefaultValue = false - } - } - } - - for k, v := range field.TagSettings { - ef.TagSettings[k] = v - } - } - case reflect.Invalid, reflect.Uintptr, reflect.Array, reflect.Chan, reflect.Func, reflect.Interface, - reflect.Map, reflect.Ptr, reflect.Slice, reflect.UnsafePointer, reflect.Complex64, reflect.Complex128: - schema.err = fmt.Errorf("invalid embedded struct for %s's field %s, should be struct, but got %v", field.Schema.Name, field.Name, field.FieldType) - } - } - - return field -} - -// create valuer, setter when parse struct -func (field *Field) setupValuerAndSetter() { - // Setup NewValuePool - field.setupNewValuePool() - - // ValueOf returns field's value and if it is zero - fieldIndex := field.StructField.Index[0] - switch { - case len(field.StructField.Index) == 1 && fieldIndex > 0: - field.ValueOf = func(ctx context.Context, value reflect.Value) (interface{}, bool) { - fieldValue := reflect.Indirect(value).Field(fieldIndex) - return fieldValue.Interface(), fieldValue.IsZero() - } - default: - field.ValueOf = func(ctx context.Context, v reflect.Value) (interface{}, bool) { - v = reflect.Indirect(v) - for _, fieldIdx := range field.StructField.Index { - if fieldIdx >= 0 { - v = v.Field(fieldIdx) - } else { - v = v.Field(-fieldIdx - 1) - - if !v.IsNil() { - v = v.Elem() - } else { - return nil, true - } - } - } - - fv, zero := v.Interface(), v.IsZero() - return fv, zero - } - } - - if field.Serializer != nil { - oldValuerOf := field.ValueOf - field.ValueOf = func(ctx context.Context, v reflect.Value) (interface{}, bool) { - value, zero := oldValuerOf(ctx, v) - if zero { - return value, zero - } - - s, ok := value.(SerializerValuerInterface) - if !ok { - s = field.Serializer - } - - return &serializer{ - Field: field, - SerializeValuer: s, - Destination: v, - Context: ctx, - fieldValue: value, - }, false - } - } - - // ReflectValueOf returns field's reflect value - switch { - case len(field.StructField.Index) == 1 && fieldIndex > 0: - field.ReflectValueOf = func(ctx context.Context, value reflect.Value) reflect.Value { - return reflect.Indirect(value).Field(fieldIndex) - } - default: - field.ReflectValueOf = func(ctx context.Context, v reflect.Value) reflect.Value { - v = reflect.Indirect(v) - for idx, fieldIdx := range field.StructField.Index { - if fieldIdx >= 0 { - v = v.Field(fieldIdx) - } else { - v = v.Field(-fieldIdx - 1) - - if v.IsNil() { - v.Set(reflect.New(v.Type().Elem())) - } - - if idx < len(field.StructField.Index)-1 { - v = v.Elem() - } - } - } - return v - } - } - - fallbackSetter := func(ctx context.Context, value reflect.Value, v interface{}, setter func(context.Context, reflect.Value, interface{}) error) (err error) { - if v == nil { - field.ReflectValueOf(ctx, value).Set(reflect.New(field.FieldType).Elem()) - } else { - reflectV := reflect.ValueOf(v) - // Optimal value type acquisition for v - reflectValType := reflectV.Type() - - if reflectValType.AssignableTo(field.FieldType) { - if reflectV.Kind() == reflect.Ptr && 
reflectV.Elem().Kind() == reflect.Ptr { - reflectV = reflect.Indirect(reflectV) - } - field.ReflectValueOf(ctx, value).Set(reflectV) - return - } else if reflectValType.ConvertibleTo(field.FieldType) { - field.ReflectValueOf(ctx, value).Set(reflectV.Convert(field.FieldType)) - return - } else if field.FieldType.Kind() == reflect.Ptr { - fieldValue := field.ReflectValueOf(ctx, value) - fieldType := field.FieldType.Elem() - - if reflectValType.AssignableTo(fieldType) { - if !fieldValue.IsValid() { - fieldValue = reflect.New(fieldType) - } else if fieldValue.IsNil() { - fieldValue.Set(reflect.New(fieldType)) - } - fieldValue.Elem().Set(reflectV) - return - } else if reflectValType.ConvertibleTo(fieldType) { - if fieldValue.IsNil() { - fieldValue.Set(reflect.New(fieldType)) - } - - fieldValue.Elem().Set(reflectV.Convert(fieldType)) - return - } - } - - if reflectV.Kind() == reflect.Ptr { - if reflectV.IsNil() { - field.ReflectValueOf(ctx, value).Set(reflect.New(field.FieldType).Elem()) - } else if reflectV.Type().Elem().AssignableTo(field.FieldType) { - field.ReflectValueOf(ctx, value).Set(reflectV.Elem()) - return - } else { - err = setter(ctx, value, reflectV.Elem().Interface()) - } - } else if valuer, ok := v.(driver.Valuer); ok { - if v, err = valuer.Value(); err == nil { - err = setter(ctx, value, v) - } - } else if _, ok := v.(clause.Expr); !ok { - return fmt.Errorf("failed to set value %#v to field %s", v, field.Name) - } - } - - return - } - - // Set - switch field.FieldType.Kind() { - case reflect.Bool: - field.Set = func(ctx context.Context, value reflect.Value, v interface{}) error { - switch data := v.(type) { - case **bool: - if data != nil && *data != nil { - field.ReflectValueOf(ctx, value).SetBool(**data) - } - case bool: - field.ReflectValueOf(ctx, value).SetBool(data) - case int64: - field.ReflectValueOf(ctx, value).SetBool(data > 0) - case string: - b, _ := strconv.ParseBool(data) - field.ReflectValueOf(ctx, value).SetBool(b) - default: - return fallbackSetter(ctx, value, v, field.Set) - } - return nil - } - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - field.Set = func(ctx context.Context, value reflect.Value, v interface{}) (err error) { - switch data := v.(type) { - case **int64: - if data != nil && *data != nil { - field.ReflectValueOf(ctx, value).SetInt(**data) - } - case int64: - field.ReflectValueOf(ctx, value).SetInt(data) - case int: - field.ReflectValueOf(ctx, value).SetInt(int64(data)) - case int8: - field.ReflectValueOf(ctx, value).SetInt(int64(data)) - case int16: - field.ReflectValueOf(ctx, value).SetInt(int64(data)) - case int32: - field.ReflectValueOf(ctx, value).SetInt(int64(data)) - case uint: - field.ReflectValueOf(ctx, value).SetInt(int64(data)) - case uint8: - field.ReflectValueOf(ctx, value).SetInt(int64(data)) - case uint16: - field.ReflectValueOf(ctx, value).SetInt(int64(data)) - case uint32: - field.ReflectValueOf(ctx, value).SetInt(int64(data)) - case uint64: - field.ReflectValueOf(ctx, value).SetInt(int64(data)) - case float32: - field.ReflectValueOf(ctx, value).SetInt(int64(data)) - case float64: - field.ReflectValueOf(ctx, value).SetInt(int64(data)) - case []byte: - return field.Set(ctx, value, string(data)) - case string: - if i, err := strconv.ParseInt(data, 0, 64); err == nil { - field.ReflectValueOf(ctx, value).SetInt(i) - } else { - return err - } - case time.Time: - if field.AutoCreateTime == UnixNanosecond || field.AutoUpdateTime == UnixNanosecond { - field.ReflectValueOf(ctx, value).SetInt(data.UnixNano()) 
- } else if field.AutoCreateTime == UnixMillisecond || field.AutoUpdateTime == UnixMillisecond { - field.ReflectValueOf(ctx, value).SetInt(data.UnixNano() / 1e6) - } else { - field.ReflectValueOf(ctx, value).SetInt(data.Unix()) - } - case *time.Time: - if data != nil { - if field.AutoCreateTime == UnixNanosecond || field.AutoUpdateTime == UnixNanosecond { - field.ReflectValueOf(ctx, value).SetInt(data.UnixNano()) - } else if field.AutoCreateTime == UnixMillisecond || field.AutoUpdateTime == UnixMillisecond { - field.ReflectValueOf(ctx, value).SetInt(data.UnixNano() / 1e6) - } else { - field.ReflectValueOf(ctx, value).SetInt(data.Unix()) - } - } else { - field.ReflectValueOf(ctx, value).SetInt(0) - } - default: - return fallbackSetter(ctx, value, v, field.Set) - } - return err - } - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - field.Set = func(ctx context.Context, value reflect.Value, v interface{}) (err error) { - switch data := v.(type) { - case **uint64: - if data != nil && *data != nil { - field.ReflectValueOf(ctx, value).SetUint(**data) - } - case uint64: - field.ReflectValueOf(ctx, value).SetUint(data) - case uint: - field.ReflectValueOf(ctx, value).SetUint(uint64(data)) - case uint8: - field.ReflectValueOf(ctx, value).SetUint(uint64(data)) - case uint16: - field.ReflectValueOf(ctx, value).SetUint(uint64(data)) - case uint32: - field.ReflectValueOf(ctx, value).SetUint(uint64(data)) - case int64: - field.ReflectValueOf(ctx, value).SetUint(uint64(data)) - case int: - field.ReflectValueOf(ctx, value).SetUint(uint64(data)) - case int8: - field.ReflectValueOf(ctx, value).SetUint(uint64(data)) - case int16: - field.ReflectValueOf(ctx, value).SetUint(uint64(data)) - case int32: - field.ReflectValueOf(ctx, value).SetUint(uint64(data)) - case float32: - field.ReflectValueOf(ctx, value).SetUint(uint64(data)) - case float64: - field.ReflectValueOf(ctx, value).SetUint(uint64(data)) - case []byte: - return field.Set(ctx, value, string(data)) - case time.Time: - if field.AutoCreateTime == UnixNanosecond || field.AutoUpdateTime == UnixNanosecond { - field.ReflectValueOf(ctx, value).SetUint(uint64(data.UnixNano())) - } else if field.AutoCreateTime == UnixMillisecond || field.AutoUpdateTime == UnixMillisecond { - field.ReflectValueOf(ctx, value).SetUint(uint64(data.UnixNano() / 1e6)) - } else { - field.ReflectValueOf(ctx, value).SetUint(uint64(data.Unix())) - } - case string: - if i, err := strconv.ParseUint(data, 0, 64); err == nil { - field.ReflectValueOf(ctx, value).SetUint(i) - } else { - return err - } - default: - return fallbackSetter(ctx, value, v, field.Set) - } - return err - } - case reflect.Float32, reflect.Float64: - field.Set = func(ctx context.Context, value reflect.Value, v interface{}) (err error) { - switch data := v.(type) { - case **float64: - if data != nil && *data != nil { - field.ReflectValueOf(ctx, value).SetFloat(**data) - } - case float64: - field.ReflectValueOf(ctx, value).SetFloat(data) - case float32: - field.ReflectValueOf(ctx, value).SetFloat(float64(data)) - case int64: - field.ReflectValueOf(ctx, value).SetFloat(float64(data)) - case int: - field.ReflectValueOf(ctx, value).SetFloat(float64(data)) - case int8: - field.ReflectValueOf(ctx, value).SetFloat(float64(data)) - case int16: - field.ReflectValueOf(ctx, value).SetFloat(float64(data)) - case int32: - field.ReflectValueOf(ctx, value).SetFloat(float64(data)) - case uint: - field.ReflectValueOf(ctx, value).SetFloat(float64(data)) - case uint8: - field.ReflectValueOf(ctx, 
value).SetFloat(float64(data)) - case uint16: - field.ReflectValueOf(ctx, value).SetFloat(float64(data)) - case uint32: - field.ReflectValueOf(ctx, value).SetFloat(float64(data)) - case uint64: - field.ReflectValueOf(ctx, value).SetFloat(float64(data)) - case []byte: - return field.Set(ctx, value, string(data)) - case string: - if i, err := strconv.ParseFloat(data, 64); err == nil { - field.ReflectValueOf(ctx, value).SetFloat(i) - } else { - return err - } - default: - return fallbackSetter(ctx, value, v, field.Set) - } - return err - } - case reflect.String: - field.Set = func(ctx context.Context, value reflect.Value, v interface{}) (err error) { - switch data := v.(type) { - case **string: - if data != nil && *data != nil { - field.ReflectValueOf(ctx, value).SetString(**data) - } - case string: - field.ReflectValueOf(ctx, value).SetString(data) - case []byte: - field.ReflectValueOf(ctx, value).SetString(string(data)) - case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64: - field.ReflectValueOf(ctx, value).SetString(utils.ToString(data)) - case float64, float32: - field.ReflectValueOf(ctx, value).SetString(fmt.Sprintf("%."+strconv.Itoa(field.Precision)+"f", data)) - default: - return fallbackSetter(ctx, value, v, field.Set) - } - return err - } - default: - fieldValue := reflect.New(field.FieldType) - switch fieldValue.Elem().Interface().(type) { - case time.Time: - field.Set = func(ctx context.Context, value reflect.Value, v interface{}) error { - switch data := v.(type) { - case **time.Time: - if data != nil && *data != nil { - field.Set(ctx, value, *data) - } - case time.Time: - field.ReflectValueOf(ctx, value).Set(reflect.ValueOf(v)) - case *time.Time: - if data != nil { - field.ReflectValueOf(ctx, value).Set(reflect.ValueOf(data).Elem()) - } else { - field.ReflectValueOf(ctx, value).Set(reflect.ValueOf(time.Time{})) - } - case string: - if t, err := now.Parse(data); err == nil { - field.ReflectValueOf(ctx, value).Set(reflect.ValueOf(t)) - } else { - return fmt.Errorf("failed to set string %v to time.Time field %s, failed to parse it as time, got error %v", v, field.Name, err) - } - default: - return fallbackSetter(ctx, value, v, field.Set) - } - return nil - } - case *time.Time: - field.Set = func(ctx context.Context, value reflect.Value, v interface{}) error { - switch data := v.(type) { - case **time.Time: - if data != nil { - field.ReflectValueOf(ctx, value).Set(reflect.ValueOf(*data)) - } - case time.Time: - fieldValue := field.ReflectValueOf(ctx, value) - if fieldValue.IsNil() { - fieldValue.Set(reflect.New(field.FieldType.Elem())) - } - fieldValue.Elem().Set(reflect.ValueOf(v)) - case *time.Time: - field.ReflectValueOf(ctx, value).Set(reflect.ValueOf(v)) - case string: - if t, err := now.Parse(data); err == nil { - fieldValue := field.ReflectValueOf(ctx, value) - if fieldValue.IsNil() { - if v == "" { - return nil - } - fieldValue.Set(reflect.New(field.FieldType.Elem())) - } - fieldValue.Elem().Set(reflect.ValueOf(t)) - } else { - return fmt.Errorf("failed to set string %v to time.Time field %s, failed to parse it as time, got error %v", v, field.Name, err) - } - default: - return fallbackSetter(ctx, value, v, field.Set) - } - return nil - } - default: - if _, ok := fieldValue.Elem().Interface().(sql.Scanner); ok { - // pointer scanner - field.Set = func(ctx context.Context, value reflect.Value, v interface{}) (err error) { - reflectV := reflect.ValueOf(v) - if !reflectV.IsValid() { - field.ReflectValueOf(ctx, value).Set(reflect.New(field.FieldType).Elem()) 
- } else if reflectV.Type().AssignableTo(field.FieldType) { - field.ReflectValueOf(ctx, value).Set(reflectV) - } else if reflectV.Kind() == reflect.Ptr { - if reflectV.IsNil() || !reflectV.IsValid() { - field.ReflectValueOf(ctx, value).Set(reflect.New(field.FieldType).Elem()) - } else { - return field.Set(ctx, value, reflectV.Elem().Interface()) - } - } else { - fieldValue := field.ReflectValueOf(ctx, value) - if fieldValue.IsNil() { - fieldValue.Set(reflect.New(field.FieldType.Elem())) - } - - if valuer, ok := v.(driver.Valuer); ok { - v, _ = valuer.Value() - } - - err = fieldValue.Interface().(sql.Scanner).Scan(v) - } - return - } - } else if _, ok := fieldValue.Interface().(sql.Scanner); ok { - // struct scanner - field.Set = func(ctx context.Context, value reflect.Value, v interface{}) (err error) { - reflectV := reflect.ValueOf(v) - if !reflectV.IsValid() { - field.ReflectValueOf(ctx, value).Set(reflect.New(field.FieldType).Elem()) - } else if reflectV.Type().AssignableTo(field.FieldType) { - field.ReflectValueOf(ctx, value).Set(reflectV) - } else if reflectV.Kind() == reflect.Ptr { - if reflectV.IsNil() || !reflectV.IsValid() { - field.ReflectValueOf(ctx, value).Set(reflect.New(field.FieldType).Elem()) - } else { - return field.Set(ctx, value, reflectV.Elem().Interface()) - } - } else { - if valuer, ok := v.(driver.Valuer); ok { - v, _ = valuer.Value() - } - - err = field.ReflectValueOf(ctx, value).Addr().Interface().(sql.Scanner).Scan(v) - } - return - } - } else { - field.Set = func(ctx context.Context, value reflect.Value, v interface{}) (err error) { - return fallbackSetter(ctx, value, v, field.Set) - } - } - } - } - - if field.Serializer != nil { - var ( - oldFieldSetter = field.Set - sameElemType bool - sameType = field.FieldType == reflect.ValueOf(field.Serializer).Type() - ) - - if reflect.ValueOf(field.Serializer).Kind() == reflect.Ptr { - sameElemType = field.FieldType == reflect.ValueOf(field.Serializer).Type().Elem() - } - - field.Set = func(ctx context.Context, value reflect.Value, v interface{}) (err error) { - if s, ok := v.(*serializer); ok { - if s.fieldValue != nil { - err = oldFieldSetter(ctx, value, s.fieldValue) - } else if err = s.Serializer.Scan(ctx, field, value, s.value); err == nil { - if sameElemType { - field.ReflectValueOf(ctx, value).Set(reflect.ValueOf(s.Serializer).Elem()) - s.Serializer = reflect.New(reflect.Indirect(reflect.ValueOf(field.Serializer)).Type()).Interface().(SerializerInterface) - } else if sameType { - field.ReflectValueOf(ctx, value).Set(reflect.ValueOf(s.Serializer)) - s.Serializer = reflect.New(reflect.Indirect(reflect.ValueOf(field.Serializer)).Type()).Interface().(SerializerInterface) - } - } - } else { - err = oldFieldSetter(ctx, value, v) - } - return - } - } -} - -func (field *Field) setupNewValuePool() { - if field.Serializer != nil { - field.NewValuePool = &sync.Pool{ - New: func() interface{} { - return &serializer{ - Field: field, - Serializer: reflect.New(reflect.Indirect(reflect.ValueOf(field.Serializer)).Type()).Interface().(SerializerInterface), - } - }, - } - } - - if field.NewValuePool == nil { - field.NewValuePool = poolInitializer(reflect.PtrTo(field.IndirectFieldType)) - } -} diff --git a/vendor/gorm.io/gorm/schema/naming.go b/vendor/gorm.io/gorm/schema/naming.go index a258beed..e69de29b 100644 --- a/vendor/gorm.io/gorm/schema/naming.go +++ b/vendor/gorm.io/gorm/schema/naming.go @@ -1,181 +0,0 @@ -package schema - -import ( - "crypto/sha1" - "encoding/hex" - "regexp" - "strings" - "unicode/utf8" - - 
"github.com/jinzhu/inflection" -) - -// Namer namer interface -type Namer interface { - TableName(table string) string - SchemaName(table string) string - ColumnName(table, column string) string - JoinTableName(joinTable string) string - RelationshipFKName(Relationship) string - CheckerName(table, column string) string - IndexName(table, column string) string -} - -// Replacer replacer interface like strings.Replacer -type Replacer interface { - Replace(name string) string -} - -// NamingStrategy tables, columns naming strategy -type NamingStrategy struct { - TablePrefix string - SingularTable bool - NameReplacer Replacer - NoLowerCase bool -} - -// TableName convert string to table name -func (ns NamingStrategy) TableName(str string) string { - if ns.SingularTable { - return ns.TablePrefix + ns.toDBName(str) - } - return ns.TablePrefix + inflection.Plural(ns.toDBName(str)) -} - -// SchemaName generate schema name from table name, don't guarantee it is the reverse value of TableName -func (ns NamingStrategy) SchemaName(table string) string { - table = strings.TrimPrefix(table, ns.TablePrefix) - - if ns.SingularTable { - return ns.toSchemaName(table) - } - return ns.toSchemaName(inflection.Singular(table)) -} - -// ColumnName convert string to column name -func (ns NamingStrategy) ColumnName(table, column string) string { - return ns.toDBName(column) -} - -// JoinTableName convert string to join table name -func (ns NamingStrategy) JoinTableName(str string) string { - if !ns.NoLowerCase && strings.ToLower(str) == str { - return ns.TablePrefix + str - } - - if ns.SingularTable { - return ns.TablePrefix + ns.toDBName(str) - } - return ns.TablePrefix + inflection.Plural(ns.toDBName(str)) -} - -// RelationshipFKName generate fk name for relation -func (ns NamingStrategy) RelationshipFKName(rel Relationship) string { - return ns.formatName("fk", rel.Schema.Table, ns.toDBName(rel.Name)) -} - -// CheckerName generate checker name -func (ns NamingStrategy) CheckerName(table, column string) string { - return ns.formatName("chk", table, column) -} - -// IndexName generate index name -func (ns NamingStrategy) IndexName(table, column string) string { - return ns.formatName("idx", table, ns.toDBName(column)) -} - -func (ns NamingStrategy) formatName(prefix, table, name string) string { - formattedName := strings.ReplaceAll(strings.Join([]string{ - prefix, table, name, - }, "_"), ".", "_") - - if utf8.RuneCountInString(formattedName) > 64 { - h := sha1.New() - h.Write([]byte(formattedName)) - bs := h.Sum(nil) - - formattedName = formattedName[0:56] + hex.EncodeToString(bs)[:8] - } - return formattedName -} - -var ( - // https://github.com/golang/lint/blob/master/lint.go#L770 - commonInitialisms = []string{"API", "ASCII", "CPU", "CSS", "DNS", "EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID", "IP", "JSON", "LHS", "QPS", "RAM", "RHS", "RPC", "SLA", "SMTP", "SSH", "TLS", "TTL", "UID", "UI", "UUID", "URI", "URL", "UTF8", "VM", "XML", "XSRF", "XSS"} - commonInitialismsReplacer *strings.Replacer -) - -func init() { - commonInitialismsForReplacer := make([]string, 0, len(commonInitialisms)) - for _, initialism := range commonInitialisms { - commonInitialismsForReplacer = append(commonInitialismsForReplacer, initialism, strings.Title(strings.ToLower(initialism))) - } - commonInitialismsReplacer = strings.NewReplacer(commonInitialismsForReplacer...) 
-} - -func (ns NamingStrategy) toDBName(name string) string { - if name == "" { - return "" - } - - if ns.NameReplacer != nil { - tmpName := ns.NameReplacer.Replace(name) - - if tmpName == "" { - return name - } - - name = tmpName - } - - if ns.NoLowerCase { - return name - } - - var ( - value = commonInitialismsReplacer.Replace(name) - buf strings.Builder - lastCase, nextCase, nextNumber bool // upper case == true - curCase = value[0] <= 'Z' && value[0] >= 'A' - ) - - for i, v := range value[:len(value)-1] { - nextCase = value[i+1] <= 'Z' && value[i+1] >= 'A' - nextNumber = value[i+1] >= '0' && value[i+1] <= '9' - - if curCase { - if lastCase && (nextCase || nextNumber) { - buf.WriteRune(v + 32) - } else { - if i > 0 && value[i-1] != '_' && value[i+1] != '_' { - buf.WriteByte('_') - } - buf.WriteRune(v + 32) - } - } else { - buf.WriteRune(v) - } - - lastCase = curCase - curCase = nextCase - } - - if curCase { - if !lastCase && len(value) > 1 { - buf.WriteByte('_') - } - buf.WriteByte(value[len(value)-1] + 32) - } else { - buf.WriteByte(value[len(value)-1]) - } - ret := buf.String() - return ret -} - -func (ns NamingStrategy) toSchemaName(name string) string { - result := strings.ReplaceAll(strings.Title(strings.ReplaceAll(name, "_", " ")), " ", "") - for _, initialism := range commonInitialisms { - result = regexp.MustCompile(strings.Title(strings.ToLower(initialism))+"([A-Z]|$|_)").ReplaceAllString(result, initialism+"$1") - } - return result -} diff --git a/vendor/gorm.io/gorm/schema/schema.go b/vendor/gorm.io/gorm/schema/schema.go index eca113e9..e69de29b 100644 --- a/vendor/gorm.io/gorm/schema/schema.go +++ b/vendor/gorm.io/gorm/schema/schema.go @@ -1,323 +0,0 @@ -package schema - -import ( - "context" - "errors" - "fmt" - "go/ast" - "reflect" - "sync" - - "gorm.io/gorm/clause" - "gorm.io/gorm/logger" -) - -// ErrUnsupportedDataType unsupported data type -var ErrUnsupportedDataType = errors.New("unsupported data type") - -type Schema struct { - Name string - ModelType reflect.Type - Table string - PrioritizedPrimaryField *Field - DBNames []string - PrimaryFields []*Field - PrimaryFieldDBNames []string - Fields []*Field - FieldsByName map[string]*Field - FieldsByDBName map[string]*Field - FieldsWithDefaultDBValue []*Field // fields with default value assigned by database - Relationships Relationships - CreateClauses []clause.Interface - QueryClauses []clause.Interface - UpdateClauses []clause.Interface - DeleteClauses []clause.Interface - BeforeCreate, AfterCreate bool - BeforeUpdate, AfterUpdate bool - BeforeDelete, AfterDelete bool - BeforeSave, AfterSave bool - AfterFind bool - err error - initialized chan struct{} - namer Namer - cacheStore *sync.Map -} - -func (schema Schema) String() string { - if schema.ModelType.Name() == "" { - return fmt.Sprintf("%s(%s)", schema.Name, schema.Table) - } - return fmt.Sprintf("%s.%s", schema.ModelType.PkgPath(), schema.ModelType.Name()) -} - -func (schema Schema) MakeSlice() reflect.Value { - slice := reflect.MakeSlice(reflect.SliceOf(reflect.PtrTo(schema.ModelType)), 0, 20) - results := reflect.New(slice.Type()) - results.Elem().Set(slice) - return results -} - -func (schema Schema) LookUpField(name string) *Field { - if field, ok := schema.FieldsByDBName[name]; ok { - return field - } - if field, ok := schema.FieldsByName[name]; ok { - return field - } - return nil -} - -type Tabler interface { - TableName() string -} - -// Parse get data type from dialector -func Parse(dest interface{}, cacheStore *sync.Map, namer Namer) (*Schema, error) { - 
return ParseWithSpecialTableName(dest, cacheStore, namer, "") -} - -// ParseWithSpecialTableName get data type from dialector with extra schema table -func ParseWithSpecialTableName(dest interface{}, cacheStore *sync.Map, namer Namer, specialTableName string) (*Schema, error) { - if dest == nil { - return nil, fmt.Errorf("%w: %+v", ErrUnsupportedDataType, dest) - } - - value := reflect.ValueOf(dest) - if value.Kind() == reflect.Ptr && value.IsNil() { - value = reflect.New(value.Type().Elem()) - } - modelType := reflect.Indirect(value).Type() - - if modelType.Kind() == reflect.Interface { - modelType = reflect.Indirect(reflect.ValueOf(dest)).Elem().Type() - } - - for modelType.Kind() == reflect.Slice || modelType.Kind() == reflect.Array || modelType.Kind() == reflect.Ptr { - modelType = modelType.Elem() - } - - if modelType.Kind() != reflect.Struct { - if modelType.PkgPath() == "" { - return nil, fmt.Errorf("%w: %+v", ErrUnsupportedDataType, dest) - } - return nil, fmt.Errorf("%w: %s.%s", ErrUnsupportedDataType, modelType.PkgPath(), modelType.Name()) - } - - // Cache the Schema for performance, - // Use the modelType or modelType + schemaTable (if it present) as cache key. - var schemaCacheKey interface{} - if specialTableName != "" { - schemaCacheKey = fmt.Sprintf("%p-%s", modelType, specialTableName) - } else { - schemaCacheKey = modelType - } - - // Load exist schmema cache, return if exists - if v, ok := cacheStore.Load(schemaCacheKey); ok { - s := v.(*Schema) - // Wait for the initialization of other goroutines to complete - <-s.initialized - return s, s.err - } - - modelValue := reflect.New(modelType) - tableName := namer.TableName(modelType.Name()) - if tabler, ok := modelValue.Interface().(Tabler); ok { - tableName = tabler.TableName() - } - if en, ok := namer.(embeddedNamer); ok { - tableName = en.Table - } - if specialTableName != "" && specialTableName != tableName { - tableName = specialTableName - } - - schema := &Schema{ - Name: modelType.Name(), - ModelType: modelType, - Table: tableName, - FieldsByName: map[string]*Field{}, - FieldsByDBName: map[string]*Field{}, - Relationships: Relationships{Relations: map[string]*Relationship{}}, - cacheStore: cacheStore, - namer: namer, - initialized: make(chan struct{}), - } - // When the schema initialization is completed, the channel will be closed - defer close(schema.initialized) - - // Load exist schmema cache, return if exists - if v, ok := cacheStore.Load(schemaCacheKey); ok { - s := v.(*Schema) - // Wait for the initialization of other goroutines to complete - <-s.initialized - return s, s.err - } - - for i := 0; i < modelType.NumField(); i++ { - if fieldStruct := modelType.Field(i); ast.IsExported(fieldStruct.Name) { - if field := schema.ParseField(fieldStruct); field.EmbeddedSchema != nil { - schema.Fields = append(schema.Fields, field.EmbeddedSchema.Fields...) 
- } else { - schema.Fields = append(schema.Fields, field) - } - } - } - - for _, field := range schema.Fields { - if field.DBName == "" && field.DataType != "" { - field.DBName = namer.ColumnName(schema.Table, field.Name) - } - - if field.DBName != "" { - // nonexistence or shortest path or first appear prioritized if has permission - if v, ok := schema.FieldsByDBName[field.DBName]; !ok || ((field.Creatable || field.Updatable || field.Readable) && len(field.BindNames) < len(v.BindNames)) { - if _, ok := schema.FieldsByDBName[field.DBName]; !ok { - schema.DBNames = append(schema.DBNames, field.DBName) - } - schema.FieldsByDBName[field.DBName] = field - schema.FieldsByName[field.Name] = field - - if v != nil && v.PrimaryKey { - for idx, f := range schema.PrimaryFields { - if f == v { - schema.PrimaryFields = append(schema.PrimaryFields[0:idx], schema.PrimaryFields[idx+1:]...) - } - } - } - - if field.PrimaryKey { - schema.PrimaryFields = append(schema.PrimaryFields, field) - } - } - } - - if of, ok := schema.FieldsByName[field.Name]; !ok || of.TagSettings["-"] == "-" { - schema.FieldsByName[field.Name] = field - } - - field.setupValuerAndSetter() - } - - prioritizedPrimaryField := schema.LookUpField("id") - if prioritizedPrimaryField == nil { - prioritizedPrimaryField = schema.LookUpField("ID") - } - - if prioritizedPrimaryField != nil { - if prioritizedPrimaryField.PrimaryKey { - schema.PrioritizedPrimaryField = prioritizedPrimaryField - } else if len(schema.PrimaryFields) == 0 { - prioritizedPrimaryField.PrimaryKey = true - schema.PrioritizedPrimaryField = prioritizedPrimaryField - schema.PrimaryFields = append(schema.PrimaryFields, prioritizedPrimaryField) - } - } - - if schema.PrioritizedPrimaryField == nil && len(schema.PrimaryFields) == 1 { - schema.PrioritizedPrimaryField = schema.PrimaryFields[0] - } - - for _, field := range schema.PrimaryFields { - schema.PrimaryFieldDBNames = append(schema.PrimaryFieldDBNames, field.DBName) - } - - for _, field := range schema.Fields { - if field.HasDefaultValue && field.DefaultValueInterface == nil { - schema.FieldsWithDefaultDBValue = append(schema.FieldsWithDefaultDBValue, field) - } - } - - if field := schema.PrioritizedPrimaryField; field != nil { - switch field.GORMDataType { - case Int, Uint: - if _, ok := field.TagSettings["AUTOINCREMENT"]; !ok { - if !field.HasDefaultValue || field.DefaultValueInterface != nil { - schema.FieldsWithDefaultDBValue = append(schema.FieldsWithDefaultDBValue, field) - } - - field.HasDefaultValue = true - field.AutoIncrement = true - } - } - } - - callbacks := []string{"BeforeCreate", "AfterCreate", "BeforeUpdate", "AfterUpdate", "BeforeSave", "AfterSave", "BeforeDelete", "AfterDelete", "AfterFind"} - for _, name := range callbacks { - if methodValue := modelValue.MethodByName(name); methodValue.IsValid() { - switch methodValue.Type().String() { - case "func(*gorm.DB) error": // TODO hack - reflect.Indirect(reflect.ValueOf(schema)).FieldByName(name).SetBool(true) - default: - logger.Default.Warn(context.Background(), "Model %v don't match %vInterface, should be `%v(*gorm.DB) error`. 
Please see https://gorm.io/docs/hooks.html", schema, name, name) - } - } - } - - // Cache the schema - if v, loaded := cacheStore.LoadOrStore(schemaCacheKey, schema); loaded { - s := v.(*Schema) - // Wait for the initialization of other goroutines to complete - <-s.initialized - return s, s.err - } - - defer func() { - if schema.err != nil { - logger.Default.Error(context.Background(), schema.err.Error()) - cacheStore.Delete(modelType) - } - }() - - if _, embedded := schema.cacheStore.Load(embeddedCacheKey); !embedded { - for _, field := range schema.Fields { - if field.DataType == "" && (field.Creatable || field.Updatable || field.Readable) { - if schema.parseRelation(field); schema.err != nil { - return schema, schema.err - } else { - schema.FieldsByName[field.Name] = field - } - } - - fieldValue := reflect.New(field.IndirectFieldType) - fieldInterface := fieldValue.Interface() - if fc, ok := fieldInterface.(CreateClausesInterface); ok { - field.Schema.CreateClauses = append(field.Schema.CreateClauses, fc.CreateClauses(field)...) - } - - if fc, ok := fieldInterface.(QueryClausesInterface); ok { - field.Schema.QueryClauses = append(field.Schema.QueryClauses, fc.QueryClauses(field)...) - } - - if fc, ok := fieldInterface.(UpdateClausesInterface); ok { - field.Schema.UpdateClauses = append(field.Schema.UpdateClauses, fc.UpdateClauses(field)...) - } - - if fc, ok := fieldInterface.(DeleteClausesInterface); ok { - field.Schema.DeleteClauses = append(field.Schema.DeleteClauses, fc.DeleteClauses(field)...) - } - } - } - - return schema, schema.err -} - -func getOrParse(dest interface{}, cacheStore *sync.Map, namer Namer) (*Schema, error) { - modelType := reflect.ValueOf(dest).Type() - for modelType.Kind() == reflect.Slice || modelType.Kind() == reflect.Array || modelType.Kind() == reflect.Ptr { - modelType = modelType.Elem() - } - - if modelType.Kind() != reflect.Struct { - if modelType.PkgPath() == "" { - return nil, fmt.Errorf("%w: %+v", ErrUnsupportedDataType, dest) - } - return nil, fmt.Errorf("%w: %s.%s", ErrUnsupportedDataType, modelType.PkgPath(), modelType.Name()) - } - - if v, ok := cacheStore.Load(modelType); ok { - return v.(*Schema), nil - } - - return Parse(dest, cacheStore, namer) -} diff --git a/vendor/gorm.io/gorm/schema/serializer.go b/vendor/gorm.io/gorm/schema/serializer.go index 758a6421..e69de29b 100644 --- a/vendor/gorm.io/gorm/schema/serializer.go +++ b/vendor/gorm.io/gorm/schema/serializer.go @@ -1,157 +0,0 @@ -package schema - -import ( - "bytes" - "context" - "database/sql" - "database/sql/driver" - "encoding/gob" - "encoding/json" - "fmt" - "reflect" - "strings" - "sync" - "time" -) - -var serializerMap = sync.Map{} - -// RegisterSerializer register serializer -func RegisterSerializer(name string, serializer SerializerInterface) { - serializerMap.Store(strings.ToLower(name), serializer) -} - -// GetSerializer get serializer -func GetSerializer(name string) (serializer SerializerInterface, ok bool) { - v, ok := serializerMap.Load(strings.ToLower(name)) - if ok { - serializer, ok = v.(SerializerInterface) - } - return serializer, ok -} - -func init() { - RegisterSerializer("json", JSONSerializer{}) - RegisterSerializer("unixtime", UnixSecondSerializer{}) - RegisterSerializer("gob", GobSerializer{}) -} - -// Serializer field value serializer -type serializer struct { - Field *Field - Serializer SerializerInterface - SerializeValuer SerializerValuerInterface - Destination reflect.Value - Context context.Context - value interface{} - fieldValue interface{} -} - -// Scan 
implements sql.Scanner interface -func (s *serializer) Scan(value interface{}) error { - s.value = value - return nil -} - -// Value implements driver.Valuer interface -func (s serializer) Value() (driver.Value, error) { - return s.SerializeValuer.Value(s.Context, s.Field, s.Destination, s.fieldValue) -} - -// SerializerInterface serializer interface -type SerializerInterface interface { - Scan(ctx context.Context, field *Field, dst reflect.Value, dbValue interface{}) error - SerializerValuerInterface -} - -// SerializerValuerInterface serializer valuer interface -type SerializerValuerInterface interface { - Value(ctx context.Context, field *Field, dst reflect.Value, fieldValue interface{}) (interface{}, error) -} - -// JSONSerializer json serializer -type JSONSerializer struct { -} - -// Scan implements serializer interface -func (JSONSerializer) Scan(ctx context.Context, field *Field, dst reflect.Value, dbValue interface{}) (err error) { - fieldValue := reflect.New(field.FieldType) - - if dbValue != nil { - var bytes []byte - switch v := dbValue.(type) { - case []byte: - bytes = v - case string: - bytes = []byte(v) - default: - return fmt.Errorf("failed to unmarshal JSONB value: %#v", dbValue) - } - - err = json.Unmarshal(bytes, fieldValue.Interface()) - } - - field.ReflectValueOf(ctx, dst).Set(fieldValue.Elem()) - return -} - -// Value implements serializer interface -func (JSONSerializer) Value(ctx context.Context, field *Field, dst reflect.Value, fieldValue interface{}) (interface{}, error) { - result, err := json.Marshal(fieldValue) - return string(result), err -} - -// UnixSecondSerializer json serializer -type UnixSecondSerializer struct { -} - -// Scan implements serializer interface -func (UnixSecondSerializer) Scan(ctx context.Context, field *Field, dst reflect.Value, dbValue interface{}) (err error) { - t := sql.NullTime{} - if err = t.Scan(dbValue); err == nil && t.Valid { - err = field.Set(ctx, dst, t.Time.Unix()) - } - - return -} - -// Value implements serializer interface -func (UnixSecondSerializer) Value(ctx context.Context, field *Field, dst reflect.Value, fieldValue interface{}) (result interface{}, err error) { - switch v := fieldValue.(type) { - case int64, int, uint, uint64, int32, uint32, int16, uint16, *int64, *int, *uint, *uint64, *int32, *uint32, *int16, *uint16: - result = time.Unix(reflect.Indirect(reflect.ValueOf(v)).Int(), 0) - default: - err = fmt.Errorf("invalid field type %#v for UnixSecondSerializer, only int, uint supported", v) - } - return -} - -// GobSerializer gob serializer -type GobSerializer struct { -} - -// Scan implements serializer interface -func (GobSerializer) Scan(ctx context.Context, field *Field, dst reflect.Value, dbValue interface{}) (err error) { - fieldValue := reflect.New(field.FieldType) - - if dbValue != nil { - var bytesValue []byte - switch v := dbValue.(type) { - case []byte: - bytesValue = v - default: - return fmt.Errorf("failed to unmarshal gob value: %#v", dbValue) - } - decoder := gob.NewDecoder(bytes.NewBuffer(bytesValue)) - err = decoder.Decode(fieldValue.Interface()) - } - field.ReflectValueOf(ctx, dst).Set(fieldValue.Elem()) - return -} - -// Value implements serializer interface -func (GobSerializer) Value(ctx context.Context, field *Field, dst reflect.Value, fieldValue interface{}) (interface{}, error) { - buf := new(bytes.Buffer) - err := gob.NewEncoder(buf).Encode(fieldValue) - return buf.Bytes(), err -} diff --git a/vendor/gorm.io/gorm/soft_delete.go b/vendor/gorm.io/gorm/soft_delete.go index 6d646288..e69de29b 
100644 --- a/vendor/gorm.io/gorm/soft_delete.go +++ b/vendor/gorm.io/gorm/soft_delete.go @@ -1,157 +0,0 @@ -package gorm - -import ( - "database/sql" - "database/sql/driver" - "encoding/json" - "reflect" - - "gorm.io/gorm/clause" - "gorm.io/gorm/schema" -) - -type DeletedAt sql.NullTime - -// Scan implements the Scanner interface. -func (n *DeletedAt) Scan(value interface{}) error { - return (*sql.NullTime)(n).Scan(value) -} - -// Value implements the driver Valuer interface. -func (n DeletedAt) Value() (driver.Value, error) { - if !n.Valid { - return nil, nil - } - return n.Time, nil -} - -func (n DeletedAt) MarshalJSON() ([]byte, error) { - if n.Valid { - return json.Marshal(n.Time) - } - return json.Marshal(nil) -} - -func (n *DeletedAt) UnmarshalJSON(b []byte) error { - if string(b) == "null" { - n.Valid = false - return nil - } - err := json.Unmarshal(b, &n.Time) - if err == nil { - n.Valid = true - } - return err -} - -func (DeletedAt) QueryClauses(f *schema.Field) []clause.Interface { - return []clause.Interface{SoftDeleteQueryClause{Field: f}} -} - -type SoftDeleteQueryClause struct { - Field *schema.Field -} - -func (sd SoftDeleteQueryClause) Name() string { - return "" -} - -func (sd SoftDeleteQueryClause) Build(clause.Builder) { -} - -func (sd SoftDeleteQueryClause) MergeClause(*clause.Clause) { -} - -func (sd SoftDeleteQueryClause) ModifyStatement(stmt *Statement) { - if _, ok := stmt.Clauses["soft_delete_enabled"]; !ok && !stmt.Statement.Unscoped { - if c, ok := stmt.Clauses["WHERE"]; ok { - if where, ok := c.Expression.(clause.Where); ok && len(where.Exprs) >= 1 { - for _, expr := range where.Exprs { - if orCond, ok := expr.(clause.OrConditions); ok && len(orCond.Exprs) == 1 { - where.Exprs = []clause.Expression{clause.And(where.Exprs...)} - c.Expression = where - stmt.Clauses["WHERE"] = c - break - } - } - } - } - - stmt.AddClause(clause.Where{Exprs: []clause.Expression{ - clause.Eq{Column: clause.Column{Table: clause.CurrentTable, Name: sd.Field.DBName}, Value: nil}, - }}) - stmt.Clauses["soft_delete_enabled"] = clause.Clause{} - } -} - -func (DeletedAt) UpdateClauses(f *schema.Field) []clause.Interface { - return []clause.Interface{SoftDeleteUpdateClause{Field: f}} -} - -type SoftDeleteUpdateClause struct { - Field *schema.Field -} - -func (sd SoftDeleteUpdateClause) Name() string { - return "" -} - -func (sd SoftDeleteUpdateClause) Build(clause.Builder) { -} - -func (sd SoftDeleteUpdateClause) MergeClause(*clause.Clause) { -} - -func (sd SoftDeleteUpdateClause) ModifyStatement(stmt *Statement) { - if stmt.SQL.Len() == 0 && !stmt.Statement.Unscoped { - SoftDeleteQueryClause(sd).ModifyStatement(stmt) - } -} - -func (DeletedAt) DeleteClauses(f *schema.Field) []clause.Interface { - return []clause.Interface{SoftDeleteDeleteClause{Field: f}} -} - -type SoftDeleteDeleteClause struct { - Field *schema.Field -} - -func (sd SoftDeleteDeleteClause) Name() string { - return "" -} - -func (sd SoftDeleteDeleteClause) Build(clause.Builder) { -} - -func (sd SoftDeleteDeleteClause) MergeClause(*clause.Clause) { -} - -func (sd SoftDeleteDeleteClause) ModifyStatement(stmt *Statement) { - if stmt.SQL.Len() == 0 && !stmt.Statement.Unscoped { - curTime := stmt.DB.NowFunc() - stmt.AddClause(clause.Set{{Column: clause.Column{Name: sd.Field.DBName}, Value: curTime}}) - stmt.SetColumn(sd.Field.DBName, curTime, true) - - if stmt.Schema != nil { - _, queryValues := schema.GetIdentityFieldValuesMap(stmt.Context, stmt.ReflectValue, stmt.Schema.PrimaryFields) - column, values := 
schema.ToQueryValues(stmt.Table, stmt.Schema.PrimaryFieldDBNames, queryValues) - - if len(values) > 0 { - stmt.AddClause(clause.Where{Exprs: []clause.Expression{clause.IN{Column: column, Values: values}}}) - } - - if stmt.ReflectValue.CanAddr() && stmt.Dest != stmt.Model && stmt.Model != nil { - _, queryValues = schema.GetIdentityFieldValuesMap(stmt.Context, reflect.ValueOf(stmt.Model), stmt.Schema.PrimaryFields) - column, values = schema.ToQueryValues(stmt.Table, stmt.Schema.PrimaryFieldDBNames, queryValues) - - if len(values) > 0 { - stmt.AddClause(clause.Where{Exprs: []clause.Expression{clause.IN{Column: column, Values: values}}}) - } - } - } - - SoftDeleteQueryClause(sd).ModifyStatement(stmt) - stmt.AddClauseIfNotExists(clause.Update{}) - stmt.Build(stmt.DB.Callback().Update().Clauses...) - } -} diff --git a/vendor/gorm.io/gorm/statement.go b/vendor/gorm.io/gorm/statement.go index ed3e8716..e69de29b 100644 --- a/vendor/gorm.io/gorm/statement.go +++ b/vendor/gorm.io/gorm/statement.go @@ -1,719 +0,0 @@ -package gorm - -import ( - "context" - "database/sql" - "database/sql/driver" - "fmt" - "reflect" - "regexp" - "sort" - "strconv" - "strings" - "sync" - - "gorm.io/gorm/clause" - "gorm.io/gorm/logger" - "gorm.io/gorm/schema" - "gorm.io/gorm/utils" -) - -// Statement statement -type Statement struct { - *DB - TableExpr *clause.Expr - Table string - Model interface{} - Unscoped bool - Dest interface{} - ReflectValue reflect.Value - Clauses map[string]clause.Clause - BuildClauses []string - Distinct bool - Selects []string // selected columns - Omits []string // omit columns - Joins []join - Preloads map[string][]interface{} - Settings sync.Map - ConnPool ConnPool - Schema *schema.Schema - Context context.Context - RaiseErrorOnNotFound bool - SkipHooks bool - SQL strings.Builder - Vars []interface{} - CurDestIndex int - attrs []interface{} - assigns []interface{} - scopes []func(*DB) *DB -} - -type join struct { - Name string - Conds []interface{} - On *clause.Where -} - -// StatementModifier statement modifier interface -type StatementModifier interface { - ModifyStatement(*Statement) -} - -// WriteString write string -func (stmt *Statement) WriteString(str string) (int, error) { - return stmt.SQL.WriteString(str) -} - -// WriteByte write byte -func (stmt *Statement) WriteByte(c byte) error { - return stmt.SQL.WriteByte(c) -} - -// WriteQuoted write quoted value -func (stmt *Statement) WriteQuoted(value interface{}) { - stmt.QuoteTo(&stmt.SQL, value) -} - -// QuoteTo write quoted value to writer -func (stmt *Statement) QuoteTo(writer clause.Writer, field interface{}) { - write := func(raw bool, str string) { - if raw { - writer.WriteString(str) - } else { - stmt.DB.Dialector.QuoteTo(writer, str) - } - } - - switch v := field.(type) { - case clause.Table: - if v.Name == clause.CurrentTable { - if stmt.TableExpr != nil { - stmt.TableExpr.Build(stmt) - } else { - write(v.Raw, stmt.Table) - } - } else { - write(v.Raw, v.Name) - } - - if v.Alias != "" { - writer.WriteByte(' ') - write(v.Raw, v.Alias) - } - case clause.Column: - if v.Table != "" { - if v.Table == clause.CurrentTable { - write(v.Raw, stmt.Table) - } else { - write(v.Raw, v.Table) - } - writer.WriteByte('.') - } - - if v.Name == clause.PrimaryKey { - if stmt.Schema == nil { - stmt.DB.AddError(ErrModelValueRequired) - } else if stmt.Schema.PrioritizedPrimaryField != nil { - write(v.Raw, stmt.Schema.PrioritizedPrimaryField.DBName) - } else if len(stmt.Schema.DBNames) > 0 { - write(v.Raw, stmt.Schema.DBNames[0]) - } - } else { - 
write(v.Raw, v.Name) - } - - if v.Alias != "" { - writer.WriteString(" AS ") - write(v.Raw, v.Alias) - } - case []clause.Column: - writer.WriteByte('(') - for idx, d := range v { - if idx > 0 { - writer.WriteByte(',') - } - stmt.QuoteTo(writer, d) - } - writer.WriteByte(')') - case clause.Expr: - v.Build(stmt) - case string: - stmt.DB.Dialector.QuoteTo(writer, v) - case []string: - writer.WriteByte('(') - for idx, d := range v { - if idx > 0 { - writer.WriteByte(',') - } - stmt.DB.Dialector.QuoteTo(writer, d) - } - writer.WriteByte(')') - default: - stmt.DB.Dialector.QuoteTo(writer, fmt.Sprint(field)) - } -} - -// Quote returns quoted value -func (stmt *Statement) Quote(field interface{}) string { - var builder strings.Builder - stmt.QuoteTo(&builder, field) - return builder.String() -} - -// AddVar add var -func (stmt *Statement) AddVar(writer clause.Writer, vars ...interface{}) { - for idx, v := range vars { - if idx > 0 { - writer.WriteByte(',') - } - - switch v := v.(type) { - case sql.NamedArg: - stmt.Vars = append(stmt.Vars, v.Value) - case clause.Column, clause.Table: - stmt.QuoteTo(writer, v) - case Valuer: - reflectValue := reflect.ValueOf(v) - if reflectValue.Kind() == reflect.Ptr && reflectValue.IsNil() { - stmt.AddVar(writer, nil) - } else { - stmt.AddVar(writer, v.GormValue(stmt.Context, stmt.DB)) - } - case clause.Expression: - v.Build(stmt) - case driver.Valuer: - stmt.Vars = append(stmt.Vars, v) - stmt.DB.Dialector.BindVarTo(writer, stmt, v) - case []byte: - stmt.Vars = append(stmt.Vars, v) - stmt.DB.Dialector.BindVarTo(writer, stmt, v) - case []interface{}: - if len(v) > 0 { - writer.WriteByte('(') - stmt.AddVar(writer, v...) - writer.WriteByte(')') - } else { - writer.WriteString("(NULL)") - } - case *DB: - subdb := v.Session(&Session{Logger: logger.Discard, DryRun: true}).getInstance() - if v.Statement.SQL.Len() > 0 { - var ( - vars = subdb.Statement.Vars - sql = v.Statement.SQL.String() - ) - - subdb.Statement.Vars = make([]interface{}, 0, len(vars)) - for _, vv := range vars { - subdb.Statement.Vars = append(subdb.Statement.Vars, vv) - bindvar := strings.Builder{} - v.Dialector.BindVarTo(&bindvar, subdb.Statement, vv) - sql = strings.Replace(sql, bindvar.String(), "?", 1) - } - - subdb.Statement.SQL.Reset() - subdb.Statement.Vars = stmt.Vars - if strings.Contains(sql, "@") { - clause.NamedExpr{SQL: sql, Vars: vars}.Build(subdb.Statement) - } else { - clause.Expr{SQL: sql, Vars: vars}.Build(subdb.Statement) - } - } else { - subdb.Statement.Vars = append(stmt.Vars, subdb.Statement.Vars...) 
- subdb.callbacks.Query().Execute(subdb) - } - - writer.WriteString(subdb.Statement.SQL.String()) - stmt.Vars = subdb.Statement.Vars - default: - switch rv := reflect.ValueOf(v); rv.Kind() { - case reflect.Slice, reflect.Array: - if rv.Len() == 0 { - writer.WriteString("(NULL)") - } else if rv.Type().Elem() == reflect.TypeOf(uint8(0)) { - stmt.Vars = append(stmt.Vars, v) - stmt.DB.Dialector.BindVarTo(writer, stmt, v) - } else { - writer.WriteByte('(') - for i := 0; i < rv.Len(); i++ { - if i > 0 { - writer.WriteByte(',') - } - stmt.AddVar(writer, rv.Index(i).Interface()) - } - writer.WriteByte(')') - } - default: - stmt.Vars = append(stmt.Vars, v) - stmt.DB.Dialector.BindVarTo(writer, stmt, v) - } - } - } -} - -// AddClause add clause -func (stmt *Statement) AddClause(v clause.Interface) { - if optimizer, ok := v.(StatementModifier); ok { - optimizer.ModifyStatement(stmt) - } else { - name := v.Name() - c := stmt.Clauses[name] - c.Name = name - v.MergeClause(&c) - stmt.Clauses[name] = c - } -} - -// AddClauseIfNotExists add clause if not exists -func (stmt *Statement) AddClauseIfNotExists(v clause.Interface) { - if c, ok := stmt.Clauses[v.Name()]; !ok || c.Expression == nil { - stmt.AddClause(v) - } -} - -// BuildCondition build condition -func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) []clause.Expression { - if s, ok := query.(string); ok { - // if it is a number, then treats it as primary key - if _, err := strconv.Atoi(s); err != nil { - if s == "" && len(args) == 0 { - return nil - } - - if len(args) == 0 || (len(args) > 0 && strings.Contains(s, "?")) { - // looks like a where condition - return []clause.Expression{clause.Expr{SQL: s, Vars: args}} - } - - if len(args) > 0 && strings.Contains(s, "@") { - // looks like a named query - return []clause.Expression{clause.NamedExpr{SQL: s, Vars: args}} - } - - if strings.Contains(strings.TrimSpace(s), " ") { - // looks like a where condition - return []clause.Expression{clause.Expr{SQL: s, Vars: args}} - } - - if len(args) == 1 { - return []clause.Expression{clause.Eq{Column: s, Value: args[0]}} - } - } - } - - conds := make([]clause.Expression, 0, 4) - args = append([]interface{}{query}, args...) 
- for idx, arg := range args { - if valuer, ok := arg.(driver.Valuer); ok { - arg, _ = valuer.Value() - } - - switch v := arg.(type) { - case clause.Expression: - conds = append(conds, v) - case *DB: - for _, scope := range v.Statement.scopes { - v = scope(v) - } - - if cs, ok := v.Statement.Clauses["WHERE"]; ok { - if where, ok := cs.Expression.(clause.Where); ok { - if len(where.Exprs) == 1 { - if orConds, ok := where.Exprs[0].(clause.OrConditions); ok { - where.Exprs[0] = clause.AndConditions(orConds) - } - } - conds = append(conds, clause.And(where.Exprs...)) - } else if cs.Expression != nil { - conds = append(conds, cs.Expression) - } - } - case map[interface{}]interface{}: - for i, j := range v { - conds = append(conds, clause.Eq{Column: i, Value: j}) - } - case map[string]string: - keys := make([]string, 0, len(v)) - for i := range v { - keys = append(keys, i) - } - sort.Strings(keys) - - for _, key := range keys { - conds = append(conds, clause.Eq{Column: key, Value: v[key]}) - } - case map[string]interface{}: - keys := make([]string, 0, len(v)) - for i := range v { - keys = append(keys, i) - } - sort.Strings(keys) - - for _, key := range keys { - reflectValue := reflect.Indirect(reflect.ValueOf(v[key])) - switch reflectValue.Kind() { - case reflect.Slice, reflect.Array: - if _, ok := v[key].(driver.Valuer); ok { - conds = append(conds, clause.Eq{Column: key, Value: v[key]}) - } else if _, ok := v[key].(Valuer); ok { - conds = append(conds, clause.Eq{Column: key, Value: v[key]}) - } else { - // optimize reflect value length - valueLen := reflectValue.Len() - values := make([]interface{}, valueLen) - for i := 0; i < valueLen; i++ { - values[i] = reflectValue.Index(i).Interface() - } - - conds = append(conds, clause.IN{Column: key, Values: values}) - } - default: - conds = append(conds, clause.Eq{Column: key, Value: v[key]}) - } - } - default: - reflectValue := reflect.Indirect(reflect.ValueOf(arg)) - for reflectValue.Kind() == reflect.Ptr { - reflectValue = reflectValue.Elem() - } - - if s, err := schema.Parse(arg, stmt.DB.cacheStore, stmt.DB.NamingStrategy); err == nil { - selectedColumns := map[string]bool{} - if idx == 0 { - for _, v := range args[1:] { - if vs, ok := v.(string); ok { - selectedColumns[vs] = true - } - } - } - restricted := len(selectedColumns) != 0 - - switch reflectValue.Kind() { - case reflect.Struct: - for _, field := range s.Fields { - selected := selectedColumns[field.DBName] || selectedColumns[field.Name] - if selected || (!restricted && field.Readable) { - if v, isZero := field.ValueOf(stmt.Context, reflectValue); !isZero || selected { - if field.DBName != "" { - conds = append(conds, clause.Eq{Column: clause.Column{Table: clause.CurrentTable, Name: field.DBName}, Value: v}) - } else if field.DataType != "" { - conds = append(conds, clause.Eq{Column: clause.Column{Table: clause.CurrentTable, Name: field.Name}, Value: v}) - } - } - } - } - case reflect.Slice, reflect.Array: - for i := 0; i < reflectValue.Len(); i++ { - for _, field := range s.Fields { - selected := selectedColumns[field.DBName] || selectedColumns[field.Name] - if selected || (!restricted && field.Readable) { - if v, isZero := field.ValueOf(stmt.Context, reflectValue.Index(i)); !isZero || selected { - if field.DBName != "" { - conds = append(conds, clause.Eq{Column: clause.Column{Table: clause.CurrentTable, Name: field.DBName}, Value: v}) - } else if field.DataType != "" { - conds = append(conds, clause.Eq{Column: clause.Column{Table: clause.CurrentTable, Name: field.Name}, Value: v}) - } - 
} - } - } - } - } - - if restricted { - break - } - } else if !reflectValue.IsValid() { - stmt.AddError(ErrInvalidData) - } else if len(conds) == 0 { - if len(args) == 1 { - switch reflectValue.Kind() { - case reflect.Slice, reflect.Array: - // optimize reflect value length - valueLen := reflectValue.Len() - values := make([]interface{}, valueLen) - for i := 0; i < valueLen; i++ { - values[i] = reflectValue.Index(i).Interface() - } - - if len(values) > 0 { - conds = append(conds, clause.IN{Column: clause.PrimaryColumn, Values: values}) - } - return conds - } - } - - conds = append(conds, clause.IN{Column: clause.PrimaryColumn, Values: args}) - } - } - } - - return conds -} - -// Build build sql with clauses names -func (stmt *Statement) Build(clauses ...string) { - var firstClauseWritten bool - - for _, name := range clauses { - if c, ok := stmt.Clauses[name]; ok { - if firstClauseWritten { - stmt.WriteByte(' ') - } - - firstClauseWritten = true - if b, ok := stmt.DB.ClauseBuilders[name]; ok { - b(c, stmt) - } else { - c.Build(stmt) - } - } - } -} - -func (stmt *Statement) Parse(value interface{}) (err error) { - return stmt.ParseWithSpecialTableName(value, "") -} - -func (stmt *Statement) ParseWithSpecialTableName(value interface{}, specialTableName string) (err error) { - if stmt.Schema, err = schema.ParseWithSpecialTableName(value, stmt.DB.cacheStore, stmt.DB.NamingStrategy, specialTableName); err == nil && stmt.Table == "" { - if tables := strings.Split(stmt.Schema.Table, "."); len(tables) == 2 { - stmt.TableExpr = &clause.Expr{SQL: stmt.Quote(stmt.Schema.Table)} - stmt.Table = tables[1] - return - } - - stmt.Table = stmt.Schema.Table - } - return err -} - -func (stmt *Statement) clone() *Statement { - newStmt := &Statement{ - TableExpr: stmt.TableExpr, - Table: stmt.Table, - Model: stmt.Model, - Unscoped: stmt.Unscoped, - Dest: stmt.Dest, - ReflectValue: stmt.ReflectValue, - Clauses: map[string]clause.Clause{}, - Distinct: stmt.Distinct, - Selects: stmt.Selects, - Omits: stmt.Omits, - Preloads: map[string][]interface{}{}, - ConnPool: stmt.ConnPool, - Schema: stmt.Schema, - Context: stmt.Context, - RaiseErrorOnNotFound: stmt.RaiseErrorOnNotFound, - SkipHooks: stmt.SkipHooks, - } - - if stmt.SQL.Len() > 0 { - newStmt.SQL.WriteString(stmt.SQL.String()) - newStmt.Vars = make([]interface{}, 0, len(stmt.Vars)) - newStmt.Vars = append(newStmt.Vars, stmt.Vars...) 
- } - - for k, c := range stmt.Clauses { - newStmt.Clauses[k] = c - } - - for k, p := range stmt.Preloads { - newStmt.Preloads[k] = p - } - - if len(stmt.Joins) > 0 { - newStmt.Joins = make([]join, len(stmt.Joins)) - copy(newStmt.Joins, stmt.Joins) - } - - if len(stmt.scopes) > 0 { - newStmt.scopes = make([]func(*DB) *DB, len(stmt.scopes)) - copy(newStmt.scopes, stmt.scopes) - } - - stmt.Settings.Range(func(k, v interface{}) bool { - newStmt.Settings.Store(k, v) - return true - }) - - return newStmt -} - -// SetColumn set column's value -// stmt.SetColumn("Name", "jinzhu") // Hooks Method -// stmt.SetColumn("Name", "jinzhu", true) // Callbacks Method -func (stmt *Statement) SetColumn(name string, value interface{}, fromCallbacks ...bool) { - if v, ok := stmt.Dest.(map[string]interface{}); ok { - v[name] = value - } else if v, ok := stmt.Dest.([]map[string]interface{}); ok { - for _, m := range v { - m[name] = value - } - } else if stmt.Schema != nil { - if field := stmt.Schema.LookUpField(name); field != nil { - destValue := reflect.ValueOf(stmt.Dest) - for destValue.Kind() == reflect.Ptr { - destValue = destValue.Elem() - } - - if stmt.ReflectValue != destValue { - if !destValue.CanAddr() { - destValueCanAddr := reflect.New(destValue.Type()) - destValueCanAddr.Elem().Set(destValue) - stmt.Dest = destValueCanAddr.Interface() - destValue = destValueCanAddr.Elem() - } - - switch destValue.Kind() { - case reflect.Struct: - stmt.AddError(field.Set(stmt.Context, destValue, value)) - default: - stmt.AddError(ErrInvalidData) - } - } - - switch stmt.ReflectValue.Kind() { - case reflect.Slice, reflect.Array: - if len(fromCallbacks) > 0 { - for i := 0; i < stmt.ReflectValue.Len(); i++ { - stmt.AddError(field.Set(stmt.Context, stmt.ReflectValue.Index(i), value)) - } - } else { - stmt.AddError(field.Set(stmt.Context, stmt.ReflectValue.Index(stmt.CurDestIndex), value)) - } - case reflect.Struct: - if !stmt.ReflectValue.CanAddr() { - stmt.AddError(ErrInvalidValue) - return - } - - stmt.AddError(field.Set(stmt.Context, stmt.ReflectValue, value)) - } - } else { - stmt.AddError(ErrInvalidField) - } - } else { - stmt.AddError(ErrInvalidField) - } -} - -// Changed check model changed or not when updating -func (stmt *Statement) Changed(fields ...string) bool { - modelValue := stmt.ReflectValue - switch modelValue.Kind() { - case reflect.Slice, reflect.Array: - modelValue = stmt.ReflectValue.Index(stmt.CurDestIndex) - } - - selectColumns, restricted := stmt.SelectAndOmitColumns(false, true) - changed := func(field *schema.Field) bool { - fieldValue, _ := field.ValueOf(stmt.Context, modelValue) - if v, ok := selectColumns[field.DBName]; (ok && v) || (!ok && !restricted) { - if mv, mok := stmt.Dest.(map[string]interface{}); mok { - if fv, ok := mv[field.Name]; ok { - return !utils.AssertEqual(fv, fieldValue) - } else if fv, ok := mv[field.DBName]; ok { - return !utils.AssertEqual(fv, fieldValue) - } - } else { - destValue := reflect.ValueOf(stmt.Dest) - for destValue.Kind() == reflect.Ptr { - destValue = destValue.Elem() - } - - changedValue, zero := field.ValueOf(stmt.Context, destValue) - if v { - return !utils.AssertEqual(changedValue, fieldValue) - } - return !zero && !utils.AssertEqual(changedValue, fieldValue) - } - } - return false - } - - if len(fields) == 0 { - for _, field := range stmt.Schema.FieldsByDBName { - if changed(field) { - return true - } - } - } else { - for _, name := range fields { - if field := stmt.Schema.LookUpField(name); field != nil { - if changed(field) { - return true - } - } - } 
- } - - return false -} - -var nameMatcher = regexp.MustCompile(`^[\W]?(?:[a-z_]+?)[\W]?\.[\W]?([a-z_]+?)[\W]?$`) - -// SelectAndOmitColumns get select and omit columns, select -> true, omit -> false -func (stmt *Statement) SelectAndOmitColumns(requireCreate, requireUpdate bool) (map[string]bool, bool) { - results := map[string]bool{} - notRestricted := false - - // select columns - for _, column := range stmt.Selects { - if stmt.Schema == nil { - results[column] = true - } else if column == "*" { - notRestricted = true - for _, dbName := range stmt.Schema.DBNames { - results[dbName] = true - } - } else if column == clause.Associations { - for _, rel := range stmt.Schema.Relationships.Relations { - results[rel.Name] = true - } - } else if field := stmt.Schema.LookUpField(column); field != nil && field.DBName != "" { - results[field.DBName] = true - } else if matches := nameMatcher.FindStringSubmatch(column); len(matches) == 2 { - results[matches[1]] = true - } else { - results[column] = true - } - } - - // omit columns - for _, omit := range stmt.Omits { - if stmt.Schema == nil { - results[omit] = false - } else if omit == "*" { - for _, dbName := range stmt.Schema.DBNames { - results[dbName] = false - } - } else if omit == clause.Associations { - for _, rel := range stmt.Schema.Relationships.Relations { - results[rel.Name] = false - } - } else if field := stmt.Schema.LookUpField(omit); field != nil && field.DBName != "" { - results[field.DBName] = false - } else if matches := nameMatcher.FindStringSubmatch(omit); len(matches) == 2 { - results[matches[1]] = false - } else { - results[omit] = false - } - } - - if stmt.Schema != nil { - for _, field := range stmt.Schema.FieldsByName { - name := field.DBName - if name == "" { - name = field.Name - } - - if requireCreate && !field.Creatable { - results[name] = false - } else if requireUpdate && !field.Updatable { - results[name] = false - } - } - } - - return results, !notRestricted && len(stmt.Selects) > 0 -}