2018-11-21 10:17:05 +00:00
|
|
|
package curl2info
|
|
|
|
|
|
|
|
import (
|
|
|
|
"fmt"
|
|
|
|
"io/ioutil"
|
2018-11-22 15:05:42 +00:00
|
|
|
"log"
|
2018-11-21 10:17:05 +00:00
|
|
|
"net/http"
|
|
|
|
"net/http/cookiejar"
|
|
|
|
"net/url"
|
|
|
|
"os"
|
|
|
|
"regexp"
|
2018-11-27 10:49:52 +00:00
|
|
|
"strconv"
|
2018-11-21 10:17:05 +00:00
|
|
|
"strings"
|
|
|
|
|
2018-11-22 17:33:33 +00:00
|
|
|
"474420502.top/eson/requests"
|
2018-11-21 10:17:05 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
// CURL holds every piece of information parsed out of a raw curl
// command line (URL, method, headers, cookies, body, auth, ...).
// It is the intermediate representation from which a requests.Session
// and requests.Workflow are built.
type CURL struct {
	// ParsedURL is the request target (the quoted URL argument of the curl line).
	ParsedURL *url.URL

	// Method is the HTTP verb (e.g. "GET", "POST"); defaults to "GET" when
	// the curl line does not specify one.
	Method string

	// Header collects all headers parsed from -H/--header options
	// (except Cookie and Content-Type, which are handled specially).
	Header http.Header

	// CookieJar receives the cookies parsed from a Cookie header.
	CookieJar http.CookieJar

	// Cookies is the flat list of cookies parsed from a Cookie header.
	Cookies []*http.Cookie

	// Body is the request body built from -d/--data* options.
	Body *requests.Body

	// Auth holds basic-auth credentials from -u/--user, nil when absent.
	Auth *requests.BasicAuth

	Timeout int // request timeout in seconds (from --connect-timeout; default 30)

	// Insecure mirrors -k/--insecure: skip TLS certificate verification.
	Insecure bool

	// CallBack is the value of the non-standard --call option.
	CallBack string
}
|
|
|
|
|
|
|
|
// NewCURL new 一个 curl 出来
|
2018-11-26 17:32:49 +00:00
|
|
|
func NewCURL() *CURL {
|
2018-11-21 10:17:05 +00:00
|
|
|
|
|
|
|
u := &CURL{}
|
2018-11-22 17:33:33 +00:00
|
|
|
u.Insecure = false
|
|
|
|
|
2018-11-21 10:17:05 +00:00
|
|
|
u.Header = make(http.Header)
|
|
|
|
u.CookieJar, _ = cookiejar.New(nil)
|
|
|
|
u.Body = requests.NewBody()
|
2018-11-27 10:49:52 +00:00
|
|
|
u.Timeout = 30
|
2018-11-21 10:17:05 +00:00
|
|
|
|
|
|
|
return u
|
|
|
|
}
|
|
|
|
|
|
|
|
func (curl *CURL) String() string {
|
2018-11-23 07:30:52 +00:00
|
|
|
if curl != nil {
|
|
|
|
return fmt.Sprintf("Method: %s\nParsedURL: %s\nHeader: %s\nCookie: %s",
|
|
|
|
curl.Method, curl.ParsedURL.String(), curl.Header, curl.Cookies)
|
|
|
|
}
|
|
|
|
return ""
|
2018-11-21 10:17:05 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// CreateSession 创建Session
|
|
|
|
func (curl *CURL) CreateSession() *requests.Session {
|
|
|
|
ses := requests.NewSession()
|
|
|
|
ses.SetHeader(curl.Header)
|
|
|
|
ses.SetCookies(curl.ParsedURL, curl.Cookies)
|
2018-11-27 10:49:52 +00:00
|
|
|
ses.SetConfig(requests.CRequestTimeout, curl.Timeout)
|
2018-11-22 17:33:33 +00:00
|
|
|
|
|
|
|
if curl.Auth != nil {
|
|
|
|
ses.SetConfig(requests.CBasicAuth, curl.Auth)
|
|
|
|
}
|
|
|
|
|
|
|
|
if curl.Insecure {
|
|
|
|
ses.SetConfig(requests.CInsecure, curl.Insecure)
|
|
|
|
}
|
|
|
|
|
2018-11-21 10:17:05 +00:00
|
|
|
return ses
|
|
|
|
}
|
|
|
|
|
|
|
|
// CreateWorkflow 根据Session 创建Workflow
|
|
|
|
func (curl *CURL) CreateWorkflow(ses *requests.Session) *requests.Workflow {
|
|
|
|
var wf *requests.Workflow
|
2018-11-22 17:33:33 +00:00
|
|
|
|
2018-11-27 06:05:24 +00:00
|
|
|
if ses == nil {
|
|
|
|
ses = curl.CreateSession()
|
|
|
|
}
|
|
|
|
|
2018-11-21 10:17:05 +00:00
|
|
|
switch curl.Method {
|
|
|
|
case "HEAD":
|
|
|
|
wf = ses.Head(curl.ParsedURL.String())
|
|
|
|
case "GET":
|
|
|
|
wf = ses.Get(curl.ParsedURL.String())
|
|
|
|
case "POST":
|
|
|
|
wf = ses.Post(curl.ParsedURL.String())
|
|
|
|
case "PUT":
|
|
|
|
wf = ses.Put(curl.ParsedURL.String())
|
|
|
|
case "PATCH":
|
|
|
|
wf = ses.Patch(curl.ParsedURL.String())
|
|
|
|
case "OPTIONS":
|
|
|
|
wf = ses.Options(curl.ParsedURL.String())
|
|
|
|
case "DELETE":
|
|
|
|
wf = ses.Delete(curl.ParsedURL.String())
|
|
|
|
}
|
|
|
|
|
|
|
|
wf.SetBody(curl.Body)
|
|
|
|
return wf
|
|
|
|
}
|
|
|
|
|
|
|
|
// ParseRawCURL curl_bash
|
|
|
|
func ParseRawCURL(scurl string) (cURL *CURL, err error) {
|
|
|
|
|
|
|
|
defer func() {
|
|
|
|
if _err := recover(); _err != nil {
|
|
|
|
cURL = nil
|
|
|
|
err = _err.(error)
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
2018-11-23 07:30:52 +00:00
|
|
|
executor := newPQueueExecute()
|
2018-11-22 17:33:33 +00:00
|
|
|
|
2018-11-21 10:17:05 +00:00
|
|
|
curl := NewCURL()
|
2018-11-23 10:36:32 +00:00
|
|
|
|
|
|
|
if scurl[0] == '"' && scurl[len(scurl)-1] == '"' {
|
|
|
|
scurl = strings.Trim(scurl, `"`)
|
|
|
|
scurl = strings.TrimSpace(scurl)
|
|
|
|
} else if scurl[0] == '\'' && scurl[len(scurl)-1] == '\'' {
|
|
|
|
scurl = strings.Trim(scurl, `'`)
|
|
|
|
scurl = strings.TrimSpace(scurl)
|
|
|
|
} else {
|
|
|
|
scurl = strings.TrimSpace(scurl)
|
|
|
|
}
|
|
|
|
|
2018-11-21 10:17:05 +00:00
|
|
|
scurl = strings.TrimLeft(scurl, "curl")
|
|
|
|
mathches := regexp.MustCompile(`--[^ ]+ +'[^']+'|--[^ ]+ +[^ ]+|-[A-Za-z] +'[^']+'|-[A-Za-z] +[^ ]+| '[^']+'|--[a-z]+ {0,}`).FindAllString(scurl, -1)
|
|
|
|
for _, m := range mathches {
|
|
|
|
m = strings.TrimSpace(m)
|
|
|
|
switch v := m[0]; v {
|
|
|
|
case '\'':
|
|
|
|
purl, err := url.Parse(strings.Trim(m, "'"))
|
2018-11-21 10:20:50 +00:00
|
|
|
if err != nil {
|
|
|
|
panic(err)
|
|
|
|
}
|
2018-11-21 10:17:05 +00:00
|
|
|
curl.ParsedURL = purl
|
|
|
|
case '-':
|
2018-11-22 17:33:33 +00:00
|
|
|
exec := judgeAndParseOptions(curl, m)
|
|
|
|
if exec != nil {
|
|
|
|
executor.Push(exec)
|
|
|
|
}
|
2018-11-21 10:17:05 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-11-22 17:33:33 +00:00
|
|
|
for executor.Len() > 0 {
|
|
|
|
exec := executor.Pop()
|
|
|
|
exec.Execute()
|
|
|
|
}
|
|
|
|
|
2018-11-21 10:17:05 +00:00
|
|
|
if curl.Method == "" {
|
|
|
|
curl.Method = "GET"
|
|
|
|
}
|
|
|
|
|
|
|
|
return curl, nil
|
|
|
|
}
|
|
|
|
|
2018-11-23 07:30:52 +00:00
|
|
|
func judgeAndParseOptions(u *CURL, soption string) *parseFunction {
|
2018-11-21 10:17:05 +00:00
|
|
|
switch prefix := soption[0:2]; prefix {
|
|
|
|
case "-H":
|
2018-11-23 07:30:52 +00:00
|
|
|
return &parseFunction{ParamCURL: u, ParamData: soption, ExecuteFunction: parseHeader, Prioty: 10}
|
2018-11-21 10:17:05 +00:00
|
|
|
case "-X":
|
2018-11-23 07:30:52 +00:00
|
|
|
return &parseFunction{ParamCURL: u, ParamData: soption, ExecuteFunction: parseOptX, Prioty: 10}
|
2018-11-21 10:47:22 +00:00
|
|
|
case "-A": // User-Agent 先后顺序的问题
|
2018-11-22 17:33:33 +00:00
|
|
|
data := extractData("^-A +(.+)", soption)
|
2018-11-23 07:30:52 +00:00
|
|
|
return &parseFunction{ParamCURL: u, ParamData: data, ExecuteFunction: parseUserAgent, Prioty: 15}
|
2018-11-21 10:47:22 +00:00
|
|
|
case "-I":
|
2018-11-23 07:30:52 +00:00
|
|
|
return &parseFunction{ParamCURL: u, ParamData: soption, ExecuteFunction: parseOptI, Prioty: 15}
|
2018-11-21 10:47:22 +00:00
|
|
|
case "--":
|
2018-11-22 15:05:42 +00:00
|
|
|
return parseLongOption(u, soption)
|
2018-11-22 17:33:33 +00:00
|
|
|
case "-d":
|
|
|
|
data := extractData("^-d +(.+)", soption)
|
2018-11-23 07:30:52 +00:00
|
|
|
return &parseFunction{ParamCURL: u, ParamData: data, ExecuteFunction: parseBodyASCII, Prioty: 10}
|
2018-11-22 17:33:33 +00:00
|
|
|
case "-u":
|
|
|
|
data := extractData("^-u +(.+)", soption)
|
2018-11-23 07:30:52 +00:00
|
|
|
return &parseFunction{ParamCURL: u, ParamData: data, ExecuteFunction: parseUser, Prioty: 15}
|
2018-11-22 17:33:33 +00:00
|
|
|
case "-k": // -k, --insecure Allow insecure server connections when using SSL
|
2018-11-23 07:30:52 +00:00
|
|
|
return &parseFunction{ParamCURL: u, ParamData: soption, ExecuteFunction: parseInsecure, Prioty: 15}
|
2018-11-21 10:17:05 +00:00
|
|
|
}
|
2018-11-22 15:05:42 +00:00
|
|
|
return nil
|
2018-11-21 10:17:05 +00:00
|
|
|
}
|
|
|
|
|
2018-11-23 07:30:52 +00:00
|
|
|
func parseLongOption(u *CURL, soption string) *parseFunction {
|
2018-11-21 10:17:05 +00:00
|
|
|
// -d, --data <data> HTTP POST data
|
|
|
|
// --data-ascii <data> HTTP POST ASCII data
|
|
|
|
// --data-binary <data> HTTP POST binary data
|
|
|
|
// --data-raw <data> HTTP POST data, '@' allowed
|
|
|
|
// --data-urlencode <data> HTTP POST data url encoded
|
|
|
|
|
|
|
|
switch {
|
|
|
|
case regexp.MustCompile("^--data |^--data-urlencode|^--data-binary|^--data-ascii|^--data-raw").MatchString(soption):
|
|
|
|
|
2018-11-22 17:33:33 +00:00
|
|
|
datas := regexp.MustCompile("^--data-(binary) +(.+)|^--data-(ascii) +(.+)|^--data-(raw) +(.+)|^--data-(urlencode) +(.+)|^--(data) +(.+)").FindStringSubmatch(soption)
|
2018-11-21 10:17:05 +00:00
|
|
|
dtype := datas[1]
|
|
|
|
data := strings.Trim(datas[2], "'")
|
|
|
|
|
|
|
|
if u.Method != "" {
|
|
|
|
u.Method = "POST"
|
|
|
|
}
|
|
|
|
|
|
|
|
switch dtype {
|
|
|
|
case "binary":
|
2018-11-23 07:30:52 +00:00
|
|
|
return &parseFunction{ParamCURL: u, ParamData: data, ExecuteFunction: parseBodyBinary, Prioty: 10}
|
2018-11-21 10:17:05 +00:00
|
|
|
case "ascii":
|
2018-11-23 07:30:52 +00:00
|
|
|
return &parseFunction{ParamCURL: u, ParamData: data, ExecuteFunction: parseBodyASCII, Prioty: 10}
|
2018-11-21 10:17:05 +00:00
|
|
|
case "raw":
|
2018-11-23 07:30:52 +00:00
|
|
|
return &parseFunction{ParamCURL: u, ParamData: data, ExecuteFunction: parseBodyRaw, Prioty: 10}
|
2018-11-21 10:17:05 +00:00
|
|
|
case "urlencode":
|
2018-11-23 07:30:52 +00:00
|
|
|
return &parseFunction{ParamCURL: u, ParamData: data, ExecuteFunction: parseBodyURLEncode, Prioty: 10}
|
2018-11-21 10:17:05 +00:00
|
|
|
case "data":
|
2018-11-23 07:30:52 +00:00
|
|
|
return &parseFunction{ParamCURL: u, ParamData: data, ExecuteFunction: parseBodyASCII, Prioty: 10}
|
2018-11-21 10:17:05 +00:00
|
|
|
}
|
2018-11-22 17:33:33 +00:00
|
|
|
|
2018-11-21 10:47:22 +00:00
|
|
|
case regexp.MustCompile("^--header").MatchString(soption):
|
2018-11-23 07:30:52 +00:00
|
|
|
return &parseFunction{ParamCURL: u, ParamData: soption, ExecuteFunction: parseHeader, Prioty: 10}
|
2018-11-27 10:49:52 +00:00
|
|
|
case regexp.MustCompile("^--call").MatchString(soption):
|
|
|
|
data := extractData("^--call +(.+)", soption)
|
|
|
|
return &parseFunction{ParamCURL: u, ParamData: data, ExecuteFunction: parseCallBack, Prioty: 10}
|
2018-11-22 17:33:33 +00:00
|
|
|
case regexp.MustCompile("^--user-agent").MatchString(soption):
|
|
|
|
data := extractData("^--user-agent +(.+)", soption)
|
2018-11-23 07:30:52 +00:00
|
|
|
return &parseFunction{ParamCURL: u, ParamData: data, ExecuteFunction: parseUserAgent, Prioty: 15}
|
2018-11-22 17:33:33 +00:00
|
|
|
case regexp.MustCompile("^--user").MatchString(soption):
|
|
|
|
data := extractData("^--user +(.+)", soption)
|
2018-11-23 07:30:52 +00:00
|
|
|
return &parseFunction{ParamCURL: u, ParamData: data, ExecuteFunction: parseUser, Prioty: 15}
|
2018-11-22 17:33:33 +00:00
|
|
|
case regexp.MustCompile("^--insecure").MatchString(soption):
|
2018-11-23 07:30:52 +00:00
|
|
|
return &parseFunction{ParamCURL: u, ParamData: soption, ExecuteFunction: parseInsecure, Prioty: 15}
|
2018-11-27 10:49:52 +00:00
|
|
|
case regexp.MustCompile("^--connect-timeout").MatchString(soption):
|
|
|
|
data := extractData("^--connect-timeout +(.+)", soption)
|
|
|
|
return &parseFunction{ParamCURL: u, ParamData: data, ExecuteFunction: parseTimeout, Prioty: 15}
|
2018-11-21 10:17:05 +00:00
|
|
|
}
|
|
|
|
|
2018-11-22 15:05:42 +00:00
|
|
|
log.Println("can't parseOption", soption)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2018-11-22 17:33:33 +00:00
|
|
|
// extractData applies the regular expression re to soption and returns its
// first capture group with surrounding single quotes removed. Panics (via
// the nil submatch index) when re does not match; callers recover upstream.
func extractData(re, soption string) string {
	submatches := regexp.MustCompile(re).FindStringSubmatch(soption)
	return strings.Trim(submatches[1], "'")
}
|
|
|
|
|
2018-11-27 10:49:52 +00:00
|
|
|
func parseCallBack(u *CURL, value string) {
|
|
|
|
u.CallBack = value
|
|
|
|
}
|
|
|
|
|
|
|
|
func parseTimeout(u *CURL, value string) {
|
|
|
|
timeout, err := strconv.Atoi(value)
|
|
|
|
if err != nil {
|
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
u.Timeout = timeout
|
|
|
|
}
|
|
|
|
|
2018-11-22 17:33:33 +00:00
|
|
|
func parseInsecure(u *CURL, soption string) {
|
|
|
|
u.Insecure = true
|
|
|
|
}
|
|
|
|
|
|
|
|
func parseUser(u *CURL, soption string) {
|
|
|
|
auth := strings.Split(soption, ":")
|
|
|
|
u.Auth = &requests.BasicAuth{User: auth[0], Password: auth[1]}
|
|
|
|
}
|
|
|
|
|
|
|
|
func parseUserAgent(u *CURL, value string) {
|
|
|
|
u.Header.Add("User-Agent", value)
|
|
|
|
}
|
|
|
|
|
2018-11-22 15:05:42 +00:00
|
|
|
func parseOptI(u *CURL, soption string) {
|
|
|
|
u.Method = "HEAD"
|
|
|
|
}
|
|
|
|
|
|
|
|
func parseOptX(u *CURL, soption string) {
|
|
|
|
matches := regexp.MustCompile("-X +(.+)").FindStringSubmatch(soption)
|
|
|
|
method := strings.Trim(matches[1], "'")
|
|
|
|
u.Method = method
|
2018-11-21 10:17:05 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func parseBodyURLEncode(u *CURL, data string) {
|
|
|
|
u.Body.SetPrefix(requests.TypeURLENCODED)
|
|
|
|
u.Body.SetIOBody(data)
|
|
|
|
}
|
|
|
|
|
|
|
|
func parseBodyRaw(u *CURL, data string) {
|
|
|
|
u.Body.SetPrefix(requests.TypeURLENCODED)
|
|
|
|
u.Body.SetIOBody(data)
|
|
|
|
}
|
|
|
|
|
|
|
|
func parseBodyASCII(u *CURL, data string) {
|
|
|
|
u.Body.SetPrefix(requests.TypeURLENCODED)
|
|
|
|
|
|
|
|
if data[0] != '@' {
|
|
|
|
u.Body.SetIOBody(data)
|
|
|
|
} else {
|
|
|
|
f, err := os.Open(data[1:])
|
2018-11-21 10:20:50 +00:00
|
|
|
if err != nil {
|
|
|
|
panic(err)
|
|
|
|
}
|
2018-11-21 10:17:05 +00:00
|
|
|
defer f.Close()
|
|
|
|
|
|
|
|
bdata, err := ioutil.ReadAll(f)
|
2018-11-21 10:20:50 +00:00
|
|
|
if err != nil {
|
|
|
|
panic(err)
|
|
|
|
}
|
2018-11-21 10:17:05 +00:00
|
|
|
u.Body.SetIOBody(bdata)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// 处理@ 并且替/r/n符号
|
|
|
|
func parseBodyBinary(u *CURL, data string) {
|
|
|
|
u.Body.SetPrefix(requests.TypeURLENCODED)
|
|
|
|
|
|
|
|
if data[0] != '@' {
|
|
|
|
u.Body.SetIOBody(data)
|
|
|
|
} else {
|
|
|
|
f, err := os.Open(data[1:])
|
2018-11-21 10:20:50 +00:00
|
|
|
if err != nil {
|
|
|
|
panic(err)
|
|
|
|
}
|
2018-11-21 10:17:05 +00:00
|
|
|
defer f.Close()
|
|
|
|
bdata, err := ioutil.ReadAll(f)
|
2018-11-21 10:20:50 +00:00
|
|
|
if err != nil {
|
|
|
|
panic(err)
|
|
|
|
}
|
2018-11-21 10:17:05 +00:00
|
|
|
bdata = regexp.MustCompile("\n|\r").ReplaceAll(bdata, []byte(""))
|
|
|
|
u.Body.SetIOBody(bdata)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func parseHeader(u *CURL, soption string) {
|
|
|
|
matches := regexp.MustCompile(`'([^:]+): ([^']+)'`).FindAllStringSubmatch(soption, 1)[0]
|
|
|
|
key := matches[1]
|
|
|
|
value := matches[2]
|
|
|
|
|
|
|
|
switch key {
|
|
|
|
case "Cookie":
|
|
|
|
u.Cookies = ReadRawCookies(value, "")
|
|
|
|
u.CookieJar.SetCookies(u.ParsedURL, u.Cookies)
|
|
|
|
case "Content-Type":
|
|
|
|
u.Body.SetPrefix(value)
|
|
|
|
default:
|
|
|
|
u.Header.Add(key, value)
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|