package curl2info

import (
	"errors"
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"net/http/cookiejar"
	"net/url"
	"os"
	"regexp"
	"strings"

	"github.com/474420502/requests"
)

// CURL holds the information parsed from a raw curl command.
type CURL struct {
	ParsedURL *url.URL
	Method    string
	Header    http.Header
	CookieJar http.CookieJar
	Cookies   []*http.Cookie
	Body      *requests.Body
}

// NewCURL creates an empty CURL, or parses the single raw curl command
// string passed in. Passing more than one argument panics.
func NewCURL(scurl ...string) *CURL {
	if len(scurl) != 0 {
		if len(scurl) > 1 {
			panic(errors.New("NewCURL only accepts one curl command string"))
		}

		curl, err := ParseRawCURL(scurl[0])
		if err != nil {
			panic(err)
		}
		return curl
	}

	u := &CURL{}
	u.Header = make(http.Header)
	u.CookieJar, _ = cookiejar.New(nil)
	u.Body = requests.NewBody()

	return u
}

func (curl *CURL) String() string {
	return fmt.Sprintf("Method: %s\nParsedURL: %s\nHeader: %s\nCookie: %s",
		curl.Method, curl.ParsedURL.String(), curl.Header, curl.Cookies)
}

// CreateSession creates a requests.Session preloaded with the parsed header and cookies.
func (curl *CURL) CreateSession() *requests.Session {
	ses := requests.NewSession()
	ses.SetHeader(curl.Header)
	ses.SetCookies(curl.ParsedURL, curl.Cookies)
	return ses
}

// CreateWorkflow creates a Workflow on the given Session for the parsed method, URL and body.
func (curl *CURL) CreateWorkflow(ses *requests.Session) *requests.Workflow {
	var wf *requests.Workflow

	switch curl.Method {
	case "HEAD":
		wf = ses.Head(curl.ParsedURL.String())
	case "GET":
		wf = ses.Get(curl.ParsedURL.String())
	case "POST":
		wf = ses.Post(curl.ParsedURL.String())
	case "PUT":
		wf = ses.Put(curl.ParsedURL.String())
	case "PATCH":
		wf = ses.Patch(curl.ParsedURL.String())
	case "OPTIONS":
		wf = ses.Options(curl.ParsedURL.String())
	case "DELETE":
		wf = ses.Delete(curl.ParsedURL.String())
	}

	wf.SetBody(curl.Body)
	return wf
}
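
// A minimal usage sketch (illustrative only; the curl string below is a made-up
// example, and executing the returned Workflow is left to the requests package):
//
//	curl, err := ParseRawCURL(`curl 'http://httpbin.org/get' -H 'Accept: application/json'`)
//	if err != nil {
//		log.Fatal(err)
//	}
//	ses := curl.CreateSession()
//	wf := curl.CreateWorkflow(ses)
//	_ = wf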

// ParseRawCURL parses a raw curl (bash) command string into a CURL.
func ParseRawCURL(scurl string) (cURL *CURL, err error) {
	defer func() {
		if r := recover(); r != nil {
			cURL = nil
			if e, ok := r.(error); ok {
				err = e
			} else {
				err = fmt.Errorf("parse curl: %v", r)
			}
		}
	}()

	curl := NewCURL()

	scurl = strings.TrimSpace(scurl)
	scurl = strings.TrimPrefix(scurl, "curl")

	// Tokenize the command line into option/value pairs and the quoted URL.
	matches := regexp.MustCompile(`--[^ ]+ +'[^']+'|--[^ ]+ +[^ ]+|-[A-Za-z] +'[^']+'|-[A-Za-z] +[^ ]+| '[^']+'|--[a-z]+ {0,}`).FindAllString(scurl, -1)

	var pfs []*ParseFunction
	for _, m := range matches {
		m = strings.TrimSpace(m)
		switch m[0] {
		case '\'': // the quoted URL
			purl, perr := url.Parse(strings.Trim(m, "'"))
			if perr != nil {
				panic(perr)
			}
			curl.ParsedURL = purl
		case '-': // an option; collect its handler for later execution
			if pf := judgeAndParseOptions(curl, m); pf != nil {
				pfs = append(pfs, pf)
			}
		}
	}

	// Execute the collected option handlers once the URL is known.
	// All handlers currently share priority 1, so insertion order is kept.
	for _, pf := range pfs {
		pf.ExecuteFunction(pf.ParamCURL, pf.ParamData)
	}

	if curl.Method == "" {
		curl.Method = "GET"
	}

	return curl, nil
}
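
// As an illustration (the command below is a made-up example, not taken from
// the project's docs), an input such as
//
//	curl 'http://httpbin.org/post' -H 'Content-Type: application/json' --data '{"a":1}'
//
// is split by the tokenizer regexp above into the quoted URL token and one
// token per option ("-H '...'" and "--data '...'"), which are then dispatched
// through judgeAndParseOptions.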

// judgeAndParseOptions maps a single option token to the ParseFunction that handles it.
func judgeAndParseOptions(u *CURL, soption string) *ParseFunction {
	switch prefix := soption[0:2]; prefix {
	case "-H":
		return &ParseFunction{ParamCURL: u, ParamData: soption, ExecuteFunction: parseHeader, Prioty: 1}
	case "-X":
		return &ParseFunction{ParamCURL: u, ParamData: soption, ExecuteFunction: parseOptX, Prioty: 1}
	case "-A": // User-Agent; ordering relative to other header options matters
		return &ParseFunction{ParamCURL: u, ParamData: soption, ExecuteFunction: parseHeader, Prioty: 1}
	case "-I":
		return &ParseFunction{ParamCURL: u, ParamData: soption, ExecuteFunction: parseOptI, Prioty: 1}
	case "--":
		return parseLongOption(u, soption)
	}

	return nil
}

// parseLongOption handles the long (--) options.
func parseLongOption(u *CURL, soption string) *ParseFunction {
	// -d, --data <data>            HTTP POST data
	//     --data-ascii <data>      HTTP POST ASCII data
	//     --data-binary <data>     HTTP POST binary data
	//     --data-raw <data>        HTTP POST data, '@' allowed
	//     --data-urlencode <data>  HTTP POST data url encoded

	switch {
	case regexp.MustCompile(`^--data(-ascii|-binary|-raw|-urlencode)? `).MatchString(soption):
		datas := regexp.MustCompile(`^--data(?:-(binary|ascii|raw|urlencode))? +(.+)`).FindStringSubmatch(soption)

		dtype := datas[1] // empty for plain --data
		data := strings.Trim(datas[2], "'")

		if u.Method == "" {
			u.Method = "POST"
		}

		switch dtype {
		case "binary":
			return &ParseFunction{ParamCURL: u, ParamData: data, ExecuteFunction: parseBodyBinary, Prioty: 1}
		case "raw":
			return &ParseFunction{ParamCURL: u, ParamData: data, ExecuteFunction: parseBodyRaw, Prioty: 1}
		case "urlencode":
			return &ParseFunction{ParamCURL: u, ParamData: data, ExecuteFunction: parseBodyURLEncode, Prioty: 1}
		case "ascii", "": // --data and --data-ascii behave the same
			return &ParseFunction{ParamCURL: u, ParamData: data, ExecuteFunction: parseBodyASCII, Prioty: 1}
		}

	case regexp.MustCompile("^--header").MatchString(soption):
		return &ParseFunction{ParamCURL: u, ParamData: soption, ExecuteFunction: parseHeader, Prioty: 1}
	}

	log.Println("can't parse option", soption)
	return nil
}
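
// For illustration (the file name is a made-up example), a token such as
// "--data-binary '@post.json'" yields dtype "binary" and data "@post.json",
// so parseBodyBinary later reads post.json and strips \r/\n before the body
// is attached to the request.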

// parseOptI handles -I: use the HEAD method.
func parseOptI(u *CURL, soption string) {
	u.Method = "HEAD"
}

// parseOptX handles -X <method>: set the request method explicitly.
func parseOptX(u *CURL, soption string) {
	matches := regexp.MustCompile("-X +(.+)").FindStringSubmatch(soption)
	method := strings.Trim(matches[1], "'")
	u.Method = method
}

// parseBodyURLEncode sets a url-encoded request body.
func parseBodyURLEncode(u *CURL, data string) {
	u.Body.SetPrefix(requests.TypeURLENCODED)
	u.Body.SetIOBody(data)
}

// parseBodyRaw sets the raw request body (sent with the url-encoded prefix).
func parseBodyRaw(u *CURL, data string) {
	u.Body.SetPrefix(requests.TypeURLENCODED)
	u.Body.SetIOBody(data)
}

// parseBodyASCII sets the request body from a string, or from a file when the
// data starts with '@'.
func parseBodyASCII(u *CURL, data string) {
	u.Body.SetPrefix(requests.TypeURLENCODED)

	if data[0] != '@' {
		u.Body.SetIOBody(data)
	} else {
		f, err := os.Open(data[1:])
		if err != nil {
			panic(err)
		}
		defer f.Close()

		bdata, err := ioutil.ReadAll(f)
		if err != nil {
			panic(err)
		}
		u.Body.SetIOBody(bdata)
	}
}

// parseBodyBinary sets the request body from a string, or from a file when the
// data starts with '@'; \r and \n characters are stripped from file contents.
func parseBodyBinary(u *CURL, data string) {
	u.Body.SetPrefix(requests.TypeURLENCODED)

	if data[0] != '@' {
		u.Body.SetIOBody(data)
	} else {
		f, err := os.Open(data[1:])
		if err != nil {
			panic(err)
		}
		defer f.Close()

		bdata, err := ioutil.ReadAll(f)
		if err != nil {
			panic(err)
		}
		bdata = regexp.MustCompile("\n|\r").ReplaceAll(bdata, []byte(""))
		u.Body.SetIOBody(bdata)
	}
}

// parseHeader handles -H/--header/-A tokens of the form 'Key: Value'.
// Cookie and Content-Type headers are routed to the cookie jar and the body
// prefix respectively; everything else is added to the header set.
func parseHeader(u *CURL, soption string) {
	matches := regexp.MustCompile(`'([^:]+): ([^']+)'`).FindStringSubmatch(soption)
	if matches == nil {
		log.Println("can't parse header option", soption)
		return
	}

	key := matches[1]
	value := matches[2]

	switch key {
	case "Cookie":
		u.Cookies = ReadRawCookies(value, "")
		u.CookieJar.SetCookies(u.ParsedURL, u.Cookies)
	case "Content-Type":
		u.Body.SetPrefix(value)
	default:
		u.Header.Add(key, value)
	}
}