feat: add google search in site data
Made the following improvements:
- add Google search in site data (see the sketch after the changed-files summary below)
- fix precisely mode
- optimize code structure
piaolin committed May 5, 2023
1 parent a37530c commit db1ec78
Showing 5 changed files with 369 additions and 120 deletions.
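
The first item above — Google search in the site data — works by letting a detect entry supply a search/searchUrl pair instead of a direct url: when no url is present, the new branch in detectUser (cmd/detect.go below) formats the query, builds the search link, and asks the user to check it manually via the searchInfo format string. Here is a minimal, hypothetical sketch of such an entry and how it is consumed; the JSON values and the site name are assumptions, only the search/searchUrl field names and the gjson calls come from the diff:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/tidwall/gjson"
)

func main() {
	// Hypothetical site-data entry: no "url" to probe, only a search template
	// and a search-engine URL template (field names as in cmd/detect.go below).
	siteData := `{
		"example": {
			"detect": [
				{
					"search":    "site:example.com %s",
					"searchUrl": "https://www.google.com/search?q=%s"
				}
			]
		}
	}`

	name := "someuser"
	detectData := gjson.Get(siteData, "example.detect.0")

	// Mirrors the search branch of detectUser: substitute the name into the
	// search template, then into the search-engine URL, and report the link.
	if search := detectData.Get("search"); search.Exists() {
		searchString := search.String()
		if strings.Contains(searchString, "%s") {
			searchString = fmt.Sprintf(searchString, name)
		}
		searchUrl := fmt.Sprintf(detectData.Get("searchUrl").String(), searchString)
		fmt.Printf("[+] %-15s %-15s: Please check in %s\n", name, "example", searchUrl)
	}
}
```

Running this prints a "Please check in …" line pointing at a Google query for the username; note that, as in the diff, the query string is interpolated without URL encoding.
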
13 changes: 7 additions & 6 deletions README.md
@@ -31,18 +31,19 @@ Usage:
Flags:
  -c, --check          self-check
  -f, --file string    Site data file (default "data.json")
  -h, --help           help for detect
  -n, --name strings   name[s]
-     --nsfw           Include checking of NSFW sites from default list.
+     --nsfw           Include checking of NSFW sites from default list.
+     --precisely      Check precisely
- -p, --proxy string   Make requests over a proxy. e.g. socks5:https://127.0.0.1:1080
- -s, --site strings   Limit analysis to just the listed sites. Add multiple opt
-ions to specify more than one site.
- -t, --timeout int    Time (in seconds) to wait for response to requests (defau
-lt 60)
+ -p, --proxy string   Make requests over a proxy. e.g. socks5:https://127.0.0.1:1080
+ -r, --retry int      Retry times after request failed (default 3)
+ -s, --site strings   Limit analysis to just the listed sites. Add multiple options to specify more than one site.
+ -t, --timeout int    Time (in seconds) to wait for response to requests (default 10)
Global Flags:
-v, --verbose verbose output
```

To search for only one user:
12 changes: 6 additions & 6 deletions README_ZH.md
@@ -29,15 +29,15 @@ Usage:
Flags:
  -c, --check          self-check
  -f, --file string    Site data file (default "data.json")
  -h, --help           help for detect
  -n, --name strings   name[s]
-     --nsfw           Include checking of NSFW sites from default list.
+     --nsfw           Include checking of NSFW sites from default list.
+     --precisely      Check precisely
- -p, --proxy string   Make requests over a proxy. e.g. socks5:https://127.0.0.1:1080
- -s, --site strings   Limit analysis to just the listed sites. Add multiple opt
-ions to specify more than one site.
- -t, --timeout int    Time (in seconds) to wait for response to requests (defau
-lt 60)
+ -p, --proxy string   Make requests over a proxy. e.g. socks5:https://127.0.0.1:1080
+ -r, --retry int      Retry times after request failed (default 3)
+ -s, --site strings   Limit analysis to just the listed sites. Add multiple options to specify more than one site.
+ -t, --timeout int    Time (in seconds) to wait for response to requests (default 10)
Global Flags:
-v, --verbose verbose output
234 changes: 129 additions & 105 deletions cmd/detect.go
@@ -35,15 +35,16 @@ var (
reqErrorInfo = "[!] %-15s %-15s: %s requests error, retry %d/%d\n"
sleepMap = make(map[string]int64)
sleepChannel = make(map[string]chan bool)
searchInfo = "[+] %-15s %-15s: Please check in %s\n"
)

func init() {
detectCmd.Flags().StringSliceVarP(&detectArgs.name, "name", "n", []string{}, "name[s]")
_ = detectCmd.MarkFlagRequired("name")
//_ = detectCmd.MarkFlagRequired("name")
detectCmd.Flags().StringSliceVarP(&detectArgs.site, "site", "s", []string{}, "Limit analysis to just the listed sites. Add multiple options to specify more than one site.")
detectCmd.Flags().BoolVarP(&detectArgs.check, "check", "c", false, "self-check")
detectCmd.Flags().StringVarP(&detectArgs.proxy, "proxy", "p", "", "Make requests over a proxy. e.g. socks5:https://127.0.0.1:1080")
detectCmd.Flags().IntVarP(&detectArgs.timeout, "timeout", "t", 30, "Time (in seconds) to wait for response to requests")
detectCmd.Flags().IntVarP(&detectArgs.timeout, "timeout", "t", 10, "Time (in seconds) to wait for response to requests")
detectCmd.Flags().BoolVar(&detectArgs.isNSFW, "nsfw", false, "Include checking of NSFW sites from default list.")
detectCmd.Flags().IntVarP(&detectArgs.retry, "retry", "r", 3, "Retry times after request failed")
detectCmd.Flags().BoolVar(&detectArgs.precisely, "precisely", false, "Check precisely")
@@ -55,7 +56,7 @@ func init() {

var detectCmd = &cobra.Command{
Use: "detect",
Short: "Hunt down social media accounts by username across social networks",
Short: "Hunt down social media accounts by username, email or phone across social networks",
Long: ``,
Run: detect,
}
@@ -93,10 +94,14 @@ func detect(_ *cobra.Command, _ []string) {
}
}

for _, name := range detectArgs.name {
for site, siteBody := range siteDataMap {
go detectSite(name, site, siteBody)
wg.Add(1)
if detectArgs.check {
log.Infoln("self check")
} else {
for _, name := range detectArgs.name {
for site, siteBody := range siteDataMap {
go detectSite(name, site, siteBody)
wg.Add(1)
}
}
}
wg.Wait()
@@ -127,122 +132,141 @@ func detectSite(name, site string, siteBody gjson.Result) {

detectReq := siteBody.Get("detect").Array()

retryTimes := 0
detectCount := len(detectReq) - 1
for index, detectData := range detectReq {
retryTimes := 0
if detectUser(name, site, index, retryTimes, detectCount, &flag, detectData) {
continue
} else {
break
}
}
}

// set header
header := make(map[string]string)
if h := detectData.Get("header"); h.Exists() {
headerTemp, ok := h.Value().(map[string]interface{})
if !ok {
log.Fatalln(headerTemp, "is invalid")
}
for key, value := range headerTemp {
strKey := fmt.Sprintf("%v", key)
strValue := fmt.Sprintf("%v", value)
header[strKey] = strValue
}
func detectUser(name, site string, requestTimes, retryTimes, detectCount int, flag *bool, detectData gjson.Result) bool {

// set header
header := make(map[string]string)
if h := detectData.Get("header"); h.Exists() {
headerTemp, ok := h.Value().(map[string]interface{})
if !ok {
log.Fatalln(headerTemp, "is invalid")
}

// if body is set, set method to post
var body string
if d := detectData.Get("body"); d.Exists() {
body = d.String()
if strings.Contains(body, "%s") {
body = fmt.Sprintf(body, name)
}
for key, value := range headerTemp {
strKey := fmt.Sprintf("%v", key)
strValue := fmt.Sprintf("%v", value)
header[strKey] = strValue
}

// set url with name
var url string
if u := detectData.Get("url"); u.Exists() {
url = u.String()
if strings.Contains(url, "%s") {
url = fmt.Sprintf(url, name)
}
} else {
log.Fatalln("Why no URL???")
}
}

// delay
if ch, ok := sleepChannel[site]; ok {
ch <- true
time.Sleep(time.Duration(sleepMap[site]) * time.Second)
<-ch
// if body is set, set method to post
var body string
if d := detectData.Get("body"); d.Exists() {
body = d.String()
if strings.Contains(body, "%s") {
body = fmt.Sprintf(body, name)
}
}

var rep *resty.Response
for retryTimes < detectArgs.retry {
var err error
rep, err = utils.Requests(url, body, detectArgs.proxy, header, detectArgs.timeout)
if err != nil {
retryTimes += 1
log.Debugf(reqErrorInfo, name, site, url, retryTimes, detectArgs.retry)
time.Sleep(time.Duration(sleepMap[site]) * time.Second)
continue
}
break
// set url with name
var url string
if u := detectData.Get("url"); u.Exists() {
url = u.String()
if strings.Contains(url, "%s") {
url = fmt.Sprintf(url, name)
}

if retryTimes == detectArgs.retry {
log.Infof(reqErrorInfo, name, site, url, retryTimes, detectArgs.retry)
break
} else if search := detectData.Get("search"); search.Exists() {
searchString := search.String()
if strings.Contains(searchString, "%s") {
searchString = fmt.Sprintf(searchString, name)
}
searchUrl := fmt.Sprintf(detectData.Get("searchUrl").String(), searchString)
log.Infof(searchInfo, name, site, searchUrl)
return false
} else {
log.Fatalln("Why no URL???")
}

//log.Infoln(rep.Request.Body)
//log.Debugln(rep.Status())
//log.Debugln(rep.Proto())
//
//log.Debugln(rep.Request.Header)
//
//log.Debugln(rep.String())

// statusCode, existRegex must both be true
statusCode := detectData.Get("statusCode")
existRegex := detectData.Get("existRegex")

statusCodeCheck := !statusCode.Exists() || strings.Contains(rep.Status(), statusCode.String())
existRegexCheck := true
if existRegex.Exists() && len(existRegex.Str) != 0 {
match, err := regexp.MatchString(existRegex.String(), rep.String())
if err != nil {
log.Errorln(err)
}
existRegexCheck = match
}
// delay
if ch, ok := sleepChannel[site]; ok {
ch <- true
time.Sleep(time.Duration(sleepMap[site]) * time.Second)
<-ch
}

// nonExistRegex have a veto power
nonExistRegex := detectData.Get("nonExistRegex")
nonExistRegexCheck := false
if nonExistRegex.Exists() && len(nonExistRegex.Str) != 0 {
match, err := regexp.MatchString(nonExistRegex.String(), rep.String())
if err != nil {
log.Errorln(err)
}
nonExistRegexCheck = match
}
if statusCodeCheck && existRegexCheck && !nonExistRegexCheck {
flag = true
var rep *resty.Response
for retryTimes < detectArgs.retry {
var err error
rep, err = utils.Requests(url, body, detectArgs.proxy, header, detectArgs.timeout)
if err != nil {
retryTimes += 1
log.Debugf(reqErrorInfo, name, site, url, retryTimes, detectArgs.retry)
time.Sleep(time.Duration(sleepMap[site]) * time.Second)
continue
}
break
}

if retryTimes == detectArgs.retry {
log.Infof(reqErrorInfo, name, site, url, retryTimes, detectArgs.retry)
return false
}

userPage := detectData.Get("userPage").String()
if strings.Contains(userPage, "%s") {
userPage = fmt.Sprintf(userPage, name)
//log.Infoln(rep.Request.Body)
//log.Debugln(rep.Status())
//log.Debugln(rep.Proto())
//
//log.Debugln(rep.Request.Header)
//
//log.Debugln(rep.String())

// statusCode, existRegex must both be true
statusCode := detectData.Get("statusCode")
existRegex := detectData.Get("existRegex")

statusCodeCheck := !statusCode.Exists() || strings.Contains(rep.Status(), statusCode.String())
existRegexCheck := true
if existRegex.Exists() && len(existRegex.Str) != 0 {
match, err := regexp.MatchString(existRegex.String(), rep.String())
if err != nil {
log.Errorln(err)
}
existRegexCheck = match
}

// precisely mode
if !flag {
log.Debugf(nonExistInfo, name, site)
break
} else if !detectArgs.precisely {
// flag=true && precisely=false
log.Infof(existInfo, name, site, userPage)
break
} else if index == len(detectReq)-1 {
// flag=true && precisely=true
log.Infof(existInfo, name, site, userPage)
// nonExistRegex have a veto power
nonExistRegex := detectData.Get("nonExistRegex")
nonExistRegexCheck := false
if nonExistRegex.Exists() && len(nonExistRegex.Str) != 0 {
match, err := regexp.MatchString(nonExistRegex.String(), rep.String())
if err != nil {
log.Errorln(err)
}
nonExistRegexCheck = match
}
if statusCodeCheck && existRegexCheck && !nonExistRegexCheck {
*flag = true
}

userPage := detectData.Get("userPage").String()
if strings.Contains(userPage, "%s") {
userPage = fmt.Sprintf(userPage, name)
}

// precisely mode
if !*flag {
log.Debugf(nonExistInfo, name, site)
return false
} else if !detectArgs.precisely {
// flag=true && precisely=false
log.Infof(existInfo, name, site, userPage)
return false
} else if requestTimes == detectCount {
// flag=true && precisely=true && last request
log.Infof(existInfo, name, site, userPage)
return true
} else {
return true
}
}
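
For context on the checks detectUser runs once a response arrives (logic carried over from the old detectSite, with flag now passed as a pointer): the response status must contain statusCode, existRegex must match the body, and nonExistRegex acts as a veto. Below is a standalone sketch of that decision with hypothetical field values and a fake response — none of these values come from the real data.json:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	// Hypothetical site-data entry fields.
	statusCode := "200"
	existRegex := `"username":\s*"\w+"`
	nonExistRegex := `user not found`

	// Hypothetical response, standing in for the resty response used in detect.go.
	respStatus := "200 OK"
	respBody := `{"username": "someuser", "id": 42}`

	// Status check: an absent statusCode passes automatically.
	statusCodeCheck := statusCode == "" || strings.Contains(respStatus, statusCode)

	// existRegex must match the body when present.
	existRegexCheck := true
	if existRegex != "" {
		match, err := regexp.MatchString(existRegex, respBody)
		if err != nil {
			fmt.Println(err)
		}
		existRegexCheck = match
	}

	// nonExistRegex vetoes a match when it is present and matches.
	nonExistRegexCheck := false
	if nonExistRegex != "" {
		match, err := regexp.MatchString(nonExistRegex, respBody)
		if err != nil {
			fmt.Println(err)
		}
		nonExistRegexCheck = match
	}

	fmt.Println("account exists:", statusCodeCheck && existRegexCheck && !nonExistRegexCheck)
}
```

In --precisely mode, the loop in detectSite only reports the account after the last detect entry has been requested, which is what the new requestTimes == detectCount comparison expresses.
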
2 changes: 1 addition & 1 deletion cmd/root.go
@@ -16,7 +16,7 @@ func init() {

var rootCmd = &cobra.Command{
Use: "DetectDee",
Short: "Hunt down social media accounts by username across social networks && Credential Stuffing them",
Short: "Hunt down social media accounts by username, email or phone across social networks && Credential Stuffing them",
Long: ``,
Run: func(cmd *cobra.Command, args []string) {
fmt.Println(cmd.UsageString())