Commit

misc option update
ehsandeep committed Aug 1, 2023
1 parent 58fc6bf commit 1c2fe65
Showing 3 changed files with 8 additions and 8 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/security-crawl-maze-score.yaml
@@ -21,11 +21,11 @@ jobs:
working-directory: cmd/katana/

- name: Run Katana Standard
- run: ./katana -u https://security-crawl-maze.app/ -kf all -jc -jl -d 10 -o output_standard.txt -cos node_modules
+ run: ./katana -u https://security-crawl-maze.app/ -kf all -jc -jsluice -d 10 -o output_standard.txt -cos node_modules
working-directory: cmd/katana

- name: Run Katana Headless
- run: ./katana -u https://security-crawl-maze.app/ -kf all -jc -jl -d 10 -headless -o output_headless.txt -cos node_modules
+ run: ./katana -u https://security-crawl-maze.app/ -kf all -jc -jsluice -d 10 -headless -o output_headless.txt -cos node_modules
working-directory: cmd/katana

- name: Run Score
6 changes: 3 additions & 3 deletions README.md
@@ -120,13 +120,13 @@ CONFIGURATION:
-r, -resolvers string[] list of custom resolver (file or comma separated)
-d, -depth int maximum depth to crawl (default 3)
-jc, -js-crawl enable endpoint parsing / crawling in javascript file
- -jl, -jsluice enable jsluice parsing in javascript file (memory intensive)
+ -jsl, -jsluice enable jsluice parsing in javascript file (memory intensive)
-ct, -crawl-duration value maximum duration to crawl the target for (s, m, h, d) (default s)
-kf, -known-files string enable crawling of known files (all,robotstxt,sitemapxml)
-mrs, -max-response-size int maximum response size to read (default 9223372036854775807)
-timeout int time to wait for request in seconds (default 10)
-aff, -automatic-form-fill enable automatic form filling (experimental)
- -fx, -form-extraction enable extraction of form, input, textarea & select elements
+ -fx, -form-extraction extract form, input, textarea & select elements in jsonl output
-retry int number of times to retry the request (default 1)
-proxy string http/socks5 proxy to use
-H, -headers string[] custom header/cookie to include in all http request in header:value format (file)
@@ -151,7 +151,7 @@ HEADLESS:
-scp, -system-chrome-path string use specified chrome browser for headless crawling
-noi, -no-incognito start headless chrome without incognito mode
-cwu, -chrome-ws-url string use chrome browser instance launched elsewhere with the debugger listening at this URL
- -xhr, -xhr-extraction extract xhr requests
+ -xhr, -xhr-extraction extract xhr request url,method in jsonl output

SCOPE:
-cs, -crawl-scope string[] in scope url regex to be followed by crawler
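Both of the retouched descriptions above point at katana's jsonl output mode. As a rough illustration of what consuming that output involves, here is a minimal, schema-agnostic Go sketch that reads one JSON object per line from stdin; it deliberately assumes nothing about katana's actual field names.

```go
package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"os"
)

// Reads jsonl (one JSON object per line) from stdin without assuming
// any particular field layout, and reports how many fields each record has.
func main() {
	scanner := bufio.NewScanner(os.Stdin)
	scanner.Buffer(make([]byte, 0, 64*1024), 10*1024*1024) // allow long lines
	for scanner.Scan() {
		var record map[string]interface{}
		if err := json.Unmarshal(scanner.Bytes(), &record); err != nil {
			fmt.Fprintf(os.Stderr, "skipping malformed line: %v\n", err)
			continue
		}
		fmt.Printf("parsed record with %d top-level fields\n", len(record))
	}
	if err := scanner.Err(); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}
```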
6 changes: 3 additions & 3 deletions cmd/katana/main.go
@@ -70,13 +70,13 @@ pipelines offering both headless and non-headless crawling.`)
flagSet.StringSliceVarP(&options.Resolvers, "resolvers", "r", nil, "list of custom resolver (file or comma separated)", goflags.FileCommaSeparatedStringSliceOptions),
flagSet.IntVarP(&options.MaxDepth, "depth", "d", 3, "maximum depth to crawl"),
flagSet.BoolVarP(&options.ScrapeJSResponses, "js-crawl", "jc", false, "enable endpoint parsing / crawling in javascript file"),
- flagSet.BoolVarP(&options.ScrapeJSLuiceResponses, "jsluice", "jl", false, "enable jsluice parsing in javascript file (memory intensive)"),
+ flagSet.BoolVarP(&options.ScrapeJSLuiceResponses, "jsluice", "jsl", false, "enable jsluice parsing in javascript file (memory intensive)"),
flagSet.DurationVarP(&options.CrawlDuration, "crawl-duration", "ct", 0, "maximum duration to crawl the target for (s, m, h, d) (default s)"),
flagSet.StringVarP(&options.KnownFiles, "known-files", "kf", "", "enable crawling of known files (all,robotstxt,sitemapxml)"),
flagSet.IntVarP(&options.BodyReadSize, "max-response-size", "mrs", math.MaxInt, "maximum response size to read"),
flagSet.IntVar(&options.Timeout, "timeout", 10, "time to wait for request in seconds"),
flagSet.BoolVarP(&options.AutomaticFormFill, "automatic-form-fill", "aff", false, "enable automatic form filling (experimental)"),
- flagSet.BoolVarP(&options.FormExtraction, "form-extraction", "fx", false, "enable extraction of form, input, textarea & select elements"),
+ flagSet.BoolVarP(&options.FormExtraction, "form-extraction", "fx", false, "extract form, input, textarea & select elements in jsonl output"),
flagSet.IntVar(&options.Retries, "retry", 1, "number of times to retry the request"),
flagSet.StringVar(&options.Proxy, "proxy", "", "http/socks5 proxy to use"),
flagSet.StringSliceVarP(&options.CustomHeaders, "headers", "H", nil, "custom header/cookie to include in all http request in header:value format (file)", goflags.FileStringSliceOptions),
@@ -103,7 +103,7 @@ pipelines offering both headless and non-headless crawling.`)
flagSet.StringVarP(&options.SystemChromePath, "system-chrome-path", "scp", "", "use specified chrome browser for headless crawling"),
flagSet.BoolVarP(&options.HeadlessNoIncognito, "no-incognito", "noi", false, "start headless chrome without incognito mode"),
flagSet.StringVarP(&options.ChromeWSUrl, "chrome-ws-url", "cwu", "", "use chrome browser instance launched elsewhere with the debugger listening at this URL"),
- flagSet.BoolVarP(&options.XhrExtraction, "xhr-extraction", "xhr", false, "extract xhr requests"),
+ flagSet.BoolVarP(&options.XhrExtraction, "xhr-extraction", "xhr", false, "extract xhr request url,method in jsonl output"),
)

flagSet.CreateGroup("scope", "Scope",
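For context, the rename in main.go only touches the shorthand passed to goflags; the long option name and the bound struct field are unchanged. A minimal standalone sketch of the same registration pattern, using only the goflags calls already shown in this diff (the demo option struct and log output are illustrative, not katana's actual code):

```go
package main

import (
	"log"

	"github.com/projectdiscovery/goflags"
)

// demoOptions mirrors only the fields touched by this commit.
type demoOptions struct {
	ScrapeJSLuiceResponses bool
	FormExtraction         bool
	XhrExtraction          bool
}

func main() {
	opts := &demoOptions{}
	flagSet := goflags.NewFlagSet()
	flagSet.SetDescription("demo of the renamed katana flag shorthand")

	flagSet.CreateGroup("config", "Configuration",
		// shorthand changed from -jl to -jsl; the long form -jsluice stays the same
		flagSet.BoolVarP(&opts.ScrapeJSLuiceResponses, "jsluice", "jsl", false, "enable jsluice parsing in javascript file (memory intensive)"),
		flagSet.BoolVarP(&opts.FormExtraction, "form-extraction", "fx", false, "extract form, input, textarea & select elements in jsonl output"),
	)
	flagSet.CreateGroup("headless", "Headless",
		flagSet.BoolVarP(&opts.XhrExtraction, "xhr-extraction", "xhr", false, "extract xhr request url,method in jsonl output"),
	)

	if err := flagSet.Parse(); err != nil {
		log.Fatal(err)
	}
	log.Printf("jsluice=%v form-extraction=%v xhr-extraction=%v",
		opts.ScrapeJSLuiceResponses, opts.FormExtraction, opts.XhrExtraction)
}
```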
