From 1c2fe65b85ff4bbc72bca510c3538113217b0a56 Mon Sep 17 00:00:00 2001
From: sandeep <8293321+ehsandeep@users.noreply.github.com>
Date: Tue, 1 Aug 2023 17:45:36 +0530
Subject: [PATCH] misc option update

---
 .github/workflows/security-crawl-maze-score.yaml | 4 ++--
 README.md                                        | 6 +++---
 cmd/katana/main.go                               | 6 +++---
 3 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/security-crawl-maze-score.yaml b/.github/workflows/security-crawl-maze-score.yaml
index da2f0c50..bdf87ba8 100644
--- a/.github/workflows/security-crawl-maze-score.yaml
+++ b/.github/workflows/security-crawl-maze-score.yaml
@@ -21,11 +21,11 @@ jobs:
         working-directory: cmd/katana/
 
       - name: Run Katana Standard
-        run: ./katana -u https://security-crawl-maze.app/ -kf all -jc -jl -d 10 -o output_standard.txt -cos node_modules
+        run: ./katana -u https://security-crawl-maze.app/ -kf all -jc -jsluice -d 10 -o output_standard.txt -cos node_modules
         working-directory: cmd/katana
 
       - name: Run Katana Headless
-        run: ./katana -u https://security-crawl-maze.app/ -kf all -jc -jl -d 10 -headless -o output_headless.txt -cos node_modules
+        run: ./katana -u https://security-crawl-maze.app/ -kf all -jc -jsluice -d 10 -headless -o output_headless.txt -cos node_modules
         working-directory: cmd/katana
 
       - name: Run Score
diff --git a/README.md b/README.md
index 585f169f..1d4e681b 100644
--- a/README.md
+++ b/README.md
@@ -120,13 +120,13 @@ CONFIGURATION:
    -r, -resolvers string[]          list of custom resolver (file or comma separated)
    -d, -depth int                   maximum depth to crawl (default 3)
    -jc, -js-crawl                   enable endpoint parsing / crawling in javascript file
-   -jl, -jsluice                    enable jsluice parsing in javascript file (memory intensive)
+   -jsl, -jsluice                   enable jsluice parsing in javascript file (memory intensive)
    -ct, -crawl-duration value       maximum duration to crawl the target for (s, m, h, d) (default s)
    -kf, -known-files string         enable crawling of known files (all,robotstxt,sitemapxml)
    -mrs, -max-response-size int     maximum response size to read (default 9223372036854775807)
    -timeout int                     time to wait for request in seconds (default 10)
    -aff, -automatic-form-fill       enable automatic form filling (experimental)
-   -fx, -form-extraction            enable extraction of form, input, textarea & select elements
+   -fx, -form-extraction            extract form, input, textarea & select elements in jsonl output
    -retry int                       number of times to retry the request (default 1)
    -proxy string                    http/socks5 proxy to use
    -H, -headers string[]            custom header/cookie to include in all http request in header:value format (file)
@@ -151,7 +151,7 @@ HEADLESS:
    -scp, -system-chrome-path string  use specified chrome browser for headless crawling
    -noi, -no-incognito               start headless chrome without incognito mode
    -cwu, -chrome-ws-url string       use chrome browser instance launched elsewhere with the debugger listening at this URL
-   -xhr, -xhr-extraction             extract xhr requests
+   -xhr, -xhr-extraction             extract xhr request url,method in jsonl output
 
 SCOPE:
    -cs, -crawl-scope string[]        in scope url regex to be followed by crawler
diff --git a/cmd/katana/main.go b/cmd/katana/main.go
index 49f8825b..07191397 100644
--- a/cmd/katana/main.go
+++ b/cmd/katana/main.go
@@ -70,13 +70,13 @@ pipelines offering both headless and non-headless crawling.`)
 		flagSet.StringSliceVarP(&options.Resolvers, "resolvers", "r", nil, "list of custom resolver (file or comma separated)", goflags.FileCommaSeparatedStringSliceOptions),
 		flagSet.IntVarP(&options.MaxDepth, "depth", "d", 3, "maximum depth to crawl"),
 		flagSet.BoolVarP(&options.ScrapeJSResponses, "js-crawl", "jc", false, "enable endpoint parsing / crawling in javascript file"),
-		flagSet.BoolVarP(&options.ScrapeJSLuiceResponses, "jsluice", "jl", false, "enable jsluice parsing in javascript file (memory intensive)"),
+		flagSet.BoolVarP(&options.ScrapeJSLuiceResponses, "jsluice", "jsl", false, "enable jsluice parsing in javascript file (memory intensive)"),
 		flagSet.DurationVarP(&options.CrawlDuration, "crawl-duration", "ct", 0, "maximum duration to crawl the target for (s, m, h, d) (default s)"),
 		flagSet.StringVarP(&options.KnownFiles, "known-files", "kf", "", "enable crawling of known files (all,robotstxt,sitemapxml)"),
 		flagSet.IntVarP(&options.BodyReadSize, "max-response-size", "mrs", math.MaxInt, "maximum response size to read"),
 		flagSet.IntVar(&options.Timeout, "timeout", 10, "time to wait for request in seconds"),
 		flagSet.BoolVarP(&options.AutomaticFormFill, "automatic-form-fill", "aff", false, "enable automatic form filling (experimental)"),
-		flagSet.BoolVarP(&options.FormExtraction, "form-extraction", "fx", false, "enable extraction of form, input, textarea & select elements"),
+		flagSet.BoolVarP(&options.FormExtraction, "form-extraction", "fx", false, "extract form, input, textarea & select elements in jsonl output"),
 		flagSet.IntVar(&options.Retries, "retry", 1, "number of times to retry the request"),
 		flagSet.StringVar(&options.Proxy, "proxy", "", "http/socks5 proxy to use"),
 		flagSet.StringSliceVarP(&options.CustomHeaders, "headers", "H", nil, "custom header/cookie to include in all http request in header:value format (file)", goflags.FileStringSliceOptions),
@@ -103,7 +103,7 @@ pipelines offering both headless and non-headless crawling.`)
 		flagSet.StringVarP(&options.SystemChromePath, "system-chrome-path", "scp", "", "use specified chrome browser for headless crawling"),
 		flagSet.BoolVarP(&options.HeadlessNoIncognito, "no-incognito", "noi", false, "start headless chrome without incognito mode"),
 		flagSet.StringVarP(&options.ChromeWSUrl, "chrome-ws-url", "cwu", "", "use chrome browser instance launched elsewhere with the debugger listening at this URL"),
-		flagSet.BoolVarP(&options.XhrExtraction, "xhr-extraction", "xhr", false, "extract xhr requests"),
+		flagSet.BoolVarP(&options.XhrExtraction, "xhr-extraction", "xhr", false, "extract xhr request url,method in jsonl output"),
 	)
 
 	flagSet.CreateGroup("scope", "Scope",
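
Usage note (not part of the patch): a minimal sketch of how the renamed and re-described options might be combined on the command line. The target URL is a placeholder, and the separate -jsonl output flag is assumed from the katana README rather than from this diff:

  # -jsl replaces the old -jl short flag for jsluice parsing of javascript files;
  # -fx (form extraction) and -xhr (xhr extraction, headless only) results appear in the JSONL output
  ./katana -u https://example.com -jc -jsl -d 5 -headless -xhr -fx -jsonl -o output.jsonl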