Skip to content

Commit

Permalink
Handle verbosity, fallbacking on rate limit by ip and check character…
Browse files Browse the repository at this point in the history
… encoding works
  • Loading branch information
axelberardino committed Aug 17, 2019
1 parent 2575635 commit f5d2d13
Show file tree
Hide file tree
Showing 8 changed files with 69 additions and 15 deletions.
9 changes: 4 additions & 5 deletions TODO
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
Real issues
* New release needed
* Regenerate demo files to add download button

Ideas
* More rigorous item description generation
* Shop id for the link after shop generation
* Search bar? (lots of work, may easily be slow)
* Minify generated files, reducing size.
* Filter character by league on generation, to not mix them.
* Choose what tabs and characters to generate
* By character name
* By stash name, by stash number, by range
8 changes: 7 additions & 1 deletion cmd/cli/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ func main() {
league := flag.String("league", "standard", "league name (anarchy, legion, synthesis, delve...)")
output := flag.String("output", "", "where to generate html file (put \"-\" for stdin), if empty, a generated name will be created (account-league.html)")
cache := flag.Bool("cache", false, "do not call distant api, and use local cache if possible, for debug purpose only")
verbosity := flag.Int("verbosity", 0, "set the log verbose level")
interactive := flag.Bool("interactive", false, "interactive mode")
version := flag.Bool("version", false, "display the version of this tool")
flag.Parse()
Expand Down Expand Up @@ -67,7 +68,12 @@ func main() {
}
}

scraper := scraper.NewScraper(*account, *poeSessID, *realm, *league, *cache)
scraper := scraper.NewScraper(*account, *poeSessID, *realm, *league)
if *cache {
scraper.EnableCache()
}
scraper.SetVerbosity(*verbosity)

data, errScrap := scraper.ScrapEverything()
if errScrap != nil {
fmt.Println("can't scrap data", errScrap)
Expand Down
14 changes: 12 additions & 2 deletions cmd/server/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,18 @@ import (
"github.com/poe-stash/scraper"
)

// EnvMiddleware returns a gin middleware that stores the given
// verbosity level in every request context under the "verbosity" key,
// so downstream handlers can retrieve it via c.MustGet("verbosity").
func EnvMiddleware(verbosity int) gin.HandlerFunc {
	middleware := func(c *gin.Context) {
		c.Set("verbosity", verbosity)
		c.Next()
	}
	return middleware
}

// setupRouter setups the http server and all its pages.
func setupRouter(passwords map[string]string) *gin.Engine {
func setupRouter(passwords map[string]string, verbosity int) *gin.Engine {
router := gin.Default()
router.Use(EnvMiddleware(verbosity))

t := template.Must(generate.LoadAllTemplates())
router.SetHTMLTemplate(t)
Expand Down Expand Up @@ -91,6 +100,7 @@ func main() {
port := flag.Int("port", 2121, "port")
passwordFile := flag.String("passwords", "", "password file (containing login:pass in plain text)")
version := flag.Bool("version", false, "display the version of this tool")
verbosity := flag.Int("verbosity", 0, "set the log verbose level")
flag.Parse()

if *version {
Expand Down Expand Up @@ -121,6 +131,6 @@ func main() {
}
}

r := setupRouter(passwords)
r := setupRouter(passwords, *verbosity)
r.Run(fmt.Sprintf(":%d", *port))
}
5 changes: 4 additions & 1 deletion cmd/server/page/gen_account.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@ import (

// GenAccountHandler handles refresh of an account.
func GenAccountHandler(c *gin.Context) {
verbosity := c.MustGet("verbosity").(int)

account := c.Params.ByName("account")
poeSessID := ""
realm := "pc"
Expand All @@ -41,7 +43,8 @@ func GenAccountHandler(c *gin.Context) {
}
}

scrap := scraper.NewScraper(account, poeSessID, realm, league, false)
scrap := scraper.NewScraper(account, poeSessID, realm, league)
scrap.SetVerbosity(verbosity)
data, errScrap := scrap.ScrapEverything()
if errScrap != nil {
c.HTML(http.StatusOK, "error", errScrap)
Expand Down
2 changes: 1 addition & 1 deletion cmd/server/page/main_page.go
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ func MainPageHandler(c *gin.Context) {
return
}

scraper := scraper.NewScraper("", "", "", "", false)
scraper := scraper.NewScraper("", "", "", "")
leagues, errLeagues := scraper.GetLeagues()
if errLeagues != nil {
c.HTML(http.StatusInternalServerError, "error", errLeagues)
Expand Down
6 changes: 6 additions & 0 deletions docs/faq.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,12 @@

## Frequently asked questions

### Generating my profile is quite long

The issue is that GGG rate limits queries to one per second, which means the
tool cannot download any faster than that. Every stash tab takes one second to
be fetched, so if you have hundreds of them, you will have to wait several minutes.

### Your currencies are not up to date

Yes, it's hard coded, not live. They will never be exact, or intended to be.
Expand Down
2 changes: 1 addition & 1 deletion misc/version.go
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package misc

// Version of the application.
const Version = "v0.2"
const Version = "v0.3"
38 changes: 34 additions & 4 deletions scraper/scraper.go
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,9 @@ const (

// Scraper scraps path of exile site using its API.
type Scraper struct {
cache bool
cacheDir string
verbosity int
cache bool
cacheDir string

accountName string
poeSessionID string
Expand All @@ -58,9 +59,8 @@ type ScrapedData struct {
}

// NewScraper returns a configured scraper.
func NewScraper(accountName, poeSessionID, realm, league string, cache bool) *Scraper {
func NewScraper(accountName, poeSessionID, realm, league string) *Scraper {
return &Scraper{
cache: cache,
cacheDir: DataCacheDir,
accountName: accountName,
poeSessionID: poeSessionID,
Expand All @@ -71,6 +71,17 @@ func NewScraper(accountName, poeSessionID, realm, league string, cache bool) *Sc
}
}

// EnableCache turns on local caching of API queries.
// Useful for debugging only; do not enable it in production.
func (s *Scraper) EnableCache() {
	s.cache = true
}

// SetVerbosity sets the log verbosity level. A level above 0 prints
// per-query rate-limit information; above 1 also dumps the raw
// request and response.
func (s *Scraper) SetVerbosity(v int) {
	s.verbosity = v
}

// hash url into a number.
func hash(s string) string {
h := fnv.New32a()
Expand Down Expand Up @@ -106,6 +117,12 @@ func (s *Scraper) CallAPI(apiURL string) ([]byte, error) {
rateLimiter := s.rateLimitManager.GetRateLimiter(s.poeSessionID, baseURL)

waitTime, queryDone := rateLimiter.NextQuery()
if s.verbosity > 0 {
fmt.Println("wait:", waitTime, "query:", apiURL)
if s.verbosity > 1 {
fmt.Println("request:", req)
}
}
time.Sleep(waitTime)

// Query the server.
Expand All @@ -117,14 +134,27 @@ func (s *Scraper) CallAPI(apiURL string) ([]byte, error) {

// Let check if there are some rate limiting rules
rateLimitRules := resp.Header.Get("X-Rate-Limit-Account")
if rateLimitRules == "" {
rateLimitRules = resp.Header.Get("X-Rate-Limit-Ip")
}
rateLimitState := resp.Header.Get("X-Rate-Limit-Account-State")
if rateLimitState == "" {
rateLimitState = resp.Header.Get("X-Rate-Limit-Ip-State")
}
rules, errRule := ExtractFirstRuleFromString(rateLimitRules)
state, errState := ExtractFirstRuleFromString(rateLimitState)
// If so, then update our current rate limit counters with the ones
// the server see from its side (for better accuracy).
if errRule == nil && errState == nil {
s.rateLimitManager.UpdateRateLimiter(s.poeSessionID, baseURL, rules, state)
}
if s.verbosity > 0 {
r := s.rateLimitManager.GetRateLimiter(s.poeSessionID, baseURL)
fmt.Println("Status:", resp.StatusCode, "Rate:", r.NbQuery, "/", r.NbMaxQuery, "ServerRate:", rateLimitState, rateLimitRules)
if s.verbosity > 1 {
fmt.Println("Response:", resp)
}
}

defer func() {
localErr := resp.Body.Close()
Expand Down

0 comments on commit f5d2d13

Please sign in to comment.