-
Notifications
You must be signed in to change notification settings - Fork 1
/
parser.go
67 lines (61 loc) · 1.48 KB
/
parser.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
package siw
import (
//"bytes"
"strings"
// NOTE(review): this import path is deprecated; the package now lives at golang.org/x/net/html
"code.google.com/p/go.net/html"
//"fmt"
"io"
//"io/ioutil"
"log"
)
/*
Cut is a simple 'split-on-whitespace' tokenizer using `strings.Fields()`.
To be used when no better option is available.
*/
func Cut(sent string) []string {
	// strings.Fields already returns the whitespace-split tokens as a
	// []string; copying them element-by-element was redundant.
	return strings.Fields(sent)
}
/*
ParseHtml is a better html tokenizer using the `net/html` package.
Use this for parsing web stuff.
NOTE: the api for net/html may change; check regularly for updates!!
Example: read in some stream of bytes, implement io.Reader via byte buffer.
	data, _ := ioutil.ReadFile(fileStr)
	hText, hTags := ParseHtml(bytes.NewBuffer(data))
It returns the text tokens and the tag tokens (start, end, and
self-closing tags) as two separate slices.
NOTE : data struture for html.Token
	type Token struct {
		Type     TokenType
		DataAtom atom.Atom
		Data     string
		Attr     []Attribute
	}
	type Attribute struct {
		Namespace, Key, Val string
	}
*/
func ParseHtml(r io.Reader) (html_text, html_tags []html.Token) {
	d := html.NewTokenizer(r)
	for {
		tokenType := d.Next()
		if tokenType == html.ErrorToken {
			// ErrorToken signals both normal end-of-input (io.EOF) and
			// genuine tokenization errors; only log the latter so a
			// successful parse does not emit a spurious error line.
			if err := d.Err(); err != io.EOF {
				log.Println("html tokenizer error:", err)
			}
			return
		}
		token := d.Token()
		switch tokenType {
		// All three tag variants go to the same slice; merge the cases.
		case html.StartTagToken, html.EndTagToken, html.SelfClosingTagToken:
			html_tags = append(html_tags, token)
		case html.TextToken:
			html_text = append(html_text, token)
		}
	}
}