diff --git a/dajarep.go b/dajarep.go
index fa3e436..545b388 100644
--- a/dajarep.go
+++ b/dajarep.go
@@ -2,16 +2,17 @@ package main
 
 import (
-	"github.com/ikawaha/kagome"
 	"math"
 	"regexp"
 	"strings"
-)
 
-var (
-	token *kagome.Tokenizer
+	"github.com/ikawaha/kagome/tokenizer"
 )
 
+func init() {
+	tokenizer.SysDic()
+}
+
 //単語
 type word struct {
 	str string
@@ -87,7 +88,7 @@ func fixSentence(text string) string {
 //テキストからsentenceオブジェクトを作る。
 func getSentences(text string) []sentence {
 	var sentences []sentence
-	t:= getTokenizer()
+	t := tokenizer.New()
 
 	text = strings.Replace(text, "。", "\n", -1)
 	text = strings.Replace(text, ".", "\n", -1)
@@ -122,10 +123,3 @@ func getSentences(text string) []sentence {
 	}
 	return sentences
 }
-//Tokenizerを取得
-func getTokenizer() *kagome.Tokenizer{
-	if token == nil {
-		token = kagome.NewTokenizer()
-	}
-	return token
-}
\ No newline at end of file
diff --git a/main.go b/main.go
index 4cf8a26..ef65acf 100644
--- a/main.go
+++ b/main.go
@@ -46,9 +46,6 @@ func main() {
 		os.Exit(1)
 	}
 
-	//あらかじめTokenizerを読み込んで時短
-	_ = getTokenizer()
-
 	if interactive == false {
 		text, err := transEnc(text, encode)
 		if err != nil {
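For reference, a minimal sketch of how the kagome/tokenizer API used in the patch is typically driven, assuming the kagome v1 package: tokenizer.New() and tokenizer.SysDic() appear in the diff itself, while Tokenize, Surface, Features, Class and DUMMY come from the library's documented API rather than from this diff, and the sample sentence is arbitrary.

package main

import (
	"fmt"

	"github.com/ikawaha/kagome/tokenizer"
)

func main() {
	// tokenizer.SysDic() in the patch's init() only preloads the system
	// dictionary; tokenizer.New() builds a tokenizer backed by it.
	t := tokenizer.New()

	// Tokenize a sentence and print each token's surface form and features
	// (part of speech, reading, etc.).
	for _, token := range t.Tokenize("寿司が好きです") {
		if token.Class == tokenizer.DUMMY {
			continue // skip the BOS/EOS marker tokens
		}
		fmt.Println(token.Surface, token.Features())
	}
}

The init() hook appears to take over the preloading that main.go previously did with `_ = getTokenizer()` (the deleted comment reads "preload the Tokenizer ahead of time to save time"): the dictionary is loaded once at startup instead of on the first call to getSentences().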