vocabularies_generate.go
// +build generate

// Reads the per-language dictionary data files under ./data and generates
// a dictionary.go source file for each language.
package main

import (
"fmt"
"html/template"
"io/ioutil"
"log"
"os"
"strings"
"unicode"
"golang.org/x/text/transform"
"golang.org/x/text/unicode/norm"
)
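
// goTemplate renders one language's dictionaries as a generated Go source
// file that exposes a package-level Dictionary map keyed by PoS tag.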
var goTemplate = template.Must(template.New("go").Parse(`// Code generated by vocabularies_generate.go; DO NOT EDIT.
package {{.Language}}
// Dictionary maps a PoS tag to a map of word form to lemma.
var Dictionary = map[string]map[string]string{
{{- range $pos, $dict := .Entries}}
"{{$pos}}": {
{{- range $f, $l := $dict}}
"{{$f}}": "{{$l}}", {{end}}
}, {{end}}
}
`))
// Dicts maps a PoS code to its Dict of entries
type Dicts map[string]Dict
// Dict is a dictionary of form-lemma relations
type Dict map[string]string
// removeAccents removes accents from the string
// See https://blog.golang.org/normalization
func removeAccents(original string) (modified string, err error) {
isMn := func(r rune) bool {
return unicode.Is(unicode.Mn, r) // Mn: nonspacing marks
}
t := transform.Chain(norm.NFD, transform.RemoveFunc(isMn), norm.NFC)
modified, _, err = transform.String(t, original)
return modified, err
}
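
// processEntry parses a single "form lemma pos" line and records the
// form-to-lemma pair in the dictionary matching the entry's PoS tag.
// Entries with an unrecognized PoS prefix are skipped.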
func processEntry(langDicts Dicts, entry string) error {
sEntry := strings.Split(entry, " ") // form lemma pos
if len(sEntry) != 3 {
return fmt.Errorf("Invalid entry %s", entry)
}
var dictKey string
switch string(sEntry[2][0]) { // First character of pos
case "D": // determiner
dictKey = "DET"
case "A": // adjective
dictKey = "ADJ"
case "N": // noun
dictKey = "NOUN"
case "V": // verb
dictKey = "VERB"
case "R": // adverb
dictKey = "ADV"
case "S": // adposition
dictKey = "ADP"
case "C": // conjuntion
dictKey = "CONJ"
case "P": // pronoun
dictKey = "PRON"
case "I": // interjection
dictKey = "INTJ"
default:
return nil // Skip it
}
dict, ok := langDicts[dictKey]
if !ok {
dict = make(Dict)
}
if _, ok := dict[sEntry[0]]; !ok { // don't override; keep the first match
dict[sEntry[0]] = sEntry[1]
modified, err := removeAccents(sEntry[0])
if err != nil {
return err
}
if modified != sEntry[0] { // the form had accents; also index the accent-stripped variant
if _, ok := dict[modified]; !ok { // don't override; keep the first match
dict[modified] = sEntry[1]
}
}
}
langDicts[dictKey] = dict
return nil
}
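
// LanguageDictionary bundles a language code with its per-PoS dictionaries,
// ready to be rendered by goTemplate.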
type LanguageDictionary struct {
Language string
Entries Dicts
}
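
// loadDict reads a dictionary data file and feeds every non-empty line to
// processEntry, accumulating the results in langDicts.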
func loadDict(langDicts Dicts, dictFileName string) error {
content, err := ioutil.ReadFile(dictFileName)
if err != nil {
return err
}
entries := strings.Split(string(content), "\n")
for _, entry := range entries {
if entry != "" {
err := processEntry(langDicts, entry)
if err != nil {
return err
}
}
}
return nil
}
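
// generateLangDict loads all dictionary files for a language and renders
// them into <language>/dictionary.go using goTemplate.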
func generateLangDict(language string, files []string) error {
dicts := make(Dicts)
for _, d := range files {
err := loadDict(dicts, d)
if err != nil {
return err
}
}
langDict := LanguageDictionary{
Language: language,
Entries:  dicts,
}
outFile := fmt.Sprintf("%v/dictionary.go", language)
langDictf, err := os.OpenFile(outFile, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
if err != nil {
return err
}
defer langDictf.Close()
if err = goTemplate.Execute(langDictf, langDict); err != nil {
return fmt.Errorf("render %v: %v", outFile, err)
}
return nil
}
func main() {
fmt.Println("Starting dictionaries generation...")
fmt.Println("[Lemmatizer] Loading es dictionaries...")
esFiles := []string{
"./data/es/MM.adj",
"./data/es/MM.adv",
"./data/es/MM.int",
"./data/es/MM.nom",
"./data/es/MM.tanc",
"./data/es/MM.vaux",
"./data/es/MM.verb",
}
err := generateLangDict("es", esFiles)
if err != nil {
log.Fatal(err)
}
fmt.Println("[Lemmatizer] es Dictionaries loaded.")
fmt.Println("[Lemmatizer] Loading fr dictionaries...")
frFiles := []string{
"./data/fr/lefff.adj",
"./data/fr/lefff.adv",
"./data/fr/lefff.int",
"./data/fr/lefff.nom",
"./data/fr/lefff.tanc",
"./data/fr/lefff.vaux",
"./data/fr/lefff.verb",
}
err = generateLangDict("fr", frFiles)
if err != nil {
log.Fatal(err)
}
fmt.Println("[Lemmatizer] fr Dictionaries loaded.")
fmt.Println("[Lemmatizer] Loading de dictionaries...")
deFiles := []string{
"./data/de/de.adj",
"./data/de/de.adv",
"./data/de/de.closed",
"./data/de/de.contr",
"./data/de/de.int",
"./data/de/de.nouns",
"./data/de/de.proper",
"./data/de/de.verbs",
}
err = generateLangDict("de", deFiles)
if err != nil {
log.Fatal(err)
}
fmt.Println("[Lemmatizer] de Dictionaries loaded.")
}