forked from xxyzz/WordDumb
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathdatabase.py
212 lines (166 loc) · 5.34 KB
/
database.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
#!/usr/bin/env python3
import sqlite3
from pathlib import Path
from calibre_plugins.worddumb.unzip import load_json_or_pickle
def get_ll_path(asin, book_path):
    """Return the Word Wise sidecar path placed next to the book file."""
    folder = Path(book_path).parent
    return folder.joinpath(f'LanguageLayer.en.{asin}.kll')
def check_db_file(path):
    """Return a connection to a fresh in-memory database, or None.

    A finished sidecar (file present, no journal) yields None — there is
    nothing to rebuild.  A leftover journal file means the previous run
    was interrupted, so both files are removed and a new database is
    started.
    """
    if path.exists():
        journal = path.with_name(path.name + '-journal')
        if not journal.exists():
            # sidecar was written completely last time
            return None
        # crashed mid-write: discard the partial sidecar and its journal
        path.unlink()
        journal.unlink()
    return sqlite3.connect(':memory:')
def create_lang_layer(asin, book_path, acr, revision):
    """Build the Word Wise (LanguageLayer) sidecar database for a book.

    Returns a (connection, path) pair for a fresh in-memory database, or
    (None, None) when a complete sidecar file already exists on disk.
    """
    db_path = get_ll_path(asin, book_path)
    ll_conn = check_db_file(db_path)
    if ll_conn is None:
        return None, None
    ll_conn.executescript('''
        CREATE TABLE metadata (
            key TEXT,
            value TEXT
        );

        CREATE TABLE glosses (
            start INTEGER PRIMARY KEY,
            end INTEGER,
            difficulty INTEGER,
            sense_id INTEGER,
            low_confidence BOOLEAN
        );
    ''')
    # values other than acr/bookRevision are fixed by the kll format
    metadata = {
        'acr': acr,
        'targetLanguages': 'en',
        'sidecarRevision': '9',
        'bookRevision': revision,
        'sourceLanguage': 'en',
        'enDictionaryVersion': '2016-09-14',
        'enDictionaryRevision': '57',
        'enDictionaryId': 'kll.en.en',
        'sidecarFormat': '1.0',
    }
    ll_conn.executemany('INSERT INTO metadata VALUES (?, ?)',
                        metadata.items())
    return ll_conn, db_path
def insert_lemma(ll_conn, data):
    """Store one gloss occurrence.

    *data* is (start, end, difficulty, sense_id); the low_confidence
    column is always recorded as 0.
    """
    statement = '''
        INSERT INTO glosses (start, end, difficulty, sense_id, low_confidence)
        VALUES (?, ?, ?, ?, 0)
    '''
    ll_conn.execute(statement, data)
def get_x_ray_path(asin, book_path):
    """Return the X-Ray sidecar path placed next to the book file."""
    folder = Path(book_path).parent
    return folder.joinpath(f'XRAY.entities.{asin}.asc')
def create_x_ray_db(asin, book_path, lang):
    """Build the X-Ray sidecar database for a book.

    Creates the full Kindle X-Ray schema, seeds the fixed source/entity
    rows, and loads the localizable UI strings.  Returns a
    (connection, path) pair, or (None, None) when a complete sidecar
    file already exists on disk.
    """
    db_path = get_x_ray_path(asin, book_path)
    conn = check_db_file(db_path)
    if conn is None:
        return None, None
    conn.executescript('''
        PRAGMA user_version = 1;
        CREATE TABLE book_metadata (
            srl INTEGER,
            erl INTEGER,
            has_images TINYINT,
            has_excerpts TINYINT,
            show_spoilers_default TINYINT,
            num_people INTEGER,
            num_terms INTEGER,
            num_images INTEGER,
            preview_images TEXT);
        CREATE TABLE bookmentions_entity (
            id INTEGER PRIMARY KEY,
            asin TEXT,
            title TEXT,
            authors TEXT,
            description TEXT,
            ratings INTEGER,
            totalRatings INTEGER,
            type TEXT);
        CREATE TABLE bookmentions_occurrence (
            entity INTEGER,
            start INTEGER,
            length INTEGER);
        CREATE TABLE entity (
            id INTEGER PRIMARY KEY,
            label TEXT,
            loc_label INTEGER,
            type INTEGER,
            count INTEGER,
            has_info_card TINYINT);
        CREATE TABLE entity_description (
            text TEXT,
            source_wildcard TEXT,
            source INTEGER,
            entity INTEGER PRIMARY KEY);
        CREATE TABLE entity_excerpt (
            entity INTEGER,
            excerpt INTEGER);
        CREATE TABLE excerpt (
            id INTEGER PRIMARY KEY,
            start INTEGER,
            length INTEGER,
            image TEXT,
            related_entities TEXT,
            goto INTEGER);
        CREATE TABLE occurrence (
            entity INTEGER,
            start INTEGER,
            length INTEGER);
        CREATE TABLE source (
            id INTEGER PRIMARY KEY,
            label INTEGER,
            url INTEGER,
            license_label INTEGER,
            license_url INTEGER);
        CREATE TABLE string (
            id INTEGER,
            language TEXT,
            text TEXT);
        CREATE TABLE type (
            id INTEGER PRIMARY KEY,
            label INTEGER,
            singular_label INTEGER,
            icon INTEGER,
            top_mentioned_entities TEXT);
        INSERT INTO entity (id, loc_label, has_info_card) VALUES(0, 1, 0);
        INSERT INTO source (id, label, url) VALUES(0, 5, 20);
        INSERT INTO source VALUES(1, 6, 21, 7, 8);
        INSERT INTO source (id, label, url) VALUES(2, 4, 22);
    ''')
    str_list = load_json_or_pickle('data/x_ray_strings.json', True)
    if lang != 'en':
        # retarget the Wikipedia URL template at the book's language edition
        str_list[-2][-1] = f'https://{lang}.wikipedia.org/wiki/%s'
    conn.executemany('INSERT INTO string VALUES(?, ?, ?)', str_list)
    return conn, db_path
def create_x_indices(conn):
    """Add the indices the Kindle reader expects on the X-Ray tables."""
    script = '''
        CREATE INDEX idx_entity_type ON entity(type ASC);
        CREATE INDEX idx_entity_excerpt ON entity_excerpt(entity ASC);
        CREATE INDEX idx_occurrence_start ON occurrence(start ASC);
    '''
    conn.executescript(script)
def insert_x_book_metadata(conn, data):
    """Store the single book_metadata row.

    *data* is (erl, has_images, num_people, num_terms, num_images,
    preview_images); srl is fixed at 0 and excerpts/spoilers are
    disabled.
    """
    statement = 'INSERT INTO book_metadata VALUES(0, ?, ?, 0, 0, ?, ?, ?, ?)'
    conn.execute(statement, data)
def insert_x_entity(conn, data):
    """Bulk-insert entity rows from (id, label, type, count) tuples.

    Every inserted entity is flagged as having an info card.
    """
    statement = '''
        INSERT INTO entity (id, label, type, count, has_info_card)
        VALUES(?, ?, ?, ?, 1)'''
    conn.executemany(statement, data)
def insert_x_entity_description(conn, data):
    """Store one entity description row.

    *data* is (text, source_wildcard, source, entity).
    """
    statement = 'INSERT INTO entity_description VALUES(?, ?, ?, ?)'
    conn.execute(statement, data)
def insert_x_occurrence(conn, data):
    """Store one entity occurrence as (entity, start, length)."""
    statement = 'INSERT INTO occurrence VALUES(?, ?, ?)'
    conn.execute(statement, data)
def insert_x_type(conn, data):
    """Store one type row.

    *data* is (id, label, singular_label, icon, top_mentioned_entities).
    """
    statement = 'INSERT INTO type VALUES(?, ?, ?, ?, ?)'
    conn.execute(statement, data)
def insert_x_excerpt_image(conn, data):
    """Store one image excerpt as (id, start, image, goto).

    The length column is always recorded as 0 for images.
    """
    statement = '''
        INSERT INTO excerpt (id, start, length, image, goto)
        VALUES(?, ?, 0, ?, ?)'''
    conn.execute(statement, data)
def save_db(source, dest_path):
    """Persist the in-memory *source* connection to the file *dest_path*.

    Commits pending changes, copies the whole database with SQLite's
    backup API, and closes both connections.  The destination file is
    created by sqlite3.connect itself, so no explicit touch() is needed.

    Fixes: the original leaked both connections when connect() or
    backup() raised; try/finally now guarantees cleanup.
    """
    source.commit()
    dest_path.parent.mkdir(exist_ok=True)
    try:
        dest = sqlite3.connect(dest_path)
        try:
            with dest:  # run the copy inside a transaction
                source.backup(dest)
        finally:
            dest.close()
    finally:
        source.close()