Skip to content

Commit bf9be0e

Browse files
committed
fix function names
1 parent ae163f3 commit bf9be0e

File tree

3 files changed

+7
-7
lines changed

3 files changed

+7
-7
lines changed

wikipedia2vec/dictionary.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -193,7 +193,7 @@ def build(
193193
entity_counter = Counter()
194194
entity_doc_counter = Counter()
195195

196-
with closing(Pool(pool_size, initializer=init_worker, initargs=(dump_db, tokenizer))) as pool:
196+
with closing(Pool(pool_size, initializer=_init_worker, initargs=(dump_db, tokenizer))) as pool:
197197
with tqdm(total=dump_db.page_size(), mininterval=0.5, disable=not progressbar) as bar:
198198
f = partial(_process_page, lowercase=lowercase, min_paragraph_len=min_paragraph_len)
199199
for word_cnt, entity_cnt in pool.imap_unordered(f, dump_db.titles(), chunksize=chunk_size):
@@ -346,7 +346,7 @@ def load(target: Union[str, dict], mmap: bool = True) -> "Dictionary":
346346
_tokenizer: Optional[BaseTokenizer] = None
347347

348348

349-
def init_worker(dump_db: DumpDB, tokenizer: BaseTokenizer):
349+
def _init_worker(dump_db: DumpDB, tokenizer: BaseTokenizer):
350350
global _dump_db, _tokenizer
351351

352352
_dump_db = dump_db

wikipedia2vec/link_graph.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,7 @@ def build(
8282
logger.info("Step 1/2: Processing Wikipedia pages...")
8383

8484
with closing(
85-
Pool(pool_size, initializer=init_worker, initargs=(dump_db, dictionary.serialize(shared_array=True)))
85+
Pool(pool_size, initializer=_init_worker, initargs=(dump_db, dictionary.serialize(shared_array=True)))
8686
) as pool:
8787
rows = []
8888
cols = []
@@ -131,7 +131,7 @@ def build(
131131
_dictionary: Optional[Dictionary] = None
132132

133133

134-
def init_worker(dump_db: DumpDB, dictionary_obj: dict):
134+
def _init_worker(dump_db: DumpDB, dictionary_obj: dict):
135135
global _dump_db, _dictionary
136136

137137
_dump_db = dump_db

wikipedia2vec/mention_db.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -169,7 +169,7 @@ def build(
169169
name_dict = defaultdict(lambda: Counter())
170170
init_args = [dump_db, dictionary.serialize(shared_array=True), tokenizer, None]
171171

172-
with closing(Pool(pool_size, initializer=init_worker, initargs=init_args)) as pool:
172+
with closing(Pool(pool_size, initializer=_init_worker, initargs=init_args)) as pool:
173173
with tqdm(total=dump_db.page_size(), mininterval=0.5, disable=not progressbar) as bar:
174174
f = partial(_extract_links, max_mention_len=max_mention_len, case_sensitive=case_sensitive)
175175
for ret in pool.imap_unordered(f, dump_db.titles(), chunksize=chunk_size):
@@ -184,7 +184,7 @@ def build(
184184
name_counter = Counter()
185185
init_args[3] = name_trie
186186

187-
with closing(Pool(pool_size, initializer=init_worker, initargs=init_args)) as pool:
187+
with closing(Pool(pool_size, initializer=_init_worker, initargs=init_args)) as pool:
188188
with tqdm(total=dump_db.page_size(), mininterval=0.5, disable=not progressbar) as bar:
189189
f = partial(_count_occurrences, max_mention_len=max_mention_len, case_sensitive=case_sensitive)
190190
for names in pool.imap_unordered(f, dump_db.titles(), chunksize=chunk_size):
@@ -264,7 +264,7 @@ def load(target: Union[str, dict], dictionary: Dictionary) -> "MentionDB":
264264
_name_trie: Optional[Trie] = None
265265

266266

267-
def init_worker(dump_db: DumpDB, dictionary_obj: dict, tokenizer: BaseTokenizer, name_trie: Trie = None):
267+
def _init_worker(dump_db: DumpDB, dictionary_obj: dict, tokenizer: BaseTokenizer, name_trie: Trie = None):
268268
global _dump_db, _dictionary, _tokenizer, _name_trie
269269

270270
_dump_db = dump_db

0 commit comments

Comments (0)