Commit fd82aab

apply isort
1 parent 6ff1489 commit fd82aab
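
For context: isort normalizes import blocks deterministically. It sorts the names inside each "from ... import ..." statement alphabetically and separates standard-library, third-party, and local imports into groups, which is exactly the kind of reordering seen in the diffs below. The exact invocation and configuration used for this commit are not shown; the following is a minimal sketch of the effect using isort's public Python API (the input string is illustrative):

# Minimal sketch, assuming isort >= 5 is installed (pip install isort).
# isort.code() returns the given source string with its imports sorted.
import isort

before = "from marisa_trie import Trie, RecordTrie\nfrom icu import Locale, BreakIterator\n"
print(isort.code(before))
# from icu import BreakIterator, Locale
# from marisa_trie import RecordTrie, Trie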

File tree

wikipedia2vec/dictionary.py
wikipedia2vec/link_graph.py
wikipedia2vec/mention_db.py
wikipedia2vec/utils/sentence_detector/icu_sentence_detector.py
wikipedia2vec/utils/tokenizer/icu_tokenizer.py
wikipedia2vec/utils/tokenizer/token.py
wikipedia2vec/utils/wiki_dump_reader.py
wikipedia2vec/wikipedia2vec.py

8 files changed: 21 additions & 22 deletions


wikipedia2vec/dictionary.py

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@
 import cython
 import joblib
 import numpy as np
-from marisa_trie import Trie, RecordTrie
+from marisa_trie import RecordTrie, Trie
 from tqdm import tqdm
 
 from .dump_db import DumpDB

wikipedia2vec/link_graph.py

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@
 from functools import partial
 from itertools import chain
 from multiprocessing.pool import Pool
-from typing import Optional, List, Tuple, Union
+from typing import List, Optional, Tuple, Union
 from uuid import uuid1
 
 import joblib

wikipedia2vec/mention_db.py

Lines changed: 3 additions & 5 deletions
@@ -1,16 +1,15 @@
 import logging
-import pkg_resources
 import time
-from collections import defaultdict, Counter
+from collections import Counter, defaultdict
 from contextlib import closing
 from functools import partial
 from multiprocessing.pool import Pool
-from typing import FrozenSet, Iterator, List, Optional, Tuple, Union
+from typing import FrozenSet, Iterator, List, Tuple, Union
 from uuid import uuid1
 
 import cython
 import joblib
-from marisa_trie import Trie, RecordTrie
+from marisa_trie import RecordTrie, Trie
 from tqdm import tqdm
 
 from .dictionary import Dictionary, Entity
@@ -213,7 +212,6 @@ def item_generator():
             dump_db=dump_db.uuid,
             dictionary=dictionary.uuid,
             build_time=time.time() - start_time,
-            version=pkg_resources.get_distribution("wikipedia2vec").version,
         )
 
         return MentionDB(mention_trie, data_trie, dictionary, case_sensitive, max_mention_len, build_params, uuid)
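
A side note on the second hunk above: besides the import reordering, it drops the pkg_resources import together with the version field that was computed from it. This commit does not add a replacement; purely for illustration, the standard-library way to look up an installed package's version (Python 3.8+) is importlib.metadata:

# Illustrative only; not part of this commit. Standard-library alternative to
# pkg_resources.get_distribution("wikipedia2vec").version:
from importlib.metadata import version

print(version("wikipedia2vec"))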

wikipedia2vec/utils/sentence_detector/icu_sentence_detector.py

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 from typing import List
 
-from icu import Locale, BreakIterator
+from icu import BreakIterator, Locale
 
 from .base_sentence_detector import BaseSentenceDetector
 from .sentence import Sentence

wikipedia2vec/utils/tokenizer/icu_tokenizer.py

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 import re
 from typing import List, Tuple
 
-from icu import Locale, BreakIterator
+from icu import BreakIterator, Locale
 
 from .base_tokenizer import BaseTokenizer
 

wikipedia2vec/utils/tokenizer/token.py

Lines changed: 2 additions & 2 deletions
@@ -1,7 +1,7 @@
-import cython
-
 from typing import Tuple
 
+import cython
+
 
 @cython.cclass
 class Token:

wikipedia2vec/utils/wiki_dump_reader.py

Lines changed: 0 additions & 1 deletion
@@ -2,7 +2,6 @@
 import logging
 import re
 from typing import Iterator, Tuple
-
 from xml.etree.ElementTree import iterparse
 
 from .utils import normalize_title

wikipedia2vec/wikipedia2vec.py

Lines changed: 12 additions & 10 deletions
@@ -4,33 +4,35 @@
 import random
 import re
 import time
+from collections import defaultdict
 from contextlib import closing
 from functools import partial
+from itertools import islice
 from multiprocessing import RawArray
 from multiprocessing.pool import Pool
 from typing import Any, Iterable, List, NamedTuple, Optional, Tuple
 
 import cython
 import joblib
 import numpy as np
-from collections import defaultdict
-from itertools import islice
-from marisa_trie import Trie, RecordTrie
+from marisa_trie import RecordTrie, Trie
 from tqdm import tqdm
-from cython.cimports.libcpp.random import mt19937
-from cython.cimports.scipy.linalg.cython_blas import saxpy, sdot
+
+from cython.cimports.libcpp.random import mt19937
+from cython.cimports.scipy.linalg.cython_blas import saxpy, sdot
 
 from .dictionary import Dictionary
 from .dump_db import DumpDB
 from .link_graph import LinkGraph
 from .mention_db import MentionDB
 from .utils.sentence_detector.base_sentence_detector import BaseSentenceDetector
 from .utils.tokenizer.base_tokenizer import BaseTokenizer
-from cython.cimports.wikipedia2vec.dictionary import Item, Word
-from cython.cimports.wikipedia2vec.dump_db import Paragraph, WikiLink
-from cython.cimports.wikipedia2vec.mention_db import Mention
-from cython.cimports.wikipedia2vec.utils.tokenizer.token import Token
-from cython.cimports.wikipedia2vec.utils.sentence_detector.sentence import Sentence
+
+from cython.cimports.wikipedia2vec.dictionary import Item, Word
+from cython.cimports.wikipedia2vec.dump_db import Paragraph, WikiLink
+from cython.cimports.wikipedia2vec.mention_db import Mention
+from cython.cimports.wikipedia2vec.utils.sentence_detector.sentence import Sentence
+from cython.cimports.wikipedia2vec.utils.tokenizer.token import Token
 
 MAX_EXP = cython.declare(cython.float, 6.0)
 EXP_TABLE_SIZE = cython.declare(cython.int, 1000)
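
For readers unfamiliar with the cython.cimports lines above: this module is a pure-Python-mode Cython file (a .py file compiled by Cython 3), where C-level declarations are pulled in with ordinary import statements through the special cython.cimports package rather than .pyx-style cimport syntax. A minimal standalone sketch of that pattern (the module and function names here are illustrative and unrelated to this commit):

# Pure Python mode sketch; requires compilation with Cython 3 (cythonize) to run,
# since cython.cimports imports are resolved at compile time.
import cython
from cython.cimports.libc.math import sqrt  # C-level sqrt via cython.cimports


@cython.cfunc
def euclidean(x: cython.double, y: cython.double) -> cython.double:
    # Compiled to a direct C call; no Python-level sqrt lookup.
    return sqrt(x * x + y * y)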

0 commit comments
