Skip to content

Commit 971d4b3

Browse files
authored
Fix for 0.3.0 (#105)
* More docs * Update codecov-actions * Use more explicit gcc options
1 parent 467d30b commit 971d4b3

File tree

16 files changed

+101
-2070
lines changed

16 files changed

+101
-2070
lines changed

.github/workflows/run-test.yml

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,8 @@ jobs:
1717
run: |
1818
pip install --upgrade pip
1919
sudo apt-get install lcov
20-
CXXFLAGS="-O0 -g -coverage" pip install -e .
20+
FLAGS="-fprofile-arcs -ftest-coverage"
21+
CFLAGS="$FLAGS" CXXFLAGS="$FLAGS" pip install -e .
2122
- name: Run pytest
2223
run: |
2324
pip install pytest pytest-mock pytest-cov
@@ -28,10 +29,10 @@ jobs:
2829
coverage xml
2930
lcov -d `pwd` -c -o coverage.info
3031
- name: Upload coverage to Codecov
31-
uses: codecov/codecov-action@v1
32+
uses: codecov/codecov-action@v3
3233
with:
3334
files: ./coverage.xml,./coverage.info
35+
name: codecov-umbrella
36+
fail_ci_if_error: true
3437
verbose: false
3538
env_vars: OS,PYTHON
36-
name: codecov-umbrella
37-
fail_ci_if_error: false

create_pb_stubs.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ for module_name in "${modules[@]}"
99
do
1010
echo "Create stub for $module_name"
1111
pybind11-stubgen -o stubs --no-setup-py "$module_name"
12-
output_path="$(echo "${module_name}" | sed 's/\./\//g').pyi"
12+
output_path="src/$(echo "${module_name}" | sed 's/\./\//g').pyi"
1313
input_path="stubs/$(echo "${module_name}" | sed 's/\./\//g')-stubs/__init__.pyi"
1414
rm "${output_path}"
1515
echo 'm: int

docs/source/api_reference.rst

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -95,3 +95,5 @@ Dataset
9595
MovieLens1MDataManager
9696
MovieLens100KDataManager
9797
MovieLens20MDataManager
98+
NeuMFML1MDownloader
99+
NeuMFMPinterestDownloader

examples/1-vs-100-negative.ipynb

Lines changed: 16 additions & 2049 deletions
Large diffs are not rendered by default.

run_autobuild.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,5 +3,5 @@
33
sphinx-autobuild \
44
--host 0.0.0.0 \
55
--port 9999 \
6-
--watch irspack/ \
6+
--watch src/irspack/ \
77
docs/source docs/build

src/irspack/dataset/__init__.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,11 +5,14 @@
55
MovieLens20MDataManager,
66
MovieLens100KDataManager,
77
)
8+
from .neu_mf import NeuMFML1MDownloader, NeuMFMPinterestDownloader
89

910
__all__ = [
1011
"MovieLens100KDataManager",
1112
"MovieLens1MDataManager",
1213
"MovieLens20MDataManager",
1314
"CiteULikeADataManager",
1415
"AmazonMusicDataManager",
16+
"NeuMFML1MDownloader",
17+
"NeuMFMPinterestDownloader",
1518
]

src/irspack/dataset/downloader.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,15 @@ def _save_to_zippath(self, path: Path) -> None:
1717
def __init__(
1818
self, zippath: Optional[Union[Path, str]] = None, force_download: bool = False
1919
):
20+
"""Specify the zip path for the dataset. If that path does not exist, try downloading the relevant data from online resources.
21+
22+
Args:
23+
zippath (Optional[Union[Path, str]], optional): Where the zipped data is located. If `None`, a class-specific default path is used. Defaults to None.
24+
force_download (bool, optional): If `True`, start downloading immediately without prompting for permission. Defaults to False.
25+
26+
Raises:
27+
RuntimeError: Presumably raised when the dataset cannot be obtained (e.g., the download is declined) — confirm against the method body.
28+
"""
2029
if zippath is None:
2130
zippath = self.DEFAULT_PATH
2231
zippath = Path(zippath)

src/irspack/dataset/neu_mf.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -54,13 +54,33 @@ def read_train_test(self) -> Tuple[pd.DataFrame, pd.DataFrame]:
5454

5555

5656
class NeuMFML1MDownloader(NeuMFDownloader):
57+
r"""Manages MovieLens 1M dataset split under 1-vs-100 negative evaluation protocol.
58+
59+
Args:
60+
zippath:
61+
Where the zipped data is located. If `None`, assumes the path to be `~/.neumf-ml-1m.zip`.
62+
If the designated path does not exist, you will be prompted for the permission to download the data.
63+
Defaults to `None`.
64+
force_download:
65+
If `True`, the class will not prompt for the permission and start downloading immediately.
66+
"""
5767
DEFAULT_PATH = Path("~/.neumf-ml-1m.zip").expanduser()
5868

5969
TRAIN_URL = "https://raw.githubusercontent.com/tohtsky/neural_collaborative_filtering/master/Data/ml-1m.train.rating"
6070
NEGATIVE_URL = "https://raw.githubusercontent.com/tohtsky/neural_collaborative_filtering/master/Data/ml-1m.test.negative"
6171

6272

6373
class NeuMFMPinterestDownloader(NeuMFDownloader):
74+
r"""Manages Pinterest dataset split under 1-vs-100 negative evaluation protocol.
75+
76+
Args:
77+
zippath:
78+
Where the zipped data is located. If `None`, assumes the path to be `~/.neumf-pinterest.zip`.
79+
If the designated path does not exist, you will be prompted for the permission to download the data.
80+
Defaults to `None`.
81+
force_download:
82+
If `True`, the class will not prompt for the permission and start downloading immediately.
83+
"""
6484
DEFAULT_PATH = Path("~/.neumf-pinterest.zip").expanduser()
6585

6686
TRAIN_URL = "https://raw.githubusercontent.com/tohtsky/neural_collaborative_filtering/master/Data/pinterest-20.train.rating"

src/irspack/recommenders/base.py

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -84,11 +84,13 @@ class BaseRecommender(object, metaclass=RecommenderMeta):
8484
config_class: Type[RecommenderConfig]
8585
default_tune_range: List[ParameterRange]
8686

87+
X_train_all: sps.csr_matrix
88+
"""The matrix to feed into recommender."""
89+
8790
def __init__(self, X_train_all: InteractionMatrix, **kwargs: Any) -> None:
8891
self.X_train_all: sps.csr_matrix = sps.csr_matrix(X_train_all).astype(
8992
np.float64
9093
)
91-
"""The matrix to feed into recommender."""
9294

9395
self.n_users: int = self.X_train_all.shape[0]
9496
self.n_items: int = self.X_train_all.shape[1]
@@ -387,18 +389,18 @@ def get_score_cold_user_remove_seen(self, X: InteractionMatrix) -> DenseScoreArr
387389
class BaseSimilarityRecommender(BaseRecommender):
388390
"""The computed item-item similarity. Might not be initialized before `learn()` is called."""
389391

390-
W_: Optional[Union[sps.csr_matrix, sps.csc_matrix, np.ndarray]]
392+
_W: Optional[Union[sps.csr_matrix, sps.csc_matrix, np.ndarray]]
391393

392394
def __init__(self, *args: Any, **kwargs: Any) -> None:
393395
super().__init__(*args, **kwargs)
394-
self.W_ = None
396+
self._W = None
395397

396398
@property
397399
def W(self) -> Union[sps.csr_matrix, sps.csc_matrix, np.ndarray]:
398400
"""The computed item-item similarity weight matrix."""
399-
if self.W_ is None:
401+
if self._W is None:
400402
raise RuntimeError("W fetched before fit.")
401-
return self.W_
403+
return self._W
402404

403405
def get_score(self, user_indices: UserIndexArray) -> DenseScoreArray:
404406
return _sparse_to_array(self.X_train_all[user_indices].dot(self.W))

src/irspack/recommenders/dense_slim.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,4 +50,4 @@ def _learn(self) -> None:
5050
P_dense *= -diag_P_inv[np.newaxis, :]
5151
range_ = np.arange(self.n_items)
5252
P_dense[range_, range_] = 0
53-
self.W_ = P_dense
53+
self._W = P_dense

0 commit comments

Comments
 (0)