Skip to content

Commit 8acacaa

Browse files
authored
RHOAIENG-12605: Update JPype to 1.5.0 (#216)
* Update JPype to 1.5.0
* Add Python 3.11 to GHAs
* Fix lint error
* Workaround Pylint error
* Fix styling
1 parent a4a3da8 commit 8acacaa

File tree

3 files changed

+9
-7
lines changed

3 files changed

+9
-7
lines changed

.github/workflows/workflow.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ jobs:
77
runs-on: ubuntu-latest
88
strategy:
99
matrix:
10-
python-version: [ 3.8, 3.9 ]
10+
python-version: [ 3.8, 3.9, 3.11 ]
1111
java-version: [ 17 ]
1212
maven-version: [ '3.8.6' ]
1313
steps:

pyproject.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ classifiers = [
2222
]
2323

2424
dependencies = [
25-
"Jpype1==1.4.1",
25+
"Jpype1==1.5.0",
2626
"pyarrow==17.0.0",
2727
"matplotlib~=3.6.3",
2828
"pandas~=1.5.3",
@@ -32,7 +32,7 @@ dependencies = [
3232

3333
[project.optional-dependencies]
3434
dev = [
35-
"JPype1==1.4.1",
35+
"JPype1==1.5.0",
3636
"black~=22.12.0",
3737
"click==8.0.4",
3838
"joblib~=1.2.0",

src/trustyai/language/detoxify/tmarco.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -104,14 +104,16 @@ def __init__(
104104
"cuda" if torch.cuda.is_available() else "cpu"
105105
)
106106

107-
def load_models(self, experts: list[str] = None, expert_weights: list = None):
107+
def load_models(
108+
self, experts: list[str] = None, expert_weights: list = None
109+
): # pylint: disable=unsubscriptable-object
108110
"""Load expert models."""
109111
if expert_weights is not None:
110112
self.expert_weights = expert_weights
111113
expert_models = []
112114
for expert in experts:
113115
# Load TMaRCO models
114-
if (expert == "trustyai/gplus" or expert == "trustyai/gminus"):
116+
if expert in ["trustyai/gplus", "trustyai/gminus"]:
115117
expert = BartForConditionalGeneration.from_pretrained(
116118
expert,
117119
forced_bos_token_id=self.tokenizer.bos_token_id,
@@ -122,14 +124,14 @@ def load_models(self, experts: list[str] = None, expert_weights: list = None):
122124
expert = AutoModelForMaskedLM.from_pretrained(
123125
expert,
124126
forced_bos_token_id=self.tokenizer.bos_token_id,
125-
device_map = "auto"
127+
device_map="auto",
126128
)
127129
# Load HuggingFace models
128130
else:
129131
expert = AutoModelForCausalLM.from_pretrained(
130132
expert,
131133
forced_bos_token_id=self.tokenizer.bos_token_id,
132-
device_map = "auto"
134+
device_map="auto",
133135
)
134136
expert_models.append(expert)
135137
self.experts = expert_models

0 commit comments

Comments (0)