Skip to content
This repository was archived by the owner on May 25, 2020. It is now read-only.

Commit ec68708

Browse files
Oxylibrium authored and nikita-smetanin committed
Python 3 Compatibility (#10)
* Python 2+3 compatibility
1 parent d46c3ef commit ec68708

35 files changed

+248
-52
lines changed

cakechat/api/response.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
import random
22

3+
from six.moves import xrange, map
4+
35
from cakechat.api.config import PREDICTION_MODE, NUM_BEST_CANDIDATES_TO_PICK_FROM, SAMPLING_ATTEMPTS_NUM, \
46
DEFAULT_RESPONSE
57
from cakechat.config import INPUT_CONTEXT_SIZE, INPUT_SEQUENCE_LENGTH, PREDICTION_MODES
@@ -53,7 +55,7 @@ def get_response(dialog_context, emotion):
5355
:param emotion: emotion to condition response
5456
:return: dialog response conditioned on input emotion
5557
"""
56-
tokenized_dialog_context = map(get_tokens_sequence, dialog_context)
58+
tokenized_dialog_context = list(map(get_tokens_sequence, dialog_context))
5759
tokenized_dialog_contexts = [tokenized_dialog_context]
5860
context_tokens_ids = transform_contexts_to_token_ids(tokenized_dialog_contexts, _cakechat_model.token_to_index,
5961
INPUT_SEQUENCE_LENGTH, INPUT_CONTEXT_SIZE)

cakechat/api/utils.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
from flask import jsonify
2+
from six import text_type
23

34

45
def get_api_error_response(message, code, logger):
@@ -7,8 +8,8 @@ def get_api_error_response(message, code, logger):
78

89

910
def _is_list_of_unicode_strings(data):
10-
return (isinstance(data, list) or isinstance(data, tuple)) and len(data) > 0 \
11-
and all(isinstance(s, unicode) for s in data)
11+
return bool(data and isinstance(data, (list, tuple)) and
12+
all(isinstance(s, text_type) for s in data))
1213

1314

1415
def parse_dataset_param(params, param_name, required=True):

cakechat/api/v1/server.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,9 +20,9 @@ def get_model_response():
2020
try:
2121
dialog_context = parse_dataset_param(params, param_name='context')
2222
except KeyError as e:
23-
return get_api_error_response('Malformed request, no "%s" param was found' % e.message, 400, _logger)
23+
return get_api_error_response('Malformed request, no "%s" param was found' % str(e), 400, _logger)
2424
except ValueError as e:
25-
return get_api_error_response('Malformed request: %s' % e.message, 400, _logger)
25+
return get_api_error_response('Malformed request: %s' % str(e), 400, _logger)
2626

2727
emotion = params.get('emotion', DEFAULT_CONDITION)
2828
if emotion not in EMOTIONS_TYPES:

cakechat/dialog_model/inference/candidates/beamsearch.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
1-
from itertools import izip_longest
1+
from six.moves import zip_longest
22

33
import numpy as np
4+
from six.moves import xrange
45
import theano
56

67
from cakechat.dialog_model.inference.candidates.abstract_generator import AbstractCandidatesGenerator
@@ -248,7 +249,7 @@ def _generate_candidates_for_one_context(self, condition_id, output_seq_len):
248249

249250
@timer
250251
def generate_candidates(self, context_token_ids, condition_ids, output_seq_len):
251-
x_with_conditions_batch = izip_longest(context_token_ids, condition_ids if condition_ids is not None else [])
252+
x_with_conditions_batch = zip_longest(context_token_ids, condition_ids if condition_ids is not None else [])
252253
result = []
253254
for x, condition_id in x_with_conditions_batch:
254255
self._compute_thought_vectors(x)

cakechat/dialog_model/inference/candidates/sampling.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import numpy as np
2+
from six.moves import xrange
23
import theano
34

45
from cakechat.dialog_model.inference.candidates.abstract_generator import AbstractCandidatesGenerator

cakechat/dialog_model/inference/predict.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import numpy as np
2+
from six.moves import xrange
23

34
from cakechat.config import MAX_PREDICTIONS_LENGTH, BEAM_SIZE, MMI_REVERSE_MODEL_SCORE_WEIGHT, DEFAULT_TEMPERATURE, \
45
SAMPLES_NUM_FOR_RERANKING, PREDICTION_MODES, REPETITION_PENALIZE_COEFFICIENT
@@ -98,7 +99,7 @@ def get_nn_responses(context_token_ids,
9899
response_tokens_ids = np.reshape(response_tokens_ids, (-1, output_seq_len))
99100
response_tokens = transform_token_ids_to_sentences(response_tokens_ids, nn_model.index_to_token)
100101

101-
lines_num = len(response_tokens) / output_candidates_num
102+
lines_num = len(response_tokens) // output_candidates_num
102103
responses = [response_tokens[i * output_candidates_num:(i + 1) * output_candidates_num] for i in xrange(lines_num)]
103104

104105
return responses

cakechat/dialog_model/inference/predictor.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import numpy as np
2+
from six.moves import xrange
23

34

45
class Predictor(object):

cakechat/dialog_model/inference/reranking.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,8 @@
11
from abc import ABCMeta, abstractmethod
2-
from itertools import izip_longest
2+
from six.moves import zip_longest
33

44
import numpy as np
5+
from six.moves import xrange
56

67
from cakechat.dialog_model.inference.service_tokens import ServiceTokensIDs
78
from cakechat.dialog_model.inference.utils import get_sequence_score_by_thought_vector, get_sequence_score, \
@@ -103,7 +104,7 @@ def rerank_candidates(self, contexts, all_candidates, condition_ids):
103104
condition_ids = [] if condition_ids is None else condition_ids # For izip_lingest
104105
candidates_scores = [
105106
self._compute_candidates_scores(context, candidates, condition_id)
106-
for context, candidates, condition_id in izip_longest(contexts, all_candidates, condition_ids)
107+
for context, candidates, condition_id in zip_longest(contexts, all_candidates, condition_ids)
107108
]
108109
scores_order = [np.argsort(-np.array(scores)) for scores in candidates_scores]
109110
batch_size = len(contexts)

cakechat/dialog_model/inference/tests/predict.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
import sys
33
import unittest
44
import numpy as np
5+
from six.moves import xrange
56

67
sys.path.append(
78
os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))))

cakechat/dialog_model/inference/tests/sampling.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44

55
import numpy as np
66
from scipy.stats import binom
7+
from six.moves import xrange
78

89
sys.path.append(
910
os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))))

0 commit comments

Comments (0)