Error while using get_predictor method

Solved!
MNOP
Level 3

I am facing an error while using the get_predictor method.

 

import dataiku
from dataiku import pandasutils as pdu
import pandas as pd

# Load the saved model deployed in the Flow by its ID
model = dataiku.Model("model_id")

model.get_info()
model.get_predictor()

 

I get this error:

---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-29-7152fc869c78> in <module>
----> 1 model.get_predictor()

/opt/dataiku-dss-12.5.1/python/dataiku/core/saved_model.py in get_predictor(self, version_id)
    272             sm = res["saved_model"]
    273             model_folder_context = build_readonly_saved_model_folder_context(model_folder, self.project_key, self.get_id())
--> 274             self._predictors[version_id] = build_predictor_for_saved_model(model_folder_context, sm.get("conditionalOutputs", []))
    275         return self._predictors[version_id]
    276

/opt/dataiku-dss-12.5.1/python/dataiku/core/saved_model.py in build_predictor_for_saved_model(model_folder_context, conditional_outputs)
    457     model_type = core_params.get("taskType")
    458     return build_predictor(model_type, model_folder_context, model_folder_context, split_folder_context,
--> 459                            conditional_outputs, core_params, split_desc)
    460
    461

/opt/dataiku-dss-12.5.1/python/dataiku/core/saved_model.py in build_predictor(model_type, model_folder_context, preprocessing_folder_context, split_folder_context, conditional_outputs, core_params, split_desc, train_split_desc, train_split_folder_context)
    571         return KerasPredictor(model_params, preprocessing, model, batch_size=100)
    572     else:
--> 573         preprocessing = Preprocessing(preprocessing_handler, modeling_params)
    574         feature_names = _get_or_compute_feature_names(model_params.model_perf, collector_data, preprocessing.pipeline)
    575         return Predictor(model_params, preprocessing, feature_names, clf)

/opt/dataiku-dss-12.5.1/python/dataiku/core/saved_model.py in __init__(self, preprocessing_handler, modeling_params, with_prediction)
    654 class Preprocessing:
    655     def __init__(self, preprocessing_handler, modeling_params, with_prediction=False):
--> 656         self.pipeline = preprocessing_handler.build_preprocessing_pipeline()
    657         self.pipeline_with_target = preprocessing_handler.build_preprocessing_pipeline(with_target=True,
    658                                                                                        allow_empty_mf=True,

/opt/dataiku-dss-12.5.1/python/dataiku/doctor/preprocessing_handler.py in build_preprocessing_pipeline(self, *args, **kwargs)
    224     def build_preprocessing_pipeline(self, *args, **kwargs):
    225         pipeline = PreprocessingPipeline(steps=list(self.preprocessing_steps(*args, **kwargs)))
--> 226         pipeline.init_resources(self)
    227         return pipeline
    228

/opt/dataiku-dss-12.5.1/python/dataiku/doctor/preprocessing/dataframe_preprocessing.py in init_resources(self, resource_handler)
   2512     def init_resources(self, resource_handler):
   2513         for step in self.steps:
-> 2514             step.init_resources(resource_handler)
   2515
   2516     def fit_and_process(self, input_df, *args, **kwargs):

/opt/dataiku-dss-12.5.1/python/dataiku/doctor/preprocessing/dataframe_preprocessing.py in init_resources(self, resources_handler)
   2178
   2179     def init_resources(self, resources_handler):
-> 2180         self.res = resources_handler.get_resource("custom_prep", "pkl")
   2181         if self.input_col in self.res:
   2182             self.processor = self.res[self.input_col]

/opt/dataiku-dss-12.5.1/python/dataiku/doctor/preprocessing_handler.py in get_resource(self, resource_name, resource_type)
    176             return self.__resources[resource_name]
    177         self.__resource_types[resource_name] = resource_type
--> 178         resource = read_resource(self._data_folder_context, resource_name, resource_type)
    179         if resource is None:
    180             resource = {}

/opt/dataiku-dss-12.5.1/python/dataiku/doctor/preprocessing_handler.py in read_resource(folder_context, resource_name, resource_type)
    122             with folder_context.get_file_path_to_read(zipped_name) as zipped_path:
    123                 with gzip.open(zipped_path, "rb") as f:
--> 124                     return dku_pickle.load(f)
    125         # Maintain compatibility for non-gzipped pkl files
    126         elif folder_context.isfile(legacy_filename):

/opt/dataiku-dss-12.5.1/python/dataiku/base/dku_pickle.py in load(pkl_file)
     49     """
     50     try:
---> 51         return SklearnUnpickler(pkl_file).load()
     52     except UnicodeDecodeError:
     53         raise PickleLoadException(u"Failed to unpickle {}. You might have been trying to load a resource saved in a python 2 code environment with a python 3 one.".format(pkl_file.name))

/opt/dataiku-dss-12.5.1/python/dataiku/base/dku_pickle.py in load(self)
    151
    152     def load(self):
--> 153         return super(SklearnUnpickler, self).load()

TypeError: code() takes at most 15 arguments (16 given)


Operating system used: Windows

3 Replies
AlexT
Dataiker

Hi,
There is likely an issue with the model having been trained on an older Python version.

"trying to load a resource saved in a python 2 code environment with a python 3"

Please try to retrain the model with Python 3 and publish the newly trained model, then retry the get_predictor code (see the sketch below).
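For reference, a minimal sketch of what that retry could look like once the retrained model has been redeployed; the "model_id" and "my_dataset" names are placeholders, not from the original thread:

import dataiku

# Load the redeployed saved model and the predictor of its active version
model = dataiku.Model("model_id")
predictor = model.get_predictor()

# Score a few rows to confirm the predictor now loads and predicts
df = dataiku.Dataset("my_dataset").get_dataframe().head(10)
print(predictor.predict(df))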

If the issue persists, please open a support ticket with the job diagnostics (with the get_predictor code) and the model training diagnostics.

Thanks

MNOP
Level 3
Author

@AlexT Thanks for your response.
How do I retrain the model with Python 3? This model was trained in the Lab. Do I have control over which Python version to use?

AlexT
Dataiker

Hi,
Yes, you can control the Python version from the Lab under:
Screenshot 2024-02-22 at 10.38.50 AM.png

You are likely using the built-in code environment, which previously used Python 2; retraining on the same built-in environment will now automatically use Python 3.
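As a quick sanity check (not from the original post), you can confirm which Python version a notebook or code environment is actually running:

import sys

# Prints the interpreter version of the current environment;
# after retraining, the model's environment should report Python 3.x
print(sys.version)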

Thanks
