#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# DO NOT MODIFY THIS FILE! It was generated by _shared_params_code_gen.py.

from typing import List

from pyspark.ml.param import Param, Params, TypeConverters


class HasMaxIter(Params):
    """
    Mixin for param maxIter: max number of iterations (>= 0).
    """

    maxIter: "Param[int]" = Param(
        Params._dummy(),
        "maxIter",
        "max number of iterations (>= 0).",
        typeConverter=TypeConverters.toInt,
    )

    def __init__(self) -> None:
        super(HasMaxIter, self).__init__()

    def getMaxIter(self) -> int:
        """
        Gets the value of maxIter or its default value.
        """
        return self.getOrDefault(self.maxIter)


class HasRegParam(Params):
    """
    Mixin for param regParam: regularization parameter (>= 0).
    """

    regParam: "Param[float]" = Param(
        Params._dummy(),
        "regParam",
        "regularization parameter (>= 0).",
        typeConverter=TypeConverters.toFloat,
    )

    def __init__(self) -> None:
        super(HasRegParam, self).__init__()

    def getRegParam(self) -> float:
        """
        Gets the value of regParam or its default value.
        """
        return self.getOrDefault(self.regParam)


class HasFeaturesCol(Params):
    """
    Mixin for param featuresCol: features column name.
    """

    featuresCol: "Param[str]" = Param(
        Params._dummy(),
        "featuresCol",
        "features column name.",
        typeConverter=TypeConverters.toString,
    )

    def __init__(self) -> None:
        super(HasFeaturesCol, self).__init__()
        self._setDefault(featuresCol="features")

    def getFeaturesCol(self) -> str:
        """
        Gets the value of featuresCol or its default value.
        """
        return self.getOrDefault(self.featuresCol)


class HasLabelCol(Params):
    """
    Mixin for param labelCol: label column name.
    """

    labelCol: "Param[str]" = Param(
        Params._dummy(),
        "labelCol",
        "label column name.",
        typeConverter=TypeConverters.toString,
    )

    def __init__(self) -> None:
        super(HasLabelCol, self).__init__()
        self._setDefault(labelCol="label")

    def getLabelCol(self) -> str:
        """
        Gets the value of labelCol or its default value.
        """
        return self.getOrDefault(self.labelCol)


class HasPredictionCol(Params):
    """
    Mixin for param predictionCol: prediction column name.
    """

    predictionCol: "Param[str]" = Param(
        Params._dummy(),
        "predictionCol",
        "prediction column name.",
        typeConverter=TypeConverters.toString,
    )

    def __init__(self) -> None:
        super(HasPredictionCol, self).__init__()
        self._setDefault(predictionCol="prediction")

    def getPredictionCol(self) -> str:
        """
        Gets the value of predictionCol or its default value.
        """
        return self.getOrDefault(self.predictionCol)


class HasProbabilityCol(Params):
    """
    Mixin for param probabilityCol: Column name for predicted class conditional probabilities. Note: Not all models output well-calibrated probability estimates! These probabilities should be treated as confidences, not precise probabilities.
    """

    probabilityCol: "Param[str]" = Param(
        Params._dummy(),
        "probabilityCol",
        "Column name for predicted class conditional probabilities. Note: Not all models output well-calibrated probability estimates! These probabilities should be treated as confidences, not precise probabilities.",
        typeConverter=TypeConverters.toString,
    )

    def __init__(self) -> None:
        super(HasProbabilityCol, self).__init__()
        self._setDefault(probabilityCol="probability")

    def getProbabilityCol(self) -> str:
        """
        Gets the value of probabilityCol or its default value.
        """
        return self.getOrDefault(self.probabilityCol)


class HasRawPredictionCol(Params):
    """
    Mixin for param rawPredictionCol: raw prediction (a.k.a. confidence) column name.
    """

    rawPredictionCol: "Param[str]" = Param(
        Params._dummy(),
        "rawPredictionCol",
        "raw prediction (a.k.a. confidence) column name.",
        typeConverter=TypeConverters.toString,
    )

    def __init__(self) -> None:
        super(HasRawPredictionCol, self).__init__()
        self._setDefault(rawPredictionCol="rawPrediction")

    def getRawPredictionCol(self) -> str:
        """
        Gets the value of rawPredictionCol or its default value.
        """
        return self.getOrDefault(self.rawPredictionCol)


class HasInputCol(Params):
    """
    Mixin for param inputCol: input column name.
    """

    inputCol: "Param[str]" = Param(
        Params._dummy(),
        "inputCol",
        "input column name.",
        typeConverter=TypeConverters.toString,
    )

    def __init__(self) -> None:
        super(HasInputCol, self).__init__()

    def getInputCol(self) -> str:
        """
        Gets the value of inputCol or its default value.
        """
        return self.getOrDefault(self.inputCol)


class HasInputCols(Params):
    """
    Mixin for param inputCols: input column names.
    """

    inputCols: "Param[List[str]]" = Param(
        Params._dummy(),
        "inputCols",
        "input column names.",
        typeConverter=TypeConverters.toListString,
    )

    def __init__(self) -> None:
        super(HasInputCols, self).__init__()

    def getInputCols(self) -> List[str]:
        """
        Gets the value of inputCols or its default value.
        """
        return self.getOrDefault(self.inputCols)


class HasOutputCol(Params):
    """
    Mixin for param outputCol: output column name.
    """

    outputCol: "Param[str]" = Param(
        Params._dummy(),
        "outputCol",
        "output column name.",
        typeConverter=TypeConverters.toString,
    )

    def __init__(self) -> None:
        super(HasOutputCol, self).__init__()
        self._setDefault(outputCol=self.uid + "__output")

    def getOutputCol(self) -> str:
        """
        Gets the value of outputCol or its default value.
        """
        return self.getOrDefault(self.outputCol)


class HasOutputCols(Params):
    """
    Mixin for param outputCols: output column names.
    """

    outputCols: "Param[List[str]]" = Param(
        Params._dummy(),
        "outputCols",
        "output column names.",
        typeConverter=TypeConverters.toListString,
    )

    def __init__(self) -> None:
        super(HasOutputCols, self).__init__()

    def getOutputCols(self) -> List[str]:
        """
        Gets the value of outputCols or its default value.
        """
        return self.getOrDefault(self.outputCols)


class HasNumFeatures(Params):
    """
    Mixin for param numFeatures: Number of features. Should be greater than 0.
    """

    numFeatures: "Param[int]" = Param(
        Params._dummy(),
        "numFeatures",
        "Number of features. Should be greater than 0.",
        typeConverter=TypeConverters.toInt,
    )

    def __init__(self) -> None:
        super(HasNumFeatures, self).__init__()
        self._setDefault(numFeatures=262144)

    def getNumFeatures(self) -> int:
        """
        Gets the value of numFeatures or its default value.
        """
        return self.getOrDefault(self.numFeatures)


class HasCheckpointInterval(Params):
    """
    Mixin for param checkpointInterval: set checkpoint interval (>= 1) or disable checkpoint (-1). E.g. 10 means that the cache will get checkpointed every 10 iterations. Note: this setting will be ignored if the checkpoint directory is not set in the SparkContext.
    """

    checkpointInterval: "Param[int]" = Param(
        Params._dummy(),
        "checkpointInterval",
        "set checkpoint interval (>= 1) or disable checkpoint (-1). E.g. 10 means that the cache will get checkpointed every 10 iterations. Note: this setting will be ignored if the checkpoint directory is not set in the SparkContext.",
        typeConverter=TypeConverters.toInt,
    )

    def __init__(self) -> None:
        super(HasCheckpointInterval, self).__init__()

    def getCheckpointInterval(self) -> int:
        """
        Gets the value of checkpointInterval or its default value.
        """
        return self.getOrDefault(self.checkpointInterval)


class HasSeed(Params):
    """
    Mixin for param seed: random seed.
    """

    seed: "Param[int]" = Param(
        Params._dummy(),
        "seed",
        "random seed.",
        typeConverter=TypeConverters.toInt,
    )

    def __init__(self) -> None:
        super(HasSeed, self).__init__()
        self._setDefault(seed=hash(type(self).__name__))

    def getSeed(self) -> int:
        """
        Gets the value of seed or its default value.
        """
        return self.getOrDefault(self.seed)


class HasTol(Params):
    """
    Mixin for param tol: the convergence tolerance for iterative algorithms (>= 0).
    """

    tol: "Param[float]" = Param(
        Params._dummy(),
        "tol",
        "the convergence tolerance for iterative algorithms (>= 0).",
        typeConverter=TypeConverters.toFloat,
    )

    def __init__(self) -> None:
        super(HasTol, self).__init__()

    def getTol(self) -> float:
        """
        Gets the value of tol or its default value.
        """
        return self.getOrDefault(self.tol)


class HasRelativeError(Params):
    """
    Mixin for param relativeError: the relative target precision for the approximate quantile algorithm. Must be in the range [0, 1]
    """

    relativeError: "Param[float]" = Param(
        Params._dummy(),
        "relativeError",
        "the relative target precision for the approximate quantile algorithm. Must be in the range [0, 1]",
        typeConverter=TypeConverters.toFloat,
    )

    def __init__(self) -> None:
        super(HasRelativeError, self).__init__()
        self._setDefault(relativeError=0.001)

    def getRelativeError(self) -> float:
        """
        Gets the value of relativeError or its default value.
        """
        return self.getOrDefault(self.relativeError)


class HasStepSize(Params):
    """
    Mixin for param stepSize: Step size to be used for each iteration of optimization (>= 0).
    """

    stepSize: "Param[float]" = Param(
        Params._dummy(),
        "stepSize",
        "Step size to be used for each iteration of optimization (>= 0).",
        typeConverter=TypeConverters.toFloat,
    )

    def __init__(self) -> None:
        super(HasStepSize, self).__init__()

    def getStepSize(self) -> float:
        """
        Gets the value of stepSize or its default value.
        """
        return self.getOrDefault(self.stepSize)


class HasHandleInvalid(Params):
    """
    Mixin for param handleInvalid: how to handle invalid entries. Options are skip (which will filter out rows with bad values), or error (which will throw an error). More options may be added later.
    """

    handleInvalid: "Param[str]" = Param(
        Params._dummy(),
        "handleInvalid",
        "how to handle invalid entries. Options are skip (which will filter out rows with bad values), or error (which will throw an error). More options may be added later.",
        typeConverter=TypeConverters.toString,
    )

    def __init__(self) -> None:
        super(HasHandleInvalid, self).__init__()

    def getHandleInvalid(self) -> str:
        """
        Gets the value of handleInvalid or its default value.
        """
        return self.getOrDefault(self.handleInvalid)


class HasElasticNetParam(Params):
    """
    Mixin for param elasticNetParam: the ElasticNet mixing parameter, in range [0, 1]. For alpha = 0, the penalty is an L2 penalty. For alpha = 1, it is an L1 penalty.
    """

    elasticNetParam: "Param[float]" = Param(
        Params._dummy(),
        "elasticNetParam",
        "the ElasticNet mixing parameter, in range [0, 1]. For alpha = 0, the penalty is an L2 penalty. For alpha = 1, it is an L1 penalty.",
        typeConverter=TypeConverters.toFloat,
    )

    def __init__(self) -> None:
        super(HasElasticNetParam, self).__init__()
        self._setDefault(elasticNetParam=0.0)

    def getElasticNetParam(self) -> float:
        """
        Gets the value of elasticNetParam or its default value.
        """
        return self.getOrDefault(self.elasticNetParam)


class HasFitIntercept(Params):
    """
    Mixin for param fitIntercept: whether to fit an intercept term.
    """

    fitIntercept: "Param[bool]" = Param(
        Params._dummy(),
        "fitIntercept",
        "whether to fit an intercept term.",
        typeConverter=TypeConverters.toBoolean,
    )

    def __init__(self) -> None:
        super(HasFitIntercept, self).__init__()
        self._setDefault(fitIntercept=True)

    def getFitIntercept(self) -> bool:
        """
        Gets the value of fitIntercept or its default value.
        """
        return self.getOrDefault(self.fitIntercept)


class HasStandardization(Params):
    """
    Mixin for param standardization: whether to standardize the training features before fitting the model.
    """

    standardization: "Param[bool]" = Param(
        Params._dummy(),
        "standardization",
        "whether to standardize the training features before fitting the model.",
        typeConverter=TypeConverters.toBoolean,
    )

    def __init__(self) -> None:
        super(HasStandardization, self).__init__()
        self._setDefault(standardization=True)

    def getStandardization(self) -> bool:
        """
        Gets the value of standardization or its default value.
        """
        return self.getOrDefault(self.standardization)


class HasThresholds(Params):
    """
    Mixin for param thresholds: Thresholds in multi-class classification to adjust the probability of predicting each class. Array must have length equal to the number of classes, with values > 0, excepting that at most one value may be 0. The class with largest value p/t is predicted, where p is the original probability of that class and t is the class's threshold.
    """

    thresholds: "Param[List[float]]" = Param(
        Params._dummy(),
        "thresholds",
        "Thresholds in multi-class classification to adjust the probability of predicting each class. Array must have length equal to the number of classes, with values > 0, excepting that at most one value may be 0. The class with largest value p/t is predicted, where p is the original probability of that class and t is the class's threshold.",
        typeConverter=TypeConverters.toListFloat,
    )

    def __init__(self) -> None:
        super(HasThresholds, self).__init__()

    def getThresholds(self) -> List[float]:
        """
        Gets the value of thresholds or its default value.
        """
        return self.getOrDefault(self.thresholds)


class HasThreshold(Params):
    """
    Mixin for param threshold: threshold in binary classification prediction, in range [0, 1]
    """

    threshold: "Param[float]" = Param(
        Params._dummy(),
        "threshold",
        "threshold in binary classification prediction, in range [0, 1]",
        typeConverter=TypeConverters.toFloat,
    )

    def __init__(self) -> None:
        super(HasThreshold, self).__init__()
        self._setDefault(threshold=0.5)

    def getThreshold(self) -> float:
        """
        Gets the value of threshold or its default value.
        """
        return self.getOrDefault(self.threshold)


class HasWeightCol(Params):
    """
    Mixin for param weightCol: weight column name. If this is not set or empty, we treat all instance weights as 1.0.
    """

    weightCol: "Param[str]" = Param(
        Params._dummy(),
        "weightCol",
        "weight column name. If this is not set or empty, we treat all instance weights as 1.0.",
        typeConverter=TypeConverters.toString,
    )

    def __init__(self) -> None:
        super(HasWeightCol, self).__init__()

    def getWeightCol(self) -> str:
        """
        Gets the value of weightCol or its default value.
        """
        return self.getOrDefault(self.weightCol)


class HasSolver(Params):
    """
    Mixin for param solver: the solver algorithm for optimization. If this is not set or empty, default value is 'auto'.
    """

    solver: "Param[str]" = Param(
        Params._dummy(),
        "solver",
        "the solver algorithm for optimization. If this is not set or empty, default value is 'auto'.",
        typeConverter=TypeConverters.toString,
    )

    def __init__(self) -> None:
        super(HasSolver, self).__init__()
        self._setDefault(solver="auto")

    def getSolver(self) -> str:
        """
        Gets the value of solver or its default value.
        """
        return self.getOrDefault(self.solver)


class HasVarianceCol(Params):
    """
    Mixin for param varianceCol: column name for the biased sample variance of prediction.
    """

    varianceCol: "Param[str]" = Param(
        Params._dummy(),
        "varianceCol",
        "column name for the biased sample variance of prediction.",
        typeConverter=TypeConverters.toString,
    )

    def __init__(self) -> None:
        super(HasVarianceCol, self).__init__()

    def getVarianceCol(self) -> str:
        """
        Gets the value of varianceCol or its default value.
        """
        return self.getOrDefault(self.varianceCol)


class HasAggregationDepth(Params):
    """
    Mixin for param aggregationDepth: suggested depth for treeAggregate (>= 2).
    """

    aggregationDepth: "Param[int]" = Param(
        Params._dummy(),
        "aggregationDepth",
        "suggested depth for treeAggregate (>= 2).",
        typeConverter=TypeConverters.toInt,
    )

    def __init__(self) -> None:
        super(HasAggregationDepth, self).__init__()
        self._setDefault(aggregationDepth=2)

    def getAggregationDepth(self) -> int:
        """
        Gets the value of aggregationDepth or its default value.
        """
        return self.getOrDefault(self.aggregationDepth)


class HasParallelism(Params):
    """
    Mixin for param parallelism: the number of threads to use when running parallel algorithms (>= 1).
    """

    parallelism: "Param[int]" = Param(
        Params._dummy(),
        "parallelism",
        "the number of threads to use when running parallel algorithms (>= 1).",
        typeConverter=TypeConverters.toInt,
    )

    def __init__(self) -> None:
        super(HasParallelism, self).__init__()
        self._setDefault(parallelism=1)

    def getParallelism(self) -> int:
        """
        Gets the value of parallelism or its default value.
        """
        return self.getOrDefault(self.parallelism)


class HasCollectSubModels(Params):
    """
    Mixin for param collectSubModels: Param for whether to collect a list of sub-models trained during tuning. If set to false, then only the single best sub-model will be available after fitting. If set to true, then all sub-models will be available. Warning: For large models, collecting all sub-models can cause OOMs on the Spark driver.
    """

    collectSubModels: "Param[bool]" = Param(
        Params._dummy(),
        "collectSubModels",
        "Param for whether to collect a list of sub-models trained during tuning. If set to false, then only the single best sub-model will be available after fitting. If set to true, then all sub-models will be available. Warning: For large models, collecting all sub-models can cause OOMs on the Spark driver.",
        typeConverter=TypeConverters.toBoolean,
    )

    def __init__(self) -> None:
        super(HasCollectSubModels, self).__init__()
        self._setDefault(collectSubModels=False)

    def getCollectSubModels(self) -> bool:
        """
        Gets the value of collectSubModels or its default value.
        """
        return self.getOrDefault(self.collectSubModels)


class HasLoss(Params):
    """
    Mixin for param loss: the loss function to be optimized.
    """

    loss: "Param[str]" = Param(
        Params._dummy(),
        "loss",
        "the loss function to be optimized.",
        typeConverter=TypeConverters.toString,
    )

    def __init__(self) -> None:
        super(HasLoss, self).__init__()

    def getLoss(self) -> str:
        """
        Gets the value of loss or its default value.
        """
        return self.getOrDefault(self.loss)


class HasDistanceMeasure(Params):
    """
    Mixin for param distanceMeasure: the distance measure. Supported options: 'euclidean' and 'cosine'.
    """

    distanceMeasure: "Param[str]" = Param(
        Params._dummy(),
        "distanceMeasure",
        "the distance measure. Supported options: 'euclidean' and 'cosine'.",
        typeConverter=TypeConverters.toString,
    )

    def __init__(self) -> None:
        super(HasDistanceMeasure, self).__init__()
        self._setDefault(distanceMeasure="euclidean")

    def getDistanceMeasure(self) -> str:
        """
        Gets the value of distanceMeasure or its default value.
        """
        return self.getOrDefault(self.distanceMeasure)


class HasValidationIndicatorCol(Params):
    """
    Mixin for param validationIndicatorCol: name of the column that indicates whether each row is for training or for validation. False indicates training; true indicates validation.
    """

    validationIndicatorCol: "Param[str]" = Param(
        Params._dummy(),
        "validationIndicatorCol",
        "name of the column that indicates whether each row is for training or for validation. False indicates training; true indicates validation.",
        typeConverter=TypeConverters.toString,
    )

    def __init__(self) -> None:
        super(HasValidationIndicatorCol, self).__init__()

    def getValidationIndicatorCol(self) -> str:
        """
        Gets the value of validationIndicatorCol or its default value.
        """
        return self.getOrDefault(self.validationIndicatorCol)


class HasBlockSize(Params):
    """
    Mixin for param blockSize: block size for stacking input data in matrices. Data is stacked within partitions. If block size is more than remaining data in a partition then it is adjusted to the size of this data.
    """

    blockSize: "Param[int]" = Param(
        Params._dummy(),
        "blockSize",
        "block size for stacking input data in matrices. Data is stacked within partitions. If block size is more than remaining data in a partition then it is adjusted to the size of this data.",
        typeConverter=TypeConverters.toInt,
    )

    def __init__(self) -> None:
        super(HasBlockSize, self).__init__()

    def getBlockSize(self) -> int:
        """
        Gets the value of blockSize or its default value.
        """
        return self.getOrDefault(self.blockSize)


class HasMaxBlockSizeInMB(Params):
    """
    Mixin for param maxBlockSizeInMB: maximum memory in MB for stacking input data into blocks. Data is stacked within partitions. If more than remaining data size in a partition then it is adjusted to the data size. Default 0.0 represents choosing optimal value, depends on specific algorithm. Must be >= 0.
    """

    maxBlockSizeInMB: "Param[float]" = Param(
        Params._dummy(),
        "maxBlockSizeInMB",
        "maximum memory in MB for stacking input data into blocks. Data is stacked within partitions. If more than remaining data size in a partition then it is adjusted to the data size. Default 0.0 represents choosing optimal value, depends on specific algorithm. Must be >= 0.",
        typeConverter=TypeConverters.toFloat,
    )

    def __init__(self) -> None:
        super(HasMaxBlockSizeInMB, self).__init__()
        self._setDefault(maxBlockSizeInMB=0.0)

    def getMaxBlockSizeInMB(self) -> float:
        """
        Gets the value of maxBlockSizeInMB or its default value.
        """
        return self.getOrDefault(self.maxBlockSizeInMB)


class HasNumTrainWorkers(Params):
    """
    Mixin for param numTrainWorkers: number of training workers
    """

    numTrainWorkers: "Param[int]" = Param(
        Params._dummy(),
        "numTrainWorkers",
        "number of training workers",
        typeConverter=TypeConverters.toInt,
    )

    def __init__(self) -> None:
        super(HasNumTrainWorkers, self).__init__()
        self._setDefault(numTrainWorkers=1)

    def getNumTrainWorkers(self) -> int:
        """
        Gets the value of numTrainWorkers or its default value.
        """
        return self.getOrDefault(self.numTrainWorkers)


class HasBatchSize(Params):
    """
    Mixin for param batchSize: number of training batch size
    """

    batchSize: "Param[int]" = Param(
        Params._dummy(),
        "batchSize",
        "number of training batch size",
        typeConverter=TypeConverters.toInt,
    )

    def __init__(self) -> None:
        super(HasBatchSize, self).__init__()

    def getBatchSize(self) -> int:
        """
        Gets the value of batchSize or its default value.
        """
        return self.getOrDefault(self.batchSize)


class HasLearningRate(Params):
    """
    Mixin for param learningRate: learning rate for training
    """

    learningRate: "Param[float]" = Param(
        Params._dummy(),
        "learningRate",
        "learning rate for training",
        typeConverter=TypeConverters.toFloat,
    )

    def __init__(self) -> None:
        super(HasLearningRate, self).__init__()

    def getLearningRate(self) -> float:
        """
        Gets the value of learningRate or its default value.
        """
        return self.getOrDefault(self.learningRate)


class HasMomentum(Params):
    """
    Mixin for param momentum: momentum for training optimizer
    """

    momentum: "Param[float]" = Param(
        Params._dummy(),
        "momentum",
        "momentum for training optimizer",
        typeConverter=TypeConverters.toFloat,
    )

    def __init__(self) -> None:
        super(HasMomentum, self).__init__()

    def getMomentum(self) -> float:
        """
        Gets the value of momentum or its default value.
        """
        return self.getOrDefault(self.momentum)


class HasFeatureSizes(Params):
    """
    Mixin for param featureSizes: input feature size list for input columns of vector assembler
    """

    featureSizes: "Param[List[int]]" = Param(
        Params._dummy(),
        "featureSizes",
        "input feature size list for input columns of vector assembler",
        typeConverter=TypeConverters.toListInt,
    )

    def __init__(self) -> None:
        super(HasFeatureSizes, self).__init__()

    def getFeatureSizes(self) -> List[int]:
        """
        Gets the value of featureSizes or its default value.
        """
        return self.getOrDefault(self.featureSizes)