[SPARK-19389][ML][PYTHON][DOC] Minor doc fixes for ML Python Params and LinearSVC #16723

Closed · wants to merge 2 commits
@@ -47,7 +47,8 @@ private[classification] trait LinearSVCParams extends ClassifierParams with HasR
/**
* :: Experimental ::
*
* Linear SVM Classifier (https://en.wikipedia.org/wiki/Support_vector_machine#Linear_SVM)
* <a href = "https://en.wikipedia.org/wiki/Support_vector_machine#Linear_SVM">
* Linear SVM Classifier</a>
*
* This binary classifier optimizes the Hinge Loss using the OWLQN optimizer.
*
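The Scaladoc change above only reworks the Wikipedia link; as a quick illustration of what the estimator does from the Python side, here is a minimal sketch, assuming a local SparkSession and a tiny hand-built DataFrame (none of this is part of the diff):

```python
from pyspark.sql import SparkSession
from pyspark.ml.linalg import Vectors
from pyspark.ml.classification import LinearSVC

spark = SparkSession.builder.master("local[2]").appName("linearsvc-sketch").getOrCreate()

# Tiny hand-built binary-classification dataset (illustrative only).
df = spark.createDataFrame([
    (1.0, Vectors.dense(0.0, 5.0)),
    (0.0, Vectors.dense(1.0, 2.0)),
    (1.0, Vectors.dense(2.0, 1.0)),
    (0.0, Vectors.dense(3.0, 3.0)),
], ["label", "features"])

# Per the docs above: a binary classifier that optimizes the hinge loss with the OWLQN optimizer.
svm = LinearSVC(maxIter=10, regParam=0.1)
model = svm.fit(df)
model.transform(df).select("features", "rawPrediction", "prediction").show()
```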
9 changes: 5 additions & 4 deletions python/pyspark/ml/classification.py
@@ -65,7 +65,10 @@ class LinearSVC(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol, Ha
HasRegParam, HasTol, HasRawPredictionCol, HasFitIntercept, HasStandardization,
HasThreshold, HasWeightCol, HasAggregationDepth, JavaMLWritable, JavaMLReadable):
"""
.. note:: Experimental

`Linear SVM Classifier <https://en.wikipedia.org/wiki/Support_vector_machine#Linear_SVM>`_

This binary classifier optimizes the Hinge Loss using the OWLQN optimizer.

>>> from pyspark.sql import Row
@@ -89,10 +92,6 @@ class LinearSVC(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol, Ha
1.0
>>> result.rawPrediction
DenseVector([-1.4831, 1.4831])
>>> svm.setParams("vector")
Traceback (most recent call last):
...
TypeError: Method setParams forces keyword arguments.
>>> svm_path = temp_path + "/svm"
>>> svm.save(svm_path)
>>> svm2 = LinearSVC.load(svm_path)
@@ -150,6 +149,8 @@ def _create_model(self, java_model):

class LinearSVCModel(JavaModel, JavaClassificationModel, JavaMLWritable, JavaMLReadable):
"""
.. note:: Experimental

Model fitted by LinearSVC.

.. versionadded:: 2.2.0
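The classification.py hunks above drop the keyword-arguments doctest and add the Experimental note to LinearSVCModel. A short sketch of both points, reusing the `svm` and `df` objects from the previous example (again illustrative, not part of the PR):

```python
# Fit and inspect the resulting LinearSVCModel: one coefficient per feature plus an intercept.
model = svm.fit(df)
print(model.coefficients)
print(model.intercept)

# setParams only accepts keyword arguments; a positional call such as
# svm.setParams("vector") raises TypeError, which is what the removed doctest demonstrated.
svm.setParams(maxIter=5, regParam=0.01)
```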
13 changes: 0 additions & 13 deletions python/pyspark/ml/param/__init__.py
@@ -24,11 +24,9 @@
from abc import ABCMeta
import copy
import numpy as np
import warnings

from py4j.java_gateway import JavaObject

from pyspark import since
from pyspark.ml.linalg import DenseVector, Vector
from pyspark.ml.util import Identifiable

@@ -251,7 +249,6 @@ def _copy_params(self):
setattr(self, name, param._copy_new_parent(self))

@property
@since("1.3.0")
def params(self):
"""
Returns all params ordered by name. The default implementation
@@ -264,7 +261,6 @@ def params(self):
not isinstance(getattr(type(self), x, None), property)]))
return self._params

@since("1.4.0")
def explainParam(self, param):
"""
Explains a single param and returns its name, doc, and optional
@@ -282,15 +278,13 @@ def explainParam(self, param):
valueStr = "(" + ", ".join(values) + ")"
return "%s: %s %s" % (param.name, param.doc, valueStr)

@since("1.4.0")
def explainParams(self):
"""
Returns the documentation of all params with their optionally
default values and user-supplied values.
"""
return "\n".join([self.explainParam(param) for param in self.params])

@since("1.4.0")
def getParam(self, paramName):
"""
Gets a param by its name.
@@ -301,31 +295,27 @@ def getParam(self, paramName):
else:
raise ValueError("Cannot find param with name %s." % paramName)

@since("1.4.0")
def isSet(self, param):
"""
Checks whether a param is explicitly set by user.
"""
param = self._resolveParam(param)
return param in self._paramMap

@since("1.4.0")
def hasDefault(self, param):
"""
Checks whether a param has a default value.
"""
param = self._resolveParam(param)
return param in self._defaultParamMap

@since("1.4.0")
def isDefined(self, param):
"""
Checks whether a param is explicitly set by user or has
a default value.
"""
return self.isSet(param) or self.hasDefault(param)

@since("1.4.0")
def hasParam(self, paramName):
"""
Tests whether this instance contains a param with a given
@@ -337,7 +327,6 @@ def hasParam(self, paramName):
else:
raise TypeError("hasParam(): paramName must be a string")

@since("1.4.0")
def getOrDefault(self, param):
"""
Gets the value of a param in the user-supplied param map or its
@@ -349,7 +338,6 @@ def getOrDefault(self, param):
else:
return self._defaultParamMap[param]

@since("1.4.0")
def extractParamMap(self, extra=None):
"""
Extracts the embedded default param values and user-supplied
@@ -368,7 +356,6 @@ def extractParamMap(self, extra=None):
paramMap.update(extra)
return paramMap

@since("1.4.0")
def copy(self, extra=None):
"""
Creates a copy of this instance with the same uid and some
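The Params methods whose `@since` decorators are removed above are easiest to see on a concrete estimator. A small sketch, assuming the LinearSVC estimator from the earlier examples (any Params subclass would behave the same way):

```python
from pyspark.ml.classification import LinearSVC

svm = LinearSVC(maxIter=5)

# explainParam / explainParams render each param's name, doc, default and current value.
print(svm.explainParam("maxIter"))
print(svm.explainParams())

# isSet: explicitly set by the user; hasDefault: has a default; isDefined: either of the two.
print(svm.isSet("maxIter"), svm.hasDefault("maxIter"), svm.isDefined("maxIter"))
print(svm.isSet("regParam"), svm.hasDefault("regParam"), svm.isDefined("regParam"))

# getOrDefault returns the user-supplied value, falling back to the default.
print(svm.getOrDefault("regParam"))

# extractParamMap merges defaults, user-set values and any extra overrides;
# copy() returns a new instance with the same uid and param values.
param_map = svm.extractParamMap({svm.tol: 1e-4})
svm_copy = svm.copy()
```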