diff --git a/third_party/3/pyspark/__init__.pyi b/third_party/3/pyspark/__init__.pyi
index fb371f6c..9d9b05c7 100644
--- a/third_party/3/pyspark/__init__.pyi
+++ b/third_party/3/pyspark/__init__.pyi
@@ -14,7 +14,7 @@ from pyspark.context import SparkContext as SparkContext
 from pyspark.files import SparkFiles as SparkFiles
 from pyspark.profiler import BasicProfiler as BasicProfiler, Profiler as Profiler
 from pyspark.rdd import RDD as RDD, RDDBarrier as RDDBarrier
-from pyspark.resourceinformation import ResourceInformation as ResourceInformation
+from pyspark.resource import ResourceInformation as ResourceInformation
 from pyspark.serializers import (
     MarshalSerializer as MarshalSerializer,
     PickleSerializer as PickleSerializer,
diff --git a/third_party/3/pyspark/context.pyi b/third_party/3/pyspark/context.pyi
index fefd56e7..b063f69b 100644
--- a/third_party/3/pyspark/context.pyi
+++ b/third_party/3/pyspark/context.pyi
@@ -9,7 +9,7 @@ from pyspark.accumulators import Accumulator, AccumulatorParam
 from pyspark.broadcast import Broadcast
 from pyspark.conf import SparkConf
 from pyspark.profiler import Profiler
-from pyspark.resourceinformation import ResourceInformation
+from pyspark.resource import ResourceInformation
 from pyspark.rdd import RDD
 from pyspark.serializers import Serializer
 from pyspark.status import StatusTracker
diff --git a/third_party/3/pyspark/resourceinformation.pyi b/third_party/3/pyspark/resourceinformation.pyi
deleted file mode 100644
index f1a6f7f9..00000000
--- a/third_party/3/pyspark/resourceinformation.pyi
+++ /dev/null
@@ -1,11 +0,0 @@
-# Stubs for pyspark.resourceinformation (Python 3)
-#
-
-from typing import List
-
-class ResourceInformation:
-    def __init__(self, name: str, addresses: List[str]) -> None: ...
-    @property
-    def name(self) -> str: ...
-    @property
-    def addresses(self) -> List[str]: ...
diff --git a/third_party/3/pyspark/taskcontext.pyi b/third_party/3/pyspark/taskcontext.pyi
index 8b4f8a50..af3d7755 100644
--- a/third_party/3/pyspark/taskcontext.pyi
+++ b/third_party/3/pyspark/taskcontext.pyi
@@ -3,7 +3,7 @@ from typing import Any, Dict, List
 
 from typing_extensions import Literal
 
-from pyspark.resourceinformation import ResourceInformation
+from pyspark.resource import ResourceInformation
 
 class TaskContext:
     def __new__(cls) -> TaskContext: ...
diff --git a/third_party/3/pyspark/worker.pyi b/third_party/3/pyspark/worker.pyi
index d5bcef99..041ef6e0 100644
--- a/third_party/3/pyspark/worker.pyi
+++ b/third_party/3/pyspark/worker.pyi
@@ -3,7 +3,7 @@ from pyspark.broadcast import Broadcast as Broadcast
 from pyspark.files import SparkFiles as SparkFiles
 from pyspark.java_gateway import local_connect_and_auth as local_connect_and_auth
 from pyspark.rdd import PythonEvalType as PythonEvalType
-from pyspark.resourceinformation import ResourceInformation as ResourceInformation
+from pyspark.resource import ResourceInformation as ResourceInformation
 from pyspark.serializers import BatchedSerializer as BatchedSerializer, PickleSerializer as PickleSerializer, SpecialLengths as SpecialLengths, UTF8Deserializer as UTF8Deserializer, read_bool as read_bool, read_int as read_int, read_long as read_long, write_int as write_int, write_long as write_long, write_with_length as write_with_length  # type: ignore[attr-defined]
 from pyspark.sql.pandas.serializers import (
     ArrowStreamPandasUDFSerializer as ArrowStreamPandasUDFSerializer,