Skip to content
This repository has been archived by the owner on Nov 22, 2022. It is now read-only.

Commit

Permalink
Drop resourceinformation.pyi and fix related refs (#473)
Browse files Browse the repository at this point in the history
  • Loading branch information
zero323 committed Aug 29, 2020
1 parent 3d09cc7 commit ca9d091
Show file tree
Hide file tree
Showing 5 changed files with 4 additions and 15 deletions.
2 changes: 1 addition & 1 deletion third_party/3/pyspark/__init__.pyi
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ from pyspark.context import SparkContext as SparkContext
from pyspark.files import SparkFiles as SparkFiles
from pyspark.profiler import BasicProfiler as BasicProfiler, Profiler as Profiler
from pyspark.rdd import RDD as RDD, RDDBarrier as RDDBarrier
from pyspark.resourceinformation import ResourceInformation as ResourceInformation
from pyspark.resource import ResourceInformation as ResourceInformation
from pyspark.serializers import (
MarshalSerializer as MarshalSerializer,
PickleSerializer as PickleSerializer,
Expand Down
2 changes: 1 addition & 1 deletion third_party/3/pyspark/context.pyi
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ from pyspark.accumulators import Accumulator, AccumulatorParam
from pyspark.broadcast import Broadcast
from pyspark.conf import SparkConf
from pyspark.profiler import Profiler
from pyspark.resourceinformation import ResourceInformation
from pyspark.resource import ResourceInformation
from pyspark.rdd import RDD
from pyspark.serializers import Serializer
from pyspark.status import StatusTracker
Expand Down
11 changes: 0 additions & 11 deletions third_party/3/pyspark/resourceinformation.pyi

This file was deleted.

2 changes: 1 addition & 1 deletion third_party/3/pyspark/taskcontext.pyi
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

from typing import Any, Dict, List
from typing_extensions import Literal
from pyspark.resourceinformation import ResourceInformation
from pyspark.resource import ResourceInformation

class TaskContext:
def __new__(cls) -> TaskContext: ...
Expand Down
2 changes: 1 addition & 1 deletion third_party/3/pyspark/worker.pyi
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ from pyspark.broadcast import Broadcast as Broadcast
from pyspark.files import SparkFiles as SparkFiles
from pyspark.java_gateway import local_connect_and_auth as local_connect_and_auth
from pyspark.rdd import PythonEvalType as PythonEvalType
from pyspark.resourceinformation import ResourceInformation as ResourceInformation
from pyspark.resource import ResourceInformation as ResourceInformation
from pyspark.serializers import BatchedSerializer as BatchedSerializer, PickleSerializer as PickleSerializer, SpecialLengths as SpecialLengths, UTF8Deserializer as UTF8Deserializer, read_bool as read_bool, read_int as read_int, read_long as read_long, write_int as write_int, write_long as write_long, write_with_length as write_with_length # type: ignore[attr-defined]
from pyspark.sql.pandas.serializers import (
ArrowStreamPandasUDFSerializer as ArrowStreamPandasUDFSerializer,
Expand Down

0 comments on commit ca9d091

Please sign in to comment.