Skip to content
  •  
  •  
  •  
1 change: 1 addition & 0 deletions pyrightconfig.stricter.json
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@
"stubs/qrcode",
"stubs/redis",
"stubs/requests",
"stubs/sagemaker",
"stubs/setuptools",
"stubs/stripe",
"stubs/tqdm",
Expand Down
2 changes: 2 additions & 0 deletions stubs/sagemaker/METADATA.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
version = "2.168.*"
upstream_repository = "https://github.com/aws/sagemaker-python-sdk/"
49 changes: 49 additions & 0 deletions stubs/sagemaker/sagemaker/__init__.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
from sagemaker import estimator as estimator, parameter as parameter, tuner as tuner
from sagemaker.algorithm import AlgorithmEstimator as AlgorithmEstimator
from sagemaker.amazon.factorization_machines import (
FactorizationMachines as FactorizationMachines,
FactorizationMachinesModel as FactorizationMachinesModel,
FactorizationMachinesPredictor as FactorizationMachinesPredictor,
)
from sagemaker.amazon.ipinsights import (
IPInsights as IPInsights,
IPInsightsModel as IPInsightsModel,
IPInsightsPredictor as IPInsightsPredictor,
)
from sagemaker.amazon.kmeans import KMeans as KMeans, KMeansModel as KMeansModel, KMeansPredictor as KMeansPredictor
from sagemaker.amazon.knn import KNN as KNN, KNNModel as KNNModel, KNNPredictor as KNNPredictor
from sagemaker.amazon.lda import LDA as LDA, LDAModel as LDAModel, LDAPredictor as LDAPredictor
from sagemaker.amazon.linear_learner import (
LinearLearner as LinearLearner,
LinearLearnerModel as LinearLearnerModel,
LinearLearnerPredictor as LinearLearnerPredictor,
)
from sagemaker.amazon.ntm import NTM as NTM, NTMModel as NTMModel, NTMPredictor as NTMPredictor
from sagemaker.amazon.object2vec import Object2Vec as Object2Vec, Object2VecModel as Object2VecModel
from sagemaker.amazon.pca import PCA as PCA, PCAModel as PCAModel, PCAPredictor as PCAPredictor
from sagemaker.amazon.randomcutforest import (
RandomCutForest as RandomCutForest,
RandomCutForestModel as RandomCutForestModel,
RandomCutForestPredictor as RandomCutForestPredictor,
)
from sagemaker.analytics import (
HyperparameterTuningJobAnalytics as HyperparameterTuningJobAnalytics,
TrainingJobAnalytics as TrainingJobAnalytics,
)
from sagemaker.automl.automl import AutoML as AutoML, AutoMLInput as AutoMLInput, AutoMLJob as AutoMLJob
from sagemaker.automl.candidate_estimator import CandidateEstimator as CandidateEstimator, CandidateStep as CandidateStep
from sagemaker.inputs import TrainingInput as TrainingInput
from sagemaker.local.local_session import LocalSession as LocalSession
from sagemaker.model import Model as Model, ModelPackage as ModelPackage
from sagemaker.model_metrics import FileSource as FileSource, MetricsSource as MetricsSource, ModelMetrics as ModelMetrics
from sagemaker.pipeline import PipelineModel as PipelineModel
from sagemaker.predictor import Predictor as Predictor
from sagemaker.processing import Processor as Processor, ScriptProcessor as ScriptProcessor
from sagemaker.session import (
Session as Session,
container_def as container_def,
get_execution_role as get_execution_role,
get_model_package_args as get_model_package_args,
pipeline_container_def as pipeline_container_def,
production_variant as production_variant,
)
4 changes: 4 additions & 0 deletions stubs/sagemaker/sagemaker/_studio.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
from _typeshed import Incomplete

# NOTE(review): presumably the filename of the SageMaker Studio project
# configuration file looked up at runtime — confirm against the upstream SDK.
STUDIO_PROJECT_CONFIG: str
# Module-level logger; left as Incomplete rather than logging.Logger in this stub.
logger: Incomplete
14 changes: 14 additions & 0 deletions stubs/sagemaker/sagemaker/accept_types.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Return the list of supported accept types for a JumpStart model.
# NOTE(review): semantics inferred from the upstream function name only —
# this stub shows signature and return type (list[str]), nothing more.
def retrieve_options(
    region: str | None = None,
    model_id: str | None = None,
    model_version: str | None = None,
    tolerate_vulnerable_model: bool = False,
    tolerate_deprecated_model: bool = False,
) -> list[str]: ...
# Return the default accept type for a JumpStart model.  Mirrors the
# signature of retrieve_options above but yields a single value.
def retrieve_default(
    region: str | None = None,
    model_id: str | None = None,
    model_version: str | None = None,
    tolerate_vulnerable_model: bool = False,
    tolerate_deprecated_model: bool = False,
) -> str: ...
74 changes: 74 additions & 0 deletions stubs/sagemaker/sagemaker/algorithm.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
from _typeshed import Incomplete

from sagemaker.estimator import EstimatorBase
from sagemaker.inputs import FileSystemInput, TrainingInput
from sagemaker.session import Session
from sagemaker.workflow.entities import PipelineVariable

# Estimator driven by an AWS Marketplace algorithm ARN rather than a
# container image; extends the generic EstimatorBase.
class AlgorithmEstimator(EstimatorBase):
    # Attributes populated by the runtime implementation; exact types not
    # pinned in this stub.
    algorithm_arn: Incomplete
    algorithm_spec: Incomplete
    hyperparameter_definitions: Incomplete
    def __init__(
        self,
        algorithm_arn: str,
        role: str | None = None,
        instance_count: int | PipelineVariable | None = None,
        instance_type: str | PipelineVariable | None = None,
        volume_size: int | PipelineVariable = 30,
        volume_kms_key: str | PipelineVariable | None = None,
        max_run: int | PipelineVariable = 86400,
        input_mode: str | PipelineVariable = "File",
        output_path: str | PipelineVariable | None = None,
        output_kms_key: str | PipelineVariable | None = None,
        base_job_name: str | None = None,
        sagemaker_session: Session | None = None,
        hyperparameters: dict[str, str | PipelineVariable] | None = None,
        tags: list[dict[str, str | PipelineVariable]] | None = None,
        subnets: list[str | PipelineVariable] | None = None,
        security_group_ids: list[str | PipelineVariable] | None = None,
        model_uri: str | None = None,
        model_channel_name: str | PipelineVariable = "model",
        metric_definitions: list[dict[str, str | PipelineVariable]] | None = None,
        encrypt_inter_container_traffic: bool | PipelineVariable = False,
        use_spot_instances: bool | PipelineVariable = False,
        max_wait: int | PipelineVariable | None = None,
        **kwargs,
    ) -> None: ...
    def validate_train_spec(self) -> None: ...
    def set_hyperparameters(self, **kwargs) -> None: ...
    def hyperparameters(self): ...
    # Returns None: an algorithm estimator has no training image URI of its own.
    def training_image_uri(self) -> None: ...
    def enable_network_isolation(self): ...
    # create_model/transformer parameters are untyped pending stub completion
    # (Incomplete); defaults shown with `...` are runtime-defined.
    def create_model(
        self,
        role: Incomplete | None = None,
        predictor_cls: Incomplete | None = None,
        serializer=...,
        deserializer=...,
        vpc_config_override="VPC_CONFIG_DEFAULT",
        **kwargs,
    ): ...
    def transformer(
        self,
        instance_count,
        instance_type,
        strategy: Incomplete | None = None,
        assemble_with: Incomplete | None = None,
        output_path: Incomplete | None = None,
        output_kms_key: Incomplete | None = None,
        accept: Incomplete | None = None,
        env: Incomplete | None = None,
        max_concurrent_transforms: Incomplete | None = None,
        max_payload: Incomplete | None = None,
        tags: Incomplete | None = None,
        role: Incomplete | None = None,
        volume_kms_key: Incomplete | None = None,
    ): ...
    def fit(
        self,
        inputs: str | dict | TrainingInput | FileSystemInput | None = None,
        wait: bool = True,
        logs: bool = True,
        job_name: str | None = None,
    ): ...
Empty file.
83 changes: 83 additions & 0 deletions stubs/sagemaker/sagemaker/amazon/amazon_estimator.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
import abc
from _typeshed import Incomplete

from sagemaker.amazon.hyperparameter import Hyperparameter as hp
from sagemaker.estimator import EstimatorBase
from sagemaker.workflow.entities import PipelineVariable

# Module-level logger; not pinned to logging.Logger in this stub.
logger: Incomplete

# Abstract base for the first-party ("Amazon") algorithm estimators
# (KMeans, PCA, FactorizationMachines, ...); subclasses declare their
# hyperparameters as `hp` descriptors.
class AmazonAlgorithmEstimatorBase(EstimatorBase, metaclass=abc.ABCMeta):
    # Hyperparameter descriptors shared by all Amazon algorithms.
    feature_dim: hp
    mini_batch_size: hp
    # Filled in by each concrete subclass to locate its training image.
    repo_name: str | None
    repo_version: str | None
    DEFAULT_MINI_BATCH_SIZE: int | None
    def __init__(
        self,
        role: str | PipelineVariable | None = None,
        instance_count: int | PipelineVariable | None = None,
        instance_type: str | PipelineVariable | None = None,
        data_location: str | None = None,
        enable_network_isolation: bool | PipelineVariable = False,
        **kwargs,
    ) -> None: ...
    def training_image_uri(self): ...
    def hyperparameters(self): ...
    @property
    def data_location(self): ...
    @data_location.setter
    def data_location(self, data_location: str): ...
    def prepare_workflow_for_training(
        self, records: Incomplete | None = None, mini_batch_size: Incomplete | None = None, job_name: Incomplete | None = None
    ) -> None: ...
    latest_training_job: Incomplete
    # `RecordSet` is a forward reference to the class defined below in this
    # module — legal in stub files, where annotations are never evaluated.
    def fit(
        self,
        records: RecordSet,
        mini_batch_size: int | None = None,
        wait: bool = True,
        logs: bool = True,
        job_name: str | None = None,
        experiment_config: dict[str, str] | None = None,
    ): ...
    def record_set(self, train, labels: Incomplete | None = None, channel: str = "train", encrypt: bool = False): ...

# Describes a set of training records already stored in S3, for use as
# input to AmazonAlgorithmEstimatorBase.fit.
class RecordSet:
    s3_data: Incomplete
    feature_dim: Incomplete
    num_records: Incomplete
    s3_data_type: Incomplete
    channel: Incomplete
    def __init__(
        self,
        s3_data: str | PipelineVariable,
        num_records: int,
        feature_dim: int,
        s3_data_type: str | PipelineVariable = "ManifestFile",
        channel: str | PipelineVariable = "train",
    ) -> None: ...
    def data_channel(self): ...
    def records_s3_input(self): ...

# Like RecordSet, but for records stored on a file system (e.g. EFS/FSx)
# rather than S3.  NOTE(review): backend inferred from parameter names
# (file_system_id/type) — confirm against the upstream SDK.
class FileSystemRecordSet:
    file_system_input: Incomplete
    feature_dim: Incomplete
    num_records: Incomplete
    channel: Incomplete
    def __init__(
        self,
        file_system_id,
        file_system_type,
        directory_path,
        num_records,
        feature_dim,
        file_system_access_mode: str = "ro",
        channel: str = "train",
    ) -> None: ...
    def data_channel(self): ...

# Module-level helpers; parameters left untyped pending stub completion.
def upload_numpy_to_s3_shards(
    num_shards, s3, bucket, key_prefix, array, labels: Incomplete | None = None, encrypt: bool = False
): ...
# Resolve the training image URI for a first-party algorithm repo/version.
def get_image_uri(region_name, repo_name, repo_version: str = "1"): ...
24 changes: 24 additions & 0 deletions stubs/sagemaker/sagemaker/amazon/common.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
from _typeshed import Incomplete
from collections.abc import Generator

from sagemaker.deserializers import SimpleBaseDeserializer
from sagemaker.serializers import SimpleBaseSerializer

# Serializer for the RecordIO-protobuf wire format used by the Amazon
# first-party algorithms.
class RecordSerializer(SimpleBaseSerializer):
    def __init__(self, content_type: str = "application/x-recordio-protobuf") -> None: ...
    def serialize(self, data): ...

# Deserializer counterpart accepting the same content type.
class RecordDeserializer(SimpleBaseDeserializer):
    def __init__(self, accept: str = "application/x-recordio-protobuf") -> None: ...
    def deserialize(self, data, content_type): ...

# Low-level writers/reader for RecordIO-protobuf files; `file`/`array`
# parameter types are not pinned in this stub.
def write_numpy_to_dense_tensor(file, array, labels: Incomplete | None = None) -> None: ...
def write_spmatrix_to_sparse_tensor(file, array, labels: Incomplete | None = None) -> None: ...
def read_records(file): ...

# Padding constant used by the RecordIO framing; type not pinned here.
padding: Incomplete

# Lazily yields records from an open RecordIO stream.
def read_recordio(f) -> Generator[Incomplete, None, None]: ...

# NOTE(review): presumably backward-compatibility aliases for the renamed
# RecordSerializer/RecordDeserializer classes — confirm against upstream.
numpy_to_record_serializer: Incomplete
record_deserializer: Incomplete
76 changes: 76 additions & 0 deletions stubs/sagemaker/sagemaker/amazon/factorization_machines.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
from _typeshed import Incomplete

from sagemaker.amazon.amazon_estimator import AmazonAlgorithmEstimatorBase
from sagemaker.amazon.hyperparameter import Hyperparameter as hp
from sagemaker.model import Model
from sagemaker.predictor import Predictor
from sagemaker.session import Session
from sagemaker.workflow.entities import PipelineVariable

# Estimator for the built-in Factorization Machines algorithm.  Every
# class attribute below is an `hp` descriptor mirroring one keyword of
# __init__; feature_dim/mini_batch_size come from the base class.
class FactorizationMachines(AmazonAlgorithmEstimatorBase):
    repo_name: str
    repo_version: str
    # Core model hyperparameters.
    num_factors: hp
    predictor_type: hp
    epochs: hp
    clip_gradient: hp
    eps: hp
    rescale_grad: hp
    # Learning rates and weight decay per term (bias/linear/factors).
    bias_lr: hp
    linear_lr: hp
    factors_lr: hp
    bias_wd: hp
    linear_wd: hp
    factors_wd: hp
    # Initialization scheme per term.
    bias_init_method: hp
    bias_init_scale: hp
    bias_init_sigma: hp
    bias_init_value: hp
    linear_init_method: hp
    linear_init_scale: hp
    linear_init_sigma: hp
    linear_init_value: hp
    factors_init_method: hp
    factors_init_scale: hp
    factors_init_sigma: hp
    factors_init_value: hp
    def __init__(
        self,
        role: str | PipelineVariable | None = None,
        instance_count: int | PipelineVariable | None = None,
        instance_type: str | PipelineVariable | None = None,
        num_factors: int | None = None,
        predictor_type: str | None = None,
        epochs: int | None = None,
        clip_gradient: float | None = None,
        eps: float | None = None,
        rescale_grad: float | None = None,
        bias_lr: float | None = None,
        linear_lr: float | None = None,
        factors_lr: float | None = None,
        bias_wd: float | None = None,
        linear_wd: float | None = None,
        factors_wd: float | None = None,
        bias_init_method: str | None = None,
        bias_init_scale: float | None = None,
        bias_init_sigma: float | None = None,
        bias_init_value: float | None = None,
        linear_init_method: str | None = None,
        linear_init_scale: float | None = None,
        linear_init_sigma: float | None = None,
        linear_init_value: float | None = None,
        factors_init_method: str | None = None,
        factors_init_scale: float | None = None,
        factors_init_sigma: float | None = None,
        factors_init_value: float | None = None,
        **kwargs,
    ) -> None: ...
    def create_model(self, vpc_config_override="VPC_CONFIG_DEFAULT", **kwargs): ...

# Predictor for a deployed Factorization Machines endpoint; serializer and
# deserializer defaults (`...`) are runtime-defined, not pinned here.
class FactorizationMachinesPredictor(Predictor):
    def __init__(self, endpoint_name, sagemaker_session: Incomplete | None = None, serializer=..., deserializer=...) -> None: ...

# Model wrapper around trained Factorization Machines artifacts in S3.
class FactorizationMachinesModel(Model):
    def __init__(
        self, model_data: str | PipelineVariable, role: str | None = None, sagemaker_session: Session | None = None, **kwargs
    ) -> None: ...
14 changes: 14 additions & 0 deletions stubs/sagemaker/sagemaker/amazon/hyperparameter.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
from _typeshed import Incomplete

# Descriptor class used for algorithm hyperparameters: implements the
# __get__/__set__/__delete__ protocol so subclass attributes declared as
# `name: hp` validate assigned values.
class Hyperparameter:
    # Validation callable and the message raised on failure.
    validation: Incomplete
    validation_message: Incomplete
    name: Incomplete
    data_type: Incomplete
    def __init__(self, name, validate=..., validation_message: str = "", data_type=...) -> None: ...
    def validate(self, value) -> None: ...
    def __get__(self, obj, objtype): ...
    def __set__(self, obj, value) -> None: ...
    def __delete__(self, obj) -> None: ...
    # Serialize every Hyperparameter set on `obj` into a plain mapping.
    # NOTE(review): exact return shape not visible in this stub.
    @staticmethod
    def serialize_all(obj): ...
Loading