lentiq.models.spark_sql_configuration_database module
# coding: utf-8

"""
Swagger-generated model: base configuration for a Spark SQL database.

Concrete database configurations (currently only PostgreSQL) are selected
at deserialization time via the ``type`` discriminator field.
"""

import pprint
import re  # noqa: F401


class SparkSQLConfigurationDatabase(object):
    """Base Swagger model for a Spark SQL database configuration.

    Follows the swagger-codegen model conventions:

    Attributes:
        swagger_types (dict): The key is attribute name
                              and the value is attribute type.
        attribute_map (dict): The key is attribute name
                              and the value is json key in definition.
    """

    swagger_types = {
        'type': 'str'
    }

    attribute_map = {
        'type': 'type'
    }

    # Maps a discriminator value to the *name* of the concrete model class
    # that should be instantiated for it (resolved by the API client).
    discriminator_value_class_map = {
        'SparkSQLConfigurationDatabasePostgreSQL': 'SparkSQLConfigurationDatabasePostgreSQL'
    }

    # Attribute whose value selects the concrete child model.
    discriminator = 'type'

    def __init__(self, type=None):  # noqa: E501
        """SparkSQLConfigurationDatabase - a model defined in Swagger

        :param type: Discriminator value identifying the concrete
            configuration subclass. Required; the property setter
            raises ``ValueError`` if it is ``None``.
        """
        self._type = None
        self.type = type

    @property
    def type(self):
        """Gets the type of this SparkSQLConfigurationDatabase.  # noqa: E501

        :return: The type of this SparkSQLConfigurationDatabase.  # noqa: E501
        :rtype: str
        """
        return self._type

    @type.setter
    def type(self, type):
        """Sets the type of this SparkSQLConfigurationDatabase.

        :param type: The type of this SparkSQLConfigurationDatabase.  # noqa: E501
        :type: str
        :raises ValueError: if `type` is `None` (it is a required field).
        """
        if type is None:
            raise ValueError("Invalid value for `type`, must not be `None`")  # noqa: E501

        self._type = type

    def get_real_child_model(self, data):
        """Return the concrete child model class *name* for `data`.

        :param data: deserialized dict that contains the discriminator key.
        :return: class name string from ``discriminator_value_class_map``,
            or ``None`` for an unknown discriminator value.
        :raises KeyError: if `data` lacks the discriminator key.
        """
        discriminator_value = data[self.discriminator]
        return self.discriminator_value_class_map.get(discriminator_value)

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, SparkSQLConfigurationDatabase):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
Classes
class SparkSQLConfigurationDatabase
class SparkSQLConfigurationDatabase(object): """ """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'type': 'str' } attribute_map = { 'type': 'type' } discriminator_value_class_map = { 'SparkSQLConfigurationDatabasePostgreSQL': 'SparkSQLConfigurationDatabasePostgreSQL' } discriminator = 'type' def __init__(self, type=None): # noqa: E501 """SparkSQLConfigurationDatabase - a model defined in Swagger""" # noqa: E501 self._type = None self.type = type @property def type(self): """Gets the type of this SparkSQLConfigurationDatabase. # noqa: E501 :return: The type of this SparkSQLConfigurationDatabase. # noqa: E501 :rtype: str """ return self._type @type.setter def type(self, type): """Sets the type of this SparkSQLConfigurationDatabase. :param type: The type of this SparkSQLConfigurationDatabase. # noqa: E501 :type: str """ if type is None: raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 self._type = type def get_real_child_model(self, data): """Returns the real base class specified by the discriminator""" discriminator_value = data[self.discriminator] #.lower() return self.discriminator_value_class_map.get(discriminator_value) def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(SparkSQLConfigurationDatabase, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the 
string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, SparkSQLConfigurationDatabase): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
Ancestors (in MRO)
- SparkSQLConfigurationDatabase
- builtins.object
Class variables
var attribute_map
var discriminator
var discriminator_value_class_map
var swagger_types
Methods
def __init__(
self, type=None)
SparkSQLConfigurationDatabase - a model defined in Swagger
def __init__(self, type=None): # noqa: E501 """SparkSQLConfigurationDatabase - a model defined in Swagger""" # noqa: E501 self._type = None self.type = type
def get_real_child_model(
self, data)
Returns the name of the concrete child model class selected by the discriminator value
def get_real_child_model(self, data): """Returns the real base class specified by the discriminator""" discriminator_value = data[self.discriminator] #.lower() return self.discriminator_value_class_map.get(discriminator_value)
def to_dict(
self)
Returns the model properties as a dict
def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(SparkSQLConfigurationDatabase, dict): for key, value in self.items(): result[key] = value return result
def to_str(
self)
Returns the string representation of the model
def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict())
Instance variables
var type