How to use typing.Optional in Python's typing module

To help you get started, we've selected a few typing.Optional examples, based on popular ways it is used in public open-source projects.

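Before looking at the project code below, here is a minimal, self-contained sketch of the pattern every example relies on: Optional[X] is shorthand for Union[X, None], i.e. "either an X or None". The function and names below are invented for illustration.

from typing import Optional

def find_user(user_id: int, cache: Optional[dict] = None) -> Optional[str]:
    """Return the user's name, or None if the user is unknown."""
    if cache is not None and user_id in cache:
        return cache[user_id]
    return None  # the absent value is spelled None, matching Optional[str]

name: Optional[str] = find_user(42)
if name is not None:  # narrow Optional[str] to str before using it
    print(name.upper())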

gaphor / gaphor / gaphor / UML / presentation.py (view on GitHub)
    def __init__(self, id=None, model=None):
        super().__init__(id, model)

        def update(event):
            self.request_update()

        self._watcher = self.watcher(default_handler=update)

        self.watch("subject")

    subject: umlproperty[S, S] = association(
        "subject", Element, upper=1, opposite="presentation"
    )

    canvas: Optional["Canvas"]

    matrix: "Matrix"

    def watch(self, path, handler=None):
        """
        Watch a certain path of elements starting with the DiagramItem.
        The handler is optional and will default to a simple
        self.request_update().

        Watches should be set in the constructor, so they can be registered
        and unregistered in one shot.

        This interface is fluent (it returns self).
        """
        self._watcher.watch(path, handler)
        return self
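
The gaphor snippet above annotates canvas as Optional["Canvas"], combining Optional with a string forward reference for an attribute that may not be set yet. A minimal sketch of that combination, using invented class names:

from typing import Optional

class Node:
    # "Graph" is a forward reference: the class is defined further down,
    # so its name appears as a string inside Optional.
    graph: Optional["Graph"] = None

class Graph:
    def add(self, node: Node) -> None:
        node.graph = self  # the back-reference is filled in later

n = Node()
assert n.graph is None
Graph().add(n)
assert n.graph is not None
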
timheap / flask-saml2 / flask_saml2 / idp / sphandler.py (view on GitHub)
from flask_saml2.xml_templates import XmlTemplate

from .parser import AuthnRequestParser, LogoutRequestParser
from .xml_templates import AssertionTemplate, ResponseTemplate


class SPHandler(object):
    """
    Handles incoming SAML requests from a specific Service Provider for
    a running Identity Provider.

    Sub-classes should provide Service Provider-specific functionality.
    """
    entity_id: str
    acs_url = None
    certificate: Optional[X509] = None
    display_name: str = None

    subject_format = 'urn:oasis:names:tc:SAML:2.0:nameid-format:email'
    assertion_template = AssertionTemplate
    response_template = ResponseTemplate

    def __init__(
        self, idp,
        *,
        entity_id: str,
        acs_url: str = None,
        certificate: Optional[X509] = None,
        display_name: str = None,
    ):
        self.idp = idp
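
SPHandler declares certificate as a keyword-only constructor argument typed Optional[X509] with a default of None, meaning "pass a certificate or omit it". Note that acs_url: str = None and display_name: str = None rely on the older implicit-Optional behaviour; current type checkers generally require the explicit spelling. A small sketch of the explicit form, with a hypothetical handler class and invented values:

from typing import Optional

class Handler:
    display_name: Optional[str] = None  # explicitly a str or None

    def __init__(self, *, entity_id: str, acs_url: Optional[str] = None):
        self.entity_id = entity_id
        self.acs_url = acs_url  # None simply means "not configured"

h = Handler(entity_id="https://idp.example.com/metadata")
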
HazyResearch / fonduer / src / fonduer / candidates / mentions.py (view on GitHub)
"""Extract mentions from the given Document.

        :param doc: A document to process.
        :param clear: Whether or not to clear the existing database entries.
        """

        # Reattach doc with the current session or DetachedInstanceError happens
        doc = self.session.merge(doc)
        # Iterate over each mention class
        for i, mention_class in enumerate(self.mention_classes):
            tc_to_insert: DefaultDict[Type, List[Dict[str, Any]]] = defaultdict(list)
            # Generate TemporaryContexts that are children of the context using
            # the mention_space and filtered by the Matcher
            self.child_context_set.clear()
            for tc in self.matchers[i].apply(self.mention_spaces[i].apply(doc)):
                rec: Optional[Dict[str, Any]] = tc._load_id_or_insert(self.session)
                if rec:
                    tc_to_insert[tc._get_table()].append(rec)
                self.child_context_set.add(tc)

            # Bulk insert temporary contexts
            for table, records in tc_to_insert.items():
                stmt = insert(table.__table__).values(records)
                self.session.execute(stmt)

            # Generates and persists mentions
            mention_args = {"document_id": doc.id}
            for child_context in self.child_context_set:
                # Assemble mention arguments
                for arg_name in mention_class.__argnames__:
                    mention_args[arg_name + "_id"] = child_context.id
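
In the fonduer extractor, Optional annotates a local variable whose value comes from a call that may return None (rec: Optional[Dict[str, Any]]), and the check on rec guards the insert. A stripped-down sketch of the same shape, with invented names:

from typing import Any, Dict, List, Optional

def load_or_none(key: str, store: Dict[str, Any]) -> Optional[Dict[str, Any]]:
    return store.get(key)  # dict.get returns None when the key is missing

pending: List[Dict[str, Any]] = []
rec: Optional[Dict[str, Any]] = load_or_none("tc-1", {})
if rec is not None:  # only collect a record that actually came back
    pending.append(rec)
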
HexDecimal / roguelike-tutorial / items / __init__.py (view on GitHub)
    def __init__(self) -> None:
        self.owner: Optional[Inventory] = None
        self.location: Optional[Location] = None
acconeer / acconeer-python-exploration / src / acconeer / exptool / recording.py (view on GitHub)
import acconeer.exptool.structs.configbase as cb
from acconeer.exptool import configs, modes


@attr.s
class Record:
    # Sensor session related (required):
    mode = attr.ib(type=modes.Mode)               # save as str (Mode.name), restore with get_mode
    sensor_config_dump = attr.ib(type=str)        # cb._dumps
    session_info = attr.ib(type=dict)             # save/restore with json.dumps/loads
    data = attr.ib(default=None)                  # [np.array], saved as np.array, restore as is
    data_info = attr.ib(type=list, factory=list)  # [[{...}]], save/restore with json.dumps/loads

    # Processing related (optional):
    module_key = attr.ib(type=Optional[str], default=None)
    processing_config_dump = attr.ib(type=Optional[str], default=None)  # cb._dumps

    # Other (optional):
    rss_version = attr.ib(type=Optional[str], default=None)
    lib_version = attr.ib(type=Optional[str], default=None)
    timestamp = attr.ib(type=Optional[str], default=None)
    sample_times = attr.ib(default=None)
    note = attr.ib(type=Optional[str], default=None)  # not to be used internally

    # Legacy (optional):
    legacy_processing_config_dump = attr.ib(type=Optional[str], default=None)

    def __attrs_post_init__(self):
        self._iter_index = None

    def __iter__(self):
        self._iter_index = 0
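
The Record class passes Optional[...] as the type= argument of attr.ib for fields that default to None. The same idea written with the annotation-based attrs style (a sketch, assuming the attrs package; the fields shown are a reduced subset):

from typing import Optional

import attr

@attr.s(auto_attribs=True)
class Record:
    sensor_config_dump: str           # required field
    module_key: Optional[str] = None  # optional metadata, absent by default
    rss_version: Optional[str] = None
    note: Optional[str] = None
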
Instagram / LibCST / libcst / codemod / _codemod.py (view on GitHub)
    def transform_module(self, tree: Module) -> Module:
        """
        Transform entrypoint which handles multi-pass logic and metadata calculation
        for you. This is the method that you should call if you wish to
        invoke a codemod directly.
        """

        if not self.should_allow_multiple_passes():
            with self._handle_metadata_reference(tree) as tree_with_metadata:
                return self.transform_module_impl(tree_with_metadata)

        # We allow multiple passes, so we execute 1+ passes until there are
        # no more changes.
        before: str = tree.code
        after: Optional[str] = None
        while before != after:
            if after is not None:
                tree = parse_module(after)
                before = after
            with self._handle_metadata_reference(tree) as tree_with_metadata:
                tree = self.transform_module_impl(tree_with_metadata)
            after = tree.code
        return tree
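
transform_module uses Optional[str] as a "not yet computed" sentinel: after starts as None, so the loop always runs at least once, then passes repeat until the generated code stops changing. The bare loop pattern, with a made-up transform standing in for transform_module_impl:

from typing import Optional

def normalize(text: str) -> str:
    return text.replace("  ", " ")  # stand-in for one codemod pass

before: str = "a  b    c"
after: Optional[str] = None  # None means no pass has run yet
while before != after:
    if after is not None:
        before = after
    after = normalize(before)
print(after)  # fixed point reached: "a b c"
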
anbergem / opencv_wrapper / opencv_wrapper / model.py (view on GitHub)
    def __post_init__(self) -> None:
        self._moments: Optional[Dict[str, float]] = None
        self._bounding_rect: Optional[Rect] = None
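
In the opencv_wrapper model, _moments and _bounding_rect start out as None; Optional is a natural fit for this kind of lazily computed value. A sketch of the lazy-cache pattern with a hypothetical class (not the actual opencv_wrapper implementation):

from typing import Dict, Optional

class Contour:
    def __init__(self) -> None:
        self._moments: Optional[Dict[str, float]] = None  # not computed yet

    @property
    def moments(self) -> Dict[str, float]:
        if self._moments is None:         # compute on first access only
            self._moments = {"m00": 1.0}  # placeholder for the real computation
        return self._moments
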
qtumproject / qtum-electrum / electrum / util.py (view on GitHub)
    assert asyncio.iscoroutinefunction(func), 'func needs to be a coroutine'
    async def wrapper(*args, **kwargs):
        try:
            return await func(*args, **kwargs)
        except asyncio.CancelledError:
            # note: with python 3.8, CancelledError no longer inherits Exception, so this catch is redundant
            raise
        except Exception as e:
            pass
    return wrapper


class TxMinedInfo(NamedTuple):
    height: int                        # height of block that mined tx
    conf: Optional[int] = None         # number of confirmations, SPV verified (None means unknown)
    timestamp: Optional[int] = None    # timestamp of block that mined tx
    txpos: Optional[int] = None        # position of tx in serialized block
    header_hash: Optional[str] = None  # hash of block that mined tx


def make_aiohttp_session(proxy: Optional[dict], headers=None, timeout=None):
    if headers is None:
        headers = {'User-Agent': 'Electrum'}
    if timeout is None:
        # The default timeout is high intentionally.
        # DNS on some systems can be really slow, see e.g. #5337
        timeout = aiohttp.ClientTimeout(total=45)
    elif isinstance(timeout, (int, float)):
        timeout = aiohttp.ClientTimeout(total=timeout)
    ssl_context = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH, cafile=ca_path)

    if proxy:
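
TxMinedInfo above shows Optional fields on a NamedTuple with None defaults, and make_aiohttp_session takes proxy as Optional[dict]. A compact sketch of the NamedTuple half of that, with invented field names:

from typing import NamedTuple, Optional

class BlockInfo(NamedTuple):
    height: int                      # required, no default
    timestamp: Optional[int] = None  # unknown until the block is verified

info = BlockInfo(height=100)
print(info.timestamp)  # None: the value is simply not known yet
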
hail-is / hail / hail / python / hail / ir / renderer.py (view on GitHub)
            # If 'node' is to be bound higher than this, it must be bound by an
            # AggLet, with 'scan_scope' determining whether it is bound in
            # the agg scope or scan scope.
            self.min_value_binding_depth: int = min_value_binding_depth
            self.scan_scope: bool = scan_scope
            # The binding context of 'node'. Maps variables bound above to the
            # depth at which they were bound (more precisely, if
            # 'context[var] == depth', then 'stack[depth-1].node' binds 'var' in
            # the subtree rooted at 'stack[depth].node').
            self.context = context
            # The depth of 'node' in the original tree, i.e. the number of
            # BaseIR above this in the stack, not counting other 'Renderable's.
            self.depth: int = depth
            # The outermost frame above this in which 'node' was marked to be
            # lifted in the analysis pass, if any, otherwise None.
            self.lift_to_frame: Optional[CSEPrintPass.BindingsStackFrame] = lift_to_frame
            # True if any lets need to be inserted above 'node'. No node has
            # both 'lift_to_frame' not None and 'insert_lets' True.
            self.insert_lets: bool = insert_lets

            # Mutable

            # The index of the 'Renderable' child currently being visited.
            # Starts at -1 because it is incremented at the top of the main loop.
            self.child_idx = -1
            # The array of strings building 'node's IR.
            # * If 'insert_lets', all lets will be added to the parent's
            #   'builder' before appending this 'builder'.
            # * If 'lift_to_frame', 'builder' will be added to 'lift_to_frame's
            #   list of lifted lets, while only "(Ref ...)" will be added to
            #   the parent's 'builder'.
            # * If neither, then it is safe for 'builder' to be an alias of the