    return f'<section>{highlighted}</section>'

def _make_index_htm(target: str) -> int:
    contents = open('slides.md').read()
    html = INDEX_TMPL.format(
        slides=''.join(
            _to_slide(slide) for slide in contents.split(SLIDE_DELIM)
        ),
    )
    with open(target, 'w') as f:
        f.write(html)
    return 0
BACKENDS: Dict[str, Callable[[str], int]] = {
    '.mtp/package.json': _make_package_json,
    '.mtp/node_modules': _make_node_modules,
    '.mtp/style.scss': _make_style_scss,
    'build/presentation.css': _make_presentation_css,
    'build/presentation.js': _make_presentation_js,
    'index.htm': _make_index_htm,
}
def run_backend(target: str) -> int:
    def _make_old() -> None:
        print(f'make old {target}')
        if os.path.exists(target):
            os.utime(target, (0, 0))

    def _make_new() -> None:
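For context, a minimal sketch of how a make-style table like BACKENDS is typically driven; the main() wrapper below is an assumption for illustration, not part of the original snippet.

import sys

def main() -> int:
    # Hypothetical driver: rebuild every registered target in table order.
    for target in BACKENDS:
        ret = run_backend(target)
        if ret:
            return ret
    return 0

if __name__ == '__main__':
    sys.exit(main())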
import importlib.machinery
from types import ModuleType
from typing import List, Callable
from petronia3.extensions.extensions.api import ExtensionConfiguration, ExtensionState
from petronia3.system.bus import EventBus
from petronia3.system.logging import log, logerr, DEBUG, INFO, ERROR
from petronia3.errors import (
    PetroniaInternalError,
    PetroniaExtensionNotFound,
    PetroniaInvalidExtension,
    PetroniaCyclicExtensionDependency,
    PetroniaExtensionInitializationError,
)
from petronia3.util.memory import EMPTY_TUPLE
ModuleLoadedCallback = Callable[[str], None]
class _LoadState:
    __slots__ = ('loading', 'loaded', 'state', 'config', 'bus', 'origin', 'pathfinder')
    loading: List[str]
    loaded: List[str]

    def __init__(
            self, origin: str,
            bus: EventBus,
            config: ExtensionConfiguration,
            state: ExtensionState
    ) -> None:
        self.origin = origin
        self.bus = bus
        self.config = config
        self.state = state
    def __init__(
        self,
        func: Callable[[_T], Union[bool, Awaitable[bool]]],
        iterable: Union[AsyncIterable[_T], Iterable[_T]],
    ) -> None:
        self.__func: Callable[[_T], Union[bool, Awaitable[bool]]] = func
        self.__iterable: Union[AsyncIterable[_T], Iterable[_T]] = iterable

        # Pick the generator strategy based on the argument types: the
        # iterable and the predicate may each be sync or async, but at
        # least one of them must be async.
        if isinstance(iterable, AsyncIterable):
            if asyncio.iscoroutinefunction(func):
                self.__generator_instance = self.__async_generator_async_pred()
            else:
                self.__generator_instance = self.__async_generator_sync_pred()
        elif asyncio.iscoroutinefunction(func):
            self.__generator_instance = self.__sync_generator_async_pred()
        else:
            raise TypeError("Must be either an async predicate, an async iterable, or both.")
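A minimal usage sketch, assuming the enclosing class is an async filter wrapper; the name AsyncFilter is hypothetical, since only its __init__ appears above.

import asyncio

async def numbers():
    # An AsyncIterable[int] source.
    for i in range(6):
        yield i

async def is_even(n: int) -> bool:
    # An async predicate.
    return n % 2 == 0

async def main() -> None:
    # Async iterable + async predicate: the first branch above is taken.
    async for n in AsyncFilter(is_even, numbers()):
        print(n)  # 0, 2, 4

asyncio.run(main())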
    if issubclass(type_, numeric_types) and not issubclass(
        type_, (ConstrainedInt, ConstrainedFloat, ConstrainedDecimal, ConstrainedList, bool)
    ):
        # Is a plain numeric type (not already a Constrained* subclass)
        attrs = ('gt', 'lt', 'ge', 'le', 'multiple_of')
        numeric_type = next(t for t in numeric_types if issubclass(type_, t))  # pragma: no branch
        constraint_func = _map_types_constraint[numeric_type]

    if attrs:
        used_constraints.update(set(attrs))
        kwargs = {
            attr_name: attr
            for attr_name, attr in ((attr_name, getattr(field_info, attr_name)) for attr_name in attrs)
            if attr is not None
        }
        if kwargs:
            constraint_func = cast(Callable[..., type], constraint_func)
            return constraint_func(**kwargs)
    return type_
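For context, a minimal pydantic v1 example of the constrained numeric types this branch works with; conint and confloat return ConstrainedInt/ConstrainedFloat subclasses carrying the same gt/lt/ge/le/multiple_of attributes read above.

from pydantic import BaseModel, confloat, conint

class Settings(BaseModel):
    retries: conint(ge=0, lt=10)       # a ConstrainedInt subclass
    ratio: confloat(gt=0.0, le=1.0)    # a ConstrainedFloat subclass

Settings(retries=3, ratio=0.5)   # ok
Settings(retries=-1, ratio=0.5)  # raises ValidationError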
    Note: The image can also be specified by decorating the function with the @python_component decorator. If different base images are explicitly specified in both places, an error is raised.
    extra_code: Optional. Python source code that gets placed before the function code. Can be used as a workaround to define types used in the function signature.
    packages_to_install: Optional. List of [versioned] Python packages to pip-install before executing the user function.
    modules_to_capture: Optional. List of module names that will be captured (instead of just being referenced) during the dependency scan. By default, func.__module__ is captured.
    use_code_pickling: Specifies whether the function code should be captured using pickling, as opposed to source-code manipulation. Pickling has better support for capturing dependencies, but is sensitive to version mismatches between the Python in the component-creation environment and in the runtime image.
    '''
    decorator_base_image = getattr(func, '_component_base_image', None)
    if decorator_base_image is not None:
        if base_image is not None and decorator_base_image != base_image:
            raise ValueError('base_image ({}) conflicts with the decorator-specified base image metadata ({})'.format(base_image, decorator_base_image))
        else:
            base_image = decorator_base_image
    else:
        if base_image is None:
            base_image = default_base_image_or_builder
            if isinstance(base_image, Callable):
                base_image = base_image()

    packages_to_install = packages_to_install or []

    component_spec = _extract_component_interface(func)
    component_inputs = component_spec.inputs or []
    component_outputs = component_spec.outputs or []

    arguments = []
    arguments.extend(InputValuePlaceholder(input.name) for input in component_inputs)
    arguments.extend(OutputPathPlaceholder(output.name) for output in component_outputs)

    if use_code_pickling:
        func_code = _capture_function_code_using_cloudpickle(func, modules_to_capture)
        # pip startup is quite slow. TODO: Remove the special cloudpickle installation code in favor of the following line once a way to speed up pip startup is discovered.
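A hedged usage sketch of the kfp v1-style entry point this helper backs; func_to_container_op and the parameter names follow the docstring above, but the exact signature may differ across kfp versions.

from kfp.components import func_to_container_op

def add(a: float, b: float) -> float:
    return a + b

# Build a container op from a plain Python function.
add_op = func_to_container_op(
    add,
    base_image='python:3.9',
    packages_to_install=['numpy==1.24.4'],
)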
import multiprocessing.pool
from multiprocessing import Queue
import pandas as pd
from six.moves import queue
import time
from sklearn.utils import check_X_y
from autosklearn.classification import AutoSklearnClassifier
from autosklearn.pipeline.components import classification
import gc
gc.enable()
classification.add_classifier(LogisticRegressionSK)
classification.add_classifier(LogisticRegressionSMAC)
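A hedged sketch of restricting the search space to the classifiers registered above; include_estimators follows the older auto-sklearn v1 API and is an assumption here.

automl = AutoSklearnClassifier(
    time_left_for_this_task=60,
    include_estimators=['LogisticRegressionSMAC'],  # assumed v1-era keyword
)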
ObjectiveFuncType = Callable[[trial_module.Trial], float]
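An example objective matching ObjectiveFuncType, using standard Optuna API.

import optuna

def objective(trial: optuna.trial.Trial) -> float:
    # Takes a Trial, returns a float score to minimize.
    x = trial.suggest_float('x', -10.0, 10.0)
    return (x - 2.0) ** 2

study = optuna.create_study()
study.optimize(objective, n_trials=50)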
from collections import defaultdict

import six

def _name_estimators(estimators):
    """Generate names for estimators."""
    names = [type(estimator).__name__.lower() for estimator in estimators]
    namecount = defaultdict(int)
    for est, name in zip(estimators, names):
        namecount[name] += 1
    # Keep counts only for names that occur more than once.
    for k, v in list(six.iteritems(namecount)):
        if v == 1:
            del namecount[k]
    # Walk backwards so earlier duplicates end up with lower suffixes.
    for i in reversed(range(len(estimators))):
        name = names[i]
        if name in namecount:
            names[i] += "-%d" % namecount[name]
            namecount[name] -= 1
    return list(zip(names, estimators))
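Example, assuming scikit-learn estimators: duplicate class names get numbered suffixes while unique names stay bare.

from sklearn.linear_model import LogisticRegression
from sklearn.svm import SVC

print(_name_estimators([SVC(), LogisticRegression(), LogisticRegression()]))
# [('svc', SVC()), ('logisticregression-1', LogisticRegression()),
#  ('logisticregression-2', LogisticRegression())]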
from typing import Dict, List, Union, Callable
import tensorflow as tf
from typeguard import check_argument_types
from neuralmonkey.decoders.autoregressive import AutoregressiveDecoder
from neuralmonkey.decoders.ctc_decoder import CTCDecoder
from neuralmonkey.decoders.classifier import Classifier
from neuralmonkey.decoders.sequence_labeler import SequenceLabeler
from neuralmonkey.decorators import tensor
from neuralmonkey.runners.base_runner import BaseRunner
# pylint: disable=invalid-name
SupportedDecoder = Union[
AutoregressiveDecoder, CTCDecoder, Classifier, SequenceLabeler]
Postprocessor = Callable[[List[List[str]]], List[List[str]]]
# pylint: enable=invalid-name
class PlainRunner(BaseRunner[SupportedDecoder]):
    """A runner which takes the output from decoder.decoded."""

    # pylint: disable=too-few-public-methods
    # Pylint issue here: https://github.com/PyCQA/pylint/issues/2607
    class Executable(BaseRunner.Executable["PlainRunner"]):

        def collect_results(self, results: List[Dict]) -> None:
            if len(results) != 1:
                raise ValueError("PlainRunner needs exactly 1 execution "
                                 "result, got {}".format(len(results)))

            vocabulary = self.executor.decoder.vocabulary
    def __init__(self, entry_id: str, component_key: str, key: int):
        """Initialize."""
        self._entry_id = entry_id
        self._component_key = component_key
        self._key = key
        self._remove_callbacks: List[Callable[[], None]] = []
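A hedged sketch of the matching teardown hook; async_will_remove_from_hass is the Home Assistant-style name such entities usually pair with _remove_callbacks, assumed here rather than shown in the snippet.

    async def async_will_remove_from_hass(self) -> None:
        """Run and clear every removal callback registered during setup."""
        for remove_callback in self._remove_callbacks:
            remove_callback()
        self._remove_callbacks.clear()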
@dataclass
class FitData:
    # This is all the different data values that PyFit supports.
    data: Dict[str, numpy.ndarray]
    monte_carlo: Dict[str, numpy.ndarray]
    quality_factor: Dict[str, numpy.ndarray]
    binned: Dict[str, numpy.ndarray]
    events_errors: Dict[str, numpy.ndarray]
    expected_values: Dict[str, numpy.ndarray]
    generated_length: numpy.float64


@dataclass
class CallPackage:
    setup: Callable[[], None]
    run: Callable[[Any], float]
class LikelihoodFetch(object):
    __LOGGER = logging.getLogger(__name__ + ".LikelihoodFetch")

    def __init__(self):
        self.__found_plugins = plugin_loader.fetch_plugins(
            likelihoods, "Likelihood"
        )

    def __repr__(self):
        return "{0}()".format(self.__class__.__name__)

    def get_likelihood(self, name) -> fit_plugin.Setup:
        for likelihood in self.__found_plugins:
            # Assumption: plugins are matched by a `name` attribute; the
            # original snippet is truncated inside this loop.
            if likelihood.name == name:
                return likelihood
        raise ValueError("Likelihood {0} not found!".format(name))
        self._job_id = job_id
        self._job_base_dir = BackgroundJobDefines.base_dir
        self._job_initialization_lock = os.path.join(self._job_base_dir, "job_initialization.lock")

        if not logger:
            raise MKGeneralException(_("The background job is missing a logger instance"))
        self._logger = logger

        kwargs.setdefault("stoppable", True)
        self._kwargs = kwargs

        self._work_dir = os.path.join(self._job_base_dir, self._job_id)
        self._jobstatus = JobStatus(self._work_dir)

        # The function pointer and its args/kwargs
        self._queued_function: Optional[Tuple[Callable, Tuple[Any, ...], Dict[str, Any]]] = None
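A hedged sketch of how a queued (func, args, kwargs) triple like _queued_function is typically consumed later; the method name _invoke_queued_function is hypothetical, not checkmk's actual code.

    def _invoke_queued_function(self) -> None:
        # Unpack and call the stored function with its captured arguments.
        if self._queued_function is not None:
            func, args, kwargs = self._queued_function
            func(*args, **kwargs)
            self._queued_function = None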