toolbox.register("grow", sp.generate_parametrized_expression,
partial(gp.genGrow, pset=pset, min_=self.min_gen_grow, max_=self.max_gen_grow),
variable_type_indices, variable_names)
toolbox.register("mutate", operators.mutation_biased, expr=toolbox.grow,
node_selector=toolbox.koza_node_selector)
toolbox.decorate("mutate", operators.static_limit(key=operator.attrgetter("height"), max_value=self.max_height))
toolbox.decorate("mutate", operators.static_limit(key=len, max_value=self.max_size))
toolbox.decorate("mutate", self.history.decorator)
def generate_randoms(individuals):
    return individuals

toolbox.register("generate_randoms", generate_randoms,
                 individuals=[toolbox.individual() for i in range(self.num_randoms)])
toolbox.decorate("generate_randoms", self.history.decorator)
toolbox.register("error_func", self.error_function)
expression_dict = cachetools.LRUCache(maxsize=1000)
subset_selection_archive = subset_selection.RandomSubsetSelectionArchive(
    frequency=self.subset_change_frequency,
    predictors=predictors,
    response=response,
    subset_size=subset_size,
    expression_dict=expression_dict)
evaluate_function = partial(subset_selection.fast_numpy_evaluate_subset,
                            get_node_semantics=sp.get_node_semantics,
                            context=pset.context,
                            subset_selection_archive=subset_selection_archive,
                            error_function=toolbox.error_func,
                            expression_dict=expression_dict)
toolbox.register("evaluate_error", evaluate_function)
toolbox.register("assign_fitness", afpo.assign_age_fitness_size_complexity)
self.multi_archive = utils.get_archive(100)
if self.log_mutate:
    mutation_stats_archive = archive.MutationStatsArchive(evaluate_function)
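
# A minimal, hypothetical sketch of the memoization pattern the shared
# expression_dict enables above: key an LRU cache by the expression's string
# form so identical individuals are not re-evaluated.  cached_evaluate and
# evaluate_expression are illustrative names, not part of the code above.
import cachetools

expression_dict = cachetools.LRUCache(maxsize=1000)

def cached_evaluate(individual, evaluate_expression):
    key = str(individual)                       # assume str() identifies the expression tree
    if key not in expression_dict:
        expression_dict[key] = evaluate_expression(individual)
    return expression_dict[key]
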
UPLOAD_FOLDER = 'flask/uploads'
ALLOWED_EXTENSIONS = {'png'}  # set literal; set('png') would yield {'p', 'n', 'g'}
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
log = app.logger
handler = logging.StreamHandler()
formatter = logging.Formatter(
    '%(asctime)s|%(name)-8s|%(levelname)s: %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
log.setLevel(logging.INFO)
mqttc = None
master_shadow = None
shady_vals = {}
topic_cache = cachetools.LRUCache(maxsize=50)
msg_cache = cachetools.LRUCache(maxsize=100)
second = timedelta(seconds=1)
last_hz = 0
incr_lock = Lock()
current_hz = 0
current_hz_time = dt.datetime.utcnow()
rollover_lock = Lock()
convey_topics = [
    "/convey/telemetry",
    "/convey/errors",
    "/convey/stages"
]
sort_bridge_topics = [
    "/arm/telemetry",
]
def deserialize(self, data):
    DepotManifest.deserialize(self, data)

    # order chunks in ascending order by their offset
    # required for CDNDepotFile
    for mapping in self.payload.mappings:
        mapping.chunks.sort(key=lambda x: x.offset, reverse=False)
class CDNClient(object):
    DepotManifestClass = CDNDepotManifest
    _LOG = logging.getLogger("CDNClient")
    servers = deque()  #: CS Server list
    _chunk_cache = LRUCache(20)
    cell_id = 0  #: Cell ID to use, initialized from SteamClient instance

    def __init__(self, client):
        """CDNClient allows loading and reading of manifests for Steam apps, which are
        used to list and download content

        :param client: logged in SteamClient instance
        :type client: :class:`.SteamClient`
        """
        self.gpool = GPool(8)  #: task pool
        self.steam = client  #: SteamClient instance

        if self.steam:
            self.cell_id = self.steam.cell_id

        self.web = make_requests_session()
        self.depot_keys = {}  #: depot decryption keys
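
# A minimal usage sketch, assuming a logged-in SteamClient instance named
# `client` (see the docstring above).  Note that _chunk_cache is a class
# attribute, so the 20-entry LRU cache of downloaded chunks is shared by
# every CDNClient instance in the process.
cdn = CDNClient(client)
assert cdn.cell_id == client.cell_id            # cell ID is copied from the SteamClient
assert CDNClient._chunk_cache.maxsize == 20
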
def __init__(self, capacity, cache_policy=None, ttl_policy=None, *args, **kwargs):
    self.cache_policy = cache_policy or self.DEFAULT_CACHE_POLICY
    self.ttl_policy = ttl_policy or self.DEFAULT_TTL_POLICY
    self.cache = LRUCache(maxsize=capacity, missing=self.missing)
    self.args = args
    self.after_response_hook = kwargs.pop('after_response_hook', None)
    self.kwargs = kwargs
    token_lifetime (int): The amount of time in seconds for
        which the token is valid. Defaults to 1 hour.
    max_cache_size (int): The maximum number of JWT tokens to keep in
        cache. Tokens are cached using :class:`cachetools.LRUCache`.
"""
super(OnDemandCredentials, self).__init__()
self._signer = signer
self._issuer = issuer
self._subject = subject
self._token_lifetime = token_lifetime

if additional_claims is None:
    additional_claims = {}

self._additional_claims = additional_claims
self._cache = cachetools.LRUCache(maxsize=max_cache_size)
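
# A hedged sketch of the caching behaviour described in the docstring above:
# signed JWTs are kept in an LRU cache keyed by audience, so only the
# max_cache_size most recently used audiences stay resident.  make_jwt is a
# hypothetical signing helper, not part of google-auth's API.
def _token_for_audience(cache, audience, make_jwt):
    token = cache.get(audience)
    if token is None:                           # miss: sign a new JWT and cache it
        token = make_jwt(audience)
        cache[audience] = token                 # may evict the least recently used audience
    return token
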
def _create_workflow_execution_cache():
    return cachetools.LRUCache(maxsize=500)
def get_cache():
    ttl = settings.CACHE_METRIC_NAMES_TTL
    size = settings.CACHE_METRIC_NAMES_MAX
    if ttl > 0 and size > 0:
        return TTLCache(size, ttl)
    elif size > 0:
        return LRUCache(size)
    else:
        return dict()
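
# The three branches above trade memory for freshness: TTLCache evicts by both
# size and age, LRUCache only by size, and a plain dict never evicts.  A small
# hedged illustration (the maxsize and ttl values here are made up):
from cachetools import TTLCache

names = TTLCache(maxsize=2, ttl=300)            # at most 2 entries, each valid for 300 s
names['a'] = 1
names['b'] = 2
names['c'] = 3                                  # cache is full, so the LRU entry is dropped
assert 'a' not in names
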
import logging
import urllib.parse
import os
import contextlib
import cachetools
import threading
from ChromeController.manager import ChromeRemoteDebugInterface
class _TabStore(cachetools.LRUCache):
    def __init__(self, chrome_interface, *args, **kwargs):
        assert "maxsize" in kwargs
        assert kwargs['maxsize']
        super().__init__(*args, **kwargs)
        assert self.maxsize
        self.chrome_interface = chrome_interface
        self.log = logging.getLogger("Main.ChromeController.TabPool.Store")

    def __getitem__(self, key):
        self.log.debug("__getitem__: %s", key)
        assert key is not None, "You have to pass a key to __getitem__!"
        return super().__getitem__(key)

    def __missing__(self, key):
        self.log.debug("__missing__: %s", key)
self._clear_cell(x + 1, y, visited)
self._clear_cell(x, y + 1, visited)
self._clear_cell(x - 1, y - 1, visited)
self._clear_cell(x - 1, y + 1, visited)
self._clear_cell(x + 1, y - 1, visited)
self._clear_cell(x + 1, y + 1, visited)

def _mutate_cell(self, x, y, new_play: Play):
    if self.play[x][y] in UNKNOWN_OR_FLAGGED and new_play != Play.UNKNOWN:
        self.remaining_unknown -= 1
        self.play[x][y] = new_play
    else:
        raise AssertionError("this shouldn't happen (is {}, wants to be {})".format(
            self.play[x][y], new_play))
cache = cachetools.LRUCache(maxsize=1000)
@commands.create("minesweeper start", "mine start", "m start", category="Games", params=[])
@channel_only
@games_allowed_only
async def start(message):
"""
Starts a game of minesweeper.
Example::
mine start
"""
key = (message.transport.id, message.server.id, message.channel.id)
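    # A hedged sketch of how the module-level LRU cache above could hold one
    # game per (transport, server, channel) key; Board() is a hypothetical
    # game-state constructor, not shown in this snippet.
    board = cache.get(key)
    if board is None:
        board = Board()
        cache[key] = board                      # inactive games eventually fall out of the 1000-entry cache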