assert 1 == c2.x
assert 2 == c2.y
assert "test" == c2.z
assert set(["z"]) == set(C2Slots.__slots__)
assert 1 == c2.method()
assert "clsmethod" == c2.classmethod()
assert "staticmethod" == c2.staticmethod()

with pytest.raises(AttributeError):
    c2.t = "test"

non_slot_instance = C2(x=1, y=2, z="test")
if has_pympler:
    assert asizeof(c2) < asizeof(non_slot_instance)

c3 = C2Slots(x=1, y=3, z="test")
assert c3 > c2
c2_ = C2Slots(x=1, y=2, z="test")
assert c2 == c2_

assert "C2Slots(x=1, y=2, z='test')" == repr(c2)
hash(c2)  # Just to assert it doesn't raise.
assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
def test_asizeof(self):
    '''Test asizeof.asizeof()
    '''
    self.assertEqual(asizeof.asizeof(), 0)

    objs = [Foo(42), ThinFoo("spam"), OldFoo(67)]
    total = asizeof.asizeof(*objs)
    sizes = list(asizeof.asizesof(*objs))
    sum = 0
    for sz in sizes:
        sum += sz
    self.assertEqual(total, sum, (total, sum))
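
# Hedged sketch of the relationship the test above asserts: asizeof.asizesof()
# returns one size per argument, and for independent objects those sizes add up
# to asizeof.asizeof() of the same arguments. The sample objects below are
# arbitrary stand-ins, not the Foo/ThinFoo/OldFoo fixtures from the test suite.
from pympler import asizeof

objs = [bytearray(100), {"key": "value"}, 3.14159]
total = asizeof.asizeof(*objs)
individual = asizeof.asizesof(*objs)
assert total == sum(individual)
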
def testBaselineMemory(self):
    try:
        # pympler is optional; skip the baseline checks if it is unavailable.
        from pympler.asizeof import asizeof as size
    except ImportError:
        return
    # These values might be platform dependent, so don't fret too much.
    self.assertEqual(size(bitstring.ConstBitStream([0])), 64)
    self.assertEqual(size(bitstring.Bits([0])), 64)
    self.assertEqual(size(bitstring.BitStream([0])), 64)
    self.assertEqual(size(bitstring.BitArray([0])), 64)
    from bitstring.bitstore import ByteStore
    self.assertEqual(size(ByteStore(bytearray())), 100)
def test_classes(self):
    '''Test sizing class objects and instances
    '''
    self.assertTrue(asizeof.asizeof(Foo, code=True) > 0)
    self.assertTrue(asizeof.asizeof(ThinFoo, code=True) > 0)
    self.assertTrue(asizeof.asizeof(OldFoo, code=True) > 0)

    self.assertTrue(asizeof.asizeof(Foo([17, 42, 59])) > 0)
    self.assertTrue(asizeof.asizeof(ThinFoo([17, 42, 59])) > 0)
    self.assertTrue(asizeof.asizeof(OldFoo([17, 42, 59])) > 0)

    s1 = asizeof.asizeof(Foo("short"))
    s2 = asizeof.asizeof(Foo("long text ... well"))
    self.assertTrue(s2 >= s1)
    s3 = asizeof.asizeof(ThinFoo("short"))
    self.assertTrue(s3 <= s1)
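
# Hedged sketch of the code=True option used above: it asks asizeof to include
# (byte)code sizes as well, so sizing a class with code=True should report at
# least as much as the default call. DemoClass is a made-up example.
from pympler import asizeof


class DemoClass:
    def method(self):
        return 42


assert asizeof.asizeof(DemoClass, code=True) >= asizeof.asizeof(DemoClass)
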
    {
        'memory' : 120000,
        'events' : n,11000,
        'rest' : n,11000,
        'zmq' : 11000,
    }

:returns: A dict containing the memory information
:rtype: dict()
"""
self.log.debug("memory : Start ...")
rdict = dict()
rdict["memory"] = "%s kbytes" % (asizeof(self)/1024)
rdict["events"] ="%s kbytes(%s)" % (asizeof(self.events.data)/1024, self.events.count_events())
rdict["store"] = "%s kbytes" % (asizeof(self.events.store)/1024)
rdict["datafiles"] = "%s" % (self.events.store.count_files())
rdict["zmq"] = "%s kbytes" % ((asizeof(self._zmq_interface) + \
asizeof(self._zmq_reply_thread) + asizeof(self._zmq_publish))/1024)
self.log.debug("memory : Done")
return rdict
from collections import abc, deque
from typing import Optional, Tuple


def get_collection_sizes(obj, collections: Optional[Tuple] = None,
                         get_only_non_empty=False):
    """
    Iterates over `collections` of the given object and returns each one's
    byte size and number of items.
    """
    from pympler import asizeof

    collections = collections or (list, dict, set, deque, abc.Sized)
    if not isinstance(collections, tuple):
        collections = tuple(collections)

    result = []
    for attr_name in dir(obj):
        attr = getattr(obj, attr_name)
        if isinstance(attr, collections) and (not get_only_non_empty or len(attr) > 0):
            result.append((attr_name, len(attr), asizeof.asizeof(attr, detail=1)))
    return result
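
# Possible usage of get_collection_sizes above; Cache is a hypothetical class
# invented purely for this illustration.
class Cache:
    def __init__(self):
        self.items = list(range(1000))
        self.index = {i: str(i) for i in range(1000)}
        self.empty = set()


for name, length, size in get_collection_sizes(Cache(), get_only_non_empty=True):
    print("%s: %s items, %s bytes" % (name, length, size))
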
        # if the counter equals the interval then write the averaged
        # coverage to dict_cov
        if reset_counter == interval:
            dict_cov[ref]["values"].append(
                int(sum(array_of_cov) / len(array_of_cov))
            )
            # reset the counter
            reset_counter = 0
        else:
            # if the counter is less than the interval, add 1 to it
            reset_counter += 1

    logger.info("Successfully generated dicts necessary for output json file "
                "and .report.json depth file.")
    logger.debug("Size of percentage_bases_covered: {} kb".format(
        asizeof(percentage_bases_covered) / 1024))
    logger.debug("Size of dict_cov: {} kb".format(asizeof(dict_cov) / 1024))

    return percentage_bases_covered, dict_cov
@param which: the counter to get. 0 to get all counters.
'''
if which == 0:
    data = []
    data.append("api : %s items, %s kbytes" % (1, asizeof(self) / 1024))
    data.append("apscheduler : %s items, %s kbytes" % (1, asizeof(self._scheduler) / 1024))
    data.append("jobs dict : %s items, %s kbytes" % (len(self.data), asizeof(self.data) / 1024))
    data.append("store : %s items, %s kbytes" % (1, asizeof(self.store) / 1024))
    return data
else:
    if which == MEMORY_PLUGIN:
        return 0, 0
    elif which == MEMORY_API:
        return 1, asizeof(self)
    elif which == MEMORY_SCHEDULER:
        return 1, asizeof(self._scheduler)
    elif which == MEMORY_DATA:
        return len(self.data), asizeof(self.data)
    elif which == MEMORY_STORE:
        return 1, asizeof(self.store)
return None
def new_value(self, name, value):
    """Create new value in data"""
    try:
        # Needed to prevent memory leaks. See spyder-ide/spyder#7158.
        if asizeof(value) < MAX_SERIALIZED_LENGHT:
            self.shellwidget.set_value(name, value)
        else:
            QMessageBox.warning(self, _("Warning"),
                                _("The object you are trying to modify is "
                                  "too big to be sent back to the kernel. "
                                  "Therefore, your modifications won't "
                                  "take place."))
    except TypeError as e:
        QMessageBox.critical(self, _("Error"),
                             "TypeError: %s" % to_text_string(e))
    self.shellwidget.refresh_namespacebrowser()
def add_result_data(self, title, key, data):
    if SIZE_CONTROL:
        if not self.MEM_LIMIT:
            # Only keep adding results while the accumulated result set stays
            # under roughly 15 MB; otherwise remember that the limit was hit.
            mem_size = asizeof(self.current_task.results)
            if mem_size < 15000000:
                self._add_result(title, key, data)
            else:
                self.MEM_LIMIT = True
    else:
        self._add_result(title, key, data)
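
# Generic, hedged sketch of the size-capping pattern above: stop accumulating
# results once their measured footprint crosses a byte limit. ResultBuffer and
# MAX_RESULT_BYTES are illustrative names, not taken from the code above.
from pympler.asizeof import asizeof

MAX_RESULT_BYTES = 15000000  # ~15 MB, mirroring the limit used above


class ResultBuffer:
    def __init__(self):
        self.results = []
        self.limit_reached = False

    def add(self, item):
        if self.limit_reached:
            return
        if asizeof(self.results) < MAX_RESULT_BYTES:
            self.results.append(item)
        else:
            self.limit_reached = True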