Skip to content
This repository has been archived by the owner on May 25, 2024. It is now read-only.

Commit

Permalink
add extra compile flags for windows (#12)
Browse files Browse the repository at this point in the history
* add extra compile flags for windows

* move file open() out of gc.collect()

* use c++17, c17 for all platforms

* _

* test extra flags

* test extra flags

* test extra flags -std=c++17 for macOS
  • Loading branch information
helix-xx authored Apr 27, 2024
1 parent 9f6564f commit 05c0155
Show file tree
Hide file tree
Showing 2 changed files with 54 additions and 42 deletions.
13 changes: 8 additions & 5 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,18 @@

from setuptools import Extension, setup

CPPSTD = "c++20"
CPPSTD = "c++2a"
CSTD = "c2x"
extra_compile_args = ["-D_GNU_SOURCE"]
if platform.system() == "Linux" or platform.system() == "Darwin":
if platform.system() == "Linux":
strip_flags = ["-Wl,--strip-all"]
extra_compile_args += [f"-std={CPPSTD}", f"-std={CSTD}"]
else:
elif platform.system() == "Windows":
strip_flags = []
extra_compile_args += [f"/std:{CPPSTD}", f"/std:{CSTD}"]
extra_compile_args += [f"/std:c++20", f"/std:c2x"]
elif platform.system() == "Darwin":
strip_flags = []
extra_compile_args += [f"-std=c++17"]


def find_src():
Expand All @@ -25,7 +28,7 @@ def find_src():
if file != "version_template.h":
headers.append(os.path.join(root, file))
srcs.append(os.path.abspath("./deps/double-conversion.cpp"))
headers.append(os.path.abspath("./deps/double-conversion.h"))
headers.append(os.path.abspath("./deps/double-conversion.hpp"))
return srcs, headers


Expand Down
83 changes: 46 additions & 37 deletions test/memory_test.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import os
import sys
import unittest

import json
import math
import random
Expand Down Expand Up @@ -51,6 +50,8 @@ def dict(self):
def length(self):
self._shrink *= 0.99
return int(math.exp(self._randomizer.uniform(-0.5, 5)) * self._shrink)


class TestMemory(unittest.TestCase):
@staticmethod
def collect_all_objects(obj):
Expand Down Expand Up @@ -105,7 +106,7 @@ def test_encode_leak(self):
# PyPy's GC works differently (no ref counting), so this wouldn't be useful.
# Simply returning an empty list effectively disables the refcount test.
return []

import cjson

now = time.time()
Expand Down Expand Up @@ -167,7 +168,7 @@ def test_decode_leak(self):
print(f"mem_diff: {mem_diff}, peak_diff: {peak_diff}")
# should not increase more than 100 bytes
self.assertGreaterEqual(100, mem_diff)

def test_dump_leak_refcount(self):
"""
Developed by ESN, an Electronic Arts Inc. studio.
Expand Down Expand Up @@ -196,33 +197,34 @@ def test_dump_leak_refcount(self):
# PyPy's GC works differently (no ref counting), so this wouldn't be useful.
# Simply returning an empty list effectively disables the refcount test.
return []

import cjson
import tempfile

now = time.time()
seeds = [now * i for i in range(1, 31)]
for seed in seeds:
with tempfile.NamedTemporaryFile("w", delete=True) as f:
data = self.random_object(seed)
# print(f"--seed {seed}")

data_objects = self.collect_all_objects(data)
# Exclude ints because they get referenced by the lists below.
data_objects = [o for o in data_objects if not isinstance(o, int)]
gc.collect()
data_ref_counts_before = [sys.getrefcount(o) for o in data_objects]
cjson.dump(data, f)
gc.collect()
data_ref_counts_after = [sys.getrefcount(o) for o in data_objects]
if data_ref_counts_before != data_ref_counts_after:
for o, before, after in zip(
data_objects, data_ref_counts_before, data_ref_counts_after
):
if before != after:
print(f"Ref count of {o!r} went from {before} to {after}")
self.assertTrue(False, "Ref count changed")

f = tempfile.NamedTemporaryFile("w", delete=True)
data = self.random_object(seed)
# print(f"--seed {seed}")

data_objects = self.collect_all_objects(data)
# Exclude ints because they get referenced by the lists below.
data_objects = [o for o in data_objects if not isinstance(o, int)]
gc.collect()
data_ref_counts_before = [sys.getrefcount(o) for o in data_objects]
cjson.dump(data, f)
gc.collect()
data_ref_counts_after = [sys.getrefcount(o) for o in data_objects]
if data_ref_counts_before != data_ref_counts_after:
for o, before, after in zip(
data_objects, data_ref_counts_before, data_ref_counts_after
):
if before != after:
print(f"Ref count of {o!r} went from {before} to {after}")
self.assertTrue(False, "Ref count changed")
f.close()

def test_dump_leak(self):
if hasattr(sys, "pypy_version_info"):
# skip PyPy
Expand All @@ -235,26 +237,28 @@ def test_dump_leak(self):

import cjson
import tempfile

datas = []
for file in get_benchfiles_fullpath():
with open(file, "r", encoding='utf-8') as f:
datas.append(json.load(f))

f = tempfile.NamedTemporaryFile("w", delete=True)

# warm up. CPython will not release memory immediately.
for data in datas:
for _ in range(10):
with tempfile.NamedTemporaryFile("w", delete=True) as f:
json.dump(data, f)
cjson.dump(data, f)
f.seek(0)
#
tracemalloc.start()
#
gc.collect()
snapshot_1, peak_1 = tracemalloc.get_traced_memory()
for data in datas:
for _ in range(10):
with tempfile.NamedTemporaryFile("w", delete=True) as f:
cjson.dump(data, f)
cjson.dump(data, f)
f.seek(0)
gc.collect()
snapshot_2, peak_2 = tracemalloc.get_traced_memory()
#
Expand All @@ -263,7 +267,8 @@ def test_dump_leak(self):
print(f"mem_diff: {mem_diff}, peak_diff: {peak_diff}")
# should not increase more than 100 bytes
self.assertGreaterEqual(100, mem_diff)

f.close()

def test_load_leak(self):
if hasattr(sys, "pypy_version_info"):
# skip PyPy
Expand All @@ -275,23 +280,25 @@ def test_load_leak(self):
from test_utils import get_benchfiles_fullpath

import cjson
import json

file_paths = get_benchfiles_fullpath()
# warm up. CPython will not release memory immediately.
fs = []
for file in file_paths:
fs.append(open(file, "r"))
# warm up. CPython will not release memory immediately.
for f in fs:
for _ in range(10):
with open(file, "r") as f:
cjson.load(f)
cjson.load(f)
f.seek(0)
#
tracemalloc.start()
#
gc.collect()
snapshot_1, peak_1 = tracemalloc.get_traced_memory()
for file in file_paths:
for f in fs:
for _ in range(10):
with open(file, "r") as f:
cjson.load(f)
cjson.load(f)
f.seek(0)
gc.collect()
snapshot_2, peak_2 = tracemalloc.get_traced_memory()
#
Expand All @@ -300,6 +307,8 @@ def test_load_leak(self):
print(f"mem_diff: {mem_diff}, peak_diff: {peak_diff}")
# should not increase more than 100 bytes
self.assertGreaterEqual(100, mem_diff)
for f in fs:
f.close()


if __name__ == "__main__":
Expand Down

0 comments on commit 05c0155

Please sign in to comment.