[tor-commits] [depictor/master] Attempt to reduce memory size before forking a bunch of times...

tom at torproject.org tom at torproject.org
Mon Mar 23 16:32:51 UTC 2015


commit 487b01f79309521ea15f3781ce0fcd0482152c10
Author: Tom Ritter <tom at ritter.vg>
Date:   Sun Mar 8 14:29:37 2015 -0500

    Attempt to reduce memory size before forking a bunch of times...
---
 write_website.py |   11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/write_website.py b/write_website.py
index f761643..6fe9089 100755
--- a/write_website.py
+++ b/write_website.py
@@ -71,9 +71,14 @@ def main():
 	w.write_website(os.path.join(os.path.dirname(__file__), 'out', 'consensus-health.html'), True)
 	w.write_website(os.path.join(os.path.dirname(__file__), 'out', 'index.html'), False)
 
+	# delete giant data structures for subprocess forking by piling hacks on top of each other
 	consensus_time = w.get_consensus_time()
+	import gc
+	del w, consensuses, votes
+	gc.collect()
+	time.sleep(1)
 	archived = os.path.join(os.path.dirname(__file__), 'out', \
-							'consensus-health-' + consensus_time.strftime("%Y-%m-%d-%H-%M") + '.html')
+				'consensus-health-' + consensus_time.strftime("%Y-%m-%d-%H-%M") + '.html')
 	subprocess.call(["cp", os.path.join(os.path.dirname(__file__), 'out', 'consensus-health.html'), archived])
 	subprocess.call(["gzip", "-9", archived])
 	subprocess.call(["ln", "-s", archived + ".gz", archived])
@@ -82,11 +87,9 @@ def main():
 	weeks_to_keep = 3
 	files = [f for f in os.listdir(os.path.join(os.path.dirname(__file__), 'out'))]
 	for f in files:
-		print f
 		if f.startswith("consensus-health-"):
-			f = f.replace("consensus-health-", "").replace(".html", "")
+			f = f.replace("consensus-health-", "").replace(".html", "").replace(".gz", "")
 			f_time = datetime.datetime.strptime(f, "%Y-%m-%d-%H-%M")
-			print "\t", f_time
 			if (consensus_time - f_time).days > weeks_to_keep * 7:
 				os.remove(f)
 





More information about the tor-commits mailing list