 import json
 import os
+import codecs
+import urllib2
 
 from multiprocessing import Process, Queue
 from time import gmtime, strftime
 class BenchmarkRunner(object):
     'manages runs of all the benchmarks, including cluster restarts etc.'
 
-    def __init__(self, out_dir, cluster, collector):
+    def __init__(self, out_dir, url, secret, cluster, collector):
         ''
 
         self._output = out_dir  # where to store output files
         self._configs = {}  # config name => (bench name, config)
         self._cluster = cluster
         self._collector = collector
+        self._url = url  # perf farm API endpoint to upload results to
+        self._secret = secret  # upload secret sent in the Authorization header
 
     def register_benchmark(self, benchmark_name, benchmark_class):
         ''
         with open('%s/results.json' % self._output, 'w') as f:
             f.write(json.dumps(r, indent=4))
 
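+        # also push the results to the perf farm server; an upload failure
+        # is only reported and does not invalidate the locally stored results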
+        try:
+            self._upload_results(r)
+        except Exception as e:
+            print 'failed to upload results: %s' % e
+
+    def _upload_results(self, results):
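+        'post the collected results as JSON to the perf farm server'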
+        # wrap the results in a list, as the upload endpoint expects a JSON array
+        post = [results]
+        req = urllib2.Request(self._url, json.dumps(post))
+        req.add_header('Authorization', self._secret)  # authenticate with the upload secret
+        req.add_header('Content-Type', 'application/json')
+        # urlopen raises on HTTP errors, which the caller catches and reports
+        urllib2.urlopen(req)
+
     def run(self):
         'run all the configured benchmarks'
 
 
                                          bin_path=('%s/bin' % (BUILD_PATH)))
         collectors.register('postgres', pg_collector)
 
-        runner = BenchmarkRunner(OUTPUT_DIR, cluster, collectors)
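+        # results will be uploaded to PERFFARM_URL, authenticated with SECRET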
+        runner = BenchmarkRunner(OUTPUT_DIR, PERFFARM_URL, SECRET,
+                                 cluster, collectors)
 
         # register the three tests we currently have