diff --git a/build.py b/build.py
index 6ac68693..3fc56e5f 100755
--- a/build.py
+++ b/build.py
@@ -35,11 +35,16 @@
 #   msg/js/<LANG>.js for every language <LANG> defined in msg/js/<LANG>.json.
 
 import sys
-if sys.version_info[0] != 2:
-  raise Exception("Blockly build only compatible with Python 2.x.\n"
-                  "You are using: " + sys.version)
 
-import errno, glob, httplib, json, os, re, subprocess, threading, urllib
+import errno, glob, json, os, re, subprocess, threading, codecs, functools
+
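+# Handle the module renames between Python 2 and Python 3.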
+if sys.version_info[0] == 2:
+  import httplib
+  from urllib import urlencode
+else:
+  import http.client as httplib
+  from urllib.parse import urlencode
+  from importlib import reload
 
 REMOTE_COMPILER = "remote"
 
@@ -194,7 +199,7 @@ if (isNodeJS) {
 
     key_whitelist = self.closure_env.keys()
 
-    keys_pipe_separated = reduce(lambda accum, key: accum + "|" + key, key_whitelist)
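+    # reduce() is no longer a builtin in Python 3; use functools.reduce.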
+    keys_pipe_separated = functools.reduce(lambda accum, key: accum + "|" + key, key_whitelist)
     begin_brace = re.compile(r"\{(?!%s)" % (keys_pipe_separated,))
 
     end_brace = re.compile(r"\}")
@@ -336,7 +341,7 @@ class Gen_compressed(threading.Thread):
       return dict(
         compiledCode=stdout,
         statistics=dict(
-          originalSize=reduce(lambda v, size: v + size, filesizes, 0),
+          originalSize=functools.reduce(lambda v, size: v + size, filesizes, 0),
           compressedSize=len(stdout),
         )
       )
@@ -373,9 +378,10 @@ class Gen_compressed(threading.Thread):
 
       headers = {"Content-type": "application/x-www-form-urlencoded"}
       conn = httplib.HTTPSConnection("closure-compiler.appspot.com")
-      conn.request("POST", "/compile", urllib.urlencode(remoteParams), headers)
+      conn.request("POST", "/compile", urlencode(remoteParams), headers)
       response = conn.getresponse()
-      json_str = response.read()
+      # response.read() returns bytes under Python 3; decode to str before parsing the JSON.
+      json_str = response.read().decode("utf-8")
       conn.close()
 
       # Parse the JSON response.
@@ -388,12 +394,12 @@ class Gen_compressed(threading.Thread):
       n = int(name[6:]) - 1
       return filenames[n]
 
-    if json_data.has_key("serverErrors"):
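+    # dict.has_key() was removed in Python 3; use the "in" operator instead.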
+    if "serverErrors" in json_data:
       errors = json_data["serverErrors"]
       for error in errors:
         print("SERVER ERROR: %s" % target_filename)
         print(error["error"])
-    elif json_data.has_key("errors"):
+    elif "errors" in json_data:
       errors = json_data["errors"]
       for error in errors:
         print("FATAL ERROR")
@@ -405,7 +411,7 @@ class Gen_compressed(threading.Thread):
           print((" " * error["charno"]) + "^")
         sys.exit(1)
     else:
-      if json_data.has_key("warnings"):
+      if "warnings" in json_data:
         warnings = json_data["warnings"]
         for warning in warnings:
           print("WARNING")
@@ -422,11 +428,11 @@ class Gen_compressed(threading.Thread):
     return False
 
   def write_output(self, target_filename, remove, json_data):
-      if not json_data.has_key("compiledCode"):
+      if "compiledCode" not in json_data:
         print("FATAL ERROR: Compiler did not return compiledCode.")
         sys.exit(1)
 
-      code = HEADER + "\n" + json_data["compiledCode"]
+      compiled_code = json_data["compiledCode"]
+      # Local compilation yields bytes; the remote compiler yields str.
+      if isinstance(compiled_code, bytes):
+        compiled_code = compiled_code.decode("utf-8")
+      code = HEADER + "\n" + compiled_code
       code = code.replace(remove, "")
 
       # Trim down Google's (and only Google's) Apache licences.
@@ -500,7 +506,7 @@ class Gen_langfiles(threading.Thread):
           # If a destination file was missing, rebuild.
           return True
       else:
-        print("Error checking file creation times: " + e)
+        print("Error checking file creation times: " + str(e))
 
   def run(self):
     # The files msg/json/{en,qqq,synonyms}.json depend on msg/messages.js.
@@ -573,7 +579,7 @@ if __name__ == "__main__":
     test_args = [closure_compiler, os.path.join("build", "test_input.js")]
     test_proc = subprocess.Popen(test_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
     (stdout, _) = test_proc.communicate()
-    assert stdout == read(os.path.join("build", "test_expect.js"))
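+    # Popen.communicate() returns bytes under Python 3; decode before comparing.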
+    assert stdout.decode("utf-8") == read(os.path.join("build", "test_expect.js"))
 
     print("Using local compiler: %s ...\n" % CLOSURE_COMPILER_NPM)
   except (ImportError, AssertionError):
@@ -602,11 +608,11 @@ if __name__ == "__main__":
   developers.google.com/blockly/guides/modify/web/closure""")
       sys.exit(1)
 
-  search_paths = calcdeps.ExpandDirectories(
-      ["core", os.path.join(closure_root, closure_library)])
+  search_paths = list(calcdeps.ExpandDirectories(
+      ["core", os.path.join(closure_root, closure_library)]))
 
-  search_paths_horizontal = filter(exclude_vertical, search_paths)
-  search_paths_vertical = filter(exclude_horizontal, search_paths)
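+  # filter() returns a lazy iterator under Python 3; convert the results to lists.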
+  search_paths_horizontal = list(filter(exclude_vertical, search_paths))
+  search_paths_vertical = list(filter(exclude_horizontal, search_paths))
 
   closure_env = {
     "closure_dir": closure_dir,
diff --git a/i18n/common.py b/i18n/common.py
index 90e584e1..2323cea0 100644
--- a/i18n/common.py
+++ b/i18n/common.py
@@ -59,7 +59,7 @@ def read_json_file(filename):
     if '@metadata' in defs:
       del defs['@metadata']
     return defs
-  except ValueError, e:
+  except ValueError as e:
     print('Error reading ' + filename)
     raise InputError(filename, str(e))
 
@@ -85,7 +85,7 @@ def _create_qqq_file(output_dir):
     """
     qqq_file_name = os.path.join(os.curdir, output_dir, 'qqq.json')
     qqq_file = codecs.open(qqq_file_name, 'w', 'utf-8')
-    print 'Created file: ' + qqq_file_name
+    print('Created file: ' + qqq_file_name)
     qqq_file.write('{\n')
     return qqq_file
 
@@ -126,7 +126,7 @@ def _create_lang_file(author, lang, output_dir):
     """
     lang_file_name = os.path.join(os.curdir, output_dir, lang + '.json')
     lang_file = codecs.open(lang_file_name, 'w', 'utf-8')
-    print 'Created file: ' + lang_file_name
+    print('Created file: ' + lang_file_name)
     # string.format doesn't like printing braces, so break up our writes.
     lang_file.write('{\n\t"@metadata": {')
     lang_file.write("""
@@ -166,7 +166,7 @@ def _create_key_file(output_dir):
     key_file_name = os.path.join(os.curdir, output_dir, 'keys.json')
     key_file = open(key_file_name, 'w')
     key_file.write('{\n')
-    print 'Created file: ' + key_file_name
+    print('Created file: ' + key_file_name)
     return key_file
 
 
diff --git a/i18n/create_messages.py b/i18n/create_messages.py
index dc2620a3..2d37f25c 100755
--- a/i18n/create_messages.py
+++ b/i18n/create_messages.py
@@ -29,11 +29,8 @@ _NEWLINE_PATTERN = re.compile('[\n\r]')
 
 
 def string_is_ascii(s):
-  try:
-    s.decode('ascii')
-    return True
-  except UnicodeEncodeError:
-    return False
+  # str has no decode() method in Python 3; check the code points directly.
+  return all(ord(c) < 128 for c in s)
 
 def load_constants(filename):
   """Read in constants file, which must be output in every language."""
@@ -81,14 +78,14 @@ def main():
       print('ERROR: definition of {0} in {1} contained a newline character.'.
             format(key, args.source_lang_file))
       sys.exit(1)
-  sorted_keys = source_defs.keys()
-  sorted_keys.sort()
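+  # dict.keys() has no sort() method in Python 3; use sorted() instead.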
+  sorted_keys = sorted(source_defs.keys())
 
   # Read in synonyms file, which must be output in every language.
   synonym_defs = read_json_file(os.path.join(
       os.curdir, args.source_synonym_file))
+  # Sort the synonym keys as well so the generated output is deterministic.
   synonym_text = '\n'.join(['Blockly.Msg.{0} = Blockly.Msg.{1};'.format(
-      key, synonym_defs[key]) for key in synonym_defs])
+      key, synonym_defs[key]) for key in sorted(synonym_defs)])
 
   # Read in constants file, which must be output in every language.
   constants_text = load_constants(os.path.join(os.curdir, args.source_constants_file))
diff --git a/i18n/dedup_json.py b/i18n/dedup_json.py
index 30e572dd..a27df50f 100755
--- a/i18n/dedup_json.py
+++ b/i18n/dedup_json.py
@@ -51,9 +51,9 @@ def main():
     try:
       with codecs.open(filename, 'r', 'utf-8') as infile:
         j = json.load(infile)
-    except ValueError, e:
+    except ValueError as e:
       print('Error reading ' + filename)
-      raise InputError(file, str(e))
+      raise InputError(filename, str(e))
 
     # Built up output strings as an array to make output of delimiters easier.
     output = []
diff --git a/i18n/json_to_js.py b/i18n/json_to_js.py
index f8c20f6a..bf3fb38d 100755
--- a/i18n/json_to_js.py
+++ b/i18n/json_to_js.py
@@ -100,7 +100,7 @@ def _process_file(path_to_json, target_lang, key_dict):
         if key != '@metadata':
             try:
                 identifier = key_dict[key]
-            except KeyError, e:
+            except KeyError as e:
                 print('Key "%s" is in %s but not in %s' %
                       (key, keyfile, args.key_file))
                 raise e
diff --git a/i18n/tests.py b/i18n/tests.py
index 7e4fc49a..2de6fef6 100644
--- a/i18n/tests.py
+++ b/i18n/tests.py
@@ -37,7 +37,7 @@ class TestSequenceFunctions(unittest.TestCase):
                  u'block of actions.']
     for sentence in sentences:
       output = common.insert_breaks(sentence, 30, 50)
-      self.assert_(contains_all_chars(sentence, output),
+      self.assertTrue(contains_all_chars(sentence, output),
                    u'Mismatch between:\n{0}\n{1}'.format(
                        re.sub(spaces, '', sentence),
                        re.sub(spaces, '', output)))
diff --git a/i18n/xliff_to_json.py b/i18n/xliff_to_json.py
index b38b4d6e..c95d8336 100755
--- a/i18n/xliff_to_json.py
+++ b/i18n/xliff_to_json.py
@@ -65,7 +65,7 @@ def _parse_trans_unit(trans_unit):
     try:
         result['source'] = get_value('source')
         result['target'] = get_value('target')
-    except InputError, e:
+    except InputError as e:
         raise InputError(key, e.msg)
 
     # Get notes, using the from value as key and the data as value.
@@ -112,8 +112,8 @@ def _process_file(filename):
         except IOError:
             # Don't get caught by below handler
             raise
-        except Exception, e:
-            print
+        except Exception as e:
+            print()
             raise InputError(filename, str(e))
 
         # Make sure needed fields are present and non-empty.
@@ -146,8 +146,8 @@ def _process_file(filename):
               results.append(unit)
 
         return results
-    except IOError, e:
-        print 'Error with file {0}: {1}'.format(filename, e.strerror)
+    except IOError as e:
+        print('Error with file {0}: {1}'.format(filename, e.strerror))
         sys.exit(1)