Diffstat (limited to 'methods.py')
-rwxr-xr-x | methods.py | 966
1 file changed, 483 insertions, 483 deletions
diff --git a/methods.py b/methods.py index 0ede914aec..38dee11a35 100755 --- a/methods.py +++ b/methods.py @@ -1,231 +1,231 @@ import os -def add_source_files(self, sources, filetype, lib_env = None, shared = False): +def add_source_files(self, sources, filetype, lib_env=None, shared=False): import glob; import string; - #if not lib_objects: + # if not lib_objects: if not lib_env: lib_env = self if type(filetype) == type(""): dir = self.Dir('.').abspath - list = glob.glob(dir + "/"+filetype) + list = glob.glob(dir + "/" + filetype) for f in list: - sources.append( self.Object(f) ) + sources.append(self.Object(f)) else: for f in filetype: sources.append(self.Object(f)) -def build_shader_header( target, source, env ): +def build_shader_header(target, source, env): for x in source: print x name = str(x) - name = name[ name.rfind("/")+1: ] - name = name[ name.rfind("\\")+1: ] - name = name.replace(".","_") + name = name[name.rfind("/") + 1:] + name = name[name.rfind("\\") + 1:] + name = name.replace(".", "_") - fs = open(str(x),"r") - fd = open(str(x)+".h","w") + fs = open(str(x), "r") + fd = open(str(x) + ".h", "w") fd.write("/* this file has been generated by SCons, do not edit! */\n") - fd.write("static const char *"+name+"=\n") - line=fs.readline() + fd.write("static const char *" + name + "=\n") + line = fs.readline() while(line): - line=line.replace("\r","") - line=line.replace("\n","") - line=line.replace("\\","\\\\") - line=line.replace("\"","\\\"") - fd.write("\""+line+"\\n\"\n") - line=fs.readline() + line = line.replace("\r", "") + line = line.replace("\n", "") + line = line.replace("\\", "\\\\") + line = line.replace("\"", "\\\"") + fd.write("\"" + line + "\\n\"\n") + line = fs.readline() fd.write(";\n") return 0 -def build_glsl_header( filename ): +def build_glsl_header(filename): - fs = open(filename,"r") - line=fs.readline() + fs = open(filename, "r") + line = fs.readline() - vertex_lines=[] - fragment_lines=[] - uniforms=[] - attributes=[] - fbos=[] - conditionals=[] - texunits=[] - texunit_names=[] - ubos=[] - ubo_names=[] + vertex_lines = [] + fragment_lines = [] + uniforms = [] + attributes = [] + fbos = [] + conditionals = [] + texunits = [] + texunit_names = [] + ubos = [] + ubo_names = [] - reading="" - line_offset=0 - vertex_offset=0 - fragment_offset=0 + reading = "" + line_offset = 0 + vertex_offset = 0 + fragment_offset = 0 while(line): - if (line.find("[vertex]")!=-1): - reading="vertex" - line=fs.readline() - line_offset+=1 - vertex_offset=line_offset + if (line.find("[vertex]") != -1): + reading = "vertex" + line = fs.readline() + line_offset += 1 + vertex_offset = line_offset continue - if (line.find("[fragment]")!=-1): - reading="fragment" - line=fs.readline() - line_offset+=1 - fragment_offset=line_offset + if (line.find("[fragment]") != -1): + reading = "fragment" + line = fs.readline() + line_offset += 1 + fragment_offset = line_offset continue - if (line.find("#ifdef ")!=-1): - ifdefline = line.replace("#ifdef ","").strip() + if (line.find("#ifdef ") != -1): + ifdefline = line.replace("#ifdef ", "").strip() if (not ifdefline in conditionals): - conditionals+=[ifdefline] + conditionals += [ifdefline] - if (line.find("#elif defined(")!=-1): - ifdefline = line.replace("#elif defined(","").strip() - ifdefline = ifdefline.replace(")","").strip() + if (line.find("#elif defined(") != -1): + ifdefline = line.replace("#elif defined(", "").strip() + ifdefline = ifdefline.replace(")", "").strip() if (not ifdefline in conditionals): - conditionals+=[ifdefline] + 
conditionals += [ifdefline] import re if re.search(r"^\s*uniform", line): - if (line.lower().find("texunit:")!=-1): - #texture unit - texunit = str(int( line[line.find(":")+1:].strip() )) - uline=line[:line.lower().find("//")] - uline = uline.replace("uniform",""); - uline = uline.replace(";",""); + if (line.lower().find("texunit:") != -1): + # texture unit + texunit = str(int(line[line.find(":") + 1:].strip())) + uline = line[:line.lower().find("//")] + uline = uline.replace("uniform", ""); + uline = uline.replace(";", ""); lines = uline.split(",") for x in lines: x = x.strip() - x = x[ x.rfind(" ")+1: ] - if (x.find("[")!=-1): - #unfiorm array - x = x[ :x.find("[") ] + x = x[x.rfind(" ") + 1:] + if (x.find("[") != -1): + # unfiorm array + x = x[:x.find("[")] if (not x in texunit_names): - texunits+=[(x,texunit)] - texunit_names+=[x] - - elif (line.lower().find("ubo:")!=-1): - #ubo - uboidx = str(int( line[line.find(":")+1:].strip() )) - uline=line[:line.lower().find("//")] - uline = uline[uline.find("uniform")+len("uniform"):]; - uline = uline.replace(";",""); - uline = uline.replace("{",""); + texunits += [(x, texunit)] + texunit_names += [x] + + elif (line.lower().find("ubo:") != -1): + # ubo + uboidx = str(int(line[line.find(":") + 1:].strip())) + uline = line[:line.lower().find("//")] + uline = uline[uline.find("uniform") + len("uniform"):]; + uline = uline.replace(";", ""); + uline = uline.replace("{", ""); lines = uline.split(",") for x in lines: x = x.strip() - x = x[ x.rfind(" ")+1: ] - if (x.find("[")!=-1): - #unfiorm array - x = x[ :x.find("[") ] + x = x[x.rfind(" ") + 1:] + if (x.find("[") != -1): + # unfiorm array + x = x[:x.find("[")] if (not x in ubo_names): - ubos+=[(x,uboidx)] - ubo_names+=[x] + ubos += [(x, uboidx)] + ubo_names += [x] else: - uline = line.replace("uniform",""); - uline = uline.replace(";",""); + uline = line.replace("uniform", ""); + uline = uline.replace(";", ""); lines = uline.split(",") for x in lines: x = x.strip() - x = x[ x.rfind(" ")+1: ] - if (x.find("[")!=-1): - #unfiorm array - x = x[ :x.find("[") ] + x = x[x.rfind(" ") + 1:] + if (x.find("[") != -1): + # unfiorm array + x = x[:x.find("[")] if (not x in uniforms): - uniforms+=[x] + uniforms += [x] - if ((line.strip().find("in ")==0 or line.strip().find("attribute ")==0) and line.find("attrib:")!=-1): - uline = line.replace("in ",""); - uline = uline.replace("attribute ",""); - uline = uline.replace(";",""); - uline = uline[ uline.find(" "): ].strip() + if ((line.strip().find("in ") == 0 or line.strip().find("attribute ") == 0) and line.find("attrib:") != -1): + uline = line.replace("in ", ""); + uline = uline.replace("attribute ", ""); + uline = uline.replace(";", ""); + uline = uline[uline.find(" "):].strip() - if (uline.find("//")!=-1): - name,bind = uline.split("//") - if (bind.find("attrib:")!=-1): - name=name.strip() - bind=bind.replace("attrib:","").strip() - attributes+=[(name,bind)] + if (uline.find("//") != -1): + name, bind = uline.split("//") + if (bind.find("attrib:") != -1): + name = name.strip() + bind = bind.replace("attrib:", "").strip() + attributes += [(name, bind)] - if (line.strip().find("out ")==0): - uline = line.replace("out","").strip(); - uline = uline.replace(";",""); - uline = uline[ uline.find(" "): ].strip() + if (line.strip().find("out ") == 0): + uline = line.replace("out", "").strip(); + uline = uline.replace(";", ""); + uline = uline[uline.find(" "):].strip() - if (uline.find("//")!=-1): - name,bind = uline.split("//") - if (bind.find("drawbuffer:")!=-1): - 
name=name.strip() - bind=bind.replace("drawbuffer:","").strip() - fbos+=[(name,bind)] + if (uline.find("//") != -1): + name, bind = uline.split("//") + if (bind.find("drawbuffer:") != -1): + name = name.strip() + bind = bind.replace("drawbuffer:", "").strip() + fbos += [(name, bind)] - line=line.replace("\r","") - line=line.replace("\n","") - line=line.replace("\\","\\\\") - line=line.replace("\"","\\\"") - #line=line+"\\n\\" no need to anymore + line = line.replace("\r", "") + line = line.replace("\n", "") + line = line.replace("\\", "\\\\") + line = line.replace("\"", "\\\"") + # line=line+"\\n\\" no need to anymore - if (reading=="vertex"): - vertex_lines+=[line] - if (reading=="fragment"): - fragment_lines+=[line] + if (reading == "vertex"): + vertex_lines += [line] + if (reading == "fragment"): + fragment_lines += [line] - line=fs.readline() - line_offset+=1 + line = fs.readline() + line_offset += 1 fs.close(); - out_file = filename+".h" - fd = open(out_file,"w") + out_file = filename + ".h" + fd = open(out_file, "w") fd.write("/* WARNING, THIS FILE WAS GENERATED, DO NOT EDIT */\n"); out_file_base = out_file - out_file_base = out_file_base[ out_file_base.rfind("/")+1: ] - out_file_base = out_file_base[ out_file_base.rfind("\\")+1: ] + out_file_base = out_file_base[out_file_base.rfind("/") + 1:] + out_file_base = out_file_base[out_file_base.rfind("\\") + 1:] # print("out file "+out_file+" base " +out_file_base) - out_file_ifdef = out_file_base.replace(".","_").upper() - fd.write("#ifndef "+out_file_ifdef+"\n") - fd.write("#define "+out_file_ifdef+"\n") + out_file_ifdef = out_file_base.replace(".", "_").upper() + fd.write("#ifndef " + out_file_ifdef + "\n") + fd.write("#define " + out_file_ifdef + "\n") - out_file_class = out_file_base.replace(".glsl.h","").title().replace("_","").replace(".","")+"ShaderGL"; + out_file_class = out_file_base.replace(".glsl.h", "").title().replace("_", "").replace(".", "") + "ShaderGL"; fd.write("\n\n"); fd.write("#include \"drivers/opengl/shader_gl.h\"\n\n\n"); - fd.write("class "+out_file_class+" : public ShaderGL {\n\n"); - fd.write("\t virtual String get_shader_name() const { return \""+out_file_class+"\"; }\n"); + fd.write("class " + out_file_class + " : public ShaderGL {\n\n"); + fd.write("\t virtual String get_shader_name() const { return \"" + out_file_class + "\"; }\n"); fd.write("public:\n\n"); if (len(conditionals)): fd.write("\tenum Conditionals {\n"); for x in conditionals: - fd.write("\t\t"+x+",\n"); + fd.write("\t\t" + x + ",\n"); fd.write("\t};\n\n"); if (len(uniforms)): fd.write("\tenum Uniforms {\n"); for x in uniforms: - fd.write("\t\t"+x.upper()+",\n"); + fd.write("\t\t" + x.upper() + ",\n"); fd.write("\t};\n\n"); fd.write("\t_FORCE_INLINE_ int get_uniform(Uniforms p_uniform) const { return _get_uniform(p_uniform); }\n\n"); @@ -337,7 +337,7 @@ def build_glsl_header( filename ): fd.write("\t\tstatic const char* _conditional_strings[]={\n") if (len(conditionals)): for x in conditionals: - fd.write("\t\t\t\"#define "+x+"\\n\",\n"); + fd.write("\t\t\t\"#define " + x + "\\n\",\n"); fd.write("\t\t};\n\n"); else: fd.write("\t\tstatic const char **_conditional_strings=NULL;\n") @@ -347,7 +347,7 @@ def build_glsl_header( filename ): fd.write("\t\tstatic const char* _uniform_strings[]={\n") if (len(uniforms)): for x in uniforms: - fd.write("\t\t\t\""+x+"\",\n"); + fd.write("\t\t\t\"" + x + "\",\n"); fd.write("\t\t};\n\n"); else: fd.write("\t\tstatic const char **_uniform_strings=NULL;\n") @@ -356,7 +356,7 @@ def build_glsl_header( filename ): 
fd.write("\t\tstatic AttributePair _attribute_pairs[]={\n") for x in attributes: - fd.write("\t\t\t{\""+x[0]+"\","+x[1]+"},\n"); + fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n"); fd.write("\t\t};\n\n"); else: fd.write("\t\tstatic AttributePair *_attribute_pairs=NULL;\n") @@ -366,7 +366,7 @@ def build_glsl_header( filename ): if (len(fbos)): fd.write("\t\tstatic FBOPair _fbo_pairs[]={\n") for x in fbos: - fd.write("\t\t\t{\""+x[0]+"\","+x[1]+"},\n"); + fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n"); fd.write("\t\t};\n\n"); else: fd.write("\t\tstatic FBOPair *_fbo_pairs=NULL;\n") @@ -374,7 +374,7 @@ def build_glsl_header( filename ): if (len(ubos)): fd.write("\t\tstatic UBOPair _ubo_pairs[]={\n") for x in ubos: - fd.write("\t\t\t{\""+x[0]+"\","+x[1]+"},\n"); + fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n"); fd.write("\t\t};\n\n"); else: fd.write("\t\tstatic UBOPair *_ubo_pairs=NULL;\n") @@ -382,27 +382,27 @@ def build_glsl_header( filename ): if (len(texunits)): fd.write("\t\tstatic TexUnitPair _texunit_pairs[]={\n") for x in texunits: - fd.write("\t\t\t{\""+x[0]+"\","+x[1]+"},\n"); + fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n"); fd.write("\t\t};\n\n"); else: fd.write("\t\tstatic TexUnitPair *_texunit_pairs=NULL;\n") fd.write("\t\tstatic const char* _vertex_code=\"\\\n") for x in vertex_lines: - fd.write("\t\t\t"+x+"\n"); + fd.write("\t\t\t" + x + "\n"); fd.write("\t\t\";\n\n"); - fd.write("\t\tstatic const int _vertex_code_start="+str(vertex_offset)+";\n") + fd.write("\t\tstatic const int _vertex_code_start=" + str(vertex_offset) + ";\n") fd.write("\t\tstatic const char* _fragment_code=\"\\\n") for x in fragment_lines: - fd.write("\t\t\t"+x+"\n"); + fd.write("\t\t\t" + x + "\n"); fd.write("\t\t\";\n\n"); - fd.write("\t\tstatic const int _fragment_code_start="+str(fragment_offset)+";\n") + fd.write("\t\tstatic const int _fragment_code_start=" + str(fragment_offset) + ";\n") - fd.write("\t\tsetup(_conditional_strings,"+str(len(conditionals))+",_uniform_strings,"+str(len(uniforms))+",_attribute_pairs,"+str(len(attributes))+",_fbo_pairs,"+str(len(fbos))+",_ubo_pairs,"+str(len(ubos))+",_texunit_pairs,"+str(len(texunits))+",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") + fd.write("\t\tsetup(_conditional_strings," + str(len(conditionals)) + ",_uniform_strings," + str(len(uniforms)) + ",_attribute_pairs," + str(len(attributes)) + ",_fbo_pairs," + str(len(fbos)) + ",_ubo_pairs," + str(len(ubos)) + ",_texunit_pairs," + str(len(texunits)) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") fd.write("\t};\n\n") fd.write("};\n\n"); @@ -410,7 +410,7 @@ def build_glsl_header( filename ): fd.close(); -def build_glsl_headers( target, source, env ): +def build_glsl_headers(target, source, env): for x in source: @@ -421,110 +421,110 @@ def build_glsl_headers( target, source, env ): -def build_hlsl_dx9_header( filename ): +def build_hlsl_dx9_header(filename): - fs = open(filename,"r") - line=fs.readline() + fs = open(filename, "r") + line = fs.readline() - vertex_lines=[] - fragment_lines=[] - uniforms=[] - fragment_uniforms=[] - attributes=[] - fbos=[] - conditionals=[] + vertex_lines = [] + fragment_lines = [] + uniforms = [] + fragment_uniforms = [] + attributes = [] + fbos = [] + conditionals = [] - reading="" - line_offset=0 - vertex_offset=0 - fragment_offset=0 + reading = "" + line_offset = 0 + vertex_offset = 0 + fragment_offset = 0 while(line): - if (line.find("[vertex]")!=-1): - reading="vertex" - line=fs.readline() - 
line_offset+=1 - vertex_offset=line_offset + if (line.find("[vertex]") != -1): + reading = "vertex" + line = fs.readline() + line_offset += 1 + vertex_offset = line_offset continue - if (line.find("[fragment]")!=-1): - reading="fragment" - line=fs.readline() - line_offset+=1 - fragment_offset=line_offset + if (line.find("[fragment]") != -1): + reading = "fragment" + line = fs.readline() + line_offset += 1 + fragment_offset = line_offset continue - if (line.find("#ifdef ")!=-1): - ifdefline = line.replace("#ifdef ","").strip() + if (line.find("#ifdef ") != -1): + ifdefline = line.replace("#ifdef ", "").strip() if (not ifdefline in conditionals): - conditionals+=[ifdefline] + conditionals += [ifdefline] - if (line.find("#elif defined(")!=-1): - ifdefline = line.replace("#elif defined(","").strip() - ifdefline = ifdefline.replace(")","").strip() + if (line.find("#elif defined(") != -1): + ifdefline = line.replace("#elif defined(", "").strip() + ifdefline = ifdefline.replace(")", "").strip() if (not ifdefline in conditionals): - conditionals+=[ifdefline] - if (line.find("uniform")!=-1): - uline = line.replace("uniform",""); - uline = uline.replace(";",""); + conditionals += [ifdefline] + if (line.find("uniform") != -1): + uline = line.replace("uniform", ""); + uline = uline.replace(";", ""); lines = uline.split(",") for x in lines: x = x.strip() - x = x[ x.rfind(" ")+1: ] - if (x.find("[")!=-1): - #unfiorm array - x = x[ :x.find("[") ] + x = x[x.rfind(" ") + 1:] + if (x.find("[") != -1): + # unfiorm array + x = x[:x.find("[")] if (not x in uniforms): - uniforms+=[x] - fragment_uniforms+=[reading=="fragment"] - line=line.replace("\r","") - line=line.replace("\n","") - line=line.replace("\\","\\\\") - line=line.replace("\"","\\\"") - line=line+"\\n\\" - - if (reading=="vertex"): - vertex_lines+=[line] - if (reading=="fragment"): - fragment_lines+=[line] - - line=fs.readline() - line_offset+=1 + uniforms += [x] + fragment_uniforms += [reading == "fragment"] + line = line.replace("\r", "") + line = line.replace("\n", "") + line = line.replace("\\", "\\\\") + line = line.replace("\"", "\\\"") + line = line + "\\n\\" + + if (reading == "vertex"): + vertex_lines += [line] + if (reading == "fragment"): + fragment_lines += [line] + + line = fs.readline() + line_offset += 1 fs.close(); - out_file = filename+".h" - fd = open(out_file,"w") + out_file = filename + ".h" + fd = open(out_file, "w") fd.write("/* WARNING, THIS FILE WAS GENERATED, DO NOT EDIT */\n"); out_file_base = out_file - out_file_base = out_file_base[ out_file_base.rfind("/")+1: ] - out_file_base = out_file_base[ out_file_base.rfind("\\")+1: ] + out_file_base = out_file_base[out_file_base.rfind("/") + 1:] + out_file_base = out_file_base[out_file_base.rfind("\\") + 1:] # print("out file "+out_file+" base " +out_file_base) - out_file_ifdef = out_file_base.replace(".","_").upper() - fd.write("#ifndef "+out_file_ifdef+"\n") - fd.write("#define "+out_file_ifdef+"\n") + out_file_ifdef = out_file_base.replace(".", "_").upper() + fd.write("#ifndef " + out_file_ifdef + "\n") + fd.write("#define " + out_file_ifdef + "\n") - out_file_class = out_file_base.replace(".hlsl.h","").title().replace("_","").replace(".","")+"ShaderDX9"; + out_file_class = out_file_base.replace(".hlsl.h", "").title().replace("_", "").replace(".", "") + "ShaderDX9"; fd.write("\n\n"); fd.write("#include \"drivers/directx9/shader_dx9.h\"\n\n\n"); - fd.write("class "+out_file_class+" : public ShaderDX9 {\n\n"); - fd.write("\t virtual String get_shader_name() const { return 
\""+out_file_class+"\"; }\n"); + fd.write("class " + out_file_class + " : public ShaderDX9 {\n\n"); + fd.write("\t virtual String get_shader_name() const { return \"" + out_file_class + "\"; }\n"); fd.write("public:\n\n"); if (len(conditionals)): fd.write("\tenum Conditionals {\n"); for x in conditionals: - fd.write("\t\t"+x+",\n"); + fd.write("\t\t" + x + ",\n"); fd.write("\t};\n\n"); if (len(uniforms)): fd.write("\tenum Uniforms {\n"); for x in uniforms: - fd.write("\t\t"+x.upper()+",\n"); + fd.write("\t\t" + x.upper() + ",\n"); fd.write("\t};\n\n"); @@ -604,7 +604,7 @@ def build_hlsl_dx9_header( filename ): fd.write("\t\tstatic const char* _conditional_strings[]={\n") if (len(conditionals)): for x in conditionals: - fd.write("\t\t\t\""+x+"\",\n"); + fd.write("\t\t\t\"" + x + "\",\n"); fd.write("\t\t};\n\n"); else: fd.write("\t\tstatic const char **_conditional_strings=NULL;\n") @@ -614,7 +614,7 @@ def build_hlsl_dx9_header( filename ): fd.write("\t\tstatic const char* _uniform_strings[]={\n") if (len(uniforms)): for x in uniforms: - fd.write("\t\t\t\""+x+"\",\n"); + fd.write("\t\t\t\"" + x + "\",\n"); fd.write("\t\t};\n\n"); fd.write("\t\tstatic const bool _fragment_uniforms[]={\n") @@ -633,20 +633,20 @@ def build_hlsl_dx9_header( filename ): fd.write("\t\tstatic const char* _vertex_code=\"\\\n") for x in vertex_lines: - fd.write("\t\t\t"+x+"\n"); + fd.write("\t\t\t" + x + "\n"); fd.write("\t\t\";\n\n"); - fd.write("\t\tstatic const int _vertex_code_start="+str(vertex_offset)+";\n") + fd.write("\t\tstatic const int _vertex_code_start=" + str(vertex_offset) + ";\n") fd.write("\t\tstatic const char* _fragment_code=\"\\\n") for x in fragment_lines: - fd.write("\t\t\t"+x+"\n"); + fd.write("\t\t\t" + x + "\n"); fd.write("\t\t\";\n\n"); - fd.write("\t\tstatic const int _fragment_code_start="+str(fragment_offset)+";\n") + fd.write("\t\tstatic const int _fragment_code_start=" + str(fragment_offset) + ";\n") - fd.write("\t\tsetup(p_device,p_version,_conditional_strings,"+str(len(conditionals))+",_uniform_strings,"+str(len(uniforms))+",_fragment_uniforms,_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") + fd.write("\t\tsetup(p_device,p_version,_conditional_strings," + str(len(conditionals)) + ",_uniform_strings," + str(len(uniforms)) + ",_fragment_uniforms,_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") fd.write("\t};\n\n") fd.write("};\n\n"); @@ -654,7 +654,7 @@ def build_hlsl_dx9_header( filename ): fd.close(); -def build_hlsl_dx9_headers( target, source, env ): +def build_hlsl_dx9_headers(target, source, env): for x in source: @@ -666,154 +666,154 @@ def build_hlsl_dx9_headers( target, source, env ): class LegacyGLHeaderStruct: def __init__(self): - self.vertex_lines=[] - self.fragment_lines=[] - self.uniforms=[] - self.attributes=[] - self.fbos=[] - self.conditionals=[] - self.enums={} - self.texunits=[] - self.texunit_names=[] - self.ubos=[] - self.ubo_names=[] - - self.vertex_included_files=[] - self.fragment_included_files=[] - - self.reading="" - self.line_offset=0 - self.vertex_offset=0 - self.fragment_offset=0 - -def include_file_in_legacygl_header( filename, header_data, depth ): - fs = open(filename,"r") - line=fs.readline() + self.vertex_lines = [] + self.fragment_lines = [] + self.uniforms = [] + self.attributes = [] + self.fbos = [] + self.conditionals = [] + self.enums = {} + self.texunits = [] + self.texunit_names = [] + self.ubos = [] + self.ubo_names = [] + + self.vertex_included_files = [] + self.fragment_included_files = [] + 
+ self.reading = "" + self.line_offset = 0 + self.vertex_offset = 0 + self.fragment_offset = 0 + +def include_file_in_legacygl_header(filename, header_data, depth): + fs = open(filename, "r") + line = fs.readline() while(line): - if (line.find("[vertex]")!=-1): - header_data.reading="vertex" - line=fs.readline() - header_data.line_offset+=1 - header_data.vertex_offset=header_data.line_offset + if (line.find("[vertex]") != -1): + header_data.reading = "vertex" + line = fs.readline() + header_data.line_offset += 1 + header_data.vertex_offset = header_data.line_offset continue - if (line.find("[fragment]")!=-1): - header_data.reading="fragment" - line=fs.readline() - header_data.line_offset+=1 - header_data.fragment_offset=header_data.line_offset + if (line.find("[fragment]") != -1): + header_data.reading = "fragment" + line = fs.readline() + header_data.line_offset += 1 + header_data.fragment_offset = header_data.line_offset continue - while(line.find("#include ")!=-1): - includeline = line.replace("#include ","").strip()[1:-1] + while(line.find("#include ") != -1): + includeline = line.replace("#include ", "").strip()[1:-1] import os.path included_file = os.path.relpath(os.path.dirname(filename) + "/" + includeline) - if (not included_file in header_data.vertex_included_files and header_data.reading=="vertex"): - header_data.vertex_included_files+=[included_file] - if(include_file_in_legacygl_header( included_file, header_data, depth + 1 ) == None): + if (not included_file in header_data.vertex_included_files and header_data.reading == "vertex"): + header_data.vertex_included_files += [included_file] + if(include_file_in_legacygl_header(included_file, header_data, depth + 1) == None): print "Error in file '" + filename + "': #include " + includeline + "could not be found!" - elif (not included_file in header_data.fragment_included_files and header_data.reading=="fragment"): - header_data.fragment_included_files+=[included_file] - if(include_file_in_legacygl_header( included_file, header_data, depth + 1 ) == None): + elif (not included_file in header_data.fragment_included_files and header_data.reading == "fragment"): + header_data.fragment_included_files += [included_file] + if(include_file_in_legacygl_header(included_file, header_data, depth + 1) == None): print "Error in file '" + filename + "': #include " + includeline + "could not be found!" 
- line=fs.readline() + line = fs.readline() - if (line.find("#ifdef ")!=-1 or line.find("#elif defined(")!=-1): - if (line.find("#ifdef ")!=-1): - ifdefline = line.replace("#ifdef ","").strip() + if (line.find("#ifdef ") != -1 or line.find("#elif defined(") != -1): + if (line.find("#ifdef ") != -1): + ifdefline = line.replace("#ifdef ", "").strip() else: - ifdefline = line.replace("#elif defined(","").strip() - ifdefline = ifdefline.replace(")","").strip() + ifdefline = line.replace("#elif defined(", "").strip() + ifdefline = ifdefline.replace(")", "").strip() - if (line.find("_EN_")!=-1): + if (line.find("_EN_") != -1): enumbase = ifdefline[:ifdefline.find("_EN_")]; - ifdefline = ifdefline.replace("_EN_","_") - line = line.replace("_EN_","_") + ifdefline = ifdefline.replace("_EN_", "_") + line = line.replace("_EN_", "_") # print(enumbase+":"+ifdefline); if (enumbase not in header_data.enums): - header_data.enums[enumbase]=[] + header_data.enums[enumbase] = [] if (ifdefline not in header_data.enums[enumbase]): header_data.enums[enumbase].append(ifdefline); elif (not ifdefline in header_data.conditionals): - header_data.conditionals+=[ifdefline] + header_data.conditionals += [ifdefline] - if (line.find("uniform")!=-1 and line.lower().find("texunit:")!=-1): - #texture unit - texunitstr = line[line.find(":")+1:].strip() - if (texunitstr=="auto"): - texunit="-1" + if (line.find("uniform") != -1 and line.lower().find("texunit:") != -1): + # texture unit + texunitstr = line[line.find(":") + 1:].strip() + if (texunitstr == "auto"): + texunit = "-1" else: - texunit = str(int(texunitstr )) - uline=line[:line.lower().find("//")] - uline = uline.replace("uniform",""); - uline = uline.replace("highp",""); - uline = uline.replace(";",""); + texunit = str(int(texunitstr)) + uline = line[:line.lower().find("//")] + uline = uline.replace("uniform", ""); + uline = uline.replace("highp", ""); + uline = uline.replace(";", ""); lines = uline.split(",") for x in lines: x = x.strip() - x = x[ x.rfind(" ")+1: ] - if (x.find("[")!=-1): - #unfiorm array - x = x[ :x.find("[") ] + x = x[x.rfind(" ") + 1:] + if (x.find("[") != -1): + # unfiorm array + x = x[:x.find("[")] if (not x in header_data.texunit_names): - header_data.texunits+=[(x,texunit)] - header_data.texunit_names+=[x] + header_data.texunits += [(x, texunit)] + header_data.texunit_names += [x] - elif (line.find("uniform")!=-1): - uline = line.replace("uniform",""); - uline = uline.replace(";",""); + elif (line.find("uniform") != -1): + uline = line.replace("uniform", ""); + uline = uline.replace(";", ""); lines = uline.split(",") for x in lines: x = x.strip() - x = x[ x.rfind(" ")+1: ] - if (x.find("[")!=-1): - #unfiorm array - x = x[ :x.find("[") ] + x = x[x.rfind(" ") + 1:] + if (x.find("[") != -1): + # unfiorm array + x = x[:x.find("[")] if (not x in header_data.uniforms): - header_data.uniforms+=[x] + header_data.uniforms += [x] - if ((line.strip().find("in ")==0 or line.strip().find("attribute ")==0) and line.find("attrib:")!=-1): - uline = line.replace("in ",""); - uline = uline.replace("attribute ",""); - uline = uline.replace("highp ",""); - uline = uline.replace(";",""); - uline = uline[ uline.find(" "): ].strip() + if ((line.strip().find("in ") == 0 or line.strip().find("attribute ") == 0) and line.find("attrib:") != -1): + uline = line.replace("in ", ""); + uline = uline.replace("attribute ", ""); + uline = uline.replace("highp ", ""); + uline = uline.replace(";", ""); + uline = uline[uline.find(" "):].strip() - if (uline.find("//")!=-1): - 
name,bind = uline.split("//") - if (bind.find("attrib:")!=-1): - name=name.strip() - bind=bind.replace("attrib:","").strip() - header_data.attributes+=[(name,bind)] + if (uline.find("//") != -1): + name, bind = uline.split("//") + if (bind.find("attrib:") != -1): + name = name.strip() + bind = bind.replace("attrib:", "").strip() + header_data.attributes += [(name, bind)] - line=line.replace("\r","") - line=line.replace("\n","") - #line=line.replace("\\","\\\\") - #line=line.replace("\"","\\\"") - #line=line+"\\n\\" + line = line.replace("\r", "") + line = line.replace("\n", "") + # line=line.replace("\\","\\\\") + # line=line.replace("\"","\\\"") + # line=line+"\\n\\" - if (header_data.reading=="vertex"): - header_data.vertex_lines+=[line] - if (header_data.reading=="fragment"): - header_data.fragment_lines+=[line] + if (header_data.reading == "vertex"): + header_data.vertex_lines += [line] + if (header_data.reading == "fragment"): + header_data.fragment_lines += [line] - line=fs.readline() - header_data.line_offset+=1 + line = fs.readline() + header_data.line_offset += 1 fs.close(); @@ -821,31 +821,31 @@ def include_file_in_legacygl_header( filename, header_data, depth ): -def build_legacygl_header( filename, include, class_suffix, output_attribs ): +def build_legacygl_header(filename, include, class_suffix, output_attribs): header_data = LegacyGLHeaderStruct() - include_file_in_legacygl_header( filename, header_data, 0 ) + include_file_in_legacygl_header(filename, header_data, 0) - out_file = filename+".h" - fd = open(out_file,"w") + out_file = filename + ".h" + fd = open(out_file, "w") - enum_constants=[] + enum_constants = [] fd.write("/* WARNING, THIS FILE WAS GENERATED, DO NOT EDIT */\n"); out_file_base = out_file - out_file_base = out_file_base[ out_file_base.rfind("/")+1: ] - out_file_base = out_file_base[ out_file_base.rfind("\\")+1: ] + out_file_base = out_file_base[out_file_base.rfind("/") + 1:] + out_file_base = out_file_base[out_file_base.rfind("\\") + 1:] # print("out file "+out_file+" base " +out_file_base) - out_file_ifdef = out_file_base.replace(".","_").upper() - fd.write("#ifndef "+out_file_ifdef+class_suffix+"_120\n") - fd.write("#define "+out_file_ifdef+class_suffix+"_120\n") + out_file_ifdef = out_file_base.replace(".", "_").upper() + fd.write("#ifndef " + out_file_ifdef + class_suffix + "_120\n") + fd.write("#define " + out_file_ifdef + class_suffix + "_120\n") - out_file_class = out_file_base.replace(".glsl.h","").title().replace("_","").replace(".","")+"Shader"+class_suffix; + out_file_class = out_file_base.replace(".glsl.h", "").title().replace("_", "").replace(".", "") + "Shader" + class_suffix; fd.write("\n\n"); fd.write("#include \"" + include + "\"\n\n\n"); - fd.write("class "+out_file_class+" : public Shader"+class_suffix+" {\n\n"); - fd.write("\t virtual String get_shader_name() const { return \""+out_file_class+"\"; }\n"); + fd.write("class " + out_file_class + " : public Shader" + class_suffix + " {\n\n"); + fd.write("\t virtual String get_shader_name() const { return \"" + out_file_class + "\"; }\n"); fd.write("public:\n\n"); @@ -853,14 +853,14 @@ def build_legacygl_header( filename, include, class_suffix, output_attribs ): if (len(header_data.conditionals)): fd.write("\tenum Conditionals {\n"); for x in header_data.conditionals: - fd.write("\t\t"+x.upper()+",\n"); + fd.write("\t\t" + x.upper() + ",\n"); fd.write("\t};\n\n"); if (len(header_data.uniforms)): fd.write("\tenum Uniforms {\n"); for x in header_data.uniforms: - fd.write("\t\t"+x.upper()+",\n"); 
+ fd.write("\t\t" + x.upper() + ",\n"); fd.write("\t};\n\n"); fd.write("\t_FORCE_INLINE_ int get_uniform(Uniforms p_uniform) const { return _get_uniform(p_uniform); }\n\n"); @@ -969,47 +969,47 @@ def build_legacygl_header( filename, include, class_suffix, output_attribs ): fd.write("\tvirtual void init() {\n\n"); - enum_value_count=0; + enum_value_count = 0; if (len(header_data.enums)): fd.write("\t\t//Written using math, given nonstandarity of 64 bits integer constants..\n"); fd.write("\t\tstatic const Enum _enums[]={\n") - bitofs=len(header_data.conditionals) - enum_vals=[] + bitofs = len(header_data.conditionals) + enum_vals = [] for xv in header_data.enums: - x=header_data.enums[xv] - bits=1 + x = header_data.enums[xv] + bits = 1 amt = len(x); # print(x) while(2**bits < amt): - bits+=1 + bits += 1 # print("amount: "+str(amt)+" bits "+str(bits)); - strs="{" + strs = "{" for i in range(amt): - strs+="\"#define "+x[i]+"\\n\"," + strs += "\"#define " + x[i] + "\\n\"," - v={} - v["set_mask"]="uint64_t("+str(i)+")<<"+str(bitofs) - v["clear_mask"]="((uint64_t(1)<<40)-1) ^ (((uint64_t(1)<<"+str(bits)+") - 1)<<"+str(bitofs)+")" + v = {} + v["set_mask"] = "uint64_t(" + str(i) + ")<<" + str(bitofs) + v["clear_mask"] = "((uint64_t(1)<<40)-1) ^ (((uint64_t(1)<<" + str(bits) + ") - 1)<<" + str(bitofs) + ")" enum_vals.append(v) enum_constants.append(x[i]) - strs+="NULL}" + strs += "NULL}" - fd.write("\t\t\t{(uint64_t(1<<"+str(bits)+")-1)<<"+str(bitofs)+","+str(bitofs)+","+strs+"},\n"); - bitofs+=bits + fd.write("\t\t\t{(uint64_t(1<<" + str(bits) + ")-1)<<" + str(bitofs) + "," + str(bitofs) + "," + strs + "},\n"); + bitofs += bits fd.write("\t\t};\n\n"); fd.write("\t\tstatic const EnumValue _enum_values[]={\n") - enum_value_count=len(enum_vals); + enum_value_count = len(enum_vals); for x in enum_vals: - fd.write("\t\t\t{"+x["set_mask"]+","+x["clear_mask"]+"},\n"); + fd.write("\t\t\t{" + x["set_mask"] + "," + x["clear_mask"] + "},\n"); fd.write("\t\t};\n\n"); else: @@ -1021,7 +1021,7 @@ def build_legacygl_header( filename, include, class_suffix, output_attribs ): fd.write("\t\tstatic const char* _conditional_strings[]={\n") if (len(header_data.conditionals)): for x in header_data.conditionals: - fd.write("\t\t\t\"#define "+x+"\\n\",\n"); + fd.write("\t\t\t\"#define " + x + "\\n\",\n"); fd.write("\t\t};\n\n"); else: fd.write("\t\tstatic const char **_conditional_strings=NULL;\n") @@ -1031,7 +1031,7 @@ def build_legacygl_header( filename, include, class_suffix, output_attribs ): fd.write("\t\tstatic const char* _uniform_strings[]={\n") if (len(header_data.uniforms)): for x in header_data.uniforms: - fd.write("\t\t\t\""+x+"\",\n"); + fd.write("\t\t\t\"" + x + "\",\n"); fd.write("\t\t};\n\n"); else: fd.write("\t\tstatic const char **_uniform_strings=NULL;\n") @@ -1041,7 +1041,7 @@ def build_legacygl_header( filename, include, class_suffix, output_attribs ): fd.write("\t\tstatic AttributePair _attribute_pairs[]={\n") for x in header_data.attributes: - fd.write("\t\t\t{\""+x[0]+"\","+x[1]+"},\n"); + fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n"); fd.write("\t\t};\n\n"); else: fd.write("\t\tstatic AttributePair *_attribute_pairs=NULL;\n") @@ -1050,7 +1050,7 @@ def build_legacygl_header( filename, include, class_suffix, output_attribs ): if (len(header_data.texunits)): fd.write("\t\tstatic TexUnitPair _texunit_pairs[]={\n") for x in header_data.texunits: - fd.write("\t\t\t{\""+x[0]+"\","+x[1]+"},\n"); + fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n"); fd.write("\t\t};\n\n"); else: 
fd.write("\t\tstatic TexUnitPair *_texunit_pairs=NULL;\n") @@ -1058,28 +1058,28 @@ def build_legacygl_header( filename, include, class_suffix, output_attribs ): fd.write("\t\tstatic const char _vertex_code[]={\n") for x in header_data.vertex_lines: for i in range(len(x)): - fd.write(str(ord(x[i]))+","); + fd.write(str(ord(x[i])) + ","); - fd.write(str(ord('\n'))+","); + fd.write(str(ord('\n')) + ","); fd.write("\t\t0};\n\n"); - fd.write("\t\tstatic const int _vertex_code_start="+str(header_data.vertex_offset)+";\n") + fd.write("\t\tstatic const int _vertex_code_start=" + str(header_data.vertex_offset) + ";\n") fd.write("\t\tstatic const char _fragment_code[]={\n") for x in header_data.fragment_lines: for i in range(len(x)): - fd.write(str(ord(x[i]))+","); + fd.write(str(ord(x[i])) + ","); - fd.write(str(ord('\n'))+","); + fd.write(str(ord('\n')) + ","); fd.write("\t\t0};\n\n"); - fd.write("\t\tstatic const int _fragment_code_start="+str(header_data.fragment_offset)+";\n") + fd.write("\t\tstatic const int _fragment_code_start=" + str(header_data.fragment_offset) + ";\n") if output_attribs: - fd.write("\t\tsetup(_conditional_strings,"+str(len(header_data.conditionals))+",_uniform_strings,"+str(len(header_data.uniforms))+",_attribute_pairs,"+str(len(header_data.attributes))+", _texunit_pairs,"+str(len(header_data.texunits))+",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") + fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_attribute_pairs," + str(len(header_data.attributes)) + ", _texunit_pairs," + str(len(header_data.texunits)) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") else: - fd.write("\t\tsetup(_conditional_strings,"+str(len(header_data.conditionals))+",_uniform_strings,"+str(len(header_data.uniforms))+",_texunit_pairs,"+str(len(header_data.texunits))+",_enums,"+str(len(header_data.enums))+",_enum_values,"+str(enum_value_count)+",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") + fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_texunit_pairs," + str(len(header_data.texunits)) + ",_enums," + str(len(header_data.enums)) + ",_enum_values," + str(enum_value_count) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") fd.write("\t};\n\n") @@ -1088,7 +1088,7 @@ def build_legacygl_header( filename, include, class_suffix, output_attribs ): fd.write("\tenum EnumConditionals {\n") for x in enum_constants: - fd.write("\t\t"+x.upper()+",\n"); + fd.write("\t\t" + x.upper() + ",\n"); fd.write("\t};\n\n"); fd.write("\tvoid set_enum_conditional(EnumConditionals p_cond) { _set_enum_conditional(p_cond); }\n") @@ -1098,47 +1098,47 @@ def build_legacygl_header( filename, include, class_suffix, output_attribs ): fd.close(); -def build_legacygl_headers( target, source, env ): +def build_legacygl_headers(target, source, env): for x in source: - build_legacygl_header(str(x), include = "drivers/legacygl/shader_lgl.h", class_suffix = "LGL", output_attribs = False); + build_legacygl_header(str(x), include="drivers/legacygl/shader_lgl.h", class_suffix="LGL", output_attribs=False); return 0 -def build_gles2_headers( target, source, env ): +def build_gles2_headers(target, source, env): for x in source: - build_legacygl_header(str(x), include="drivers/gles2/shader_gles2.h", class_suffix = "GLES2", output_attribs = True) + 
build_legacygl_header(str(x), include="drivers/gles2/shader_gles2.h", class_suffix="GLES2", output_attribs=True) def update_version(): rev = "custom_build" - if (os.getenv("BUILD_REVISION")!=None): - rev=os.getenv("BUILD_REVISION") - print("Using custom revision: "+rev) + if (os.getenv("BUILD_REVISION") != None): + rev = os.getenv("BUILD_REVISION") + print("Using custom revision: " + rev) import version - f=open("core/version.h","wb") - f.write("#define VERSION_SHORT_NAME "+str(version.short_name)+"\n") - f.write("#define VERSION_NAME "+str(version.name)+"\n") - f.write("#define VERSION_MAJOR "+str(version.major)+"\n") - f.write("#define VERSION_MINOR "+str(version.minor)+"\n") + f = open("core/version.h", "wb") + f.write("#define VERSION_SHORT_NAME " + str(version.short_name) + "\n") + f.write("#define VERSION_NAME " + str(version.name) + "\n") + f.write("#define VERSION_MAJOR " + str(version.major) + "\n") + f.write("#define VERSION_MINOR " + str(version.minor) + "\n") if (hasattr(version, 'patch')): - f.write("#define VERSION_PATCH "+str(version.patch)+"\n") - f.write("#define VERSION_REVISION "+str(rev)+"\n") - f.write("#define VERSION_STATUS "+str(version.status)+"\n") + f.write("#define VERSION_PATCH " + str(version.patch) + "\n") + f.write("#define VERSION_REVISION " + str(rev) + "\n") + f.write("#define VERSION_STATUS " + str(version.status) + "\n") import datetime - f.write("#define VERSION_YEAR "+str(datetime.datetime.now().year)+"\n") + f.write("#define VERSION_YEAR " + str(datetime.datetime.now().year) + "\n") def parse_cg_file(fname, uniforms, sizes, conditionals): import re fs = open(fname, "r") - line=fs.readline() + line = fs.readline() while line: @@ -1172,15 +1172,15 @@ def build_cg_shader(sname): vp_uniform_sizes = [] vp_conditionals = [] - parse_cg_file("vp_"+sname+".cg", vp_uniforms, vp_uniform_sizes, vp_conditionals); + parse_cg_file("vp_" + sname + ".cg", vp_uniforms, vp_uniform_sizes, vp_conditionals); fp_uniforms = [] fp_uniform_sizes = [] fp_conditionals = [] - parse_cg_file("fp_"+sname+".cg", fp_uniforms, fp_uniform_sizes, fp_conditionals); + parse_cg_file("fp_" + sname + ".cg", fp_uniforms, fp_uniform_sizes, fp_conditionals); - fd = open("shader_"+sname+".cg.h", "w"); + fd = open("shader_" + sname + ".cg.h", "w"); fd.write('\n#include "shader_cell.h"\n'); fd.write("\nclass Shader_" + sname + " : public ShaderCell {\n"); @@ -1208,53 +1208,53 @@ def build_cg_shader(sname): import glob def detect_modules(): - module_list=[] - includes_cpp="" - register_cpp="" - unregister_cpp="" + module_list = [] + includes_cpp = "" + register_cpp = "" + unregister_cpp = "" files = glob.glob("modules/*") - files.sort() #so register_module_types does not change that often, and also plugins are registered in alphabetic order + files.sort() # so register_module_types does not change that often, and also plugins are registered in alphabetic order for x in files: if (not os.path.isdir(x)): continue - x=x.replace("modules/","") # rest of world - x=x.replace("modules\\","") # win32 + x = x.replace("modules/", "") # rest of world + x = x.replace("modules\\", "") # win32 module_list.append(x) try: - with open("modules/"+x+"/register_types.h"): - includes_cpp+='#include "modules/'+x+'/register_types.h"\n' - register_cpp+='#ifdef MODULE_'+x.upper()+'_ENABLED\n' - register_cpp+='\tregister_'+x+'_types();\n' - register_cpp+='#endif\n' - unregister_cpp+='#ifdef MODULE_'+x.upper()+'_ENABLED\n' - unregister_cpp+='\tunregister_'+x+'_types();\n' - unregister_cpp+='#endif\n' + with open("modules/" + 
x + "/register_types.h"): + includes_cpp += '#include "modules/' + x + '/register_types.h"\n' + register_cpp += '#ifdef MODULE_' + x.upper() + '_ENABLED\n' + register_cpp += '\tregister_' + x + '_types();\n' + register_cpp += '#endif\n' + unregister_cpp += '#ifdef MODULE_' + x.upper() + '_ENABLED\n' + unregister_cpp += '\tunregister_' + x + '_types();\n' + unregister_cpp += '#endif\n' except IOError: pass - modules_cpp=""" + modules_cpp = """ // modules.cpp - THIS FILE IS GENERATED, DO NOT EDIT!!!!!!! #include "register_module_types.h" -"""+includes_cpp+""" +""" + includes_cpp + """ void register_module_types() { -"""+register_cpp+""" +""" + register_cpp + """ } void unregister_module_types() { -"""+unregister_cpp+""" +""" + unregister_cpp + """ } """ - f=open("modules/register_module_types.cpp","wb") + f = open("modules/register_module_types.cpp", "wb") f.write(modules_cpp) return module_list @@ -1271,7 +1271,7 @@ def win32_spawn(sh, escape, cmd, args, env): if type(env[e]) != type(""): env[e] = str(env[e]) proc = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, startupinfo=startupinfo, shell = False, env = env) + stderr=subprocess.PIPE, startupinfo=startupinfo, shell=False, env=env) data, err = proc.communicate() rv = proc.wait() if rv: @@ -1309,48 +1309,48 @@ def win32_spawn(sh, escape, cmd, args, spawnenv): return exit_code """ -def android_add_maven_repository(self,url): +def android_add_maven_repository(self, url): self.android_maven_repos.append(url) -def android_add_dependency(self,depline): +def android_add_dependency(self, depline): self.android_dependencies.append(depline) -def android_add_java_dir(self,subpath): - base_path = self.Dir(".").abspath+"/modules/"+self.current_module+"/"+subpath +def android_add_java_dir(self, subpath): + base_path = self.Dir(".").abspath + "/modules/" + self.current_module + "/" + subpath self.android_java_dirs.append(base_path) -def android_add_res_dir(self,subpath): - base_path = self.Dir(".").abspath+"/modules/"+self.current_module+"/"+subpath +def android_add_res_dir(self, subpath): + base_path = self.Dir(".").abspath + "/modules/" + self.current_module + "/" + subpath self.android_res_dirs.append(base_path) -def android_add_aidl_dir(self,subpath): - base_path = self.Dir(".").abspath+"/modules/"+self.current_module+"/"+subpath +def android_add_aidl_dir(self, subpath): + base_path = self.Dir(".").abspath + "/modules/" + self.current_module + "/" + subpath self.android_aidl_dirs.append(base_path) -def android_add_jni_dir(self,subpath): - base_path = self.Dir(".").abspath+"/modules/"+self.current_module+"/"+subpath +def android_add_jni_dir(self, subpath): + base_path = self.Dir(".").abspath + "/modules/" + self.current_module + "/" + subpath self.android_jni_dirs.append(base_path) -def android_add_default_config(self,config): +def android_add_default_config(self, config): self.android_default_config.append(config) -def android_add_to_manifest(self,file): - base_path = self.Dir(".").abspath+"/modules/"+self.current_module+"/"+file - f = open(base_path,"rb") - self.android_manifest_chunk+=f.read() -def android_add_to_permissions(self,file): - base_path = self.Dir(".").abspath+"/modules/"+self.current_module+"/"+file - f = open(base_path,"rb") - self.android_permission_chunk+=f.read() -def android_add_to_attributes(self,file): - base_path = self.Dir(".").abspath+"/modules/"+self.current_module+"/"+file - f = open(base_path,"rb") - self.android_appattributes_chunk+=f.read() +def 
android_add_to_manifest(self, file): + base_path = self.Dir(".").abspath + "/modules/" + self.current_module + "/" + file + f = open(base_path, "rb") + self.android_manifest_chunk += f.read() +def android_add_to_permissions(self, file): + base_path = self.Dir(".").abspath + "/modules/" + self.current_module + "/" + file + f = open(base_path, "rb") + self.android_permission_chunk += f.read() +def android_add_to_attributes(self, file): + base_path = self.Dir(".").abspath + "/modules/" + self.current_module + "/" + file + f = open(base_path, "rb") + self.android_appattributes_chunk += f.read() def disable_module(self): self.disabled_modules.append(self.current_module) def use_windows_spawn_fix(self, platform=None): - if (os.name!="nt"): - return #not needed, only for windows + if (os.name != "nt"): + return # not needed, only for windows # On Windows, due to the limited command line length, when creating a static library # from a very high number of objects SCons will invoke "ar" once per object file; @@ -1364,7 +1364,7 @@ def use_windows_spawn_fix(self, platform=None): import subprocess - def mySubProcess(cmdline,env): + def mySubProcess(cmdline, env): prefix = "" if(platform == 'javascript'): prefix = "python.exe " @@ -1372,7 +1372,7 @@ def use_windows_spawn_fix(self, platform=None): startupinfo = subprocess.STARTUPINFO() startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW proc = subprocess.Popen(prefix + cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, startupinfo=startupinfo, shell = False, env = env) + stderr=subprocess.PIPE, startupinfo=startupinfo, shell=False, env=env) data, err = proc.communicate() rv = proc.wait() if rv: @@ -1386,16 +1386,16 @@ def use_windows_spawn_fix(self, platform=None): newargs = ' '.join(args[1:]) cmdline = cmd + " " + newargs - rv=0 + rv = 0 env = {str(key): str(value) for key, value in env.iteritems()} - if len(cmdline) > 32000 and cmd.endswith("ar") : + if len(cmdline) > 32000 and cmd.endswith("ar"): cmdline = cmd + " " + args[1] + " " + args[2] + " " - for i in range(3,len(args)) : - rv = mySubProcess( cmdline + args[i], env ) - if rv : + for i in range(3, len(args)): + rv = mySubProcess(cmdline + args[i], env) + if rv: break else: - rv = mySubProcess( cmdline, env ) + rv = mySubProcess(cmdline, env) return rv @@ -1435,7 +1435,7 @@ def split_lib(self, libname): if len(lib_list) > 0: import os, sys if os.name == 'posix' and sys.platform == 'msys': - env.Replace(ARFLAGS = ['rcsT']) + env.Replace(ARFLAGS=['rcsT']) lib = env.Library(libname + "_collated", lib_list) lib_list = [lib] @@ -1443,65 +1443,65 @@ def split_lib(self, libname): env.add_source_files(lib_base, "*.cpp") lib_list.insert(0, env.Library(libname, lib_base)) - env.Prepend(LIBS = lib_list) + env.Prepend(LIBS=lib_list) -def save_active_platforms(apnames,ap): +def save_active_platforms(apnames, ap): for x in ap: - pth = x+"/logo.png" + pth = x + "/logo.png" # print("open path: "+pth) - pngf=open(pth,"rb"); - b=pngf.read(1); - str=" /* AUTOGENERATED FILE, DO NOT EDIT */ \n" - str+=" static const unsigned char _"+x[9:]+"_logo[]={" - while(len(b)==1): - str+=hex(ord(b)) - b=pngf.read(1); - if (len(b)==1): - str+="," - - str+="};\n" - - wf = x+"/logo.h" - logow = open(wf,"wb") + pngf = open(pth, "rb"); + b = pngf.read(1); + str = " /* AUTOGENERATED FILE, DO NOT EDIT */ \n" + str += " static const unsigned char _" + x[9:] + "_logo[]={" + while(len(b) == 1): + str += hex(ord(b)) + b = pngf.read(1); + if (len(b) == 1): + str += "," + + str += "};\n" + + wf = x + 
"/logo.h" + logow = open(wf, "wb") logow.write(str) -def no_verbose(sys,env): +def no_verbose(sys, env): - #If the output is not a terminal, do nothing + # If the output is not a terminal, do nothing if not sys.stdout.isatty(): return colors = {} - colors['cyan'] = '\033[96m' + colors['cyan'] = '\033[96m' colors['purple'] = '\033[95m' - colors['blue'] = '\033[94m' - colors['green'] = '\033[92m' + colors['blue'] = '\033[94m' + colors['green'] = '\033[92m' colors['yellow'] = '\033[93m' - colors['red'] = '\033[91m' - colors['end'] = '\033[0m' + colors['red'] = '\033[91m' + colors['end'] = '\033[0m' - compile_source_message = '%sCompiling %s==> %s$SOURCE%s' % (colors['blue'], colors['purple'], colors['yellow'], colors['end']) - java_compile_source_message = '%sCompiling %s==> %s$SOURCE%s' % (colors['blue'], colors['purple'], colors['yellow'], colors['end']) + compile_source_message = '%sCompiling %s==> %s$SOURCE%s' % (colors['blue'], colors['purple'], colors['yellow'], colors['end']) + java_compile_source_message = '%sCompiling %s==> %s$SOURCE%s' % (colors['blue'], colors['purple'], colors['yellow'], colors['end']) compile_shared_source_message = '%sCompiling shared %s==> %s$SOURCE%s' % (colors['blue'], colors['purple'], colors['yellow'], colors['end']) - link_program_message = '%sLinking Program %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) - link_library_message = '%sLinking Static Library %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) - ranlib_library_message = '%sRanlib Library %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) - link_shared_library_message = '%sLinking Shared Library %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) - java_library_message = '%sCreating Java Archive %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) - - env.Append( CXXCOMSTR=[compile_source_message] ) - env.Append( CCCOMSTR=[compile_source_message] ) - env.Append( SHCCCOMSTR=[compile_shared_source_message] ) - env.Append( SHCXXCOMSTR=[compile_shared_source_message] ) - env.Append( ARCOMSTR=[link_library_message] ) - env.Append( RANLIBCOMSTR=[ranlib_library_message] ) - env.Append( SHLINKCOMSTR=[link_shared_library_message] ) - env.Append( LINKCOMSTR=[link_program_message] ) - env.Append( JARCOMSTR=[java_library_message] ) - env.Append( JAVACCOMSTR=[java_compile_source_message] ) + link_program_message = '%sLinking Program %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) + link_library_message = '%sLinking Static Library %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) + ranlib_library_message = '%sRanlib Library %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) + link_shared_library_message = '%sLinking Shared Library %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) + java_library_message = '%sCreating Java Archive %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) + + env.Append(CXXCOMSTR=[compile_source_message]) + env.Append(CCCOMSTR=[compile_source_message]) + env.Append(SHCCCOMSTR=[compile_shared_source_message]) + env.Append(SHCXXCOMSTR=[compile_shared_source_message]) + env.Append(ARCOMSTR=[link_library_message]) + env.Append(RANLIBCOMSTR=[ranlib_library_message]) + env.Append(SHLINKCOMSTR=[link_shared_library_message]) 
+ env.Append(LINKCOMSTR=[link_program_message]) + env.Append(JARCOMSTR=[java_library_message]) + env.Append(JAVACCOMSTR=[java_compile_source_message]) def detect_visual_c_compiler_version(tools_env): # tools_env is the variable scons uses to call tools that execute tasks, SCons's env['ENV'] that executes tasks... @@ -1531,12 +1531,12 @@ def detect_visual_c_compiler_version(tools_env): # find() works with -1 so big ifs bellow are needed... the simplest solution, in fact # First test if amd64 and amd64_x86 compilers are present in the path - vc_amd64_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"]+"BIN\\amd64;") + vc_amd64_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN\\amd64;") if(vc_amd64_compiler_detection_index > -1): vc_chosen_compiler_index = vc_amd64_compiler_detection_index vc_chosen_compiler_str = "amd64" - vc_amd64_x86_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"]+"BIN\\amd64_x86;") + vc_amd64_x86_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN\\amd64_x86;") if(vc_amd64_x86_compiler_detection_index > -1 and (vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_amd64_x86_compiler_detection_index)): @@ -1545,14 +1545,14 @@ def detect_visual_c_compiler_version(tools_env): # Now check the 32 bit compilers - vc_x86_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"]+"BIN;") + vc_x86_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN;") if(vc_x86_compiler_detection_index > -1 and (vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_x86_compiler_detection_index)): vc_chosen_compiler_index = vc_x86_compiler_detection_index vc_chosen_compiler_str = "x86" - vc_x86_amd64_compiler_detection_index = tools_env["PATH"].find(tools_env['VCINSTALLDIR']+"BIN\\x86_amd64;") + vc_x86_amd64_compiler_detection_index = tools_env["PATH"].find(tools_env['VCINSTALLDIR'] + "BIN\\x86_amd64;") if(vc_x86_amd64_compiler_detection_index > -1 and (vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_x86_amd64_compiler_detection_index)): @@ -1560,11 +1560,11 @@ def detect_visual_c_compiler_version(tools_env): vc_chosen_compiler_str = "x86_amd64" # debug help - #print vc_amd64_compiler_detection_index - #print vc_amd64_x86_compiler_detection_index - #print vc_x86_compiler_detection_index - #print vc_x86_amd64_compiler_detection_index - #print "chosen "+str(vc_chosen_compiler_index)+ " | "+str(vc_chosen_compiler_str) + # print vc_amd64_compiler_detection_index + # print vc_amd64_x86_compiler_detection_index + # print vc_x86_compiler_detection_index + # print vc_x86_amd64_compiler_detection_index + # print "chosen "+str(vc_chosen_compiler_index)+ " | "+str(vc_chosen_compiler_str) return vc_chosen_compiler_str |
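The commit above only reformats whitespace, but the code it touches carries some non-obvious parsing logic. As a reading aid, here is a minimal, self-contained Python 3 sketch of the `texunit:` comment convention handled by include_file_in_legacygl_header() (build_glsl_header() follows the same idea minus the "auto" unit). The standalone function name, the list-of-lines input, and the demo values are illustrative and not part of methods.py.

```python
def parse_texunit_uniforms(glsl_lines):
    """Return [(uniform_name, texunit)] for lines such as
    'uniform sampler2D tex, tex2; //texunit:2'."""
    pairs = []
    seen = set()
    for line in glsl_lines:
        if "uniform" not in line or "texunit:" not in line.lower():
            continue
        # The unit index follows the colon in the trailing comment;
        # "auto" maps to -1 so the shader class can assign one later.
        unit_str = line[line.find(":") + 1:].strip()
        unit = "-1" if unit_str == "auto" else str(int(unit_str))
        # Strip the comment and keywords, keeping only the declarator names.
        decl = line[:line.lower().find("//")]
        for kw in ("uniform", "highp", ";"):
            decl = decl.replace(kw, "")
        for name in decl.split(","):
            name = name.strip()
            name = name[name.rfind(" ") + 1:]   # drop the type
            if "[" in name:                     # drop any array suffix
                name = name[:name.find("[")]
            if name and name not in seen:
                seen.add(name)
                pairs.append((name, unit))
    return pairs

if __name__ == "__main__":
    demo = ["uniform sampler2D albedo_tex; //texunit:0",
            "uniform highp sampler2D shadow_tex, light_tex; //texunit:auto"]
    print(parse_texunit_uniforms(demo))
    # -> [('albedo_tex', '0'), ('shadow_tex', '-1'), ('light_tex', '-1')]
```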
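The set_mask/clear_mask arithmetic emitted by build_legacygl_header() is the least obvious part of the diff, so this hedged sketch reproduces the same bit packing in plain Python 3: each enum occupies a contiguous bit field placed after the plain conditionals inside a 40-bit shader-version key. The helper name and return shape are assumptions made for illustration only.

```python
def pack_enum_masks(conditionals, enums):
    """enums: {enum_name: [define_name, ...]} -> [(set_mask, clear_mask)],
    one pair per define, in declaration order."""
    bitofs = len(conditionals)           # plain #ifdef conditionals use the low bits
    masks = []
    for values in enums.values():
        bits = 1
        while 2 ** bits < len(values):   # bits needed to encode every value of this enum
            bits += 1
        field = ((1 << bits) - 1) << bitofs
        for i in range(len(values)):
            set_mask = i << bitofs                # selects value i of this enum
            clear_mask = ((1 << 40) - 1) ^ field  # wipes the field in the 40-bit key
            masks.append((set_mask, clear_mask))
        bitofs += bits                   # the next enum takes the following bits
    return masks
```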
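Finally, use_windows_spawn_fix() works around the roughly 32k character command-line limit on Windows when SCons archives a large number of object files. The sketch below shows only the batching idea, with illustrative names (run(), spawn_with_ar_batching()) rather than the real SCons SPAWN hook, and it keeps the original's single-string command line, which subprocess accepts on Windows, the only platform this path targets.

```python
import subprocess

def run(cmdline, env):
    # As in methods.py, the command line is passed as one string; this form
    # is valid on Windows, which is the only platform this path targets.
    cp = subprocess.run(cmdline, env=env, shell=False, capture_output=True)
    if cp.returncode:
        print(cp.stderr.decode(errors="replace"))
    return cp.returncode

def spawn_with_ar_batching(cmd, args, env, limit=32000):
    """args mirrors SCons's spawn signature: args[0] is the program name,
    args[1] the archiver flags, args[2] the archive, the rest object files."""
    cmdline = cmd + " " + " ".join(args[1:])
    if len(cmdline) > limit and cmd.endswith("ar"):
        base = cmd + " " + args[1] + " " + args[2] + " "
        rv = 0
        for obj in args[3:]:             # one "ar" invocation per object file
            rv = run(base + obj, env)
            if rv:
                break
        return rv
    return run(cmdline, env)
```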