path: root/deps/gyp/pylib/gyp/msvs_emulation.py
blob: ca67b122f0b9b1d0544eebce9a66a678c9dd6853
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
This module helps emulate Visual Studio 2008 behavior on top of other
build systems, primarily ninja.
"""

import os
import re
import subprocess
import sys

from gyp.common import OrderedSet
import gyp.MSVSUtil
import gyp.MSVSVersion


windows_quoter_regex = re.compile(r'(\\*)"')


def QuoteForRspFile(arg):
  """Quote a command line argument so that it appears as one argument when
  processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
  Windows programs)."""
  # See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment
  # threads. This is actually the quoting rules for CommandLineToArgvW, not
  # for the shell, because the shell doesn't do anything in Windows. This
  # works more or less because most programs (including the compiler, etc.)
  # use that function to handle command line arguments.

  # For a literal quote, CommandLineToArgvW requires 2n+1 backslashes
  # preceding it, and results in n backslashes + the quote. So we substitute
  # in 2* what we match, +1 more, plus the quote.
  arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)

  # %'s also need to be doubled, otherwise they're interpreted as batch
  # positional arguments. Escaping them this way lets them pass through
  # literally and collapse back to the original, single %. Otherwise, trying
  # to pass something that merely looks like an environment variable
  # reference (e.g. %PATH%) would fail.
  arg = arg.replace('%', '%%')

  # These commands are used in rsp files, so no escaping for the shell (via ^)
  # is necessary.

  # Finally, wrap the whole thing in quotes so that the above quote rule
  # applies and whitespace isn't a word break.
  return '"' + arg + '"'


def EncodeRspFileList(args):
  """Process a list of arguments using QuoteCmdExeArgument."""
  # Note that the first argument is assumed to be the command. Don't add
  # quotes around it because then built-ins like 'echo', etc. won't work.
  # Take care to normpath only the path in the case of 'call ../x.bat' because
  # otherwise the whole thing is incorrectly interpreted as a path and not
  # normalized correctly.
  if not args: return ''
  if args[0].startswith('call '):
    call, program = args[0].split(' ', 1)
    program = call + ' ' + os.path.normpath(program)
  else:
    program = os.path.normpath(args[0])
  return program + ' ' + ' '.join(QuoteForRspFile(arg) for arg in args[1:])
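# For example, EncodeRspFileList(['cl.exe', '/Fo./obj', 'foo bar.cc']) yields
#   cl.exe "/Fo./obj" "foo bar.cc"
# Only the trailing arguments are quoted; the command itself is left bare so
# that built-ins like 'echo' keep working.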


def _GenericRetrieve(root, default, path):
  """Given a list of dictionary keys |path| and a tree of dicts |root|, find
  value at path, or return |default| if any of the path doesn't exist."""
  if not root:
    return default
  if not path:
    return root
  return _GenericRetrieve(root.get(path[0]), default, path[1:])
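# For example, _GenericRetrieve({'a': {'b': 3}}, 0, ('a', 'b')) returns 3,
# while _GenericRetrieve({'a': {}}, 0, ('a', 'b')) returns the default 0.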


def _AddPrefix(element, prefix):
  """Add |prefix| to |element| or each subelement if element is iterable."""
  if element is None:
    return element
  # Note, not Iterable because we don't want to handle strings like that.
  if isinstance(element, list) or isinstance(element, tuple):
    return [prefix + e for e in element]
  else:
    return prefix + element


def _DoRemapping(element, map):
  """If |element| then remap it through |map|. If |element| is iterable then
  each item will be remapped. Any elements not found will be removed."""
  if map is not None and element is not None:
    if not callable(map):
      map = map.get # Assume it's a dict, otherwise a callable to do the remap.
    if isinstance(element, list) or isinstance(element, tuple):
      element = filter(None, [map(elem) for elem in element])
    else:
      element = map(element)
  return element
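# For example, _DoRemapping(['1', '9', '2'], {'1': 'X86', '2': 'X64'}) returns
# ['X86', 'X64'] (the unmapped '9' is dropped), while a scalar is remapped
# directly: _DoRemapping('1', {'1': 'X86'}) returns 'X86'.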


def _AppendOrReturn(append, element):
  """If |append| is None, simply return |element|. If |append| is not None,
  then add |element| to it, adding each item in |element| if it's a list or
  tuple."""
  if append is not None and element is not None:
    if isinstance(element, list) or isinstance(element, tuple):
      append.extend(element)
    else:
      append.append(element)
  else:
    return element


def _FindDirectXInstallation():
  """Try to find an installation location for the DirectX SDK. Check for the
  standard environment variable, and if that doesn't exist, try to find
  via the registry. May return None if not found in either location."""
  # Return previously calculated value, if there is one
  if hasattr(_FindDirectXInstallation, 'dxsdk_dir'):
    return _FindDirectXInstallation.dxsdk_dir

  dxsdk_dir = os.environ.get('DXSDK_DIR')
  if not dxsdk_dir:
    # Set up parameters for, and attempt to launch, reg.exe.
    cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s']
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    for line in p.communicate()[0].splitlines():
      if 'InstallPath' in line:
        dxsdk_dir = line.split('    ')[3] + "\\"

  # Cache return value
  _FindDirectXInstallation.dxsdk_dir = dxsdk_dir
  return dxsdk_dir


def GetGlobalVSMacroEnv(vs_version):
  """Get a dict of variables mapping internal VS macro names to their gyp
  equivalents. Returns all variables that are independent of the target."""
  env = {}
  # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
  # Visual Studio is actually installed.
  if vs_version.Path():
    env['$(VSInstallDir)'] = vs_version.Path()
    env['$(VCInstallDir)'] = os.path.join(vs_version.Path(), 'VC') + '\\'
  # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
  # set. This happens when the SDK is sync'd via src-internal, rather than
  # by typical end-user installation of the SDK. If it's not set, we don't
  # want to leave the unexpanded variable in the path, so simply strip it.
  dxsdk_dir = _FindDirectXInstallation()
  env['$(DXSDK_DIR)'] = dxsdk_dir if dxsdk_dir else ''
  # Try to find an installation location for the Windows DDK by checking
  # the WDK_DIR environment variable, may be None.
  env['$(WDK_DIR)'] = os.environ.get('WDK_DIR', '')
  return env

def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
  """Finds msvs_system_include_dirs that are common to all targets, removes
  them from all targets, and returns an OrderedSet containing them."""
  all_system_includes = OrderedSet(
      configs[0].get('msvs_system_include_dirs', []))
  for config in configs[1:]:
    system_includes = config.get('msvs_system_include_dirs', [])
    all_system_includes = all_system_includes & OrderedSet(system_includes)
  if not all_system_includes:
    return None
  # Expand macros in all_system_includes.
  env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags))
  expanded_system_includes = OrderedSet([ExpandMacros(include, env)
                                         for include in all_system_includes])
  if any(['$' in include for include in expanded_system_includes]):
    # Some path relies on target-specific variables, bail.
    return None

  # Remove system includes shared by all targets from the targets.
  for config in configs:
    includes = config.get('msvs_system_include_dirs', [])
    if includes:  # Don't insert a msvs_system_include_dirs key if not needed.
      # This must check the unexpanded includes list:
      new_includes = [i for i in includes if i not in all_system_includes]
      config['msvs_system_include_dirs'] = new_includes
  return expanded_system_includes


class MsvsSettings(object):
  """A class that understands the gyp 'msvs_...' values (especially the
  msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
  class helps map those settings to command line options."""

  def __init__(self, spec, generator_flags):
    self.spec = spec
    self.vs_version = GetVSVersion(generator_flags)

    supported_fields = [
        ('msvs_configuration_attributes', dict),
        ('msvs_settings', dict),
        ('msvs_system_include_dirs', list),
        ('msvs_disabled_warnings', list),
        ('msvs_precompiled_header', str),
        ('msvs_precompiled_source', str),
        ('msvs_configuration_platform', str),
        ('msvs_target_platform', str),
        ]
    configs = spec['configurations']
    for field, default in supported_fields:
      setattr(self, field, {})
      for configname, config in configs.iteritems():
        getattr(self, field)[configname] = config.get(field, default())

    self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])

    unsupported_fields = [
        'msvs_prebuild',
        'msvs_postbuild',
    ]
    unsupported = []
    for field in unsupported_fields:
      for config in configs.values():
        if field in config:
          unsupported += ["%s not supported (target %s)." %
                          (field, spec['target_name'])]
    if unsupported:
      raise Exception('\n'.join(unsupported))

  def GetExtension(self):
    """Returns the extension for the target, with no leading dot.

    Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
    the target type.
    """
    ext = self.spec.get('product_extension', None)
    if ext:
      return ext
    return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec['type'], '')

  def GetVSMacroEnv(self, base_to_build=None, config=None):
    """Get a dict of variables mapping internal VS macro names to their gyp
    equivalents."""
    target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
    target_name = self.spec.get('product_prefix', '') + \
        self.spec.get('product_name', self.spec['target_name'])
    target_dir = base_to_build + '\\' if base_to_build else ''
    target_ext = '.' + self.GetExtension()
    target_file_name = target_name + target_ext

    replacements = {
        '$(InputName)': '${root}',
        '$(InputPath)': '${source}',
        '$(IntDir)': '$!INTERMEDIATE_DIR',
        '$(OutDir)\\': target_dir,
        '$(PlatformName)': target_platform,
        '$(ProjectDir)\\': '',
        '$(ProjectName)': self.spec['target_name'],
        '$(TargetDir)\\': target_dir,
        '$(TargetExt)': target_ext,
        '$(TargetFileName)': target_file_name,
        '$(TargetName)': target_name,
        '$(TargetPath)': os.path.join(target_dir, target_file_name),
    }
    replacements.update(GetGlobalVSMacroEnv(self.vs_version))
    return replacements

  def ConvertVSMacros(self, s, base_to_build=None, config=None):
    """Convert from VS macro names to something equivalent."""
    env = self.GetVSMacroEnv(base_to_build, config=config)
    return ExpandMacros(s, env)
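  # For illustration, assuming a hypothetical target named 'foo' with no
  # product_name/product_prefix overrides:
  #   ConvertVSMacros('$(OutDir)\\$(TargetName).map', base_to_build='obj')
  # expands to 'obj\\foo.map'.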

  def AdjustLibraries(self, libraries):
    """Strip -l from library if it's specified with that."""
    libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
    return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs]
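  # For example, AdjustLibraries(['-lws2_32', 'kernel32.lib', 'dbghelp'])
  # returns ['ws2_32.lib', 'kernel32.lib', 'dbghelp.lib'].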

  def _GetAndMunge(self, field, path, default, prefix, append, map):
    """Retrieve a value from |field| at |path| or return |default|. If
    |append| is specified, and the item is found, it will be appended to that
    object instead of returned. If |map| is specified, results will be
    remapped through |map| before being returned or appended."""
    result = _GenericRetrieve(field, default, path)
    result = _DoRemapping(result, map)
    result = _AddPrefix(result, prefix)
    return _AppendOrReturn(append, result)

  class _GetWrapper(object):
    def __init__(self, parent, field, base_path, append=None):
      self.parent = parent
      self.field = field
      self.base_path = [base_path]
      self.append = append
    def __call__(self, name, map=None, prefix='', default=None):
      return self.parent._GetAndMunge(self.field, self.base_path + [name],
          default=default, prefix=prefix, append=self.append, map=map)

  def GetArch(self, config):
    """Get architecture based on msvs_configuration_platform and
    msvs_target_platform. Returns either 'x86' or 'x64'."""
    configuration_platform = self.msvs_configuration_platform.get(config, '')
    platform = self.msvs_target_platform.get(config, '')
    if not platform: # If no specific override, use the configuration's.
      platform = configuration_platform
    # Map from platform to architecture.
    return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86')

  def _TargetConfig(self, config):
    """Returns the target-specific configuration."""
    # There are two levels of architecture/platform specification in VS. The
    # first level is globally for the configuration (this is what we consider
    # "the" config at the gyp level, which will be something like 'Debug' or
    # 'Release_x64'), and a second target-specific configuration, which is an
    # override for the global one. |config| is remapped here to take into
    # account the local target-specific overrides to the global configuration.
    arch = self.GetArch(config)
    if arch == 'x64' and not config.endswith('_x64'):
      config += '_x64'
    if arch == 'x86' and config.endswith('_x64'):
      config = config.rsplit('_', 1)[0]
    return config
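  # For example, a 'Debug' gyp config whose platform resolves to x64 is
  # remapped to 'Debug_x64', while a 'Release_x64' config with an x86
  # target-platform override is remapped back to 'Release'.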

  def _Setting(self, path, config,
              default=None, prefix='', append=None, map=None):
    """_GetAndMunge for msvs_settings."""
    return self._GetAndMunge(
        self.msvs_settings[config], path, default, prefix, append, map)

  def _ConfigAttrib(self, path, config,
                   default=None, prefix='', append=None, map=None):
    """_GetAndMunge for msvs_configuration_attributes."""
    return self._GetAndMunge(
        self.msvs_configuration_attributes[config],
        path, default, prefix, append, map)

  def AdjustIncludeDirs(self, include_dirs, config):
    """Updates include_dirs to expand VS specific paths, and adds the system
    include dirs used for platform SDK and similar."""
    config = self._TargetConfig(config)
    includes = include_dirs + self.msvs_system_include_dirs[config]
    includes.extend(self._Setting(
      ('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
    return [self.ConvertVSMacros(p, config=config) for p in includes]

  def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
    """Updates midl_include_dirs to expand VS specific paths, and adds the
    system include dirs used for platform SDK and similar."""
    config = self._TargetConfig(config)
    includes = midl_include_dirs + self.msvs_system_include_dirs[config]
    includes.extend(self._Setting(
      ('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[]))
    return [self.ConvertVSMacros(p, config=config) for p in includes]

  def GetComputedDefines(self, config):
    """Returns the set of defines that are injected to the defines list based
    on other VS settings."""
    config = self._TargetConfig(config)
    defines = []
    if self._ConfigAttrib(['CharacterSet'], config) == '1':
      defines.extend(('_UNICODE', 'UNICODE'))
    if self._ConfigAttrib(['CharacterSet'], config) == '2':
      defines.append('_MBCS')
    defines.extend(self._Setting(
        ('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
    return defines

  def GetCompilerPdbName(self, config, expand_special):
    """Get the pdb file name that should be used for compiler invocations, or
    None if there's no explicit name specified."""
    config = self._TargetConfig(config)
    pdbname = self._Setting(
        ('VCCLCompilerTool', 'ProgramDataBaseFileName'), config)
    if pdbname:
      pdbname = expand_special(self.ConvertVSMacros(pdbname))
    return pdbname

  def GetMapFileName(self, config, expand_special):
    """Gets the explicitly overriden map file name for a target or returns None
    if it's not set."""
    config = self._TargetConfig(config)
    map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config)
    if map_file:
      map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
    return map_file

  def GetOutputName(self, config, expand_special):
    """Gets the explicitly overridden output name for a target or returns None
    if it's not overridden."""
    config = self._TargetConfig(config)
    type = self.spec['type']
    root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
    # TODO(scottmg): Handle OutputDirectory without OutputFile.
    output_file = self._Setting((root, 'OutputFile'), config)
    if output_file:
      output_file = expand_special(self.ConvertVSMacros(
          output_file, config=config))
    return output_file

  def GetPDBName(self, config, expand_special, default):
    """Gets the explicitly overridden pdb name for a target or returns
    default if it's not overridden, or if no pdb will be generated."""
    config = self._TargetConfig(config)
    output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
    generate_debug_info = self._Setting(
        ('VCLinkerTool', 'GenerateDebugInformation'), config)
    if generate_debug_info == 'true':
      if output_file:
        return expand_special(self.ConvertVSMacros(output_file, config=config))
      else:
        return default
    else:
      return None

  def GetNoImportLibrary(self, config):
    """If NoImportLibrary: true, ninja will not expect the output to include
    an import library."""
    config = self._TargetConfig(config)
    noimplib = self._Setting(('NoImportLibrary',), config)
    return noimplib == 'true'

  def GetAsmflags(self, config):
    """Returns the flags that need to be added to ml invocations."""
    config = self._TargetConfig(config)
    asmflags = []
    safeseh = self._Setting(('MASM', 'UseSafeExceptionHandlers'), config)
    if safeseh == 'true':
      asmflags.append('/safeseh')
    return asmflags

  def GetCflags(self, config):
    """Returns the flags that need to be added to .c and .cc compilations."""
    config = self._TargetConfig(config)
    cflags = []
    cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
    cl = self._GetWrapper(self, self.msvs_settings[config],
                          'VCCLCompilerTool', append=cflags)
    cl('Optimization',
       map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
    cl('InlineFunctionExpansion', prefix='/Ob')
    cl('DisableSpecificWarnings', prefix='/wd')
    cl('StringPooling', map={'true': '/GF'})
    cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
    cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
    cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
    cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
    cl('FloatingPointModel',
        map={'0': 'precise', '1': 'strict', '2': 'fast'}, prefix='/fp:',
        default='0')
    cl('CompileAsManaged', map={'false': '', 'true': '/clr'})
    cl('WholeProgramOptimization', map={'true': '/GL'})
    cl('WarningLevel', prefix='/W')
    cl('WarnAsError', map={'true': '/WX'})
    cl('CallingConvention',
        map={'0': 'd', '1': 'r', '2': 'z', '3': 'v'}, prefix='/G')
    cl('DebugInformationFormat',
        map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
    cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
    cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'})
    cl('MinimalRebuild', map={'true': '/Gm'})
    cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'})
    cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC')
    cl('RuntimeLibrary',
        map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
    cl('ExceptionHandling', map={'1': 'sc', '2': 'a'}, prefix='/EH')
    cl('DefaultCharIsUnsigned', map={'true': '/J'})
    cl('TreatWChar_tAsBuiltInType',
        map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
    cl('EnablePREfast', map={'true': '/analyze'})
    cl('AdditionalOptions', prefix='')
    cl('EnableEnhancedInstructionSet',
        map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32', '5': 'AVX2'},
        prefix='/arch:')
    cflags.extend(['/FI' + f for f in self._Setting(
        ('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
    if self.vs_version.short_name in ('2013', '2013e', '2015'):
      # New flag required in 2013 to maintain previous PDB behavior.
      cflags.append('/FS')
    # ninja handles parallelism by itself, don't have the compiler do it too.
    cflags = filter(lambda x: not x.startswith('/MP'), cflags)
    return cflags
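  # For example, a config whose VCCLCompilerTool settings contain
  # Optimization '0', RuntimeLibrary '3' and WarningLevel '4' contributes
  # '/Od', '/MDd' and '/W4' to cflags.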

  def _GetPchFlags(self, config, extension):
    """Get the flags to be added to the cflags for precompiled header support.
    """
    config = self._TargetConfig(config)
    # The PCH is only built once by a particular source file. Usage of PCH must
    # only be for the same language (i.e. C vs. C++), so only include the pch
    # flags when the language matches.
    if self.msvs_precompiled_header[config]:
      source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
      if _LanguageMatchesForPch(source_ext, extension):
        pch = os.path.split(self.msvs_precompiled_header[config])[1]
        return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
    return []

  def GetCflagsC(self, config):
    """Returns the flags that need to be added to .c compilations."""
    config = self._TargetConfig(config)
    return self._GetPchFlags(config, '.c')

  def GetCflagsCC(self, config):
    """Returns the flags that need to be added to .cc compilations."""
    config = self._TargetConfig(config)
    return ['/TP'] + self._GetPchFlags(config, '.cc')

  def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
    """Get and normalize the list of paths in AdditionalLibraryDirectories
    setting."""
    config = self._TargetConfig(config)
    libpaths = self._Setting((root, 'AdditionalLibraryDirectories'),
                             config, default=[])
    libpaths = [os.path.normpath(
                    gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
                for p in libpaths]
    return ['/LIBPATH:"' + p + '"' for p in libpaths]

  def GetLibFlags(self, config, gyp_to_build_path):
    """Returns the flags that need to be added to lib commands."""
    config = self._TargetConfig(config)
    libflags = []
    lib = self._GetWrapper(self, self.msvs_settings[config],
                          'VCLibrarianTool', append=libflags)
    libflags.extend(self._GetAdditionalLibraryDirectories(
        'VCLibrarianTool', config, gyp_to_build_path))
    lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
    lib('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
        prefix='/MACHINE:')
    lib('AdditionalOptions')
    return libflags

  def GetDefFile(self, gyp_to_build_path):
    """Returns the .def file from sources, if any.  Otherwise returns None."""
    spec = self.spec
    if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
      def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
      if len(def_files) == 1:
        return gyp_to_build_path(def_files[0])
      elif len(def_files) > 1:
        raise Exception("Multiple .def files")
    return None

  def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
    """.def files get implicitly converted to a ModuleDefinitionFile for the
    linker in the VS generator. Emulate that behavior here."""
    def_file = self.GetDefFile(gyp_to_build_path)
    if def_file:
      ldflags.append('/DEF:"%s"' % def_file)

  def GetPGDName(self, config, expand_special):
    """Gets the explicitly overridden pgd name for a target or returns None
    if it's not overridden."""
    config = self._TargetConfig(config)
    output_file = self._Setting(
        ('VCLinkerTool', 'ProfileGuidedDatabase'), config)
    if output_file:
      output_file = expand_special(self.ConvertVSMacros(
          output_file, config=config))
    return output_file

  def GetLdflags(self, config, gyp_to_build_path, expand_special,
                 manifest_base_name, output_name, is_executable, build_dir):
    """Returns the flags that need to be added to link commands, and the
    manifest files."""
    config = self._TargetConfig(config)
    ldflags = []
    ld = self._GetWrapper(self, self.msvs_settings[config],
                          'VCLinkerTool', append=ldflags)
    self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
    ld('GenerateDebugInformation', map={'true': '/DEBUG'})
    ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
       prefix='/MACHINE:')
    ldflags.extend(self._GetAdditionalLibraryDirectories(
        'VCLinkerTool', config, gyp_to_build_path))
    ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
    ld('TreatLinkerWarningAsErrors', prefix='/WX',
       map={'true': '', 'false': ':NO'})
    out = self.GetOutputName(config, expand_special)
    if out:
      ldflags.append('/OUT:' + out)
    pdb = self.GetPDBName(config, expand_special, output_name + '.pdb')
    if pdb:
      ldflags.append('/PDB:' + pdb)
    pgd = self.GetPGDName(config, expand_special)
    if pgd:
      ldflags.append('/PGD:' + pgd)
    map_file = self.GetMapFileName(config, expand_special)
    ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
        else '/MAP'})
    ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
    ld('AdditionalOptions', prefix='')

    minimum_required_version = self._Setting(
        ('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
    if minimum_required_version:
      minimum_required_version = ',' + minimum_required_version
    ld('SubSystem',
       map={'1': 'CONSOLE%s' % minimum_required_version,
            '2': 'WINDOWS%s' % minimum_required_version},
       prefix='/SUBSYSTEM:')

    stack_reserve_size = self._Setting(
        ('VCLinkerTool', 'StackReserveSize'), config, default='')
    if stack_reserve_size:
      stack_commit_size = self._Setting(
          ('VCLinkerTool', 'StackCommitSize'), config, default='')
      if stack_commit_size:
        stack_commit_size = ',' + stack_commit_size
      ldflags.append('/STACK:%s%s' % (stack_reserve_size, stack_commit_size))

    ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
    ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
    ld('BaseAddress', prefix='/BASE:')
    ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
    ld('RandomizedBaseAddress',
        map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
    ld('DataExecutionPrevention',
        map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
    ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
    ld('ForceSymbolReferences', prefix='/INCLUDE:')
    ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
    ld('LinkTimeCodeGeneration',
        map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
             '4': ':PGUPDATE'},
        prefix='/LTCG')
    ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
    ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
    ld('EntryPointSymbol', prefix='/ENTRY:')
    ld('Profile', map={'true': '/PROFILE'})
    ld('LargeAddressAware',
        map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
    # TODO(scottmg): This should sort of be somewhere else (not really a flag).
    ld('AdditionalDependencies', prefix='')

    if self.GetArch(config) == 'x86':
      safeseh_default = 'true'
    else:
      safeseh_default = None
    ld('ImageHasSafeExceptionHandlers',
        map={'false': ':NO', 'true': ''}, prefix='/SAFESEH',
        default=safeseh_default)

    # If the base address is not specifically controlled, DYNAMICBASE should
    # be on by default.
    base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
                        ldflags)
    if not base_flags:
      ldflags.append('/DYNAMICBASE')

    # If the NXCOMPAT flag has not been specified, default to on. Despite the
    # documentation that says this only defaults to on when the subsystem is
    # Vista or greater (which applies to the linker), the IDE defaults it on
    # unless it's explicitly off.
    if not filter(lambda x: 'NXCOMPAT' in x, ldflags):
      ldflags.append('/NXCOMPAT')

    have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
    manifest_flags, intermediate_manifest, manifest_files = \
        self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
                                 is_executable and not have_def_file, build_dir)
    ldflags.extend(manifest_flags)
    return ldflags, intermediate_manifest, manifest_files
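  # For example, VCLinkerTool settings with SubSystem '1' and
  # MinimumRequiredVersion '5.02' yield '/SUBSYSTEM:CONSOLE,5.02', and
  # GenerateDebugInformation 'true' adds '/DEBUG'.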

  def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
                          allow_isolation, build_dir):
    """Returns a 3-tuple:
    - the set of flags that need to be added to the link to generate
      a default manifest
    - the intermediate manifest that the linker will generate that should be
      used to assert it doesn't add anything to the merged one.
    - the list of all the manifest files to be merged by the manifest tool and
      included into the link."""
    generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
                                      config,
                                      default='true')
    if generate_manifest != 'true':
      # This means not only that the linker should not generate the intermediate
      # manifest but also that the manifest tool should do nothing even when
      # additional manifests are specified.
      return ['/MANIFEST:NO'], [], []

    output_name = name + '.intermediate.manifest'
    flags = [
      '/MANIFEST',
      '/ManifestFile:' + output_name,
    ]

    # Instead of using the MANIFESTUAC flags, we generate a .manifest to
    # include into the list of manifests. This allows us to avoid the need to
    # do two passes during linking. The /MANIFEST flag and /ManifestFile are
    # still used, and the intermediate manifest is used to assert that the
    # final manifest we get from merging all the additional manifest files
    # (plus the one we generate here) isn't modified by merging the
    # intermediate into it.

    # Always NO, because we generate a manifest file that has what we want.
    flags.append('/MANIFESTUAC:NO')

    config = self._TargetConfig(config)
    enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
                               default='true')
    manifest_files = []
    generated_manifest_outer = \
"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
"</assembly>"
    if enable_uac == 'true':
      execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
                                      config, default='0')
      execution_level_map = {
        '0': 'asInvoker',
        '1': 'highestAvailable',
        '2': 'requireAdministrator'
      }

      ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
                                default='false')

      inner = '''
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
  <security>
    <requestedPrivileges>
      <requestedExecutionLevel level='%s' uiAccess='%s' />
    </requestedPrivileges>
  </security>
</trustInfo>''' % (execution_level_map[execution_level], ui_access)
    else:
      inner = ''

    generated_manifest_contents = generated_manifest_outer % inner
    generated_name = name + '.generated.manifest'
    # Need to join with the build_dir here as we're writing it during
    # generation time, but we return the un-joined version because the build
    # will occur in that directory. We only write the file if the contents
    # have changed so that simply regenerating the project files doesn't
    # cause a relink.
    build_dir_generated_name = os.path.join(build_dir, generated_name)
    gyp.common.EnsureDirExists(build_dir_generated_name)
    f = gyp.common.WriteOnDiff(build_dir_generated_name)
    f.write(generated_manifest_contents)
    f.close()
    manifest_files = [generated_name]

    if allow_isolation:
      flags.append('/ALLOWISOLATION')

    manifest_files += self._GetAdditionalManifestFiles(config,
                                                       gyp_to_build_path)
    return flags, output_name, manifest_files

  def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
    """Gets additional manifest files that are added to the default one
    generated by the linker."""
    files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
                          default=[])
    if isinstance(files, str):
      files = files.split(';')
    return [os.path.normpath(
                gyp_to_build_path(self.ConvertVSMacros(f, config=config)))
            for f in files]

  def IsUseLibraryDependencyInputs(self, config):
    """Returns whether the target should be linked via Use Library Dependency
    Inputs (using component .objs of a given .lib)."""
    config = self._TargetConfig(config)
    uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
    return uldi == 'true'

  def IsEmbedManifest(self, config):
    """Returns whether manifest should be linked into binary."""
    config = self._TargetConfig(config)
    embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config,
                          default='true')
    return embed == 'true'

  def IsLinkIncremental(self, config):
    """Returns whether the target should be linked incrementally."""
    config = self._TargetConfig(config)
    link_inc = self._Setting(('VCLinkerTool', 'LinkIncremental'), config)
    return link_inc != '1'

  def GetRcflags(self, config, gyp_to_ninja_path):
    """Returns the flags that need to be added to invocations of the resource
    compiler."""
    config = self._TargetConfig(config)
    rcflags = []
    rc = self._GetWrapper(self, self.msvs_settings[config],
        'VCResourceCompilerTool', append=rcflags)
    rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I')
    rcflags.append('/I' + gyp_to_ninja_path('.'))
    rc('PreprocessorDefinitions', prefix='/d')
    # /l arg must be in hex without leading '0x'
    rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:])
    return rcflags
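  # For example, Culture '1033' (en-US) becomes '/l409', and a preprocessor
  # definition FOO is emitted as '/dFOO'.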

  def BuildCygwinBashCommandLine(self, args, path_to_base):
    """Build a command line that runs args via cygwin bash. We assume that all
    incoming paths are in Windows normpath'd form, so they need to be
    converted to posix style for the part of the command line that's passed to
    bash. We also have to do some Visual Studio macro emulation here because
    various rules use magic VS names for things. Also note that rules that
    contain ninja variables cannot be fixed here (for example ${source}), so
    the outer generator needs to make sure that the paths that are written out
    are in posix style, if the command line will be used here."""
    cygwin_dir = os.path.normpath(
        os.path.join(path_to_base, self.msvs_cygwin_dirs[0]))
    cd = ('cd %s' % path_to_base).replace('\\', '/')
    args = [a.replace('\\', '/').replace('"', '\\"') for a in args]
    args = ["'%s'" % a.replace("'", "'\\''") for a in args]
    bash_cmd = ' '.join(args)
    cmd = (
        'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir +
        'bash -c "%s ; %s"' % (cd, bash_cmd))
    return cmd

  def IsRuleRunUnderCygwin(self, rule):
    """Determine if an action should be run under cygwin. If the variable is
    unset, or set to 1, we use cygwin."""
    return int(rule.get('msvs_cygwin_shell',
                        self.spec.get('msvs_cygwin_shell', 1))) != 0

  def _HasExplicitRuleForExtension(self, spec, extension):
    """Determine if there's an explicit rule for a particular extension."""
    for rule in spec.get('rules', []):
      if rule['extension'] == extension:
        return True
    return False

  def _HasExplicitIdlActions(self, spec):
    """Determine if an action should not run midl for .idl files."""
    return any([action.get('explicit_idl_action', 0)
                for action in spec.get('actions', [])])

  def HasExplicitIdlRulesOrActions(self, spec):
    """Determine if there's an explicit rule or action for idl files. When
    there isn't we need to generate implicit rules to build MIDL .idl files."""
    return (self._HasExplicitRuleForExtension(spec, 'idl') or
            self._HasExplicitIdlActions(spec))

  def HasExplicitAsmRules(self, spec):
    """Determine if there's an explicit rule for asm files. When there isn't we
    need to generate implicit rules to assemble .asm files."""
    return self._HasExplicitRuleForExtension(spec, 'asm')

  def GetIdlBuildData(self, source, config):
    """Determine the implicit outputs for an idl file. Returns output
    directory, outputs, and variables and flags that are required."""
    config = self._TargetConfig(config)
    midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
    def midl(name, default=None):
      return self.ConvertVSMacros(midl_get(name, default=default),
                                  config=config)
    tlb = midl('TypeLibraryName', default='${root}.tlb')
    header = midl('HeaderFileName', default='${root}.h')
    dlldata = midl('DLLDataFileName', default='dlldata.c')
    iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
    proxy = midl('ProxyFileName', default='${root}_p.c')
    # Note that .tlb is not included in the outputs because it is not always
    # generated; whether it is depends on the content of the input idl file.
    outdir = midl('OutputDirectory', default='')
    output = [header, dlldata, iid, proxy]
    variables = [('tlb', tlb),
                 ('h', header),
                 ('dlldata', dlldata),
                 ('iid', iid),
                 ('proxy', proxy)]
    # TODO(scottmg): Are there configuration settings to set these flags?
    target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64'
    flags = ['/char', 'signed', '/env', target_platform, '/Oicf']
    return outdir, output, variables, flags
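  # For example, an input foo.idl with default MIDL settings produces the
  # outputs ${root}.h, dlldata.c, ${root}_i.c and ${root}_p.c, where ${root}
  # is a placeholder the generator expands per source file.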


def _LanguageMatchesForPch(source_ext, pch_source_ext):
  c_exts = ('.c',)
  cc_exts = ('.cc', '.cxx', '.cpp')
  return ((source_ext in c_exts and pch_source_ext in c_exts) or
          (source_ext in cc_exts and pch_source_ext in cc_exts))
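# For example, a '.cc' pch source matches '.cpp' and '.cxx' sources but not
# '.c' ones, so C files never pick up a C++ precompiled header (and vice
# versa).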


class PrecompiledHeader(object):
  """Helper to generate dependencies and build rules to handle generation of
  precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
  """
  def __init__(
      self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext):
    self.settings = settings
    self.config = config
    pch_source = self.settings.msvs_precompiled_source[self.config]
    self.pch_source = gyp_to_build_path(pch_source)
    filename, _ = os.path.splitext(pch_source)
    self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()

  def _PchHeader(self):
    """Get the header that will appear in an #include line for all source
    files."""
    return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]

  def GetObjDependencies(self, sources, objs, arch):
    """Given a list of sources files and the corresponding object files,
    returns a list of the pch files that should be depended upon. The
    additional wrapping in the return value is for interface compatibility
    with make.py on Mac, and xcode_emulation.py."""
    assert arch is None
    if not self._PchHeader():
      return []
    pch_ext = os.path.splitext(self.pch_source)[1]
    for source in sources:
      if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
        return [(None, None, self.output_obj)]
    return []

  def GetPchBuildCommands(self, arch):
    """Not used on Windows as there are no additional build steps required
    (instead, existing steps are modified in GetFlagsModifications below)."""
    return []

  def GetFlagsModifications(self, input, output, implicit, command,
                            cflags_c, cflags_cc, expand_special):
    """Get the modified cflags and implicit dependencies that should be used
    for the pch compilation step."""
    if input == self.pch_source:
      pch_output = ['/Yc' + self._PchHeader()]
      if command == 'cxx':
        return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))],
                self.output_obj, [])
      elif command == 'cc':
        return ([('cflags_c', map(expand_special, cflags_c + pch_output))],
                self.output_obj, [])
    return [], output, implicit


vs_version = None
def GetVSVersion(generator_flags):
  global vs_version
  if not vs_version:
    vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
        generator_flags.get('msvs_version', 'auto'),
        allow_fallback=False)
  return vs_version

def _GetVsvarsSetupArgs(generator_flags, arch):
  vs = GetVSVersion(generator_flags)
  return vs.SetupScript()

def ExpandMacros(string, expansions):
  """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
  for the canonical way to retrieve a suitable dict."""
  if '$' in string:
    for old, new in expansions.iteritems():
      assert '$(' not in new, new
      string = string.replace(old, new)
  return string
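# For example:
#   ExpandMacros('$(OutDir)\\foo', {'$(OutDir)\\': 'out\\Release\\'})
# returns 'out\\Release\\foo'; strings containing no '$' are returned as-is.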

def _ExtractImportantEnvironment(output_of_set):
  """Extracts environment variables required for the toolchain to run from
  a textual dump output by the cmd.exe 'set' command."""
  envvars_to_save = (
      'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
      'include',
      'lib',
      'libpath',
      'path',
      'pathext',
      'systemroot',
      'temp',
      'tmp',
      )
  env = {}
  for line in output_of_set.splitlines():
    for envvar in envvars_to_save:
      if re.match(envvar + '=', line.lower()):
        var, setting = line.split('=', 1)
        if envvar == 'path':
          # Our own rules (for running gyp-win-tool) and other actions in
          # Chromium rely on python being in the path. Add the path to this
          # python here so that if it's not in the path when ninja is run
          # later, python will still be found.
          setting = os.path.dirname(sys.executable) + os.pathsep + setting
        env[var.upper()] = setting
        break
  for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
    if required not in env:
      raise Exception('Environment variable "%s" '
                      'required to be set to valid path' % required)
  return env

def _FormatAsEnvironmentBlock(envvar_dict):
  """Format as an 'environment block' directly suitable for CreateProcess.
  Briefly this is a list of key=value\0, terminated by an additional \0. See
  CreateProcess documentation for more details."""
  block = ''
  nul = '\0'
  for key, value in envvar_dict.iteritems():
    block += key + '=' + value + nul
  block += nul
  return block
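# For example, {'TMP': 'C:\\t', 'PATH': 'C:\\bin'} is formatted as the
# characters TMP=C:\t and PATH=C:\bin, each followed by a NUL byte, with one
# extra NUL terminating the whole block (entry order follows dict iteration).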

def _ExtractCLPath(output_of_where):
  """Gets the path to cl.exe based on the output of calling the environment
  setup batch file, followed by the equivalent of `where`."""
  # Take the first line, as that's the first found in the PATH.
  for line in output_of_where.strip().splitlines():
    if line.startswith('LOC:'):
      return line[len('LOC:'):].strip()

def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags,
                             system_includes, open_out):
  """It's not sufficient to have the absolute path to the compiler, linker,
  etc. on Windows, as those tools rely on .dlls being in the PATH. We also
  need to support both x86 and x64 compilers within the same build (to support
  msvs_target_platform hackery). Different architectures require a different
  compiler binary, and different supporting environment variables (INCLUDE,
  LIB, LIBPATH). So, we extract the environment here, wrap all invocations
  of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
  sets up the environment, and then we do not prefix the compiler with
  an absolute path, instead preferring something like "cl.exe" in the rule
  which will then run whichever the environment setup has put in the path.
  If this procedure for generating environment files does not meet your
  needs (e.g. for custom toolchains), pass
  "-G ninja_use_custom_environment_files" to gyp to suppress file
  generation and supply environment files you have prepared yourself."""
  archs = ('x86', 'x64')
  if generator_flags.get('ninja_use_custom_environment_files', 0):
    cl_paths = {}
    for arch in archs:
      cl_paths[arch] = 'cl.exe'
    return cl_paths
  vs = GetVSVersion(generator_flags)
  cl_paths = {}
  for arch in archs:
    # Extract environment variables for subprocesses.
    args = vs.SetupScript(arch)
    args.extend(('&&', 'set'))
    popen = subprocess.Popen(
        args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    variables, _ = popen.communicate()
    env = _ExtractImportantEnvironment(variables)

    # Inject system includes from gyp files into INCLUDE.
    if system_includes:
      system_includes = system_includes | OrderedSet(
                                              env.get('INCLUDE', '').split(';'))
      env['INCLUDE'] = ';'.join(system_includes)

    env_block = _FormatAsEnvironmentBlock(env)
    f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
    f.write(env_block)
    f.close()

    # Find cl.exe location for this architecture.
    args = vs.SetupScript(arch)
    args.extend(('&&',
      'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
    popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
    output, _ = popen.communicate()
    cl_paths[arch] = _ExtractCLPath(output)
  return cl_paths

def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
  """Emulate behavior of msvs_error_on_missing_sources present in the msvs
  generator: Check that all regular source files, i.e. not created at run time,
  exist on disk. Missing files cause needless recompilation when building via
  VS, and we want this check to match for people/bots that build using ninja,
  so they're not surprised when the VS build fails."""
  if int(generator_flags.get('msvs_error_on_missing_sources', 0)):
    no_specials = filter(lambda x: '$' not in x, sources)
    relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
    missing = filter(lambda x: not os.path.exists(x), relative)
    if missing:
      # They'll look like out\Release\..\..\stuff\things.cc, so normalize the
      # path for a slightly less crazy looking output.
      cleaned_up = [os.path.normpath(x) for x in missing]
      raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))

# Sets some values in default_variables that are required by many
# generators when run on Windows.
def CalculateCommonVariables(default_variables, params):
  generator_flags = params.get('generator_flags', {})

  # Set a variable so conditions can be based on msvs_version.
  msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
  default_variables['MSVS_VERSION'] = msvs_version.ShortName()

  # To determine processor word size on Windows, in addition to checking
  # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
  # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
# contains the actual word size of the system when running through WOW64).
  if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
      '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
    default_variables['MSVS_OS_BITS'] = 64
  else:
    default_variables['MSVS_OS_BITS'] = 32