lalBi94
2023-03-05 13:23:23 +01:00
commit 7bc56c09b5
14034 changed files with 1834369 additions and 0 deletions

4
node_modules/node-gyp/gyp/.flake8 generated vendored Normal file

@@ -0,0 +1,4 @@
[flake8]
max-complexity = 101
max-line-length = 88
extend-ignore = E203 # whitespace before ':' to agree with psf/black


@@ -0,0 +1,30 @@
# TODO: Enable os: windows-latest
# TODO: Enable pytest --doctest-modules
name: Python_tests
on: [push, pull_request]
jobs:
Python_tests:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
max-parallel: 8
matrix:
os: [macos-latest, ubuntu-latest] # , windows-latest]
python-version: [3.6, 3.7, 3.8, 3.9]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements_dev.txt
- name: Lint with flake8
run: flake8 . --ignore=E203,W503 --max-complexity=101 --max-line-length=88 --show-source --statistics
- name: Test with pytest
run: pytest
# - name: Run doctests with pytest
# run: pytest --doctest-modules


@@ -0,0 +1,42 @@
name: node-gyp integration
on: [push, pull_request]
jobs:
test:
strategy:
fail-fast: false
matrix:
os: [macos-latest, ubuntu-latest, windows-latest]
python: [3.6, 3.9]
runs-on: ${{ matrix.os }}
steps:
- name: Clone gyp-next
uses: actions/checkout@v2
with:
path: gyp-next
- name: Clone nodejs/node-gyp
uses: actions/checkout@v2
with:
repository: nodejs/node-gyp
path: node-gyp
- uses: actions/setup-node@v2
with:
node-version: 14.x
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python }}
- name: Install dependencies
run: |
cd node-gyp
npm install --no-progress
- name: Replace gyp in node-gyp
shell: bash
run: |
rm -rf node-gyp/gyp
cp -r gyp-next node-gyp/gyp
- name: Run tests
run: |
cd node-gyp
npm test


@@ -0,0 +1,27 @@
name: Node.js Windows integration
on: [push, pull_request]
jobs:
build-windows:
runs-on: windows-latest
steps:
- name: Clone gyp-next
uses: actions/checkout@v2
with:
path: gyp-next
- name: Clone nodejs/node
uses: actions/checkout@v2
with:
repository: nodejs/node
path: node
- name: Install deps
run: choco install nasm
- name: Replace gyp in Node.js
run: |
rm -Recurse node/tools/gyp
cp -Recurse gyp-next node/tools/gyp
- name: Build Node.js
run: |
cd node
./vcbuild.bat


@@ -0,0 +1,16 @@
on:
push:
branches:
- main
name: release-please
jobs:
release-please:
runs-on: ubuntu-latest
steps:
- uses: GoogleCloudPlatform/release-please-action@v2
with:
token: ${{ secrets.GITHUB_TOKEN }}
release-type: python
package-name: gyp-next
bump-minor-pre-major: Yes

16
node_modules/node-gyp/gyp/AUTHORS generated vendored Normal file

@@ -0,0 +1,16 @@
# Names should be added to this file like so:
# Name or Organization <email address>
Google Inc. <*@google.com>
Bloomberg Finance L.P. <*@bloomberg.net>
IBM Inc. <*@*.ibm.com>
Yandex LLC <*@yandex-team.ru>
Steven Knight <knight@baldmt.com>
Ryan Norton <rnorton10@gmail.com>
David J. Sankel <david@sankelsoftware.com>
Eric N. Vander Weele <ericvw@gmail.com>
Tom Freudenberg <th.freudenberg@gmail.com>
Julien Brianceau <jbriance@cisco.com>
Refael Ackermann <refack@gmail.com>
Ujjwal Sharma <ryzokuken@disroot.org>

177
node_modules/node-gyp/gyp/CHANGELOG.md generated vendored Normal file

@@ -0,0 +1,177 @@
# Changelog
## [0.10.0](https://www.github.com/nodejs/gyp-next/compare/v0.9.6...v0.10.0) (2021-08-26)
### Features
* **msvs:** add support for Visual Studio 2022 ([#124](https://www.github.com/nodejs/gyp-next/issues/124)) ([4bd9215](https://www.github.com/nodejs/gyp-next/commit/4bd9215c44d300f06e916aec1d6327c22b78272d))
### [0.9.6](https://www.github.com/nodejs/gyp-next/compare/v0.9.5...v0.9.6) (2021-08-23)
### Bug Fixes
* align flake8 test ([#122](https://www.github.com/nodejs/gyp-next/issues/122)) ([f1faa8d](https://www.github.com/nodejs/gyp-next/commit/f1faa8d3081e1a47e917ff910892f00dff16cf8a))
* **msvs:** fix paths again in action command arguments ([#121](https://www.github.com/nodejs/gyp-next/issues/121)) ([7159dfb](https://www.github.com/nodejs/gyp-next/commit/7159dfbc5758c9ec717e215f2c36daf482c846a1))
### [0.9.5](https://www.github.com/nodejs/gyp-next/compare/v0.9.4...v0.9.5) (2021-08-18)
### Bug Fixes
* add python 3.6 to node-gyp integration test ([3462d4c](https://www.github.com/nodejs/gyp-next/commit/3462d4ce3c31cce747513dc7ca9760c81d57c60e))
* revert for windows compatibility ([d078e7d](https://www.github.com/nodejs/gyp-next/commit/d078e7d7ae080ddae243188f6415f940376a7368))
* support msvs_quote_cmd in ninja generator ([#117](https://www.github.com/nodejs/gyp-next/issues/117)) ([46486ac](https://www.github.com/nodejs/gyp-next/commit/46486ac6e9329529d51061e006a5b39631e46729))
### [0.9.4](https://www.github.com/nodejs/gyp-next/compare/v0.9.3...v0.9.4) (2021-08-09)
### Bug Fixes
* .S is an extension for asm file on Windows ([#115](https://www.github.com/nodejs/gyp-next/issues/115)) ([d2fad44](https://www.github.com/nodejs/gyp-next/commit/d2fad44ef3a79ca8900f1307060153ded57053fc))
### [0.9.3](https://www.github.com/nodejs/gyp-next/compare/v0.9.2...v0.9.3) (2021-07-07)
### Bug Fixes
* build failure with ninja and Python 3 on Windows ([#113](https://www.github.com/nodejs/gyp-next/issues/113)) ([c172d10](https://www.github.com/nodejs/gyp-next/commit/c172d105deff5db4244e583942215918fa80dd3c))
### [0.9.2](https://www.github.com/nodejs/gyp-next/compare/v0.9.1...v0.9.2) (2021-05-21)
### Bug Fixes
* add support of utf8 encoding ([#105](https://www.github.com/nodejs/gyp-next/issues/105)) ([4d0f93c](https://www.github.com/nodejs/gyp-next/commit/4d0f93c249286d1f0c0f665f5fe7346119f98cf1))
### [0.9.1](https://www.github.com/nodejs/gyp-next/compare/v0.9.0...v0.9.1) (2021-05-14)
### Bug Fixes
* py lint ([3b6a8ee](https://www.github.com/nodejs/gyp-next/commit/3b6a8ee7a66193a8a6867eba9e1d2b70bdf04402))
## [0.9.0](https://www.github.com/nodejs/gyp-next/compare/v0.8.1...v0.9.0) (2021-05-13)
### Features
* use LDFLAGS_host for host toolset ([#98](https://www.github.com/nodejs/gyp-next/issues/98)) ([bea5c7b](https://www.github.com/nodejs/gyp-next/commit/bea5c7bd67d6ad32acbdce79767a5481c70675a2))
### Bug Fixes
* msvs.py: remove overindentation ([#102](https://www.github.com/nodejs/gyp-next/issues/102)) ([3f83e99](https://www.github.com/nodejs/gyp-next/commit/3f83e99056d004d9579ceb786e06b624ddc36529))
* update gyp.el to change case to cl-case ([#93](https://www.github.com/nodejs/gyp-next/issues/93)) ([13d5b66](https://www.github.com/nodejs/gyp-next/commit/13d5b66aab35985af9c2fb1174fdc6e1c1407ecc))
### [0.8.1](https://www.github.com/nodejs/gyp-next/compare/v0.8.0...v0.8.1) (2021-02-18)
### Bug Fixes
* update shebang lines from python to python3 ([#94](https://www.github.com/nodejs/gyp-next/issues/94)) ([a1b0d41](https://www.github.com/nodejs/gyp-next/commit/a1b0d4171a8049a4ab7a614202063dec332f2df4))
## [0.8.0](https://www.github.com/nodejs/gyp-next/compare/v0.7.0...v0.8.0) (2021-01-15)
### ⚠ BREAKING CHANGES
* remove support for Python 2
### Bug Fixes
* revert posix build job ([#86](https://www.github.com/nodejs/gyp-next/issues/86)) ([39dc34f](https://www.github.com/nodejs/gyp-next/commit/39dc34f0799c074624005fb9bbccf6e028607f9d))
### gyp
* Remove support for Python 2 ([#88](https://www.github.com/nodejs/gyp-next/issues/88)) ([22e4654](https://www.github.com/nodejs/gyp-next/commit/22e465426fd892403c95534229af819a99c3f8dc))
## [0.7.0](https://www.github.com/nodejs/gyp-next/compare/v0.6.2...v0.7.0) (2020-12-17)
### ⚠ BREAKING CHANGES
* **msvs:** On Windows, arguments passed to the "action" commands are no longer transformed to replace slashes with backslashes.
### Features
* **xcode:** --cross-compiling overrides arch-specific settings ([973bae0](https://www.github.com/nodejs/gyp-next/commit/973bae0b7b08be7b680ecae9565fbd04b3e0787d))
### Bug Fixes
* **msvs:** do not fix paths in action command arguments ([fc22f83](https://www.github.com/nodejs/gyp-next/commit/fc22f8335e2016da4aae4f4233074bd651d2faea))
* cmake on python 3 ([fd61f5f](https://www.github.com/nodejs/gyp-next/commit/fd61f5faa5275ec8fc98e3c7868c0dd46f109540))
* ValueError: invalid mode: 'rU' while trying to load binding.gyp ([d0504e6](https://www.github.com/nodejs/gyp-next/commit/d0504e6700ce48f44957a4d5891b142a60be946f))
* xcode cmake parsing ([eefe8d1](https://www.github.com/nodejs/gyp-next/commit/eefe8d10e99863bc4ac7e2ed32facd608d400d4b))
### [0.6.2](https://www.github.com/nodejs/gyp-next/compare/v0.6.1...v0.6.2) (2020-10-16)
### Bug Fixes
* do not rewrite absolute paths to avoid long paths ([#74](https://www.github.com/nodejs/gyp-next/issues/74)) ([c2ccc1a](https://www.github.com/nodejs/gyp-next/commit/c2ccc1a81f7f94433a94f4d01a2e820db4c4331a))
* only include MARMASM when toolset is target ([5a2794a](https://www.github.com/nodejs/gyp-next/commit/5a2794aefb58f0c00404ff042b61740bc8b8d5cd))
### [0.6.1](https://github.com/nodejs/gyp-next/compare/v0.6.0...v0.6.1) (2020-10-14)
### Bug Fixes
* Correctly rename object files for absolute paths in MSVS generator.
## [0.6.0](https://github.com/nodejs/gyp-next/compare/v0.5.0...v0.6.0) (2020-10-13)
### Features
* The Makefile generator will now output shared libraries directly to the product directory on all platforms (previously only macOS).
## [0.5.0](https://github.com/nodejs/gyp-next/compare/v0.4.0...v0.5.0) (2020-09-30)
### Features
* Extended compile_commands_json generator to consider more file extensions than just `c` and `cc`. `cpp` and `cxx` are now supported.
* Source files with duplicate basenames are now supported.
### Removed
* The `--no-duplicate-basename-check` option was removed.
* The `msvs_enable_marmasm` configuration option was removed in favor of auto-inclusion of the "marmasm" sections for Windows on ARM.
## [0.4.0](https://github.com/nodejs/gyp-next/compare/v0.3.0...v0.4.0) (2020-07-14)
### Features
* Added support for passing arbitrary architectures to Xcode builds, enables `arm64` builds.
### Bug Fixes
* Fixed a bug on Solaris where copying archives failed.
## [0.3.0](https://github.com/nodejs/gyp-next/compare/v0.2.1...v0.3.0) (2020-06-06)
### Features
* Added support for MSVC cross-compilation. This allows compilation on x64 for a Windows ARM target.
### Bug Fixes
* Fixed XCode CLT version detection on macOS Catalina.
### [0.2.1](https://github.com/nodejs/gyp-next/compare/v0.2.0...v0.2.1) (2020-05-05)
### Bug Fixes
* Relicensed to Node.js contributors.
* Fixed Windows bug introduced in v0.2.0.
## [0.2.0](https://github.com/nodejs/gyp-next/releases/tag/v0.2.0) (2020-04-06)
This is the first release of this project, based on https://chromium.googlesource.com/external/gyp with changes made over the years in Node.js and node-gyp.

4
node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md generated vendored Normal file

@@ -0,0 +1,4 @@
# Code of Conduct
* [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/HEAD/CODE_OF_CONDUCT.md)
* [Node.js Moderation Policy](https://github.com/nodejs/admin/blob/HEAD/Moderation-Policy.md)

32
node_modules/node-gyp/gyp/CONTRIBUTING.md generated vendored Normal file

@@ -0,0 +1,32 @@
# Contributing to gyp-next
## Code of Conduct
This project is bound to the [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/HEAD/CODE_OF_CONDUCT.md).
<a id="developers-certificate-of-origin"></a>
## Developer's Certificate of Origin 1.1
By making a contribution to this project, I certify that:
* (a) The contribution was created in whole or in part by me and I
have the right to submit it under the open source license
indicated in the file; or
* (b) The contribution is based upon previous work that, to the best
of my knowledge, is covered under an appropriate open source
license and I have the right under that license to submit that
work with modifications, whether created in whole or in part
by me, under the same open source license (unless I am
permitted to submit under a different license), as indicated
in the file; or
* (c) The contribution was provided directly to me by some other
person who certified (a), (b) or (c) and I have not modified
it.
* (d) I understand and agree that this project and the contribution
are public and that a record of the contribution (including all
personal information I submit with it, including my sign-off) is
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.

28
node_modules/node-gyp/gyp/LICENSE generated vendored Normal file

@@ -0,0 +1,28 @@
Copyright (c) 2020 Node.js contributors. All rights reserved.
Copyright (c) 2009 Google Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

7
node_modules/node-gyp/gyp/README.md generated vendored Normal file

@@ -0,0 +1,7 @@
GYP can Generate Your Projects.
===================================
Documents are available at [gyp.gsrc.io](https://gyp.gsrc.io), or you can check out the ```md-pages``` branch to read those documents offline.
__gyp-next__ is [released](https://github.com/nodejs/gyp-next/releases) to the [__Python Packaging Index__](https://pypi.org/project/gyp-next) (PyPI) and can be installed with the command:
* `python3 -m pip install gyp-next`

12
node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc generated vendored Normal file

@@ -0,0 +1,12 @@
// Copyright (c) 2013 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is
// then used during the final link for modules that have large PDBs. Otherwise,
// the linker will generate a pdb with a page size of 1KB, which imposes a limit
// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler
// (rather than the linker), this limit is avoided. With this in place PDBs may
// grow to 2GB.
//
// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py.
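
For context: a target opts into this shim through the "msvs_large_pdb" flag that MSVSUtil.InsertLargePdbShims (added further down in this commit) checks for. A minimal, hypothetical .gyp target sketch (gyp files are Python-literal dicts; the target name and source below are made up):

# Hypothetical example -- only the "msvs_large_pdb" key comes from this commit.
{
    "targets": [
        {
            "target_name": "big_module",
            "type": "shared_library",
            "sources": ["big_module.cc"],
            # Triggers InsertLargePdbShims: the link step is seeded with an
            # empty 4KB-pagesize PDB compiled from large-pdb-shim.cc above.
            "msvs_large_pdb": 1,
        }
    ]
}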

8
node_modules/node-gyp/gyp/gyp generated vendored Normal file

@@ -0,0 +1,8 @@
#!/bin/sh
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
set -e
base=$(dirname "$0")
exec python "${base}/gyp_main.py" "$@"

5
node_modules/node-gyp/gyp/gyp.bat generated vendored Normal file

@@ -0,0 +1,5 @@
@rem Copyright (c) 2009 Google Inc. All rights reserved.
@rem Use of this source code is governed by a BSD-style license that can be
@rem found in the LICENSE file.
@python "%~dp0gyp_main.py" %*

45
node_modules/node-gyp/gyp/gyp_main.py generated vendored Normal file

@@ -0,0 +1,45 @@
#!/usr/bin/env python3
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import subprocess
def IsCygwin():
# Function copied from pylib/gyp/common.py
try:
out = subprocess.Popen(
"uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
stdout, _ = out.communicate()
return "CYGWIN" in stdout.decode("utf-8")
except Exception:
return False
def UnixifyPath(path):
try:
if not IsCygwin():
return path
out = subprocess.Popen(
["cygpath", "-u", path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
stdout, _ = out.communicate()
return stdout.decode("utf-8")
except Exception:
return path
# Make sure we're using the version of pylib in this repo, not one installed
# elsewhere on the system. Also convert to Unix style path on Cygwin systems,
# else the 'gyp' library will not be found
path = UnixifyPath(sys.argv[0])
sys.path.insert(0, os.path.join(os.path.dirname(path), "pylib"))
import gyp # noqa: E402
if __name__ == "__main__":
sys.exit(gyp.script_main())

367
node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py generated vendored Normal file

@@ -0,0 +1,367 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""New implementation of Visual Studio project generation."""
import hashlib
import os
import random
from operator import attrgetter
import gyp.common
def cmp(x, y):
return (x > y) - (x < y)
# Initialize random number generator
random.seed()
# GUIDs for project types
ENTRY_TYPE_GUIDS = {
"project": "{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}",
"folder": "{2150E333-8FDC-42A3-9474-1A3956D46DE8}",
}
# ------------------------------------------------------------------------------
# Helper functions
def MakeGuid(name, seed="msvs_new"):
"""Returns a GUID for the specified target name.
Args:
name: Target name.
seed: Seed for MD5 hash.
Returns:
A GUID-like string calculated from the name and seed.
This generates something which looks like a GUID, but depends only on the
name and seed. This means the same name/seed will always generate the same
GUID, so that projects and solutions which refer to each other can explicitly
determine the GUID to refer to. It also means that the GUID will
not change when the project for a target is rebuilt.
"""
# Calculate a MD5 signature for the seed and name.
d = hashlib.md5((str(seed) + str(name)).encode("utf-8")).hexdigest().upper()
# Convert most of the signature to GUID form (discard the rest)
guid = (
"{"
+ d[:8]
+ "-"
+ d[8:12]
+ "-"
+ d[12:16]
+ "-"
+ d[16:20]
+ "-"
+ d[20:32]
+ "}"
)
return guid
# ------------------------------------------------------------------------------
class MSVSSolutionEntry:
def __cmp__(self, other):
# Sort by name then guid (so things are in order on vs2008).
return cmp((self.name, self.get_guid()), (other.name, other.get_guid()))
class MSVSFolder(MSVSSolutionEntry):
"""Folder in a Visual Studio project or solution."""
def __init__(self, path, name=None, entries=None, guid=None, items=None):
"""Initializes the folder.
Args:
path: Full path to the folder.
name: Name of the folder.
entries: List of folder entries to nest inside this folder. May contain
Folder or Project objects. May be None, if the folder is empty.
guid: GUID to use for folder, if not None.
items: List of solution items to include in the folder project. May be
None, if the folder does not directly contain items.
"""
if name:
self.name = name
else:
# Use last layer.
self.name = os.path.basename(path)
self.path = path
self.guid = guid
# Copy passed lists (or set to empty lists)
self.entries = sorted(entries or [], key=attrgetter("path"))
self.items = list(items or [])
self.entry_type_guid = ENTRY_TYPE_GUIDS["folder"]
def get_guid(self):
if self.guid is None:
# Use consistent guids for folders (so things don't regenerate).
self.guid = MakeGuid(self.path, seed="msvs_folder")
return self.guid
# ------------------------------------------------------------------------------
class MSVSProject(MSVSSolutionEntry):
"""Visual Studio project."""
def __init__(
self,
path,
name=None,
dependencies=None,
guid=None,
spec=None,
build_file=None,
config_platform_overrides=None,
fixpath_prefix=None,
):
"""Initializes the project.
Args:
path: Absolute path to the project file.
name: Name of project. If None, the name will be the same as the base
name of the project file.
dependencies: List of other Project objects this project is dependent
upon, if not None.
guid: GUID to use for project, if not None.
spec: Dictionary specifying how to build this project.
build_file: Filename of the .gyp file that the vcproj file comes from.
config_platform_overrides: optional dict of configuration platforms to
be used in place of the default for this target.
fixpath_prefix: the path used to adjust the behavior of _fixpath
"""
self.path = path
self.guid = guid
self.spec = spec
self.build_file = build_file
# Use project filename if name not specified
self.name = name or os.path.splitext(os.path.basename(path))[0]
# Copy passed lists (or set to empty lists)
self.dependencies = list(dependencies or [])
self.entry_type_guid = ENTRY_TYPE_GUIDS["project"]
if config_platform_overrides:
self.config_platform_overrides = config_platform_overrides
else:
self.config_platform_overrides = {}
self.fixpath_prefix = fixpath_prefix
self.msbuild_toolset = None
def set_dependencies(self, dependencies):
self.dependencies = list(dependencies or [])
def get_guid(self):
if self.guid is None:
# Set GUID from path
# TODO(rspangler): This is fragile.
# 1. We can't just use the project filename sans path, since there could
# be multiple projects with the same base name (for example,
# foo/unittest.vcproj and bar/unittest.vcproj).
# 2. The path needs to be relative to $SOURCE_ROOT, so that the project
# GUID is the same whether it's included from base/base.sln or
# foo/bar/baz/baz.sln.
# 3. The GUID needs to be the same each time this builder is invoked, so
# that we don't need to rebuild the solution when the project changes.
# 4. We should be able to handle pre-built project files by reading the
# GUID from the files.
self.guid = MakeGuid(self.name)
return self.guid
def set_msbuild_toolset(self, msbuild_toolset):
self.msbuild_toolset = msbuild_toolset
# ------------------------------------------------------------------------------
class MSVSSolution:
"""Visual Studio solution."""
def __init__(
self, path, version, entries=None, variants=None, websiteProperties=True
):
"""Initializes the solution.
Args:
path: Path to solution file.
version: Format version to emit.
entries: List of entries in solution. May contain Folder or Project
objects. May be None, if the folder is empty.
variants: List of build variant strings. If none, a default list will
be used.
websiteProperties: Flag to decide if the website properties section
is generated.
"""
self.path = path
self.websiteProperties = websiteProperties
self.version = version
# Copy passed lists (or set to empty lists)
self.entries = list(entries or [])
if variants:
# Copy passed list
self.variants = variants[:]
else:
# Use default
self.variants = ["Debug|Win32", "Release|Win32"]
# TODO(rspangler): Need to be able to handle a mapping of solution config
# to project config. Should we be able to handle variants being a dict,
# or add a separate variant_map variable? If it's a dict, we can't
# guarantee the order of variants since dict keys aren't ordered.
# TODO(rspangler): Automatically write to disk for now; should delay until
# node-evaluation time.
self.Write()
def Write(self, writer=gyp.common.WriteOnDiff):
"""Writes the solution file to disk.
Raises:
IndexError: An entry appears multiple times.
"""
# Walk the entry tree and collect all the folders and projects.
all_entries = set()
entries_to_check = self.entries[:]
while entries_to_check:
e = entries_to_check.pop(0)
# If this entry has been visited, nothing to do.
if e in all_entries:
continue
all_entries.add(e)
# If this is a folder, check its entries too.
if isinstance(e, MSVSFolder):
entries_to_check += e.entries
all_entries = sorted(all_entries, key=attrgetter("path"))
# Open file and print header
f = writer(self.path)
f.write(
"Microsoft Visual Studio Solution File, "
"Format Version %s\r\n" % self.version.SolutionVersion()
)
f.write("# %s\r\n" % self.version.Description())
# Project entries
sln_root = os.path.split(self.path)[0]
for e in all_entries:
relative_path = gyp.common.RelativePath(e.path, sln_root)
# msbuild does not accept an empty folder_name.
# use '.' in case relative_path is empty.
folder_name = relative_path.replace("/", "\\") or "."
f.write(
'Project("%s") = "%s", "%s", "%s"\r\n'
% (
e.entry_type_guid, # Entry type GUID
e.name, # Folder name
folder_name, # Folder name (again)
e.get_guid(), # Entry GUID
)
)
# TODO(rspangler): Need a way to configure this stuff
if self.websiteProperties:
f.write(
"\tProjectSection(WebsiteProperties) = preProject\r\n"
'\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
'\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
"\tEndProjectSection\r\n"
)
if isinstance(e, MSVSFolder):
if e.items:
f.write("\tProjectSection(SolutionItems) = preProject\r\n")
for i in e.items:
f.write(f"\t\t{i} = {i}\r\n")
f.write("\tEndProjectSection\r\n")
if isinstance(e, MSVSProject):
if e.dependencies:
f.write("\tProjectSection(ProjectDependencies) = postProject\r\n")
for d in e.dependencies:
f.write(f"\t\t{d.get_guid()} = {d.get_guid()}\r\n")
f.write("\tEndProjectSection\r\n")
f.write("EndProject\r\n")
# Global section
f.write("Global\r\n")
# Configurations (variants)
f.write("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n")
for v in self.variants:
f.write(f"\t\t{v} = {v}\r\n")
f.write("\tEndGlobalSection\r\n")
# Sort config guids for easier diffing of solution changes.
config_guids = []
config_guids_overrides = {}
for e in all_entries:
if isinstance(e, MSVSProject):
config_guids.append(e.get_guid())
config_guids_overrides[e.get_guid()] = e.config_platform_overrides
config_guids.sort()
f.write("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n")
for g in config_guids:
for v in self.variants:
nv = config_guids_overrides[g].get(v, v)
# Pick which project configuration to build for this solution
# configuration.
f.write(
"\t\t%s.%s.ActiveCfg = %s\r\n"
% (
g, # Project GUID
v, # Solution build configuration
nv, # Project build config for that solution config
)
)
# Enable project in this solution configuration.
f.write(
"\t\t%s.%s.Build.0 = %s\r\n"
% (
g, # Project GUID
v, # Solution build configuration
nv, # Project build config for that solution config
)
)
f.write("\tEndGlobalSection\r\n")
# TODO(rspangler): Should be able to configure this stuff too (though I've
# never seen this be any different)
f.write("\tGlobalSection(SolutionProperties) = preSolution\r\n")
f.write("\t\tHideSolutionNode = FALSE\r\n")
f.write("\tEndGlobalSection\r\n")
# Folder mappings
# Omit this section if there are no folders
if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]):
f.write("\tGlobalSection(NestedProjects) = preSolution\r\n")
for e in all_entries:
if not isinstance(e, MSVSFolder):
continue # Does not apply to projects, only folders
for subentry in e.entries:
f.write(f"\t\t{subentry.get_guid()} = {e.get_guid()}\r\n")
f.write("\tEndGlobalSection\r\n")
f.write("EndGlobal\r\n")
f.close()
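
The deterministic GUID scheme documented in MakeGuid above is easy to reproduce; a minimal standalone sketch mirroring the MD5-based formatting shown (it does not import the vendored module):

import hashlib

def make_guid(name, seed="msvs_new"):
    # Same recipe as MakeGuid above: MD5 of seed+name reshaped into GUID form,
    # so identical inputs always yield identical GUIDs across runs.
    d = hashlib.md5((str(seed) + str(name)).encode("utf-8")).hexdigest().upper()
    return "{%s-%s-%s-%s-%s}" % (d[:8], d[8:12], d[12:16], d[16:20], d[20:32])

assert make_guid("base/base.gyp:base") == make_guid("base/base.gyp:base")
print(make_guid("base/base.gyp:base"))  # stable across invocations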

206
node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py generated vendored Normal file

@@ -0,0 +1,206 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Visual Studio project reader/writer."""
import gyp.easy_xml as easy_xml
# ------------------------------------------------------------------------------
class Tool:
"""Visual Studio tool."""
def __init__(self, name, attrs=None):
"""Initializes the tool.
Args:
name: Tool name.
attrs: Dict of tool attributes; may be None.
"""
self._attrs = attrs or {}
self._attrs["Name"] = name
def _GetSpecification(self):
"""Creates an element for the tool.
Returns:
A new xml.dom.Element for the tool.
"""
return ["Tool", self._attrs]
class Filter:
"""Visual Studio filter - that is, a virtual folder."""
def __init__(self, name, contents=None):
"""Initializes the folder.
Args:
name: Filter (folder) name.
contents: List of filenames and/or Filter objects contained.
"""
self.name = name
self.contents = list(contents or [])
# ------------------------------------------------------------------------------
class Writer:
"""Visual Studio XML project writer."""
def __init__(self, project_path, version, name, guid=None, platforms=None):
"""Initializes the project.
Args:
project_path: Path to the project file.
version: Format version to emit.
name: Name of the project.
guid: GUID to use for project, if not None.
platforms: Array of string, the supported platforms. If null, ['Win32']
"""
self.project_path = project_path
self.version = version
self.name = name
self.guid = guid
# Default to Win32 for platforms.
if not platforms:
platforms = ["Win32"]
# Initialize the specifications of the various sections.
self.platform_section = ["Platforms"]
for platform in platforms:
self.platform_section.append(["Platform", {"Name": platform}])
self.tool_files_section = ["ToolFiles"]
self.configurations_section = ["Configurations"]
self.files_section = ["Files"]
# Keep a dict keyed on filename to speed up access.
self.files_dict = dict()
def AddToolFile(self, path):
"""Adds a tool file to the project.
Args:
path: Relative path from project to tool file.
"""
self.tool_files_section.append(["ToolFile", {"RelativePath": path}])
def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
"""Returns the specification for a configuration.
Args:
config_type: Type of configuration node.
config_name: Configuration name.
attrs: Dict of configuration attributes; may be None.
tools: List of tools (strings or Tool objects); may be None.
Returns:
The configuration specification, as a list in easy_xml format.
"""
# Handle defaults
if not attrs:
attrs = {}
if not tools:
tools = []
# Add configuration node and its attributes
node_attrs = attrs.copy()
node_attrs["Name"] = config_name
specification = [config_type, node_attrs]
# Add tool nodes and their attributes
if tools:
for t in tools:
if isinstance(t, Tool):
specification.append(t._GetSpecification())
else:
specification.append(Tool(t)._GetSpecification())
return specification
def AddConfig(self, name, attrs=None, tools=None):
"""Adds a configuration to the project.
Args:
name: Configuration name.
attrs: Dict of configuration attributes; may be None.
tools: List of tools (strings or Tool objects); may be None.
"""
spec = self._GetSpecForConfiguration("Configuration", name, attrs, tools)
self.configurations_section.append(spec)
def _AddFilesToNode(self, parent, files):
"""Adds files and/or filters to the parent node.
Args:
parent: Destination node
files: A list of Filter objects and/or relative paths to files.
Will call itself recursively, if the files list contains Filter objects.
"""
for f in files:
if isinstance(f, Filter):
node = ["Filter", {"Name": f.name}]
self._AddFilesToNode(node, f.contents)
else:
node = ["File", {"RelativePath": f}]
self.files_dict[f] = node
parent.append(node)
def AddFiles(self, files):
"""Adds files to the project.
Args:
files: A list of Filter objects and/or relative paths to files.
This makes a copy of the file/filter tree at the time of this call. If you
later add files to a Filter object which was passed into a previous call
to AddFiles(), it will not be reflected in this project.
"""
self._AddFilesToNode(self.files_section, files)
# TODO(rspangler) This also doesn't handle adding files to an existing
# filter. That is, it doesn't merge the trees.
def AddFileConfig(self, path, config, attrs=None, tools=None):
"""Adds a configuration to a file.
Args:
path: Relative path to the file.
config: Name of configuration to add.
attrs: Dict of configuration attributes; may be None.
tools: List of tools (strings or Tool objects); may be None.
Raises:
ValueError: Relative path does not match any file added via AddFiles().
"""
# Find the file node with the right relative path
parent = self.files_dict.get(path)
if not parent:
raise ValueError('AddFileConfig: file "%s" not in project.' % path)
# Add the config to the file node
spec = self._GetSpecForConfiguration("FileConfiguration", config, attrs, tools)
parent.append(spec)
def WriteIfChanged(self):
"""Writes the project file."""
# First create XML content definition
content = [
"VisualStudioProject",
{
"ProjectType": "Visual C++",
"Version": self.version.ProjectVersion(),
"Name": self.name,
"ProjectGUID": self.guid,
"RootNamespace": self.name,
"Keyword": "Win32Proj",
},
self.platform_section,
self.tool_files_section,
self.configurations_section,
["References"], # empty section
self.files_section,
["Globals"], # empty section
]
easy_xml.WriteXmlIfChanged(content, self.project_path, encoding="Windows-1252")
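
The writer above never emits XML directly; it assembles nested lists in the easy_xml format and hands them to WriteXmlIfChanged. A rough, hand-built sketch of such a specification (all names and values are illustrative):

# Element name, optional attribute dict, then child elements as further lists.
project_spec = [
    "VisualStudioProject",
    {
        "ProjectType": "Visual C++",
        "Version": "9.00",  # normally self.version.ProjectVersion()
        "Name": "hello",
        "ProjectGUID": "{11111111-2222-3333-4444-555555555555}",
        "RootNamespace": "hello",
        "Keyword": "Win32Proj",
    },
    ["Platforms", ["Platform", {"Name": "Win32"}]],
    ["ToolFiles"],
    ["Configurations",
        ["Configuration", {"Name": "Debug|Win32"},
            ["Tool", {"Name": "VCCLCompilerTool"}]]],
    ["References"],
    ["Files", ["File", {"RelativePath": "hello.cc"}]],
    ["Globals"],
]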

1270
node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py generated vendored Normal file

File diff suppressed because it is too large.

1547
node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py generated vendored Normal file

File diff suppressed because it is too large.

59
node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py generated vendored Normal file

@@ -0,0 +1,59 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Visual Studio project reader/writer."""
import gyp.easy_xml as easy_xml
class Writer:
"""Visual Studio XML tool file writer."""
def __init__(self, tool_file_path, name):
"""Initializes the tool file.
Args:
tool_file_path: Path to the tool file.
name: Name of the tool file.
"""
self.tool_file_path = tool_file_path
self.name = name
self.rules_section = ["Rules"]
def AddCustomBuildRule(
self, name, cmd, description, additional_dependencies, outputs, extensions
):
"""Adds a rule to the tool file.
Args:
name: Name of the rule.
description: Description of the rule.
cmd: Command line of the rule.
additional_dependencies: other files which may trigger the rule.
outputs: outputs of the rule.
extensions: extensions handled by the rule.
"""
rule = [
"CustomBuildRule",
{
"Name": name,
"ExecutionDescription": description,
"CommandLine": cmd,
"Outputs": ";".join(outputs),
"FileExtensions": ";".join(extensions),
"AdditionalDependencies": ";".join(additional_dependencies),
},
]
self.rules_section.append(rule)
def WriteIfChanged(self):
"""Writes the tool file."""
content = [
"VisualStudioToolFile",
{"Version": "8.00", "Name": self.name},
self.rules_section,
]
easy_xml.WriteXmlIfChanged(
content, self.tool_file_path, encoding="Windows-1252"
)

153
node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py generated vendored Normal file

@@ -0,0 +1,153 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Visual Studio user preferences file writer."""
import os
import re
import socket # for gethostname
import gyp.easy_xml as easy_xml
# ------------------------------------------------------------------------------
def _FindCommandInPath(command):
"""If there are no slashes in the command given, this function
searches the PATH env to find the given command, and converts it
to an absolute path. We have to do this because MSVS is looking
for an actual file to launch a debugger on, not just a command
line. Note that this happens at GYP time, so anything needing to
be built needs to have a full path."""
if "/" in command or "\\" in command:
# If the command already has path elements (either relative or
# absolute), then assume it is constructed properly.
return command
else:
# Search through the path list and find an existing file that
# we can access.
paths = os.environ.get("PATH", "").split(os.pathsep)
for path in paths:
item = os.path.join(path, command)
if os.path.isfile(item) and os.access(item, os.X_OK):
return item
return command
def _QuoteWin32CommandLineArgs(args):
new_args = []
for arg in args:
# Replace all double-quotes with double-double-quotes to escape
# them for cmd shell, and then quote the whole thing if there
# are any.
if arg.find('"') != -1:
arg = '""'.join(arg.split('"'))
arg = '"%s"' % arg
# Otherwise, if there are any spaces, quote the whole arg.
elif re.search(r"[ \t\n]", arg):
arg = '"%s"' % arg
new_args.append(arg)
return new_args
class Writer:
"""Visual Studio XML user user file writer."""
def __init__(self, user_file_path, version, name):
"""Initializes the user file.
Args:
user_file_path: Path to the user file.
version: Version info.
name: Name of the user file.
"""
self.user_file_path = user_file_path
self.version = version
self.name = name
self.configurations = {}
def AddConfig(self, name):
"""Adds a configuration to the project.
Args:
name: Configuration name.
"""
self.configurations[name] = ["Configuration", {"Name": name}]
def AddDebugSettings(
self, config_name, command, environment={}, working_directory=""
):
"""Adds a DebugSettings node to the user file for a particular config.
Args:
command: command line to run. First element in the list is the
executable. All elements of the command will be quoted if
necessary.
environment: dict of environment variables to set. (optional)
working_directory: working directory in which to run the command. (optional)
"""
command = _QuoteWin32CommandLineArgs(command)
abs_command = _FindCommandInPath(command[0])
if environment and isinstance(environment, dict):
env_list = [f'{key}="{val}"' for (key, val) in environment.items()]
environment = " ".join(env_list)
else:
environment = ""
n_cmd = [
"DebugSettings",
{
"Command": abs_command,
"WorkingDirectory": working_directory,
"CommandArguments": " ".join(command[1:]),
"RemoteMachine": socket.gethostname(),
"Environment": environment,
"EnvironmentMerge": "true",
# Currently these are all "dummy" values that we're just setting
# in the default manner that MSVS does it. We could use some of
# these to add additional capabilities, I suppose, but they might
# not have parity with other platforms then.
"Attach": "false",
"DebuggerType": "3", # 'auto' debugger
"Remote": "1",
"RemoteCommand": "",
"HttpUrl": "",
"PDBPath": "",
"SQLDebugging": "",
"DebuggerFlavor": "0",
"MPIRunCommand": "",
"MPIRunArguments": "",
"MPIRunWorkingDirectory": "",
"ApplicationCommand": "",
"ApplicationArguments": "",
"ShimCommand": "",
"MPIAcceptMode": "",
"MPIAcceptFilter": "",
},
]
# Find the config, and add it if it doesn't exist.
if config_name not in self.configurations:
self.AddConfig(config_name)
# Add the DebugSettings onto the appropriate config.
self.configurations[config_name].append(n_cmd)
def WriteIfChanged(self):
"""Writes the user file."""
configs = ["Configurations"]
for config, spec in sorted(self.configurations.items()):
configs.append(spec)
content = [
"VisualStudioUserFile",
{"Version": self.version.ProjectVersion(), "Name": self.name},
configs,
]
easy_xml.WriteXmlIfChanged(
content, self.user_file_path, encoding="Windows-1252"
)
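
The Win32 argument quoting done by _QuoteWin32CommandLineArgs can be exercised standalone; a small sketch restating the rules above (not the vendored function):

import re

def quote_win32_args(args):
    # Embedded double-quotes are doubled and the whole argument is quoted;
    # otherwise arguments containing whitespace are simply wrapped in quotes.
    quoted = []
    for arg in args:
        if '"' in arg:
            arg = '"%s"' % arg.replace('"', '""')
        elif re.search(r"[ \t\n]", arg):
            arg = '"%s"' % arg
        quoted.append(arg)
    return quoted

print(quote_win32_args(["app.exe", "--msg", "hello world", 'say "hi"']))
# ['app.exe', '--msg', '"hello world"', '"say ""hi"""']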

271
node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py generated vendored Normal file

@@ -0,0 +1,271 @@
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions shared amongst the Windows generators."""
import copy
import os
# A dictionary mapping supported target types to extensions.
TARGET_TYPE_EXT = {
"executable": "exe",
"loadable_module": "dll",
"shared_library": "dll",
"static_library": "lib",
"windows_driver": "sys",
}
def _GetLargePdbShimCcPath():
"""Returns the path of the large_pdb_shim.cc file."""
this_dir = os.path.abspath(os.path.dirname(__file__))
src_dir = os.path.abspath(os.path.join(this_dir, "..", ".."))
win_data_dir = os.path.join(src_dir, "data", "win")
large_pdb_shim_cc = os.path.join(win_data_dir, "large-pdb-shim.cc")
return large_pdb_shim_cc
def _DeepCopySomeKeys(in_dict, keys):
"""Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
Arguments:
in_dict: The dictionary to copy.
keys: The keys to be copied. If a key is in this list and doesn't exist in
|in_dict| this is not an error.
Returns:
The partially deep-copied dictionary.
"""
d = {}
for key in keys:
if key not in in_dict:
continue
d[key] = copy.deepcopy(in_dict[key])
return d
def _SuffixName(name, suffix):
"""Add a suffix to the end of a target.
Arguments:
name: name of the target (foo#target)
suffix: the suffix to be added
Returns:
Target name with suffix added (foo_suffix#target)
"""
parts = name.rsplit("#", 1)
parts[0] = f"{parts[0]}_{suffix}"
return "#".join(parts)
def _ShardName(name, number):
"""Add a shard number to the end of a target.
Arguments:
name: name of the target (foo#target)
number: shard number
Returns:
Target name with shard added (foo_1#target)
"""
return _SuffixName(name, str(number))
def ShardTargets(target_list, target_dicts):
"""Shard some targets apart to work around the linkers limits.
Arguments:
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair.
Returns:
Tuple of the new sharded versions of the inputs.
"""
# Gather the targets to shard, and how many pieces.
targets_to_shard = {}
for t in target_dicts:
shards = int(target_dicts[t].get("msvs_shard", 0))
if shards:
targets_to_shard[t] = shards
# Shard target_list.
new_target_list = []
for t in target_list:
if t in targets_to_shard:
for i in range(targets_to_shard[t]):
new_target_list.append(_ShardName(t, i))
else:
new_target_list.append(t)
# Shard target_dict.
new_target_dicts = {}
for t in target_dicts:
if t in targets_to_shard:
for i in range(targets_to_shard[t]):
name = _ShardName(t, i)
new_target_dicts[name] = copy.copy(target_dicts[t])
new_target_dicts[name]["target_name"] = _ShardName(
new_target_dicts[name]["target_name"], i
)
sources = new_target_dicts[name].get("sources", [])
new_sources = []
for pos in range(i, len(sources), targets_to_shard[t]):
new_sources.append(sources[pos])
new_target_dicts[name]["sources"] = new_sources
else:
new_target_dicts[t] = target_dicts[t]
# Shard dependencies.
for t in sorted(new_target_dicts):
for deptype in ("dependencies", "dependencies_original"):
dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
new_dependencies = []
for d in dependencies:
if d in targets_to_shard:
for i in range(targets_to_shard[d]):
new_dependencies.append(_ShardName(d, i))
else:
new_dependencies.append(d)
new_target_dicts[t][deptype] = new_dependencies
return (new_target_list, new_target_dicts)
def _GetPdbPath(target_dict, config_name, vars):
"""Returns the path to the PDB file that will be generated by a given
configuration.
The lookup proceeds as follows:
- Look for an explicit path in the VCLinkerTool configuration block.
- Look for an 'msvs_large_pdb_path' variable.
- Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
specified.
- Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
Arguments:
target_dict: The target dictionary to be searched.
config_name: The name of the configuration of interest.
vars: A dictionary of common GYP variables with generator-specific values.
Returns:
The path of the corresponding PDB file.
"""
config = target_dict["configurations"][config_name]
msvs = config.setdefault("msvs_settings", {})
linker = msvs.get("VCLinkerTool", {})
pdb_path = linker.get("ProgramDatabaseFile")
if pdb_path:
return pdb_path
variables = target_dict.get("variables", {})
pdb_path = variables.get("msvs_large_pdb_path", None)
if pdb_path:
return pdb_path
pdb_base = target_dict.get("product_name", target_dict["target_name"])
pdb_base = "{}.{}.pdb".format(pdb_base, TARGET_TYPE_EXT[target_dict["type"]])
pdb_path = vars["PRODUCT_DIR"] + "/" + pdb_base
return pdb_path
def InsertLargePdbShims(target_list, target_dicts, vars):
"""Insert a shim target that forces the linker to use 4KB pagesize PDBs.
This is a workaround for targets with PDBs greater than 1GB in size, the
limit for the 1KB pagesize PDBs created by the linker by default.
Arguments:
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair.
vars: A dictionary of common GYP variables with generator-specific values.
Returns:
Tuple of the shimmed version of the inputs.
"""
# Determine which targets need shimming.
targets_to_shim = []
for t in target_dicts:
target_dict = target_dicts[t]
# We only want to shim targets that have msvs_large_pdb enabled.
if not int(target_dict.get("msvs_large_pdb", 0)):
continue
# This is intended for executable, shared_library and loadable_module
# targets where every configuration is set up to produce a PDB output.
# If any of these conditions is not true then the shim logic will fail
# below.
targets_to_shim.append(t)
large_pdb_shim_cc = _GetLargePdbShimCcPath()
for t in targets_to_shim:
target_dict = target_dicts[t]
target_name = target_dict.get("target_name")
base_dict = _DeepCopySomeKeys(
target_dict, ["configurations", "default_configuration", "toolset"]
)
# This is the dict for copying the source file (part of the GYP tree)
# to the intermediate directory of the project. This is necessary because
# we can't always build a relative path to the shim source file (on Windows
# GYP and the project may be on different drives), and Ninja hates absolute
# paths (it ends up generating the .obj and .obj.d alongside the source
# file, polluting GYP's tree).
copy_suffix = "large_pdb_copy"
copy_target_name = target_name + "_" + copy_suffix
full_copy_target_name = _SuffixName(t, copy_suffix)
shim_cc_basename = os.path.basename(large_pdb_shim_cc)
shim_cc_dir = vars["SHARED_INTERMEDIATE_DIR"] + "/" + copy_target_name
shim_cc_path = shim_cc_dir + "/" + shim_cc_basename
copy_dict = copy.deepcopy(base_dict)
copy_dict["target_name"] = copy_target_name
copy_dict["type"] = "none"
copy_dict["sources"] = [large_pdb_shim_cc]
copy_dict["copies"] = [
{"destination": shim_cc_dir, "files": [large_pdb_shim_cc]}
]
# This is the dict for the PDB generating shim target. It depends on the
# copy target.
shim_suffix = "large_pdb_shim"
shim_target_name = target_name + "_" + shim_suffix
full_shim_target_name = _SuffixName(t, shim_suffix)
shim_dict = copy.deepcopy(base_dict)
shim_dict["target_name"] = shim_target_name
shim_dict["type"] = "static_library"
shim_dict["sources"] = [shim_cc_path]
shim_dict["dependencies"] = [full_copy_target_name]
# Set up the shim to output its PDB to the same location as the final linker
# target.
for config_name, config in shim_dict.get("configurations").items():
pdb_path = _GetPdbPath(target_dict, config_name, vars)
# A few keys that we don't want to propagate.
for key in ["msvs_precompiled_header", "msvs_precompiled_source", "test"]:
config.pop(key, None)
msvs = config.setdefault("msvs_settings", {})
# Update the compiler directives in the shim target.
compiler = msvs.setdefault("VCCLCompilerTool", {})
compiler["DebugInformationFormat"] = "3"
compiler["ProgramDataBaseFileName"] = pdb_path
# Set the explicit PDB path in the appropriate configuration of the
# original target.
config = target_dict["configurations"][config_name]
msvs = config.setdefault("msvs_settings", {})
linker = msvs.setdefault("VCLinkerTool", {})
linker["GenerateDebugInformation"] = "true"
linker["ProgramDatabaseFile"] = pdb_path
# Add the new targets. They must go to the beginning of the list so that
# the dependency generation works as expected in ninja.
target_list.insert(0, full_copy_target_name)
target_list.insert(0, full_shim_target_name)
target_dicts[full_copy_target_name] = copy_dict
target_dicts[full_shim_target_name] = shim_dict
# Update the original target to depend on the shim target.
target_dict.setdefault("dependencies", []).append(full_shim_target_name)
return (target_list, target_dicts)
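
The round-robin source split that ShardTargets applies when a target sets msvs_shard can be sketched standalone (target and file names are illustrative):

def shard_sources(sources, shards):
    # Shard i takes every shards-th source starting at offset i, matching the
    # range(i, len(sources), shards) loop in ShardTargets above.
    return [[sources[pos] for pos in range(i, len(sources), shards)]
            for i in range(shards)]

sources = ["a.cc", "b.cc", "c.cc", "d.cc", "e.cc"]
for i, chunk in enumerate(shard_sources(sources, 2)):
    print(f"foo_{i}#target", chunk)
# foo_0#target ['a.cc', 'c.cc', 'e.cc']
# foo_1#target ['b.cc', 'd.cc']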

574
node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py generated vendored Normal file

File diff suppressed because it is too large.

666
node_modules/node-gyp/gyp/pylib/gyp/__init__.py generated vendored Normal file

File diff suppressed because it is too large.

654
node_modules/node-gyp/gyp/pylib/gyp/common.py generated vendored Normal file

File diff suppressed because it is too large.

78
node_modules/node-gyp/gyp/pylib/gyp/common_test.py generated vendored Normal file

@@ -0,0 +1,78 @@
#!/usr/bin/env python3
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the common.py file."""
import gyp.common
import unittest
import sys
class TestTopologicallySorted(unittest.TestCase):
def test_Valid(self):
"""Test that sorting works on a valid graph with one possible order."""
graph = {
"a": ["b", "c"],
"b": [],
"c": ["d"],
"d": ["b"],
}
def GetEdge(node):
return tuple(graph[node])
self.assertEqual(
gyp.common.TopologicallySorted(graph.keys(), GetEdge), ["a", "c", "d", "b"]
)
def test_Cycle(self):
"""Test that an exception is thrown on a cyclic graph."""
graph = {
"a": ["b"],
"b": ["c"],
"c": ["d"],
"d": ["a"],
}
def GetEdge(node):
return tuple(graph[node])
self.assertRaises(
gyp.common.CycleError, gyp.common.TopologicallySorted, graph.keys(), GetEdge
)
class TestGetFlavor(unittest.TestCase):
"""Test that gyp.common.GetFlavor works as intended"""
original_platform = ""
def setUp(self):
self.original_platform = sys.platform
def tearDown(self):
sys.platform = self.original_platform
def assertFlavor(self, expected, argument, param):
sys.platform = argument
self.assertEqual(expected, gyp.common.GetFlavor(param))
def test_platform_default(self):
self.assertFlavor("freebsd", "freebsd9", {})
self.assertFlavor("freebsd", "freebsd10", {})
self.assertFlavor("openbsd", "openbsd5", {})
self.assertFlavor("solaris", "sunos5", {})
self.assertFlavor("solaris", "sunos", {})
self.assertFlavor("linux", "linux2", {})
self.assertFlavor("linux", "linux3", {})
self.assertFlavor("linux", "linux", {})
def test_param(self):
self.assertFlavor("foobar", "linux2", {"flavor": "foobar"})
if __name__ == "__main__":
unittest.main()
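
gyp/pylib/gyp/common.py itself is suppressed in this diff, so for reference here is a minimal standalone topological sort with cycle detection that matches the behavior these tests expect (a sketch, not the vendored implementation):

class CycleError(Exception):
    pass

def topologically_sorted(nodes, get_edges):
    # Depth-first visit; re-entering a node that is still being visited
    # means the graph has a cycle.
    visited, visiting, ordered = set(), set(), []

    def visit(node):
        if node in visiting:
            raise CycleError(node)
        if node in visited:
            return
        visited.add(node)
        visiting.add(node)
        for dep in get_edges(node):
            visit(dep)
        visiting.remove(node)
        ordered.insert(0, node)

    for node in sorted(nodes):
        visit(node)
    return ordered

graph = {"a": ["b", "c"], "b": [], "c": ["d"], "d": ["b"]}
print(topologically_sorted(graph, lambda n: graph[n]))  # ['a', 'c', 'd', 'b']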

165
node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py generated vendored Normal file

@@ -0,0 +1,165 @@
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import re
import os
import locale
from functools import reduce
def XmlToString(content, encoding="utf-8", pretty=False):
""" Writes the XML content to disk, touching the file only if it has changed.
Visual Studio files have a lot of pre-defined structures. This function makes
it easy to represent these structures as Python data structures, instead of
having to create a lot of function calls.
Each XML element of the content is represented as a list composed of:
1. The name of the element, a string,
2. The attributes of the element, a dictionary (optional), and
3+. The content of the element, if any. Strings are simple text nodes and
lists are child elements.
Example 1:
<test/>
becomes
['test']
Example 2:
<myelement a='value1' b='value2'>
<childtype>This is</childtype>
<childtype>it!</childtype>
</myelement>
becomes
['myelement', {'a':'value1', 'b':'value2'},
['childtype', 'This is'],
['childtype', 'it!'],
]
Args:
content: The structured content to be converted.
encoding: The encoding to report on the first XML line.
pretty: True if we want pretty printing with indents and new lines.
Returns:
The XML content as a string.
"""
# We create a huge list of all the elements of the file.
xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
if pretty:
xml_parts.append("\n")
_ConstructContentList(xml_parts, content, pretty)
# Convert it to a string
return "".join(xml_parts)
def _ConstructContentList(xml_parts, specification, pretty, level=0):
""" Appends the XML parts corresponding to the specification.
Args:
xml_parts: A list of XML parts to be appended to.
specification: The specification of the element. See EasyXml docs.
pretty: True if we want pretty printing with indents and new lines.
level: Indentation level.
"""
# The first item in a specification is the name of the element.
if pretty:
indentation = " " * level
new_line = "\n"
else:
indentation = ""
new_line = ""
name = specification[0]
if not isinstance(name, str):
raise Exception(
"The first item of an EasyXml specification should be "
"a string. Specification was " + str(specification)
)
xml_parts.append(indentation + "<" + name)
# Optionally in second position is a dictionary of the attributes.
rest = specification[1:]
if rest and isinstance(rest[0], dict):
for at, val in sorted(rest[0].items()):
xml_parts.append(f' {at}="{_XmlEscape(val, attr=True)}"')
rest = rest[1:]
if rest:
xml_parts.append(">")
all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
multi_line = not all_strings
if multi_line and new_line:
xml_parts.append(new_line)
for child_spec in rest:
# If it's a string, append a text node.
# Otherwise recurse over that child definition
if isinstance(child_spec, str):
xml_parts.append(_XmlEscape(child_spec))
else:
_ConstructContentList(xml_parts, child_spec, pretty, level + 1)
if multi_line and indentation:
xml_parts.append(indentation)
xml_parts.append(f"</{name}>{new_line}")
else:
xml_parts.append("/>%s" % new_line)
def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False,
win32=(sys.platform == "win32")):
""" Writes the XML content to disk, touching the file only if it has changed.
Args:
content: The structured content to be written.
path: Location of the file.
encoding: The encoding to report on the first line of the XML file.
pretty: True if we want pretty printing with indents and new lines.
"""
xml_string = XmlToString(content, encoding, pretty)
if win32 and os.linesep != "\r\n":
xml_string = xml_string.replace("\n", "\r\n")
default_encoding = locale.getdefaultlocale()[1]
if default_encoding and default_encoding.upper() != encoding.upper():
xml_string = xml_string.encode(encoding)
# Get the old content
try:
with open(path) as file:
existing = file.read()
except OSError:
existing = None
# It has changed, write it
if existing != xml_string:
with open(path, "wb") as file:
file.write(xml_string)
_xml_escape_map = {
'"': "&quot;",
"'": "&apos;",
"<": "&lt;",
">": "&gt;",
"&": "&amp;",
"\n": "&#xA;",
"\r": "&#xD;",
}
_xml_escape_re = re.compile("(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))
def _XmlEscape(value, attr=False):
""" Escape a string for inclusion in XML."""
def replace(match):
m = match.string[match.start() : match.end()]
# don't replace single quotes in attrs
if attr and m == "'":
return m
return _xml_escape_map[m]
return _xml_escape_re.sub(replace, value)
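
Assuming gyp's pylib directory (node_modules/node-gyp/gyp/pylib) is on sys.path, the list-based format documented above converts like so:

import gyp.easy_xml as easy_xml

spec = [
    "myelement", {"a": "value1", "b": "value2"},
    ["childtype", "This is"],
    ["childtype", "it!"],
]
print(easy_xml.XmlToString(spec, pretty=True))
# Prints the XML declaration followed by a pretty-printed
# <myelement a="value1" b="value2"> ... </myelement> tree.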

109
node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py generated vendored Normal file

@@ -0,0 +1,109 @@
#!/usr/bin/env python3
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the easy_xml.py file. """
import gyp.easy_xml as easy_xml
import unittest
from io import StringIO
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.stderr = StringIO()
def test_EasyXml_simple(self):
self.assertEqual(
easy_xml.XmlToString(["test"]),
'<?xml version="1.0" encoding="utf-8"?><test/>',
)
self.assertEqual(
easy_xml.XmlToString(["test"], encoding="Windows-1252"),
'<?xml version="1.0" encoding="Windows-1252"?><test/>',
)
def test_EasyXml_simple_with_attributes(self):
self.assertEqual(
easy_xml.XmlToString(["test2", {"a": "value1", "b": "value2"}]),
'<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>',
)
def test_EasyXml_escaping(self):
original = "<test>'\"\r&\nfoo"
converted = "&lt;test&gt;'&quot;&#xD;&amp;&#xA;foo"
converted_apos = converted.replace("'", "&apos;")
self.assertEqual(
easy_xml.XmlToString(["test3", {"a": original}, original]),
'<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>'
% (converted, converted_apos),
)
def test_EasyXml_pretty(self):
self.assertEqual(
easy_xml.XmlToString(
["test3", ["GrandParent", ["Parent1", ["Child"]], ["Parent2"]]],
pretty=True,
),
'<?xml version="1.0" encoding="utf-8"?>\n'
"<test3>\n"
" <GrandParent>\n"
" <Parent1>\n"
" <Child/>\n"
" </Parent1>\n"
" <Parent2/>\n"
" </GrandParent>\n"
"</test3>\n",
)
def test_EasyXml_complex(self):
# We want to create:
target = (
'<?xml version="1.0" encoding="utf-8"?>'
"<Project>"
'<PropertyGroup Label="Globals">'
"<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>"
"<Keyword>Win32Proj</Keyword>"
"<RootNamespace>automated_ui_tests</RootNamespace>"
"</PropertyGroup>"
'<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
"<PropertyGroup "
"Condition=\"'$(Configuration)|$(Platform)'=="
'\'Debug|Win32\'" Label="Configuration">'
"<ConfigurationType>Application</ConfigurationType>"
"<CharacterSet>Unicode</CharacterSet>"
"</PropertyGroup>"
"</Project>"
)
xml = easy_xml.XmlToString(
[
"Project",
[
"PropertyGroup",
{"Label": "Globals"},
["ProjectGuid", "{D2250C20-3A94-4FB9-AF73-11BC5B73884B}"],
["Keyword", "Win32Proj"],
["RootNamespace", "automated_ui_tests"],
],
["Import", {"Project": "$(VCTargetsPath)\\Microsoft.Cpp.props"}],
[
"PropertyGroup",
{
"Condition": "'$(Configuration)|$(Platform)'=='Debug|Win32'",
"Label": "Configuration",
},
["ConfigurationType", "Application"],
["CharacterSet", "Unicode"],
],
]
)
self.assertEqual(xml, target)
if __name__ == "__main__":
unittest.main()

55
node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py generated vendored Normal file
View File

@@ -0,0 +1,55 @@
#!/usr/bin/env python3
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""These functions are executed via gyp-flock-tool when using the Makefile
generator. Used on systems that don't have a built-in flock."""
import fcntl
import os
import struct
import subprocess
import sys
def main(args):
executor = FlockTool()
executor.Dispatch(args)
class FlockTool:
"""This class emulates the 'flock' command."""
def Dispatch(self, args):
"""Dispatches a string command to a method."""
if len(args) < 1:
raise Exception("Not enough arguments")
method = "Exec%s" % self._CommandifyName(args[0])
getattr(self, method)(*args[1:])
def _CommandifyName(self, name_string):
"""Transforms a tool name like copy-info-plist to CopyInfoPlist"""
return name_string.title().replace("-", "")
def ExecFlock(self, lockfile, *cmd_list):
"""Emulates the most basic behavior of Linux's flock(1)."""
# Rely on exception handling to report errors.
# Note that the stock python on SunOS has a bug
# where fcntl.flock(fd, LOCK_EX) always fails
# with EBADF, that's why we use this F_SETLK
# hack instead.
fd = os.open(lockfile, os.O_WRONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
if sys.platform.startswith("aix"):
# Python on AIX is compiled with LARGEFILE support, which changes the
# struct size.
op = struct.pack("hhIllqq", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
else:
op = struct.pack("hhllhhl", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
fcntl.fcntl(fd, fcntl.F_SETLK, op)
return subprocess.call(cmd_list)
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))

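FlockTool.Dispatch maps its first argument onto an ExecXxx method, so "flock" becomes ExecFlock(lockfile, *cmd). A hedged sketch of calling it directly on a POSIX host; the lock file and the command are made up for illustration.

# Sketch: what "gyp-flock-tool flock <lockfile> <cmd...>" boils down to.
# POSIX-only (uses fcntl); the path and the command are hypothetical.
from gyp.flock_tool import FlockTool

tool = FlockTool()
# Takes an F_SETLK write lock on /tmp/build.lock, then runs the command
# while the lock is held.
tool.Dispatch(["flock", "/tmp/build.lock", "ls"])
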
View File

File diff suppressed because it is too large

1173
node_modules/node-gyp/gyp/pylib/gyp/generator/android.py generated vendored Normal file

File diff suppressed because it is too large

1321
node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py generated vendored Normal file

File diff suppressed because it is too large

View File

@@ -0,0 +1,120 @@
# Copyright (c) 2016 Ben Noordhuis <info@bnoordhuis.nl>. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import gyp.common
import gyp.xcode_emulation
import json
import os
generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
generator_filelist_paths = None
generator_supports_multiple_toolsets = True
generator_wants_sorted_dependencies = False
# Lifted from make.py. The actual values don't matter much.
generator_default_variables = {
"CONFIGURATION_NAME": "$(BUILDTYPE)",
"EXECUTABLE_PREFIX": "",
"EXECUTABLE_SUFFIX": "",
"INTERMEDIATE_DIR": "$(obj).$(TOOLSET)/$(TARGET)/geni",
"PRODUCT_DIR": "$(builddir)",
"RULE_INPUT_DIRNAME": "%(INPUT_DIRNAME)s",
"RULE_INPUT_EXT": "$(suffix $<)",
"RULE_INPUT_NAME": "$(notdir $<)",
"RULE_INPUT_PATH": "$(abspath $<)",
"RULE_INPUT_ROOT": "%(INPUT_ROOT)s",
"SHARED_INTERMEDIATE_DIR": "$(obj)/gen",
"SHARED_LIB_PREFIX": "lib",
"STATIC_LIB_PREFIX": "lib",
"STATIC_LIB_SUFFIX": ".a",
}
def IsMac(params):
return "mac" == gyp.common.GetFlavor(params)
def CalculateVariables(default_variables, params):
default_variables.setdefault("OS", gyp.common.GetFlavor(params))
def AddCommandsForTarget(cwd, target, params, per_config_commands):
output_dir = params["generator_flags"].get("output_dir", "out")
for configuration_name, configuration in target["configurations"].items():
if IsMac(params):
xcode_settings = gyp.xcode_emulation.XcodeSettings(target)
cflags = xcode_settings.GetCflags(configuration_name)
cflags_c = xcode_settings.GetCflagsC(configuration_name)
cflags_cc = xcode_settings.GetCflagsCC(configuration_name)
else:
cflags = configuration.get("cflags", [])
cflags_c = configuration.get("cflags_c", [])
cflags_cc = configuration.get("cflags_cc", [])
cflags_c = cflags + cflags_c
cflags_cc = cflags + cflags_cc
defines = configuration.get("defines", [])
defines = ["-D" + s for s in defines]
# TODO(bnoordhuis) Handle generated source files.
extensions = (".c", ".cc", ".cpp", ".cxx")
sources = [s for s in target.get("sources", []) if s.endswith(extensions)]
def resolve(filename):
return os.path.abspath(os.path.join(cwd, filename))
# TODO(bnoordhuis) Handle generated header files.
include_dirs = configuration.get("include_dirs", [])
include_dirs = [s for s in include_dirs if not s.startswith("$(obj)")]
includes = ["-I" + resolve(s) for s in include_dirs]
defines = gyp.common.EncodePOSIXShellList(defines)
includes = gyp.common.EncodePOSIXShellList(includes)
cflags_c = gyp.common.EncodePOSIXShellList(cflags_c)
cflags_cc = gyp.common.EncodePOSIXShellList(cflags_cc)
commands = per_config_commands.setdefault(configuration_name, [])
for source in sources:
file = resolve(source)
isc = source.endswith(".c")
cc = "cc" if isc else "c++"
cflags = cflags_c if isc else cflags_cc
command = " ".join(
(
cc,
defines,
includes,
cflags,
"-c",
gyp.common.EncodePOSIXShellArgument(file),
)
)
commands.append(dict(command=command, directory=output_dir, file=file))
def GenerateOutput(target_list, target_dicts, data, params):
per_config_commands = {}
for qualified_target, target in target_dicts.items():
build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
qualified_target
)
if IsMac(params):
settings = data[build_file]
gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(settings, target)
cwd = os.path.dirname(build_file)
AddCommandsForTarget(cwd, target, params, per_config_commands)
output_dir = params["generator_flags"].get("output_dir", "out")
for configuration_name, commands in per_config_commands.items():
filename = os.path.join(output_dir, configuration_name, "compile_commands.json")
gyp.common.EnsureDirExists(filename)
fp = open(filename, "w")
json.dump(commands, fp=fp, indent=0, check_circular=False)
def PerformBuild(data, configurations, params):
pass

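Each entry written by AddCommandsForTarget carries the three keys clang tooling expects: command, directory, and file. A small sketch of consuming the generated database, assuming gyp was run with this generator, the default output_dir of "out", and a configuration named "Default" (all of which may differ in your setup).

# Sketch: reading the compile database emitted above. The path assumes the
# default "out" output_dir and a configuration called "Default".
import json

with open("out/Default/compile_commands.json") as fp:
    for entry in json.load(fp):
        # Keys match dict(command=..., directory=..., file=...) in the generator.
        print(entry["file"])
        print("   ", entry["command"])
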
View File

@@ -0,0 +1,103 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import gyp
import gyp.common
import gyp.msvs_emulation
import json
generator_supports_multiple_toolsets = True
generator_wants_static_library_dependencies_adjusted = False
generator_filelist_paths = {}
generator_default_variables = {}
for dirname in [
"INTERMEDIATE_DIR",
"SHARED_INTERMEDIATE_DIR",
"PRODUCT_DIR",
"LIB_DIR",
"SHARED_LIB_DIR",
]:
# Some gyp steps fail if these are empty(!).
generator_default_variables[dirname] = "dir"
for unused in [
"RULE_INPUT_PATH",
"RULE_INPUT_ROOT",
"RULE_INPUT_NAME",
"RULE_INPUT_DIRNAME",
"RULE_INPUT_EXT",
"EXECUTABLE_PREFIX",
"EXECUTABLE_SUFFIX",
"STATIC_LIB_PREFIX",
"STATIC_LIB_SUFFIX",
"SHARED_LIB_PREFIX",
"SHARED_LIB_SUFFIX",
"CONFIGURATION_NAME",
]:
generator_default_variables[unused] = ""
def CalculateVariables(default_variables, params):
generator_flags = params.get("generator_flags", {})
for key, val in generator_flags.items():
default_variables.setdefault(key, val)
default_variables.setdefault("OS", gyp.common.GetFlavor(params))
flavor = gyp.common.GetFlavor(params)
if flavor == "win":
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
def CalculateGeneratorInputInfo(params):
"""Calculate the generator specific info that gets fed to input (called by
gyp)."""
generator_flags = params.get("generator_flags", {})
if generator_flags.get("adjust_static_libraries", False):
global generator_wants_static_library_dependencies_adjusted
generator_wants_static_library_dependencies_adjusted = True
toplevel = params["options"].toplevel_dir
generator_dir = os.path.relpath(params["options"].generator_output or ".")
# output_dir: relative path from generator_dir to the build directory.
output_dir = generator_flags.get("output_dir", "out")
qualified_out_dir = os.path.normpath(
os.path.join(toplevel, generator_dir, output_dir, "gypfiles")
)
global generator_filelist_paths
generator_filelist_paths = {
"toplevel": toplevel,
"qualified_out_dir": qualified_out_dir,
}
def GenerateOutput(target_list, target_dicts, data, params):
# Map of target -> list of targets it depends on.
edges = {}
# Queue of targets to visit.
targets_to_visit = target_list[:]
while len(targets_to_visit) > 0:
target = targets_to_visit.pop()
if target in edges:
continue
edges[target] = []
for dep in target_dicts[target].get("dependencies", []):
edges[target].append(dep)
targets_to_visit.append(dep)
try:
filepath = params["generator_flags"]["output_dir"]
except KeyError:
filepath = "."
filename = os.path.join(filepath, "dump.json")
f = open(filename, "w")
json.dump(edges, f)
f.close()
print("Wrote json to %s." % filename)

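GenerateOutput above flattens the dependency graph into a plain JSON object mapping each qualified target to the targets it depends on. A short sketch of reading it back, assuming the default output_dir so dump.json lands in the current directory.

# Sketch: loading the dependency map written to dump.json. Adjust the path if
# output_dir was overridden via generator flags.
import json

with open("dump.json") as f:
    edges = json.load(f)  # {"path/to/foo.gyp:target#toolset": [deps...], ...}
for target, deps in sorted(edges.items()):
    print(f"{target}: {len(deps)} direct dependencies")
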
View File

@@ -0,0 +1,464 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""GYP backend that generates Eclipse CDT settings files.
This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML
files that can be imported into an Eclipse CDT project. The XML file contains a
list of include paths and symbols (i.e. defines).
Because a full .cproject definition is not created by this generator, it's not
possible to properly define the include dirs and symbols for each file
individually. Instead, one set of includes/symbols is generated for the entire
project. This works fairly well (and is a vast improvement in general), but may
still result in a few indexer issues here and there.
This generator has no automated tests, so expect it to be broken.
"""
from xml.sax.saxutils import escape
import os.path
import subprocess
import gyp
import gyp.common
import gyp.msvs_emulation
import shlex
import xml.etree.cElementTree as ET
generator_wants_static_library_dependencies_adjusted = False
generator_default_variables = {}
for dirname in ["INTERMEDIATE_DIR", "PRODUCT_DIR", "LIB_DIR", "SHARED_LIB_DIR"]:
# Some gyp steps fail if these are empty(!), so we convert them to variables
generator_default_variables[dirname] = "$" + dirname
for unused in [
"RULE_INPUT_PATH",
"RULE_INPUT_ROOT",
"RULE_INPUT_NAME",
"RULE_INPUT_DIRNAME",
"RULE_INPUT_EXT",
"EXECUTABLE_PREFIX",
"EXECUTABLE_SUFFIX",
"STATIC_LIB_PREFIX",
"STATIC_LIB_SUFFIX",
"SHARED_LIB_PREFIX",
"SHARED_LIB_SUFFIX",
"CONFIGURATION_NAME",
]:
generator_default_variables[unused] = ""
# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as
# part of the path when dealing with generated headers. This value will be
# replaced dynamically for each configuration.
generator_default_variables["SHARED_INTERMEDIATE_DIR"] = "$SHARED_INTERMEDIATE_DIR"
def CalculateVariables(default_variables, params):
generator_flags = params.get("generator_flags", {})
for key, val in generator_flags.items():
default_variables.setdefault(key, val)
flavor = gyp.common.GetFlavor(params)
default_variables.setdefault("OS", flavor)
if flavor == "win":
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
def CalculateGeneratorInputInfo(params):
"""Calculate the generator specific info that gets fed to input (called by
gyp)."""
generator_flags = params.get("generator_flags", {})
if generator_flags.get("adjust_static_libraries", False):
global generator_wants_static_library_dependencies_adjusted
generator_wants_static_library_dependencies_adjusted = True
def GetAllIncludeDirectories(
target_list,
target_dicts,
shared_intermediate_dirs,
config_name,
params,
compiler_path,
):
"""Calculate the set of include directories to be used.
Returns:
A list including all the include_dir's specified for every target followed
by any include directories that were added as cflag compiler options.
"""
gyp_includes_set = set()
compiler_includes_list = []
# Find compiler's default include dirs.
if compiler_path:
command = shlex.split(compiler_path)
command.extend(["-E", "-xc++", "-v", "-"])
proc = subprocess.Popen(
args=command,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
output = proc.communicate()[1].decode("utf-8")
# Extract the list of include dirs from the output, which has this format:
# ...
# #include "..." search starts here:
# #include <...> search starts here:
# /usr/include/c++/4.6
# /usr/local/include
# End of search list.
# ...
in_include_list = False
for line in output.splitlines():
if line.startswith("#include"):
in_include_list = True
continue
if line.startswith("End of search list."):
break
if in_include_list:
include_dir = line.strip()
if include_dir not in compiler_includes_list:
compiler_includes_list.append(include_dir)
flavor = gyp.common.GetFlavor(params)
if flavor == "win":
generator_flags = params.get("generator_flags", {})
for target_name in target_list:
target = target_dicts[target_name]
if config_name in target["configurations"]:
config = target["configurations"][config_name]
# Look for any include dirs that were explicitly added via cflags. This
# may be done in gyp files to force certain includes to come at the end.
# TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
# remove this.
if flavor == "win":
msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
cflags = msvs_settings.GetCflags(config_name)
else:
cflags = config["cflags"]
for cflag in cflags:
if cflag.startswith("-I"):
include_dir = cflag[2:]
if include_dir not in compiler_includes_list:
compiler_includes_list.append(include_dir)
# Find standard gyp include dirs.
if "include_dirs" in config:
include_dirs = config["include_dirs"]
for shared_intermediate_dir in shared_intermediate_dirs:
for include_dir in include_dirs:
include_dir = include_dir.replace(
"$SHARED_INTERMEDIATE_DIR", shared_intermediate_dir
)
if not os.path.isabs(include_dir):
base_dir = os.path.dirname(target_name)
include_dir = base_dir + "/" + include_dir
include_dir = os.path.abspath(include_dir)
gyp_includes_set.add(include_dir)
# Generate a list that has all the include dirs.
all_includes_list = list(gyp_includes_set)
all_includes_list.sort()
for compiler_include in compiler_includes_list:
if compiler_include not in gyp_includes_set:
all_includes_list.append(compiler_include)
# All done.
return all_includes_list
def GetCompilerPath(target_list, data, options):
"""Determine a command that can be used to invoke the compiler.
Returns:
If this is a gyp project that has explicit make settings, try to determine
the compiler from that. Otherwise, see if a compiler was specified via the
CC_target environment variable.
"""
# First, see if the compiler is configured in make's settings.
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
make_global_settings_dict = data[build_file].get("make_global_settings", {})
for key, value in make_global_settings_dict:
if key in ["CC", "CXX"]:
return os.path.join(options.toplevel_dir, value)
# Check to see if the compiler was specified as an environment variable.
for key in ["CC_target", "CC", "CXX"]:
compiler = os.environ.get(key)
if compiler:
return compiler
return "gcc"
def GetAllDefines(target_list, target_dicts, data, config_name, params, compiler_path):
"""Calculate the defines for a project.
Returns:
A dict that includes explicit defines declared in gyp files along with all
of the default defines that the compiler uses.
"""
# Get defines declared in the gyp files.
all_defines = {}
flavor = gyp.common.GetFlavor(params)
if flavor == "win":
generator_flags = params.get("generator_flags", {})
for target_name in target_list:
target = target_dicts[target_name]
if flavor == "win":
msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
extra_defines = msvs_settings.GetComputedDefines(config_name)
else:
extra_defines = []
if config_name in target["configurations"]:
config = target["configurations"][config_name]
target_defines = config["defines"]
else:
target_defines = []
for define in target_defines + extra_defines:
split_define = define.split("=", 1)
if len(split_define) == 1:
split_define.append("1")
if split_define[0].strip() in all_defines:
# Already defined
continue
all_defines[split_define[0].strip()] = split_define[1].strip()
# Get default compiler defines (if possible).
if flavor == "win":
return all_defines # Default defines already processed in the loop above.
if compiler_path:
command = shlex.split(compiler_path)
command.extend(["-E", "-dM", "-"])
cpp_proc = subprocess.Popen(
args=command, cwd=".", stdin=subprocess.PIPE, stdout=subprocess.PIPE
)
cpp_output = cpp_proc.communicate()[0].decode("utf-8")
cpp_lines = cpp_output.split("\n")
for cpp_line in cpp_lines:
if not cpp_line.strip():
continue
cpp_line_parts = cpp_line.split(" ", 2)
key = cpp_line_parts[1]
if len(cpp_line_parts) >= 3:
val = cpp_line_parts[2]
else:
val = "1"
all_defines[key] = val
return all_defines
def WriteIncludePaths(out, eclipse_langs, include_dirs):
"""Write the includes section of a CDT settings export file."""
out.write(
' <section name="org.eclipse.cdt.internal.ui.wizards.'
'settingswizards.IncludePaths">\n'
)
out.write(' <language name="holder for library settings"></language>\n')
for lang in eclipse_langs:
out.write(' <language name="%s">\n' % lang)
for include_dir in include_dirs:
out.write(
' <includepath workspace_path="false">%s</includepath>\n'
% include_dir
)
out.write(" </language>\n")
out.write(" </section>\n")
def WriteMacros(out, eclipse_langs, defines):
"""Write the macros section of a CDT settings export file."""
out.write(
' <section name="org.eclipse.cdt.internal.ui.wizards.'
'settingswizards.Macros">\n'
)
out.write(' <language name="holder for library settings"></language>\n')
for lang in eclipse_langs:
out.write(' <language name="%s">\n' % lang)
for key in sorted(defines):
out.write(
" <macro><name>%s</name><value>%s</value></macro>\n"
% (escape(key), escape(defines[key]))
)
out.write(" </language>\n")
out.write(" </section>\n")
def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name):
options = params["options"]
generator_flags = params.get("generator_flags", {})
# build_dir: relative path from source root to our output files.
# e.g. "out/Debug"
build_dir = os.path.join(generator_flags.get("output_dir", "out"), config_name)
toplevel_build = os.path.join(options.toplevel_dir, build_dir)
# Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
# SHARED_INTERMEDIATE_DIR. Include both possible locations.
shared_intermediate_dirs = [
os.path.join(toplevel_build, "obj", "gen"),
os.path.join(toplevel_build, "gen"),
]
GenerateCdtSettingsFile(
target_list,
target_dicts,
data,
params,
config_name,
os.path.join(toplevel_build, "eclipse-cdt-settings.xml"),
options,
shared_intermediate_dirs,
)
GenerateClasspathFile(
target_list,
target_dicts,
options.toplevel_dir,
toplevel_build,
os.path.join(toplevel_build, "eclipse-classpath.xml"),
)
def GenerateCdtSettingsFile(
target_list,
target_dicts,
data,
params,
config_name,
out_name,
options,
shared_intermediate_dirs,
):
gyp.common.EnsureDirExists(out_name)
with open(out_name, "w") as out:
out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
out.write("<cdtprojectproperties>\n")
eclipse_langs = [
"C++ Source File",
"C Source File",
"Assembly Source File",
"GNU C++",
"GNU C",
"Assembly",
]
compiler_path = GetCompilerPath(target_list, data, options)
include_dirs = GetAllIncludeDirectories(
target_list,
target_dicts,
shared_intermediate_dirs,
config_name,
params,
compiler_path,
)
WriteIncludePaths(out, eclipse_langs, include_dirs)
defines = GetAllDefines(
target_list, target_dicts, data, config_name, params, compiler_path
)
WriteMacros(out, eclipse_langs, defines)
out.write("</cdtprojectproperties>\n")
def GenerateClasspathFile(
target_list, target_dicts, toplevel_dir, toplevel_build, out_name
):
"""Generates a classpath file suitable for symbol navigation and code
completion of Java code (such as in Android projects) by finding all
.java and .jar files used as action inputs."""
gyp.common.EnsureDirExists(out_name)
result = ET.Element("classpath")
def AddElements(kind, paths):
# First, we need to normalize the paths so they are all relative to the
# toplevel dir.
rel_paths = set()
for path in paths:
if os.path.isabs(path):
rel_paths.add(os.path.relpath(path, toplevel_dir))
else:
rel_paths.add(path)
for path in sorted(rel_paths):
entry_element = ET.SubElement(result, "classpathentry")
entry_element.set("kind", kind)
entry_element.set("path", path)
AddElements("lib", GetJavaJars(target_list, target_dicts, toplevel_dir))
AddElements("src", GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
# Include the standard JRE container and a dummy out folder
AddElements("con", ["org.eclipse.jdt.launching.JRE_CONTAINER"])
# Include a dummy out folder so that Eclipse doesn't use the default /bin
# folder in the root of the project.
AddElements("output", [os.path.join(toplevel_build, ".eclipse-java-build")])
ET.ElementTree(result).write(out_name)
def GetJavaJars(target_list, target_dicts, toplevel_dir):
"""Generates a sequence of all .jars used as inputs."""
for target_name in target_list:
target = target_dicts[target_name]
for action in target.get("actions", []):
for input_ in action["inputs"]:
if os.path.splitext(input_)[1] == ".jar" and not input_.startswith("$"):
if os.path.isabs(input_):
yield input_
else:
yield os.path.join(os.path.dirname(target_name), input_)
def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
"""Generates a sequence of all likely java package root directories."""
for target_name in target_list:
target = target_dicts[target_name]
for action in target.get("actions", []):
for input_ in action["inputs"]:
if os.path.splitext(input_)[1] == ".java" and not input_.startswith(
"$"
):
dir_ = os.path.dirname(
os.path.join(os.path.dirname(target_name), input_)
)
# If there is a parent 'src' or 'java' folder, navigate up to it -
# these are canonical package root names in Chromium. This will
# break if 'src' or 'java' exists in the package structure. This
# could be further improved by inspecting the java file for the
# package name if this proves to be too fragile in practice.
parent_search = dir_
while os.path.basename(parent_search) not in ["src", "java"]:
parent_search, _ = os.path.split(parent_search)
if not parent_search or parent_search == toplevel_dir:
# Didn't find a known root, just return the original path
yield dir_
break
else:
yield parent_search
def GenerateOutput(target_list, target_dicts, data, params):
"""Generate an XML settings file that can be imported into a CDT project."""
if params["options"].generator_output:
raise NotImplementedError("--generator_output not implemented for eclipse")
user_config = params.get("generator_flags", {}).get("config", None)
if user_config:
GenerateOutputForConfig(target_list, target_dicts, data, params, user_config)
else:
config_names = target_dicts[target_list[0]]["configurations"]
for config_name in config_names:
GenerateOutputForConfig(
target_list, target_dicts, data, params, config_name
)

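The include-directory discovery in GetAllIncludeDirectories relies on gcc/clang printing their built-in search path to stderr when preprocessing with -v. A standalone sketch of the same trick, assuming a gcc-compatible "c++" driver is on PATH (that driver name is an assumption, not something the generator requires).

# Sketch: the compiler-include discovery used by GetAllIncludeDirectories,
# as a standalone helper. Requires a gcc/clang style driver on PATH.
import shlex
import subprocess

def default_include_dirs(compiler="c++"):
    command = shlex.split(compiler) + ["-E", "-xc++", "-v", "-"]
    proc = subprocess.Popen(
        command,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    # The directory list appears on stderr between the "search starts here:"
    # banner and "End of search list.".
    output = proc.communicate(input=b"")[1].decode("utf-8")
    dirs, in_list = [], False
    for line in output.splitlines():
        if line.startswith("#include"):
            in_list = True
        elif line.startswith("End of search list."):
            break
        elif in_list:
            dirs.append(line.strip())
    return dirs

print(default_include_dirs())
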
89
node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py generated vendored Normal file
View File

@@ -0,0 +1,89 @@
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypd output module
This module produces gyp input as its output. Output files are given the
.gypd extension to avoid overwriting the .gyp files that they are generated
from. Internal references to .gyp files (such as those found in
"dependencies" sections) are not adjusted to point to .gypd files instead;
unlike other paths, which are relative to the .gyp or .gypd file, such paths
are relative to the directory from which gyp was run to create the .gypd file.
This generator module is intended to be a sample and a debugging aid, hence
the "d" for "debug" in .gypd. It is useful to inspect the results of the
various merges, expansions, and conditional evaluations performed by gyp
and to see a representation of what would be fed to a generator module.
It's not advisable to rename .gypd files produced by this module to .gyp,
because they will have all merges, expansions, and evaluations already
performed and the relevant constructs not present in the output; paths to
dependencies may be wrong; and various sections that do not belong in .gyp
files, such as "included_files" and "*_excluded", will be present.
Output will also be stripped of comments. This is not intended to be a
general-purpose gyp pretty-printer; for that, you probably just want to
run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
comments but won't do all of the other things done to this module's output.
The specific formatting of the output generated by this module is subject
to change.
"""
import gyp.common
import pprint
# These variables should just be spit back out as variable references.
_generator_identity_variables = [
"CONFIGURATION_NAME",
"EXECUTABLE_PREFIX",
"EXECUTABLE_SUFFIX",
"INTERMEDIATE_DIR",
"LIB_DIR",
"PRODUCT_DIR",
"RULE_INPUT_ROOT",
"RULE_INPUT_DIRNAME",
"RULE_INPUT_EXT",
"RULE_INPUT_NAME",
"RULE_INPUT_PATH",
"SHARED_INTERMEDIATE_DIR",
"SHARED_LIB_DIR",
"SHARED_LIB_PREFIX",
"SHARED_LIB_SUFFIX",
"STATIC_LIB_PREFIX",
"STATIC_LIB_SUFFIX",
]
# gypd doesn't define a default value for OS like many other generator
# modules. Specify "-D OS=whatever" on the command line to provide a value.
generator_default_variables = {}
# gypd supports multiple toolsets
generator_supports_multiple_toolsets = True
# TODO(mark): This always uses <, which isn't right. The input module should
# notify the generator to tell it which phase it is operating in, and this
# module should use < for the early phase and then switch to > for the late
# phase. Bonus points for carrying @ back into the output too.
for v in _generator_identity_variables:
generator_default_variables[v] = "<(%s)" % v
def GenerateOutput(target_list, target_dicts, data, params):
output_files = {}
for qualified_target in target_list:
[input_file, target] = gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
if input_file[-4:] != ".gyp":
continue
input_file_stem = input_file[:-4]
output_file = input_file_stem + params["options"].suffix + ".gypd"
output_files[output_file] = output_files.get(output_file, input_file)
for output_file, input_file in output_files.items():
output = open(output_file, "w")
pprint.pprint(data[input_file], output)
output.close()

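Because a .gypd file is just pprint'ed Python data, it can typically be loaded back with ast.literal_eval for inspection. A sketch, where "foo.gypd" stands in for whatever "gyp -f gypd" actually produced.

# Sketch: inspecting a .gypd dump. "foo.gypd" is a placeholder for the real
# output file; the top-level dict normally carries a "targets" list.
import ast

with open("foo.gypd") as f:
    expanded = ast.literal_eval(f.read())
for target in expanded.get("targets", []):
    print(target.get("target_name"), target.get("type"))
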
58
node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py generated vendored Normal file
View File

@@ -0,0 +1,58 @@
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypsh output module
gypsh is a GYP shell. It's not really a generator per se. All it does is
fire up an interactive Python session with a few local variables set to the
variables passed to the generator. Like gypd, it's intended as a debugging
aid, to facilitate the exploration of .gyp structures after being processed
by the input module.
The expected usage is "gyp -f gypsh -D OS=desired_os".
"""
import code
import sys
# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.
_generator_identity_variables = [
"EXECUTABLE_PREFIX",
"EXECUTABLE_SUFFIX",
"INTERMEDIATE_DIR",
"PRODUCT_DIR",
"RULE_INPUT_ROOT",
"RULE_INPUT_DIRNAME",
"RULE_INPUT_EXT",
"RULE_INPUT_NAME",
"RULE_INPUT_PATH",
"SHARED_INTERMEDIATE_DIR",
]
generator_default_variables = {}
for v in _generator_identity_variables:
generator_default_variables[v] = "<(%s)" % v
def GenerateOutput(target_list, target_dicts, data, params):
locals = {
"target_list": target_list,
"target_dicts": target_dicts,
"data": data,
}
# Use a banner that looks like the stock Python one and like what
# code.interact uses by default, but tack on something to indicate what
# locals are available, and identify gypsh.
banner = "Python {} on {}\nlocals.keys() = {}\ngypsh".format(
sys.version,
sys.platform,
repr(sorted(locals.keys())),
)
code.interact(banner, local=locals)

2518
node_modules/node-gyp/gyp/pylib/gyp/generator/make.py generated vendored Normal file

File diff suppressed because it is too large

3978
node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py generated vendored Normal file

File diff suppressed because it is too large

View File

@@ -0,0 +1,44 @@
#!/usr/bin/env python3
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the msvs.py file. """
import gyp.generator.msvs as msvs
import unittest
from io import StringIO
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.stderr = StringIO()
def test_GetLibraries(self):
self.assertEqual(msvs._GetLibraries({}), [])
self.assertEqual(msvs._GetLibraries({"libraries": []}), [])
self.assertEqual(
msvs._GetLibraries({"other": "foo", "libraries": ["a.lib"]}), ["a.lib"]
)
self.assertEqual(msvs._GetLibraries({"libraries": ["-la"]}), ["a.lib"])
self.assertEqual(
msvs._GetLibraries(
{
"libraries": [
"a.lib",
"b.lib",
"c.lib",
"-lb.lib",
"-lb.lib",
"d.lib",
"a.lib",
]
}
),
["c.lib", "b.lib", "d.lib", "a.lib"],
)
if __name__ == "__main__":
unittest.main()

2936
node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py generated vendored Normal file

File diff suppressed because it is too large

View File

@@ -0,0 +1,55 @@
#!/usr/bin/env python3
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the ninja.py file. """
import sys
import unittest
import gyp.generator.ninja as ninja
class TestPrefixesAndSuffixes(unittest.TestCase):
def test_BinaryNamesWindows(self):
# These cannot run on non-Windows as they require a VS installation to
# correctly handle variable expansion.
if sys.platform.startswith("win"):
writer = ninja.NinjaWriter(
"foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "win"
)
spec = {"target_name": "wee"}
self.assertTrue(
writer.ComputeOutputFileName(spec, "executable").endswith(".exe")
)
self.assertTrue(
writer.ComputeOutputFileName(spec, "shared_library").endswith(".dll")
)
self.assertTrue(
writer.ComputeOutputFileName(spec, "static_library").endswith(".lib")
)
def test_BinaryNamesLinux(self):
writer = ninja.NinjaWriter(
"foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "linux"
)
spec = {"target_name": "wee"}
self.assertTrue("." not in writer.ComputeOutputFileName(spec, "executable"))
self.assertTrue(
writer.ComputeOutputFileName(spec, "shared_library").startswith("lib")
)
self.assertTrue(
writer.ComputeOutputFileName(spec, "static_library").startswith("lib")
)
self.assertTrue(
writer.ComputeOutputFileName(spec, "shared_library").endswith(".so")
)
self.assertTrue(
writer.ComputeOutputFileName(spec, "static_library").endswith(".a")
)
if __name__ == "__main__":
unittest.main()

1394
node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py generated vendored Normal file

File diff suppressed because it is too large

View File

@@ -0,0 +1,25 @@
#!/usr/bin/env python3
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the xcode.py file. """
import gyp.generator.xcode as xcode
import unittest
import sys
class TestEscapeXcodeDefine(unittest.TestCase):
if sys.platform == "darwin":
def test_InheritedRemainsUnescaped(self):
self.assertEqual(xcode.EscapeXcodeDefine("$(inherited)"), "$(inherited)")
def test_Escaping(self):
self.assertEqual(xcode.EscapeXcodeDefine('a b"c\\'), 'a\\ b\\"c\\\\')
if __name__ == "__main__":
unittest.main()

3137
node_modules/node-gyp/gyp/pylib/gyp/input.py generated vendored Normal file

File diff suppressed because it is too large

98
node_modules/node-gyp/gyp/pylib/gyp/input_test.py generated vendored Normal file
View File

@@ -0,0 +1,98 @@
#!/usr/bin/env python3
# Copyright 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the input.py file."""
import gyp.input
import unittest
class TestFindCycles(unittest.TestCase):
def setUp(self):
self.nodes = {}
for x in ("a", "b", "c", "d", "e"):
self.nodes[x] = gyp.input.DependencyGraphNode(x)
def _create_dependency(self, dependent, dependency):
dependent.dependencies.append(dependency)
dependency.dependents.append(dependent)
def test_no_cycle_empty_graph(self):
for label, node in self.nodes.items():
self.assertEqual([], node.FindCycles())
def test_no_cycle_line(self):
self._create_dependency(self.nodes["a"], self.nodes["b"])
self._create_dependency(self.nodes["b"], self.nodes["c"])
self._create_dependency(self.nodes["c"], self.nodes["d"])
for label, node in self.nodes.items():
self.assertEqual([], node.FindCycles())
def test_no_cycle_dag(self):
self._create_dependency(self.nodes["a"], self.nodes["b"])
self._create_dependency(self.nodes["a"], self.nodes["c"])
self._create_dependency(self.nodes["b"], self.nodes["c"])
for label, node in self.nodes.items():
self.assertEqual([], node.FindCycles())
def test_cycle_self_reference(self):
self._create_dependency(self.nodes["a"], self.nodes["a"])
self.assertEqual(
[[self.nodes["a"], self.nodes["a"]]], self.nodes["a"].FindCycles()
)
def test_cycle_two_nodes(self):
self._create_dependency(self.nodes["a"], self.nodes["b"])
self._create_dependency(self.nodes["b"], self.nodes["a"])
self.assertEqual(
[[self.nodes["a"], self.nodes["b"], self.nodes["a"]]],
self.nodes["a"].FindCycles(),
)
self.assertEqual(
[[self.nodes["b"], self.nodes["a"], self.nodes["b"]]],
self.nodes["b"].FindCycles(),
)
def test_two_cycles(self):
self._create_dependency(self.nodes["a"], self.nodes["b"])
self._create_dependency(self.nodes["b"], self.nodes["a"])
self._create_dependency(self.nodes["b"], self.nodes["c"])
self._create_dependency(self.nodes["c"], self.nodes["b"])
cycles = self.nodes["a"].FindCycles()
self.assertTrue([self.nodes["a"], self.nodes["b"], self.nodes["a"]] in cycles)
self.assertTrue([self.nodes["b"], self.nodes["c"], self.nodes["b"]] in cycles)
self.assertEqual(2, len(cycles))
def test_big_cycle(self):
self._create_dependency(self.nodes["a"], self.nodes["b"])
self._create_dependency(self.nodes["b"], self.nodes["c"])
self._create_dependency(self.nodes["c"], self.nodes["d"])
self._create_dependency(self.nodes["d"], self.nodes["e"])
self._create_dependency(self.nodes["e"], self.nodes["a"])
self.assertEqual(
[
[
self.nodes["a"],
self.nodes["b"],
self.nodes["c"],
self.nodes["d"],
self.nodes["e"],
self.nodes["a"],
]
],
self.nodes["a"].FindCycles(),
)
if __name__ == "__main__":
unittest.main()

771
node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py generated vendored Normal file

File diff suppressed because it is too large

1271
node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py generated vendored Normal file

File diff suppressed because it is too large

174
node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py generated vendored Normal file
View File

@@ -0,0 +1,174 @@
# This file comes from
# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
# Do not edit! Edit the upstream one instead.
"""Python module for generating .ninja files.
Note that this is emphatically not a required piece of Ninja; it's
just a helpful utility for build-file-generation systems that already
use Python.
"""
import textwrap
def escape_path(word):
return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:")
class Writer:
def __init__(self, output, width=78):
self.output = output
self.width = width
def newline(self):
self.output.write("\n")
def comment(self, text):
for line in textwrap.wrap(text, self.width - 2):
self.output.write("# " + line + "\n")
def variable(self, key, value, indent=0):
if value is None:
return
if isinstance(value, list):
value = " ".join(filter(None, value)) # Filter out empty strings.
self._line(f"{key} = {value}", indent)
def pool(self, name, depth):
self._line("pool %s" % name)
self.variable("depth", depth, indent=1)
def rule(
self,
name,
command,
description=None,
depfile=None,
generator=False,
pool=None,
restat=False,
rspfile=None,
rspfile_content=None,
deps=None,
):
self._line("rule %s" % name)
self.variable("command", command, indent=1)
if description:
self.variable("description", description, indent=1)
if depfile:
self.variable("depfile", depfile, indent=1)
if generator:
self.variable("generator", "1", indent=1)
if pool:
self.variable("pool", pool, indent=1)
if restat:
self.variable("restat", "1", indent=1)
if rspfile:
self.variable("rspfile", rspfile, indent=1)
if rspfile_content:
self.variable("rspfile_content", rspfile_content, indent=1)
if deps:
self.variable("deps", deps, indent=1)
def build(
self, outputs, rule, inputs=None, implicit=None, order_only=None, variables=None
):
outputs = self._as_list(outputs)
all_inputs = self._as_list(inputs)[:]
out_outputs = list(map(escape_path, outputs))
all_inputs = list(map(escape_path, all_inputs))
if implicit:
implicit = map(escape_path, self._as_list(implicit))
all_inputs.append("|")
all_inputs.extend(implicit)
if order_only:
order_only = map(escape_path, self._as_list(order_only))
all_inputs.append("||")
all_inputs.extend(order_only)
self._line(
"build {}: {}".format(" ".join(out_outputs), " ".join([rule] + all_inputs))
)
if variables:
if isinstance(variables, dict):
iterator = iter(variables.items())
else:
iterator = iter(variables)
for key, val in iterator:
self.variable(key, val, indent=1)
return outputs
def include(self, path):
self._line("include %s" % path)
def subninja(self, path):
self._line("subninja %s" % path)
def default(self, paths):
self._line("default %s" % " ".join(self._as_list(paths)))
def _count_dollars_before_index(self, s, i):
"""Returns the number of '$' characters right in front of s[i]."""
dollar_count = 0
dollar_index = i - 1
while dollar_index > 0 and s[dollar_index] == "$":
dollar_count += 1
dollar_index -= 1
return dollar_count
def _line(self, text, indent=0):
"""Write 'text' word-wrapped at self.width characters."""
leading_space = " " * indent
while len(leading_space) + len(text) > self.width:
# The text is too wide; wrap if possible.
# Find the rightmost space that would obey our width constraint and
# that's not an escaped space.
available_space = self.width - len(leading_space) - len(" $")
space = available_space
while True:
space = text.rfind(" ", 0, space)
if space < 0 or self._count_dollars_before_index(text, space) % 2 == 0:
break
if space < 0:
# No such space; just use the first unescaped space we can find.
space = available_space - 1
while True:
space = text.find(" ", space + 1)
if (
space < 0
or self._count_dollars_before_index(text, space) % 2 == 0
):
break
if space < 0:
# Give up on breaking.
break
self.output.write(leading_space + text[0:space] + " $\n")
text = text[space + 1 :]
# Subsequent lines are continuations, so indent them.
leading_space = " " * (indent + 2)
self.output.write(leading_space + text + "\n")
def _as_list(self, input):
if input is None:
return []
if isinstance(input, list):
return input
return [input]
def escape(string):
"""Escape a string such that it can be embedded into a Ninja file without
further interpretation."""
assert "\n" not in string, "Ninja syntax does not allow newlines"
# We only have one special metacharacter: '$'.
return string.replace("$", "$$")

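ninja_syntax.Writer only needs a file-like object; it handles the $-escaping and 78-column line wrapping shown above. A minimal sketch, assuming gyp's pylib directory is on sys.path; the rule and file names are invented for illustration.

# Sketch: emitting a minimal build.ninja with the Writer above.
import io

from gyp import ninja_syntax  # vendored as pylib/gyp/ninja_syntax.py

buf = io.StringIO()
n = ninja_syntax.Writer(buf)
n.comment("Generated by a gyp-style tool")
n.variable("cflags", ["-O2", "-Wall"])
n.rule("cc", command="gcc $cflags -c $in -o $out", description="CC $out")
n.build("hello.o", "cc", inputs="hello.c")
n.default("hello.o")
print(buf.getvalue())
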
Some files were not shown because too many files have changed in this diff.