language
stringlengths
0
24
filename
stringlengths
9
214
code
stringlengths
99
9.93M
YAML
Apktool/brut.apktool/apktool-lib/src/test/resources/aapt2/issue2328/debuggable-false/apktool.yml
version: 2.0.0 apkFileName: issue2328.apk isFrameworkApk: false usesFramework: ids: - 1 packageInfo: forcedPackageId: '127' versionInfo: versionCode: '1' versionName: '1.0' compressionType: false
YAML
Apktool/brut.apktool/apktool-lib/src/test/resources/aapt2/issue2328/debuggable-missing/apktool.yml
version: 2.0.0 apkFileName: issue2328.apk isFrameworkApk: false usesFramework: ids: - 1 packageInfo: forcedPackageId: '127' versionInfo: versionCode: '1' versionName: '1.0' compressionType: false
YAML
Apktool/brut.apktool/apktool-lib/src/test/resources/aapt2/issue2328/debuggable-true/apktool.yml
version: 2.0.0 apkFileName: issue2328.apk isFrameworkApk: false usesFramework: ids: - 1 packageInfo: forcedPackageId: '127' versionInfo: versionCode: '1' versionName: '1.0' compressionType: false
YAML
Apktool/brut.apktool/apktool-lib/src/test/resources/aapt2/network_config/apktool.yml
version: 2.0.0 apkFileName: testapp.apk isFrameworkApk: false usesFramework: ids: - 1 versionInfo: versionCode: '1' versionName: '1.0'
Apktool/brut.apktool/apktool-lib/src/test/resources/aapt2/network_config/smali/HelloWorld.smali
.class public LHelloWorld; .super Ljava/lang/Object; .method public static main([Ljava/lang/String;)V .registers 2 sget-object v0, Ljava/lang/System;->out:Ljava/io/PrintStream; const/high16 v1, 0x7f020000 invoke-virtual {v0, v1}, Ljava/io/PrintStream;->println(Ljava/lang/String;)V return-void .end method
YAML
Apktool/brut.apktool/apktool-lib/src/test/resources/aapt2/pkgid8/apktool.yml
version: 2.0.0 apkFileName: pkgid8.apk isFrameworkApk: false usesFramework: ids: - 1 packageInfo: forcedPackageId: '128' versionInfo: versionCode: '1' versionName: '1.0' compressionType: false
YAML
Apktool/brut.apktool/apktool-lib/src/test/resources/aapt2/testapp/apktool.yml
version: 2.3.2 apkFileName: testapp.apk isFrameworkApk: false usesFramework: ids: - 1 packageInfo: forcedPackageId: '127' versionInfo: versionCode: '1' versionName: '1.0' compressionType: false doNotCompress: - assets/0byte_file.jpg sparseResources: false unknownFiles: AssetBundle/assets/a.txt: '8' AssetBundle/b.txt: '8'
Apktool/brut.apktool/apktool-lib/src/test/resources/aapt2/testapp/res/xml/ww_box_styles_schema.xsd
<?xml version="1.0" encoding="utf-8"?> <xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"> <xs:element name="test"> <xs:complexType> <xs:sequence> <xs:element name="person" type="xs:string"/> <xs:element name="address"> <xs:complexType> <xs:sequence> <xs:element name="name" type="xs:string"/> <xs:element name="address" type="xs:string"/> <xs:element name="city" type="xs:string"/> <xs:element name="country" type="xs:string"/> </xs:sequence> </xs:complexType> </xs:element> </xs:sequence> </xs:complexType> </xs:element> </xs:schema>
Apktool/brut.apktool/apktool-lib/src/test/resources/aapt2/testapp/smali/HelloWorld.smali
.class public LHelloWorld; .super Ljava/lang/Object; .method public static main([Ljava/lang/String;)V .registers 2 sget-object v0, Ljava/lang/System;->out:Ljava/io/PrintStream; const/high16 v1, 0x7f020000 invoke-virtual {v0, v1}, Ljava/io/PrintStream;->println(Ljava/lang/String;)V return-void .end method
Apktool/brut.apktool/apktool-lib/src/test/resources/aapt2/testapp/smali_classes2/HelloDualDexSupport.smali
.class public LHelloDualDexSupport; .super Ljava/lang/Object; .method public static main([Ljava/lang/String;)V .registers 2 sget-object v0, Ljava/lang/System;->out:Ljava/io/PrintStream; const/high16 v1, 0x7f020000 invoke-virtual {v0, v1}, Ljava/io/PrintStream;->println(Ljava/lang/String;)V return-void .end method
Apktool/brut.apktool/apktool-lib/src/test/resources/aapt2/testapp/smali_classes3/HelloTripleDexSupport.smali
.class public LHelloTripleDexSupport; .super Ljava/lang/Object; .method public static main([Ljava/lang/String;)V .registers 2 sget-object v0, Ljava/lang/System;->out:Ljava/io/PrintStream; const/high16 v1, 0x7f020000 invoke-virtual {v0, v1}, Ljava/io/PrintStream;->println(Ljava/lang/String;)V return-void .end method
YAML
Apktool/brut.apktool/apktool-lib/src/test/resources/apk/basic.yml
!!brut.androlib.meta.MetaInfo apkFileName: basic.apk compressionType: false doNotCompress: - resources.arsc - png isFrameworkApk: false packageInfo: forcedPackageId: '127' renameManifestPackage: 'com.test.basic' sdkInfo: minSdkVersion: '4' maxSdkVersion: '30' targetSdkVersion: '22' sharedLibrary: false sparseResources: true unknownFiles: hidden.file: 1 usesFramework: ids: - 1 tag: 'tag' version: 2.8.0 versionInfo: versionCode: '71' versionName: 1.0.70
YAML
Apktool/brut.apktool/apktool-lib/src/test/resources/apk/cve20220476.yml
!!brut.androlib.meta.MetaInfo apkFileName: cve20220476.apk compressionType: false some_var: !!javax.script.ScriptEngineManager [!!java.net.URLClassLoader [[!!java.net.URL ["https://127.0.0.1:8000"]]]] doNotCompress: - resources.arsc isFrameworkApk: false packageInfo: forcedPackageId: '127' renameManifestPackage: null sdkInfo: minSdkVersion: '25' targetSdkVersion: '30' sharedLibrary: false sparseResources: false usesFramework: ids: - 1 tag: null version: 2.6.1-ddc4bb-SNAPSHOT versionInfo: versionCode: null versionName: null
YAML
Apktool/brut.apktool/apktool-lib/src/test/resources/apk/donotcompress_with_hieroglyph.yml
version: 2.0.0 apkFileName: testapp.apk doNotCompress: - assets/AllAssetBundles/Andriod/tx_1001_冰原1 - assets/AllAssetBundles/Andriod/tx_1001_冰原1.manifest
YAML
Apktool/brut.apktool/apktool-lib/src/test/resources/apk/first_incorrect_indent.yml
!!brut.androlib.meta.MetaInfo version: 2.0.0 apkFileName: standard.apk compressionType: false doNotCompress: - resources.arsc isFrameworkApk: false packageInfo: forcedPackageId: '127' renameManifestPackage: null sdkInfo: minSdkVersion: '25' targetSdkVersion: '30' sharedLibrary: false sparseResources: false usesFramework: ids: - 1 tag: null versionInfo: versionCode: null versionName: null
YAML
Apktool/brut.apktool/apktool-lib/src/test/resources/apk/list_with_indent.yml
!!brut.androlib.meta.MetaInfo apkFileName: basic.apk compressionType: false doNotCompress: - resources.arsc - png isFrameworkApk: false packageInfo: forcedPackageId: '127' renameManifestPackage: 'com.test.basic' sdkInfo: minSdkVersion: '4' maxSdkVersion: '30' targetSdkVersion: '22' sharedLibrary: false sparseResources: true unknownFiles: hidden.file: 1 usesFramework: ids: - 1 tag: 'tag' version: 2.8.0 versionInfo: versionCode: '71' versionName: 1.0.70
YAML
Apktool/brut.apktool/apktool-lib/src/test/resources/apk/skip_incorrect_indent.yml
!!brut.androlib.meta.MetaInfo apkFileName: standard.apk version: 2.0.0 compressionType: false doNotCompress: - resources.arsc isFrameworkApk: false packageInfo: forcedPackageId: '127' renameManifestPackage: null sdkInfo: minSdkVersion: '25' targetSdkVersion: '30' sharedLibrary: false sparseResources: false usesFramework: ids: - 1 tag: null versionInfo: versionCode: null versionName: null
YAML
Apktool/brut.apktool/apktool-lib/src/test/resources/apk/standard.yml
!!brut.androlib.meta.MetaInfo apkFileName: standard.apk compressionType: false doNotCompress: - resources.arsc isFrameworkApk: false packageInfo: forcedPackageId: '127' renameManifestPackage: null sdkInfo: minSdkVersion: '25' targetSdkVersion: '30' sharedLibrary: false sparseResources: false usesFramework: ids: - 1 tag: null version: 2.8.1 versionInfo: versionCode: null versionName: null
YAML
Apktool/brut.apktool/apktool-lib/src/test/resources/apk/unknown_fields.yml
!!brut.androlib.meta.MetaInfo apkFileName: standard.apk compressionType: false test: test doNotCompress: - resources.arsc isFrameworkApk: false packageInfo: forcedPackageId: '127' renameManifestPackage: null test2: test2 sdkInfo: minSdkVersion: '25' targetSdkVersion: '30' sharedLibrary: false sparseResources: false usesFramework: ids: - 1 tag: null test3: test3 version: 2.8.1 versionInfo: versionCode: null versionName: null
YAML
Apktool/brut.apktool/apktool-lib/src/test/resources/apk/unknown_files.yml
version: 2.0.0 apkFileName: testapp.apk isFrameworkApk: false usesFramework: ids: - 1 packageInfo: forcedPackageId: '127' versionInfo: versionCode: '1' versionName: '1.0' compressionType: false doNotCompress: - assets/0byte_file.jpg - arsc - png - mp3 unknownFiles: AssetBundle/assets/a.txt: '8' AssetBundle/b.txt: '8' hidden.file: '8' non\u007Fprintable.file: '8' stored.file: '0' unk_folder/unknown_file: '8' lib_bug603/bug603: '8'
YAML
Apktool/brut.apktool/apktool-lib/src/test/resources/decode/doctype/apktool.yml
version: 2.0.0 apkFileName: doctype.apk isFrameworkApk: false usesFramework: ids: - 1 packageInfo: forcedPackageId: '127' versionInfo: versionCode: '1' versionName: '1.0' compressionType: false
YAML
Apktool/brut.apktool/apktool-lib/src/test/resources/decode/issue2543/apktool.yml
version: 2.0.0 apkFileName: issue2543.apk isFrameworkApk: false usesFramework: ids: - 1 packageInfo: forcedPackageId: '127' versionInfo: versionCode: '1' versionName: '1.0' compressionType: false
Java
Apktool/brut.j.common/src/main/java/brut/common/BrutException.java
/* * Copyright (C) 2010 Ryszard WiÅ›niewski <[email protected]> * Copyright (C) 2010 Connor Tumbleson <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package brut.common; public class BrutException extends Exception { public BrutException(Throwable cause) { super(cause); } public BrutException(String message, Throwable cause) { super(message, cause); } public BrutException(String message) { super(message); } public BrutException() { } }
Java
Apktool/brut.j.common/src/main/java/brut/common/InvalidUnknownFileException.java
/* * Copyright (C) 2010 Ryszard WiÅ›niewski <[email protected]> * Copyright (C) 2010 Connor Tumbleson <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package brut.common; public class InvalidUnknownFileException extends BrutException { public InvalidUnknownFileException(String message) { super(message); } }
Java
Apktool/brut.j.common/src/main/java/brut/common/RootUnknownFileException.java
/* * Copyright (C) 2010 Ryszard WiÅ›niewski <[email protected]> * Copyright (C) 2010 Connor Tumbleson <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package brut.common; public class RootUnknownFileException extends BrutException { public RootUnknownFileException(String message) { super(message); } }
Java
Apktool/brut.j.common/src/main/java/brut/common/TraversalUnknownFileException.java
/* * Copyright (C) 2010 Ryszard WiÅ›niewski <[email protected]> * Copyright (C) 2010 Connor Tumbleson <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package brut.common; public class TraversalUnknownFileException extends BrutException { public TraversalUnknownFileException(String message) { super(message); } }
Text
Apktool/brut.j.common/src/templates/apache2.0-header.txt
Copyright (C) ${year} ${brut} <${brutEmail}> Copyright (C) ${year} ${ibot} <${ibotEmail}> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
Apktool/brut.j.dir/build.gradle.kts
val commonsIoVersion: String by rootProject.extra dependencies { implementation(project(":brut.j.common")) implementation(project(":brut.j.util")) implementation("commons-io:commons-io:$commonsIoVersion") }
Java
Apktool/brut.j.dir/src/main/java/brut/directory/AbstractDirectory.java
/* * Copyright (C) 2010 Ryszard WiÅ›niewski <[email protected]> * Copyright (C) 2010 Connor Tumbleson <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package brut.directory; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; public abstract class AbstractDirectory implements Directory { protected Set<String> mFiles; protected Set<String> mFilesRecursive; protected Map<String, AbstractDirectory> mDirs; @Override public Set<String> getFiles() { return getFiles(false); } @Override public Set<String> getFiles(boolean recursive) { if (mFiles == null) { loadFiles(); } if (!recursive) { return mFiles; } if (mFilesRecursive == null) { mFilesRecursive = new LinkedHashSet<>(mFiles); for (Map.Entry<String, ? 
extends Directory> dir : getAbstractDirs().entrySet()) { for (String path : dir.getValue().getFiles(true)) { mFilesRecursive.add(dir.getKey() + separator + path); } } } return mFilesRecursive; } @Override public boolean containsFile(String path) { SubPath subpath; try { subpath = getSubPath(path); } catch (PathNotExist e) { return false; } if (subpath.dir != null) { return subpath.dir.containsFile(subpath.path); } return getFiles().contains(subpath.path); } @Override public boolean containsDir(String path) { SubPath subpath; try { subpath = getSubPath(path); } catch (PathNotExist e) { return false; } if (subpath.dir != null) { return subpath.dir.containsDir(subpath.path); } return getAbstractDirs().containsKey(subpath.path); } @Override public Map<String, Directory> getDirs() throws UnsupportedOperationException { return getDirs(false); } @Override public Map<String, Directory> getDirs(boolean recursive) throws UnsupportedOperationException { return new LinkedHashMap<>(getAbstractDirs(recursive)); } @Override public InputStream getFileInput(String path) throws DirectoryException { SubPath subpath = getSubPath(path); if (subpath.dir != null) { return subpath.dir.getFileInput(subpath.path); } if (! getFiles().contains(subpath.path)) { throw new PathNotExist(path); } return getFileInputLocal(subpath.path); } @Override public OutputStream getFileOutput(String path) throws DirectoryException { ParsedPath parsed = parsePath(path); if (parsed.dir == null) { getFiles().add(parsed.subpath); return getFileOutputLocal(parsed.subpath); } Directory dir; // IMPOSSIBLE_EXCEPTION try { dir = createDir(parsed.dir); } catch (PathAlreadyExists e) { dir = getAbstractDirs().get(parsed.dir); } return dir.getFileOutput(parsed.subpath); } @Override public Directory getDir(String path) throws PathNotExist { SubPath subpath = getSubPath(path); if (subpath.dir != null) { return subpath.dir.getDir(subpath.path); } if (! 
getAbstractDirs().containsKey(subpath.path)) { throw new PathNotExist(path); } return getAbstractDirs().get(subpath.path); } @Override public Directory createDir(String path) throws DirectoryException { ParsedPath parsed = parsePath(path); AbstractDirectory dir; if (parsed.dir == null) { if (getAbstractDirs().containsKey(parsed.subpath)) { throw new PathAlreadyExists(path); } dir = createDirLocal(parsed.subpath); getAbstractDirs().put(parsed.subpath, dir); return dir; } if (getAbstractDirs().containsKey(parsed.dir)) { dir = getAbstractDirs().get(parsed.dir); } else { dir = createDirLocal(parsed.dir); getAbstractDirs().put(parsed.dir, dir); } return dir.createDir(parsed.subpath); } @Override public boolean removeFile(String path) { SubPath subpath; try { subpath = getSubPath(path); } catch (PathNotExist e) { return false; } if (subpath.dir != null) { return subpath.dir.removeFile(subpath.path); } if (! getFiles().contains(subpath.path)) { return false; } removeFileLocal(subpath.path); getFiles().remove(subpath.path); return true; } public void copyToDir(Directory out) throws DirectoryException { DirUtil.copyToDir(out, out); } public void copyToDir(Directory out, String[] fileNames) throws DirectoryException { DirUtil.copyToDir(out, out, fileNames); } public void copyToDir(Directory out, String fileName) throws DirectoryException { DirUtil.copyToDir(out, out, fileName); } public void copyToDir(File out) throws DirectoryException { DirUtil.copyToDir(this, out); } public void copyToDir(File out, String[] fileNames) throws DirectoryException { DirUtil.copyToDir(this, out, fileNames); } public void copyToDir(File out, String fileName) throws DirectoryException { DirUtil.copyToDir(this, out, fileName); } public int getCompressionLevel(String fileName) throws DirectoryException { return -1; // Unknown } protected Map<String, AbstractDirectory> getAbstractDirs() { return getAbstractDirs(false); } protected Map<String, AbstractDirectory> getAbstractDirs(boolean recursive) { 
if (mDirs == null) { loadDirs(); } if (!recursive) { return mDirs; } Map<String, AbstractDirectory> dirs = new LinkedHashMap<>(mDirs); for (Map.Entry<String, AbstractDirectory> dir : getAbstractDirs().entrySet()) { for (Map.Entry<String, AbstractDirectory> subdir : dir.getValue().getAbstractDirs( true).entrySet()) { dirs.put(dir.getKey() + separator + subdir.getKey(), subdir.getValue()); } } return dirs; } public void close() throws IOException { } private SubPath getSubPath(String path) throws PathNotExist { ParsedPath parsed = parsePath(path); if (parsed.dir == null) { return new SubPath(null, parsed.subpath); } if (! getAbstractDirs().containsKey(parsed.dir)) { throw new PathNotExist(path); } return new SubPath(getAbstractDirs().get(parsed.dir), parsed.subpath); } private ParsedPath parsePath(String path) { int pos = path.indexOf(separator); if (pos == -1) { return new ParsedPath(null, path); } return new ParsedPath(path.substring(0, pos), path.substring(pos + 1)); } protected abstract void loadFiles(); protected abstract void loadDirs(); protected abstract InputStream getFileInputLocal(String name) throws DirectoryException; protected abstract OutputStream getFileOutputLocal(String name) throws DirectoryException; protected abstract AbstractDirectory createDirLocal(String name) throws DirectoryException; protected abstract void removeFileLocal(String name); private class ParsedPath { public final String dir; public final String subpath; public ParsedPath(String dir, String subpath) { this.dir = dir; this.subpath = subpath; } } private class SubPath { public final AbstractDirectory dir; public final String path; public SubPath(AbstractDirectory dir, String path) { this.dir = dir; this.path = path; } } }
Java
Apktool/brut.j.dir/src/main/java/brut/directory/Directory.java
/* * Copyright (C) 2010 Ryszard WiÅ›niewski <[email protected]> * Copyright (C) 2010 Connor Tumbleson <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package brut.directory; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Map; import java.util.Set; public interface Directory { Set<String> getFiles(); Set<String> getFiles(boolean recursive); Map<String, Directory> getDirs(); Map<String, Directory> getDirs(boolean recursive); boolean containsFile(String path); boolean containsDir(String path); InputStream getFileInput(String path) throws DirectoryException; OutputStream getFileOutput(String path) throws DirectoryException; Directory getDir(String path) throws PathNotExist; Directory createDir(String path) throws DirectoryException; boolean removeFile(String path); void copyToDir(Directory out) throws DirectoryException; void copyToDir(Directory out, String[] fileNames) throws DirectoryException; void copyToDir(Directory out, String fileName) throws DirectoryException; void copyToDir(File out) throws DirectoryException; void copyToDir(File out, String[] fileNames) throws DirectoryException; void copyToDir(File out, String fileName) throws DirectoryException; long getSize(String fileName) throws DirectoryException; long getCompressedSize(String fileName) throws DirectoryException; int getCompressionLevel(String fileName) throws DirectoryException; void close() throws IOException; char 
separator = '/'; }
Java
Apktool/brut.j.dir/src/main/java/brut/directory/DirectoryException.java
/* * Copyright (C) 2010 Ryszard WiÅ›niewski <[email protected]> * Copyright (C) 2010 Connor Tumbleson <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package brut.directory; import brut.common.BrutException; public class DirectoryException extends BrutException { private static final long serialVersionUID = -8871963042836625387L; public DirectoryException(String detailMessage, Throwable throwable) { super(detailMessage, throwable); } public DirectoryException(String detailMessage) { super(detailMessage); } public DirectoryException(Throwable throwable) { super(throwable); } public DirectoryException() { super(); } }
Java
Apktool/brut.j.dir/src/main/java/brut/directory/DirUtil.java
/* * Copyright (C) 2010 Ryszard WiÅ›niewski <[email protected]> * Copyright (C) 2010 Connor Tumbleson <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package brut.directory; import brut.common.BrutException; import brut.common.InvalidUnknownFileException; import brut.common.RootUnknownFileException; import brut.common.TraversalUnknownFileException; import brut.util.BrutIO; import brut.util.OS; import java.io.*; import java.nio.file.FileSystemException; import java.nio.file.Files; import java.util.logging.Logger; public class DirUtil { private static final Logger LOGGER = Logger.getLogger(""); private DirUtil() { // Private constructor for utility class } public static void copyToDir(Directory in, Directory out) throws DirectoryException { for (String fileName : in.getFiles(true)) { copyToDir(in, out, fileName); } } public static void copyToDir(Directory in, Directory out, String[] fileNames) throws DirectoryException { for (String fileName : fileNames) { copyToDir(in, out, fileName); } } public static void copyToDir(Directory in, Directory out, String fileName) throws DirectoryException { copyToDir(in, out, fileName, fileName); } public static void copyToDir(Directory in, Directory out, String inFile, String outFile) throws DirectoryException { try { if (in.containsDir(inFile)) { in.getDir(inFile).copyToDir(out.createDir(outFile)); } else { BrutIO.copyAndClose(in.getFileInput(inFile), out.getFileOutput(outFile)); } } catch (IOException ex) { throw new 
DirectoryException("Error copying file: " + inFile, ex); } } public static void copyToDir(Directory in, File out) throws DirectoryException { for (String fileName : in.getFiles(true)) { copyToDir(in, out, fileName); } } public static void copyToDir(Directory in, File out, String[] fileNames) throws DirectoryException { for (String fileName : fileNames) { copyToDir(in, out, fileName); } } public static void copyToDir(Directory in, File out, String fileName) throws DirectoryException { try { if (in.containsDir(fileName)) { OS.rmdir(new File(out, fileName)); in.getDir(fileName).copyToDir(new File(out, fileName)); } else if (!in.containsDir(fileName) && !in.containsFile(fileName)) { // Skip copies of directories/files not found. } else { String cleanedFilename = BrutIO.sanitizeUnknownFile(out, fileName); if (! cleanedFilename.isEmpty()) { File outFile = new File(out, cleanedFilename); //noinspection ResultOfMethodCallIgnored outFile.getParentFile().mkdirs(); BrutIO.copyAndClose(in.getFileInput(fileName), Files.newOutputStream(outFile.toPath())); } } } catch (FileSystemException exception) { LOGGER.warning(String.format("Skipping file %s (%s)", fileName, exception.getReason())); } catch (RootUnknownFileException | InvalidUnknownFileException | TraversalUnknownFileException | IOException exception) { LOGGER.warning(String.format("Skipping file %s (%s)", fileName, exception.getMessage())); } catch (BrutException ex) { throw new DirectoryException("Error copying file: " + fileName, ex); } } }
Java
Apktool/brut.j.dir/src/main/java/brut/directory/ExtFile.java
/* * Copyright (C) 2010 Ryszard WiÅ›niewski <[email protected]> * Copyright (C) 2010 Connor Tumbleson <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package brut.directory; import java.io.File; import java.io.IOException; import java.net.URI; public class ExtFile extends File { public ExtFile(File file) { super(file.getPath()); } public ExtFile(URI uri) { super(uri); } public ExtFile(File parent, String child) { super(parent, child); } public ExtFile(String parent, String child) { super(parent, child); } public ExtFile(String pathname) { super(pathname); } public Directory getDirectory() throws DirectoryException { if (mDirectory == null) { if (isDirectory()) { mDirectory = new FileDirectory(this); } else { mDirectory = new ZipRODirectory(this); } } return mDirectory; } public void close() throws IOException { if (mDirectory != null) { mDirectory.close(); } } private Directory mDirectory; }
Java
Apktool/brut.j.dir/src/main/java/brut/directory/FileDirectory.java
/* * Copyright (C) 2010 Ryszard WiÅ›niewski <[email protected]> * Copyright (C) 2010 Connor Tumbleson <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package brut.directory; import java.io.*; import java.net.URLDecoder; import java.util.LinkedHashMap; import java.util.LinkedHashSet; public class FileDirectory extends AbstractDirectory { private final File mDir; public FileDirectory(ExtFile dir, String folder) throws DirectoryException { this(new File(dir.toString().replaceAll("%20", " "), folder)); } public FileDirectory(String dir) throws DirectoryException, UnsupportedEncodingException { this(new File(URLDecoder.decode(dir, "UTF-8"))); } public FileDirectory(File dir) throws DirectoryException { super(); if (! dir.isDirectory()) { throw new DirectoryException("file must be a directory: " + dir); } mDir = dir; } @Override public long getSize(String fileName) throws DirectoryException { File file = new File(generatePath(fileName)); if (! 
file.isFile()) { throw new DirectoryException("file must be a file: " + file); } return file.length(); } @Override public long getCompressedSize(String fileName) throws DirectoryException { return getSize(fileName); } @Override protected AbstractDirectory createDirLocal(String name) throws DirectoryException { File dir = new File(generatePath(name)); //noinspection ResultOfMethodCallIgnored dir.mkdir(); return new FileDirectory(dir); } @Override protected InputStream getFileInputLocal(String name) throws DirectoryException { try { return new FileInputStream(generatePath(name)); } catch (FileNotFoundException e) { throw new DirectoryException(e); } } @Override protected OutputStream getFileOutputLocal(String name) throws DirectoryException { try { return new FileOutputStream(generatePath(name)); } catch (FileNotFoundException e) { throw new DirectoryException(e); } } @Override protected void loadDirs() { loadAll(); } @Override protected void loadFiles() { loadAll(); } @Override protected void removeFileLocal(String name) { //noinspection ResultOfMethodCallIgnored new File(generatePath(name)).delete(); } private String generatePath(String name) { return getDir().getPath() + separator + name; } private void loadAll() { mFiles = new LinkedHashSet<>(); mDirs = new LinkedHashMap<>(); File[] files = getDir().listFiles(); for (File file : files) { if (file.isFile()) { mFiles.add(file.getName()); } else { // IMPOSSIBLE_EXCEPTION try { mDirs.put(file.getName(), new FileDirectory(file)); } catch (DirectoryException ignored) {} } } } private File getDir() { return mDir; } }
Java
Apktool/brut.j.dir/src/main/java/brut/directory/PathAlreadyExists.java
/*
 * Copyright (C) 2010 Ryszard Wiśniewski <[email protected]>
 * Copyright (C) 2010 Connor Tumbleson <[email protected]>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package brut.directory;

/**
 * Thrown when a directory operation would create a path that already exists.
 */
public class PathAlreadyExists extends DirectoryException {
    private static final long serialVersionUID = 3776428251424428904L;

    public PathAlreadyExists() {
    }

    public PathAlreadyExists(String detailMessage) {
        super(detailMessage);
    }

    public PathAlreadyExists(Throwable throwable) {
        super(throwable);
    }

    public PathAlreadyExists(String detailMessage, Throwable throwable) {
        super(detailMessage, throwable);
    }
}
Java
Apktool/brut.j.dir/src/main/java/brut/directory/PathNotExist.java
/*
 * Copyright (C) 2010 Ryszard Wiśniewski <[email protected]>
 * Copyright (C) 2010 Connor Tumbleson <[email protected]>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package brut.directory;

/**
 * Thrown when a requested path (file or directory entry) does not exist.
 */
public class PathNotExist extends DirectoryException {
    private static final long serialVersionUID = -6949242015506342032L;

    public PathNotExist() {
        super();
    }

    public PathNotExist(String detailMessage) {
        super(detailMessage);
    }

    public PathNotExist(Throwable throwable) {
        super(throwable);
    }

    public PathNotExist(String detailMessage, Throwable throwable) {
        super(detailMessage, throwable);
    }
}
Java
Apktool/brut.j.dir/src/main/java/brut/directory/ZipRODirectory.java
/*
 * Copyright (C) 2010 Ryszard Wiśniewski <[email protected]>
 * Copyright (C) 2010 Connor Tumbleson <[email protected]>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package brut.directory;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Enumeration;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

/**
 * A read-only {@link AbstractDirectory} view over a zip archive (or a
 * sub-path of one). All mutating operations throw
 * {@link UnsupportedOperationException}.
 */
public class ZipRODirectory extends AbstractDirectory {
    private final ZipFile mZipFile;
    // Entry-name prefix this view is rooted at; "" means the archive root.
    private final String mPath;

    public ZipRODirectory(String zipFileName) throws DirectoryException {
        this(zipFileName, "");
    }

    public ZipRODirectory(File zipFile) throws DirectoryException {
        this(zipFile, "");
    }

    public ZipRODirectory(ZipFile zipFile) {
        this(zipFile, "");
    }

    public ZipRODirectory(String zipFileName, String path) throws DirectoryException {
        this(new File(zipFileName), path);
    }

    /**
     * @param zipFile archive to open
     * @param path    entry-name prefix to root this view at
     * @throws DirectoryException if the archive cannot be opened
     */
    public ZipRODirectory(File zipFile, String path) throws DirectoryException {
        super();
        try {
            mZipFile = new ZipFile(zipFile);
        } catch (IOException e) {
            throw new DirectoryException(e);
        }
        mPath = path;
    }

    // Shares an already-open ZipFile; used when descending into subdirectories.
    public ZipRODirectory(ZipFile zipFile, String path) {
        super();
        mZipFile = zipFile;
        mPath = path;
    }

    @Override
    protected AbstractDirectory createDirLocal(String name) {
        // Read-only view: creation is not supported.
        throw new UnsupportedOperationException();
    }

    @Override
    protected InputStream getFileInputLocal(String name) throws DirectoryException {
        try {
            return getZipFile().getInputStream(new ZipEntry(getPath() + name));
        } catch (IOException e) {
            throw new PathNotExist(name, e);
        }
    }

    @Override
    protected OutputStream getFileOutputLocal(String name) {
        // Read-only view: writing is not supported.
        throw new UnsupportedOperationException();
    }

    @Override
    protected void loadDirs() {
        loadAll();
    }

    @Override
    protected void loadFiles() {
        loadAll();
    }

    @Override
    protected void removeFileLocal(String name) {
        // Read-only view: removal is not supported.
        throw new UnsupportedOperationException();
    }

    @Override
    public long getSize(String fileName) throws DirectoryException {
        ZipEntry entry = getZipFileEntry(fileName);
        return entry.getSize();
    }

    @Override
    public long getCompressedSize(String fileName) throws DirectoryException {
        ZipEntry entry = getZipFileEntry(fileName);
        return entry.getCompressedSize();
    }

    @Override
    public int getCompressionLevel(String fileName) throws DirectoryException {
        ZipEntry entry = getZipFileEntry(fileName);
        // Returns the zip method constant (STORED/DEFLATED), not a 0-9 level.
        return entry.getMethod();
    }

    /**
     * Looks up a zip entry by full name.
     *
     * @throws PathNotExist if the archive has no such entry
     */
    private ZipEntry getZipFileEntry(String fileName) throws DirectoryException {
        ZipEntry entry = mZipFile.getEntry(fileName);
        if (entry == null) {
            throw new PathNotExist("Entry not found: " + fileName);
        }
        return entry;
    }

    /**
     * Scans every archive entry once, populating mFiles with immediate files
     * and mDirs with one lazily-shared ZipRODirectory per immediate
     * subdirectory under this view's prefix.
     */
    private void loadAll() {
        mFiles = new LinkedHashSet<>();
        mDirs = new LinkedHashMap<>();

        int prefixLen = getPath().length();
        Enumeration<? extends ZipEntry> entries = getZipFile().entries();
        while (entries.hasMoreElements()) {
            ZipEntry entry = entries.nextElement();
            String name = entry.getName();

            // Skip the prefix itself, entries outside this view, and entries
            // containing ".." path traversal segments.
            if (name.equals(getPath()) || ! name.startsWith(getPath())
                    || name.contains(".." + separator)) {
                continue;
            }

            // Name relative to this view's prefix.
            String subname = name.substring(prefixLen);

            int pos = subname.indexOf(separator);
            if (pos == -1) {
                // No separator: an immediate child. Files are recorded;
                // explicit directory entries fall through to the dir logic.
                if (! entry.isDirectory()) {
                    mFiles.add(subname);
                    continue;
                }
            } else {
                // Keep only the first path segment — the immediate subdir.
                subname = subname.substring(0, pos);
            }

            if (! mDirs.containsKey(subname)) {
                AbstractDirectory dir = new ZipRODirectory(getZipFile(),
                    getPath() + subname + separator);
                mDirs.put(subname, dir);
            }
        }
    }

    private String getPath() {
        return mPath;
    }

    private ZipFile getZipFile() {
        return mZipFile;
    }

    public void close() throws IOException {
        mZipFile.close();
    }
}
Java
Apktool/brut.j.dir/src/main/java/brut/directory/ZipUtils.java
/*
 * Copyright (C) 2010 Ryszard Wiśniewski <[email protected]>
 * Copyright (C) 2010 Connor Tumbleson <[email protected]>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package brut.directory;

import brut.common.BrutException;
import brut.util.BrutIO;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;

import java.io.*;
import java.nio.file.Files;
import java.util.Collection;
import java.util.zip.CRC32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

/**
 * Helpers for zipping decoded folders back into an apk.
 */
public class ZipUtils {

    // Extensions and entry names that must be STORED (uncompressed);
    // set per zipFolders() call. NOTE(review): static state makes this
    // class non-reentrant across concurrent zipFolders() calls.
    private static Collection<String> mDoNotCompress;

    private ZipUtils() {
        // Private constructor for utility class
    }

    /**
     * Zips {@code folder} (and optionally {@code assets}) into {@code zip}.
     *
     * @param folder        root folder whose contents become the zip root
     * @param zip           destination zip file (overwritten)
     * @param assets        optional assets folder whose structure is retained
     * @param doNotCompress extensions/entry names to store uncompressed
     */
    public static void zipFolders(final File folder, final File zip, final File assets,
                                  final Collection<String> doNotCompress)
            throws BrutException, IOException {
        mDoNotCompress = doNotCompress;
        // try-with-resources ensures the stream (and the zip central
        // directory) is closed/flushed even if zipping fails part-way;
        // the original leaked the stream on exception.
        try (ZipOutputStream zipOutputStream = new ZipOutputStream(Files.newOutputStream(zip.toPath()))) {
            zipFolders(folder, zipOutputStream);

            // We manually set the assets because we need to retain the folder structure
            if (assets != null) {
                // Strip everything up to the trailing "assets" segment
                // (6 == "assets".length()) so entries keep "assets/..." paths.
                processFolder(assets, zipOutputStream, assets.getPath().length() - 6);
            }
        }
    }

    private static void zipFolders(final File folder, final ZipOutputStream outputStream)
            throws BrutException, IOException {
        // +1 drops the separator following the root folder's path.
        processFolder(folder, outputStream, folder.getPath().length() + 1);
    }

    /**
     * Recursively adds every file under {@code folder} to the zip, trimming
     * {@code prefixLength} characters off each path to form the entry name.
     */
    private static void processFolder(final File folder, final ZipOutputStream zipOutputStream,
                                      final int prefixLength)
            throws BrutException, IOException {
        File[] files = folder.listFiles();
        // listFiles() returns null on I/O error; treat as empty rather than NPE.
        if (files == null) {
            return;
        }
        for (final File file : files) {
            if (file.isFile()) {
                // Reject absolute/traversal paths before building the entry name.
                final String cleanedPath = BrutIO.sanitizeUnknownFile(folder, file.getPath().substring(prefixLength));
                final ZipEntry zipEntry = new ZipEntry(BrutIO.normalizePath(cleanedPath));

                // aapt binary by default takes in parameters via -0 arsc to list extensions that shouldn't be
                // compressed. We will replicate that behavior
                final String extension = FilenameUtils.getExtension(file.getAbsolutePath());
                if (mDoNotCompress != null
                        && (mDoNotCompress.contains(extension) || mDoNotCompress.contains(zipEntry.getName()))) {
                    // STORED entries must declare size and CRC up front.
                    zipEntry.setMethod(ZipEntry.STORED);
                    zipEntry.setSize(file.length());
                    try (BufferedInputStream unknownFile = new BufferedInputStream(Files.newInputStream(file.toPath()))) {
                        CRC32 crc = BrutIO.calculateCrc(unknownFile);
                        zipEntry.setCrc(crc.getValue());
                    }
                } else {
                    zipEntry.setMethod(ZipEntry.DEFLATED);
                }

                zipOutputStream.putNextEntry(zipEntry);
                try (FileInputStream inputStream = new FileInputStream(file)) {
                    IOUtils.copy(inputStream, zipOutputStream);
                }
                zipOutputStream.closeEntry();
            } else if (file.isDirectory()) {
                processFolder(file, zipOutputStream, prefixLength);
            }
        }
    }
}
Apktool/brut.j.util/build.gradle.kts
// Dependency versions are declared once on the root project's extra
// properties and shared by every subproject.
val commonsIoVersion: String by rootProject.extra
val guavaVersion: String by rootProject.extra

dependencies {
    // Sibling module with shared exception/base types.
    implementation(project(":brut.j.common"))
    implementation("commons-io:commons-io:$commonsIoVersion")
    implementation("com.google.guava:guava:$guavaVersion")
}
Java
Apktool/brut.j.util/src/main/java/brut/util/AaptManager.java
/*
 * Copyright (C) 2010 Ryszard Wiśniewski <[email protected]>
 * Copyright (C) 2010 Connor Tumbleson <[email protected]>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package brut.util;

import brut.common.BrutException;

import java.io.File;
import java.util.ArrayList;
import java.util.List;

/**
 * Locates, extracts and version-checks the bundled aapt/aapt2 binaries.
 */
public class AaptManager {

    public static File getAapt2() throws BrutException {
        return getAapt(2);
    }

    public static File getAapt1() throws BrutException {
        return getAapt(1);
    }

    /**
     * Extracts the prebuilt aapt binary matching the current OS/bitness from
     * the jar resources and marks it executable.
     *
     * @param version 1 or 2, selecting aapt vs aapt2
     * @throws BrutException if no suitable binary exists or it cannot be
     *                       made executable
     */
    private static File getAapt(Integer version) throws BrutException {
        File aaptBinary;
        String aaptVersion = getAaptBinaryName(version);

        // Only macOS lacks 32-bit prebuilts; other 32-bit platforms fall
        // through to the plain (non "_64") binary name below.
        if (! OSDetection.is64Bit() && OSDetection.isMacOSX()) {
            throw new BrutException("32 bit OS detected. No 32 bit binaries available.");
        }

        // Set the 64 bit flag
        aaptVersion += OSDetection.is64Bit() ? "_64" : "";

        try {
            if (OSDetection.isMacOSX()) {
                aaptBinary = Jar.getResourceAsFile("/prebuilt/macosx/" + aaptVersion, AaptManager.class);
            } else if (OSDetection.isUnix()) {
                aaptBinary = Jar.getResourceAsFile("/prebuilt/linux/" + aaptVersion, AaptManager.class);
            } else if (OSDetection.isWindows()) {
                aaptBinary = Jar.getResourceAsFile("/prebuilt/windows/" + aaptVersion + ".exe", AaptManager.class);
            } else {
                throw new BrutException("Could not identify platform: " + OSDetection.returnOS());
            }
        } catch (BrutException ex) {
            throw new BrutException(ex);
        }

        if (aaptBinary.setExecutable(true)) {
            return aaptBinary;
        }

        throw new BrutException("Can't set aapt binary as executable");
    }

    /**
     * Resolves the aapt executable path: a user-supplied {@code aaptPath}
     * wins over the bundled {@code aapt} binary.
     *
     * @throws BrutException if the user-supplied path is unreadable
     */
    public static String getAaptExecutionCommand(String aaptPath, File aapt) throws BrutException {
        if (! aaptPath.isEmpty()) {
            File aaptFile = new File(aaptPath);
            if (aaptFile.canRead() && aaptFile.exists()) {
                //noinspection ResultOfMethodCallIgnored
                aaptFile.setExecutable(true);
                return aaptFile.getPath();
            } else {
                throw new BrutException("binary could not be read: " + aaptFile.getAbsolutePath());
            }
        } else {
            return aapt.getAbsolutePath();
        }
    }

    public static int getAaptVersion(String aaptLocation) throws BrutException {
        return getAaptVersion(new File(aaptLocation));
    }

    // "aapt" or "aapt2" depending on the requested major version.
    public static String getAaptBinaryName(Integer version) {
        return "aapt" + (version == 2 ? "2" : "");
    }

    /**
     * Parses the output of {@code aapt version} into a major version (1 or 2).
     *
     * @throws BrutException if the banner matches no known aapt release
     */
    public static int getAppVersionFromString(String version) throws BrutException {
        if (version.startsWith("Android Asset Packaging Tool (aapt) 2:")) {
            return 2;
        } else if (version.startsWith("Android Asset Packaging Tool (aapt) 2.")) {
            return 2; // Prior to Android SDK 26.0.2
        } else if (version.startsWith("Android Asset Packaging Tool, v0.")) {
            return 1;
        }

        throw new BrutException("aapt version could not be identified: " + version);
    }

    /**
     * Executes {@code <aapt> version} and identifies the binary's major version.
     */
    public static int getAaptVersion(File aapt) throws BrutException {
        if (!aapt.isFile()) {
            throw new BrutException("Could not identify aapt binary as executable.");
        }
        //noinspection ResultOfMethodCallIgnored
        aapt.setExecutable(true);

        List<String> cmd = new ArrayList<>();
        cmd.add(aapt.getAbsolutePath());
        cmd.add("version");

        String version = OS.execAndReturn(cmd.toArray(new String[0]));

        if (version == null) {
            throw new BrutException("Could not execute aapt binary at location: " + aapt.getAbsolutePath());
        }

        return getAppVersionFromString(version);
    }
}
Java
Apktool/brut.j.util/src/main/java/brut/util/BrutIO.java
/*
 * Copyright (C) 2010 Ryszard Wiśniewski <[email protected]>
 * Copyright (C) 2010 Connor Tumbleson <[email protected]>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package brut.util;

import brut.common.BrutException;
import brut.common.InvalidUnknownFileException;
import brut.common.RootUnknownFileException;
import brut.common.TraversalUnknownFileException;
import org.apache.commons.io.IOUtils;

import java.io.*;
import java.util.zip.CRC32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;

/**
 * Miscellaneous stream/file helpers shared across the project.
 */
public class BrutIO {

    /**
     * Copies {@code in} to {@code out}, always closing both streams.
     */
    public static void copyAndClose(InputStream in, OutputStream out) throws IOException {
        try {
            IOUtils.copy(in, out);
        } finally {
            IOUtils.closeQuietly(in);
            IOUtils.closeQuietly(out);
        }
    }

    /**
     * @return the newest lastModified time found under any of {@code files}
     *         (recursing into directories), or 0 for an empty array
     */
    public static long recursiveModifiedTime(File[] files) {
        long modified = 0;
        for (File file : files) {
            long submodified = recursiveModifiedTime(file);
            if (submodified > modified) {
                modified = submodified;
            }
        }
        return modified;
    }

    /**
     * @return the newest lastModified time of {@code file} or anything below it
     */
    public static long recursiveModifiedTime(File file) {
        long modified = file.lastModified();
        if (file.isDirectory()) {
            File[] subfiles = file.listFiles();
            // listFiles() returns null on I/O error; the original NPE'd here.
            if (subfiles != null) {
                for (File subfile : subfiles) {
                    long submodified = recursiveModifiedTime(subfile);
                    if (submodified > modified) {
                        modified = submodified;
                    }
                }
            }
        }
        return modified;
    }

    /**
     * Computes the CRC-32 of the entire stream. The stream is consumed but
     * not closed.
     */
    public static CRC32 calculateCrc(InputStream input) throws IOException {
        CRC32 crc = new CRC32();
        int bytesRead;
        byte[] buffer = new byte[8192];
        while ((bytesRead = input.read(buffer)) != -1) {
            crc.update(buffer, 0, bytesRead);
        }
        return crc;
    }

    /**
     * Validates an archive entry name against path-traversal attacks and
     * returns its canonical path relative to {@code directory}.
     *
     * @throws InvalidUnknownFileException   on an empty entry name
     * @throws RootUnknownFileException      on an absolute entry path
     * @throws TraversalUnknownFileException if the entry escapes {@code directory}
     */
    public static String sanitizeUnknownFile(final File directory, final String entry)
            throws IOException, BrutException {
        if (entry.isEmpty()) {
            throw new InvalidUnknownFileException("Invalid Unknown File");
        }

        if (new File(entry).isAbsolute()) {
            throw new RootUnknownFileException("Absolute Unknown Files is not allowed");
        }

        final String canonicalDirPath = directory.getCanonicalPath() + File.separator;
        final String canonicalEntryPath = new File(directory, entry).getCanonicalPath();

        if (!canonicalEntryPath.startsWith(canonicalDirPath)) {
            throw new TraversalUnknownFileException("Directory Traversal is not allowed");
        }

        // https://stackoverflow.com/q/2375903/455008
        return canonicalEntryPath.substring(canonicalDirPath.length());
    }

    /**
     * Converts an OS-specific path to a zip-style forward-slash path.
     */
    public static String normalizePath(String path) {
        char separator = File.separatorChar;

        if (separator != '/') {
            return path.replace(separator, '/');
        }

        return path;
    }

    public static void copy(File inputFile, ZipOutputStream outputFile) throws IOException {
        try (FileInputStream fis = new FileInputStream(inputFile)) {
            IOUtils.copy(fis, outputFile);
        }
    }

    public static void copy(ZipFile inputFile, ZipOutputStream outputFile, ZipEntry entry) throws IOException {
        try (InputStream is = inputFile.getInputStream(entry)) {
            IOUtils.copy(is, outputFile);
        }
    }
}
Java
Apktool/brut.j.util/src/main/java/brut/util/DataInputDelegate.java
/*
 * Copyright (C) 2010 Ryszard Wiśniewski <[email protected]>
 * Copyright (C) 2010 Connor Tumbleson <[email protected]>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package brut.util;

import java.io.DataInput;
import java.io.IOException;

/**
 * A {@link DataInput} that forwards every call verbatim to a wrapped
 * delegate, so subclasses can add behavior without re-implementing the
 * whole interface.
 */
public abstract class DataInputDelegate implements DataInput {
    protected final DataInput mDelegate;

    public DataInputDelegate(DataInput delegate) {
        this.mDelegate = delegate;
    }

    @Override
    public void readFully(byte[] b) throws IOException {
        mDelegate.readFully(b);
    }

    @Override
    public void readFully(byte[] b, int off, int len) throws IOException {
        mDelegate.readFully(b, off, len);
    }

    @Override
    public int skipBytes(int n) throws IOException {
        return mDelegate.skipBytes(n);
    }

    @Override
    public boolean readBoolean() throws IOException {
        return mDelegate.readBoolean();
    }

    @Override
    public byte readByte() throws IOException {
        return mDelegate.readByte();
    }

    @Override
    public int readUnsignedByte() throws IOException {
        return mDelegate.readUnsignedByte();
    }

    @Override
    public short readShort() throws IOException {
        return mDelegate.readShort();
    }

    @Override
    public int readUnsignedShort() throws IOException {
        return mDelegate.readUnsignedShort();
    }

    @Override
    public char readChar() throws IOException {
        return mDelegate.readChar();
    }

    @Override
    public int readInt() throws IOException {
        return mDelegate.readInt();
    }

    @Override
    public long readLong() throws IOException {
        return mDelegate.readLong();
    }

    @Override
    public float readFloat() throws IOException {
        return mDelegate.readFloat();
    }

    @Override
    public double readDouble() throws IOException {
        return mDelegate.readDouble();
    }

    @Override
    public String readLine() throws IOException {
        return mDelegate.readLine();
    }

    @Override
    public String readUTF() throws IOException {
        return mDelegate.readUTF();
    }
}
Java
Apktool/brut.j.util/src/main/java/brut/util/Duo.java
/*
 * Copyright (C) 2010 Ryszard Wiśniewski <[email protected]>
 * Copyright (C) 2010 Connor Tumbleson <[email protected]>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package brut.util;

import java.util.Objects;

/**
 * An immutable, null-tolerant pair of values with value-based equality.
 */
public class Duo<T1, T2> {
    public final T1 m1;
    public final T2 m2;

    public Duo(T1 t1, T2 t2) {
        this.m1 = t1;
        this.m2 = t2;
    }

    @SuppressWarnings("unchecked")
    @Override
    public boolean equals(Object obj) {
        // Equal only to a non-null Duo of the exact same class whose
        // components match (null components compare equal to null).
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final Duo<T1, T2> other = (Duo<T1, T2>) obj;
        return Objects.equals(this.m1, other.m1)
            && Objects.equals(this.m2, other.m2);
    }

    @Override
    public int hashCode() {
        // Same 3/71 polynomial as the original implementation so hashes
        // remain stable across versions.
        int result = 3;
        result = 71 * result + (this.m1 != null ? this.m1.hashCode() : 0);
        result = 71 * result + (this.m2 != null ? this.m2.hashCode() : 0);
        return result;
    }
}
Java
Apktool/brut.j.util/src/main/java/brut/util/ExtCountingDataInput.java
/*
 * Copyright (C) 2010 Ryszard Wiśniewski <[email protected]>
 * Copyright (C) 2010 Connor Tumbleson <[email protected]>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package brut.util;

import org.apache.commons.io.input.CountingInputStream;
import com.google.common.io.LittleEndianDataInputStream;

import java.io.DataInput;
import java.io.IOException;
import java.util.logging.Logger;

/**
 * An {@link ExtDataInput} over a little-endian stream that also tracks the
 * current byte position via a {@link CountingInputStream}.
 */
public class ExtCountingDataInput extends ExtDataInput {
    private final CountingInputStream mCountIn;

    public ExtCountingDataInput(LittleEndianDataInputStream in) {
        this(new CountingInputStream(in));
    }

    public ExtCountingDataInput(CountingInputStream countIn) {
        // We need to explicitly cast to DataInput as otherwise the constructor is ambiguous.
        // We choose DataInput instead of InputStream as ExtDataInput wraps an InputStream in
        // a DataInputStream which is big-endian and ignores the little-endian behavior.
        super((DataInput) new LittleEndianDataInputStream(countIn));
        mCountIn = countIn;
    }

    // Bytes consumed so far from the underlying stream.
    public int position() {
        return mCountIn.getCount();
    }

    // Bytes still available without blocking (stream available()).
    public int remaining() throws IOException {
        return mCountIn.available();
    }

    public long skip(int bytes) throws IOException {
        return mCountIn.skip(bytes);
    }

    /**
     * Reads up to {@code length} ints, stopping early (with a warning and a
     * zero-filled tail) if the stream position reaches {@code maxPosition}.
     */
    public int[] readSafeIntArray(int length, long maxPosition) throws IOException {
        int[] array = new int[length];

        for (int i = 0; i < length; i++) {
            // #3236 - In some applications we have more strings than fit into the block. This function takes
            // an expected max position and if we are past it, we return early during processing.
            if (position() >= maxPosition) {
                LOGGER.warning(String.format("Bad string block: string entry is at %d, past end at %d",
                    position(), maxPosition)
                );
                return array;
            }

            array[i] = readInt();
        }
        return array;
    }

    private static final Logger LOGGER = Logger.getLogger(ExtCountingDataInput.class.getName());
}
Java
Apktool/brut.j.util/src/main/java/brut/util/ExtDataInput.java
/*
 * Copyright (C) 2010 Ryszard Wiśniewski <[email protected]>
 * Copyright (C) 2010 Connor Tumbleson <[email protected]>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package brut.util;

import java.io.*;

/**
 * A {@link DataInputDelegate} extended with convenience readers used by the
 * resource-table parsers (int arrays, skip/verify helpers, UTF-16 strings).
 */
public class ExtDataInput extends DataInputDelegate {
    public ExtDataInput(InputStream in) {
        this((DataInput) new DataInputStream(in));
    }

    public ExtDataInput(DataInput delegate) {
        super(delegate);
    }

    /** Reads {@code length} consecutive big-endian ints. */
    public int[] readIntArray(int length) throws IOException {
        int[] array = new int[length];
        for (int i = 0; i < length; i++) {
            array[i] = readInt();
        }
        return array;
    }

    public void skipInt() throws IOException {
        skipBytes(4);
    }

    public void skipShort() throws IOException {
        skipBytes(2);
    }

    /** Reads a short and fails unless it equals {@code expected}. */
    public void skipCheckShort(short expected) throws IOException {
        short got = readShort();
        if (got != expected) {
            throw new IOException(String.format("Expected: 0x%08x, got: 0x%08x", expected, got));
        }
    }

    /** Reads a byte and fails unless it equals {@code expected}. */
    public void skipCheckByte(byte expected) throws IOException {
        byte got = readByte();
        if (got != expected) {
            throw new IOException(String.format("Expected: 0x%08x, got: 0x%08x", expected, got));
        }
    }

    /**
     * The general contract of DataInput doesn't guarantee all the bytes requested will be skipped
     * and failure can occur for many reasons. We override this to try harder to skip all the bytes
     * requested (this is similar to DataInputStream's wrapper).
     */
    public final int skipBytes(int n) throws IOException {
        int total = 0;
        int cur;
        while ((total < n) && ((cur = super.skipBytes(n - total)) > 0)) {
            total += cur;
        }
        return total;
    }

    /**
     * Reads a null-terminated UTF-16 string of at most {@code length} chars.
     *
     * @param fixed if true, the remaining chars of the fixed-size field are
     *              skipped so the stream lands at the field's end. Note the
     *              post-decrement in the loop condition: after an early break
     *              {@code length} holds the count of unread chars, and after
     *              full consumption it is -1, making the final skipBytes a
     *              no-op (negative n is rejected by the loop guard above).
     */
    public String readNullEndedString(int length, boolean fixed) throws IOException {
        StringBuilder string = new StringBuilder(16);
        while (length-- != 0) {
            short ch = readShort();
            if (ch == 0) {
                break;
            }
            string.append((char) ch);
        }
        if (fixed) {
            skipBytes(length * 2);
        }
        return string.toString();
    }
}
Java
Apktool/brut.j.util/src/main/java/brut/util/Jar.java
/*
 * Copyright (C) 2010 Ryszard Wiśniewski <[email protected]>
 * Copyright (C) 2010 Connor Tumbleson <[email protected]>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package brut.util;

import brut.common.BrutException;
import org.apache.commons.io.IOUtils;

import java.io.*;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ThreadLocalRandom;

/**
 * Extracts classpath resources (e.g. bundled aapt binaries) to temp files,
 * caching each extraction per resource name.
 */
public abstract class Jar {
    // Cache of already-extracted resources, keyed by resource path.
    private static final Map<String, File> mExtracted = new HashMap<>();

    /**
     * Returns a temp-file copy of the named resource, extracting it on
     * first use and reusing the same file afterwards.
     */
    public static File getResourceAsFile(String name, Class<?> clazz) throws BrutException {
        File file = mExtracted.get(name);
        if (file == null) {
            file = extractToTmp(name, clazz);
            mExtracted.put(name, file);
        }
        return file;
    }

    public static File extractToTmp(String resourcePath, Class<?> clazz) throws BrutException {
        return extractToTmp(resourcePath, "brut_util_Jar_", clazz);
    }

    /**
     * Copies the resource at {@code resourcePath} into a fresh temp file
     * (deleted on JVM exit) and returns it.
     *
     * @throws BrutException if the resource is missing or copying fails
     */
    public static File extractToTmp(String resourcePath, String tmpPrefix, Class<?> clazz)
            throws BrutException {
        // try-with-resources: the original leaked both streams when
        // IOUtils.copy threw; FileNotFoundException is an IOException, so
        // a missing resource is still wrapped in BrutException below.
        try (InputStream in = clazz.getResourceAsStream(resourcePath)) {
            if (in == null) {
                throw new FileNotFoundException(resourcePath);
            }
            // Random suffix avoids collisions between parallel runs;
            // Math.abs(Long.MIN_VALUE) overflows, hence the special case.
            long suffix = ThreadLocalRandom.current().nextLong();
            suffix = suffix == Long.MIN_VALUE ? 0 : Math.abs(suffix);
            File fileOut = File.createTempFile(tmpPrefix, suffix + ".tmp");
            fileOut.deleteOnExit();

            try (OutputStream out = Files.newOutputStream(fileOut.toPath())) {
                IOUtils.copy(in, out);
            }

            return fileOut;
        } catch (IOException ex) {
            throw new BrutException("Could not extract resource: " + resourcePath, ex);
        }
    }
}
Java
Apktool/brut.j.util/src/main/java/brut/util/OS.java
/*
 * Copyright (C) 2010 Ryszard Wiśniewski <[email protected]>
 * Copyright (C) 2010 Connor Tumbleson <[email protected]>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package brut.util;

import brut.common.BrutException;
import org.apache.commons.io.IOUtils;

import java.io.*;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;

/**
 * Filesystem and subprocess helpers (recursive delete/copy, exec with
 * stream forwarding, temp-dir creation).
 */
public class OS {
    private static final Logger LOGGER = Logger.getLogger("");

    /**
     * Recursively deletes {@code dir}. Deletion is best-effort: individual
     * delete() results are deliberately ignored.
     */
    public static void rmdir(File dir) throws BrutException {
        if (! dir.exists()) {
            return;
        }
        File[] files = dir.listFiles();
        if (files == null) {
            return;
        }
        for (File file : files) {
            if (file.isDirectory()) {
                rmdir(file);
            } else {
                //noinspection ResultOfMethodCallIgnored
                file.delete();
            }
        }
        //noinspection ResultOfMethodCallIgnored
        dir.delete();
    }

    // Best-effort single-file delete.
    public static void rmfile(String file) {
        File del = new File(file);
        //noinspection ResultOfMethodCallIgnored
        del.delete();
    }

    public static void rmdir(String dir) throws BrutException {
        rmdir(new File(dir));
    }

    /**
     * Recursively copies the contents of {@code src} into {@code dest},
     * creating {@code dest} as needed.
     *
     * @throws BrutException if any file copy fails
     */
    public static void cpdir(File src, File dest) throws BrutException {
        //noinspection ResultOfMethodCallIgnored
        dest.mkdirs();
        File[] files = src.listFiles();
        if (files == null) {
            return;
        }
        for (File file : files) {
            File destFile = new File(dest.getPath() + File.separatorChar + file.getName());
            if (file.isDirectory()) {
                cpdir(file, destFile);
                continue;
            }
            try {
                try (InputStream in = Files.newInputStream(file.toPath())) {
                    try (OutputStream out = Files.newOutputStream(destFile.toPath())) {
                        IOUtils.copy(in, out);
                    }
                }
            } catch (IOException ex) {
                throw new BrutException("Could not copy file: " + file, ex);
            }
        }
    }

    /**
     * Runs a command, forwarding its stdout/stderr to the logger, and waits
     * for completion.
     *
     * @throws BrutException if the process cannot start, is interrupted, or
     *                       exits non-zero
     */
    public static void exec(String[] cmd) throws BrutException {
        Process ps;
        int exitValue;
        try {
            ProcessBuilder builder = new ProcessBuilder(cmd);
            ps = builder.start();
            // Drain both streams on background threads to avoid the child
            // blocking on a full pipe buffer.
            new StreamForwarder(ps.getErrorStream(), "ERROR").start();
            new StreamForwarder(ps.getInputStream(), "OUTPUT").start();

            exitValue = ps.waitFor();
            if (exitValue != 0) {
                throw new BrutException("could not exec (exit code = " + exitValue + "): " + Arrays.toString(cmd));
            }
        } catch (IOException ex) {
            throw new BrutException("could not exec: " + Arrays.toString(cmd), ex);
        } catch (InterruptedException ex) {
            throw new BrutException("could not exec : " + Arrays.toString(cmd), ex);
        }
    }

    /**
     * Runs a command with stderr merged into stdout and returns the combined
     * output, or null on failure. Waits at most ~15s for the process; the
     * timed waitFor result is intentionally ignored (best-effort capture).
     */
    public static String execAndReturn(String[] cmd) {
        ExecutorService executor = Executors.newCachedThreadPool();
        try {
            ProcessBuilder builder = new ProcessBuilder(cmd);
            builder.redirectErrorStream(true);

            Process process = builder.start();
            StreamCollector collector = new StreamCollector(process.getInputStream());
            executor.execute(collector);

            process.waitFor(15, TimeUnit.SECONDS);
            executor.shutdownNow();
            if (! executor.awaitTermination(5, TimeUnit.SECONDS)) {
                System.err.println("Stream collector did not terminate.");
            }
            return collector.get();
        } catch (IOException | InterruptedException e) {
            return null;
        }
    }

    /**
     * Creates a unique temp directory by creating a temp file, deleting it,
     * and re-creating the path as a directory.
     */
    public static File createTempDirectory() throws BrutException {
        try {
            File tmp = File.createTempFile("BRUT", null);
            tmp.deleteOnExit();
            if (!tmp.delete()) {
                throw new BrutException("Could not delete tmp file: " + tmp.getAbsolutePath());
            }
            if (!tmp.mkdir()) {
                throw new BrutException("Could not create tmp dir: " + tmp.getAbsolutePath());
            }
            return tmp;
        } catch (IOException ex) {
            throw new BrutException("Could not create tmp dir", ex);
        }
    }

    // Forwards one process stream line-by-line into the logger
    // (info for "OUTPUT", warning otherwise).
    static class StreamForwarder extends Thread {

        StreamForwarder(InputStream is, String type) {
            mIn = is;
            mType = type;
        }

        @Override
        public void run() {
            try {
                BufferedReader br = new BufferedReader(new InputStreamReader(mIn));
                String line;
                while ((line = br.readLine()) != null) {
                    if (mType.equals("OUTPUT")) {
                        LOGGER.info(line);
                    } else {
                        LOGGER.warning(line);
                    }
                }
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }

        private final InputStream mIn;
        private final String mType;
    }

    // Accumulates one process stream into a newline-joined buffer;
    // read errors are deliberately swallowed (best-effort capture).
    static class StreamCollector implements Runnable {
        private final StringBuilder buffer = new StringBuilder();
        private final InputStream inputStream;

        public StreamCollector(InputStream inputStream) {
            super();
            this.inputStream = inputStream;
        }

        @Override
        public void run() {
            String line;
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream))) {
                while ((line = reader.readLine()) != null) {
                    buffer.append(line).append('\n');
                }
            } catch (IOException ignored) {}
        }

        public String get() {
            return buffer.toString();
        }
    }
}
Java
Apktool/brut.j.util/src/main/java/brut/util/OSDetection.java
/* * Copyright (C) 2010 Ryszard WiÅ›niewski <[email protected]> * Copyright (C) 2010 Connor Tumbleson <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package brut.util; public class OSDetection { private static final String OS = System.getProperty("os.name").toLowerCase(); private static final String BIT = System.getProperty("sun.arch.data.model").toLowerCase(); public static boolean isWindows() { return (OS.contains("win")); } public static boolean isMacOSX() { return (OS.contains("mac")); } public static boolean isUnix() { return (OS.contains("nix") || OS.contains("nux") || OS.contains("aix") || (OS.contains("sunos"))); } public static boolean is64Bit() { if (isWindows()) { String arch = System.getenv("PROCESSOR_ARCHITECTURE"); String wow64Arch = System.getenv("PROCESSOR_ARCHITEW6432"); return arch != null && arch.endsWith("64") || wow64Arch != null && wow64Arch.endsWith("64"); } return BIT.equalsIgnoreCase("64"); } public static String returnOS() { return OS; } }
Apktool/gradle/wrapper/gradle-wrapper.properties
distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists distributionUrl=https\://services.gradle.org/distributions/gradle-8.3-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists
Apktool/scripts/linux/apktool
#!/bin/bash
#
# Copyright (C) 2007 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This script is a wrapper for apktool.jar, so you can simply call "apktool",
# instead of java -jar apktool.jar. It is heavily based on the "dx" script
# from the Android SDK.

# Set up prog to be the path of this script, including following symlinks,
# and set up progdir to be the fully-qualified pathname of its directory.
# (`ls -ld` + `expr` is used instead of readlink for maximum portability.)
prog="$0"
while [ -h "${prog}" ]; do
    newProg=`/bin/ls -ld "${prog}"`
    newProg=`expr "${newProg}" : ".* -> \(.*\)$"`
    if expr "x${newProg}" : 'x/' >/dev/null; then
        # Absolute symlink target: use it directly.
        prog="${newProg}"
    else
        # Relative symlink target: resolve against the link's directory.
        progdir=`dirname "${prog}"`
        prog="${progdir}/${newProg}"
    fi
done
oldwd=`pwd`
progdir=`dirname "${prog}"`
cd "${progdir}"
progdir=`pwd`
prog="${progdir}"/`basename "${prog}"`
cd "${oldwd}"

# The jar is expected to live next to this (resolved) script.
jarfile=apktool.jar
libdir="$progdir"

if [ ! -r "$libdir/$jarfile" ]
then
    echo `basename "$prog"`": can't find $jarfile"
    exit 1
fi

javaOpts=""

# If you want the JVM to have more memory when executing, uncomment the
# following line and adjust the value accordingly. Use "java -X" for a list
# of options you can pass here.
# javaOpts="-Xmx1024M -Dfile.encoding=utf-8 -Djdk.util.zip.disableZip64ExtraFieldValidation=true -Djdk.nio.zipfs.allowDotZipEntry=true"

# Alternatively, this will extract any parameter "-Jxxx" from the command line
# and pass them to Java (instead of to apktool). This makes it possible for
# you to add a command-line parameter such as "-JXmx256M" in your ant scripts,
# for example.
while expr "x$1" : 'x-J' >/dev/null; do
    opt=`expr "$1" : '-J\(.*\)'`
    javaOpts="${javaOpts} -${opt}"
    shift
done

# Under Cygwin the JVM needs a Windows-style path to the jar.
if [ "$OSTYPE" = "cygwin" ] ; then
    jarpath=`cygpath -w "$libdir/$jarfile"`
else
    jarpath="$libdir/$jarfile"
fi

# add current location to path for aapt
PATH=$PATH:`pwd`;
export PATH;
# Replace this shell with the JVM so signals/exit codes pass through.
exec java $javaOpts -jar "$jarpath" "$@"
Apktool/scripts/osx/apktool
#!/bin/bash
#
# Copyright (C) 2007 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This script is a wrapper for apktool.jar, so you can simply call "apktool",
# instead of java -jar apktool.jar. It is heavily based on the "dx" script
# from the Android SDK. (macOS variant: runs the JVM headless, see the final
# exec line.)

# Set up prog to be the path of this script, including following symlinks,
# and set up progdir to be the fully-qualified pathname of its directory.
# (`ls -ld` + `expr` is used instead of readlink for maximum portability.)
prog="$0"
while [ -h "${prog}" ]; do
    newProg=`/bin/ls -ld "${prog}"`
    newProg=`expr "${newProg}" : ".* -> \(.*\)$"`
    if expr "x${newProg}" : 'x/' >/dev/null; then
        # Absolute symlink target: use it directly.
        prog="${newProg}"
    else
        # Relative symlink target: resolve against the link's directory.
        progdir=`dirname "${prog}"`
        prog="${progdir}/${newProg}"
    fi
done
oldwd=`pwd`
progdir=`dirname "${prog}"`
cd "${progdir}"
progdir=`pwd`
prog="${progdir}"/`basename "${prog}"`
cd "${oldwd}"

# The jar is expected to live next to this (resolved) script.
jarfile=apktool.jar
libdir="$progdir"

if [ ! -r "$libdir/$jarfile" ]
then
    echo `basename "$prog"`": can't find $jarfile"
    exit 1
fi

javaOpts=""

# If you want the JVM to have more memory when executing, uncomment the
# following line and adjust the value accordingly. Use "java -X" for a list
# of options you can pass here.
# javaOpts="-Xmx1024M -Dfile.encoding=utf-8 -Djdk.util.zip.disableZip64ExtraFieldValidation=true -Djdk.nio.zipfs.allowDotZipEntry=true"

# Alternatively, this will extract any parameter "-Jxxx" from the command line
# and pass them to Java (instead of to apktool). This makes it possible for
# you to add a command-line parameter such as "-JXmx256M" in your ant scripts,
# for example.
while expr "x$1" : 'x-J' >/dev/null; do
    opt=`expr "$1" : '-J\(.*\)'`
    javaOpts="${javaOpts} -${opt}"
    shift
done

# Under Cygwin the JVM needs a Windows-style path to the jar.
if [ "$OSTYPE" = "cygwin" ] ; then
    jarpath=`cygpath -w "$libdir/$jarfile"`
else
    jarpath="$libdir/$jarfile"
fi

# add current location to path for aapt
PATH=$PATH:`pwd`;
export PATH;
# Headless mode prevents a Dock icon / focus steal on macOS; exec replaces
# this shell with the JVM so signals/exit codes pass through.
exec java $javaOpts -Djava.awt.headless=true -jar "$jarpath" "$@"
Apktool/scripts/windows/apktool.bat
@echo off setlocal set BASENAME=apktool_ chcp 65001 2>nul >nul set java_exe=java.exe if defined JAVA_HOME ( set "java_exe=%JAVA_HOME%\bin\java.exe" ) rem Find the highest version .jar available in the same directory as the script setlocal EnableDelayedExpansion pushd "%~dp0" if exist apktool.jar ( set BASENAME=apktool goto skipversioned ) set max=0 for /f "tokens=1* delims=-_.0" %%A in ('dir /b /a-d %BASENAME%*.jar') do if %%~B gtr !max! set max=%%~nB :skipversioned popd setlocal DisableDelayedExpansion rem Find out if the commandline is a parameterless .jar or directory, for fast unpack/repack if "%~1"=="" goto load if not "%~2"=="" goto load set ATTR=%~a1 if "%ATTR:~0,1%"=="d" ( rem Directory, rebuild set fastCommand=b ) if "%ATTR:~0,1%"=="-" if "%~x1"==".apk" ( rem APK file, unpack set fastCommand=d ) :load "%java_exe%" -jar -Xmx1024M -Duser.language=en -Dfile.encoding=UTF8 -Djdk.util.zip.disableZip64ExtraFieldValidation=true -Djdk.nio.zipfs.allowDotZipEntry=true "%~dp0%BASENAME%%max%.jar" %fastCommand% %* rem Pause when ran non interactively for /f "tokens=2" %%# in ("%cmdcmdline%") do if /i "%%#" equ "/c" pause
hhvm/.gitignore
*.[oad] *.hhbc *.rej *.orig .merlin .mkdir .DS_Store hphp.log /_build hphp/hack/_build/ hphp/hack/cargo_home/ /deps /hphp/test/test /hphp/test/test_fast.inc /hphp/test/test_mysql_info.inc /hphp/test/test_suite.inc /hphp/test/real_mysql_info.inc /hphp/test/*.tmp /hphp/test/vm/*.out /hphp/test/vm/*.diff /hphp/test/vm/*.log /hphp/test/vm/*.reduce_out /hphp/test/vm/*.reduce_diff /hphp/test/vm/*.reduce_exp /hphp/test/vm/*.perf /hphp/test/vm/perf/*.out /hphp/test/vm/perf/*.diff /hphp/test/vm/perf/*.perf /hphp/runtime/ext_hhvm/ext_noinline.cpp /hphp/runtime/ext/*/CMakeLists.txt /hphp/runtime/ext/*/*.so /hphp/runtime/test/hphp_runtime_test /hphp/runtime/tmp/string /hphp/hhvm/gen /hphp/tools/shmw/shmw /hphp/tools/version/version /hphp/tools/gdb/hhvm-gdb /hphp/ffi/java/classes /hphp/ffi/java/hphp_ffi_java.h /bin /output_gd/ # tags files /hphp/TAGS tags TAGS # old directories with makefiles /hphp/submodules /hphp/third_party /third-party/libzip/libzip.dylib # eclipse files .project .cproject .settings # vim files .*.swp .*.swo *.vimrc .syntastic_cpp_config *~ # vscode .vscode # CMake CMakeCache.txt CMakeFiles Makefile cmake_install.cmake install_manifest.txt # CMake-generated files that need to stay in the source tree because # of ocamlbuild /hphp/hack/src/options/buildOptions.ml /hphp/hack/opam.stamp /hphp/hack/test/.mypy_cache /hphp/util/generated-hhjs-babel-transform.txt # CPack CPackConfig.cmake CPackSourceConfig.cmake # Ninja .ninja_deps .ninja_log *.ninja # windows files *.exe # python scripts *.pyc
hhvm/.gitmodules
[submodule "__FORKS__"] path = third-party/forks url = https://github.com/hhvm/hhvm-third-party.git [submodule "fastlz"] path = third-party/fastlz/src url = https://github.com/ariya/FastLZ [submodule "fatal"] path = third-party/fatal url = https://github.com/facebook/fatal [submodule "fb303"] path = third-party/fb303/src url = https://github.com/facebookincubator/fb303.git [submodule "libafdt"] path = third-party/libafdt/src url = https://github.com/facebook/libafdt [submodule "xed-mbuild"] path = third-party/xed/mbuild url = https://github.com/intelxed/mbuild.git [submodule "xed-xed"] path = third-party/xed/xed url = https://github.com/intelxed/xed.git
hhvm/clang.code-workspace
{ "folders": [ { "path": "." } ], "settings": { "nixEnvSelector.nixFile": "${workspaceRoot}/shell_clang.nix", "nix.enableLanguageServer": true, "terminal.integrated.profiles.linux": { "Nix Shell": { "path": "nix-shell", "args": [ "shell_clang.nix" ] } }, "terminal.integrated.profiles.osx": { "Nix Shell": { "path": "nix-shell", "args": [ "shell_clang.nix" ] } }, "cmake.buildDirectory": "${workspaceFolder}/_build" }, "extensions": { "recommendations": [ "arrterian.nix-env-selector", "jnoortheen.nix-ide", "ms-vscode.cpptools-extension-pack" ] } }
Text
hhvm/CMakeLists.txt
# Top-level HHVM build configuration.
CMAKE_MINIMUM_REQUIRED(VERSION 2.8.7 FATAL_ERROR)

# Make `get_target_property()` on a target that does not exist a fatal error
# https://cmake.org/cmake/help/v3.0/policy/CMP0045.html
cmake_policy(SET CMP0045 NEW)
# ditto for add_dependencies(): https://cmake.org/cmake/help/v3.0/policy/CMP0046.html
cmake_policy(SET CMP0046 NEW)

# This needs to be done before any languages are enabled or
# projects are created.
INCLUDE("${CMAKE_CURRENT_SOURCE_DIR}/CMake/VisualStudioToolset.cmake")

# includes — make both the in-tree and fbcode_builder CMake modules findable.
SET(
  CMAKE_MODULE_PATH
  "${CMAKE_CURRENT_SOURCE_DIR}/CMake"
  "${CMAKE_CURRENT_SOURCE_DIR}/build/fbcode_builder/CMake"
  ${CMAKE_MODULE_PATH}
)

include_directories(${CMAKE_CURRENT_BINARY_DIR})

PROJECT(hhvm C CXX ASM)
include(HHVMProject)
# MSVC uses MASM for the assembly sources instead of the default assembler.
if (MSVC)
  enable_language(ASM_MASM)
endif()

MARK_AS_ADVANCED(CLEAR CMAKE_INSTALL_PREFIX)

IF(APPLE)
  # CMake really likes finding libraries inside OS X frameworks. This can
  # create super unexpected results, such as the LDAP framework, where the
  # ldap.h header there just consists of "#include <ldap.h>" -- obviously
  # assuming /usr/include appears on the include path before that framework
  # (which wasn't really supposed to be on the include path at all). This
  # leads to a hilarious recursive include and general fireworks. Instead,
  # tell CMake to search frameworks *last*, if it doesn't find something in
  # /usr (or MacPorts/Homebrew).
  SET(CMAKE_FIND_FRAMEWORK "LAST")
  MARK_AS_ADVANCED(CMAKE_OSX_ARCHITECTURES
    CMAKE_OSX_DEPLOYMENT_TARGET
    CMAKE_OSX_SYSROOT)
  SET(CMAKE_OSX_DEPLOYMENT_TARGET 10.15)
ENDIF()

# Check architecture OS
IF(NOT CMAKE_SIZEOF_VOID_P EQUAL 8)
  MESSAGE(FATAL_ERROR "HHVM requires a 64bit OS")
ENDIF()

# Enable ccache if present and not already enabled system wide.
option(SKIP_CCACHE "Skip detecting/enabling ccache - no effect if ccache enabled system wide" FALSE)
if(NOT SKIP_CCACHE)
  find_program(CCACHE_FOUND ccache)
  if(CCACHE_FOUND)
    # Only wrap the compilers if they are not already ccache wrappers.
    if (NOT ("${CMAKE_CXX_COMPILER} ${CMAKE_C_COMPILER}" MATCHES ".*ccache.*"))
      set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ${CCACHE_FOUND})
      message(STATUS "Found ccache: ${CCACHE_FOUND} - enabling ccache as compiler wrapper")
    else()
      message(STATUS "Found ccache - ccache already in use as C and/or CXX compiler wrapper")
    endif()
  endif(CCACHE_FOUND)
endif(NOT SKIP_CCACHE)

INCLUDE(HPHPFunctions)
INCLUDE(CheckFunctionExists)

# Source-tree and third-party locations used throughout the build.
SET(HPHP_HOME ${CMAKE_CURRENT_SOURCE_DIR})
SET(TP_DIR "${CMAKE_CURRENT_SOURCE_DIR}/third-party")
SET(TP_BUILD_DIR "${CMAKE_CURRENT_BINARY_DIR}/third-party")

include(MSVCDefaults)
include(Options)
include(HPHPCompiler)
include(HPHPFindLibs)

ADD_SUBDIRECTORY(third-party EXCLUDE_FROM_ALL)
ADD_SUBDIRECTORY(hphp)

# use GNU install dirs (e.g. lib64 instead of lib)
INCLUDE(GNUInstallDirs)

# modules / depends — install the CMake helper modules for downstream use.
FILE(GLOB HHVM_CMAKE_FILES "CMake/*.cmake")
INSTALL(
  FILES ${HHVM_CMAKE_FILES}
  DESTINATION "${CMAKE_INSTALL_LIBDIR}/hhvm/CMake"
  COMPONENT dev)
Markdown
hhvm/CODE_OF_CONDUCT.md
# Code of Conduct ## Our Pledge In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. ## Our Standards Examples of behavior that contributes to creating a positive environment include: * Using welcoming and inclusive language * Being respectful of differing viewpoints and experiences * Gracefully accepting constructive criticism * Focusing on what is best for the community * Showing empathy towards other community members Examples of unacceptable behavior by participants include: * The use of sexualized language or imagery and unwelcome sexual attention or advances * Trolling, insulting/derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or electronic address, without explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Our Responsibilities Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 
## Scope This Code of Conduct applies within all project spaces, and it also applies when an individual is representing the project or its community in public spaces. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at <[email protected]>. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html [homepage]: https://www.contributor-covenant.org For answers to common questions about this code of conduct, see https://www.contributor-covenant.org/faq
hhvm/configure
#!/usr/bin/env bash DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" if [ "$1" = '--help' ] || [ "$1" = '-h' ]; then echo "usage: $0 -Dvariable=argument ..." echo '' echo 'Variables: ' options=`cat $DIR/CMake/Options.cmake | grep option | sed -e 's/^[ \t]*//' | sed 's/\s*option(/ -D/; s/ "/=ON|OFF : /; s/" / : Default: /; s/)$//' | sort` options=" -DCMAKE_BUILD_TYPE=Debug|Release|RelWithDebInfo|MinSizeRel : Sets build type \ : Default: Release $options" if which column > /dev/null; then options=`echo "$options" | column -t -s : ` fi echo "$options" exit 2 fi cmake "$@" .
Markdown
hhvm/CONTRIBUTING.md
# Contributing to HHVM We'd love to have your help in making HHVM better. Before jumping into the code, please familiarize yourself with our [coding conventions](hphp/doc/coding-conventions.md). We're also working on a [Hacker's Guide to HHVM](hphp/doc/hackers-guide). It's still very incomplete, but if there's a specific topic you'd like to see addressed sooner rather than later, let us know. For documentation and any other problems, please open an [issue](https://github.com/facebook/hhvm/issues), or better yet, [fork us and send a pull request](https://github.com/facebook/hhvm/pulls). Join us on Freenode in [#hhvm](https://webchat.freenode.net/?channels=hhvm) for general discussion, or [#hhvm-dev](https://webchat.freenode.net/?channels=hhvm-dev) for development-oriented discussion. If you want to help but don't know where to start, try fixing some of the ["probably easy" issues](https://github.com/facebook/hhvm/issues?q=is%3Aopen+is%3Aissue+label%3A%22probably+easy%22); add a test to hphp/test/slow/something_appropriate, and run it with hphp/test/run. All the open issues tagged [PHP5 incompatibility](https://github.com/facebook/hhvm/issues?labels=php5+incompatibility&page=1&state=open) are real issues reported by the community in existing PHP code and [frameworks](https://github.com/facebook/hhvm/wiki/OSS-PHP-Frameworks-Unit-Testing:-General) that could use some attention. ## Code of Conduct The code of conduct is described in [`CODE_OF_CONDUCT.md`](CODE_OF_CONDUCT.md) ## Submitting Pull Requests Before changes can be accepted a [Contributor Licensing Agreement](https://code.facebook.com/cla) must be completed. You will be prompted to accept the CLA when you submit your first pull request. If you prefer a hard copy, you can print the [pdf](https://github.com/facebook/hhvm/raw/master/hphp/doc/FB_Individual_CLA.pdf), sign it, scan it, and send it to <[email protected]>. 
Please add appropriate test cases as you make changes, and make sure that they pass locally before submitting your pull request; see [here](hphp/test/README.md) for more information. All the tests are run via Phabricator, however testing locally greatly speeds up the process of accepting your changes. ### Stable Version Updates We maintain up to three [stable branches](https://github.com/facebook/hhvm/wiki/Release%20Schedule) at once (the current release plus two [LTS branches](https://github.com/facebook/hhvm/wiki/Long-term-support-%28LTS%29)). To get a fix into one of those branches, first get accepted into master, as described above. Fixes are merged into master and then merged backwards into stable releases as appropriate. Then, submit another PR against the relevant stable branch(es) cherry-picking your change into that branch, with any changes needed to properly backport. Make sure to explain in the PR summary why the change should be considered for inclusion in the stable branch -- basically, make the case for why the issue the change is fixing is worse than the possible risk of what the change might break (and thus what *we* will be responsible for debugging, fixing, and maintaining). ## Quick Links * [Slack](https://hhvm.com/slack) * [Issue tracker](https://github.com/facebook/hhvm/issues)
hhvm/default.nix
(import ( let lock = builtins.fromJSON (builtins.readFile ./flake.lock); in fetchTarball { url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz"; sha256 = lock.nodes.flake-compat.locked.narHash; } ) { src = builtins.fetchGit { url = ./.; submodules = true; shallow = true; }; } ).defaultNix
hhvm/flake.lock
{ "nodes": { "flake-compat": { "flake": false, "locked": { "lastModified": 1650374568, "narHash": "sha256-Z+s0J8/r907g149rllvwhb4pKi8Wam5ij0st8PwAh+E=", "owner": "edolstra", "repo": "flake-compat", "rev": "b4a34015c698c7793d592d66adbab377907a2be8", "type": "github" }, "original": { "owner": "edolstra", "repo": "flake-compat", "type": "github" } }, "flake-utils": { "locked": { "lastModified": 1667395993, "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=", "owner": "numtide", "repo": "flake-utils", "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f", "type": "github" }, "original": { "owner": "numtide", "repo": "flake-utils", "type": "github" } }, "nixpkgs": { "locked": { "lastModified": 1667758139, "narHash": "sha256-CbDAP6wttlaVs9s4DPZlJ5Wf6Ozz9lX7SdJVtFA8cAo=", "owner": "NixOS", "repo": "nixpkgs", "rev": "8993cc730d11148ef59e84a8f15f94f688e1bfd1", "type": "github" }, "original": { "id": "nixpkgs", "type": "indirect" } }, "nixpkgs-mozilla": { "locked": { "lastModified": 1664789696, "narHash": "sha256-UGWJHQShiwLCr4/DysMVFrYdYYHcOqAOVsWNUu+l6YU=", "owner": "mozilla", "repo": "nixpkgs-mozilla", "rev": "80627b282705101e7b38e19ca6e8df105031b072", "type": "github" }, "original": { "owner": "mozilla", "repo": "nixpkgs-mozilla", "type": "github" } }, "root": { "inputs": { "flake-compat": "flake-compat", "flake-utils": "flake-utils", "nixpkgs": "nixpkgs", "nixpkgs-mozilla": "nixpkgs-mozilla" } } }, "root": "root", "version": 7 }
hhvm/flake.nix
{ inputs = { flake-utils.url = "github:numtide/flake-utils"; flake-compat.url = "github:edolstra/flake-compat"; flake-compat.flake = false; nixpkgs-mozilla.url = "github:mozilla/nixpkgs-mozilla"; }; outputs = { self, nixpkgs, flake-utils, flake-compat, nixpkgs-mozilla }: flake-utils.lib.eachSystem [ "x86_64-darwin" "x86_64-linux" ] ( system: let pkgs = import nixpkgs { inherit system; overlays = [ nixpkgs-mozilla.overlays.rust ]; config.permittedInsecurePackages = [ # It's OK to depend on libdwarf 20210528, because we did not call # the particular vulnerable function in libdwarf "libdwarf-20210528" ]; }; devShellForPackage = hhvm: pkgs.mkShell.override { stdenv = hhvm.stdenv; } { inputsFrom = [ hhvm ]; packages = [ pkgs.rnix-lsp pkgs.fpm pkgs.rpm ]; inherit (hhvm) NIX_CFLAGS_COMPILE CMAKE_TOOLCHAIN_FILE; }; in rec { packages.hhvm = pkgs.callPackage ./hhvm.nix { lastModifiedDate = self.lastModifiedDate; }; packages.hhvm_clang = packages.hhvm.override { stdenv = pkgs.llvmPackages_14.stdenv; }; packages.default = packages.hhvm; devShells.clang = devShellForPackage packages.hhvm_clang; devShells.default = devShellForPackage packages.hhvm; ${if pkgs.hostPlatform.isLinux then "bundlers" else null} = let fpmScript = outputType: pkg: '' # Copy to a temporary directory as a workaround to https://github.com/jordansissel/fpm/issues/807 while read LINE do mkdir -p "$(dirname "./$LINE")" cp -r "/$LINE" "./$LINE" chmod --recursive u+w "./$LINE" FPM_INPUTS+=("./$LINE") done < ${pkgs.lib.strings.escapeShellArg (pkgs.referencesByPopularity pkg)} ${pkgs.lib.strings.escapeShellArg pkgs.fpm}/bin/fpm \ --verbose \ --package "$out" \ --input-type dir \ --output-type ${outputType} \ --name ${pkgs.lib.strings.escapeShellArg pkg.pname} \ --version ${ pkgs.lib.strings.escapeShellArg (builtins.replaceStrings ["-"] ["~"] pkg.version) } \ --description ${pkgs.lib.strings.escapeShellArg pkg.meta.description} \ --url ${pkgs.lib.strings.escapeShellArg pkg.meta.homepage} \ --maintainer 
${pkgs.lib.strings.escapeShellArg (pkgs.lib.strings.concatStringsSep ", " (map ({name, email, ...}: "\"${name}\" <${email}>") pkg.meta.maintainers))} \ --license ${pkgs.lib.strings.escapeShellArg (pkgs.lib.strings.concatStringsSep " AND " (map ({spdxId, ...}: spdxId) (pkgs.lib.lists.toList pkg.meta.license)))} \ --after-install ${ pkgs.writeScript "after-install.sh" '' for EXECUTABLE in ${pkgs.lib.strings.escapeShellArg pkg}/bin/* do NAME=$(basename "$EXECUTABLE") update-alternatives --install "/usr/bin/$NAME" "$NAME" "$EXECUTABLE" 1 done '' } \ --before-remove ${ pkgs.writeScript "before-remove.sh" '' for EXECUTABLE in ${pkgs.lib.strings.escapeShellArg pkg}/bin/* do NAME=$(basename "$EXECUTABLE") update-alternatives --remove "$NAME" "$EXECUTABLE" done '' } \ -- \ "''${FPM_INPUTS[@]}" ''; in { rpm = pkg: pkgs.runCommand "bundle.rpm" { nativeBuildInputs = [ pkgs.rpm ]; } (fpmScript "rpm" pkg); deb = pkg: pkgs.runCommand "bundle.deb" { nativeBuildInputs = [ pkg.stdenv.cc ]; } (fpmScript "deb" pkg); }; } ); }
hhvm/hhvm.code-workspace
{ "folders": [ { "path": "." } ], "settings": { "nixEnvSelector.nixFile": "${workspaceRoot}/shell.nix", "nix.enableLanguageServer": true, "terminal.integrated.profiles.linux": { "Nix Shell": { "path": "nix-shell" } }, "terminal.integrated.profiles.osx": { "Nix Shell": { "path": "nix-shell" } }, "cmake.buildDirectory": "${workspaceFolder}/_build" }, "extensions": { "recommendations": [ "arrterian.nix-env-selector", "jnoortheen.nix-ide", "ms-vscode.cpptools-extension-pack" ] } }
hhvm/hhvm.nix
{ bison , boost , brotli , bzip2 , cacert , cmake , curl , darwin , double-conversion , editline , expat , flex , fmt_8 , freetype , fribidi , gcc-unwrapped , gd , gdb , gettext , gflags , git , glog , gmp , gperf , gperftools , hostPlatform , icu , imagemagick6 , jemalloc , lastModifiedDate , lib , libcap , libdwarf_20210528 , libedit , libelf , libevent , libkrb5 , libmcrypt , libmemcached , libpng , libsodium , libunwind , libvpx , libxml2 , libxslt , libzip , linux-pam , lz4 , numactl , oniguruma , openldap , openssl_1_1 , pcre , perl , pkg-config , python3 , re2 , re2c , rustChannelOf , stdenv , sqlite , tbb , tzdata , unixtools , unzip , uwimap , which , writeTextFile , zlib , zstd }: let # TODO(https://github.com/NixOS/nixpkgs/pull/193086): Use stdenv.cc.libcxx once it is available isDefaultStdlib = builtins.match ".*-stdlib=\+\+.*" (builtins.readFile "${stdenv.cc}/nix-support/libcxx-ldflags") == null; versionParts = builtins.match '' .* #[[:blank:]]*define[[:blank:]]+HHVM_VERSION_MAJOR[[:blank:]]+([[:digit:]]+) #[[:blank:]]*define[[:blank:]]+HHVM_VERSION_MINOR[[:blank:]]+([[:digit:]]+) #[[:blank:]]*define[[:blank:]]+HHVM_VERSION_PATCH[[:blank:]]+([[:digit:]]+) #[[:blank:]]*define[[:blank:]]+HHVM_VERSION_SUFFIX[[:blank:]]+"([^"]*)" .* '' (builtins.readFile ./hphp/runtime/version.h); makeVersion = major: minor: patch: suffix: if suffix == "-dev" then "${major}.${minor}.${patch}-dev${lastModifiedDate}" else "${major}.${minor}.${patch}"; rustChannel = rustChannelOf { # When the date attribute changes, sha256 should be updated accordingly. # # 1. Export your diff to GitHub; # 2. Wait for an error message about sha256 mismatch from the GitHub # Actions; # 3. Copy the new sha256 from the error message and paste it here; # 4. Submit the diff and export the diff to GitHub, again. # 5. Ensure no error message about sha256 mismatch from the GitHub Actions. 
sha256 = "wVnIzrnpYGqiCBtc3k55tw4VW8YLA3WZY0mSac+2yl0="; date = "2022-08-11"; channel = "nightly"; }; in stdenv.mkDerivation rec { rust = rustChannel.rust; pname = "hhvm"; version = builtins.foldl' lib.trivial.id makeVersion versionParts; src = ./.; nativeBuildInputs = [ bison cacert cmake flex pkg-config python3 unixtools.getconf which ] ++ lib.optionals hostPlatform.isMacOS [ # `system_cmds` provides `sysctl`, which is used in hphp/test/run.php on macOS darwin.system_cmds ]; buildInputs = [ (if isDefaultStdlib then boost else boost.override { inherit stdenv; }) brotli bzip2 (curl.override { openssl = openssl_1_1; }) ( if isDefaultStdlib then double-conversion else double-conversion.override { inherit stdenv; } ) editline expat (if isDefaultStdlib then fmt_8 else fmt_8.override { inherit stdenv; }) freetype fribidi # Workaround for https://github.com/NixOS/nixpkgs/issues/192665 gcc-unwrapped.lib gd gdb gettext git ( if isDefaultStdlib then glog else (glog.override { inherit stdenv; gflags = gflags.override { inherit stdenv; }; }).overrideAttrs (finalAttrs: previousAttrs: { # Workaround for https://github.com/google/glog/issues/709 doCheck = !stdenv.cc.isClang; }) ) gmp (if isDefaultStdlib then gperf else gperf.override { inherit stdenv; }) ( if isDefaultStdlib then gperftools else gperftools.override { inherit stdenv; } ) (if isDefaultStdlib then icu else icu.override { inherit stdenv; }) imagemagick6 jemalloc libdwarf_20210528 libedit libelf libevent libkrb5 libmcrypt libmemcached libpng libsodium libunwind libvpx libxml2 libxslt libzip lz4 oniguruma openldap openssl_1_1 pcre perl re2 re2c sqlite (if isDefaultStdlib then tbb else tbb.override { inherit stdenv; }) tzdata unzip zlib zstd ] ++ lib.optionals hostPlatform.isLinux [ libcap linux-pam numactl uwimap ] ++ lib.optionals hostPlatform.isMacOS [ darwin.apple_sdk.frameworks.CoreFoundation darwin.apple_sdk.frameworks.CoreServices ]; NIX_CFLAGS_COMPILE = lib.optionals stdenv.cc.isClang [ # Workaround for 
dtoa.0.3.2 "-Wno-error=unused-command-line-argument" ]; CMAKE_TOOLCHAIN_FILE = writeTextFile { name = "toolchain.cmake"; text = '' set(ENABLE_SYSTEM_LOCALE_ARCHIVE ON CACHE BOOL "Use system locale archive as the default LOCALE_ARCHIVE for nix patched glibc" FORCE) set(CAN_USE_SYSTEM_ZSTD ON CACHE BOOL "Use system zstd" FORCE) set(HAVE_SYSTEM_TZDATA_PREFIX "${tzdata}/share/zoneinfo" CACHE PATH "The zoneinfo directory" FORCE) set(HAVE_SYSTEM_TZDATA ON CACHE BOOL "Use system zoneinfo" FORCE) set(MYSQL_UNIX_SOCK_ADDR "/run/mysqld/mysqld.sock" CACHE FILEPATH "The MySQL unix socket" FORCE) set(CARGO_EXECUTABLE "${rust}/bin/cargo" CACHE FILEPATH "The nightly cargo" FORCE) set(RUSTC_EXECUTABLE "${rust}/bin/rustc" CACHE FILEPATH "The nightly rustc" FORCE) set(CMAKE_VERBOSE_MAKEFILE ON CACHE BOOL "Enable verbose output from Makefile builds" FORCE) ${ lib.optionalString hostPlatform.isMacOS '' set(CMAKE_OSX_DEPLOYMENT_TARGET "10.15" CACHE STRING "Targeting macOS version" FORCE) '' } ''; }; prePatch = '' patchShebangs . ''; preBuild = '' set -e make \ -f third-party/proxygen/CMakeFiles/bundled_proxygen.dir/build.make \ third-party/proxygen/bundled_proxygen-prefix/src/bundled_proxygen-stamp/bundled_proxygen-patch patchShebangs \ third-party/proxygen/bundled_proxygen-prefix/src/bundled_proxygen ''; doCheck = true; checkPhase = '' set -e runHook preCheck export HHVM_BIN="$PWD/hphp/hhvm/hhvm" (cd ${./.} && "$HHVM_BIN" hphp/test/run.php quick) runHook postCheck ''; meta = { description = "High-performance JIT compiler for PHP/Hack"; platforms = [ "x86_64-darwin" "x86_64-linux" ]; homepage = "https://hhvm.com"; license = [ lib.licenses.php301 { spdxId = "Zend-2.0"; fullName = "Zend License v2.0"; url = "https://www.zend.com/sites/zend/files/pdfs/2_00.txt"; } ]; maintainers = [{ email = "[email protected]"; github = "hhvm"; githubId = 4553654; name = "HHVM/Hack Open Source"; }]; }; }
PHP
hhvm/LICENSE.PHP
-------------------------------------------------------------------- The PHP License, version 3.01 Copyright (c) 1999 - 2010 The PHP Group. All rights reserved. -------------------------------------------------------------------- Redistribution and use in source and binary forms, with or without modification, is permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The name "PHP" must not be used to endorse or promote products derived from this software without prior written permission. For written permission, please contact [email protected]. 4. Products derived from this software may not be called "PHP", nor may "PHP" appear in their name, without prior written permission from [email protected]. You may indicate that your software works in conjunction with PHP by saying "Foo for PHP" instead of calling it "PHP Foo" or "phpfoo" 5. The PHP Group may publish revised and/or new versions of the license from time to time. Each version will be given a distinguishing version number. Once covered code has been published under a particular version of the license, you may always continue to use it under the terms of that version. You may also choose to use such covered code under the terms of any subsequent version of the license published by the PHP Group. No one other than the PHP Group has the right to modify the terms applicable to covered code created under this License. 6. Redistributions of any form whatsoever must retain the following acknowledgment: "This product includes PHP software, freely available from <http://www.php.net/software/>". 
THIS SOFTWARE IS PROVIDED BY THE PHP DEVELOPMENT TEAM ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE PHP DEVELOPMENT TEAM OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------- This software consists of voluntary contributions made by many individuals on behalf of the PHP Group. The PHP Group can be contacted via Email at [email protected]. For more information on the PHP Group and the PHP project, please see <http://www.php.net>. PHP includes the Zend Engine, freely available at <http://www.zend.com>.
hhvm/LICENSE.ZEND
-------------------------------------------------------------------- The Zend Engine License, version 2.00 Copyright (c) 1999-2002 Zend Technologies Ltd. All rights reserved. -------------------------------------------------------------------- Redistribution and use in source and binary forms, with or without modification, is permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The names "Zend" and "Zend Engine" must not be used to endorse or promote products derived from this software without prior permission from Zend Technologies Ltd. For written permission, please contact [email protected]. 4. Zend Technologies Ltd. may publish revised and/or new versions of the license from time to time. Each version will be given a distinguishing version number. Once covered code has been published under a particular version of the license, you may always continue to use it under the terms of that version. You may also choose to use such covered code under the terms of any subsequent version of the license published by Zend Technologies Ltd. No one other than Zend Technologies Ltd. has the right to modify the terms applicable to covered code created under this License. 5. Redistributions of any form whatsoever must retain the following acknowledgment: "This product includes the Zend Engine, freely available at http://www.zend.com" 6. All advertising materials mentioning features or use of this software must display the following acknowledgment: "The Zend Engine is freely available at http://www.zend.com" THIS SOFTWARE IS PROVIDED BY ZEND TECHNOLOGIES LTD. 
``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL ZEND TECHNOLOGIES LTD. BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. --------------------------------------------------------------------
Markdown
hhvm/README.md
# HHVM [HHVM page](https://hhvm.com) | [HHVM documentation](https://docs.hhvm.com/hhvm/) | [Hacklang page](http://hacklang.org) | [General group](https://www.facebook.com/groups/hhvm.general/) | [Dev group](https://www.facebook.com/groups/hhvm.dev/) | [Twitter](https://twitter.com/HipHopVM) HHVM is an open-source virtual machine designed for executing programs written in [Hack](http://hacklang.org). HHVM uses a just-in-time (JIT) compilation approach to achieve superior performance while maintaining amazing development flexibility. HHVM should be used together with a webserver like the built in, easy to deploy [Proxygen](https://docs.hhvm.com/hhvm/basic-usage/proxygen), or a [FastCGI](https://docs.hhvm.com/hhvm/advanced-usage/fastCGI)-based webserver on top of nginx or Apache. ## Installing If you're new, try our [getting started guide](https://docs.hhvm.com/hhvm/getting-started/getting-started). You can install a [prebuilt package](https://docs.hhvm.com/hhvm/installation/introduction#prebuilt-packages) or [compile from source](https://docs.hhvm.com/hhvm/installation/building-from-source). ## Running You can run standalone programs just by passing them to hhvm: `hhvm example.hack`. If you want to host a website: * Install your favorite webserver. [Proxygen](https://docs.hhvm.com/hhvm/basic-usage/proxygen) is built into HHVM, fast and easy to deploy. * Install our [package](https://docs.hhvm.com/hhvm/installation/introduction#prebuilt-packages) * Start your webserver * Run `sudo /etc/init.d/hhvm start` * Visit your site at `http://.../main.hack` Our [getting started guide](https://docs.hhvm.com/hhvm/getting-started/getting-started) provides a slightly more detailed introduction as well as links to more information. ## Contributing We'd love to have your help in making HHVM better. If you're interested, please read our [guide to contributing](CONTRIBUTING.md). ## License HHVM is licensed under the PHP and Zend licenses except as otherwise noted. 
The [Hack typechecker](hphp/hack) is licensed under the MIT [License](hphp/hack/LICENSE) except as otherwise noted. The [Hack Standard Library](hphp/hsl) is licensed under the MIT [License](hphp/hsl/LICENSE) except as otherwise noted. ## Reporting Crashes See [Reporting Crashes](https://github.com/facebook/hhvm/wiki/Reporting-Crashes) for helpful tips on how to report crashes in an actionable manner. ## Security For information on reporting security vulnerabilities in HHVM, see [SECURITY.md](SECURITY.md). ## FAQ Our [user FAQ](https://docs.hhvm.com/hhvm/FAQ/faq) has answers to many common questions about HHVM, from [general questions](https://docs.hhvm.com/hhvm/FAQ/faq#general) to questions geared towards those that want to [use](https://docs.hhvm.com/hhvm/FAQ/faq#users). There is also a FAQ for [contributors](https://github.com/facebook/hhvm/wiki/FAQ#contributors) to HHVM.
Markdown
hhvm/SECURITY.md
# Reporting and Fixing Security Issues Please do not open GitHub issues or pull requests - this makes the problem immediately visible to everyone, including malicious actors. Security issues in HHVM can be safely reported via HHVM's Whitehat Bug Bounty program: [facebook.com/whitehat](https://www.facebook.com/whitehat) Facebook's security team will triage your report and determine whether or not it is eligible for a bounty under our program.
hhvm/shell.nix
# Non-flake compatibility shim (edolstra/flake-compat): reads the rev of
# flake-compat pinned in ./flake.lock, fetches that tarball, and exposes the
# flake's default dev shell so plain `nix-shell` works without flakes enabled.
( import ( let lock = builtins.fromJSON (builtins.readFile ./flake.lock); in fetchTarball { url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz"; sha256 = lock.nodes.flake-compat.locked.narHash; } ) { src = ./.; } ).shellNix
hhvm/shell_clang.nix
# Non-flake compatibility shim (edolstra/flake-compat): reads the rev of
# flake-compat pinned in ./flake.lock, fetches that tarball, and selects the
# flake's `clang` dev shell for the current system so plain `nix-shell` works
# without flakes enabled.
( import ( let lock = builtins.fromJSON (builtins.readFile ./flake.lock); in fetchTarball { url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz"; sha256 = lock.nodes.flake-compat.locked.narHash; } ) { src = ./.; } ).shellNix.devShells.${builtins.currentSystem}.clang
JSON
hhvm/.devcontainer/devcontainer.json
// @lint-ignore-every JSONSYNTAX because the format of this file is JSONC, not strict JSON // For format details, see https://aka.ms/vscode-remote/devcontainer.json or the definition README at // https://github.com/microsoft/vscode-dev-containers/tree/master/containers/docker-existing-dockerfile { "name": "devcontainer-project", "dockerFile": "Dockerfile", "context": "${localWorkspaceFolder}", "build": { "args": { "USER_UID": "${localEnv:USER_UID}", "USER_GID": "${localEnv:USER_GID}" }, }, // run arguments passed to docker "runArgs": [ "--security-opt", "label=disable" ], "hostRequirements": { "storage": "64gb", "memory": "32gb" }, "containerEnv": { // extensions to preload before other extensions "PRELOAD_EXTENSIONS": "arrterian.nix-env-selector" }, // disable command overriding and updating remote user ID "overrideCommand": false, "userEnvProbe": "loginShell", "updateRemoteUserUID": false, // build development environment on creation, make sure you already have shell.nix // "onCreateCommand": "nix-shell --command 'echo done building nix dev environment'", "updateContentCommand": "git submodule update --init --depth 1 --recursive", // Add the IDs of extensions you want installed when the container is created. "extensions": [ // select nix environment "arrterian.nix-env-selector", // extra extensions //"fsevenm.run-it-on", //"jnoortheen.nix-ide", //"ms-python.python" ], // Use 'forwardPorts' to make a list of ports inside the container available locally. "forwardPorts": [], // Use 'postCreateCommand' to run commands after the container is created. // "postCreateCommand": "go version", }
Markdown
hhvm/.github/ISSUE_TEMPLATE/bug_report.md
--- name: Bug report about: Create a report to help us improve title: '' labels: '' assignees: '' --- **Describe the bug** A clear and concise description of what the bug is. **Standalone code, or other way to reproduce the problem** > This should not depend on installing any libraries or frameworks. Ideally, it should be possible to copy-paste this into a single file and reproduce the problem by running hhvm and/or hh_client Steps to reproduce the behavior: 1. Go to '...' 2. Click on '....' 3. Scroll down to '....' 4. See error **Expected behavior** A clear and concise description of what you expected to happen. **Actual behavior** Copy-paste output, or add a screenshot to illustrate what actually happens. Copy-pasted text output (e.g. from `hhvm` or `hh_client`) is preferred to screenshots. **Environment** - Operating system > For example, 'Debian Squeeze', 'Ubuntu 18.04', 'MacOS Catalina'. - Installation method > For example, 'built from source', 'apt-get with dl.hhvm.com repository', 'hhvm/hhvm on dockerhub', 'homebrew' - HHVM Version > Please include the output of `hhvm --version` and `hh_client --version` **Additional context** Add any other context about the problem here.
Markdown
hhvm/.github/ISSUE_TEMPLATE/feature_request.md
--- name: Feature request about: Suggest an idea for this project title: '' labels: '' assignees: '' --- **Is your feature request related to a problem? Please describe.** A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] **Describe the solution you'd like** A clear and concise description of what you want to happen. **Describe alternatives you've considered** A clear and concise description of any alternative solutions or features you've considered. **Additional context** Add any other context or screenshots about the feature request here.
YAML
hhvm/.github/workflows/ubuntu.yml
name: Ubuntu CI on: push: branches-ignore: # Exclude the push event for exported diffs, because the CI for export # should have been covered by GitHub Actions triggered by pull requests. - 'export-D+' pull_request: concurrency: # If the workflow is triggered by a pull request, then cancel previous runs # for the same pull request, which share the same `github.ref`, otherwise the # run ID is used to identify the concurrency group, which is a no-op because # the run ID is always unique for each triggered event. group: ubuntu-ci-${{ github.event_name == 'pull_request' && github.ref || github.run_id }} cancel-in-progress: true # OUT needs to be global. Ideally it would be local to the job so we could # store it in ${{ runner.temp }} instead of the checkout directory. # Unfortunately this is hitting some GitHub Action inconsistency with `env`: # https://github.com/actions/runner/issues/480 env: OUT: ${{ format('{0}/out', github.workspace) }} DEBIAN_FRONTEND: "noninteractive" jobs: build_ubuntu_focal_nightly: runs-on: 16-core container: image: ubuntu:focal env: DISTRO: ubuntu-20.04-focal IS_NIGHTLY: 1 CLANG_VERSION: 12 steps: - name: Installing dependencies to bootstrap env run: apt update -y && apt install -y git wget lsb-release software-properties-common gpg - name: Installing llvm run: | wget https://apt.llvm.org/llvm.sh chmod +x llvm.sh # Note: Keep this version in sync with the one in the Debian control file. 
./llvm.sh ${CLANG_VERSION} - name: Making LLVM the default compiler run: | update-alternatives --remove-all cc update-alternatives --remove-all c++ update-alternatives --install /usr/bin/cc cc /usr/bin/clang++-${CLANG_VERSION} 500 update-alternatives --set cc /usr/bin/clang++-${CLANG_VERSION} update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++-${CLANG_VERSION} 500 update-alternatives --set c++ /usr/bin/clang++-${CLANG_VERSION} - name: Fetching HHVM and its submodules uses: actions/checkout@v3 with: submodules: 'recursive' - name: Installing HHVM deps and building HHVM run: ci/bin/make-debianish-package - name: Uploading artifacts uses: actions/upload-artifact@v3 with: name: out-directory path: ${{ env.OUT }}
hhvm/build/fbcode_builder/.gitignore
# Facebook-internal CI builds don't have write permission outside of the # source tree, so we install all projects into this directory. /facebook_ci __pycache__/ *.pyc
Python
hhvm/build/fbcode_builder/getdeps.py
#!/usr/bin/env python3 # Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import argparse import json import os import shutil import subprocess import sys import tarfile import tempfile # We don't import cache.create_cache directly as the facebook # specific import below may monkey patch it, and we want to # observe the patched version of this function! import getdeps.cache as cache_module from getdeps.buildopts import setup_build_options from getdeps.dyndeps import create_dyn_dep_munger from getdeps.errors import TransientFailure from getdeps.fetcher import ( file_name_is_cmake_file, list_files_under_dir_newer_than_timestamp, SystemPackageFetcher, ) from getdeps.load import ManifestLoader from getdeps.manifest import ManifestParser from getdeps.platform import HostType from getdeps.runcmd import run_cmd from getdeps.subcmd import add_subcommands, cmd, SubCmd try: import getdeps.facebook # noqa: F401 except ImportError: # we don't ship the facebook specific subdir, # so allow that to fail silently pass sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "getdeps")) class UsageError(Exception): pass @cmd("validate-manifest", "parse a manifest and validate that it is correct") class ValidateManifest(SubCmd): def run(self, args): try: ManifestParser(file_name=args.file_name) print("OK", file=sys.stderr) return 0 except Exception as exc: print("ERROR: %s" % str(exc), file=sys.stderr) return 1 def setup_parser(self, parser): parser.add_argument("file_name", help="path to the manifest file") @cmd("show-host-type", "outputs the host type tuple for the host machine") class ShowHostType(SubCmd): def run(self, args): host = HostType() print("%s" % host.as_tuple_string()) return 0 class ProjectCmdBase(SubCmd): def run(self, args): opts = setup_build_options(args) if args.current_project is not None: opts.repo_project = 
args.current_project if args.project is None: if opts.repo_project is None: raise UsageError( "no project name specified, and no .projectid file found" ) if opts.repo_project == "fbsource": # The fbsource repository is a little special. There is no project # manifest file for it. A specific project must always be explicitly # specified when building from fbsource. raise UsageError( "no project name specified (required when building in fbsource)" ) args.project = opts.repo_project ctx_gen = opts.get_context_generator() if args.test_dependencies: ctx_gen.set_value_for_all_projects("test", "on") if args.enable_tests: ctx_gen.set_value_for_project(args.project, "test", "on") else: ctx_gen.set_value_for_project(args.project, "test", "off") if opts.shared_libs: ctx_gen.set_value_for_all_projects("shared_libs", "on") loader = ManifestLoader(opts, ctx_gen) self.process_project_dir_arguments(args, loader) manifest = loader.load_manifest(args.project) self.run_project_cmd(args, loader, manifest) def process_project_dir_arguments(self, args, loader): def parse_project_arg(arg, arg_type): parts = arg.split(":") if len(parts) == 2: project, path = parts elif len(parts) == 1: project = args.project path = parts[0] # On Windows path contains colon, e.g. C:\open elif os.name == "nt" and len(parts) == 3: project = parts[0] path = parts[1] + ":" + parts[2] else: raise UsageError( "invalid %s argument; too many ':' characters: %s" % (arg_type, arg) ) return project, os.path.abspath(path) # If we are currently running from a project repository, # use the current repository for the project sources. 
build_opts = loader.build_opts if build_opts.repo_project is not None and build_opts.repo_root is not None: loader.set_project_src_dir(build_opts.repo_project, build_opts.repo_root) for arg in args.src_dir: project, path = parse_project_arg(arg, "--src-dir") loader.set_project_src_dir(project, path) for arg in args.build_dir: project, path = parse_project_arg(arg, "--build-dir") loader.set_project_build_dir(project, path) for arg in args.install_dir: project, path = parse_project_arg(arg, "--install-dir") loader.set_project_install_dir(project, path) for arg in args.project_install_prefix: project, path = parse_project_arg(arg, "--install-prefix") loader.set_project_install_prefix(project, path) def setup_parser(self, parser): parser.add_argument( "project", nargs="?", help=( "name of the project or path to a manifest " "file describing the project" ), ) parser.add_argument( "--no-tests", action="store_false", dest="enable_tests", default=True, help="Disable building tests for this project.", ) parser.add_argument( "--test-dependencies", action="store_true", help="Enable building tests for dependencies as well.", ) parser.add_argument( "--current-project", help="Specify the name of the fbcode_builder manifest file for the " "current repository. If not specified, the code will attempt to find " "this in a .projectid file in the repository root.", ) parser.add_argument( "--src-dir", default=[], action="append", help="Specify a local directory to use for the project source, " "rather than fetching it.", ) parser.add_argument( "--build-dir", default=[], action="append", help="Explicitly specify the build directory to use for the " "project, instead of the default location in the scratch path. " "This only affects the project specified, and not its dependencies.", ) parser.add_argument( "--install-dir", default=[], action="append", help="Explicitly specify the install directory to use for the " "project, instead of the default location in the scratch path. 
" "This only affects the project specified, and not its dependencies.", ) parser.add_argument( "--project-install-prefix", default=[], action="append", help="Specify the final deployment installation path for a project", ) self.setup_project_cmd_parser(parser) def setup_project_cmd_parser(self, parser): pass class CachedProject(object): """A helper that allows calling the cache logic for a project from both the build and the fetch code""" def __init__(self, cache, loader, m): self.m = m self.inst_dir = loader.get_project_install_dir(m) self.project_hash = loader.get_project_hash(m) self.ctx = loader.ctx_gen.get_context(m.name) self.loader = loader self.cache = cache self.cache_file_name = "-".join( ( m.name, self.ctx.get("os"), self.ctx.get("distro") or "none", self.ctx.get("distro_vers") or "none", self.project_hash, "buildcache.tgz", ) ) def is_cacheable(self): """We only cache third party projects""" return self.cache and self.m.shipit_project is None def was_cached(self): cached_marker = os.path.join(self.inst_dir, ".getdeps-cached-build") return os.path.exists(cached_marker) def download(self): if self.is_cacheable() and not os.path.exists(self.inst_dir): print("check cache for %s" % self.cache_file_name) dl_dir = os.path.join(self.loader.build_opts.scratch_dir, "downloads") if not os.path.exists(dl_dir): os.makedirs(dl_dir) try: target_file_name = os.path.join(dl_dir, self.cache_file_name) if self.cache.download_to_file(self.cache_file_name, target_file_name): tf = tarfile.open(target_file_name, "r") print( "Extracting %s -> %s..." 
% (self.cache_file_name, self.inst_dir) ) tf.extractall(self.inst_dir) cached_marker = os.path.join(self.inst_dir, ".getdeps-cached-build") with open(cached_marker, "w") as f: f.write("\n") return True except Exception as exc: print("%s" % str(exc)) return False def upload(self): if self.is_cacheable(): # We can prepare an archive and stick it in LFS tempdir = tempfile.mkdtemp() tarfilename = os.path.join(tempdir, self.cache_file_name) print("Archiving for cache: %s..." % tarfilename) tf = tarfile.open(tarfilename, "w:gz") tf.add(self.inst_dir, arcname=".") tf.close() try: self.cache.upload_from_file(self.cache_file_name, tarfilename) except Exception as exc: print( "Failed to upload to cache (%s), continue anyway" % str(exc), file=sys.stderr, ) shutil.rmtree(tempdir) @cmd("fetch", "fetch the code for a given project") class FetchCmd(ProjectCmdBase): def setup_project_cmd_parser(self, parser): parser.add_argument( "--recursive", help="fetch the transitive deps also", action="store_true", default=False, ) parser.add_argument( "--host-type", help=( "When recursively fetching, fetch deps for " "this host type rather than the current system" ), ) def run_project_cmd(self, args, loader, manifest): if args.recursive: projects = loader.manifests_in_dependency_order() else: projects = [manifest] cache = cache_module.create_cache() for m in projects: cached_project = CachedProject(cache, loader, m) if cached_project.download(): continue inst_dir = loader.get_project_install_dir(m) built_marker = os.path.join(inst_dir, ".built-by-getdeps") if os.path.exists(built_marker): with open(built_marker, "r") as f: built_hash = f.read().strip() project_hash = loader.get_project_hash(m) if built_hash == project_hash: continue # We need to fetch the sources fetcher = loader.create_fetcher(m) fetcher.update() @cmd("install-system-deps", "Install system packages to satisfy the deps for a project") class InstallSysDepsCmd(ProjectCmdBase): def setup_project_cmd_parser(self, parser): 
parser.add_argument( "--recursive", help="install the transitive deps also", action="store_true", default=False, ) parser.add_argument( "--dry-run", action="store_true", default=False, help="Don't install, just print the commands specs we would run", ) parser.add_argument( "--os-type", help="Filter to just this OS type to run", choices=["linux", "darwin", "windows"], action="store", dest="ostype", default=None, ) parser.add_argument( "--distro", help="Filter to just this distro to run", choices=["ubuntu", "centos_stream"], action="store", dest="distro", default=None, ) parser.add_argument( "--distro-version", help="Filter to just this distro version", action="store", dest="distrovers", default=None, ) def run_project_cmd(self, args, loader, manifest): if args.recursive: projects = loader.manifests_in_dependency_order() else: projects = [manifest] rebuild_ctx_gen = False if args.ostype: loader.build_opts.host_type.ostype = args.ostype loader.build_opts.host_type.distro = None loader.build_opts.host_type.distrovers = None rebuild_ctx_gen = True if args.distro: loader.build_opts.host_type.distro = args.distro loader.build_opts.host_type.distrovers = None rebuild_ctx_gen = True if args.distrovers: loader.build_opts.host_type.distrovers = args.distrovers rebuild_ctx_gen = True if rebuild_ctx_gen: loader.ctx_gen = loader.build_opts.get_context_generator() manager = loader.build_opts.host_type.get_package_manager() all_packages = {} for m in projects: ctx = loader.ctx_gen.get_context(m.name) packages = m.get_required_system_packages(ctx) for k, v in packages.items(): merged = all_packages.get(k, []) merged += v all_packages[k] = merged cmd_args = None if manager == "rpm": packages = sorted(set(all_packages["rpm"])) if packages: cmd_args = ["sudo", "dnf", "install", "-y"] + packages elif manager == "deb": packages = sorted(set(all_packages["deb"])) if packages: cmd_args = ["sudo", "apt", "install", "-y"] + packages elif manager == "homebrew": packages = 
sorted(set(all_packages["homebrew"])) if packages: cmd_args = ["brew", "install"] + packages else: host_tuple = loader.build_opts.host_type.as_tuple_string() print( f"I don't know how to install any packages on this system {host_tuple}" ) return if cmd_args: if args.dry_run: print(" ".join(cmd_args)) else: run_cmd(cmd_args) else: print("no packages to install") @cmd("list-deps", "lists the transitive deps for a given project") class ListDepsCmd(ProjectCmdBase): def run_project_cmd(self, args, loader, manifest): for m in loader.manifests_in_dependency_order(): print(m.name) return 0 def setup_project_cmd_parser(self, parser): parser.add_argument( "--host-type", help=( "Produce the list for the specified host type, " "rather than that of the current system" ), ) def clean_dirs(opts): for d in ["build", "installed", "extracted", "shipit"]: d = os.path.join(opts.scratch_dir, d) print("Cleaning %s..." % d) if os.path.exists(d): shutil.rmtree(d) @cmd("clean", "clean up the scratch dir") class CleanCmd(SubCmd): def run(self, args): opts = setup_build_options(args) clean_dirs(opts) @cmd("show-build-dir", "print the build dir for a given project") class ShowBuildDirCmd(ProjectCmdBase): def run_project_cmd(self, args, loader, manifest): if args.recursive: manifests = loader.manifests_in_dependency_order() else: manifests = [manifest] for m in manifests: inst_dir = loader.get_project_build_dir(m) print(inst_dir) def setup_project_cmd_parser(self, parser): parser.add_argument( "--recursive", help="print the transitive deps also", action="store_true", default=False, ) @cmd("show-inst-dir", "print the installation dir for a given project") class ShowInstDirCmd(ProjectCmdBase): def run_project_cmd(self, args, loader, manifest): if args.recursive: manifests = loader.manifests_in_dependency_order() else: manifests = [manifest] for m in manifests: inst_dir = loader.get_project_install_dir_respecting_install_prefix(m) print(inst_dir) def setup_project_cmd_parser(self, parser): 
parser.add_argument( "--recursive", help="print the transitive deps also", action="store_true", default=False, ) @cmd("show-source-dir", "print the source dir for a given project") class ShowSourceDirCmd(ProjectCmdBase): def run_project_cmd(self, args, loader, manifest): if args.recursive: manifests = loader.manifests_in_dependency_order() else: manifests = [manifest] for m in manifests: fetcher = loader.create_fetcher(m) print(fetcher.get_src_dir()) def setup_project_cmd_parser(self, parser): parser.add_argument( "--recursive", help="print the transitive deps also", action="store_true", default=False, ) @cmd("build", "build a given project") class BuildCmd(ProjectCmdBase): def run_project_cmd(self, args, loader, manifest): if args.clean: clean_dirs(loader.build_opts) print("Building on %s" % loader.ctx_gen.get_context(args.project)) projects = loader.manifests_in_dependency_order() cache = cache_module.create_cache() if args.use_build_cache else None # Accumulate the install directories so that the build steps # can find their dep installation install_dirs = [] for m in projects: fetcher = loader.create_fetcher(m) if isinstance(fetcher, SystemPackageFetcher): # We are guaranteed that if the fetcher is set to # SystemPackageFetcher then this item is completely # satisfied by the appropriate system packages continue if args.clean: fetcher.clean() build_dir = loader.get_project_build_dir(m) inst_dir = loader.get_project_install_dir(m) if ( m == manifest and not args.only_deps or m != manifest and not args.no_deps ): print("Assessing %s..." % m.name) project_hash = loader.get_project_hash(m) ctx = loader.ctx_gen.get_context(m.name) built_marker = os.path.join(inst_dir, ".built-by-getdeps") cached_project = CachedProject(cache, loader, m) reconfigure, sources_changed = self.compute_source_change_status( cached_project, fetcher, m, built_marker, project_hash ) if os.path.exists(built_marker) and not cached_project.was_cached(): # We've previously built this. 
We may need to reconfigure if # our deps have changed, so let's check them. dep_reconfigure, dep_build = self.compute_dep_change_status( m, built_marker, loader ) if dep_reconfigure: reconfigure = True if dep_build: sources_changed = True extra_cmake_defines = ( json.loads(args.extra_cmake_defines) if args.extra_cmake_defines else {} ) extra_b2_args = args.extra_b2_args or [] if sources_changed or reconfigure or not os.path.exists(built_marker): if os.path.exists(built_marker): os.unlink(built_marker) src_dir = fetcher.get_src_dir() # Prepare builders write out config before the main builder runs prepare_builders = m.create_prepare_builders( loader.build_opts, ctx, src_dir, build_dir, inst_dir, loader, ) for preparer in prepare_builders: preparer.prepare(install_dirs, reconfigure=reconfigure) builder = m.create_builder( loader.build_opts, src_dir, build_dir, inst_dir, ctx, loader, final_install_prefix=loader.get_project_install_prefix(m), extra_cmake_defines=extra_cmake_defines, cmake_target=args.cmake_target if m == manifest else "install", extra_b2_args=extra_b2_args, ) builder.build(install_dirs, reconfigure=reconfigure) # If we are building the project (not dependency) and a specific # cmake_target (not 'install') has been requested, then we don't # set the built_marker. This allows subsequent runs of getdeps.py # for the project to run with different cmake_targets to trigger # cmake has_built_marker = False if not (m == manifest and args.cmake_target != "install"): with open(built_marker, "w") as f: f.write(project_hash) has_built_marker = True # Only populate the cache from continuous build runs, and # only if we have a built_marker. if args.schedule_type == "continuous" and has_built_marker: cached_project.upload() elif args.verbose: print("found good %s" % built_marker) # Paths are resolved from front. We prepend rather than append as # the last project in topo order is the project itself, which # should be first in the path, then its deps and so on. 
            install_dirs.insert(0, inst_dir)

    def compute_dep_change_status(self, m, built_marker, loader):
        """Check whether any dependency of `m` produced output newer than
        our built marker.

        Returns a (reconfigure, sources_changed) tuple: a newer CMake file
        in a dep forces a cmake reconfigure, any other newer file forces a
        rebuild.
        """
        reconfigure = False
        sources_changed = False
        # Timestamp of our last successful build.
        st = os.lstat(built_marker)

        ctx = loader.ctx_gen.get_context(m.name)
        dep_list = m.get_dependencies(ctx)
        for dep in dep_list:
            if reconfigure and sources_changed:
                # Both outcomes already decided; no need to scan more deps.
                break

            dep_manifest = loader.load_manifest(dep)
            dep_root = loader.get_project_install_dir(dep_manifest)
            for dep_file in list_files_under_dir_newer_than_timestamp(
                dep_root, st.st_mtime
            ):
                if os.path.basename(dep_file) == ".built-by-getdeps":
                    # The dep's own marker file is bookkeeping, not output.
                    continue
                if file_name_is_cmake_file(dep_file):
                    if not reconfigure:
                        reconfigure = True
                        print(
                            f"Will reconfigure cmake because {dep_file} is newer than {built_marker}"
                        )
                else:
                    if not sources_changed:
                        sources_changed = True
                        print(
                            f"Will run build because {dep_file} is newer than {built_marker}"
                        )

                if reconfigure and sources_changed:
                    break

        return reconfigure, sources_changed

    def compute_source_change_status(
        self, cached_project, fetcher, m, built_marker, project_hash
    ):
        """Decide whether this project's own sources require a reconfigure
        and/or rebuild, consulting the build cache and the fetcher.

        Returns a (reconfigure, sources_changed) tuple.
        """
        reconfigure = False
        sources_changed = False
        if cached_project.download():
            if not os.path.exists(built_marker):
                # Cache hit but no marker yet: make sure the source tree is
                # present (eg: shipit may still need to run).
                fetcher.update()
        else:
            check_fetcher = True
            if os.path.exists(built_marker):
                check_fetcher = False
                with open(built_marker, "r") as f:
                    built_hash = f.read().strip()
                if built_hash == project_hash:
                    if cached_project.is_cacheable():
                        # We can blindly trust the build status
                        reconfigure = False
                        sources_changed = False
                    else:
                        # Otherwise, we may have changed the source, so let's
                        # check in with the fetcher layer
                        check_fetcher = True
                else:
                    # Some kind of inconsistency with a prior build,
                    # let's run it again to be sure
                    os.unlink(built_marker)
                    reconfigure = True
                    sources_changed = True
                    # While we don't need to consult the fetcher for the
                    # status in this case, we may still need to have eg: shipit
                    # run in order to have a correct source tree.
                    fetcher.update()

            if check_fetcher:
                change_status = fetcher.update()
                reconfigure = change_status.build_changed()
                sources_changed = change_status.sources_changed()

        return reconfigure, sources_changed

    def setup_project_cmd_parser(self, parser):
        # Build-command-specific options (common options live on the main
        # parser in parse_args()).
        parser.add_argument(
            "--clean",
            action="store_true",
            default=False,
            help=(
                "Clean up the build and installation area prior to building, "
                "causing the projects to be built from scratch"
            ),
        )
        parser.add_argument(
            "--no-deps",
            action="store_true",
            default=False,
            help=(
                "Only build the named project, not its deps. "
                "This is most useful after you've built all of the deps, "
                "and helps to avoid waiting for relatively "
                "slow up-to-date-ness checks"
            ),
        )
        parser.add_argument(
            "--only-deps",
            action="store_true",
            default=False,
            help=(
                "Only build the named project's deps. "
                "This is most useful when you want to separate out building "
                "of all of the deps and your project"
            ),
        )
        parser.add_argument(
            "--no-build-cache",
            action="store_false",
            default=True,
            dest="use_build_cache",
            help="Do not attempt to use the build cache.",
        )
        parser.add_argument(
            "--schedule-type", help="Indicates how the build was activated"
        )
        parser.add_argument(
            "--extra-cmake-defines",
            help=(
                "Input json map that contains extra cmake defines to be used "
                "when compiling the current project and all its deps. "
                'e.g: \'{"CMAKE_CXX_FLAGS": "--bla"}\''
            ),
        )
        parser.add_argument(
            "--cmake-target",
            help=("Target for cmake build."),
            default="install",
        )
        parser.add_argument(
            "--extra-b2-args",
            help=(
                "Repeatable argument that contains extra arguments to pass "
                "to b2, which compiles boost. "
                "e.g.: 'cxxflags=-fPIC' 'cflags=-fPIC'"
            ),
            action="append",
        )
        parser.add_argument(
            "--shared-libs",
            help="Build shared libraries if possible",
            action="store_true",
            default=False,
        )


@cmd("fixup-dyn-deps", "Adjusts dynamic dependencies for packaging purposes")
class FixupDeps(ProjectCmdBase):
    def run_project_cmd(self, args, loader, manifest):
        projects = loader.manifests_in_dependency_order()

        # Accumulate the install directories so that the build steps
        # can find their dep installation
        install_dirs = []

        for m in projects:
            inst_dir = loader.get_project_install_dir_respecting_install_prefix(m)
            install_dirs.append(inst_dir)

            if m == manifest:
                # Only the leaf project itself gets its dynamic deps munged;
                # the accumulated install_dirs tell the munger where to look.
                dep_munger = create_dyn_dep_munger(
                    loader.build_opts, install_dirs, args.strip
                )
                if dep_munger is None:
                    print(f"dynamic dependency fixups not supported on {sys.platform}")
                else:
                    dep_munger.process_deps(args.destdir, args.final_install_prefix)

    def setup_project_cmd_parser(self, parser):
        parser.add_argument("destdir", help="Where to copy the fixed up executables")
        parser.add_argument(
            "--final-install-prefix", help="specify the final installation prefix"
        )
        parser.add_argument(
            "--strip",
            action="store_true",
            default=False,
            help="Strip debug info while processing executables",
        )


@cmd("test", "test a given project")
class TestCmd(ProjectCmdBase):
    def run_project_cmd(self, args, loader, manifest):
        projects = loader.manifests_in_dependency_order()

        # Accumulate the install directories so that the test steps
        # can find their dep installation
        install_dirs = []

        for m in projects:
            inst_dir = loader.get_project_install_dir(m)

            if m == manifest or args.test_dependencies:
                built_marker = os.path.join(inst_dir, ".built-by-getdeps")
                if not os.path.exists(built_marker):
                    print("project %s has not been built" % m.name)
                    # TODO: we could just go ahead and build it here, but I
                    # want to tackle that as part of adding build-for-test
                    # support.
                    return 1

                fetcher = loader.create_fetcher(m)
                src_dir = fetcher.get_src_dir()
                ctx = loader.ctx_gen.get_context(m.name)
                build_dir = loader.get_project_build_dir(m)
                builder = m.create_builder(
                    loader.build_opts, src_dir, build_dir, inst_dir, ctx, loader
                )
                builder.run_tests(
                    install_dirs,
                    schedule_type=args.schedule_type,
                    owner=args.test_owner,
                    test_filter=args.filter,
                    retry=args.retry,
                    no_testpilot=args.no_testpilot,
                )

            install_dirs.append(inst_dir)

    def setup_project_cmd_parser(self, parser):
        parser.add_argument(
            "--schedule-type", help="Indicates how the build was activated"
        )
        parser.add_argument("--test-owner", help="Owner for testpilot")
        parser.add_argument("--filter", help="Only run the tests matching the regex")
        parser.add_argument(
            "--retry",
            type=int,
            default=3,
            help="Number of immediate retries for failed tests "
            "(noop in continuous and testwarden runs)",
        )
        parser.add_argument(
            "--no-testpilot",
            help="Do not use Test Pilot even when available",
            action="store_true",
        )


@cmd("generate-github-actions", "generate a GitHub actions configuration")
class GenerateGitHubActionsCmd(ProjectCmdBase):
    # YAML fragment used for the workflow `on:` trigger when the workflow
    # should fire on every branch.
    RUN_ON_ALL = """ [push, pull_request]"""

    def run_project_cmd(self, args, loader, manifest):
        # One workflow file is emitted per requested platform.
        platforms = [
            HostType("linux", "ubuntu", "18"),
            HostType("darwin", None, None),
            HostType("windows", None, None),
        ]

        for p in platforms:
            if args.os_types and p.ostype not in args.os_types:
                continue
            self.write_job_for_platform(p, args)

    def get_run_on(self, args):
        """Return the YAML `on:` trigger clause for the generated workflow."""
        if args.run_on_all_branches:
            return self.RUN_ON_ALL
        return f"""
  push:
    branches:
    - {args.main_branch}
  pull_request:
    branches:
    - {args.main_branch}"""

    # TODO: Break up complex function
    def write_job_for_platform(self, platform, args):  # noqa: C901
        """Emit a complete GitHub Actions workflow YAML file for `platform`
        into args.output_dir."""
        build_opts = setup_build_options(args, platform)
        ctx_gen = build_opts.get_context_generator()
        loader = ManifestLoader(build_opts, ctx_gen)
        manifest = loader.load_manifest(args.project)
        manifest_ctx = loader.ctx_gen.get_context(manifest.name)
        run_on = self.get_run_on(args)

        # Some projects don't do anything "useful" as a leaf project, only
        # as a dep for a leaf project. Check for those here; we don't want
        # to waste the effort scheduling them on CI.
        # We do this by looking at the builder type in the manifest file
        # rather than creating a builder and checking its type because we
        # don't know enough to create the full builder instance here.
        if manifest.get("build", "builder", ctx=manifest_ctx) == "nop":
            return None

        # We want to be sure that we're running things with python 3
        # but python versioning is honestly a bit of a frustrating mess.
        # `python` may be version 2 or version 3 depending on the system.
        # python3 may not be a thing at all!
        # Assume an optimistic default
        py3 = "python3"

        if build_opts.is_linux():
            artifacts = "linux"
            runs_on = f"ubuntu-{args.ubuntu_version}"
        elif build_opts.is_windows():
            artifacts = "windows"
            # NOTE(review): the windows-2019 runner image has been retired by
            # GitHub; consider moving to a newer image — verify before relying
            # on regenerated workflows.
            runs_on = "windows-2019"
            # The windows runners are python 3 by default; python2.exe
            # is available if needed.
            py3 = "python"
        else:
            artifacts = "mac"
            runs_on = "macOS-latest"

        os.makedirs(args.output_dir, exist_ok=True)

        job_file_prefix = "getdeps_"
        if args.job_file_prefix:
            job_file_prefix = args.job_file_prefix

        output_file = os.path.join(args.output_dir, f"{job_file_prefix}{artifacts}.yml")

        if args.job_name_prefix:
            job_name = args.job_name_prefix + artifacts.capitalize()
        else:
            job_name = artifacts

        with open(output_file, "w") as out:
            # Deliberate line break here because the @ and the generated
            # symbols are meaningful to our internal tooling when they
            # appear in a single token
            out.write("# This file was @")
            out.write("generated by getdeps.py\n")
            out.write(
                f"""
name: {job_name}

on:{run_on}

jobs:
"""
            )

            getdepscmd = f"{py3} build/fbcode_builder/getdeps.py"

            out.write("  build:\n")
            out.write("    runs-on: %s\n" % runs_on)
            out.write("    steps:\n")

            if build_opts.is_windows():
                # cmake relies on BOOST_ROOT but GH deliberately don't set it in order
                # to avoid versioning issues:
                # https://github.com/actions/virtual-environments/issues/319
                # Instead, set the version we think we need; this is effectively
                # coupled with the boost manifest
                # This is the unusual syntax for setting an env var for the rest of
                # the steps in a workflow:
                # https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/
                out.write("    - name: Export boost environment\n")
                out.write(
                    '      run: "echo BOOST_ROOT=%BOOST_ROOT_1_78_0% >> %GITHUB_ENV%"\n'
                )
                out.write("      shell: cmd\n")

                # The git installation may not like long filenames, so tell it
                # that we want it to use them!
                out.write("    - name: Fix Git config\n")
                out.write("      run: git config --system core.longpaths true\n")
                out.write("    - name: Disable autocrlf\n")
                out.write("      run: git config --system core.autocrlf false\n")

            # NOTE(review): checkout@v2 (and upload-artifact@v2 below) are
            # deprecated by GitHub; confirm whether regenerated workflows
            # should be bumped to current major versions.
            out.write("    - uses: actions/checkout@v2\n")

            allow_sys_arg = ""
            if (
                build_opts.allow_system_packages
                and build_opts.host_type.get_package_manager()
            ):
                sudo_arg = "sudo "
                allow_sys_arg = " --allow-system-packages"
                if build_opts.host_type.get_package_manager() == "deb":
                    out.write("    - name: Update system package info\n")
                    out.write(f"      run: {sudo_arg}apt-get update\n")

                out.write("    - name: Install system deps\n")
                if build_opts.is_darwin():
                    # brew is installed as regular user
                    sudo_arg = ""
                out.write(
                    f"      run: {sudo_arg}python3 build/fbcode_builder/getdeps.py --allow-system-packages install-system-deps --recursive {manifest.name}\n"
                )

            projects = loader.manifests_in_dependency_order()

            main_repo_url = manifest.get_repo_url(manifest_ctx)
            has_same_repo_dep = False

            # First pass: fetch sources for all deps that live in other repos.
            for m in projects:
                if m != manifest:
                    if m.name == "rust":
                        out.write("    - name: Install Rust Stable\n")
                        out.write("      uses: dtolnay/rust-toolchain@stable\n")
                    else:
                        ctx = loader.ctx_gen.get_context(m.name)
                        if m.get_repo_url(ctx) != main_repo_url:
                            out.write("    - name: Fetch %s\n" % m.name)
                            out.write(
                                f"      run: {getdepscmd}{allow_sys_arg} fetch --no-tests {m.name}\n"
                            )

            # Second pass: build each dep.
            for m in projects:
                if m != manifest:
                    if m.name == "rust":
                        continue
                    else:
                        src_dir_arg = ""
                        ctx = loader.ctx_gen.get_context(m.name)
                        if main_repo_url and m.get_repo_url(ctx) == main_repo_url:
                            # Its in the same repo, so src-dir is also .
                            src_dir_arg = "--src-dir=. "
                            has_same_repo_dep = True
                        out.write("    - name: Build %s\n" % m.name)
                        out.write(
                            f"      run: {getdepscmd}{allow_sys_arg} build {src_dir_arg}--no-tests {m.name}\n"
                        )

            out.write("    - name: Build %s\n" % manifest.name)

            project_prefix = ""
            if not build_opts.is_windows():
                project_prefix = (
                    " --project-install-prefix %s:/usr/local" % manifest.name
                )

            # If we have dep from same repo, we already built it and don't want to rebuild it again
            no_deps_arg = ""
            if has_same_repo_dep:
                no_deps_arg = "--no-deps "

            out.write(
                f"      run: {getdepscmd}{allow_sys_arg} build {no_deps_arg}--src-dir=. {manifest.name} {project_prefix}\n"
            )

            out.write("    - name: Copy artifacts\n")
            if build_opts.is_linux():
                # Strip debug info from the binaries, but only on linux.
                # While the `strip` utility is also available on macOS,
                # attempting to strip there results in an error.
                # The `strip` utility is not available on Windows.
                strip = " --strip"
            else:
                strip = ""

            out.write(
                f"      run: {getdepscmd}{allow_sys_arg} fixup-dyn-deps{strip} "
                f"--src-dir=. {manifest.name} _artifacts/{artifacts} {project_prefix} "
                f"--final-install-prefix /usr/local\n"
            )

            out.write("    - uses: actions/upload-artifact@v2\n")
            out.write("      with:\n")
            out.write("        name: %s\n" % manifest.name)
            out.write("        path: _artifacts\n")

            if manifest.get("github.actions", "run_tests", ctx=manifest_ctx) != "off":
                out.write("    - name: Test %s\n" % manifest.name)
                out.write(
                    f"      run: {getdepscmd}{allow_sys_arg} test --src-dir=. {manifest.name} {project_prefix}\n"
                )

    def setup_project_cmd_parser(self, parser):
        parser.add_argument(
            "--disallow-system-packages",
            help="Disallow satisfying third party deps from installed system packages",
            action="store_true",
            default=False,
        )
        parser.add_argument(
            "--output-dir", help="The directory that will contain the yml files"
        )
        parser.add_argument(
            "--run-on-all-branches",
            action="store_true",
            help="Allow CI to fire on all branches - Handy for testing",
        )
        parser.add_argument(
            "--ubuntu-version", default="20.04", help="Version of Ubuntu to use"
        )
        parser.add_argument(
            "--main-branch",
            default="main",
            help="Main branch to trigger GitHub Action on",
        )
        parser.add_argument(
            "--os-type",
            help="Filter to just this OS type to run",
            choices=["linux", "darwin", "windows"],
            action="append",
            dest="os_types",
            default=[],
        )
        parser.add_argument(
            "--job-file-prefix",
            type=str,
            help="add a prefix to all job file names",
            default=None,
        )
        parser.add_argument(
            "--job-name-prefix",
            type=str,
            help="add a prefix to all job names",
            default=None,
        )


def get_arg_var_name(args):
    """Map an option's flag strings to the argparse attribute name it will
    populate (eg: ("--num-jobs",) -> "num_jobs")."""
    for arg in args:
        if arg.startswith("--"):
            return arg[2:].replace("-", "_")

    raise Exception("unable to determine argument variable name from %r" % (args,))


def parse_args():
    # We want to allow common arguments to be specified either before or after
    # the subcommand name.  In order to do this we add them to the main parser
    # and to subcommand parsers.  In order for this to work, we need to tell
    # argparse that the default value is SUPPRESS, so that the default values
    # from the subparser arguments won't override values set by the user from
    # the main parser.  We maintain our own list of desired defaults in the
    # common_defaults dictionary, and manually set those if the argument wasn't
    # present at all.
    common_args = argparse.ArgumentParser(add_help=False)
    # Maps attribute name -> real default; applied manually after parsing
    # because the registered default is argparse.SUPPRESS (see comment above).
    common_defaults = {}

    def add_common_arg(*args, **kwargs):
        # Register a shared option, remembering its real default value.
        var_name = get_arg_var_name(args)
        default_value = kwargs.pop("default", None)
        common_defaults[var_name] = default_value
        kwargs["default"] = argparse.SUPPRESS
        common_args.add_argument(*args, **kwargs)

    add_common_arg("--scratch-path", help="Where to maintain checkouts and build dirs")
    add_common_arg(
        "--vcvars-path", default=None, help="Path to the vcvarsall.bat on Windows."
    )
    add_common_arg(
        "--install-prefix",
        help=(
            "Where the final build products will be installed "
            "(default is [scratch-path]/installed)"
        ),
    )
    add_common_arg(
        "--num-jobs",
        type=int,
        help=(
            "Number of concurrent jobs to use while building. "
            "(default=number of cpu cores)"
        ),
    )
    add_common_arg(
        "--use-shipit",
        help="use the real ShipIt instead of the simple shipit transformer",
        action="store_true",
        default=False,
    )
    add_common_arg(
        "--facebook-internal",
        help="Setup the build context as an FB internal build",
        action="store_true",
        default=None,
    )
    add_common_arg(
        "--no-facebook-internal",
        help="Perform a non-FB internal build, even when in an fbsource repository",
        action="store_false",
        dest="facebook_internal",
    )
    add_common_arg(
        "--allow-system-packages",
        help="Allow satisfying third party deps from installed system packages",
        action="store_true",
        default=False,
    )
    add_common_arg(
        "-v",
        "--verbose",
        help="Print more output",
        action="store_true",
        default=False,
    )
    add_common_arg(
        "--lfs-path",
        help="Provide a parent directory for lfs when fbsource is unavailable",
        default=None,
    )

    ap = argparse.ArgumentParser(
        description="Get and build dependencies and projects", parents=[common_args]
    )
    sub = ap.add_subparsers(
        # metavar suppresses the long and ugly default list of subcommands on a
        # single line.  We still render the nicer list below where we would
        # have shown the nasty one.
        metavar="",
        title="Available commands",
        help="",
    )

    add_subcommands(sub, common_args)

    args = ap.parse_args()
    # Backfill the real defaults for any common option the user didn't pass
    # (their registered default was SUPPRESS, so they are absent from args).
    for var_name, default_value in common_defaults.items():
        if not hasattr(args, var_name):
            setattr(args, var_name, default_value)

    return ap, args


def main():
    """CLI entry point; returns a process exit code."""
    ap, args = parse_args()
    if getattr(args, "func", None) is None:
        # No subcommand was given.
        ap.print_help()
        return 0
    try:
        return args.func(args)
    except UsageError as exc:
        ap.error(str(exc))
        return 1
    except TransientFailure as exc:
        print("TransientFailure: %s" % str(exc))
        # This return code is treated as a retryable transient infrastructure
        # error by Facebook's internal CI, rather than eg: a build or code
        # related error that needs to be fixed before progress can be made.
        return 128
    except subprocess.CalledProcessError as exc:
        print("%s" % str(exc), file=sys.stderr)
        print("!! Failed", file=sys.stderr)
        return 1


if __name__ == "__main__":
    sys.exit(main())
hhvm/build/fbcode_builder/LICENSE
MIT License Copyright (c) Facebook, Inc. and its affiliates. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Markdown
hhvm/build/fbcode_builder/README.md
# Easy builds for Facebook projects

This directory contains tools designed to simplify continuous-integration
(and other builds) of Facebook open source projects.  In particular, this
helps manage builds for cross-project dependencies.

The main entry point is the `getdeps.py` script.  This script has several
subcommands, but the most notable is the `build` command.  This will download
and build all dependencies for a project, and then build the project itself.

## Deployment

This directory is copied literally into a number of different Facebook open
source repositories.  Any change made to code in this directory will
automatically be replicated by our open source tooling into all GitHub hosted
repositories that use `fbcode_builder`.  Typically this directory is copied
into the open source repositories as `build/fbcode_builder/`.

# Project Configuration Files

The `manifests` subdirectory contains configuration files for many different
projects, describing how to build each project.  These files also list
dependencies between projects, enabling `getdeps.py` to build all dependencies
for a project before building the project itself.

# Shared CMake utilities

Since this directory is copied into many Facebook open source repositories,
it is also used to help share some CMake utility files across projects.  The
`CMake/` subdirectory contains a number of `.cmake` files that are shared by
the CMake-based build systems across several different projects.

# Older Build Scripts

This directory also still contains a handful of older build scripts that
pre-date the current `getdeps.py` build system.  Most of the other `.py` files
in this top directory, apart from `getdeps.py` itself, are from this older
build system.  This older system is only used by a few remaining projects, and
new projects should generally use the newer `getdeps.py` script, by adding a
new configuration file in the `manifests/` subdirectory.
CMake
hhvm/build/fbcode_builder/CMake/FBBuildOptions.cmake
# Copyright (c) Facebook, Inc. and its affiliates.

# Declares the USE_STATIC_DEPS_ON_UNIX option and, when it is enabled on a
# unix host, restricts find_library() to static archives.
function (fb_activate_static_library_option)
  option(USE_STATIC_DEPS_ON_UNIX
    "If enabled, use static dependencies on unix systems. This is generally discouraged."
    OFF
  )
  # Mark USE_STATIC_DEPS_ON_UNIX as an "advanced" option, since enabling it
  # is generally discouraged.
  mark_as_advanced(USE_STATIC_DEPS_ON_UNIX)

  if(UNIX AND USE_STATIC_DEPS_ON_UNIX)
    # Only consider ".a" archives when resolving libraries; exported to the
    # caller's scope so it affects subsequent find_library() calls.
    SET(CMAKE_FIND_LIBRARY_SUFFIXES ".a" PARENT_SCOPE)
  endif()
endfunction()
CMake
hhvm/build/fbcode_builder/CMake/FBCMakeParseArgs.cmake
# # Copyright (c) Facebook, Inc. and its affiliates. # # Helper function for parsing arguments to a CMake function. # # This function is very similar to CMake's built-in cmake_parse_arguments() # function, with some improvements: # - This function correctly handles empty arguments. (cmake_parse_arguments() # ignores empty arguments.) # - If a multi-value argument is specified more than once, the subsequent # arguments are appended to the original list rather than replacing it. e.g. # if "SOURCES" is a multi-value argument, and the argument list contains # "SOURCES a b c SOURCES x y z" then the resulting value for SOURCES will be # "a;b;c;x;y;z" rather than "x;y;z" # - This function errors out by default on unrecognized arguments. You can # pass in an extra "ALLOW_UNPARSED_ARGS" argument to make it behave like # cmake_parse_arguments(), and return the unparsed arguments in a # <prefix>_UNPARSED_ARGUMENTS variable instead. # # It does look like cmake_parse_arguments() handled empty arguments correctly # from CMake 3.0 through 3.3, but it seems like this was probably broken when # it was turned into a built-in function in CMake 3.4. Here is discussion and # patches that fixed this behavior prior to CMake 3.0: # https://cmake.org/pipermail/cmake-developers/2013-November/020607.html # # The one downside to this function over the built-in cmake_parse_arguments() # is that I don't think we can achieve the PARSE_ARGV behavior in a non-builtin # function, so we can't properly handle arguments that contain ";". CMake will # treat the ";" characters as list element separators, and treat it as multiple # separate arguments. 
#
function(fb_cmake_parse_args PREFIX OPTIONS ONE_VALUE_ARGS MULTI_VALUE_ARGS ARGS)
  # Optional trailing arguments: only ALLOW_UNPARSED_ARGS is recognized.
  foreach(option IN LISTS ARGN)
    if ("${option}" STREQUAL "ALLOW_UNPARSED_ARGS")
      set(ALLOW_UNPARSED_ARGS TRUE)
    else()
      message(
        FATAL_ERROR
        "unknown optional argument for fb_cmake_parse_args(): ${option}"
      )
    endif()
  endforeach()

  # Define all options as FALSE in the parent scope to start with
  foreach(var_name IN LISTS OPTIONS)
    set("${PREFIX}_${var_name}" "FALSE" PARENT_SCOPE)
  endforeach()

  # TODO: We aren't extremely strict about error checking for one-value
  # arguments here.  e.g., we don't complain if a one-value argument is
  # followed by another option/one-value/multi-value name rather than an
  # argument.  We also don't complain if a one-value argument is the last
  # argument and isn't followed by a value.

  list(APPEND all_args ${ONE_VALUE_ARGS})
  list(APPEND all_args ${MULTI_VALUE_ARGS})
  set(current_variable)
  set(unparsed_args)
  # Walk the argument list, accumulating values into VALUES_<name> lists and
  # recording SEEN_<name> flags as argument names are encountered.
  foreach(arg IN LISTS ARGS)
    list(FIND OPTIONS "${arg}" opt_index)
    if("${opt_index}" EQUAL -1)
      list(FIND all_args "${arg}" arg_index)
      if("${arg_index}" EQUAL -1)
        # This argument does not match an argument name,
        # must be an argument value
        if("${current_variable}" STREQUAL "")
          list(APPEND unparsed_args "${arg}")
        else()
          # Ugh, CMake lists have a pretty fundamental flaw: they cannot
          # distinguish between an empty list and a list with a single empty
          # element.  We track our own SEEN_VALUES_arg setting to help
          # distinguish this and behave properly here.
          if ("${SEEN_${current_variable}}" AND
              "${${current_variable}}" STREQUAL "")
            set("${current_variable}" ";${arg}")
          else()
            list(APPEND "${current_variable}" "${arg}")
          endif()
          set("SEEN_${current_variable}" TRUE)
        endif()
      else()
        # We found a single- or multi-value argument name
        set(current_variable "VALUES_${arg}")
        set("SEEN_${arg}" TRUE)
      endif()
    else()
      # We found an option variable
      set("${PREFIX}_${arg}" "TRUE" PARENT_SCOPE)
      set(current_variable)
    endif()
  endforeach()

  # Export each one-value argument to the parent scope, enforcing exactly one
  # value per seen argument name.
  foreach(arg_name IN LISTS ONE_VALUE_ARGS)
    if(NOT "${SEEN_${arg_name}}")
      unset("${PREFIX}_${arg_name}" PARENT_SCOPE)
    elseif(NOT "${SEEN_VALUES_${arg_name}}")
      # If the argument was seen but a value wasn't specified, error out.
      # We require exactly one value to be specified.
      message(
        FATAL_ERROR "argument ${arg_name} was specified without a value"
      )
    else()
      list(LENGTH "VALUES_${arg_name}" num_args)
      if("${num_args}" EQUAL 0)
        # We know an argument was specified and that we called list(APPEND).
        # If CMake thinks the list is empty that means there is really a single
        # empty element in the list.
        set("${PREFIX}_${arg_name}" "" PARENT_SCOPE)
      elseif("${num_args}" EQUAL 1)
        list(GET "VALUES_${arg_name}" 0 arg_value)
        set("${PREFIX}_${arg_name}" "${arg_value}" PARENT_SCOPE)
      else()
        message(
          FATAL_ERROR "too many arguments specified for ${arg_name}: "
          "${VALUES_${arg_name}}"
        )
      endif()
    endif()
  endforeach()

  # Export each multi-value argument list to the parent scope.
  foreach(arg_name IN LISTS MULTI_VALUE_ARGS)
    # If this argument name was never seen, then unset the parent scope
    if (NOT "${SEEN_${arg_name}}")
      unset("${PREFIX}_${arg_name}" PARENT_SCOPE)
    else()
      # TODO: Our caller still won't be able to distinguish between an empty
      # list and a list with a single empty element.  We can tell which is
      # which, but CMake lists don't make it easy to show this to our caller.
      set("${PREFIX}_${arg_name}" "${VALUES_${arg_name}}" PARENT_SCOPE)
    endif()
  endforeach()

  # By default we fatal out on unparsed arguments, but return them to the
  # caller if ALLOW_UNPARSED_ARGS was specified.
  if (DEFINED unparsed_args)
    if ("${ALLOW_UNPARSED_ARGS}")
      set("${PREFIX}_UNPARSED_ARGUMENTS" "${unparsed_args}" PARENT_SCOPE)
    else()
      message(FATAL_ERROR "unrecognized arguments: ${unparsed_args}")
    endif()
  endif()
endfunction()
CMake
hhvm/build/fbcode_builder/CMake/FBCompilerSettings.cmake
# Copyright (c) Facebook, Inc. and its affiliates.

# This file applies common compiler settings that are shared across
# a number of Facebook opensource projects.
# Please use caution and your best judgement before making changes
# to these shared compiler settings in order to avoid accidentally
# breaking a build in another project!

# Dispatch to the platform-specific settings module.
if (WIN32)
  include(FBCompilerSettingsMSVC)
else()
  include(FBCompilerSettingsUnix)
endif()
CMake
hhvm/build/fbcode_builder/CMake/FBCompilerSettingsMSVC.cmake
# Copyright (c) Facebook, Inc. and its affiliates.

# This file applies common compiler settings that are shared across
# a number of Facebook opensource projects.
# Please use caution and your best judgement before making changes
# to these shared compiler settings in order to avoid accidentally
# breaking a build in another project!

# Silence a benign MSVC diagnostic that fires pervasively with virtual
# inheritance in these codebases.
add_compile_options(
  /wd4250 # 'class1' : inherits 'class2::member' via dominance
)
CMake
hhvm/build/fbcode_builder/CMake/FBCompilerSettingsUnix.cmake
# Copyright (c) Facebook, Inc. and its affiliates.

# This file applies common compiler settings that are shared across
# a number of Facebook opensource projects.
# Please use caution and your best judgement before making changes
# to these shared compiler settings in order to avoid accidentally
# breaking a build in another project!

# Debug info plus a fairly strict warning set; deprecation warnings are
# disabled because these projects still reference some deprecated APIs.
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -Wall -Wextra -Wno-deprecated -Wno-deprecated-declarations")
CMake
hhvm/build/fbcode_builder/CMake/FBPythonBinary.cmake
# Copyright (c) Facebook, Inc. and its affiliates.

include(FBCMakeParseArgs)

#
# This file contains helper functions for building self-executing Python
# binaries.
#
# This is somewhat different than typical python installation with
# distutils/pip/virtualenv/etc.  We primarily want to build a standalone
# executable, isolated from other Python packages on the system.  We don't
# want to install files into the standard library python paths.  This is more
# similar to PEX (https://github.com/pantsbuild/pex) and XAR
# (https://github.com/facebookincubator/xar).  (In the future it would be
# nice to update this code to also support directly generating XAR files if
# XAR is available.)
#
# We also want to be able to easily define "libraries" of python files that
# can be shared and re-used between these standalone python executables, and
# can be shared across projects in different repositories.  This means that we
# do need a way to "install" libraries so that they are visible to CMake
# builds in other repositories, without actually installing them in the
# standard python library paths.
#

# If the caller has not already found Python, do so now.
# If we fail to find python now we won't fail immediately, but
# add_fb_python_executable() or add_fb_python_library() will fatal out if they
# are used.
if(NOT TARGET Python3::Interpreter)
  # CMake 3.12+ ships with a FindPython3.cmake module.  Try using it first.
  # We find with QUIET here, since otherwise this generates some noisy warnings
  # on versions of CMake before 3.12
  if (WIN32)
    # On Windows we need both the Interpreter as well as the Development
    # libraries.
    find_package(Python3 COMPONENTS Interpreter Development QUIET)
  else()
    find_package(Python3 COMPONENTS Interpreter QUIET)
  endif()
  if(Python3_Interpreter_FOUND)
    message(STATUS "Found Python 3: ${Python3_EXECUTABLE}")
  else()
    # Try with the FindPythonInterp.cmake module available in older CMake
    # versions.  Check to see if the caller has already searched for this
    # themselves first.
    if(NOT PYTHONINTERP_FOUND)
      set(Python_ADDITIONAL_VERSIONS 3 3.6 3.5 3.4 3.3 3.2 3.1)
      find_package(PythonInterp)
      # TODO: On Windows we require the Python libraries as well.
      # We currently do not search for them on this code path.
      # For now we require building with CMake 3.12+ on Windows, so that the
      # FindPython3 code path above is available.
    endif()
    if(PYTHONINTERP_FOUND)
      if("${PYTHON_VERSION_MAJOR}" GREATER_EQUAL 3)
        set(Python3_EXECUTABLE "${PYTHON_EXECUTABLE}")
        # Provide the Python3::Interpreter name that the rest of this file
        # checks for.
        add_custom_target(Python3::Interpreter)
      else()
        # Defer the failure; reported later if the helpers are actually used.
        string(
          CONCAT FBPY_FIND_PYTHON_ERR
          "found Python ${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}, "
          "but need Python 3"
        )
      endif()
    endif()
  endif()
endif()

# Find our helper program.
# We typically install this in the same directory as this .cmake file.
find_program(
  FB_MAKE_PYTHON_ARCHIVE "make_fbpy_archive.py"
  PATHS ${CMAKE_MODULE_PATH}
)
set(FB_PY_TEST_MAIN "${CMAKE_CURRENT_LIST_DIR}/fb_py_test_main.py")
set(
  FB_PY_TEST_DISCOVER_SCRIPT
  "${CMAKE_CURRENT_LIST_DIR}/FBPythonTestAddTests.cmake"
)
set(
  FB_PY_WIN_MAIN_C
  "${CMAKE_CURRENT_LIST_DIR}/fb_py_win_main.c"
)

# An option to control the default installation location for
# install_fb_python_library().  This is relative to ${CMAKE_INSTALL_PREFIX}
set(
  FBPY_LIB_INSTALL_DIR "lib/fb-py-libs" CACHE STRING
  "The subdirectory where FB python libraries should be installed"
)

#
# Build a self-executing python binary.
#
# This accepts the same arguments as add_fb_python_library().
#
# In addition, a MAIN_MODULE argument is accepted.  This argument specifies
# which module should be started as the __main__ module when the executable is
# run.  If left unspecified, a __main__.py script must be present in the
# manifest.
#
function(add_fb_python_executable TARGET)
  fb_py_check_available()

  # Parse the arguments
  set(one_value_args BASE_DIR NAMESPACE MAIN_MODULE TYPE)
  set(multi_value_args SOURCES DEPENDS)
  fb_cmake_parse_args(
    ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}"
  )
  fb_py_process_default_args(ARG_NAMESPACE ARG_BASE_DIR)

  # Use add_fb_python_library() to perform most of our source handling
  add_fb_python_library(
    "${TARGET}.main_lib"
    BASE_DIR "${ARG_BASE_DIR}"
    NAMESPACE "${ARG_NAMESPACE}"
    SOURCES ${ARG_SOURCES}
    DEPENDS ${ARG_DEPENDS}
  )

  set(
    manifest_files
    "$<TARGET_PROPERTY:${TARGET}.main_lib.py_lib,INTERFACE_INCLUDE_DIRECTORIES>"
  )
  set(
    source_files
    "$<TARGET_PROPERTY:${TARGET}.main_lib.py_lib,INTERFACE_SOURCES>"
  )

  # The command to build the executable archive.
  #
  # If we are using CMake 3.8+ we can use COMMAND_EXPAND_LISTS.
  # CMP0067 isn't really the policy we care about, but seems like the best way
  # to check if we are running 3.8+.
  if (POLICY CMP0067)
    set(extra_cmd_params COMMAND_EXPAND_LISTS)
    set(make_py_args "${manifest_files}")
  else()
    set(extra_cmd_params)
    set(make_py_args --manifest-separator "::" "$<JOIN:${manifest_files},::>")
  endif()

  set(output_file "${TARGET}${CMAKE_EXECUTABLE_SUFFIX}")
  if(WIN32)
    # On Windows the zipapp is later glued onto a native launcher (below).
    set(zipapp_output "${TARGET}.py_zipapp")
  else()
    set(zipapp_output "${output_file}")
  endif()
  set(zipapp_output_file "${zipapp_output}")

  set(is_dir_output FALSE)
  if(DEFINED ARG_TYPE)
    list(APPEND make_py_args "--type" "${ARG_TYPE}")
    if ("${ARG_TYPE}" STREQUAL "dir")
      set(is_dir_output TRUE)
      # CMake doesn't really seem to like having a directory specified as an
      # output; specify the __main__.py file as the output instead.
      set(zipapp_output_file "${zipapp_output}/__main__.py")
      list(APPEND extra_cmd_params
        COMMAND "${CMAKE_COMMAND}" -E remove_directory "${zipapp_output}"
      )
    endif()
  endif()

  if(DEFINED ARG_MAIN_MODULE)
    list(APPEND make_py_args "--main" "${ARG_MAIN_MODULE}")
  endif()

  add_custom_command(
    OUTPUT "${zipapp_output_file}"
    ${extra_cmd_params}
    COMMAND
      "${Python3_EXECUTABLE}" "${FB_MAKE_PYTHON_ARCHIVE}"
      -o "${zipapp_output}"
      ${make_py_args}
    DEPENDS
      ${source_files}
      "${TARGET}.main_lib.py_sources_built"
      "${FB_MAKE_PYTHON_ARCHIVE}"
  )

  if(WIN32)
    if(is_dir_output)
      # TODO: generate a main executable that will invoke Python3
      # with the correct main module inside the output directory
    else()
      add_executable("${TARGET}.winmain" "${FB_PY_WIN_MAIN_C}")
      target_link_libraries("${TARGET}.winmain" Python3::Python)
      # The Python3::Python target doesn't seem to be set up completely
      # correctly on Windows for some reason, and we have to explicitly add
      # ${Python3_LIBRARY_DIRS} to the target link directories.
      target_link_directories(
        "${TARGET}.winmain" PUBLIC ${Python3_LIBRARY_DIRS}
      )
      # Concatenate the native launcher and the zipapp into the final .exe.
      add_custom_command(
        OUTPUT "${output_file}"
        DEPENDS "${TARGET}.winmain" "${zipapp_output_file}"
        COMMAND
          "cmd.exe" "/c" "copy" "/b"
          "${TARGET}.winmain${CMAKE_EXECUTABLE_SUFFIX}+${zipapp_output}"
          "${output_file}"
      )
    endif()
  endif()

  # Add an "ALL" target that depends on force ${TARGET},
  # so that ${TARGET} will be included in the default list of build targets.
  add_custom_target("${TARGET}.GEN_PY_EXE" ALL DEPENDS "${output_file}")

  # Allow resolving the executable path for the target that we generate
  # via a generator expression like:
  # "WATCHMAN_WAIT_PATH=$<TARGET_PROPERTY:watchman-wait.GEN_PY_EXE,EXECUTABLE>"
  set_property(TARGET "${TARGET}.GEN_PY_EXE" PROPERTY EXECUTABLE
               "${CMAKE_CURRENT_BINARY_DIR}/${output_file}")
endfunction()

# Define a python unittest executable.
# The executable is built using add_fb_python_executable and has the
# following differences:
#
# Each of the source files specified in SOURCES will be imported
# and have unittest discovery performed upon them.
# Those sources will be imported in the top level namespace.
#
# The ENV argument allows specifying a list of "KEY=VALUE"
# pairs that will be used by the test runner to set up the environment
# in the child process prior to running the test.  This is useful for
# passing additional configuration to the test.
function(add_fb_python_unittest TARGET)
  # Parse the arguments
  set(multi_value_args SOURCES DEPENDS ENV PROPERTIES)
  set(
    one_value_args
    WORKING_DIRECTORY
    BASE_DIR
    NAMESPACE
    TEST_LIST
    DISCOVERY_TIMEOUT
  )
  fb_cmake_parse_args(
    ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}"
  )
  fb_py_process_default_args(ARG_NAMESPACE ARG_BASE_DIR)
  if(NOT ARG_WORKING_DIRECTORY)
    # Default the working directory to the current binary directory.
    # This matches the default behavior of add_test() and other standard
    # test functions like gtest_discover_tests()
    set(ARG_WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}")
  endif()
  if(NOT ARG_TEST_LIST)
    set(ARG_TEST_LIST "${TARGET}_TESTS")
  endif()
  if(NOT ARG_DISCOVERY_TIMEOUT)
    # NOTE(review): ARG_DISCOVERY_TIMEOUT is parsed and defaulted here but is
    # never forwarded to the discovery command below — confirm whether it
    # should be passed as a -D argument to the discover script.
    set(ARG_DISCOVERY_TIMEOUT 5)
  endif()

  # Tell our test program the list of modules to scan for tests.
  # We scan all modules directly listed in our SOURCES argument, and skip
  # modules that came from dependencies in the DEPENDS list.
  #
  # This is written into a __test_modules__.py module that the test runner
  # will look at.
  set(
    test_modules_path
    "${CMAKE_CURRENT_BINARY_DIR}/${TARGET}_test_modules.py"
  )
  file(WRITE "${test_modules_path}" "TEST_MODULES = [\n")
  string(REPLACE "." "/" namespace_dir "${ARG_NAMESPACE}")
  if (NOT "${namespace_dir}" STREQUAL "")
    set(namespace_dir "${namespace_dir}/")
  endif()
  set(test_modules)
  foreach(src_path IN LISTS ARG_SOURCES)
    # Convert each source path to its dotted module name (strip .py, turn
    # path separators into dots).
    fb_py_compute_dest_path(
      abs_source dest_path
      "${src_path}" "${namespace_dir}" "${ARG_BASE_DIR}"
    )
    string(REPLACE "/" "." module_name "${dest_path}")
    string(REGEX REPLACE "\\.py$" "" module_name "${module_name}")
    list(APPEND test_modules "${module_name}")
    file(APPEND "${test_modules_path}" "  '${module_name}',\n")
  endforeach()
  file(APPEND "${test_modules_path}" "]\n")

  # The __main__ is provided by our runner wrapper/bootstrap
  list(APPEND ARG_SOURCES "${FB_PY_TEST_MAIN}=__main__.py")
  list(APPEND ARG_SOURCES "${test_modules_path}=__test_modules__.py")

  add_fb_python_executable(
    "${TARGET}"
    NAMESPACE "${ARG_NAMESPACE}"
    BASE_DIR "${ARG_BASE_DIR}"
    SOURCES ${ARG_SOURCES}
    DEPENDS ${ARG_DEPENDS}
  )

  # Run test discovery after the test executable is built.
  # This logic is based on the code for gtest_discover_tests()
  set(ctest_file_base "${CMAKE_CURRENT_BINARY_DIR}/${TARGET}")
  set(ctest_include_file "${ctest_file_base}_include.cmake")
  set(ctest_tests_file "${ctest_file_base}_tests.cmake")
  add_custom_command(
    TARGET "${TARGET}.GEN_PY_EXE" POST_BUILD
    BYPRODUCTS "${ctest_tests_file}"
    COMMAND
      "${CMAKE_COMMAND}"
      -D "TEST_TARGET=${TARGET}"
      -D "TEST_INTERPRETER=${Python3_EXECUTABLE}"
      -D "TEST_ENV=${ARG_ENV}"
      -D "TEST_EXECUTABLE=$<TARGET_PROPERTY:${TARGET}.GEN_PY_EXE,EXECUTABLE>"
      -D "TEST_WORKING_DIR=${ARG_WORKING_DIRECTORY}"
      -D "TEST_LIST=${ARG_TEST_LIST}"
      -D "TEST_PREFIX=${TARGET}::"
      -D "TEST_PROPERTIES=${ARG_PROPERTIES}"
      -D "CTEST_FILE=${ctest_tests_file}"
      -P "${FB_PY_TEST_DISCOVER_SCRIPT}"
    VERBATIM
  )

  # The include file lets CTest pick up the generated test list, with a
  # placeholder failing test if the executable was never built.
  file(
    WRITE "${ctest_include_file}"
    "if(EXISTS \"${ctest_tests_file}\")\n"
    "  include(\"${ctest_tests_file}\")\n"
    "else()\n"
    "  add_test(\"${TARGET}_NOT_BUILT\" \"${TARGET}_NOT_BUILT\")\n"
    "endif()\n"
  )
  set_property(
    DIRECTORY
    APPEND PROPERTY TEST_INCLUDE_FILES "${ctest_include_file}"
  )
endfunction()

#
#
# Define a python library.
#
# If you want to install a python library generated from this rule note that
# you need to use install_fb_python_library() rather than CMake's built-in
# install() function.  This will make it available for other downstream
# projects to use in their add_fb_python_executable() and
# add_fb_python_library() calls.  (You do still need to use `install(EXPORT)`
# later to install the CMake exports.)
#
# Parameters:
# - BASE_DIR <dir>:
#   The base directory path to strip off from each source path.  All source
#   files must be inside this directory.  If not specified it defaults to
#   ${CMAKE_CURRENT_SOURCE_DIR}.
# - NAMESPACE <namespace>:
#   The destination namespace where these files should be installed in python
#   binaries.  If not specified, this defaults to the current relative path of
#   ${CMAKE_CURRENT_SOURCE_DIR} inside ${CMAKE_SOURCE_DIR}.  e.g., a python
#   library defined in the directory repo_root/foo/bar will use a default
#   namespace of "foo.bar"
# - SOURCES <src1> <...>:
#   The python source files.
#   You may optionally specify as source using the form: PATH=ALIAS where
#   PATH is a relative path in the source tree and ALIAS is the relative
#   path into which PATH should be rewritten.  This is useful for mapping
#   an executable script to the main module in a python executable.
#   e.g.: `python/bin/watchman-wait=__main__.py`
# - DEPENDS <target1> <...>:
#   Other python libraries that this one depends on.
# - INSTALL_DIR <dir>:
#   The directory where this library should be installed.
#   install_fb_python_library() must still be called later to perform the
#   installation.  If a relative path is given it will be treated relative to
#   ${CMAKE_INSTALL_PREFIX}
#
# CMake is unfortunately pretty crappy at being able to define custom build
# rules & behaviors.  It doesn't support transitive property propagation
# between custom targets; only the built-in add_executable() and add_library()
# targets support transitive properties.
#
# We hack around this janky CMake behavior by (ab)using interface libraries to
# propagate some of the data we want between targets, without actually
# generating a C library.
#
# add_fb_python_library(SOMELIB) generates the following things:
# - An INTERFACE library rule named SOMELIB.py_lib which tracks some
#   information about transitive dependencies:
#   - the transitive set of source files in the INTERFACE_SOURCES property
#   - the transitive set of manifest files that this library depends on in
#     the INTERFACE_INCLUDE_DIRECTORIES property.
# - A custom command that generates a SOMELIB.manifest file.
#   This file contains the mapping of source files to desired destination
#   locations in executables that depend on this library.  This manifest file
#   will then be read at build-time in order to build executables.
#
function(add_fb_python_library LIB_NAME)
  fb_py_check_available()

  # Parse the arguments
  # We use fb_cmake_parse_args() rather than cmake_parse_arguments() since
  # cmake_parse_arguments() does not handle empty arguments, and it is common
  # for callers to want to specify an empty NAMESPACE parameter.
  set(one_value_args BASE_DIR NAMESPACE INSTALL_DIR)
  set(multi_value_args SOURCES DEPENDS)
  fb_cmake_parse_args(
    ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}"
  )
  fb_py_process_default_args(ARG_NAMESPACE ARG_BASE_DIR)

  string(REPLACE "." "/" namespace_dir "${ARG_NAMESPACE}")
  if (NOT "${namespace_dir}" STREQUAL "")
    set(namespace_dir "${namespace_dir}/")
  endif()

  if(NOT DEFINED ARG_INSTALL_DIR)
    set(install_dir "${FBPY_LIB_INSTALL_DIR}/")
  elseif("${ARG_INSTALL_DIR}" STREQUAL "")
    set(install_dir "")
  else()
    set(install_dir "${ARG_INSTALL_DIR}/")
  endif()

  # message(STATUS "fb py library ${LIB_NAME}: "
  #         "NS=${namespace_dir} BASE=${ARG_BASE_DIR}")

  # TODO: In the future it would be nice to support pre-compiling the source
  # files.  We could emit a rule to compile each source file and emit a
  # .pyc/.pyo file here, and then have the manifest reference the pyc/pyo
  # files.

  # Define a library target to help pass around information about the library,
  # and propagate dependency information.
  #
  # CMake makes a lot of assumptions that libraries are C++ libraries.  To help
  # avoid confusion we name our target "${LIB_NAME}.py_lib" rather than just
  # "${LIB_NAME}".  This helps avoid confusion if callers try to use
  # "${LIB_NAME}" on their own as a target name.  (e.g., attempting to install
  # it directly with install(TARGETS) won't work.  Callers must use
  # install_fb_python_library() instead.)
  add_library("${LIB_NAME}.py_lib" INTERFACE)

  # Emit the manifest file.
  #
  # We write the manifest file to a temporary path first, then copy it with
  # configure_file(COPYONLY).  This is necessary to get CMake to understand
  # that "${manifest_path}" is generated by the CMake configure phase,
  # and allow using it as a dependency for add_custom_command().
  # (https://gitlab.kitware.com/cmake/cmake/issues/16367)
  set(manifest_path "${CMAKE_CURRENT_BINARY_DIR}/${LIB_NAME}.manifest")
  set(tmp_manifest "${manifest_path}.tmp")
  file(WRITE "${tmp_manifest}" "FBPY_MANIFEST 1\n")
  set(abs_sources)
  foreach(src_path IN LISTS ARG_SOURCES)
    fb_py_compute_dest_path(
      abs_source dest_path
      "${src_path}" "${namespace_dir}" "${ARG_BASE_DIR}"
    )
    list(APPEND abs_sources "${abs_source}")
    target_sources(
      "${LIB_NAME}.py_lib" INTERFACE
      "$<BUILD_INTERFACE:${abs_source}>"
      "$<INSTALL_INTERFACE:${install_dir}${LIB_NAME}/${dest_path}>"
    )
    file(
      APPEND "${tmp_manifest}"
      "${abs_source} :: ${dest_path}\n"
    )
  endforeach()
  configure_file("${tmp_manifest}" "${manifest_path}" COPYONLY)

  target_include_directories(
    "${LIB_NAME}.py_lib" INTERFACE
    "$<BUILD_INTERFACE:${manifest_path}>"
    "$<INSTALL_INTERFACE:${install_dir}${LIB_NAME}.manifest>"
  )

  # Add a target that depends on all of the source files.
  # This is needed in case some of the source files are generated.  This will
  # ensure that these source files are brought up-to-date before we build
  # any python binaries that depend on this library.
  add_custom_target("${LIB_NAME}.py_sources_built" DEPENDS ${abs_sources})
  add_dependencies("${LIB_NAME}.py_lib" "${LIB_NAME}.py_sources_built")

  # Hook up library dependencies, and also make the *.py_sources_built target
  # depend on the sources for all of our dependencies also being up-to-date.
  foreach(dep IN LISTS ARG_DEPENDS)
    target_link_libraries("${LIB_NAME}.py_lib" INTERFACE "${dep}.py_lib")

    # Mark that our .py_sources_built target depends on each of our dependent
    # libraries.  This serves two functions:
    # - This causes CMake to generate an error message if one of the
    #   dependencies is never defined.  The target_link_libraries() call above
    #   won't complain if one of the dependencies doesn't exist (since it is
    #   intended to allow passing in file names for plain library files rather
    #   than just targets).
    # - It ensures that sources for our dependencies are built before any
    #   executable that depends on us.  Note that we depend on "${dep}.py_lib"
    #   rather than "${dep}.py_sources_built" for this purpose because the
    #   ".py_sources_built" target won't be available for imported targets.
    add_dependencies("${LIB_NAME}.py_sources_built" "${dep}.py_lib")
  endforeach()

  # Add a custom command to help with library installation, in case
  # install_fb_python_library() is called later for this library.
  # add_custom_command() only works with file dependencies defined in the same
  # CMakeLists.txt file, so we want to make sure this is defined here, rather
  # then where install_fb_python_library() is called.
  # This command won't be run by default, but will only be run if it is needed
  # by a subsequent install_fb_python_library() call.
  #
  # This command copies the library contents into the build directory.
  # It would be nicer if we could skip this intermediate copy, and just run
  # make_fbpy_archive.py at install time to copy them directly to the desired
  # installation directory.  Unfortunately this is difficult to do, and seems
  # to interfere with some of the CMake code that wants to generate a manifest
  # of installed files.
  set(build_install_dir "${CMAKE_CURRENT_BINARY_DIR}/${LIB_NAME}.lib_install")
  add_custom_command(
    OUTPUT "${build_install_dir}/${LIB_NAME}.manifest"
    COMMAND
      "${CMAKE_COMMAND}" -E remove_directory "${build_install_dir}"
    COMMAND
      "${Python3_EXECUTABLE}" "${FB_MAKE_PYTHON_ARCHIVE}" --type lib-install
      --install-dir "${LIB_NAME}"
      -o "${build_install_dir}/${LIB_NAME}"
      "${manifest_path}"
    DEPENDS
      "${abs_sources}"
      "${manifest_path}"
      "${FB_MAKE_PYTHON_ARCHIVE}"
  )
  add_custom_target(
    "${LIB_NAME}.py_lib_install"
    DEPENDS "${build_install_dir}/${LIB_NAME}.manifest"
  )

  # Set some properties to pass through the install paths to
  # install_fb_python_library()
  #
  # Passing through ${build_install_dir} allows install_fb_python_library()
  # to work even if used from a different CMakeLists.txt file than where
  # add_fb_python_library() was called (i.e. such that
  # ${CMAKE_CURRENT_BINARY_DIR} is different between the two calls).
  set(abs_install_dir "${install_dir}")
  if(NOT IS_ABSOLUTE "${abs_install_dir}")
    set(abs_install_dir "${CMAKE_INSTALL_PREFIX}/${abs_install_dir}")
  endif()
  string(REGEX REPLACE "/$" "" abs_install_dir "${abs_install_dir}")
  set_target_properties(
    "${LIB_NAME}.py_lib_install"
    PROPERTIES
    INSTALL_DIR "${abs_install_dir}"
    BUILD_INSTALL_DIR "${build_install_dir}"
  )
endfunction()

#
# Install an FB-style packaged python binary.
#
# - DESTINATION <export-name>:
#   Associate the installed target files with the given export-name.
#
function(install_fb_python_executable TARGET)
  # Parse the arguments
  set(one_value_args DESTINATION)
  set(multi_value_args)
  fb_cmake_parse_args(
    ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}"
  )
  if(NOT DEFINED ARG_DESTINATION)
    set(ARG_DESTINATION bin)
  endif()

  # The EXECUTABLE property was set on the .GEN_PY_EXE target by
  # add_fb_python_executable(); resolve it at install time.
  install(
    PROGRAMS "$<TARGET_PROPERTY:${TARGET}.GEN_PY_EXE,EXECUTABLE>"
    DESTINATION "${ARG_DESTINATION}"
  )
endfunction()

#
# Install a python library.
#
# - EXPORT <export-name>:
#   Associate the installed target files with the given export-name.
#
# Note that unlike the built-in CMake install() function we do not accept a
# DESTINATION parameter.  Instead, use the INSTALL_DIR parameter to
# add_fb_python_library() to set the installation location.
#
function(install_fb_python_library LIB_NAME)
  set(one_value_args EXPORT)
  fb_cmake_parse_args(ARG "" "${one_value_args}" "" "${ARGN}")

  # Export our "${LIB_NAME}.py_lib" target so that it will be available to
  # downstream projects in our installed CMake config files.
  if(DEFINED ARG_EXPORT)
    install(TARGETS "${LIB_NAME}.py_lib" EXPORT "${ARG_EXPORT}")
  endif()

  # add_fb_python_library() emits a .py_lib_install target that will prepare
  # the installation directory.  However, it isn't part of the "ALL" target and
  # therefore isn't built by default.
  #
  # Make sure the ALL target depends on it now.  We have to do this by
  # introducing yet another custom target.
  # Add it as a dependency to the ALL target now.
  add_custom_target("${LIB_NAME}.py_lib_install_all" ALL)
  add_dependencies(
    "${LIB_NAME}.py_lib_install_all" "${LIB_NAME}.py_lib_install"
  )

  # Copy the intermediate install directory generated at build time into
  # the desired install location.
  get_target_property(dest_dir "${LIB_NAME}.py_lib_install" "INSTALL_DIR")
  get_target_property(
    build_install_dir "${LIB_NAME}.py_lib_install" "BUILD_INSTALL_DIR"
  )
  install(
    DIRECTORY "${build_install_dir}/${LIB_NAME}"
    DESTINATION "${dest_dir}"
  )
  install(
    FILES "${build_install_dir}/${LIB_NAME}.manifest"
    DESTINATION "${dest_dir}"
  )
endfunction()

# Helper macro to process the BASE_DIR and NAMESPACE arguments for
# add_fb_python_executable() and add_fb_python_library()
macro(fb_py_process_default_args NAMESPACE_VAR BASE_DIR_VAR)
  # If the namespace was not specified, default to the relative path to the
  # current directory (starting from the repository root).
  if(NOT DEFINED "${NAMESPACE_VAR}")
    file(
      RELATIVE_PATH "${NAMESPACE_VAR}"
      "${CMAKE_SOURCE_DIR}"
      "${CMAKE_CURRENT_SOURCE_DIR}"
    )
  endif()

  if(NOT DEFINED "${BASE_DIR_VAR}")
    # If the base directory was not specified, default to the current directory
    set("${BASE_DIR_VAR}" "${CMAKE_CURRENT_SOURCE_DIR}")
  else()
    # If the base directory was specified, always convert it to an
    # absolute path.
    get_filename_component("${BASE_DIR_VAR}" "${${BASE_DIR_VAR}}" ABSOLUTE)
  endif()
endmacro()

# Verify that Python 3 and the make_fbpy_archive.py helper script are
# available; fatal error otherwise.
function(fb_py_check_available)
  # Make sure that Python 3 and our make_fbpy_archive.py helper script are
  # available.
  if(NOT Python3_EXECUTABLE)
    if(FBPY_FIND_PYTHON_ERR)
      message(FATAL_ERROR "Unable to find Python 3: ${FBPY_FIND_PYTHON_ERR}")
    else()
      message(FATAL_ERROR "Unable to find Python 3")
    endif()
  endif()

  if (NOT FB_MAKE_PYTHON_ARCHIVE)
    message(
      FATAL_ERROR
      "unable to find make_fbpy_archive.py helper program (it "
      "should be located in the same directory as FBPythonBinary.cmake)"
    )
  endif()
endfunction()

# Compute the absolute source path and the destination (in-archive) path for
# a single SOURCES entry.
#
# - src_path_output / dest_path_output: names of the parent-scope variables to
#   set with the results.
# - src_path: the raw SOURCES entry; may use the "PATH=ALIAS" form, in which
#   case ALIAS is used verbatim as the destination (namespace_dir is ignored).
# - namespace_dir: namespace prefix ("foo/bar/" or "") for non-aliased paths.
# - base_dir: base directory that non-aliased sources must live under.
function(
  fb_py_compute_dest_path
  src_path_output dest_path_output
  src_path namespace_dir base_dir
)
  if("${src_path}" MATCHES "=")
    # We want to split the string on the `=` sign, but cmake doesn't
    # provide much in the way of helpers for this, so we rewrite the
    # `=` sign to `;` so that we can treat it as a cmake list and
    # then index into the components
    string(REPLACE "=" ";" src_path_list "${src_path}")
    list(GET src_path_list 0 src_path)

    # Note that we ignore the `namespace_dir` in the alias case
    # in order to allow aliasing a source to the top level `__main__.py`
    # filename.
    list(GET src_path_list 1 dest_path)
  else()
    unset(dest_path)
  endif()

  get_filename_component(abs_source "${src_path}" ABSOLUTE)
  if(NOT DEFINED dest_path)
    # BUGFIX: previously this used ${ARG_BASE_DIR}, silently reading the
    # caller's scope instead of the base_dir parameter.  Both call sites
    # happened to pass ARG_BASE_DIR so behavior is unchanged for them, but the
    # function now honors its declared parameter.
    file(RELATIVE_PATH rel_src "${base_dir}" "${abs_source}")
    if("${rel_src}" MATCHES "^../")
      message(
        FATAL_ERROR
        "${LIB_NAME}: source file \"${abs_source}\" is not inside "
        "the base directory ${base_dir}"
      )
    endif()
    set(dest_path "${namespace_dir}${rel_src}")
  endif()

  set("${src_path_output}" "${abs_source}" PARENT_SCOPE)
  set("${dest_path_output}" "${dest_path}" PARENT_SCOPE)
endfunction()
CMake
hhvm/build/fbcode_builder/CMake/FBPythonTestAddTests.cmake
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Test-discovery helper script, run at build time (via cmake -P) with these
# -D inputs: TEST_EXECUTABLE, TEST_INTERPRETER, TEST_ENV, TEST_WORKING_DIR,
# TEST_PREFIX, TEST_PROPERTIES, TEST_LIST, CTEST_FILE.  It asks the test
# executable for its test list and writes add_test() calls into CTEST_FILE.

# Add a command to be emitted to the CTest file
set(ctest_script)
function(add_command CMD)
  set(escaped_args "")
  foreach(arg ${ARGN})
    # Escape all arguments using "Bracket Argument" syntax
    # We could skip this for argument that don't contain any special
    # characters if we wanted to make the output slightly more human-friendly.
    set(escaped_args "${escaped_args} [==[${arg}]==]")
  endforeach()
  set(ctest_script "${ctest_script}${CMD}(${escaped_args})\n" PARENT_SCOPE)
endfunction()

if(NOT EXISTS "${TEST_EXECUTABLE}")
  message(FATAL_ERROR "Test executable does not exist: ${TEST_EXECUTABLE}")
endif()

# Ask the test binary to enumerate its tests, one name per line.
execute_process(
  COMMAND
    ${CMAKE_COMMAND} -E env ${TEST_ENV}
    "${TEST_INTERPRETER}" "${TEST_EXECUTABLE}" --list-tests
  WORKING_DIRECTORY "${TEST_WORKING_DIR}"
  OUTPUT_VARIABLE output
  RESULT_VARIABLE result
)
if(NOT "${result}" EQUAL 0)
  # Indent the child output so it reads as a block in the error message.
  string(REPLACE "\n" "\n  " output "${output}")
  message(
    FATAL_ERROR
    "Error running test executable: ${TEST_EXECUTABLE}\n"
    "Output:\n"
    "  ${output}\n"
  )
endif()

# Parse output
string(REPLACE "\n" ";" tests_list "${output}")
foreach(test_name ${tests_list})
  add_command(
    add_test
    "${TEST_PREFIX}${test_name}"
    ${CMAKE_COMMAND} -E env ${TEST_ENV}
    "${TEST_INTERPRETER}" "${TEST_EXECUTABLE}" "${test_name}"
  )
  add_command(
    set_tests_properties
    "${TEST_PREFIX}${test_name}"
    PROPERTIES
    WORKING_DIRECTORY "${TEST_WORKING_DIR}"
    ${TEST_PROPERTIES}
  )
endforeach()

# Set a list of discovered tests in the parent scope, in case users
# want access to this list as a CMake variable
if(TEST_LIST)
  add_command(set ${TEST_LIST} ${tests_list})
endif()

file(WRITE "${CTEST_FILE}" "${ctest_script}")
CMake
hhvm/build/fbcode_builder/CMake/FBThriftCppLibrary.cmake
# Copyright (c) Facebook, Inc. and its affiliates.

include(FBCMakeParseArgs)

# Generate a C++ library from a thrift file
#
# Parameters:
# - SERVICES <svc1> [<svc2> ...]
#   The names of the services defined in the thrift file.
# - DEPENDS <dep1> [<dep2> ...]
#   A list of other thrift C++ libraries that this library depends on.
# - OPTIONS <opt1> [<opt2> ...]
#   A list of options to pass to the thrift compiler.
# - INCLUDE_DIR <path>
#   The sub-directory where generated headers will be installed.
#   Defaults to "include" if not specified.  The caller must still call
#   install() to install the thrift library if desired.
# - THRIFT_INCLUDE_DIR <path>
#   The sub-directory where generated headers will be installed.
#   Defaults to "${INCLUDE_DIR}/thrift-files" if not specified.
#   The caller must still call install() to install the thrift library if
#   desired.
function(add_fbthrift_cpp_library LIB_NAME THRIFT_FILE)
  # Parse the arguments
  set(one_value_args INCLUDE_DIR THRIFT_INCLUDE_DIR)
  set(multi_value_args SERVICES DEPENDS OPTIONS)
  fb_cmake_parse_args(
    ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}"
  )
  if(NOT DEFINED ARG_INCLUDE_DIR)
    set(ARG_INCLUDE_DIR "include")
  endif()
  if(NOT DEFINED ARG_THRIFT_INCLUDE_DIR)
    set(ARG_THRIFT_INCLUDE_DIR "${ARG_INCLUDE_DIR}/thrift-files")
  endif()

  get_filename_component(base ${THRIFT_FILE} NAME_WE)
  get_filename_component(
    output_dir
    ${CMAKE_CURRENT_BINARY_DIR}/${THRIFT_FILE}
    DIRECTORY
  )

  # Generate relative paths in #includes
  file(
    RELATIVE_PATH include_prefix
    "${CMAKE_SOURCE_DIR}"
    "${CMAKE_CURRENT_SOURCE_DIR}/${THRIFT_FILE}"
  )
  get_filename_component(include_prefix ${include_prefix} DIRECTORY)

  if (NOT "${include_prefix}" STREQUAL "")
    list(APPEND ARG_OPTIONS "include_prefix=${include_prefix}")
  endif()
  # CMake 3.12 is finally getting a list(JOIN) function, but until then
  # treating the list as a string and replacing the semicolons is good enough.
  string(REPLACE ";" "," GEN_ARG_STR "${ARG_OPTIONS}")

  # Compute the list of generated files
  list(APPEND generated_headers
    "${output_dir}/gen-cpp2/${base}_constants.h"
    "${output_dir}/gen-cpp2/${base}_types.h"
    "${output_dir}/gen-cpp2/${base}_types.tcc"
    "${output_dir}/gen-cpp2/${base}_types_custom_protocol.h"
    "${output_dir}/gen-cpp2/${base}_metadata.h"
  )
  list(APPEND generated_sources
    "${output_dir}/gen-cpp2/${base}_constants.cpp"
    "${output_dir}/gen-cpp2/${base}_data.h"
    "${output_dir}/gen-cpp2/${base}_data.cpp"
    "${output_dir}/gen-cpp2/${base}_types.cpp"
    "${output_dir}/gen-cpp2/${base}_metadata.cpp"
  )
  foreach(service IN LISTS ARG_SERVICES)
    list(APPEND generated_headers
      "${output_dir}/gen-cpp2/${service}.h"
      "${output_dir}/gen-cpp2/${service}.tcc"
      "${output_dir}/gen-cpp2/${service}AsyncClient.h"
      "${output_dir}/gen-cpp2/${service}_custom_protocol.h"
    )
    list(APPEND generated_sources
      "${output_dir}/gen-cpp2/${service}.cpp"
      "${output_dir}/gen-cpp2/${service}AsyncClient.cpp"
      "${output_dir}/gen-cpp2/${service}_processmap_binary.cpp"
      "${output_dir}/gen-cpp2/${service}_processmap_compact.cpp"
    )
  endforeach()

  # This generator expression gets the list of include directories required
  # for all of our dependencies.
  # It requires using COMMAND_EXPAND_LISTS in the add_custom_command() call
  # below.  COMMAND_EXPAND_LISTS is only available in CMake 3.8+
  # If we really had to support older versions of CMake we would probably need
  # to use a wrapper script around the thrift compiler that could take the
  # include list as a single argument and split it up before invoking the
  # thrift compiler.
  if (NOT POLICY CMP0067)
    message(FATAL_ERROR "add_fbthrift_cpp_library() requires CMake 3.8+")
  endif()
  set(
    thrift_include_options
    "-I;$<JOIN:$<TARGET_PROPERTY:${LIB_NAME}.thrift_includes,INTERFACE_INCLUDE_DIRECTORIES>,;-I;>"
  )

  # Emit the rule to run the thrift compiler
  add_custom_command(
    OUTPUT
      ${generated_headers}
      ${generated_sources}
    COMMAND_EXPAND_LISTS
    COMMAND
      "${CMAKE_COMMAND}" -E make_directory "${output_dir}"
    COMMAND
      "${FBTHRIFT_COMPILER}"
      --legacy-strict
      --gen "mstch_cpp2:${GEN_ARG_STR}"
      "${thrift_include_options}"
      -I "${FBTHRIFT_INCLUDE_DIR}"
      -o "${output_dir}"
      "${CMAKE_CURRENT_SOURCE_DIR}/${THRIFT_FILE}"
    WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
    MAIN_DEPENDENCY "${THRIFT_FILE}"
    DEPENDS
      ${ARG_DEPENDS}
      "${FBTHRIFT_COMPILER}"
  )

  # Now emit the library rule to compile the sources
  if (BUILD_SHARED_LIBS)
    set(LIB_TYPE SHARED)
  else ()
    set(LIB_TYPE STATIC)
  endif ()
  add_library(
    "${LIB_NAME}" ${LIB_TYPE}
    ${generated_sources}
  )

  target_include_directories(
    "${LIB_NAME}"
    PUBLIC
      "$<BUILD_INTERFACE:${CMAKE_BINARY_DIR}>"
      "$<INSTALL_INTERFACE:${ARG_INCLUDE_DIR}>"
  )
  # NOTE(review): the mvfst server targets look like a project-specific
  # addition on top of the standard FBThrift/Folly link set — confirm they are
  # intentional for every library generated by this function.
  target_link_libraries(
    "${LIB_NAME}"
    PUBLIC
      ${ARG_DEPENDS}
      FBThrift::thriftcpp2
      Folly::folly
      mvfst::mvfst_server_async_tran
      mvfst::mvfst_server
  )

  # Add ${generated_headers} to the PUBLIC_HEADER property for ${LIB_NAME}
  #
  # This allows callers to install it using
  # "install(TARGETS ${LIB_NAME} PUBLIC_HEADER)"
  # However, note that CMake's PUBLIC_HEADER behavior is rather inflexible,
  # and does not have any way to preserve header directory structure.  Callers
  # must be careful to use the correct PUBLIC_HEADER DESTINATION parameter
  # when doing this, to put the files in the correct directory themselves.
  # We define a HEADER_INSTALL_DIR property with the include directory prefix,
  # so typically callers should specify the PUBLIC_HEADER DESTINATION as
  # "$<TARGET_PROPERTY:${LIB_NAME},HEADER_INSTALL_DIR>"
  set_property(
    TARGET "${LIB_NAME}"
    PROPERTY PUBLIC_HEADER ${generated_headers}
  )

  # Define a dummy interface library to help propagate the thrift include
  # directories between dependencies.
  add_library("${LIB_NAME}.thrift_includes" INTERFACE)
  target_include_directories(
    "${LIB_NAME}.thrift_includes"
    INTERFACE
      "$<BUILD_INTERFACE:${CMAKE_SOURCE_DIR}>"
      "$<INSTALL_INTERFACE:${ARG_THRIFT_INCLUDE_DIR}>"
  )
  foreach(dep IN LISTS ARG_DEPENDS)
    target_link_libraries(
      "${LIB_NAME}.thrift_includes"
      INTERFACE "${dep}.thrift_includes"
    )
  endforeach()

  set_target_properties(
    "${LIB_NAME}"
    PROPERTIES
      EXPORT_PROPERTIES "THRIFT_INSTALL_DIR"
      THRIFT_INSTALL_DIR "${ARG_THRIFT_INCLUDE_DIR}/${include_prefix}"
      HEADER_INSTALL_DIR "${ARG_INCLUDE_DIR}/${include_prefix}/gen-cpp2"
  )
endfunction()
CMake
hhvm/build/fbcode_builder/CMake/FBThriftLibrary.cmake
# Copyright (c) Facebook, Inc. and its affiliates.

include(FBCMakeParseArgs)
include(FBThriftPyLibrary)
include(FBThriftCppLibrary)

#
# add_fbthrift_library()
#
# This is a convenience function that generates thrift libraries for multiple
# languages.
#
# For example:
#   add_fbthrift_library(
#     foo foo.thrift
#     LANGUAGES cpp py
#     SERVICES Foo
#     DEPENDS bar)
#
# will be expanded into two separate calls:
#
#   add_fbthrift_cpp_library(foo_cpp foo.thrift SERVICES Foo DEPENDS bar_cpp)
#   add_fbthrift_py_library(foo_py foo.thrift SERVICES Foo DEPENDS bar_py)
#
function(add_fbthrift_library LIB_NAME THRIFT_FILE)
  # Parse the arguments
  set(one_value_args PY_NAMESPACE INCLUDE_DIR THRIFT_INCLUDE_DIR)
  set(multi_value_args SERVICES DEPENDS LANGUAGES CPP_OPTIONS PY_OPTIONS)
  fb_cmake_parse_args(
    ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}"
  )

  if(NOT DEFINED ARG_INCLUDE_DIR)
    set(ARG_INCLUDE_DIR "include")
  endif()
  if(NOT DEFINED ARG_THRIFT_INCLUDE_DIR)
    set(ARG_THRIFT_INCLUDE_DIR "${ARG_INCLUDE_DIR}/thrift-files")
  endif()

  # CMake 3.12+ adds list(TRANSFORM) which would be nice to use here, but for
  # now we still want to support older versions of CMake.
  set(CPP_DEPENDS)
  set(PY_DEPENDS)
  foreach(dep IN LISTS ARG_DEPENDS)
    list(APPEND CPP_DEPENDS "${dep}_cpp")
    list(APPEND PY_DEPENDS "${dep}_py")
  endforeach()

  foreach(lang IN LISTS ARG_LANGUAGES)
    if ("${lang}" STREQUAL "cpp")
      add_fbthrift_cpp_library(
        "${LIB_NAME}_cpp" "${THRIFT_FILE}"
        SERVICES ${ARG_SERVICES}
        DEPENDS ${CPP_DEPENDS}
        OPTIONS ${ARG_CPP_OPTIONS}
        INCLUDE_DIR "${ARG_INCLUDE_DIR}"
        THRIFT_INCLUDE_DIR "${ARG_THRIFT_INCLUDE_DIR}"
      )
    elseif ("${lang}" STREQUAL "py" OR "${lang}" STREQUAL "python")
      # BUGFIX: clear namespace_args before conditionally setting it.  CMake
      # function scopes inherit the caller's variables, so without this an
      # unrelated namespace_args value from the calling scope could leak into
      # the add_fbthrift_py_library() call when PY_NAMESPACE is not given.
      set(namespace_args)
      if (DEFINED ARG_PY_NAMESPACE)
        set(namespace_args NAMESPACE "${ARG_PY_NAMESPACE}")
      endif()
      add_fbthrift_py_library(
        "${LIB_NAME}_py" "${THRIFT_FILE}"
        SERVICES ${ARG_SERVICES}
        ${namespace_args}
        DEPENDS ${PY_DEPENDS}
        OPTIONS ${ARG_PY_OPTIONS}
        THRIFT_INCLUDE_DIR "${ARG_THRIFT_INCLUDE_DIR}"
      )
    else()
      message(
        FATAL_ERROR
        "unknown language for thrift library ${LIB_NAME}: ${lang}"
      )
    endif()
  endforeach()
endfunction()
CMake
hhvm/build/fbcode_builder/CMake/FBThriftPyLibrary.cmake
# Copyright (c) Facebook, Inc. and its affiliates.

include(FBCMakeParseArgs)
include(FBPythonBinary)

# Generate a Python library from a thrift file
function(add_fbthrift_py_library LIB_NAME THRIFT_FILE)
  # Parse the arguments
  set(one_value_args NAMESPACE THRIFT_INCLUDE_DIR)
  set(multi_value_args SERVICES DEPENDS OPTIONS)
  fb_cmake_parse_args(
    ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}"
  )
  if(NOT DEFINED ARG_THRIFT_INCLUDE_DIR)
    set(ARG_THRIFT_INCLUDE_DIR "include/thrift-files")
  endif()

  get_filename_component(base ${THRIFT_FILE} NAME_WE)
  set(output_dir "${CMAKE_CURRENT_BINARY_DIR}/${THRIFT_FILE}-py")

  # Parse the namespace value
  if (NOT DEFINED ARG_NAMESPACE)
    set(ARG_NAMESPACE "${base}")
  endif()

  string(REPLACE "." "/" namespace_dir "${ARG_NAMESPACE}")
  set(py_output_dir "${output_dir}/gen-py/${namespace_dir}")
  list(APPEND generated_sources
    "${py_output_dir}/__init__.py"
    "${py_output_dir}/ttypes.py"
    "${py_output_dir}/constants.py"
  )
  foreach(service IN LISTS ARG_SERVICES)
    list(APPEND generated_sources
      ${py_output_dir}/${service}.py
    )
  endforeach()

  # Define a dummy interface library to help propagate the thrift include
  # directories between dependencies.
  add_library("${LIB_NAME}.thrift_includes" INTERFACE)
  target_include_directories(
    "${LIB_NAME}.thrift_includes"
    INTERFACE
      "$<BUILD_INTERFACE:${CMAKE_SOURCE_DIR}>"
      "$<INSTALL_INTERFACE:${ARG_THRIFT_INCLUDE_DIR}>"
  )
  foreach(dep IN LISTS ARG_DEPENDS)
    target_link_libraries(
      "${LIB_NAME}.thrift_includes"
      INTERFACE "${dep}.thrift_includes"
    )
  endforeach()

  # This generator expression gets the list of include directories required
  # for all of our dependencies.
  # It requires using COMMAND_EXPAND_LISTS in the add_custom_command() call
  # below.  COMMAND_EXPAND_LISTS is only available in CMake 3.8+
  # If we really had to support older versions of CMake we would probably need
  # to use a wrapper script around the thrift compiler that could take the
  # include list as a single argument and split it up before invoking the
  # thrift compiler.
  if (NOT POLICY CMP0067)
    message(FATAL_ERROR "add_fbthrift_py_library() requires CMake 3.8+")
  endif()
  set(
    thrift_include_options
    "-I;$<JOIN:$<TARGET_PROPERTY:${LIB_NAME}.thrift_includes,INTERFACE_INCLUDE_DIRECTORIES>,;-I;>"
  )

  # Always force generation of "new-style" python classes for Python 2
  list(APPEND ARG_OPTIONS "new_style")
  # CMake 3.12 is finally getting a list(JOIN) function, but until then
  # treating the list as a string and replacing the semicolons is good enough.
  string(REPLACE ";" "," GEN_ARG_STR "${ARG_OPTIONS}")

  # Emit the rule to run the thrift compiler
  # NOTE(review): unlike add_fbthrift_cpp_library(), DEPENDS here omits
  # ${ARG_DEPENDS}, so regeneration may not be retriggered when a dependent
  # thrift library changes — confirm whether this is intentional.
  add_custom_command(
    OUTPUT
      ${generated_sources}
    COMMAND_EXPAND_LISTS
    COMMAND
      "${CMAKE_COMMAND}" -E make_directory "${output_dir}"
    COMMAND
      "${FBTHRIFT_COMPILER}"
      --legacy-strict
      --gen "py:${GEN_ARG_STR}"
      "${thrift_include_options}"
      -o "${output_dir}"
      "${CMAKE_CURRENT_SOURCE_DIR}/${THRIFT_FILE}"
    WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
    MAIN_DEPENDENCY "${THRIFT_FILE}"
    DEPENDS "${FBTHRIFT_COMPILER}"
  )

  # We always want to pass the namespace as "" to this call:
  # thrift will already emit the files with the desired namespace prefix under
  # gen-py.  We don't want add_fb_python_library() to prepend the namespace a
  # second time.
  add_fb_python_library(
    "${LIB_NAME}"
    BASE_DIR "${output_dir}/gen-py"
    NAMESPACE ""
    SOURCES ${generated_sources}
    DEPENDS ${ARG_DEPENDS} FBThrift::thrift_py
  )
endfunction()
Python
hhvm/build/fbcode_builder/CMake/fb_py_test_main.py
#!/usr/bin/env python
#
# Copyright (c) Facebook, Inc. and its affiliates.
#

"""
This file contains the main module code for Python test programs.
"""

import contextlib
import ctypes
import fnmatch
import json
import logging
import optparse
import os
import platform
import re
import sys
import tempfile
import time
import traceback
import unittest
import warnings

# Hide warning about importing "imp"; remove once python2 is gone.
# NOTE(review): the "imp" module was removed in Python 3.12; this harness
# will need to migrate to importlib before running there — TODO confirm.
with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=DeprecationWarning)
    import imp

try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO

try:
    import coverage
except ImportError:
    coverage = None  # type: ignore

try:
    from importlib.machinery import SourceFileLoader
except ImportError:
    SourceFileLoader = None  # type: ignore


class get_cpu_instr_counter(object):
    """Stub CPU-instruction counter; always reads 0 until implemented."""

    def read(self):
        # TODO
        return 0


EXIT_CODE_SUCCESS = 0
EXIT_CODE_TEST_FAILURE = 70


class TestStatus(object):
    """Status strings understood by buck's test runner output format."""

    ABORTED = "FAILURE"
    PASSED = "SUCCESS"
    FAILED = "FAILURE"
    EXPECTED_FAILURE = "SUCCESS"
    UNEXPECTED_SUCCESS = "FAILURE"
    SKIPPED = "ASSUMPTION_VIOLATION"


class PathMatcher(object):
    """Glob-based include/omit filter over real (symlink-resolved) paths."""

    def __init__(self, include_patterns, omit_patterns):
        self.include_patterns = include_patterns
        self.omit_patterns = omit_patterns

    def omit(self, path):
        """
        Omit iff matches any of the omit_patterns or the include patterns are
        not empty and none is matched
        """
        path = os.path.realpath(path)
        return any(fnmatch.fnmatch(path, p) for p in self.omit_patterns) or (
            self.include_patterns
            and not any(fnmatch.fnmatch(path, p) for p in self.include_patterns)
        )

    def include(self, path):
        return not self.omit(path)


class DebugWipeFinder(object):
    """
    PEP 302 finder that uses a DebugWipeLoader for all files which do not need
    coverage
    """

    def __init__(self, matcher):
        self.matcher = matcher

    def find_module(self, fullname, path=None):
        _, _, basename = fullname.rpartition(".")
        try:
            fd, pypath, (_, _, kind) = imp.find_module(basename, path)
        except Exception:
            # Finding without hooks using the imp module failed. One reason
            # could be that there is a zip file on sys.path. The imp module
            # does not support loading from there. Leave finding this module to
            # the others finders in sys.meta_path.
            return None

        if hasattr(fd, "close"):
            fd.close()
        if kind != imp.PY_SOURCE:
            return None
        if self.matcher.include(pypath):
            return None

        # This is defined to match CPython's PyVarObject struct
        class PyVarObject(ctypes.Structure):
            _fields_ = [
                ("ob_refcnt", ctypes.c_long),
                ("ob_type", ctypes.c_void_p),
                ("ob_size", ctypes.c_ulong),
            ]

        class DebugWipeLoader(SourceFileLoader):
            """
            PEP302 loader that zeros out debug information before execution
            """

            def get_code(self, fullname):
                code = super(DebugWipeLoader, self).get_code(fullname)
                if code:
                    # Ideally we'd do
                    # code.co_lnotab = b''
                    # But code objects are READONLY. Not to worry though; we'll
                    # directly modify CPython's object
                    code_impl = PyVarObject.from_address(id(code.co_lnotab))
                    code_impl.ob_size = 0
                return code

        return DebugWipeLoader(fullname, pypath)


def optimize_for_coverage(cov, include_patterns, omit_patterns):
    """
    We get better performance if we zero out debug information for files which
    we're not interested in. Only available in CPython 3.3+
    """
    matcher = PathMatcher(include_patterns, omit_patterns)
    if SourceFileLoader and platform.python_implementation() == "CPython":
        sys.meta_path.insert(0, DebugWipeFinder(matcher))


class TeeStream(object):
    """Fan-out writable stream: forwards writes/flushes to every stream."""

    def __init__(self, *streams):
        self._streams = streams

    def write(self, data):
        for stream in self._streams:
            stream.write(data)

    def flush(self):
        for stream in self._streams:
            stream.flush()

    def isatty(self):
        return False


class CallbackStream(object):
    """File-like object that routes every write() to a callback."""

    def __init__(self, callback, bytes_callback=None, orig=None):
        self._callback = callback
        self._fileno = orig.fileno() if orig else None

        # Python 3 APIs:
        # - `encoding` is a string holding the encoding name
        # - `errors` is a string holding the error-handling mode for encoding
        # - `buffer` should look like an io.BufferedIOBase object

        self.errors = orig.errors if orig else None
        if bytes_callback:
            # those members are only on the io.TextIOWrapper
            self.encoding = orig.encoding if orig else "UTF-8"
            self.buffer = CallbackStream(bytes_callback, orig=orig)

    def write(self, data):
        self._callback(data)

    def flush(self):
        pass

    def isatty(self):
        return False

    def fileno(self):
        return self._fileno


# Use the public unittest.TextTestResult name rather than the private
# (deprecated) unittest._TextTestResult alias; they are the same class.
class BuckTestResult(unittest.TextTestResult):
    """
    Our own TestResult class that outputs data in a format that can be easily
    parsed by buck's test runner.
    """

    _instr_counter = get_cpu_instr_counter()

    def __init__(
        self, stream, descriptions, verbosity, show_output, main_program, suite
    ):
        super(BuckTestResult, self).__init__(stream, descriptions, verbosity)
        self._main_program = main_program
        self._suite = suite
        self._results = []
        self._current_test = None
        self._saved_stdout = sys.stdout
        self._saved_stderr = sys.stderr
        self._show_output = show_output

    def getResults(self):
        return self._results

    def startTest(self, test):
        super(BuckTestResult, self).startTest(test)

        # Pass in the real stdout and stderr filenos.  We can't really do much
        # here to intercept callers who directly operate on these fileno
        # objects.
        sys.stdout = CallbackStream(
            self.addStdout, self.addStdoutBytes, orig=sys.stdout
        )
        sys.stderr = CallbackStream(
            self.addStderr, self.addStderrBytes, orig=sys.stderr
        )
        self._current_test = test
        self._test_start_time = time.time()
        self._current_status = TestStatus.ABORTED
        self._messages = []
        self._stacktrace = None
        self._stdout = ""
        self._stderr = ""
        self._start_instr_count = self._instr_counter.read()

    def _find_next_test(self, suite):
        """
        Find the next test that has not been run.
        """
        for test in suite:
            # We identify test suites by test that are iterable (as is done in
            # the builtin python test harness).  If we see one, recurse on it.
            if hasattr(test, "__iter__"):
                test = self._find_next_test(test)

            # The builtin python test harness sets test references to `None`
            # after they have run, so we know we've found the next test up
            # if it's not `None`.
            if test is not None:
                return test

    def stopTest(self, test):
        sys.stdout = self._saved_stdout
        sys.stderr = self._saved_stderr
        super(BuckTestResult, self).stopTest(test)

        # If a failure occurred during module/class setup, then this "test" may
        # actually be a `_ErrorHolder`, which doesn't contain explicit info
        # about the upcoming test.  Since we really only care about the test
        # name field (i.e. `_testMethodName`), we use that to detect an actual
        # test cases, and fall back to looking the test up from the suite
        # otherwise.
        if not hasattr(test, "_testMethodName"):
            test = self._find_next_test(self._suite)

        result = {
            "testCaseName": "{0}.{1}".format(
                test.__class__.__module__, test.__class__.__name__
            ),
            "testCase": test._testMethodName,
            "type": self._current_status,
            "time": int((time.time() - self._test_start_time) * 1000),
            "message": os.linesep.join(self._messages),
            "stacktrace": self._stacktrace,
            "stdOut": self._stdout,
            "stdErr": self._stderr,
        }

        # TestPilot supports an instruction count field.
        # NOTE(review): the trailing comma makes this a 1-tuple, which
        # serializes as a JSON list — confirm that is what TestPilot expects.
        if "TEST_PILOT" in os.environ:
            result["instrCount"] = (
                int(self._instr_counter.read() - self._start_instr_count),
            )

        self._results.append(result)
        self._current_test = None

    def stopTestRun(self):
        cov = self._main_program.get_coverage()
        if cov is not None:
            self._results.append({"coverage": cov})

    @contextlib.contextmanager
    def _withTest(self, test):
        self.startTest(test)
        yield
        self.stopTest(test)

    def _setStatus(self, test, status, message=None, stacktrace=None):
        assert test == self._current_test
        self._current_status = status
        self._stacktrace = stacktrace
        if message is not None:
            if message.endswith(os.linesep):
                message = message[:-1]
            self._messages.append(message)

    def setStatus(self, test, status, message=None, stacktrace=None):
        # addError() may be called outside of a test if one of the shared
        # fixtures (setUpClass/tearDownClass/setUpModule/tearDownModule)
        # throws an error.
        #
        # In this case, create a fake test result to record the error.
        if self._current_test is None:
            with self._withTest(test):
                self._setStatus(test, status, message, stacktrace)
        else:
            self._setStatus(test, status, message, stacktrace)

    def setException(self, test, status, excinfo):
        exctype, value, tb = excinfo
        self.setStatus(
            test,
            status,
            "{0}: {1}".format(exctype.__name__, value),
            "".join(traceback.format_tb(tb)),
        )

    def addSuccess(self, test):
        super(BuckTestResult, self).addSuccess(test)
        self.setStatus(test, TestStatus.PASSED)

    def addError(self, test, err):
        super(BuckTestResult, self).addError(test, err)
        self.setException(test, TestStatus.ABORTED, err)

    def addFailure(self, test, err):
        super(BuckTestResult, self).addFailure(test, err)
        self.setException(test, TestStatus.FAILED, err)

    def addSkip(self, test, reason):
        super(BuckTestResult, self).addSkip(test, reason)
        self.setStatus(test, TestStatus.SKIPPED, "Skipped: %s" % (reason,))

    def addExpectedFailure(self, test, err):
        super(BuckTestResult, self).addExpectedFailure(test, err)
        self.setException(test, TestStatus.EXPECTED_FAILURE, err)

    def addUnexpectedSuccess(self, test):
        super(BuckTestResult, self).addUnexpectedSuccess(test)
        self.setStatus(test, TestStatus.UNEXPECTED_SUCCESS, "Unexpected success")

    def addStdout(self, val):
        self._stdout += val
        if self._show_output:
            self._saved_stdout.write(val)
            self._saved_stdout.flush()

    def addStdoutBytes(self, val):
        string = val.decode("utf-8", errors="backslashreplace")
        self.addStdout(string)

    def addStderr(self, val):
        self._stderr += val
        if self._show_output:
            self._saved_stderr.write(val)
            self._saved_stderr.flush()

    def addStderrBytes(self, val):
        string = val.decode("utf-8", errors="backslashreplace")
        self.addStderr(string)


class BuckTestRunner(unittest.TextTestRunner):
    """TextTestRunner that produces BuckTestResult objects."""

    def __init__(self, main_program, suite, show_output=True, **kwargs):
        super(BuckTestRunner, self).__init__(**kwargs)
        self.show_output = show_output
        self._main_program = main_program
        self._suite = suite

    def _makeResult(self):
        return BuckTestResult(
            self.stream,
            self.descriptions,
            self.verbosity,
            self.show_output,
            self._main_program,
            self._suite,
        )


def _format_test_name(test_class, attrname):
    """Return 'module.Class.method' for a test method."""
    return "{0}.{1}.{2}".format(test_class.__module__, test_class.__name__, attrname)


class StderrLogHandler(logging.StreamHandler):
    """
    This class is very similar to logging.StreamHandler, except that it
    always uses the current sys.stderr object.

    StreamHandler caches the current sys.stderr object when it is constructed.
    This makes it behave poorly in unit tests, which may replace sys.stderr
    with a StringIO buffer during tests.  The StreamHandler will continue using
    the old sys.stderr object instead of the desired StringIO buffer.
    """

    def __init__(self):
        logging.Handler.__init__(self)

    @property
    def stream(self):
        return sys.stderr


class RegexTestLoader(unittest.TestLoader):
    """TestLoader that optionally filters test names by a regex."""

    def __init__(self, regex=None):
        self.regex = regex
        super(RegexTestLoader, self).__init__()

    def getTestCaseNames(self, testCaseClass):
        """
        Return a sorted sequence of method names found within testCaseClass
        """
        testFnNames = super(RegexTestLoader, self).getTestCaseNames(testCaseClass)
        if self.regex is None:
            return testFnNames
        robj = re.compile(self.regex)
        matched = []
        for attrname in testFnNames:
            fullname = _format_test_name(testCaseClass, attrname)
            if robj.search(fullname):
                matched.append(attrname)
        return matched


class Loader(object):
    """Builds test suites from a list of module names and optional regex."""

    suiteClass = unittest.TestSuite

    def __init__(self, modules, regex=None):
        self.modules = modules
        self.regex = regex

    def load_all(self):
        loader = RegexTestLoader(self.regex)
        test_suite = self.suiteClass()
        for module_name in self.modules:
            __import__(module_name, level=0)
            module = sys.modules[module_name]
            module_suite = loader.loadTestsFromModule(module)
            test_suite.addTest(module_suite)
        return test_suite

    def load_args(self, args):
        loader = RegexTestLoader(self.regex)
        suites = []
        for arg in args:
            suite = loader.loadTestsFromName(arg)
            # loadTestsFromName() can only process names that refer to
            # individual test functions or modules.  It can't process package
            # names.  If there were no module/function matches, check to see if
            # this looks like a package name.
            if suite.countTestCases() != 0:
                suites.append(suite)
                continue

            # Load all modules whose name is <arg>.<something>
            prefix = arg + "."
            for module in self.modules:
                if module.startswith(prefix):
                    suite = loader.loadTestsFromName(module)
                    suites.append(suite)

        return loader.suiteClass(suites)


_COVERAGE_INI = """\
[report]
exclude_lines =
    pragma: no cover
    pragma: nocover
    pragma:.*no${PLATFORM}
    pragma:.*no${PY_IMPL}${PY_MAJOR}${PY_MINOR}
    pragma:.*no${PY_IMPL}${PY_MAJOR}
    pragma:.*nopy${PY_MAJOR}
    pragma:.*nopy${PY_MAJOR}${PY_MINOR}
"""


class MainProgram(object):
    """
    This class implements the main program.  It can be subclassed by
    users who wish to customize some parts of the main program.
    (Adding additional command line options, customizing test loading, etc.)
    """

    DEFAULT_VERBOSITY = 2

    def __init__(self, argv):
        self.init_option_parser()
        self.parse_options(argv)
        self.setup_logging()

    def init_option_parser(self):
        usage = "%prog [options] [TEST] ..."
        op = optparse.OptionParser(usage=usage, add_help_option=False)
        self.option_parser = op

        op.add_option(
            "--hide-output",
            dest="show_output",
            action="store_false",
            default=True,
            help="Suppress data that tests print to stdout/stderr, and only "
            "show it if the test fails.",
        )
        op.add_option(
            "-o",
            "--output",
            help="Write results to a file in a JSON format to be read by Buck",
        )
        op.add_option(
            "-f",
            "--failfast",
            action="store_true",
            default=False,
            help="Stop after the first failure",
        )
        op.add_option(
            "-l",
            "--list-tests",
            action="store_true",
            dest="list",
            default=False,
            help="List tests and exit",
        )
        op.add_option(
            "-r",
            "--regex",
            default=None,
            help="Regex to apply to tests, to only run those tests",
        )
        op.add_option(
            "--collect-coverage",
            action="store_true",
            default=False,
            help="Collect test coverage information",
        )
        # Fixed typo in user-facing help text: "converage" -> "coverage".
        op.add_option(
            "--coverage-include",
            default="*",
            help='File globs to include in coverage (split by ",")',
        )
        op.add_option(
            "--coverage-omit",
            default="",
            help='File globs to omit from coverage (split by ",")',
        )
        op.add_option(
            "--logger",
            action="append",
            metavar="<category>=<level>",
            default=[],
            help="Configure log levels for specific logger categories",
        )
        op.add_option(
            "-q",
            "--quiet",
            action="count",
            default=0,
            help="Decrease the verbosity (may be specified multiple times)",
        )
        op.add_option(
            "-v",
            "--verbosity",
            action="count",
            default=self.DEFAULT_VERBOSITY,
            help="Increase the verbosity (may be specified multiple times)",
        )
        op.add_option(
            "-?", "--help", action="help", help="Show this help message and exit"
        )

    def parse_options(self, argv):
        self.options, self.test_args = self.option_parser.parse_args(argv[1:])
        self.options.verbosity -= self.options.quiet

        if self.options.collect_coverage and coverage is None:
            self.option_parser.error("coverage module is not available")
        self.options.coverage_include = self.options.coverage_include.split(",")
        if self.options.coverage_omit == "":
            self.options.coverage_omit = []
        else:
            self.options.coverage_omit = self.options.coverage_omit.split(",")

    def setup_logging(self):
        # Configure the root logger to log at INFO level.
        # This is similar to logging.basicConfig(), but uses our
        # StderrLogHandler instead of a StreamHandler.
        fmt = logging.Formatter("%(pathname)s:%(lineno)s: %(message)s")
        log_handler = StderrLogHandler()
        log_handler.setFormatter(fmt)
        root_logger = logging.getLogger()
        root_logger.addHandler(log_handler)
        root_logger.setLevel(logging.INFO)

        level_names = {
            "debug": logging.DEBUG,
            "info": logging.INFO,
            "warn": logging.WARNING,
            "warning": logging.WARNING,
            "error": logging.ERROR,
            "critical": logging.CRITICAL,
            "fatal": logging.FATAL,
        }

        for value in self.options.logger:
            parts = value.rsplit("=", 1)
            if len(parts) != 2:
                self.option_parser.error(
                    "--logger argument must be of the "
                    "form <name>=<level>: %s" % value
                )
            name = parts[0]
            level_name = parts[1].lower()
            level = level_names.get(level_name)
            if level is None:
                self.option_parser.error(
                    "invalid log level %r for log " "category %s" % (parts[1], name)
                )
            logging.getLogger(name).setLevel(level)

    def create_loader(self):
        # __test_modules__ is generated by the build; import lazily so merely
        # importing this harness does not require it.
        import __test_modules__

        return Loader(__test_modules__.TEST_MODULES, self.options.regex)

    def load_tests(self):
        loader = self.create_loader()
        if self.options.collect_coverage:
            self.start_coverage()
            include = self.options.coverage_include
            omit = self.options.coverage_omit
            if include and "*" not in include:
                optimize_for_coverage(self.cov, include, omit)

        if self.test_args:
            suite = loader.load_args(self.test_args)
        else:
            suite = loader.load_all()
        if self.options.collect_coverage:
            self.cov.start()
        return suite

    def get_tests(self, test_suite):
        # Flatten a (possibly nested) TestSuite into a list of test cases.
        tests = []
        for test in test_suite:
            if isinstance(test, unittest.TestSuite):
                tests.extend(self.get_tests(test))
            else:
                tests.append(test)
        return tests

    def run(self):
        test_suite = self.load_tests()

        if self.options.list:
            for test in self.get_tests(test_suite):
                method_name = getattr(test, "_testMethodName", "")
                name = _format_test_name(test.__class__, method_name)
                print(name)
            return EXIT_CODE_SUCCESS
        else:
            result = self.run_tests(test_suite)
            if self.options.output is not None:
                with open(self.options.output, "w") as f:
                    json.dump(result.getResults(), f, indent=4, sort_keys=True)
            if not result.wasSuccessful():
                return EXIT_CODE_TEST_FAILURE
            return EXIT_CODE_SUCCESS

    def run_tests(self, test_suite):
        # Install a signal handler to catch Ctrl-C and display the results
        # (but only if running >2.6).
        if sys.version_info[0] > 2 or sys.version_info[1] > 6:
            unittest.installHandler()

        # Run the tests
        runner = BuckTestRunner(
            self,
            test_suite,
            verbosity=self.options.verbosity,
            show_output=self.options.show_output,
        )
        result = runner.run(test_suite)

        if self.options.collect_coverage and self.options.show_output:
            self.cov.stop()
            try:
                self.cov.report(file=sys.stdout)
            except coverage.misc.CoverageException:
                print("No lines were covered, potentially restricted by file filters")

        return result

    def get_abbr_impl(self):
        """Return abbreviated implementation name."""
        impl = platform.python_implementation()
        if impl == "PyPy":
            return "pp"
        elif impl == "Jython":
            return "jy"
        elif impl == "IronPython":
            return "ip"
        elif impl == "CPython":
            return "cp"
        else:
            raise RuntimeError("unknown python runtime")

    def start_coverage(self):
        if not self.options.collect_coverage:
            return

        with tempfile.NamedTemporaryFile("w", delete=False) as coverage_ini:
            coverage_ini.write(_COVERAGE_INI)
            self._coverage_ini_path = coverage_ini.name

        # Keep the original working dir in case tests use os.chdir
        self._original_working_dir = os.getcwd()

        # for coverage config ignores by platform/python version
        os.environ["PLATFORM"] = sys.platform
        os.environ["PY_IMPL"] = self.get_abbr_impl()
        os.environ["PY_MAJOR"] = str(sys.version_info.major)
        os.environ["PY_MINOR"] = str(sys.version_info.minor)

        self.cov = coverage.Coverage(
            include=self.options.coverage_include,
            omit=self.options.coverage_omit,
            config_file=coverage_ini.name,
        )
        self.cov.erase()
        self.cov.start()

    def get_coverage(self):
        if not self.options.collect_coverage:
            return None

        try:
            os.remove(self._coverage_ini_path)
        except OSError:
            pass  # Better to litter than to fail the test

        # Switch back to the original working directory.
        os.chdir(self._original_working_dir)

        result = {}
        self.cov.stop()

        try:
            f = StringIO()
            self.cov.report(file=f)
            lines = f.getvalue().split("\n")
        except coverage.misc.CoverageException:
            # Nothing was covered. That's fine by us
            return result

        # N.B.: the format of the coverage library's output differs
        # depending on whether one or more files are in the results
        for line in lines[2:]:
            if line.strip("-") == "":
                break
            r = line.split()[0]
            analysis = self.cov.analysis2(r)
            covString = self.convert_to_diff_cov_str(analysis)
            if covString:
                result[r] = covString
        return result

    def convert_to_diff_cov_str(self, analysis):
        # Info on the format of analysis:
        # http://nedbatchelder.com/code/coverage/api.html
        if not analysis:
            return None
        numLines = max(
            analysis[1][-1] if len(analysis[1]) else 0,
            analysis[2][-1] if len(analysis[2]) else 0,
            analysis[3][-1] if len(analysis[3]) else 0,
        )
        lines = ["N"] * numLines
        for l in analysis[1]:
            lines[l - 1] = "C"
        for l in analysis[2]:
            lines[l - 1] = "X"
        for l in analysis[3]:
            lines[l - 1] = "U"
        return "".join(lines)


def main(argv):
    # Bug fix: honor the argv parameter instead of always reading sys.argv,
    # so callers can invoke main() with a synthetic argument list.
    return MainProgram(argv).run()


if __name__ == "__main__":
    sys.exit(main(sys.argv))
C
hhvm/build/fbcode_builder/CMake/fb_py_win_main.c
// Copyright (c) Facebook, Inc. and its affiliates. #define WIN32_LEAN_AND_MEAN #include <Windows.h> #include <stdio.h> #include <stdlib.h> #define PATH_SIZE 32768 typedef int (*Py_Main)(int, wchar_t**); // Add the given path to Windows's DLL search path. // For Windows DLL search path resolution, see: // https://docs.microsoft.com/en-us/windows/win32/dlls/dynamic-link-library-search-order void add_search_path(const wchar_t* path) { wchar_t buffer[PATH_SIZE]; wchar_t** lppPart = NULL; if (!GetFullPathNameW(path, PATH_SIZE, buffer, lppPart)) { fwprintf( stderr, L"warning: %d unable to expand path %s\n", GetLastError(), path); return; } if (!AddDllDirectory(buffer)) { DWORD error = GetLastError(); if (error != ERROR_FILE_NOT_FOUND) { fwprintf( stderr, L"warning: %d unable to set DLL search path for %s\n", GetLastError(), path); } } } int locate_py_main(int argc, wchar_t** argv) { /* * We have to dynamically locate Python3.dll because we may be loading a * Python native module while running. If that module is built with a * different Python version, we will end up a DLL import error. To resolve * this, we can either ship an embedded version of Python with us or * dynamically look up existing Python distribution installed on user's * machine. This way, we should be able to get a consistent version of * Python3.dll and .pyd modules. 
*/ HINSTANCE python_dll; Py_Main pymain; // last added directory has highest priority add_search_path(L"C:\\Python36\\"); add_search_path(L"C:\\tools\\fb-python\\fb-python36\\"); add_search_path(L"C:\\Python37\\"); add_search_path(L"C:\\tools\\fb-python\\fb-python37\\"); add_search_path(L"C:\\Python38\\"); add_search_path(L"C:\\tools\\fb-python\\fb-python38\\"); // TODO(T123615656): Re-enable Python 3.9 after the fix // add_search_path(L"C:\\tools\\fb-python\\fb-python39\\"); python_dll = LoadLibraryExW(L"python3.dll", NULL, LOAD_LIBRARY_SEARCH_DEFAULT_DIRS); int returncode = 0; if (python_dll != NULL) { pymain = (Py_Main)GetProcAddress(python_dll, "Py_Main"); if (pymain != NULL) { returncode = (pymain)(argc, argv); } else { fprintf(stderr, "error: %d unable to load Py_Main\n", GetLastError()); } FreeLibrary(python_dll); } else { fprintf(stderr, "error: %d unable to locate python3.dll\n", GetLastError()); return 1; } return returncode; } int wmain() { /* * This executable will be prepended to the start of a Python ZIP archive. * Python will be able to directly execute the ZIP archive, so we simply * need to tell Py_Main() to run our own file. Duplicate the argument list * and add our file name to the beginning to tell Python what file to invoke. */ wchar_t** pyargv = malloc(sizeof(wchar_t*) * (__argc + 1)); if (!pyargv) { fprintf(stderr, "error: failed to allocate argument vector\n"); return 1; } /* Py_Main wants the wide character version of the argv so we pull those * values from the global __wargv array that has been prepared by MSVCRT. * * In order for the zipapp to run we need to insert an extra argument in * the front of the argument vector that points to ourselves. * * An additional complication is that, depending on who prepared the argument * string used to start our process, the computed __wargv[0] can be a simple * shell word like `watchman-wait` which is normally resolved together with * the PATH by the shell. 
* That unresolved path isn't sufficient to start the zipapp on windows; * we need the fully qualified path. * * Given: * __wargv == {"watchman-wait", "-h"} * * we want to pass the following to Py_Main: * * { * "z:\build\watchman\python\watchman-wait.exe", * "z:\build\watchman\python\watchman-wait.exe", * "-h" * } */ wchar_t full_path_to_argv0[PATH_SIZE]; DWORD len = GetModuleFileNameW(NULL, full_path_to_argv0, PATH_SIZE); if (len == 0 || len == PATH_SIZE && GetLastError() == ERROR_INSUFFICIENT_BUFFER) { fprintf( stderr, "error: %d while retrieving full path to this executable\n", GetLastError()); return 1; } for (int n = 1; n < __argc; ++n) { pyargv[n + 1] = __wargv[n]; } pyargv[0] = full_path_to_argv0; pyargv[1] = full_path_to_argv0; return locate_py_main(__argc + 1, pyargv); }
CMake
hhvm/build/fbcode_builder/CMake/FindDoubleConversion.cmake
# Copyright (c) Meta Platforms, Inc. and affiliates.

# Locate the double-conversion library and headers.
#
# On success this module sets:
#   DOUBLE_CONVERSION_INCLUDE_DIR - directory containing
#                                   double-conversion/double-conversion.h
#   DOUBLE_CONVERSION_LIBRARY     - the double-conversion library to link

include(FindPackageHandleStandardArgs)

find_library(DOUBLE_CONVERSION_LIBRARY NAMES double-conversion)
find_path(DOUBLE_CONVERSION_INCLUDE_DIR double-conversion/double-conversion.h)

# Sets DoubleConversion_FOUND and reports/errors per the QUIET/REQUIRED
# find_package() arguments.
find_package_handle_standard_args(
  DoubleConversion
  DEFAULT_MSG
  DOUBLE_CONVERSION_LIBRARY
  DOUBLE_CONVERSION_INCLUDE_DIR
)

mark_as_advanced(DOUBLE_CONVERSION_INCLUDE_DIR DOUBLE_CONVERSION_LIBRARY)
CMake
hhvm/build/fbcode_builder/CMake/FindGflags.cmake
# Copyright (c) Facebook, Inc. and its affiliates.

# Find libgflags.
# There's a lot of compatibility cruft going on in here, both
# to deal with changes across the FB consumers of this and also
# to deal with variances in behavior of cmake itself.
#
# Since this file is named FindGflags.cmake the cmake convention
# is for the module to export both GFLAGS_FOUND and Gflags_FOUND.
# The convention expected by consumers is that we export the
# following variables, even though these do not match the cmake
# conventions:
#
#  LIBGFLAGS_INCLUDE_DIR - where to find gflags/gflags.h, etc.
#  LIBGFLAGS_LIBRARY     - List of libraries when using libgflags.
#  LIBGFLAGS_FOUND       - True if libgflags found.
#
# We need to be able to locate gflags both from an installed
# cmake config file and just from the raw headers and libs, so
# test for the former and then the latter, and then stick
# the results together and export them into the variables
# listed above.
#
# For forwards compatibility, we export the following variables:
#
#  gflags_INCLUDE_DIR - where to find gflags/gflags.h, etc.
#  gflags_TARGET / GFLAGS_TARGET / gflags_LIBRARIES
#                     - List of libraries when using libgflags.
#  gflags_FOUND       - True if libgflags found.
#

IF (LIBGFLAGS_INCLUDE_DIR)
  # Already in cache, be silent
  SET(Gflags_FIND_QUIETLY TRUE)
ENDIF ()

# First, try the CONFIG-mode package shipped by gflags itself.
find_package(gflags CONFIG QUIET)
if (gflags_FOUND)
  if (NOT Gflags_FIND_QUIETLY)
    message(STATUS "Found gflags from package config ${gflags_CONFIG}")
  endif()
  # Re-export the config-specified libs with our local names
  set(LIBGFLAGS_LIBRARY ${gflags_LIBRARIES})
  set(LIBGFLAGS_INCLUDE_DIR ${gflags_INCLUDE_DIR})
  if(NOT EXISTS "${gflags_INCLUDE_DIR}")
    # The gflags-devel RPM on recent RedHat-based systems is somewhat broken.
    # RedHat symlinks /lib64 to /usr/lib64, and this breaks some of the
    # relative path computation performed in gflags-config.cmake.  The package
    # config file ends up being found via /lib64, but the relative path
    # computation it does only works if it was found in /usr/lib64.
    # If gflags_INCLUDE_DIR does not actually exist, simply default it to
    # /usr/include on these systems.
    set(LIBGFLAGS_INCLUDE_DIR "/usr/include")
    set(GFLAGS_INCLUDE_DIR "/usr/include")
  endif()
  set(LIBGFLAGS_FOUND ${gflags_FOUND})
  # cmake module compat
  set(GFLAGS_FOUND ${gflags_FOUND})
  set(Gflags_FOUND ${gflags_FOUND})
else()
  # CONFIG mode failed; fall back to locating the raw headers and libraries.
  FIND_PATH(LIBGFLAGS_INCLUDE_DIR gflags/gflags.h)

  FIND_LIBRARY(LIBGFLAGS_LIBRARY_DEBUG NAMES gflagsd gflags_staticd)
  FIND_LIBRARY(LIBGFLAGS_LIBRARY_RELEASE NAMES gflags gflags_static)

  INCLUDE(SelectLibraryConfigurations)
  SELECT_LIBRARY_CONFIGURATIONS(LIBGFLAGS)

  # handle the QUIETLY and REQUIRED arguments and set LIBGFLAGS_FOUND to TRUE if
  # all listed variables are TRUE
  INCLUDE(FindPackageHandleStandardArgs)
  FIND_PACKAGE_HANDLE_STANDARD_ARGS(gflags DEFAULT_MSG LIBGFLAGS_LIBRARY LIBGFLAGS_INCLUDE_DIR)
  # cmake module compat
  set(Gflags_FOUND ${GFLAGS_FOUND})
  # compat with some existing FindGflags consumers
  set(LIBGFLAGS_FOUND ${GFLAGS_FOUND})

  # Compat with the gflags CONFIG based detection
  set(gflags_FOUND ${GFLAGS_FOUND})
  set(gflags_INCLUDE_DIR ${LIBGFLAGS_INCLUDE_DIR})
  set(gflags_LIBRARIES ${LIBGFLAGS_LIBRARY})
  set(GFLAGS_TARGET ${LIBGFLAGS_LIBRARY})
  set(gflags_TARGET ${LIBGFLAGS_LIBRARY})

  MARK_AS_ADVANCED(LIBGFLAGS_LIBRARY LIBGFLAGS_INCLUDE_DIR)
endif()

# Compat with the gflags CONFIG based detection
if (LIBGFLAGS_FOUND AND NOT TARGET gflags)
  # Provide an imported `gflags` target for consumers that link against it
  # by name, regardless of how detection above succeeded.
  add_library(gflags UNKNOWN IMPORTED)
  if(TARGET gflags-shared)
    # If the installed gflags CMake package config defines a gflags-shared
    # target but not gflags, just make the gflags target that we define
    # depend on the gflags-shared target.
    target_link_libraries(gflags INTERFACE gflags-shared)
    # Export LIBGFLAGS_LIBRARY as the gflags-shared target in this case.
    set(LIBGFLAGS_LIBRARY gflags-shared)
  else()
    set_target_properties(
      gflags
      PROPERTIES
        IMPORTED_LINK_INTERFACE_LANGUAGES "C"
        IMPORTED_LOCATION "${LIBGFLAGS_LIBRARY}"
        INTERFACE_INCLUDE_DIRECTORIES "${LIBGFLAGS_INCLUDE_DIR}"
    )
  endif()
endif()
CMake
hhvm/build/fbcode_builder/CMake/FindGlog.cmake
# Copyright (c) Facebook, Inc. and its affiliates.

# - Try to find Glog
# Once done, this will define
#
# GLOG_FOUND - system has Glog
# GLOG_INCLUDE_DIRS - the Glog include directories
# GLOG_LIBRARIES - link these to use Glog

include(FindPackageHandleStandardArgs)
include(SelectLibraryConfigurations)

# Look for release and debug variants; GLOG_LIBRARYDIR / GLOG_INCLUDEDIR may
# be provided by the caller as hints.
find_library(GLOG_LIBRARY_RELEASE glog
  PATHS ${GLOG_LIBRARYDIR})
find_library(GLOG_LIBRARY_DEBUG glogd
  PATHS ${GLOG_LIBRARYDIR})

find_path(GLOG_INCLUDE_DIR glog/logging.h
  PATHS ${GLOG_INCLUDEDIR})

# Combines the RELEASE/DEBUG variables above into GLOG_LIBRARY.
select_library_configurations(GLOG)

find_package_handle_standard_args(Glog DEFAULT_MSG
  GLOG_LIBRARY
  GLOG_INCLUDE_DIR)

mark_as_advanced(
  GLOG_LIBRARY
  GLOG_INCLUDE_DIR)

set(GLOG_LIBRARIES ${GLOG_LIBRARY})
set(GLOG_INCLUDE_DIRS ${GLOG_INCLUDE_DIR})

# Provide an imported glog::glog target unless one already exists (e.g. from
# a glog CONFIG package found earlier).
if (NOT TARGET glog::glog)
  add_library(glog::glog UNKNOWN IMPORTED)
  set_target_properties(glog::glog PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${GLOG_INCLUDE_DIRS}")
  set_target_properties(glog::glog PROPERTIES IMPORTED_LINK_INTERFACE_LANGUAGES "C" IMPORTED_LOCATION "${GLOG_LIBRARIES}")
  # glog's headers use gflags; propagate it on the imported target when found.
  find_package(Gflags)
  if(GFLAGS_FOUND)
    message(STATUS "Found gflags as a dependency of glog::glog, include=${LIBGFLAGS_INCLUDE_DIR}, libs=${LIBGFLAGS_LIBRARY}")
    set_target_properties(glog::glog PROPERTIES IMPORTED_LINK_INTERFACE_LIBRARIES ${LIBGFLAGS_LIBRARY})
  endif()
endif()
CMake
hhvm/build/fbcode_builder/CMake/FindGMock.cmake
# Copyright (c) Facebook, Inc. and its affiliates.

# Find libgmock
#
#  LIBGMOCK_DEFINES     - List of defines when using libgmock.
#  LIBGMOCK_INCLUDE_DIR - where to find gmock/gmock.h, etc.
#  LIBGMOCK_LIBRARIES   - List of libraries when using libgmock.
#  LIBGMOCK_FOUND       - True if libgmock found.

IF (LIBGMOCK_INCLUDE_DIR)
  # Already in cache, be silent
  SET(LIBGMOCK_FIND_QUIETLY TRUE)
ENDIF ()

# Prefer the GTest CONFIG package (which also carries gmock targets).
find_package(GTest CONFIG QUIET)
if (TARGET GTest::gmock)
  get_target_property(LIBGMOCK_DEFINES GTest::gtest INTERFACE_COMPILE_DEFINITIONS)
  if (NOT ${LIBGMOCK_DEFINES})
    # Explicitly set to empty string if not found to avoid it being
    # set to NOTFOUND and breaking compilation
    set(LIBGMOCK_DEFINES "")
  endif()
  get_target_property(LIBGMOCK_INCLUDE_DIR GTest::gtest INTERFACE_INCLUDE_DIRECTORIES)
  set(LIBGMOCK_LIBRARIES GTest::gmock_main GTest::gmock GTest::gtest)
  set(LIBGMOCK_FOUND ON)
  message(STATUS "Found gmock via config, defines=${LIBGMOCK_DEFINES}, include=${LIBGMOCK_INCLUDE_DIR}, libs=${LIBGMOCK_LIBRARIES}")
else()
  # CONFIG detection failed; locate the raw headers and libraries instead.
  FIND_PATH(LIBGMOCK_INCLUDE_DIR gmock/gmock.h)

  FIND_LIBRARY(LIBGMOCK_MAIN_LIBRARY_DEBUG NAMES gmock_maind)
  FIND_LIBRARY(LIBGMOCK_MAIN_LIBRARY_RELEASE NAMES gmock_main)
  FIND_LIBRARY(LIBGMOCK_LIBRARY_DEBUG NAMES gmockd)
  FIND_LIBRARY(LIBGMOCK_LIBRARY_RELEASE NAMES gmock)
  FIND_LIBRARY(LIBGTEST_LIBRARY_DEBUG NAMES gtestd)
  FIND_LIBRARY(LIBGTEST_LIBRARY_RELEASE NAMES gtest)

  # gtest requires a threading library on most platforms.
  find_package(Threads REQUIRED)
  INCLUDE(SelectLibraryConfigurations)
  SELECT_LIBRARY_CONFIGURATIONS(LIBGMOCK_MAIN)
  SELECT_LIBRARY_CONFIGURATIONS(LIBGMOCK)
  SELECT_LIBRARY_CONFIGURATIONS(LIBGTEST)

  set(LIBGMOCK_LIBRARIES
    ${LIBGMOCK_MAIN_LIBRARY}
    ${LIBGMOCK_LIBRARY}
    ${LIBGTEST_LIBRARY}
    Threads::Threads
  )

  if(CMAKE_SYSTEM_NAME STREQUAL "Windows")
    # The GTEST_LINKED_AS_SHARED_LIBRARY macro must be set properly on Windows.
    #
    # There isn't currently an easy way to determine if a library was compiled as
    # a shared library on Windows, so just assume we've been built against a
    # shared build of gmock for now.
    SET(LIBGMOCK_DEFINES "GTEST_LINKED_AS_SHARED_LIBRARY=1" CACHE STRING "")
  endif()

  # handle the QUIETLY and REQUIRED arguments and set LIBGMOCK_FOUND to TRUE if
  # all listed variables are TRUE
  INCLUDE(FindPackageHandleStandardArgs)
  FIND_PACKAGE_HANDLE_STANDARD_ARGS(
    GMock
    DEFAULT_MSG
    LIBGMOCK_MAIN_LIBRARY
    LIBGMOCK_LIBRARY
    LIBGTEST_LIBRARY
    LIBGMOCK_LIBRARIES
    LIBGMOCK_INCLUDE_DIR
  )

  MARK_AS_ADVANCED(
    LIBGMOCK_DEFINES
    LIBGMOCK_MAIN_LIBRARY
    LIBGMOCK_LIBRARY
    LIBGTEST_LIBRARY
    LIBGMOCK_LIBRARIES
    LIBGMOCK_INCLUDE_DIR
  )
endif()
CMake
hhvm/build/fbcode_builder/CMake/FindLibEvent.cmake
# Copyright (c) Facebook, Inc. and its affiliates.

# - Find LibEvent (a cross event library)
# This module defines
# LIBEVENT_INCLUDE_DIR, where to find LibEvent headers
# LIBEVENT_LIB, LibEvent libraries
# LibEvent_FOUND, If false, do not try to use libevent

# Extra prefixes to search in addition to the default CMake search paths.
set(LibEvent_EXTRA_PREFIXES /usr/local /opt/local "$ENV{HOME}")
foreach(prefix ${LibEvent_EXTRA_PREFIXES})
  list(APPEND LibEvent_INCLUDE_PATHS "${prefix}/include")
  list(APPEND LibEvent_LIB_PATHS "${prefix}/lib")
endforeach()

# Prefer the CONFIG-mode Libevent package when available.
find_package(Libevent CONFIG QUIET)
if (TARGET event)
  # Re-export the config under our own names

  # Somewhat gross, but some vcpkg installed libevents have a relative
  # `include` path exported into LIBEVENT_INCLUDE_DIRS, which triggers
  # a cmake error because it resolves to the `include` dir within the
  # folly repo, which is not something cmake allows to be in the
  # INTERFACE_INCLUDE_DIRECTORIES.  Thankfully on such a system the
  # actual include directory is already part of the global include
  # directories, so we can just skip it.
  if (NOT "${LIBEVENT_INCLUDE_DIRS}" STREQUAL "include")
    set(LIBEVENT_INCLUDE_DIR ${LIBEVENT_INCLUDE_DIRS})
  else()
    set(LIBEVENT_INCLUDE_DIR)
  endif()

  # Unfortunately, with a bare target name `event`, downstream consumers
  # of the package that depends on `Libevent` located via CONFIG end
  # up exporting just a bare `event` in their libraries.  This is problematic
  # because this in interpreted as just `-levent` with no library path.
  # When libevent is not installed in the default installation prefix
  # this results in linker errors.
  # To resolve this, we ask cmake to lookup the full path to the library
  # and use that instead.
  cmake_policy(PUSH)
  if(POLICY CMP0026)
    # Allow reading the LOCATION property
    cmake_policy(SET CMP0026 OLD)
  endif()
  get_target_property(LIBEVENT_LIB event LOCATION)
  cmake_policy(POP)

  set(LibEvent_FOUND ${Libevent_FOUND})
  if (NOT LibEvent_FIND_QUIETLY)
    message(STATUS "Found libevent from package config include=${LIBEVENT_INCLUDE_DIRS} lib=${LIBEVENT_LIB}")
  endif()
else()
  # No CONFIG package: search the filesystem directly.
  find_path(LIBEVENT_INCLUDE_DIR event.h PATHS ${LibEvent_INCLUDE_PATHS})
  find_library(LIBEVENT_LIB NAMES event PATHS ${LibEvent_LIB_PATHS})

  if (LIBEVENT_LIB AND LIBEVENT_INCLUDE_DIR)
    set(LibEvent_FOUND TRUE)
    set(LIBEVENT_LIB ${LIBEVENT_LIB})
  else ()
    set(LibEvent_FOUND FALSE)
  endif ()

  if (LibEvent_FOUND)
    if (NOT LibEvent_FIND_QUIETLY)
      message(STATUS "Found libevent: ${LIBEVENT_LIB}")
    endif ()
  else ()
    if (LibEvent_FIND_REQUIRED)
      message(FATAL_ERROR "Could NOT find libevent.")
    endif ()
    message(STATUS "libevent NOT found.")
  endif ()

  mark_as_advanced(
    LIBEVENT_LIB
    LIBEVENT_INCLUDE_DIR
  )
endif()
CMake
hhvm/build/fbcode_builder/CMake/FindLibUnwind.cmake
# Copyright (c) Facebook, Inc. and its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. find_path(LIBUNWIND_INCLUDE_DIR NAMES libunwind.h) mark_as_advanced(LIBUNWIND_INCLUDE_DIR) find_library(LIBUNWIND_LIBRARY NAMES unwind) mark_as_advanced(LIBUNWIND_LIBRARY) include(FindPackageHandleStandardArgs) FIND_PACKAGE_HANDLE_STANDARD_ARGS( LIBUNWIND REQUIRED_VARS LIBUNWIND_LIBRARY LIBUNWIND_INCLUDE_DIR) if(LIBUNWIND_FOUND) set(LIBUNWIND_LIBRARIES ${LIBUNWIND_LIBRARY}) set(LIBUNWIND_INCLUDE_DIRS ${LIBUNWIND_INCLUDE_DIR}) endif()
CMake
hhvm/build/fbcode_builder/CMake/FindPCRE.cmake
# Copyright (c) Facebook, Inc. and its affiliates. include(FindPackageHandleStandardArgs) find_path(PCRE_INCLUDE_DIR NAMES pcre.h) find_library(PCRE_LIBRARY NAMES pcre) find_package_handle_standard_args( PCRE DEFAULT_MSG PCRE_LIBRARY PCRE_INCLUDE_DIR ) mark_as_advanced(PCRE_INCLUDE_DIR PCRE_LIBRARY)
CMake
hhvm/build/fbcode_builder/CMake/FindPCRE2.cmake
# Copyright (c) Facebook, Inc. and its affiliates. include(FindPackageHandleStandardArgs) find_path(PCRE2_INCLUDE_DIR NAMES pcre2.h) find_library(PCRE2_LIBRARY NAMES pcre2-8) find_package_handle_standard_args( PCRE2 DEFAULT_MSG PCRE2_LIBRARY PCRE2_INCLUDE_DIR ) set(PCRE2_DEFINES "PCRE2_CODE_UNIT_WIDTH=8") mark_as_advanced(PCRE2_INCLUDE_DIR PCRE2_LIBRARY PCRE2_DEFINES)
CMake
hhvm/build/fbcode_builder/CMake/FindRe2.cmake
# Copyright (c) Facebook, Inc. and its affiliates. # # This software may be used and distributed according to the terms of the # GNU General Public License version 2. find_library(RE2_LIBRARY re2) mark_as_advanced(RE2_LIBRARY) find_path(RE2_INCLUDE_DIR NAMES re2/re2.h) mark_as_advanced(RE2_INCLUDE_DIR) include(FindPackageHandleStandardArgs) FIND_PACKAGE_HANDLE_STANDARD_ARGS( RE2 REQUIRED_VARS RE2_LIBRARY RE2_INCLUDE_DIR) if(RE2_FOUND) set(RE2_LIBRARY ${RE2_LIBRARY}) set(RE2_INCLUDE_DIR, ${RE2_INCLUDE_DIR}) endif()
CMake
hhvm/build/fbcode_builder/CMake/FindSodium.cmake
# Written in 2016 by Henrik Steffen Gaßmann <[email protected]>
#
# To the extent possible under law, the author(s) have dedicated all
# copyright and related and neighboring rights to this software to the
# public domain worldwide. This software is distributed without any warranty.
#
# You should have received a copy of the CC0 Public Domain Dedication
# along with this software. If not, see
#
#     http://creativecommons.org/publicdomain/zero/1.0/
#
########################################################################
# Tries to find the local libsodium installation.
#
# On Windows the sodium_DIR environment variable is used as a default
# hint which can be overridden by setting the corresponding cmake variable.
#
# Once done the following variables will be defined:
#
#   sodium_FOUND
#   sodium_INCLUDE_DIR
#   sodium_LIBRARY_DEBUG
#   sodium_LIBRARY_RELEASE
#
# Furthermore an imported "sodium" target is created.
#

if (CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_C_COMPILER_ID STREQUAL "Clang")
    set(_GCC_COMPATIBLE 1)
endif()

# static library option
if (NOT DEFINED sodium_USE_STATIC_LIBS)
    option(sodium_USE_STATIC_LIBS "enable to statically link against sodium" OFF)
endif()
# When the static/shared choice flips between runs, the cached find results
# no longer match the requested link type, so drop them and search again.
if(NOT (sodium_USE_STATIC_LIBS EQUAL sodium_USE_STATIC_LIBS_LAST))
    unset(sodium_LIBRARY CACHE)
    unset(sodium_LIBRARY_DEBUG CACHE)
    unset(sodium_LIBRARY_RELEASE CACHE)
    unset(sodium_DLL_DEBUG CACHE)
    unset(sodium_DLL_RELEASE CACHE)
    set(sodium_USE_STATIC_LIBS_LAST ${sodium_USE_STATIC_LIBS} CACHE INTERNAL "internal change tracking variable")
endif()

########################################################################
# UNIX
if (UNIX)
    # Use pkg-config hints when available.
    find_package(PkgConfig QUIET)
    if (PKG_CONFIG_FOUND)
        pkg_check_modules(sodium_PKG QUIET libsodium)
    endif()

    if(sodium_USE_STATIC_LIBS)
        # Prepend "lib<name>.a" spellings so find_library prefers the
        # static archive over the shared library.
        foreach(_libname ${sodium_PKG_STATIC_LIBRARIES})
            if (NOT _libname MATCHES "^lib.*\\.a$") # ignore strings already ending with .a
                list(INSERT sodium_PKG_STATIC_LIBRARIES 0 "lib${_libname}.a")
            endif()
        endforeach()
        list(REMOVE_DUPLICATES sodium_PKG_STATIC_LIBRARIES)

        # if pkgconfig for libsodium doesn't provide
        # static lib info, then override PKG_STATIC here..
        if (NOT sodium_PKG_STATIC_FOUND)
            set(sodium_PKG_STATIC_LIBRARIES libsodium.a)
        endif()

        set(XPREFIX sodium_PKG_STATIC)
    else()
        if (NOT sodium_PKG_FOUND)
            set(sodium_PKG_LIBRARIES sodium)
        endif()

        set(XPREFIX sodium_PKG)
    endif()

    find_path(sodium_INCLUDE_DIR sodium.h
        HINTS ${${XPREFIX}_INCLUDE_DIRS}
    )
    find_library(sodium_LIBRARY_DEBUG
        NAMES ${${XPREFIX}_LIBRARIES}
        HINTS ${${XPREFIX}_LIBRARY_DIRS}
    )
    find_library(sodium_LIBRARY_RELEASE
        NAMES ${${XPREFIX}_LIBRARIES}
        HINTS ${${XPREFIX}_LIBRARY_DIRS}
    )

########################################################################
# Windows
elseif (WIN32)
    set(sodium_DIR "$ENV{sodium_DIR}" CACHE FILEPATH "sodium install directory")
    mark_as_advanced(sodium_DIR)

    find_path(sodium_INCLUDE_DIR sodium.h
        HINTS ${sodium_DIR}
        PATH_SUFFIXES include
    )

    if (MSVC)
        # detect target architecture by compiling a probe file whose #error
        # output names the architecture
        file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/arch.cpp" [=[
#if defined _M_IX86
#error ARCH_VALUE x86_32
#elif defined _M_X64
#error ARCH_VALUE x86_64
#endif
#error ARCH_VALUE unknown
]=])
        try_compile(_UNUSED_VAR "${CMAKE_CURRENT_BINARY_DIR}"
            "${CMAKE_CURRENT_BINARY_DIR}/arch.cpp"
            OUTPUT_VARIABLE _COMPILATION_LOG
        )
        string(REGEX REPLACE ".*ARCH_VALUE ([a-zA-Z0-9_]+).*" "\\1"
            _TARGET_ARCH "${_COMPILATION_LOG}")

        # construct library path matching the official prebuilt layout:
        # <arch>/<config>/v<vs-version>/<static|dynamic>
        if (_TARGET_ARCH STREQUAL "x86_32")
            string(APPEND _PLATFORM_PATH "Win32")
        elseif(_TARGET_ARCH STREQUAL "x86_64")
            string(APPEND _PLATFORM_PATH "x64")
        else()
            message(FATAL_ERROR "the ${_TARGET_ARCH} architecture is not supported by Findsodium.cmake.")
        endif()
        string(APPEND _PLATFORM_PATH "/$$CONFIG$$")

        if (MSVC_VERSION LESS 1900)
            math(EXPR _VS_VERSION "${MSVC_VERSION} / 10 - 60")
        else()
            math(EXPR _VS_VERSION "${MSVC_VERSION} / 10 - 50")
        endif()
        string(APPEND _PLATFORM_PATH "/v${_VS_VERSION}")

        if (sodium_USE_STATIC_LIBS)
            string(APPEND _PLATFORM_PATH "/static")
        else()
            string(APPEND _PLATFORM_PATH "/dynamic")
        endif()

        string(REPLACE "$$CONFIG$$" "Debug" _DEBUG_PATH_SUFFIX "${_PLATFORM_PATH}")
        string(REPLACE "$$CONFIG$$" "Release" _RELEASE_PATH_SUFFIX "${_PLATFORM_PATH}")

        find_library(sodium_LIBRARY_DEBUG libsodium.lib
            HINTS ${sodium_DIR}
            PATH_SUFFIXES ${_DEBUG_PATH_SUFFIX}
        )
        find_library(sodium_LIBRARY_RELEASE libsodium.lib
            HINTS ${sodium_DIR}
            PATH_SUFFIXES ${_RELEASE_PATH_SUFFIX}
        )
        if (NOT sodium_USE_STATIC_LIBS)
            # Temporarily restrict find_library to .dll so the runtime DLLs
            # themselves can be located (not just the import libraries).
            set(CMAKE_FIND_LIBRARY_SUFFIXES_BCK ${CMAKE_FIND_LIBRARY_SUFFIXES})
            set(CMAKE_FIND_LIBRARY_SUFFIXES ".dll")
            find_library(sodium_DLL_DEBUG libsodium
                HINTS ${sodium_DIR}
                PATH_SUFFIXES ${_DEBUG_PATH_SUFFIX}
            )
            find_library(sodium_DLL_RELEASE libsodium
                HINTS ${sodium_DIR}
                PATH_SUFFIXES ${_RELEASE_PATH_SUFFIX}
            )
            set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES_BCK})
        endif()

    elseif(_GCC_COMPATIBLE)
        if (sodium_USE_STATIC_LIBS)
            find_library(sodium_LIBRARY_DEBUG libsodium.a
                HINTS ${sodium_DIR}
                PATH_SUFFIXES lib
            )
            find_library(sodium_LIBRARY_RELEASE libsodium.a
                HINTS ${sodium_DIR}
                PATH_SUFFIXES lib
            )
        else()
            find_library(sodium_LIBRARY_DEBUG libsodium.dll.a
                HINTS ${sodium_DIR}
                PATH_SUFFIXES lib
            )
            find_library(sodium_LIBRARY_RELEASE libsodium.dll.a
                HINTS ${sodium_DIR}
                PATH_SUFFIXES lib
            )

            file(GLOB _DLL
                LIST_DIRECTORIES false
                RELATIVE "${sodium_DIR}/bin"
                "${sodium_DIR}/bin/libsodium*.dll"
            )
            find_library(sodium_DLL_DEBUG ${_DLL} libsodium
                HINTS ${sodium_DIR}
                PATH_SUFFIXES bin
            )
            find_library(sodium_DLL_RELEASE ${_DLL} libsodium
                HINTS ${sodium_DIR}
                PATH_SUFFIXES bin
            )
        endif()
    else()
        message(FATAL_ERROR "this platform is not supported by FindSodium.cmake")
    endif()

########################################################################
# unsupported
else()
    message(FATAL_ERROR "this platform is not supported by FindSodium.cmake")
endif()

########################################################################
# common stuff

# extract sodium version
if (sodium_INCLUDE_DIR)
    # BUG FIX: this previously read "${_INCLUDE_DIR}/sodium/version.h" —
    # _INCLUDE_DIR is never set anywhere in this module, so the path was
    # always "/sodium/version.h" and version extraction silently failed.
    set(_VERSION_HEADER "${sodium_INCLUDE_DIR}/sodium/version.h")
    # BUG FIX: this previously tested `EXISTS _VERSION_HEADER` (the literal
    # path "_VERSION_HEADER") instead of dereferencing the variable, so the
    # branch could never be taken and sodium_VERSION stayed empty.
    if (EXISTS "${_VERSION_HEADER}")
        file(READ "${_VERSION_HEADER}" _VERSION_HEADER_CONTENT)
        string(REGEX REPLACE ".*#[ \t]*define[ \t]*SODIUM_VERSION_STRING[ \t]*\"([^\n]*)\".*" "\\1"
            sodium_VERSION "${_VERSION_HEADER_CONTENT}")
        # NOTE(review): PARENT_SCOPE kept from the original; find modules are
        # normally included in the caller's own scope, so this propagates the
        # version one directory scope up — confirm this is intentional.
        set(sodium_VERSION "${sodium_VERSION}" PARENT_SCOPE)
    endif()
endif()

# communicate results
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(
    Sodium # The name must be either uppercase or match the filename case.
    REQUIRED_VARS
        sodium_LIBRARY_RELEASE
        sodium_LIBRARY_DEBUG
        sodium_INCLUDE_DIR
    VERSION_VAR
        sodium_VERSION
)

if(Sodium_FOUND)
    set(sodium_LIBRARIES
        optimized ${sodium_LIBRARY_RELEASE} debug ${sodium_LIBRARY_DEBUG})
endif()

# mark file paths as advanced
mark_as_advanced(sodium_INCLUDE_DIR)
mark_as_advanced(sodium_LIBRARY_DEBUG)
mark_as_advanced(sodium_LIBRARY_RELEASE)
if (WIN32)
    mark_as_advanced(sodium_DLL_DEBUG)
    mark_as_advanced(sodium_DLL_RELEASE)
endif()

# create imported target
if(sodium_USE_STATIC_LIBS)
    set(_LIB_TYPE STATIC)
else()
    set(_LIB_TYPE SHARED)
endif()
if(NOT TARGET sodium)
    add_library(sodium ${_LIB_TYPE} IMPORTED)
endif()

set_target_properties(sodium PROPERTIES
    INTERFACE_INCLUDE_DIRECTORIES "${sodium_INCLUDE_DIR}"
    IMPORTED_LINK_INTERFACE_LANGUAGES "C"
)

if (sodium_USE_STATIC_LIBS)
    set_target_properties(sodium PROPERTIES
        INTERFACE_COMPILE_DEFINITIONS "SODIUM_STATIC"
        IMPORTED_LOCATION "${sodium_LIBRARY_RELEASE}"
        IMPORTED_LOCATION_DEBUG "${sodium_LIBRARY_DEBUG}"
    )
else()
    if (UNIX)
        set_target_properties(sodium PROPERTIES
            IMPORTED_LOCATION "${sodium_LIBRARY_RELEASE}"
            IMPORTED_LOCATION_DEBUG "${sodium_LIBRARY_DEBUG}"
        )
    elseif (WIN32)
        # On Windows the import library goes in IMPORTED_IMPLIB and the DLL
        # itself (when found) in the per-configuration IMPORTED_LOCATIONs.
        set_target_properties(sodium PROPERTIES
            IMPORTED_IMPLIB "${sodium_LIBRARY_RELEASE}"
            IMPORTED_IMPLIB_DEBUG "${sodium_LIBRARY_DEBUG}"
        )
        if (NOT (sodium_DLL_DEBUG MATCHES ".*-NOTFOUND"))
            set_target_properties(sodium PROPERTIES
                IMPORTED_LOCATION_DEBUG "${sodium_DLL_DEBUG}"
            )
        endif()
        if (NOT (sodium_DLL_RELEASE MATCHES ".*-NOTFOUND"))
            set_target_properties(sodium PROPERTIES
                IMPORTED_LOCATION_RELWITHDEBINFO "${sodium_DLL_RELEASE}"
                IMPORTED_LOCATION_MINSIZEREL "${sodium_DLL_RELEASE}"
                IMPORTED_LOCATION_RELEASE "${sodium_DLL_RELEASE}"
            )
        endif()
    endif()
endif()