Skip to content

Commit

Permalink
Sync coremltools-4.0b1 (apple#731)
Browse files Browse the repository at this point in the history
  • Loading branch information
1duo authored Jun 22, 2020
1 parent 0730319 commit 0d38c5a
Show file tree
Hide file tree
Showing 685 changed files with 117,537 additions and 47,081 deletions.
74 changes: 0 additions & 74 deletions BUILD.md

This file was deleted.

40 changes: 40 additions & 0 deletions BUILDING.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
### Building

To build coremltools from source, you need
[CMake](https://cmake.org) and
[Miniconda](https://docs.conda.io/en/latest/miniconda.html) to configure the
project.

Our makefile & scripts require the **zsh** shell (default shell for macOS
10.16+) installed in `/usr/bin`.

The following targets will handle the development environment for you. If you
need to add packages, edit the reqs/pip files and the auto-environment will
install them automatically.


* `build` | Build coremltools in *debug* mode (include symbols).
* `docs` | Build documentation.
* `clean` | Clean build dir.
* `clean_envs` | Delete all envs created by the scripts.
* `lint` | Linter.
* `proto` | Build coremltools and rebuild MLModel protobuf sources.
* `release` | Setup the package for release, but don’t upload to pypi. Include all wheels from build/dist in the built package.
* `style` | Style checking.
* `test` | Run all tests. Pass TEST_PACKAGES="..." to set which packages to test.
* `test_fast` | Run all fast tests.
* `test_slow` | Run all non-fast tests.
* `wheel` | Build wheels in *release* mode.

By default, we use python 3.7 but you can pass `python=2.7` (or 3.6, 3.8
etc.) as an argument to change the env / build / wheel python version.

*Using an unmanaged developer environment*

Use `make env` to create an auto-set-up development environment with the
correct package dependencies. This env will not be changed by scripts after
creation. However, provided scripts & makefiles do not currently support custom
development environments; rather, they will always auto-activate the managed
environment. Environments are generated and stored at
`envs/coremltools-py<version string>`

50 changes: 22 additions & 28 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,16 @@ if("${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
")
endif()

set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)

# Globally ignore "no symbols" warnings during compilation
SET(CMAKE_CXX_ARCHIVE_CREATE "<CMAKE_AR> Scr <TARGET> <LINK_FLAGS> <OBJECTS>")
if(APPLE)
SET(CMAKE_CXX_ARCHIVE_FINISH "<CMAKE_RANLIB> -no_warning_for_no_symbols -c <TARGET>")
endif()

find_program(HAS_CCACHE ccache)
if(HAS_CCACHE)
set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
Expand All @@ -42,28 +52,10 @@ include_directories(
${PYTHON_INCLUDE_DIRS}
)

set(CMAKE_CXX_FLAGS " \
${CMAKE_CXX_FLAGS} \
--std=c++14 \
")

if(APPLE)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fobjc-arc ")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fobjc-arc")
endif()

set(CMAKE_EXE_LINKER_FLAGS " \
${CMAKE_EXE_LINKER_FLAGS} \
--std=c++14 \
")
set(CMAKE_MODULE_LINKER_FLAGS " \
${CMAKE_MODULE_LINKER_FLAGS} \
--std=c++14 \
")
set(CMAKE_SHARED_LINKER_FLAGS " \
${CMAKE_SHARED_LINKER_FLAGS} \
--std=c++14 \
")

add_library(caffeconverter
SHARED
caffeconverter/CaffeConverterLib.cpp
Expand Down Expand Up @@ -117,18 +109,21 @@ if (APPLE)
set_target_properties(caffeconverter PROPERTIES LINK_FLAGS "-undefined dynamic_lookup")
endif()

file(COPY ${CMAKE_SOURCE_DIR}/README.rst DESTINATION ${CMAKE_BINARY_DIR})
file(COPY ${CMAKE_SOURCE_DIR}/README.md DESTINATION ${CMAKE_BINARY_DIR})
file(COPY ${CMAKE_SOURCE_DIR}/coremltools/__init__.py
DESTINATION ${CMAKE_BINARY_DIR}/coremltools)
file(COPY ${CMAKE_SOURCE_DIR}/coremltools/__main__.py
DESTINATION ${CMAKE_BINARY_DIR}/coremltools)
set(copy_dirs _deps _scripts converters graph_visualization models proto)
file(COPY ${CMAKE_SOURCE_DIR}/coremltools/version.py
DESTINATION ${CMAKE_BINARY_DIR}/coremltools)

set(copy_dirs _deps _scripts converters models proto)
foreach(cdir IN ITEMS ${copy_dirs})
file(COPY ${CMAKE_SOURCE_DIR}/coremltools/${cdir}
DESTINATION ${CMAKE_BINARY_DIR}/coremltools)
endforeach()

if(NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
if(NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
set(_additional_caffeconverter_command COMMAND strip -x ${PROJECT_SOURCE_DIR}/coremltools/libcaffeconverter.so)
endif()

Expand Down Expand Up @@ -174,14 +169,13 @@ if (APPLE AND CORE_VIDEO AND CORE_ML AND FOUNDATION)
${CORE_VIDEO}
${CORE_ML}
${FOUNDATION}
${PYTHON_LIBRARIES}
)

if(APPLE)
set(osx_export_file ${CMAKE_SOURCE_DIR}/coremlpython/exported_symbols_osx.ver)
set_property(TARGET coremlpython APPEND PROPERTY LINK_DEPENDS "${osx_export_file}")
set_property(TARGET coremlpython APPEND_STRING PROPERTY LINK_FLAGS " -Wl,-exported_symbols_list,${osx_export_file} ")

# Allow Python to be found at runtime instead of compile/link time
# This is apparently the default on Linux
set_property(TARGET coremlpython APPEND_STRING PROPERTY LINK_FLAGS "-undefined dynamic_lookup")
Expand All @@ -190,10 +184,10 @@ else()
set(linux_export_file coremlpython/exported_symbols_linux.ver)
set_property(TARGET coremlpython APPEND_STRING PROPERTY LINK_FLAGS " -Wl,--version-script=${linux_export_file} ")
endif()

set_property(TARGET coremlpython APPEND_STRING PROPERTY LINK_FLAGS " -Wl,-dead_strip")

if(NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
if(NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
set(_additional_libcoremlpython_command
COMMAND strip -x ${PROJECT_SOURCE_DIR}/coremltools/libcoremlpython.so
)
Expand All @@ -212,7 +206,7 @@ endif()

set(PYTHON_TAG "cp${PYTHON_VERSION_MAJOR}${PYTHON_VERSION_MINOR}")
if(APPLE)
set(PLAT_NAME "macosx_10_15_intel;macosx_10_14_intel;macosx_10_13_intel;macosx_10_12_intel")
set(PLAT_NAME "macosx_10_16_intel;macosx_10_15_intel;macosx_10_14_intel;macosx_10_13_intel;macosx_10_12_intel")
elseif("${CMAKE_SYSTEM_NAME}" MATCHES "Linux")
set(PLAT_NAME "manylinux1_x86_64")
else()
Expand Down Expand Up @@ -245,7 +239,7 @@ add_custom_target(pip_install_dev
)

add_custom_target(pytest
COMMAND pytest -r fs ${PROJECT_SOURCE_DIR}/coremltools/test/
COMMAND pytest -r fs ${PROJECT_SOURCE_DIR}/coremltools/test/ --timeout=600
DEPENDS pip_install_dev
USES_TERMINAL
)
Expand Down
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ Contribution Guidelines

**Core ML Open Source Community**

The Core ML open source community welcomes all contributions and ideas to grow the product. This can occur within this repo as well as [onnx-coreml](https://github.com/onnx/onnx-coreml) or [tf-coreml](https://github.com/tf-coreml/tf-coreml).
The Core ML open source community welcomes all contributions and ideas to grow the product.

This could be provided in a couple of ways:

Expand Down
2 changes: 1 addition & 1 deletion LICENSE.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
Copyright (c) 2017, Apple Inc. All rights reserved.
Copyright (c) 2020, Apple Inc. All rights reserved.

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

Expand Down
1 change: 1 addition & 0 deletions MANIFEST.in
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
include README.md
110 changes: 22 additions & 88 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,26 @@
Core ML Community Tools
=======================

Core ML is an Apple framework to integrate machine learning models into your
app. Core ML provides a unified representation for all models. Your app uses
Core ML APIs and user data to make predictions, and to fine-tune models, all on
the user’s device. Core ML optimizes on-device performance by leveraging the
CPU, GPU, and Neural Engine while minimizing its memory footprint and power
consumption. Running a model strictly on the user’s device removes any need for
a network connection, which helps keep the user’s data private and your app
responsive.

Core ML community tools contains all supporting tools for Core ML model
conversion, editing and validation. This includes deep learning frameworks like
TensorFlow, Keras, Caffe as well as classical machine learning frameworks like
LIBSVB, scikit-learn, and XGBoost.
TensorFlow, PyTorch, Keras, Caffe as well as classical machine learning
frameworks like LIBSVM, scikit-learn, and XGBoost.

With coremltools, you can do the following:

- Convert trained models from frameworks like TensorFlow and PyTorch to the
Core ML format.
- Read, write, and optimize Core ML models.
- Verify conversion/creation (on macOS) by making predictions using Core ML.

To get the latest version of coremltools:

Expand All @@ -18,90 +34,8 @@ pip install --upgrade coremltools

For the latest changes please see the [release notes](https://github.com/apple/coremltools/releases/).

# Table of Contents

- [Neural network conversion](#Neural-network-conversion)
- [Core ML specification](#Core-ML-specification)
- [coremltools user guide and examples](#user-guide-and-examples)
- [Installation from Source](#Installation)

## Neural Network Conversion

[Link](examples/NeuralNetworkGuide.md) to the detailed NN conversion guide.

There are several `converters` available to translate neural networks trained
in various frameworks into the Core ML model format. Following formats can be
converted to the Core ML `.mlmodel` format through the coremltools python
package (this repo):

- Caffe V1 (`.prototxt`, `.caffemodel` format)
- Keras API (2.2+) (`.h5` format)
- TensorFlow 1 (1.13+) (`.pb` frozen graph def format)
- TensorFlow 2 (`.h5` and `SavedModel` formats)

In addition, there are two more neural network converters built on top of `coremltools`:
- [onnx-coreml](https://github.com/onnx/onnx-coreml): to convert `.onnx` model format. Several frameworks such as PyTorch, MXNet, CaffeV2 etc
provide native export to the ONNX format.
- [tfcoreml](https://github.com/tf-coreml/tf-coreml): to convert TensorFlow models. For producing Core ML models targeting iOS 13 or later,
tfcoreml defers to the TensorFlow converter implemented inside coremltools.
For iOS 12 or earlier, the code path is different and lives entirely in the [tfcoreml](https://github.com/tf-coreml/tf-coreml) package.

To get an overview on how to use the converters and features such as
post-training quantization using coremltools, please see the [neural network
guide](examples/NeuralNetworkGuide.md).

## Core ML Specification

- Core ML specification is fully described in a set of protobuf files.
They are all located in the folder `mlmodel/format/`
- For an overview of the Core ML framework API, see [here](https://developer.apple.com/documentation/coreml).
- To find the list of model types supported by Core ML, see [this](https://github.com/apple/coremltools/blob/1fcac9eb087e20bcc91b41bc938112fa91b4e5a8/mlmodel/format/Model.proto#L229)
portion of the `model.proto` file.
- To find the list of neural network layer types supported see [this](https://github.com/apple/coremltools/blob/1fcac9eb087e20bcc91b41bc938112fa91b4e5a8/mlmodel/format/NeuralNetwork.proto#L472)
portion of the `NeuralNetwork.proto` file.
- Auto-generated documentation for all the protobuf files can be found at this [link](https://apple.github.io/coremltools/coremlspecification/)

## User Guide and Examples

- [API documentation](https://apple.github.io/coremltools)
- [Updatable models](examples/updatable_models)
- [Neural network inference examples](examples/neural_network_inference)
- [Neural network guide](examples/NeuralNetworkGuide.md)
- [Miscellaneous How-to code snippets](examples/APIExamples.md)

## Installation

We recommend using virtualenv to use, install, or build coremltools. Be
sure to install virtualenv using your system pip.

```shell
pip install virtualenv
```

The method for installing `coremltools` follows the
[standard python package installation steps](https://packaging.python.org/installing/).
To create a Python virtual environment called `pythonenv` follow these steps:

```shell
# Create a folder for your virtualenv
mkdir mlvirtualenv
cd mlvirtualenv

# Create a Python virtual environment for your Core ML project
virtualenv pythonenv
```

To activate your new virtual environment and install `coremltools` in this
environment, follow these steps:

```shell
# Activate your virtual environment
source pythonenv/bin/activate


# Install coremltools in the new virtual environment, pythonenv
(pythonenv) pip install -U coremltools
```
# Documentation

The package [documentation](https://apple.github.io/coremltools) contains
more details on how to use coremltools.
* [User Guides and Examples](https://coremltools.readme.io/)
* [Core ML Specification](https://mlmodel.readme.io/)
* [API Reference](https://coremltools.readme.io/reference/convertersconvert)
Loading

0 comments on commit 0d38c5a

Please sign in to comment.