Skip to content

Commit

Permalink
Merge pull request BVLC#167 from BVLC/next
Browse files Browse the repository at this point in the history
So be it.
  • Loading branch information
shelhamer committed Feb 26, 2014
2 parents a9cce84 + 527cfab commit 9da7bcb
Show file tree
Hide file tree
Showing 65 changed files with 2,008 additions and 1,434,280 deletions.
39 changes: 27 additions & 12 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
## General

# Compiled Object files
*.slo
*.lo
Expand All @@ -19,25 +21,38 @@
*.pb.cc
*_pb2.py

# bin files
# Compiled python
*.pyc

# Compiled MATLAB
*.mex
*.mexa64
*.mexmaci64

# build, distribute, and bins
build/*
distribute/*
*.testbin
*.bin

# vim swp files
# Editor temporaries
*.swp

# matlab binary
*.mexa64
*~

# IPython notebook checkpoints
.ipynb_checkpoints

# anything under data/ unless we force include them
data/*

# anything under distribute
distribute/*
## Caffe

# user's specified config
# User's build configuration
Makefile.config
docs/_site

# Models, Data, and Examples are either
# 1. reference, and not casually committed
# 2. custom, and live on their own unless they're deliberately contributed
models/*
data/*
examples/*

# Don't version the generated documentation
docs/_site
28 changes: 22 additions & 6 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@ TEST_SRCS := $(shell find src/$(PROJECT) -name "test_*.cpp")
GTEST_SRC := src/gtest/gtest-all.cpp
# TEST_HDRS are the test header files
TEST_HDRS := $(shell find src/$(PROJECT) -name "test_*.hpp")
# TOOL_SRCS are the source files for the tool binaries
TOOL_SRCS := $(shell find tools -name "*.cpp")
# EXAMPLE_SRCS are the source files for the example binaries
EXAMPLE_SRCS := $(shell find examples -name "*.cpp")
# PROTO_SRCS are the protocol buffer definitions
Expand All @@ -46,16 +48,18 @@ PROTO_GEN_CC := ${PROTO_SRCS:.proto=.pb.cc}
PROTO_GEN_PY := ${PROTO_SRCS:.proto=_pb2.py}
# The objects corresponding to the source files
# These objects will be linked into the final shared library, so we
# exclude the test and example objects.
# exclude the tool, example, and test objects.
CXX_OBJS := $(addprefix $(BUILD_DIR)/, ${CXX_SRCS:.cpp=.o})
CU_OBJS := $(addprefix $(BUILD_DIR)/, ${CU_SRCS:.cu=.cuo})
PROTO_OBJS := $(addprefix $(BUILD_DIR)/, ${PROTO_GEN_CC:.cc=.o})
OBJS := $(PROTO_OBJS) $(CXX_OBJS) $(CU_OBJS)
# program and test objects
# tool, example, and test objects
TOOL_OBJS := $(addprefix $(BUILD_DIR)/, ${TOOL_SRCS:.cpp=.o})
EXAMPLE_OBJS := $(addprefix $(BUILD_DIR)/, ${EXAMPLE_SRCS:.cpp=.o})
TEST_OBJS := $(addprefix $(BUILD_DIR)/, ${TEST_SRCS:.cpp=.o})
GTEST_OBJ := $(addprefix $(BUILD_DIR)/, ${GTEST_SRC:.cpp=.o})
# program and test bins
# tool, example, and test bins
TOOL_BINS := ${TOOL_OBJS:.o=.bin}
EXAMPLE_BINS := ${EXAMPLE_OBJS:.o=.bin}
TEST_BINS := ${TEST_OBJS:.o=.testbin}

Expand Down Expand Up @@ -86,13 +90,14 @@ PYTHON_LDFLAGS := $(LDFLAGS) $(foreach library,$(PYTHON_LIBRARIES),-l$(library))
##############################
# Define build targets
##############################
.PHONY: all init test clean linecount examples py mat distribute py$(PROJECT) mat$(PROJECT) proto
.PHONY: all init test clean linecount tools examples py mat distribute py$(PROJECT) mat$(PROJECT) proto

all: init $(NAME) $(STATIC_NAME) examples
all: init $(NAME) $(STATIC_NAME) tools examples
@echo $(CXX_OBJS)

init:
@ mkdir -p $(foreach obj,$(OBJS),$(dir $(obj)))
@ mkdir -p $(foreach obj,$(TOOL_OBJS),$(dir $(obj)))
@ mkdir -p $(foreach obj,$(EXAMPLE_OBJS),$(dir $(obj)))
@ mkdir -p $(foreach obj,$(TEST_OBJS),$(dir $(obj)))
@ mkdir -p $(foreach obj,$(GTEST_OBJ),$(dir $(obj)))
Expand All @@ -102,6 +107,8 @@ linecount: clean

test: init $(TEST_BINS)

tools: init $(TOOL_BINS)

examples: init $(EXAMPLE_BINS)

py$(PROJECT): py
Expand Down Expand Up @@ -134,6 +141,10 @@ runtest: test
$(TEST_BINS): %.testbin : %.o $(GTEST_OBJ) $(STATIC_NAME) $(TEST_HDRS)
$(CXX) $< $(GTEST_OBJ) $(STATIC_NAME) -o $@ $(CXXFLAGS) $(LDFLAGS) $(WARNINGS)

$(TOOL_BINS): %.bin : %.o $(STATIC_NAME)
$(CXX) $< $(STATIC_NAME) -o $@ $(CXXFLAGS) $(LDFLAGS) $(WARNINGS)
@echo

$(EXAMPLE_BINS): %.bin : %.o $(STATIC_NAME)
$(CXX) $< $(STATIC_NAME) -o $@ $(CXXFLAGS) $(LDFLAGS) $(WARNINGS)
@echo
Expand Down Expand Up @@ -172,6 +183,10 @@ $(BUILD_DIR)/src/$(PROJECT)/util/%.cuo: src/$(PROJECT)/util/%.cu
$(CUDA_DIR)/bin/nvcc $(NVCCFLAGS) $(CUDA_ARCH) -c $< -o $@
@echo

$(BUILD_DIR)/tools/%.o: tools/%.cpp
$(CXX) $< $(CXXFLAGS) -c -o $@ $(LDFLAGS)
@echo

$(BUILD_DIR)/examples/%.o: examples/%.cpp
$(CXX) $< $(CXXFLAGS) -c -o $@ $(LDFLAGS)
@echo
Expand Down Expand Up @@ -201,8 +216,9 @@ distribute: all
mkdir $(DISTRIBUTE_DIR)
# add include
cp -r include $(DISTRIBUTE_DIR)/
# add example binaries
# add tool and example binaries
mkdir $(DISTRIBUTE_DIR)/bin
cp $(TOOL_BINS) $(DISTRIBUTE_DIR)/bin
cp $(EXAMPLE_BINS) $(DISTRIBUTE_DIR)/bin
# add libraries
mkdir $(DISTRIBUTE_DIR)/lib
Expand Down
19 changes: 19 additions & 0 deletions data/cifar10/get_cifar10.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
#!/usr/bin/env sh
# This script downloads the CIFAR10 (binary version) data and unzips it.

# Resolve the directory containing this script so it can be run from anywhere.
# NOTE: uses $0 rather than ${BASH_SOURCE[0]} — BASH_SOURCE is a bashism and
# fails under the POSIX sh declared by the shebang (e.g. dash).
DIR="$( cd "$( dirname "$0" )" && pwd )"
cd "$DIR"

echo "Downloading..."

wget -q http://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz

echo "Unzipping..."

tar -xf cifar-10-binary.tar.gz && rm -f cifar-10-binary.tar.gz
mv cifar-10-batches-bin/* . && rm -rf cifar-10-batches-bin

# Creation is split out because leveldb sometimes causes segfault
# and needs to be re-created.

echo "Done."
12 changes: 0 additions & 12 deletions data/create_mnist.sh

This file was deleted.

20 changes: 20 additions & 0 deletions data/ilsvrc12/get_ilsvrc_aux.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
#!/usr/bin/env sh
#
# N.B. This does not download the ILSVRC12 data set, as it is gargantuan.
# This script downloads the imagenet example auxiliary files including:
# - the ilsvrc12 image mean, binaryproto
# - synset ids and words
# - the training splits with labels

# Resolve the directory containing this script so it can be run from anywhere.
# NOTE: uses $0 rather than ${BASH_SOURCE[0]} — BASH_SOURCE is a bashism and
# fails under the POSIX sh declared by the shebang (e.g. dash).
DIR="$( cd "$( dirname "$0" )" && pwd )"
cd "$DIR"

echo "Downloading..."

wget -q https://www.dropbox.com/s/g5myor4y2scdv95/caffe_ilsvrc12.tar.gz

echo "Unzipping..."

tar -xf caffe_ilsvrc12.tar.gz && rm -f caffe_ilsvrc12.tar.gz

echo "Done."
3 changes: 3 additions & 0 deletions data/get_mnist.sh → data/mnist/get_mnist.sh
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
#!/usr/bin/env sh
# This script downloads the MNIST data and unzips it.

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd $DIR

echo "Downloading..."

wget -q http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz
Expand Down
3 changes: 0 additions & 3 deletions data/train_mnist.sh

This file was deleted.

28 changes: 9 additions & 19 deletions docs/imagenet_pretrained.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,33 +6,23 @@ title: Caffe
Running Pretrained ImageNet
===========================

[View this page as an IPython Notebook](http://nbviewer.ipython.org/url/caffe.berkeleyvision.org/imagenet_pretrained_files/imagenet_pretrained.ipynb)

For easier use of pretrained models, we provide a wrapper specifically written
for the case of ImageNet, so one can take an image and directly compute features
or predictions from them. Both Python and Matlab wrappers are provided. We will
describe the use of the Python wrapper here, and the Matlab wrapper usage is
very similar.

We assume that you have successfully compiled Caffe and set the correct
`PYTHONPATH`. If not, please refer to the [installation
instructions](installation.html). You will use our pre-trained imagenet model,
which you can
[download here](https://www.dropbox.com/s/n3jups0gr7uj0dv/caffe_reference_imagenet_model)
(232.57MB). Note that this pre-trained model is licensed for academic research /
non-commercial use only.
[View this page as an IPython Notebook](http://nbviewer.ipython.org/github/BVLC/caffe/blob/master/examples/imagenet_pretrained.ipynb)

For easier use of pretrained models, we provide a wrapper specifically written for the case of ImageNet, so one can take an image and directly compute features or predictions from them. Both Python and Matlab wrappers are provided. We will describe the use of the Python wrapper here, and the Matlab wrapper usage is very similar.

We assume that you have successfully compiled Caffe and set the correct `PYTHONPATH`. If not, please refer to the [installation instructions](installation.html). You will use our pre-trained imagenet model, which you can download (232.57MB) by running `models/get_caffe_reference_imagenet_model.sh`. Note that this pre-trained model is licensed for academic research / non-commercial use only.

Ready? Let's start.


from caffe import imagenet
from matplotlib import pyplot

# Set the right path to your model file, pretrained model,
# and the image you would like to classify.
MODEL_FILE = 'examples/imagenet_deploy.prototxt'
PRETRAINED = '/home/jiayq/Downloads/caffe_reference_imagenet_model'
IMAGE_FILE = '/home/jiayq/lena.png'
MODEL_FILE = 'models/imagenet.prototxt'
PRETRAINED = 'models/caffe_reference_imagenet_model'
IMAGE_FILE = '/path/to/lena.png'

Loading a network is easy. imagenet.ImagenetClassifier wraps everything. In
default, the classifier will crop the center and corners of an image, as well as
Expand Down
Loading

0 comments on commit 9da7bcb

Please sign in to comment.